Initial implementation of vault-os

Complete implementation across all 13 phases:

- vault-core: types, YAML frontmatter parsing, entity classification,
  filesystem ops, config, prompt composition, validation, search
- vault-watch: filesystem watcher with daemon write filtering, event
  classification
- vault-scheduler: cron engine, process executor, task runner with
  retry logic and concurrency limiting
- vault-api: Axum REST API (15 route modules), WebSocket with broadcast,
  AI assistant proxy, validation, templates
- Dashboard: React + TypeScript + Tailwind v4 with kanban, CodeMirror
  editor, dynamic view system, AI chat sidebar
- Nix flake with dev shell and NixOS module
- Graceful shutdown, inotify overflow recovery, tracing instrumentation

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Harald Hoyer 2026-03-03 01:21:17 +01:00
commit f820a72b04
123 changed files with 18288 additions and 0 deletions

View file

@ -0,0 +1,24 @@
# Manifest for the vault-api crate: the Axum-based REST + WebSocket server.
[package]
name = "vault-api"
version.workspace = true
edition.workspace = true
# All versions below are managed centrally in the workspace manifest.
[dependencies]
# Sibling workspace crates.
vault-core.workspace = true
vault-watch.workspace = true
vault-scheduler.workspace = true
# HTTP server stack.
axum.workspace = true
tower.workspace = true
tower-http.workspace = true
tokio.workspace = true
# Serialization.
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
# Diagnostics and error derivation.
tracing.workspace = true
thiserror.workspace = true
# Embedded dashboard assets and markdown rendering.
rust-embed.workspace = true
pulldown-cmark.workspace = true
uuid.workspace = true
chrono.workspace = true
# NOTE(review): pinned directly rather than via the workspace like the
# rest — consider hoisting for consistency.
futures-util = "0.3"
reqwest.workspace = true

View file

@ -0,0 +1,44 @@
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use serde_json::json;
/// Error type shared by all API handlers; the `IntoResponse` impl below
/// maps each variant onto an HTTP status code and a JSON error body.
#[derive(Debug, thiserror::Error)]
pub enum ApiError {
    /// 404 — the requested resource does not exist.
    #[error("Not found: {0}")]
    NotFound(String),
    /// 400 — the client sent an invalid request.
    #[error("Bad request: {0}")]
    BadRequest(String),
    /// 500 — unexpected server-side failure.
    #[error("Internal error: {0}")]
    Internal(String),
    /// Errors bubbling up from vault-core (converted automatically via
    /// `#[from]`, so `?` works on `vault_core` results).
    #[error("Vault error: {0}")]
    Vault(#[from] vault_core::VaultError),
}
impl IntoResponse for ApiError {
fn into_response(self) -> Response {
let (status, message) = match &self {
ApiError::NotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
ApiError::BadRequest(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
ApiError::Internal(msg) => (StatusCode::INTERNAL_SERVER_ERROR, msg.clone()),
ApiError::Vault(e) => match e {
vault_core::VaultError::NotFound(msg) => {
(StatusCode::NOT_FOUND, msg.clone())
}
vault_core::VaultError::MissingFrontmatter(p) => {
(StatusCode::BAD_REQUEST, format!("Missing frontmatter: {:?}", p))
}
_ => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
},
};
let body = json!({
"error": message,
"status": status.as_u16(),
});
(status, axum::Json(body)).into_response()
}
}

View file

@ -0,0 +1,28 @@
pub mod error;
pub mod routes;
pub mod state;
pub mod ws;
pub mod ws_protocol;
use axum::Router;
use std::sync::Arc;
use tower_http::cors::{Any, CorsLayer};
use tower_http::trace::TraceLayer;
pub use state::AppState;
/// Assemble the full application router: REST API nested under `/api`,
/// the WebSocket endpoint at `/ws`, plus permissive CORS and HTTP
/// request tracing.
pub fn build_router(state: Arc<AppState>) -> Router {
    // Wide-open CORS (any origin/method/header). NOTE(review): fine for
    // a localhost dashboard; confirm before exposing the API further.
    let cors = CorsLayer::new()
        .allow_origin(Any)
        .allow_methods(Any)
        .allow_headers(Any);
    let api = routes::api_routes();
    Router::new()
        .nest("/api", api)
        .route("/ws", axum::routing::get(ws::ws_handler))
        // Layer order matters: in axum the layer added last is the
        // outermost, so tracing wraps CORS which wraps the routes.
        .layer(cors)
        .layer(TraceLayer::new_for_http())
        .with_state(state)
}

View file

@ -0,0 +1,111 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::filesystem;
use vault_core::types::AgentTask;
/// Build the `/agents` sub-router: listing, detail, and manual trigger.
pub fn routes() -> Router<Arc<AppState>> {
    use axum::routing::post;
    // Registration order is irrelevant to axum's path matching.
    Router::new()
        .route("/agents/{name}/trigger", post(trigger_agent))
        .route("/agents/{name}", get(get_agent))
        .route("/agents", get(list_agents))
}
/// GET /api/agents — summary listing of every registered agent.
async fn list_agents(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let registry = state.agents.read().unwrap();
    let mut list = Vec::with_capacity(registry.len());
    for agent in registry.values() {
        let fm = &agent.frontmatter;
        list.push(json!({
            "name": fm.name,
            "executable": fm.executable,
            "model": fm.model,
            "skills": fm.skills,
            "timeout": fm.timeout,
        }));
    }
    Ok(Json(json!(list)))
}
/// GET /api/agents/{name} — full detail for one agent, including its
/// prompt body.
async fn get_agent(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let registry = state.agents.read().unwrap();
    let Some(agent) = registry.get(&name) else {
        return Err(ApiError::NotFound(format!("Agent '{}' not found", name)));
    };
    let fm = &agent.frontmatter;
    Ok(Json(json!({
        "name": fm.name,
        "executable": fm.executable,
        "model": fm.model,
        "escalate_to": fm.escalate_to,
        "mcp_servers": fm.mcp_servers,
        "skills": fm.skills,
        "timeout": fm.timeout,
        "max_retries": fm.max_retries,
        "env": fm.env,
        "body": agent.body,
    })))
}
/// POST /api/agents/{name}/trigger — enqueue a manually triggered agent
/// task by writing a new file into `todos/agent/queued/`.
async fn trigger_agent(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
    body: Option<Json<Value>>,
) -> Result<Json<Value>, ApiError> {
    // Verify the agent exists; scope the read guard so it is released
    // before any filesystem work happens.
    {
        let registry = state.agents.read().unwrap();
        if !registry.contains_key(&name) {
            return Err(ApiError::NotFound(format!("Agent '{}' not found", name)));
        }
    }
    // Optional `{"context": "..."}` request body becomes the task body.
    let context = match body {
        Some(b) => b
            .get("context")
            .and_then(|c| c.as_str())
            .map(String::from)
            .unwrap_or_default(),
        None => String::new(),
    };
    let title = format!("Manual trigger: {}", name);
    let slug = filesystem::timestamped_slug(&title);
    let task_path = state
        .vault_root
        .join("todos/agent/queued")
        .join(format!("{}.md", slug));
    let task = AgentTask {
        title,
        agent: name,
        priority: vault_core::types::Priority::Medium,
        task_type: Some("manual".into()),
        created: chrono::Utc::now(),
        started: None,
        completed: None,
        retry: 0,
        max_retries: 0,
        input: None,
        output: None,
        error: None,
    };
    let entity = vault_core::entity::VaultEntity {
        path: task_path.clone(),
        frontmatter: task,
        body: context,
    };
    // Register first so the watcher ignores our own file creation.
    state.write_filter.register(task_path.clone());
    vault_core::filesystem::write_entity(&entity).map_err(ApiError::Vault)?;
    Ok(Json(json!({
        "status": "queued",
        "task_path": task_path.strip_prefix(&state.vault_root).unwrap_or(&task_path),
    })))
}

View file

@ -0,0 +1,390 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::State;
use axum::routing::{get, post};
use axum::{Json, Router};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
// --- Types ---
/// Request body for POST /api/assistant/chat.
#[derive(Debug, Deserialize)]
pub struct ChatRequest {
    /// Conversation so far, oldest message first.
    pub messages: Vec<ChatMessage>,
    /// Model override; falls back to the configured default when absent.
    pub model: Option<String>,
    /// Optional path of the file being edited (for context)
    pub file_path: Option<String>,
}
/// One chat turn; `role` is e.g. "system" or "assistant".
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatMessage {
    pub role: String,
    pub content: String,
}
/// Response body for POST /api/assistant/chat.
#[derive(Debug, Serialize)]
pub struct ChatResponse {
    /// The assistant's reply.
    pub message: ChatMessage,
    /// The model that actually served the request.
    pub model: String,
}
/// Request body for POST /api/assistant/apply-diff.
#[derive(Debug, Deserialize)]
pub struct ApplyDiffRequest {
    /// Target file, relative to the vault root.
    pub file_path: String,
    /// Unified diff to apply to that file.
    pub diff: String,
}
/// One entry in the GET /api/assistant/models listing.
#[derive(Debug, Serialize)]
pub struct ModelInfo {
    pub id: String,
    pub name: String,
}
// --- Routes ---
/// Build the `/assistant` sub-router: chat proxy, diff apply, model list.
pub fn routes() -> Router<Arc<AppState>> {
    let actions = Router::new()
        .route("/assistant/chat", post(chat))
        .route("/assistant/apply-diff", post(apply_diff));
    actions.route("/assistant/models", get(list_models))
}
/// POST /api/assistant/chat — proxy chat to configured LLM
///
/// Builds a vault-aware system prompt (optionally embedding the file
/// named in `file_path`), prepends it to the client's messages, and
/// routes the request to Anthropic or an OpenAI-compatible backend
/// based on the model name.
async fn chat(
    State(state): State<Arc<AppState>>,
    Json(req): Json<ChatRequest>,
) -> Result<Json<ChatResponse>, ApiError> {
    // Fall back to the configured default when the client omits a model.
    let model = req
        .model
        .unwrap_or_else(|| state.config.assistant.default_model.clone());
    // Build system prompt with vault context
    let mut system_parts = vec![
        "You are an AI assistant integrated into vault-os, a personal operations platform.".into(),
        "You help the user edit markdown files with YAML frontmatter.".into(),
        "When suggesting changes, output unified diffs that can be applied.".into(),
    ];
    // If a file path is provided, include its content as context.
    // NOTE(review): `file_path` is joined to the vault root without
    // normalization, so a "../" component could read files outside the
    // vault — confirm callers are trusted or add a traversal check.
    if let Some(ref fp) = req.file_path {
        let full = state.vault_root.join(fp);
        // Missing/unreadable files are silently skipped (best-effort context).
        if let Ok(content) = tokio::fs::read_to_string(&full).await {
            system_parts.push(format!("\n--- Current file: {} ---\n{}", fp, content));
        }
    }
    let system_prompt = system_parts.join("\n");
    // Build messages for the LLM: system prompt first, then the client's
    // conversation in its original order.
    let mut messages = vec![ChatMessage {
        role: "system".into(),
        content: system_prompt,
    }];
    messages.extend(req.messages);
    // Determine backend from model string
    let response = if model.starts_with("claude") || model.starts_with("anthropic/") {
        call_anthropic(&state, &model, &messages).await?
    } else {
        // Default: OpenAI-compatible API (works with Ollama, vLLM, LM Studio, etc.)
        call_openai_compatible(&state, &model, &messages).await?
    };
    Ok(Json(ChatResponse {
        message: response,
        model,
    }))
}
/// Call Anthropic Messages API
///
/// Requires the `ANTHROPIC_API_KEY` environment variable (missing key is
/// reported as a 400). The system message is lifted into the dedicated
/// `system` field; the remaining turns go into `messages`.
async fn call_anthropic(
    _state: &AppState,
    model: &str,
    messages: &[ChatMessage],
) -> Result<ChatMessage, ApiError> {
    let api_key = std::env::var("ANTHROPIC_API_KEY")
        .map_err(|_| ApiError::BadRequest("ANTHROPIC_API_KEY not set".into()))?;
    // Extract system message (only the first system turn is used).
    let system = messages
        .iter()
        .find(|m| m.role == "system")
        .map(|m| m.content.clone())
        .unwrap_or_default();
    // All non-system turns are forwarded verbatim.
    let user_messages: Vec<serde_json::Value> = messages
        .iter()
        .filter(|m| m.role != "system")
        .map(|m| {
            serde_json::json!({
                "role": m.role,
                "content": m.content,
            })
        })
        .collect();
    // Accept both "anthropic/<id>" and a bare "<id>" model name.
    let model_id = model.strip_prefix("anthropic/").unwrap_or(model);
    let body = serde_json::json!({
        "model": model_id,
        "max_tokens": 4096,
        "system": system,
        "messages": user_messages,
    });
    // NOTE(review): a fresh reqwest Client per call forgoes connection
    // pooling — consider a shared client if chat volume grows.
    let client = reqwest::Client::new();
    let resp = client
        .post("https://api.anthropic.com/v1/messages")
        .header("x-api-key", &api_key)
        .header("anthropic-version", "2023-06-01")
        .header("content-type", "application/json")
        .json(&body)
        .send()
        .await
        .map_err(|e| ApiError::Internal(format!("Anthropic request failed: {e}")))?;
    // Surface non-2xx responses with the upstream body for debugging.
    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(ApiError::Internal(format!(
            "Anthropic API error {status}: {text}"
        )));
    }
    let json: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| ApiError::Internal(format!("Failed to parse Anthropic response: {e}")))?;
    // Only the first content block's text is used; any additional blocks
    // (e.g. tool-use) are ignored and an empty reply degrades to "".
    let content = json["content"]
        .as_array()
        .and_then(|arr| arr.first())
        .and_then(|block| block["text"].as_str())
        .unwrap_or("")
        .to_string();
    Ok(ChatMessage {
        role: "assistant".into(),
        content,
    })
}
/// Call OpenAI-compatible API (Ollama, vLLM, LM Studio, etc.)
///
/// Unlike the Anthropic path, the system turn is forwarded inline in
/// `messages`, which the OpenAI chat format accepts directly.
async fn call_openai_compatible(
    state: &AppState,
    model: &str,
    messages: &[ChatMessage],
) -> Result<ChatMessage, ApiError> {
    // Check for configured executor base_url, fall back to Ollama default.
    // NOTE(review): `values().find_map` picks an arbitrary executor when
    // several declare a base_url — confirm that is acceptable.
    let base_url = state
        .config
        .executors
        .values()
        .find_map(|e| e.base_url.clone())
        .unwrap_or_else(|| "http://localhost:11434".into());
    // "provider/model" → keep only the trailing model id.
    let model_id = model.split('/').next_back().unwrap_or(model);
    let body = serde_json::json!({
        "model": model_id,
        "messages": messages.iter().map(|m| serde_json::json!({
            "role": m.role,
            "content": m.content,
        })).collect::<Vec<_>>(),
    });
    // NOTE(review): fresh Client per call — no connection pooling.
    let client = reqwest::Client::new();
    let resp = client
        .post(format!("{}/v1/chat/completions", base_url))
        .header("content-type", "application/json")
        .json(&body)
        .send()
        .await
        .map_err(|e| ApiError::Internal(format!("LLM request failed: {e}")))?;
    // Surface non-2xx responses with the upstream body for debugging.
    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(ApiError::Internal(format!(
            "LLM API error {status}: {text}"
        )));
    }
    let json: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| ApiError::Internal(format!("Failed to parse LLM response: {e}")))?;
    // First choice's message content; missing fields degrade to "".
    let content = json["choices"]
        .as_array()
        .and_then(|arr| arr.first())
        .and_then(|choice| choice["message"]["content"].as_str())
        .unwrap_or("")
        .to_string();
    Ok(ChatMessage {
        role: "assistant".into(),
        content,
    })
}
/// POST /api/assistant/apply-diff — apply a unified diff to a file
///
/// Reads the target file, applies the diff in memory, then writes the
/// result back, registering the write so the watcher ignores it.
async fn apply_diff(
    State(state): State<Arc<AppState>>,
    Json(req): Json<ApplyDiffRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // NOTE(review): `file_path` is joined without normalization; a "../"
    // component could escape the vault root — confirm callers are trusted.
    let full_path = state.vault_root.join(&req.file_path);
    if !full_path.exists() {
        return Err(ApiError::NotFound(format!("File not found: {}", req.file_path)));
    }
    let original = tokio::fs::read_to_string(&full_path)
        .await
        .map_err(|e| ApiError::Internal(format!("Failed to read file: {e}")))?;
    // A malformed or non-applicable diff is the client's fault → 400.
    let patched = apply_unified_diff(&original, &req.diff)
        .map_err(|e| ApiError::BadRequest(format!("Failed to apply diff: {e}")))?;
    // Register with write filter to prevent feedback loop
    state.write_filter.register(full_path.clone());
    tokio::fs::write(&full_path, &patched)
        .await
        .map_err(|e| ApiError::Internal(format!("Failed to write file: {e}")))?;
    Ok(Json(serde_json::json!({ "status": "ok", "path": req.file_path })))
}
/// Simple unified diff applier.
///
/// Supports plain `@@ -s,c +s,c @@` hunks with ` `/`-`/`+` body lines and
/// skips `\ No newline at end of file` markers. Returns `Err` when a
/// hunk does not fit inside the file.
///
/// Fixes over the naive version: pure insertions (`old_count == 0`) are
/// spliced *after* the header line as the unified format specifies, and
/// `@@ -0,0 ... @@` (insert into an empty/new file) no longer underflows.
fn apply_unified_diff(original: &str, diff: &str) -> Result<String, String> {
    let mut result_lines: Vec<String> = original.lines().map(String::from).collect();
    // Running drift between new-file and old-file line numbers caused by
    // earlier hunks growing or shrinking the buffer.
    let mut offset: i64 = 0;
    for hunk in parse_hunks(diff) {
        // Headers are 1-based. For a pure insertion the header addresses
        // the line AFTER which to insert, so the splice point is
        // `old_start` itself; otherwise it is `old_start - 1`.
        let signed_start = if hunk.old_count == 0 {
            hunk.old_start as i64 + offset
        } else {
            hunk.old_start as i64 - 1 + offset
        };
        if signed_start < 0 {
            return Err(format!("Hunk at line {} starts before the file", hunk.old_start));
        }
        let start = signed_start as usize;
        let end = start + hunk.old_count;
        if end > result_lines.len() {
            return Err(format!(
                "Hunk at line {} extends beyond file (file has {} lines)",
                hunk.old_start,
                result_lines.len()
            ));
        }
        result_lines.splice(start..end, hunk.new_lines);
        offset += hunk.new_count as i64 - hunk.old_count as i64;
    }
    let mut result = result_lines.join("\n");
    // `lines()` drops the trailing newline; restore it when the original had one.
    if original.ends_with('\n') && !result.ends_with('\n') {
        result.push('\n');
    }
    Ok(result)
}
/// One parsed hunk: where it applies in the old file and what replaces it.
struct Hunk {
    old_start: usize, // 1-based old-file start line (0 = insert at top)
    old_count: usize, // old-file lines consumed
    new_count: usize, // replacement lines produced
    new_lines: Vec<String>,
}
/// Parse every hunk out of a unified diff body. Lines outside hunks
/// (`---`/`+++` headers, stray text) are ignored.
fn parse_hunks(diff: &str) -> Vec<Hunk> {
    let mut hunks = Vec::new();
    let mut lines = diff.lines().peekable();
    while let Some(line) = lines.next() {
        if !line.starts_with("@@") {
            continue;
        }
        // Parse @@ -old_start,old_count +new_start,new_count @@
        let Some((old_total, new_total, old_start)) = parse_hunk_header(line) else {
            continue;
        };
        let mut consumed_old = 0;
        let mut consumed_new = 0;
        let mut new_lines = Vec::new();
        // Consume body lines until both sides of the hunk are satisfied.
        while consumed_old < old_total || consumed_new < new_total {
            match lines.next() {
                // "\ No newline at end of file" is metadata: it counts
                // toward neither side and contributes no content.
                Some(l) if l.starts_with('\\') => {}
                Some(l) if l.starts_with('-') => {
                    consumed_old += 1;
                }
                Some(l) if l.starts_with('+') => {
                    new_lines.push(l[1..].to_string());
                    consumed_new += 1;
                }
                Some(l) => {
                    // Context line (leading space, or bare from sloppy diffs).
                    let content = l.strip_prefix(' ').unwrap_or(l);
                    new_lines.push(content.to_string());
                    consumed_old += 1;
                    consumed_new += 1;
                }
                None => break, // truncated diff: apply what we have
            }
        }
        let new_count = new_lines.len();
        hunks.push(Hunk {
            old_start,
            old_count: old_total,
            new_count,
            new_lines,
        });
    }
    hunks
}
/// Parse "@@ -start,count +start,count @@" returning
/// `(old_count, new_count, old_start)`, or `None` if malformed.
/// Trailing section text after the closing "@@" is tolerated.
fn parse_hunk_header(line: &str) -> Option<(usize, usize, usize)> {
    let stripped = line.trim_start_matches("@@").trim_end_matches("@@").trim();
    let mut parts = stripped.split_whitespace();
    let old_part = parts.next()?.trim_start_matches('-');
    let new_part = parts.next()?.trim_start_matches('+');
    let (old_start, old_count) = parse_range(old_part)?;
    let (_new_start, new_count) = parse_range(new_part)?;
    Some((old_count, new_count, old_start))
}
/// Parse a "start,count" range; a bare "start" implies count = 1.
fn parse_range(s: &str) -> Option<(usize, usize)> {
    match s.split_once(',') {
        Some((start, count)) => Some((start.parse().ok()?, count.parse().ok()?)),
        None => Some((s.parse().ok()?, 1)),
    }
}
/// GET /api/assistant/models — list available models from config
///
/// The configured default model is guaranteed to appear (inserted at the
/// front, labelled "(default)") even if missing from the model list.
async fn list_models(State(state): State<Arc<AppState>>) -> Json<Vec<ModelInfo>> {
    let cfg = &state.config.assistant;
    let mut models: Vec<ModelInfo> = Vec::with_capacity(cfg.models.len() + 1);
    for m in &cfg.models {
        models.push(ModelInfo {
            id: m.clone(),
            name: m.clone(),
        });
    }
    let default = &cfg.default_model;
    if models.iter().all(|m| m.id != *default) {
        let entry = ModelInfo {
            id: default.clone(),
            name: format!("{} (default)", default),
        };
        models.insert(0, entry);
    }
    Json(models)
}

View file

@ -0,0 +1,127 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::{get, post};
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::filesystem;
use vault_core::types::CronJob;
/// Build the `/crons` sub-router: listing plus trigger/pause/resume actions.
pub fn routes() -> Router<Arc<AppState>> {
    let listing = Router::new().route("/crons", get(list_crons));
    let actions = Router::new()
        .route("/crons/{name}/trigger", post(trigger_cron))
        .route("/crons/{name}/pause", post(pause_cron))
        .route("/crons/{name}/resume", post(resume_cron));
    listing.merge(actions)
}
/// GET /api/crons — every cron job from both `active/` and `paused/`,
/// with unreadable files logged and skipped.
async fn list_crons(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let mut crons = Vec::new();
    for subdir in &["active", "paused"] {
        let dir = state.vault_root.join("crons").join(subdir);
        for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
            let entity = match filesystem::read_entity::<CronJob>(&file) {
                Ok(e) => e,
                Err(e) => {
                    tracing::warn!(path = ?file, error = %e, "Failed to read cron");
                    continue;
                }
            };
            let fm = &entity.frontmatter;
            crons.push(json!({
                "name": file.file_stem().and_then(|s| s.to_str()),
                "title": fm.title,
                "schedule": fm.schedule,
                "agent": fm.agent,
                // Only crons in active/ with the enabled flag count as enabled.
                "enabled": *subdir == "active" && fm.enabled,
                "status": subdir,
                "last_run": fm.last_run,
                "last_status": fm.last_status,
                "next_run": fm.next_run,
                "run_count": fm.run_count,
            }));
        }
    }
    Ok(Json(json!(crons)))
}
/// POST /api/crons/{name}/trigger — fire an active cron immediately via
/// the cron engine, which creates the corresponding agent task.
async fn trigger_cron(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Only crons living in `crons/active/` can be fired manually.
    let cron_path = state
        .vault_root
        .join("crons/active")
        .join(format!("{}.md", name));
    if !cron_path.exists() {
        return Err(ApiError::NotFound(format!("Cron '{}' not found in active/", name)));
    }
    // std Mutex guard; no `.await` occurs while it is held.
    let mut engine = state.cron_engine.lock().unwrap();
    let task_path = engine
        .fire_cron(&cron_path, &state.write_filter)
        .map_err(|e| ApiError::Internal(e.to_string()))?;
    Ok(Json(json!({
        "status": "fired",
        // Report the created task path relative to the vault root.
        "task_path": task_path.strip_prefix(&state.vault_root).unwrap_or(&task_path),
    })))
}
/// POST /api/crons/{name}/pause — move a cron file from `active/` to
/// `paused/` and drop its entry from the scheduler.
async fn pause_cron(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let from = state
        .vault_root
        .join("crons/active")
        .join(format!("{}.md", name));
    let to = state
        .vault_root
        .join("crons/paused")
        .join(format!("{}.md", name));
    if !from.exists() {
        return Err(ApiError::NotFound(format!("Cron '{}' not found in active/", name)));
    }
    // Register the destination so the watcher ignores our own move.
    // NOTE(review): only `to` is registered — confirm the watcher also
    // suppresses the removal event for `from`, or register it too.
    state.write_filter.register(to.clone());
    filesystem::move_file(&from, &to).map_err(ApiError::Vault)?;
    // Remove the scheduled entry under its former (active/) path.
    let mut engine = state.cron_engine.lock().unwrap();
    engine.remove_cron(&from);
    Ok(Json(json!({ "status": "paused" })))
}
/// POST /api/crons/{name}/resume — move a cron file from `paused/` back
/// to `active/` and re-register it with the scheduler.
async fn resume_cron(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let from = state
        .vault_root
        .join("crons/paused")
        .join(format!("{}.md", name));
    let to = state
        .vault_root
        .join("crons/active")
        .join(format!("{}.md", name));
    if !from.exists() {
        return Err(ApiError::NotFound(format!("Cron '{}' not found in paused/", name)));
    }
    // Register the destination so the watcher ignores our own move.
    state.write_filter.register(to.clone());
    filesystem::move_file(&from, &to).map_err(ApiError::Vault)?;
    let mut engine = state.cron_engine.lock().unwrap();
    // Scheduling failure is logged but not fatal; the file is already in
    // active/. NOTE(review): in that case the cron appears active but is
    // not scheduled — confirm this is acceptable.
    if let Err(e) = engine.upsert_cron(&to) {
        tracing::warn!(error = %e, "Failed to schedule resumed cron");
    }
    Ok(Json(json!({ "status": "active" })))
}

View file

@ -0,0 +1,126 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde::Deserialize;
use serde_json::{json, Value};
use std::sync::Arc;
/// Build the raw file-access sub-router: one wildcard path carrying the
/// read/write/patch/delete verbs.
pub fn routes() -> Router<Arc<AppState>> {
    let handlers = get(read_file)
        .put(write_file)
        .patch(patch_file)
        .delete(delete_file);
    Router::new().route("/files/{*path}", handlers)
}
/// GET /api/files/{*path} — read a vault file, splitting YAML
/// frontmatter from the markdown body when present.
async fn read_file(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // NOTE(review): `path` is joined without normalization; a "../"
    // component could escape the vault root — confirm callers are trusted.
    let file_path = state.vault_root.join(&path);
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("File '{}' not found", path)));
    }
    // NOTE(review): blocking std::fs call inside an async handler; fine
    // for small files, but tokio::fs would avoid stalling the executor.
    let content = std::fs::read_to_string(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    // Try to split frontmatter
    if let Ok((yaml, body)) = vault_core::frontmatter::split_frontmatter(&content) {
        // Unparseable YAML degrades to null rather than failing the read.
        let frontmatter: Value = serde_yaml::from_str(yaml).unwrap_or(Value::Null);
        Ok(Json(json!({
            "path": path,
            "frontmatter": frontmatter,
            "body": body,
        })))
    } else {
        // No frontmatter: the whole file is the body.
        Ok(Json(json!({
            "path": path,
            "frontmatter": null,
            "body": content,
        })))
    }
}
/// Request body for PUT /api/files/{*path}: either `raw` (written
/// verbatim) or `frontmatter` + `body` (serialized as a YAML
/// frontmatter document).
#[derive(Deserialize)]
struct WriteFileBody {
    // Frontmatter fields to serialize as YAML (ignored when `raw` is set).
    #[serde(default)]
    frontmatter: Option<Value>,
    // Markdown body (ignored when `raw` is set).
    #[serde(default)]
    body: Option<String>,
    // Complete file content; takes precedence over the other fields.
    #[serde(default)]
    raw: Option<String>,
}
/// PUT /api/files/{*path} — create or overwrite a vault file, creating
/// parent directories as needed.
async fn write_file(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
    Json(data): Json<WriteFileBody>,
) -> Result<Json<Value>, ApiError> {
    // NOTE(review): `path` is joined without normalization; "../" could
    // escape the vault root — confirm callers are trusted.
    let file_path = state.vault_root.join(&path);
    if let Some(parent) = file_path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, parent)))?;
    }
    let content = if let Some(raw) = data.raw {
        raw
    } else {
        let body = data.body.unwrap_or_default();
        if let Some(fm) = data.frontmatter {
            let yaml = serde_yaml::to_string(&fm)
                .map_err(|e| ApiError::Internal(e.to_string()))?;
            // serde_yaml's output ends with '\n', so the closing "---"
            // lands on its own line.
            format!("---\n{}---\n{}", yaml, body)
        } else {
            body
        }
    };
    // Register first so the watcher ignores our own write.
    state.write_filter.register(file_path.clone());
    std::fs::write(&file_path, content)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "written", "path": path })))
}
/// PATCH /api/files/{*path} — apply the given JSON field updates to an
/// existing file's frontmatter via vault-core, leaving the body intact.
async fn patch_file(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
    Json(updates): Json<Value>,
) -> Result<Json<Value>, ApiError> {
    let file_path = state.vault_root.join(&path);
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("File '{}' not found", path)));
    }
    let content = std::fs::read_to_string(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    // Delegate the frontmatter update to vault-core.
    let updated =
        vault_core::frontmatter::update_frontmatter_fields(&content, &file_path, &updates)
            .map_err(ApiError::Vault)?;
    // Register first so the watcher ignores our own write.
    state.write_filter.register(file_path.clone());
    std::fs::write(&file_path, updated)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "patched", "path": path })))
}
/// DELETE /api/files/{*path} — remove a vault file.
///
/// NOTE(review): unlike the write/patch handlers, the deletion is NOT
/// registered with the write filter — confirm whether the watcher
/// should broadcast this removal or treat it as a daemon write.
async fn delete_file(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let file_path = state.vault_root.join(&path);
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("File '{}' not found", path)));
    }
    std::fs::remove_file(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "deleted", "path": path })))
}

View file

@ -0,0 +1,126 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, Query, State};
use axum::routing::get;
use axum::{Json, Router};
use pulldown_cmark::{html, Parser};
use serde::Deserialize;
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::entity::VaultEntity;
use vault_core::filesystem;
use vault_core::types::KnowledgeNote;
/// Build the `/knowledge` sub-router: listing plus per-note fetch.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/knowledge/{*path}", get(get_knowledge))
        .route("/knowledge", get(list_knowledge))
}
/// Query parameters for GET /api/knowledge.
#[derive(Deserialize, Default)]
struct SearchQuery {
    // Case-insensitive substring match against title or raw content.
    #[serde(default)]
    q: Option<String>,
    // Exact-match tag filter.
    #[serde(default)]
    tag: Option<String>,
}
/// GET /api/knowledge — list notes under `knowledge/`, optionally
/// filtered by search text and/or tag.
///
/// NOTE(review): every file is read in full on each request; fine for
/// small vaults, but consider an index if the knowledge base grows.
async fn list_knowledge(
    State(state): State<Arc<AppState>>,
    Query(query): Query<SearchQuery>,
) -> Result<Json<Value>, ApiError> {
    let dir = state.vault_root.join("knowledge");
    let files = filesystem::list_md_files_recursive(&dir).map_err(ApiError::Vault)?;
    let mut notes = Vec::new();
    for file in files {
        // Try parsing with frontmatter
        let content = std::fs::read_to_string(&file)
            .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file)))?;
        // Fall back to the file stem as title when frontmatter is absent
        // or unparseable.
        let (title, tags) = if let Ok(entity) = VaultEntity::<KnowledgeNote>::from_content(file.clone(), &content) {
            (
                entity.frontmatter.title.unwrap_or_else(|| {
                    file.file_stem()
                        .and_then(|s| s.to_str())
                        .unwrap_or("untitled")
                        .to_string()
                }),
                entity.frontmatter.tags,
            )
        } else {
            (
                file.file_stem()
                    .and_then(|s| s.to_str())
                    .unwrap_or("untitled")
                    .to_string(),
                vec![],
            )
        };
        // Apply filters
        // `q` matches the title or anywhere in the raw content
        // (including frontmatter text), case-insensitively.
        if let Some(ref q) = query.q {
            let q_lower = q.to_lowercase();
            if !title.to_lowercase().contains(&q_lower)
                && !content.to_lowercase().contains(&q_lower)
            {
                continue;
            }
        }
        // `tag` must equal one of the note's tags exactly.
        if let Some(ref tag) = query.tag {
            if !tags.iter().any(|t| t == tag) {
                continue;
            }
        }
        let relative = file.strip_prefix(&state.vault_root).unwrap_or(&file);
        notes.push(json!({
            "path": relative,
            "title": title,
            "tags": tags,
        }));
    }
    Ok(Json(json!(notes)))
}
/// GET /api/knowledge/{*path} — fetch one note: parsed frontmatter, raw
/// markdown body, and the body rendered to HTML.
async fn get_knowledge(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // NOTE(review): `path` is joined without normalization; a "../"
    // component could escape the knowledge directory — confirm callers
    // are trusted or add a traversal check.
    let file_path = state.vault_root.join("knowledge").join(&path);
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("Knowledge note '{}' not found", path)));
    }
    // NOTE(review): blocking std::fs read inside an async handler.
    let content = std::fs::read_to_string(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    // Without parseable frontmatter, return the entire file as the body.
    let (frontmatter, body) = if let Ok(entity) = VaultEntity::<KnowledgeNote>::from_content(file_path.clone(), &content) {
        (
            json!({
                "title": entity.frontmatter.title,
                "tags": entity.frontmatter.tags,
                "source": entity.frontmatter.source,
                "created": entity.frontmatter.created,
                "related": entity.frontmatter.related,
            }),
            entity.body,
        )
    } else {
        (json!({}), content.clone())
    };
    // Render markdown to HTML
    let parser = Parser::new(&body);
    let mut html_output = String::new();
    html::push_html(&mut html_output, parser);
    Ok(Json(json!({
        "path": path,
        "frontmatter": frontmatter,
        "body": body,
        "html": html_output,
    })))
}

View file

@ -0,0 +1,36 @@
pub mod agents;
pub mod assistant;
pub mod crons;
pub mod files;
pub mod knowledge;
pub mod skills;
pub mod stats;
pub mod suggest;
pub mod templates;
pub mod todos_agent;
pub mod todos_human;
pub mod tree;
pub mod validate;
pub mod views;
use crate::state::AppState;
use axum::Router;
use std::sync::Arc;
/// Compose every feature module's routes into the single `/api` router.
/// Each module registers its own distinct path prefixes; `merge`
/// flattens them into one route table.
pub fn api_routes() -> Router<Arc<AppState>> {
    Router::new()
        .merge(agents::routes())
        .merge(skills::routes())
        .merge(crons::routes())
        .merge(todos_human::routes())
        .merge(todos_agent::routes())
        .merge(knowledge::routes())
        .merge(files::routes())
        .merge(tree::routes())
        .merge(suggest::routes())
        .merge(stats::routes())
        .merge(views::routes())
        .merge(assistant::routes())
        .merge(validate::routes())
        .merge(templates::routes())
}

View file

@ -0,0 +1,62 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
/// Build the `/skills` sub-router (read-only endpoints).
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/skills/{name}/used-by", get(skill_used_by))
        .route("/skills/{name}", get(get_skill))
        .route("/skills", get(list_skills))
}
/// GET /api/skills — summary listing of every loaded skill.
async fn list_skills(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let registry = state.skills.read().unwrap();
    let mut list = Vec::with_capacity(registry.len());
    for skill in registry.values() {
        let fm = &skill.frontmatter;
        list.push(json!({
            "name": fm.name,
            "description": fm.description,
            "version": fm.version,
        }));
    }
    Ok(Json(json!(list)))
}
/// GET /api/skills/{name} — full detail for one skill, including its body.
async fn get_skill(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let registry = state.skills.read().unwrap();
    let Some(skill) = registry.get(&name) else {
        return Err(ApiError::NotFound(format!("Skill '{}' not found", name)));
    };
    let fm = &skill.frontmatter;
    Ok(Json(json!({
        "name": fm.name,
        "description": fm.description,
        "version": fm.version,
        "requires_mcp": fm.requires_mcp,
        "inputs": fm.inputs,
        "outputs": fm.outputs,
        "body": skill.body,
    })))
}
/// GET /api/skills/{name}/used-by — names of agents referencing this skill.
async fn skill_used_by(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let registry = state.agents.read().unwrap();
    let mut users: Vec<String> = Vec::new();
    for agent in registry.values() {
        if agent.frontmatter.skills.contains(&name) {
            users.push(agent.frontmatter.name.clone());
        }
    }
    Ok(Json(json!(users)))
}

View file

@ -0,0 +1,112 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::State;
use axum::routing::get;
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::filesystem;
/// Build the stats sub-router: dashboard counters, recent activity, and
/// a health probe.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/health", get(health_check))
        .route("/activity", get(get_activity))
        .route("/stats", get(get_stats))
}
/// GET /api/stats — vault-wide counts: registry sizes, task status
/// buckets, knowledge notes, and cumulative runtime counters.
async fn get_stats(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    // Registry sizes; each guard is dropped at the end of its statement.
    let agents_count = state.agents.read().unwrap().len();
    let skills_count = state.skills.read().unwrap().len();
    let crons_scheduled = state.cron_engine.lock().unwrap().scheduled_count();
    // Human task counts per status directory; missing directories count as 0.
    let mut task_counts = serde_json::Map::new();
    for status in &["urgent", "open", "in-progress", "done"] {
        let dir = state.vault_root.join("todos/harald").join(status);
        let count = filesystem::list_md_files(&dir)
            .map(|f| f.len())
            .unwrap_or(0);
        task_counts.insert(status.to_string(), json!(count));
    }
    // Agent task counts per lifecycle stage.
    let mut agent_task_counts = serde_json::Map::new();
    for status in &["queued", "running", "done", "failed"] {
        let dir = state.vault_root.join("todos/agent").join(status);
        let count = filesystem::list_md_files(&dir)
            .map(|f| f.len())
            .unwrap_or(0);
        agent_task_counts.insert(status.to_string(), json!(count));
    }
    // Knowledge notes can be nested, so count recursively.
    let knowledge_count = filesystem::list_md_files_recursive(&state.vault_root.join("knowledge"))
        .map(|f| f.len())
        .unwrap_or(0);
    let runtime_state = state.runtime_state.lock().unwrap();
    Ok(Json(json!({
        "agents": agents_count,
        "skills": skills_count,
        "crons_scheduled": crons_scheduled,
        "human_tasks": task_counts,
        "agent_tasks": agent_task_counts,
        "knowledge_notes": knowledge_count,
        "total_tasks_executed": runtime_state.total_tasks_executed,
        "total_cron_fires": runtime_state.total_cron_fires,
    })))
}
/// GET /api/activity — recently modified vault files, newest first,
/// capped at 50 entries.
///
/// NOTE(review): each area contributes only the *last 20 directory
/// entries* (listing order, not mtime order) before sorting, so a
/// recently modified file early in the listing can be missed — confirm
/// this sampling is acceptable.
async fn get_activity(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    // Collect recently modified files across the vault as activity items
    let mut activity = Vec::new();
    let dirs = [
        ("todos/harald", "human_task"),
        ("todos/agent", "agent_task"),
        ("knowledge", "knowledge"),
    ];
    for (dir, kind) in &dirs {
        if let Ok(files) = filesystem::list_md_files_recursive(&state.vault_root.join(dir)) {
            for file in files.iter().rev().take(20) {
                // Files whose metadata or mtime cannot be read are skipped.
                if let Ok(metadata) = std::fs::metadata(file) {
                    if let Ok(modified) = metadata.modified() {
                        let relative = file.strip_prefix(&state.vault_root).unwrap_or(file);
                        activity.push(json!({
                            "path": relative,
                            "kind": kind,
                            "modified": chrono::DateTime::<chrono::Utc>::from(modified),
                            "name": file.file_stem().and_then(|s| s.to_str()),
                        }));
                    }
                }
            }
        }
    }
    // Sort by modification time, newest first.
    // NOTE(review): compares serialized timestamp strings; assumes
    // chrono's serde output sorts lexicographically in time order —
    // confirm (sub-second precision can vary in width).
    activity.sort_by(|a, b| {
        let a_time = a.get("modified").and_then(|t| t.as_str()).unwrap_or("");
        let b_time = b.get("modified").and_then(|t| t.as_str()).unwrap_or("");
        b_time.cmp(a_time)
    });
    activity.truncate(50);
    Ok(Json(json!(activity)))
}
/// GET /api/health — liveness probe with a few runtime counters.
async fn health_check(State(state): State<Arc<AppState>>) -> Json<Value> {
    // Acquire each lock for the shortest possible scope.
    let total_tasks_executed = state.runtime_state.lock().unwrap().total_tasks_executed;
    let uptime = chrono::Utc::now() - state.startup_time;
    let crons_scheduled = state.cron_engine.lock().unwrap().scheduled_count();
    let agent_count = state.agents.read().unwrap().len();
    Json(json!({
        "status": "ok",
        "version": env!("CARGO_PKG_VERSION"),
        "uptime_secs": uptime.num_seconds(),
        "agents": agent_count,
        "crons_scheduled": crons_scheduled,
        "total_tasks_executed": total_tasks_executed,
    }))
}

View file

@ -0,0 +1,141 @@
use crate::state::AppState;
use axum::extract::{Query, State};
use axum::routing::get;
use axum::{Json, Router};
use serde::Deserialize;
use serde_json::{json, Value};
use std::collections::HashSet;
use std::sync::Arc;
use vault_core::filesystem;
use vault_core::types::{HumanTask, KnowledgeNote};
/// Build the `/suggest` sub-router: autocomplete sources for the dashboard.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/suggest/mcp-servers", get(suggest_mcp_servers))
        .route("/suggest/models", get(suggest_models))
        .route("/suggest/files", get(suggest_files))
        .route("/suggest/labels", get(suggest_labels))
        .route("/suggest/repos", get(suggest_repos))
        .route("/suggest/tags", get(suggest_tags))
        .route("/suggest/skills", get(suggest_skills))
        .route("/suggest/agents", get(suggest_agents))
}
/// GET /api/suggest/agents — known agent names for autocomplete.
async fn suggest_agents(State(state): State<Arc<AppState>>) -> Json<Value> {
    let registry = state.agents.read().unwrap();
    let names: Vec<_> = registry.keys().collect();
    Json(json!(names))
}
/// GET /api/suggest/skills — known skill names for autocomplete.
async fn suggest_skills(State(state): State<Arc<AppState>>) -> Json<Value> {
    let registry = state.skills.read().unwrap();
    let names: Vec<_> = registry.keys().collect();
    Json(json!(names))
}
/// Distinct tags across all knowledge notes, sorted alphabetically.
/// A BTreeSet gives dedup + sorted order in one pass.
async fn suggest_tags(State(state): State<Arc<AppState>>) -> Json<Value> {
    let mut tags = std::collections::BTreeSet::new();
    if let Ok(files) = filesystem::list_md_files_recursive(&state.vault_root.join("knowledge")) {
        for file in files {
            // Unreadable/invalid notes are simply skipped.
            if let Ok(note) = filesystem::read_entity::<KnowledgeNote>(&file) {
                tags.extend(note.frontmatter.tags.iter().cloned());
            }
        }
    }
    Json(json!(tags.into_iter().collect::<Vec<String>>()))
}
/// Distinct `repo` values across all harald tasks, sorted alphabetically.
async fn suggest_repos(State(state): State<Arc<AppState>>) -> Json<Value> {
    let mut repos = std::collections::BTreeSet::new();
    for status in ["urgent", "open", "in-progress", "done"] {
        let dir = state.vault_root.join("todos/harald").join(status);
        if let Ok(files) = filesystem::list_md_files(&dir) {
            for file in files {
                if let Ok(task) = filesystem::read_entity::<HumanTask>(&file) {
                    if let Some(repo) = &task.frontmatter.repo {
                        repos.insert(repo.clone());
                    }
                }
            }
        }
    }
    Json(json!(repos.into_iter().collect::<Vec<String>>()))
}
/// Distinct labels across all harald tasks, sorted alphabetically.
async fn suggest_labels(State(state): State<Arc<AppState>>) -> Json<Value> {
    let mut labels = std::collections::BTreeSet::new();
    for status in ["urgent", "open", "in-progress", "done"] {
        let dir = state.vault_root.join("todos/harald").join(status);
        if let Ok(files) = filesystem::list_md_files(&dir) {
            for file in files {
                if let Ok(task) = filesystem::read_entity::<HumanTask>(&file) {
                    labels.extend(task.frontmatter.labels.iter().cloned());
                }
            }
        }
    }
    Json(json!(labels.into_iter().collect::<Vec<String>>()))
}
/// Query parameters for `/suggest/files`.
#[derive(Deserialize, Default)]
struct FileQuery {
    /// Optional case-insensitive substring filter on the relative path.
    #[serde(default)]
    q: Option<String>,
}
async fn suggest_files(
State(state): State<Arc<AppState>>,
Query(query): Query<FileQuery>,
) -> Json<Value> {
let mut files = Vec::new();
if let Ok(all_files) = filesystem::list_md_files_recursive(&state.vault_root) {
for file in all_files {
if let Ok(relative) = file.strip_prefix(&state.vault_root) {
let rel_str = relative.to_string_lossy().to_string();
// Skip .vault internal files
if rel_str.starts_with(".vault") {
continue;
}
if let Some(ref q) = query.q {
if !rel_str.to_lowercase().contains(&q.to_lowercase()) {
continue;
}
}
files.push(rel_str);
}
}
}
files.sort();
Json(json!(files))
}
/// Model identifiers configured for the AI assistant.
async fn suggest_models(State(state): State<Arc<AppState>>) -> Json<Value> {
    let models = &state.config.assistant.models;
    Json(json!(models))
}
/// Names of all configured MCP servers.
async fn suggest_mcp_servers(State(state): State<Arc<AppState>>) -> Json<Value> {
    let names: Vec<_> = state.config.mcp_servers.keys().collect();
    Json(json!(names))
}

View file

@ -0,0 +1,144 @@
use crate::state::AppState;
use axum::extract::Path;
use axum::routing::get;
use axum::{Json, Router};
use std::sync::Arc;
/// Read-only endpoints exposing the built-in entity templates.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/templates/{name}", get(get_template))
        .route("/templates", get(list_templates))
}
/// Summary entry in the template catalog returned by `GET /templates`.
#[derive(serde::Serialize)]
struct TemplateInfo {
    /// Template identifier, used as the `{name}` path segment.
    name: String,
    /// Human-readable one-line description.
    description: String,
    /// Vault directory new entities of this kind are created under.
    category: String,
}
/// Catalog of available templates as (name, description, category) rows,
/// mapped into `TemplateInfo` records.
async fn list_templates() -> Json<Vec<TemplateInfo>> {
    let catalog: [(&str, &str, &str); 7] = [
        ("agent", "New AI agent definition", "agents"),
        ("skill", "New agent skill", "skills"),
        ("cron", "New cron schedule", "crons"),
        ("human-task", "New human task", "todos/harald"),
        ("agent-task", "New agent task", "todos/agent"),
        ("knowledge", "New knowledge note", "knowledge"),
        ("view-page", "New dashboard view page", "views/pages"),
    ];
    Json(
        catalog
            .into_iter()
            .map(|(name, description, category)| TemplateInfo {
                name: name.into(),
                description: description.into(),
                category: category.into(),
            })
            .collect(),
    )
}
/// Return a skeleton `{frontmatter, body}` document for the named template.
///
/// Unknown template names fall through to an empty document rather than a
/// 404, so the editor always receives something it can render.
async fn get_template(Path(name): Path<String>) -> Json<serde_json::Value> {
    let template = match name.as_str() {
        // Agent definition: the body holds the agent's prompt text.
        "agent" => serde_json::json!({
            "frontmatter": {
                "name": "new-agent",
                "executable": "claude-code",
                "model": "",
                "skills": [],
                "mcp_servers": [],
                "timeout": 600,
                "max_retries": 0,
                "env": {}
            },
            "body": "You are an AI agent.\n\nDescribe your agent's purpose and behavior here.\n"
        }),
        "skill" => serde_json::json!({
            "frontmatter": {
                "name": "new-skill",
                "description": "Describe what this skill does",
                "version": 1,
                "inputs": [],
                "outputs": [],
                "requires_mcp": []
            },
            "body": "## Instructions\n\nDescribe the skill instructions here.\n"
        }),
        // Default schedule is 09:00 daily (standard 5-field cron syntax).
        "cron" => serde_json::json!({
            "frontmatter": {
                "title": "New Cron Job",
                "schedule": "0 9 * * *",
                "agent": "",
                "enabled": true
            },
            "body": "Optional context for the cron job execution.\n"
        }),
        // Timestamps are generated at request time, in RFC 3339 form.
        "human-task" => serde_json::json!({
            "frontmatter": {
                "title": "New Task",
                "priority": "medium",
                "labels": [],
                "created": chrono::Utc::now().to_rfc3339()
            },
            "body": "Task description goes here.\n"
        }),
        "agent-task" => serde_json::json!({
            "frontmatter": {
                "title": "New Agent Task",
                "agent": "",
                "priority": "medium",
                "created": chrono::Utc::now().to_rfc3339(),
                "retry": 0,
                "max_retries": 0
            },
            "body": "Task instructions for the agent.\n"
        }),
        "knowledge" => serde_json::json!({
            "frontmatter": {
                "title": "New Note",
                "tags": [],
                "created": chrono::Utc::now().to_rfc3339()
            },
            "body": "Write your knowledge note here.\n"
        }),
        "view-page" => serde_json::json!({
            "frontmatter": {
                "type": "page",
                "title": "New View",
                "icon": "",
                "route": "/view/new-view",
                "position": 10,
                "layout": "single",
                "regions": {
                    "main": []
                }
            },
            "body": ""
        }),
        // Unknown name: empty template, never an error.
        _ => serde_json::json!({
            "frontmatter": {},
            "body": ""
        }),
    };
    Json(template)
}

View file

@ -0,0 +1,149 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde::Deserialize;
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::entity::VaultEntity;
use vault_core::filesystem;
use vault_core::types::AgentTask;
/// Agent task queue: listing, lookup, and creation.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/todos/agent/{id}", get(get_task))
        .route("/todos/agent", get(list_all).post(create_task))
}
/// List agent tasks from every status column, each tagged with its column.
async fn list_all(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let mut tasks = Vec::new();
    for status in ["queued", "running", "done", "failed"] {
        let dir = state.vault_root.join("todos/agent").join(status);
        for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
            // Unparseable task files are skipped silently.
            if let Ok(entity) = filesystem::read_entity::<AgentTask>(&file) {
                tasks.push(agent_task_to_json(&entity, status));
            }
        }
    }
    Ok(Json(json!(tasks)))
}
/// Fetch one agent task by slug, searching each status column in order.
async fn get_task(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let base = state.vault_root.join("todos/agent");
    for status in ["queued", "running", "done", "failed"] {
        let candidate = base.join(status).join(format!("{}.md", id));
        if !candidate.exists() {
            continue;
        }
        let entity = filesystem::read_entity::<AgentTask>(&candidate).map_err(ApiError::Vault)?;
        return Ok(Json(agent_task_to_json(&entity, status)));
    }
    Err(ApiError::NotFound(format!("Agent task '{}' not found", id)))
}
/// Request body for `POST /todos/agent`.
#[derive(Deserialize)]
struct CreateAgentTaskBody {
    /// Task title; also the basis of the generated file slug.
    title: String,
    /// Name of the agent to run; must match a loaded agent definition.
    agent: String,
    /// "urgent" | "high" | "low"; anything else maps to medium.
    #[serde(default)]
    priority: Option<String>,
    /// Free-form task type, exposed in JSON as `type`.
    #[serde(default, rename = "type")]
    task_type: Option<String>,
    /// Retry budget; defaults to 0 (no retries).
    #[serde(default)]
    max_retries: Option<u32>,
    /// Arbitrary structured input handed to the agent.
    #[serde(default)]
    input: Option<Value>,
    /// Markdown body (task instructions).
    #[serde(default)]
    body: Option<String>,
}
/// Create a new agent task file in `todos/agent/queued`.
///
/// Unknown agents are rejected with 400. The write is registered with the
/// daemon write filter first, so the watcher ignores our own change.
async fn create_task(
    State(state): State<Arc<AppState>>,
    Json(body): Json<CreateAgentTaskBody>,
) -> Result<Json<Value>, ApiError> {
    use vault_core::types::Priority;
    // Guard drops at the end of this statement; no lock is held afterwards.
    let agent_known = state.agents.read().unwrap().contains_key(&body.agent);
    if !agent_known {
        return Err(ApiError::BadRequest(format!(
            "Agent '{}' not found",
            body.agent
        )));
    }
    let priority = match body.priority.as_deref() {
        Some("urgent") => Priority::Urgent,
        Some("high") => Priority::High,
        Some("low") => Priority::Low,
        _ => Priority::Medium,
    };
    let slug = filesystem::timestamped_slug(&body.title);
    let path = state
        .vault_root
        .join("todos/agent/queued")
        .join(format!("{}.md", slug));
    let entity = VaultEntity {
        path: path.clone(),
        frontmatter: AgentTask {
            title: body.title,
            agent: body.agent,
            priority,
            task_type: body.task_type,
            created: chrono::Utc::now(),
            started: None,
            completed: None,
            retry: 0,
            max_retries: body.max_retries.unwrap_or(0),
            input: body.input,
            output: None,
            error: None,
        },
        body: body.body.unwrap_or_default(),
    };
    state.write_filter.register(path.clone());
    filesystem::write_entity(&entity).map_err(ApiError::Vault)?;
    Ok(Json(json!({
        "status": "queued",
        "path": path.strip_prefix(&state.vault_root).unwrap_or(&path),
    })))
}
/// Serialize an agent task (plus its kanban column) for API responses.
/// The task id is the file stem of its path.
fn agent_task_to_json(entity: &VaultEntity<AgentTask>, status: &str) -> Value {
    let fm = &entity.frontmatter;
    let slug = entity
        .path
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or("unknown");
    json!({
        "id": slug,
        "title": fm.title,
        "agent": fm.agent,
        "priority": fm.priority,
        "type": fm.task_type,
        "status": status,
        "created": fm.created,
        "started": fm.started,
        "completed": fm.completed,
        "retry": fm.retry,
        "max_retries": fm.max_retries,
        "input": fm.input,
        "output": fm.output,
        "error": fm.error,
        "body": entity.body,
    })
}

View file

@ -0,0 +1,205 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::{get, patch};
use axum::{Json, Router};
use serde::Deserialize;
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::entity::VaultEntity;
use vault_core::filesystem;
use vault_core::types::HumanTask;
/// CRUD and kanban-move endpoints for human (harald) tasks.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/todos/harald", get(list_all).post(create_task))
        .route("/todos/harald/{status}", get(list_by_status))
        .route(
            "/todos/harald/{status}/{id}",
            axum::routing::delete(delete_task),
        )
        .route("/todos/harald/{status}/{id}/move", patch(move_task))
}
/// All harald tasks across every kanban column, tagged with their column.
async fn list_all(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let mut tasks = Vec::new();
    for status in ["urgent", "open", "in-progress", "done"] {
        let dir = state.vault_root.join("todos/harald").join(status);
        for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
            // Unparseable task files are skipped silently.
            if let Ok(entity) = filesystem::read_entity::<HumanTask>(&file) {
                tasks.push(task_to_json(&entity, status));
            }
        }
    }
    Ok(Json(json!(tasks)))
}
/// List harald tasks in a single kanban column.
///
/// Fix: the column name comes straight from the URL and was joined into a
/// filesystem path unvalidated; it is now checked against the known column
/// set first, so a traversal segment (e.g. an encoded `..`) can never
/// address a directory outside `todos/harald/`.
async fn list_by_status(
    State(state): State<Arc<AppState>>,
    Path(status): Path<String>,
) -> Result<Json<Value>, ApiError> {
    if !["urgent", "open", "in-progress", "done"].contains(&status.as_str()) {
        return Err(ApiError::NotFound(format!("Status '{}' not found", status)));
    }
    let dir = state.vault_root.join("todos/harald").join(&status);
    // A valid column whose directory was never created still 404s.
    if !dir.exists() {
        return Err(ApiError::NotFound(format!("Status '{}' not found", status)));
    }
    let mut tasks = Vec::new();
    for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
        if let Ok(entity) = filesystem::read_entity::<HumanTask>(&file) {
            tasks.push(task_to_json(&entity, &status));
        }
    }
    Ok(Json(json!(tasks)))
}
/// Request body for `POST /todos/harald`.
#[derive(Deserialize)]
struct CreateTaskBody {
    /// Task title; also the basis of the generated file slug.
    title: String,
    /// "urgent" | "high" | "low"; anything else maps to medium.
    #[serde(default)]
    priority: Option<String>,
    /// Free-form labels.
    #[serde(default)]
    labels: Vec<String>,
    /// Optional associated repository.
    #[serde(default)]
    repo: Option<String>,
    /// Optional RFC 3339 due date; unparseable values are dropped.
    #[serde(default)]
    due: Option<String>,
    /// Markdown body (task description).
    #[serde(default)]
    body: Option<String>,
}
/// Create a new harald task; urgent-priority tasks start in the `urgent`
/// column, everything else in `open`. Source is stamped as "dashboard".
async fn create_task(
    State(state): State<Arc<AppState>>,
    Json(body): Json<CreateTaskBody>,
) -> Result<Json<Value>, ApiError> {
    use vault_core::types::Priority;
    let priority = match body.priority.as_deref() {
        Some("urgent") => Priority::Urgent,
        Some("high") => Priority::High,
        Some("low") => Priority::Low,
        _ => Priority::Medium,
    };
    let status_dir = if matches!(priority, Priority::Urgent) {
        "urgent"
    } else {
        "open"
    };
    let slug = filesystem::timestamped_slug(&body.title);
    let path = state
        .vault_root
        .join("todos/harald")
        .join(status_dir)
        .join(format!("{}.md", slug));
    // Due dates must parse as RFC 3339; invalid values are silently dropped.
    let due = body
        .due
        .and_then(|d| chrono::DateTime::parse_from_rfc3339(&d).ok())
        .map(|d| d.with_timezone(&chrono::Utc));
    let entity = VaultEntity {
        path: path.clone(),
        frontmatter: HumanTask {
            title: body.title,
            priority,
            source: Some("dashboard".into()),
            repo: body.repo,
            labels: body.labels,
            created: chrono::Utc::now(),
            due,
        },
        body: body.body.unwrap_or_default(),
    };
    // Register before writing so the watcher ignores our own change.
    state.write_filter.register(path.clone());
    filesystem::write_entity(&entity).map_err(ApiError::Vault)?;
    Ok(Json(json!({
        "status": "created",
        "path": path.strip_prefix(&state.vault_root).unwrap_or(&path),
    })))
}
/// Request body for the kanban move endpoint.
#[derive(Deserialize)]
struct MoveBody {
    /// Target status column to move the task into.
    to: String,
}
/// Move a task between kanban columns by renaming its file.
///
/// Fix: both the current column (URL) and the target column (JSON body)
/// were joined into filesystem paths unvalidated; `body.to` in particular
/// could contain `../..`-style values and move the file anywhere the
/// daemon can write. Both are now checked against the known column set,
/// and the id is rejected if it contains separators or `..`.
async fn move_task(
    State(state): State<Arc<AppState>>,
    Path((status, id)): Path<(String, String)>,
    Json(body): Json<MoveBody>,
) -> Result<Json<Value>, ApiError> {
    let statuses = ["urgent", "open", "in-progress", "done"];
    if !statuses.contains(&status.as_str()) {
        return Err(ApiError::NotFound(format!("Task '{}' not found in {}", id, status)));
    }
    if !statuses.contains(&body.to.as_str()) {
        return Err(ApiError::BadRequest(format!("Invalid target status '{}'", body.to)));
    }
    if id.contains('/') || id.contains('\\') || id.contains("..") {
        return Err(ApiError::BadRequest(format!("Invalid task id '{}'", id)));
    }
    let from = state
        .vault_root
        .join("todos/harald")
        .join(&status)
        .join(format!("{}.md", id));
    if !from.exists() {
        return Err(ApiError::NotFound(format!("Task '{}' not found in {}", id, status)));
    }
    let to = state
        .vault_root
        .join("todos/harald")
        .join(&body.to)
        .join(format!("{}.md", id));
    // Register before the rename so the watcher ignores our own write.
    state.write_filter.register(to.clone());
    filesystem::move_file(&from, &to).map_err(ApiError::Vault)?;
    Ok(Json(json!({
        "status": "moved",
        "from": status,
        "to": body.to,
    })))
}
/// Delete a harald task file.
///
/// Fix: the column and id come from the URL and were joined into a path
/// unvalidated. The column is now checked against the known set and the id
/// is rejected if it contains separators or `..`, so the request cannot
/// delete files outside `todos/harald/`.
async fn delete_task(
    State(state): State<Arc<AppState>>,
    Path((status, id)): Path<(String, String)>,
) -> Result<Json<Value>, ApiError> {
    if !["urgent", "open", "in-progress", "done"].contains(&status.as_str()) {
        return Err(ApiError::NotFound(format!("Task '{}' not found", id)));
    }
    if id.contains('/') || id.contains('\\') || id.contains("..") {
        return Err(ApiError::BadRequest(format!("Invalid task id '{}'", id)));
    }
    let path = state
        .vault_root
        .join("todos/harald")
        .join(&status)
        .join(format!("{}.md", id));
    if !path.exists() {
        return Err(ApiError::NotFound(format!("Task '{}' not found", id)));
    }
    std::fs::remove_file(&path).map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &path)))?;
    Ok(Json(json!({ "status": "deleted" })))
}
/// Serialize a harald task (plus its kanban column) for API responses.
/// The task id is the file stem of its path.
fn task_to_json(entity: &VaultEntity<HumanTask>, status: &str) -> Value {
    let fm = &entity.frontmatter;
    let slug = entity
        .path
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or("unknown");
    json!({
        "id": slug,
        "title": fm.title,
        "priority": fm.priority,
        "status": status,
        "source": fm.source,
        "repo": fm.repo,
        "labels": fm.labels,
        "created": fm.created,
        "due": fm.due,
        "body": entity.body,
    })
}

View file

@ -0,0 +1,93 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::{get, post};
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
/// Vault directory-tree browsing and directory management.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/tree/{*path}", post(create_dir).delete(delete_dir))
        .route("/tree", get(get_tree))
}
async fn get_tree(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
let tree = build_tree(&state.vault_root, &state.vault_root)?;
Ok(Json(tree))
}
/// Build the JSON tree node for `dir`, with paths reported relative to
/// `root`. Hidden entries (names starting with `.`) are skipped; entries
/// are sorted by file name for a stable ordering.
fn build_tree(root: &std::path::Path, dir: &std::path::Path) -> Result<Value, ApiError> {
    Ok(json!({
        "name": dir.file_name().and_then(|n| n.to_str()).unwrap_or("vault"),
        "path": dir.strip_prefix(root).unwrap_or(dir),
        "type": "directory",
        "children": build_children(root, dir)?,
    }))
}

/// Child nodes of `dir`, recursing into subdirectories.
///
/// Fix: the original recursed via `build_tree` and then extracted the
/// `"children"` key, constructing and discarding a wrapper object (and a
/// fallback `json!([])`) per subdirectory; returning the children vector
/// directly avoids that.
fn build_children(root: &std::path::Path, dir: &std::path::Path) -> Result<Vec<Value>, ApiError> {
    let mut entries: Vec<_> = std::fs::read_dir(dir)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, dir)))?
        .filter_map(|e| e.ok())
        .collect();
    entries.sort_by_key(|e| e.file_name());
    let mut children = Vec::new();
    for entry in entries {
        let path = entry.path();
        let name = entry.file_name().to_string_lossy().to_string();
        // Skip hidden files/dirs (includes the .vault internal directory).
        if name.starts_with('.') {
            continue;
        }
        let relative = path.strip_prefix(root).unwrap_or(&path);
        if path.is_dir() {
            children.push(json!({
                "name": name,
                "path": relative,
                "type": "directory",
                "children": build_children(root, &path)?,
            }));
        } else {
            children.push(json!({
                "name": name,
                "path": relative,
                "type": "file",
            }));
        }
    }
    Ok(children)
}
/// Create a directory (and any missing parents) under the vault root.
///
/// Fix: the wildcard path was joined into the vault root unvalidated, so
/// `..` segments or an absolute path could create directories outside the
/// vault. Only plain (`Normal`) path components are accepted now.
async fn create_dir(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    use std::path::Component;
    if std::path::Path::new(&path)
        .components()
        .any(|c| !matches!(c, Component::Normal(_)))
    {
        return Err(ApiError::BadRequest(format!("Invalid path '{}'", path)));
    }
    let dir_path = state.vault_root.join(&path);
    std::fs::create_dir_all(&dir_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &dir_path)))?;
    Ok(Json(json!({ "status": "created", "path": path })))
}
/// Recursively delete a directory under the vault root.
///
/// Fix: the wildcard path was joined into the vault root unvalidated
/// before `remove_dir_all`, so `..` segments or an absolute path could
/// delete arbitrary directories the daemon can write. Only plain
/// (`Normal`) path components are accepted now.
async fn delete_dir(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    use std::path::Component;
    if std::path::Path::new(&path)
        .components()
        .any(|c| !matches!(c, Component::Normal(_)))
    {
        return Err(ApiError::BadRequest(format!("Invalid path '{}'", path)));
    }
    let dir_path = state.vault_root.join(&path);
    if !dir_path.exists() {
        return Err(ApiError::NotFound(format!("Directory '{}' not found", path)));
    }
    std::fs::remove_dir_all(&dir_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &dir_path)))?;
    Ok(Json(json!({ "status": "deleted", "path": path })))
}

View file

@ -0,0 +1,78 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::State;
use axum::routing::post;
use axum::{Json, Router};
use serde::Deserialize;
use std::collections::HashSet;
use std::path::Path;
use std::sync::Arc;
use vault_core::validation;
/// Request body for `POST /validate`.
#[derive(Debug, Deserialize)]
pub struct ValidateRequest {
    /// Relative path within the vault
    pub path: String,
    /// Raw file content to validate (optional; if omitted, reads from disk)
    pub content: Option<String>,
}
/// Single-route router for `POST /validate`.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new().route("/validate", post(validate))
}
/// Validate a vault file: per-file checks on the supplied content (or the
/// on-disk copy when none is given), plus vault-wide reference checks
/// against the currently loaded agent and skill names. `valid` is false
/// only if at least one issue has level "error".
async fn validate(
    State(state): State<Arc<AppState>>,
    Json(req): Json<ValidateRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let relative = Path::new(&req.path);
    let content = match req.content {
        Some(c) => c,
        None => {
            let full = state.vault_root.join(&req.path);
            tokio::fs::read_to_string(&full)
                .await
                .map_err(|e| ApiError::NotFound(format!("File not found: {} ({})", req.path, e)))?
        }
    };
    let mut all_issues: Vec<serde_json::Value> = validation::validate(relative, &content)
        .into_iter()
        .map(|i| serde_json::to_value(i).unwrap_or_default())
        .collect();
    // Cross-file reference issues get the offending entity name attached.
    let agent_names: HashSet<String> = state.agents.read().unwrap().keys().cloned().collect();
    let skill_names: HashSet<String> = state.skills.read().unwrap().keys().cloned().collect();
    for (entity, issue) in
        validation::validate_references(&state.vault_root, &agent_names, &skill_names)
    {
        let mut val = serde_json::to_value(&issue).unwrap_or_default();
        if let Some(obj) = val.as_object_mut() {
            obj.insert("entity".into(), serde_json::Value::String(entity));
        }
        all_issues.push(val);
    }
    let valid = all_issues
        .iter()
        .all(|i| i.get("level").and_then(|l| l.as_str()) != Some("error"));
    Ok(Json(serde_json::json!({
        "path": req.path,
        "issues": all_issues,
        "valid": valid,
    })))
}

View file

@ -0,0 +1,214 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::filesystem;
use vault_core::types::{Notification, ViewDefinition};
/// View-definition CRUD plus notification listing and dismissal.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/notifications", get(list_notifications))
        .route(
            "/notifications/{id}",
            axum::routing::delete(dismiss_notification),
        )
        .route("/views/pages", get(list_pages))
        .route("/views/widgets", get(list_widgets))
        .route("/views/layouts", get(list_layouts))
        .route("/views/{*path}", get(get_view).put(put_view).delete(delete_view))
}
/// List page view definitions from `views/pages`.
async fn list_pages(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    list_view_dir(&state, "views/pages").await
}
/// List widget view definitions from `views/widgets`.
async fn list_widgets(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    list_view_dir(&state, "views/widgets").await
}
/// List layout view definitions from `views/layouts`.
async fn list_layouts(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    list_view_dir(&state, "views/layouts").await
}
/// Read every view definition in `subdir` and return summary metadata,
/// sorted by `position` (missing positions sort last via 999).
async fn list_view_dir(state: &AppState, subdir: &str) -> Result<Json<Value>, ApiError> {
    let dir = state.vault_root.join(subdir);
    let mut views = Vec::new();
    for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
        let entity = match filesystem::read_entity::<ViewDefinition>(&file) {
            Ok(e) => e,
            Err(e) => {
                // Broken definitions are logged and skipped, not fatal.
                tracing::warn!(path = ?file, error = %e, "Failed to read view definition");
                continue;
            }
        };
        let name = file.file_stem().and_then(|s| s.to_str()).unwrap_or("unknown");
        let fm = &entity.frontmatter;
        views.push(json!({
            "name": name,
            "type": fm.view_type,
            "title": fm.title,
            "icon": fm.icon,
            "route": fm.route,
            "position": fm.position,
            "layout": fm.layout,
            "component": fm.component,
            "description": fm.description,
        }));
    }
    views.sort_by_key(|v| v.get("position").and_then(|p| p.as_i64()).unwrap_or(999));
    Ok(Json(json!(views)))
}
/// Read a view definition, splitting YAML frontmatter from the body.
///
/// Fix: the wildcard path was joined under `views/` unvalidated, so `..`
/// segments could read arbitrary files. Only plain (`Normal`) path
/// components are accepted now. Callers may omit the `.md` extension.
async fn get_view(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    use std::path::Component;
    if std::path::Path::new(&path)
        .components()
        .any(|c| !matches!(c, Component::Normal(_)))
    {
        return Err(ApiError::BadRequest(format!("Invalid view path '{}'", path)));
    }
    let file_path = state.vault_root.join("views").join(&path);
    let file_path = if file_path.extension().is_none() {
        file_path.with_extension("md")
    } else {
        file_path
    };
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("View '{}' not found", path)));
    }
    let content = std::fs::read_to_string(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    // Files without parseable frontmatter are returned whole as the body.
    if let Ok((yaml, body)) = vault_core::frontmatter::split_frontmatter(&content) {
        let frontmatter: Value = serde_yaml::from_str(yaml).unwrap_or(Value::Null);
        Ok(Json(json!({
            "path": path,
            "frontmatter": frontmatter,
            "body": body,
        })))
    } else {
        Ok(Json(json!({
            "path": path,
            "frontmatter": null,
            "body": content,
        })))
    }
}
/// Create or overwrite a view definition.
///
/// Accepts either `{"raw": "<full file text>"}` or
/// `{"frontmatter": {...}, "body": "..."}`.
///
/// Fix: the wildcard path was joined under `views/` unvalidated, allowing
/// `..` segments to write arbitrary files the daemon can reach. Only plain
/// (`Normal`) path components are accepted now.
async fn put_view(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
    Json(data): Json<Value>,
) -> Result<Json<Value>, ApiError> {
    use std::path::Component;
    if std::path::Path::new(&path)
        .components()
        .any(|c| !matches!(c, Component::Normal(_)))
    {
        return Err(ApiError::BadRequest(format!("Invalid view path '{}'", path)));
    }
    let file_path = state.vault_root.join("views").join(&path);
    let file_path = if file_path.extension().is_none() {
        file_path.with_extension("md")
    } else {
        file_path
    };
    if let Some(parent) = file_path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, parent)))?;
    }
    let content = if let Some(raw) = data.get("raw").and_then(|r| r.as_str()) {
        raw.to_string()
    } else {
        let body = data.get("body").and_then(|b| b.as_str()).unwrap_or("");
        if let Some(fm) = data.get("frontmatter") {
            let yaml = serde_yaml::to_string(fm).map_err(|e| ApiError::Internal(e.to_string()))?;
            format!("---\n{}---\n{}", yaml, body)
        } else {
            body.to_string()
        }
    };
    // Register before writing so the watcher ignores our own change.
    state.write_filter.register(file_path.clone());
    std::fs::write(&file_path, content)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "saved", "path": path })))
}
/// Delete a view definition file.
///
/// Fix: the wildcard path was joined under `views/` unvalidated, so `..`
/// segments could delete arbitrary files. Only plain (`Normal`) path
/// components are accepted now.
async fn delete_view(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    use std::path::Component;
    if std::path::Path::new(&path)
        .components()
        .any(|c| !matches!(c, Component::Normal(_)))
    {
        return Err(ApiError::BadRequest(format!("Invalid view path '{}'", path)));
    }
    let file_path = state.vault_root.join("views").join(&path);
    let file_path = if file_path.extension().is_none() {
        file_path.with_extension("md")
    } else {
        file_path
    };
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("View '{}' not found", path)));
    }
    std::fs::remove_file(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "deleted", "path": path })))
}
/// List active notifications, garbage-collecting expired ones on the way.
async fn list_notifications(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let dir = state.vault_root.join("views/notifications");
    let now = chrono::Utc::now();
    let mut notifications = Vec::new();
    for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
        let entity = match filesystem::read_entity::<Notification>(&file) {
            Ok(e) => e,
            Err(e) => {
                tracing::warn!(path = ?file, error = %e, "Failed to read notification");
                continue;
            }
        };
        // Expired notifications are deleted best-effort and never surfaced.
        if entity.frontmatter.expires.is_some_and(|exp| exp < now) {
            let _ = std::fs::remove_file(&file);
            continue;
        }
        let id = file.file_stem().and_then(|s| s.to_str()).unwrap_or("unknown");
        notifications.push(json!({
            "id": id,
            "title": entity.frontmatter.title,
            "message": entity.frontmatter.message,
            "level": entity.frontmatter.level,
            "source": entity.frontmatter.source,
            "created": entity.frontmatter.created,
            "expires": entity.frontmatter.expires,
        }));
    }
    Ok(Json(json!(notifications)))
}
/// Delete a notification file by id (404 if it does not exist).
async fn dismiss_notification(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let file = state
        .vault_root
        .join("views/notifications")
        .join(format!("{}.md", id));
    if !file.exists() {
        return Err(ApiError::NotFound(format!("Notification '{}' not found", id)));
    }
    std::fs::remove_file(&file)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file)))?;
    Ok(Json(json!({ "status": "dismissed" })))
}

View file

@ -0,0 +1,111 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, Mutex, RwLock};
use vault_core::config::VaultConfig;
use vault_core::entity::VaultEntity;
use vault_core::filesystem;
use vault_core::types::{Agent, Skill};
use vault_scheduler::cron_engine::CronEngine;
use vault_scheduler::executor::Executor;
use vault_scheduler::executors::process::GenericProcessExecutor;
use vault_scheduler::state::RuntimeState;
use vault_scheduler::task_runner::TaskRunner;
use vault_watch::events::VaultEvent;
use vault_watch::write_filter::DaemonWriteFilter;
/// Shared state for the whole API process.
pub struct AppState {
    /// Path to the vault root directory.
    pub vault_root: PathBuf,
    /// Parsed vault configuration (MCP servers, executors, queue, assistant).
    pub config: VaultConfig,
    /// Cron scheduling engine; locked for mutation.
    pub cron_engine: Mutex<CronEngine>,
    /// Paths the daemon itself is about to write; registered before each
    /// write so the filesystem watcher can skip the daemon's own changes.
    pub write_filter: Arc<DaemonWriteFilter>,
    /// Broadcast channel fanning vault events out to WebSocket clients.
    pub event_tx: tokio::sync::broadcast::Sender<Arc<VaultEvent>>,
    /// Agent definitions keyed by agent name; replaced by `reload_definitions`.
    pub agents: RwLock<HashMap<String, VaultEntity<Agent>>>,
    /// Skill definitions keyed by skill name; replaced by `reload_definitions`.
    pub skills: RwLock<HashMap<String, VaultEntity<Skill>>>,
    /// Persisted runtime counters (e.g. total tasks executed, last startup).
    pub runtime_state: Mutex<RuntimeState>,
    /// Process start time, used for uptime reporting.
    pub startup_time: chrono::DateTime<chrono::Utc>,
    /// Executor shared by every `TaskRunner` this state hands out.
    executor: Arc<dyn Executor>,
    /// Concurrency limit applied to task runners.
    max_parallel: usize,
}
impl AppState {
    /// Build the shared application state.
    ///
    /// Loads persisted runtime counters (falling back to defaults), stamps
    /// the startup timestamp into them, and best-effort saves them back.
    pub fn new(vault_root: PathBuf, config: VaultConfig, max_parallel: usize) -> Self {
        // Broadcast buffer for fanning vault events out to subscribers.
        let (event_tx, _) = tokio::sync::broadcast::channel(256);
        let write_filter = Arc::new(DaemonWriteFilter::new());
        let executor: Arc<dyn Executor> =
            Arc::new(GenericProcessExecutor::new(vault_root.clone()));
        let now = chrono::Utc::now();
        let mut runtime_state = RuntimeState::load(&vault_root).unwrap_or_default();
        runtime_state.last_startup = Some(now);
        // Best-effort persist; a save failure is not fatal at startup.
        let _ = runtime_state.save(&vault_root);
        Self {
            // cron_engine must be initialized before `vault_root` is moved
            // into the struct below, hence the clone here.
            cron_engine: Mutex::new(CronEngine::new(vault_root.clone())),
            vault_root,
            config,
            write_filter,
            event_tx,
            agents: RwLock::new(HashMap::new()),
            skills: RwLock::new(HashMap::new()),
            runtime_state: Mutex::new(runtime_state),
            startup_time: now,
            executor,
            max_parallel,
        }
    }
    /// Create a task runner wired to this state's executor, write filter,
    /// and concurrency limit.
    pub fn task_runner(&self) -> TaskRunner {
        TaskRunner::new(
            self.vault_root.clone(),
            self.max_parallel,
            self.executor.clone(),
            self.write_filter.clone(),
        )
    }
    /// Load all agent and skill definitions from disk.
    ///
    /// Unreadable files are logged and skipped; the in-memory maps are
    /// replaced wholesale once loading finishes.
    pub fn reload_definitions(&self) -> Result<(), vault_core::VaultError> {
        // Load agents
        let agent_files = filesystem::list_md_files(&self.vault_root.join("agents"))?;
        let mut agents = HashMap::new();
        for path in agent_files {
            match filesystem::read_entity::<Agent>(&path) {
                Ok(entity) => {
                    agents.insert(entity.frontmatter.name.clone(), entity);
                }
                Err(e) => {
                    tracing::warn!(path = ?path, error = %e, "Failed to load agent");
                }
            }
        }
        tracing::info!(count = agents.len(), "Loaded agents");
        *self.agents.write().unwrap() = agents;
        // Load skills
        let skill_files =
            filesystem::list_md_files_recursive(&self.vault_root.join("skills"))?;
        let mut skills = HashMap::new();
        for path in skill_files {
            match filesystem::read_entity::<Skill>(&path) {
                Ok(entity) => {
                    skills.insert(entity.frontmatter.name.clone(), entity);
                }
                Err(e) => {
                    tracing::warn!(path = ?path, error = %e, "Failed to load skill");
                }
            }
        }
        tracing::info!(count = skills.len(), "Loaded skills");
        *self.skills.write().unwrap() = skills;
        Ok(())
    }
    /// Publish a vault event to all subscribers; send errors (no active
    /// subscribers) are deliberately ignored.
    pub fn broadcast(&self, event: VaultEvent) {
        let _ = self.event_tx.send(Arc::new(event));
    }
    /// Open a new receiver on the vault event broadcast channel.
    pub fn subscribe(&self) -> tokio::sync::broadcast::Receiver<Arc<VaultEvent>> {
        self.event_tx.subscribe()
    }
}

129
crates/vault-api/src/ws.rs Normal file
View file

@ -0,0 +1,129 @@
use crate::state::AppState;
use crate::ws_protocol::{WsAction, WsEvent};
use axum::extract::ws::{Message, WebSocket};
use axum::extract::{State, WebSocketUpgrade};
use axum::response::Response;
use std::sync::Arc;
/// Upgrade an HTTP request to a WebSocket and hand it to `handle_socket`.
pub async fn ws_handler(
    ws: WebSocketUpgrade,
    State(state): State<Arc<AppState>>,
) -> Response {
    ws.on_upgrade(move |socket| handle_socket(socket, state))
}
async fn handle_socket(socket: WebSocket, state: Arc<AppState>) {
let (mut sender, mut receiver) = socket.split();
use futures_util::{SinkExt, StreamExt};
let mut event_rx = state.subscribe();
// Send task: forward vault events to the client
let send_state = state.clone();
let send_task = tokio::spawn(async move {
while let Ok(event) = event_rx.recv().await {
let ws_event = WsEvent::from_vault_event(&event, &send_state.vault_root);
match serde_json::to_string(&ws_event) {
Ok(json) => {
if sender.send(Message::Text(json.into())).await.is_err() {
break;
}
}
Err(e) => {
tracing::warn!(error = %e, "Failed to serialize WS event");
}
}
}
});
// Receive task: handle client actions
let recv_state = state.clone();
let recv_task = tokio::spawn(async move {
while let Some(msg) = receiver.next().await {
match msg {
Ok(Message::Text(text)) => {
match serde_json::from_str::<WsAction>(&text) {
Ok(action) => handle_action(&recv_state, action).await,
Err(e) => {
tracing::warn!(error = %e, text = %text, "Invalid WS action");
}
}
}
Ok(Message::Close(_)) => break,
Err(e) => {
tracing::debug!(error = %e, "WebSocket error");
break;
}
_ => {}
}
}
});
// Wait for either task to finish
tokio::select! {
_ = send_task => {},
_ = recv_task => {},
}
tracing::debug!("WebSocket connection closed");
}
/// Apply a client-sent WebSocket action against the vault.
///
/// Failures are logged, not reported back — there is no per-action reply.
async fn handle_action(state: &AppState, action: WsAction) {
    match action {
        // Move a task file; register the destination so the watcher skips
        // the daemon's own write.
        // NOTE(review): `from`/`to` are client-supplied relative paths
        // joined without traversal validation — confirm clients are trusted.
        WsAction::MoveTask { from, to } => {
            let from_path = state.vault_root.join(&from);
            let to_path = state.vault_root.join(&to);
            state.write_filter.register(to_path.clone());
            if let Err(e) = vault_core::filesystem::move_file(&from_path, &to_path) {
                tracing::error!(error = %e, "WS move_task failed");
            }
        }
        // Fire a cron definition from crons/active immediately.
        WsAction::TriggerCron { name } => {
            let cron_path = state
                .vault_root
                .join("crons/active")
                .join(format!("{}.md", name));
            let mut engine = state.cron_engine.lock().unwrap();
            if let Err(e) = engine.fire_cron(&cron_path, &state.write_filter) {
                tracing::error!(error = %e, "WS trigger_cron failed");
            }
        }
        // Queue an ad-hoc medium-priority task for the named agent; the
        // optional context becomes the task body.
        WsAction::TriggerAgent { name, context } => {
            let title = format!("WS trigger: {}", name);
            let slug = vault_core::filesystem::timestamped_slug(&title);
            let task_path = state
                .vault_root
                .join("todos/agent/queued")
                .join(format!("{}.md", slug));
            let task = vault_core::types::AgentTask {
                title,
                agent: name,
                priority: vault_core::types::Priority::Medium,
                task_type: Some("ws-trigger".into()),
                created: chrono::Utc::now(),
                started: None,
                completed: None,
                retry: 0,
                max_retries: 0,
                input: None,
                output: None,
                error: None,
            };
            let entity = vault_core::entity::VaultEntity {
                path: task_path.clone(),
                frontmatter: task,
                body: context.unwrap_or_default(),
            };
            state.write_filter.register(task_path.clone());
            if let Err(e) = vault_core::filesystem::write_entity(&entity) {
                tracing::error!(error = %e, "WS trigger_agent failed");
            }
        }
        // Keep-alive; logged and otherwise ignored.
        WsAction::Ping => {
            tracing::debug!("WS ping received");
        }
    }
}

View file

@ -0,0 +1,76 @@
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::path::Path;
use vault_watch::events::VaultEvent;
/// Server -> Client event
#[derive(Debug, Clone, Serialize)]
pub struct WsEvent {
    /// Event kind string (serialized as `type`).
    #[serde(rename = "type")]
    pub event_type: String,
    /// First two components of the vault-relative path, e.g. `todos/harald`.
    pub area: String,
    /// Vault-relative path of the affected file.
    pub path: String,
    /// YAML frontmatter of the file, when it still exists and parses.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub data: Option<Value>,
}
impl WsEvent {
    /// Build a client-facing event from a raw watcher event.
    ///
    /// The path is made vault-relative, `area` is its first two path
    /// components, and — when the file still exists and parses — its YAML
    /// frontmatter is attached as `data`.
    pub fn from_vault_event(event: &VaultEvent, vault_root: &Path) -> Self {
        let path = event.path();
        let relative = path
            .strip_prefix(vault_root)
            .unwrap_or(path)
            .to_string_lossy()
            .into_owned();
        let area: String = relative.split('/').take(2).collect::<Vec<_>>().join("/");
        // A missing or unreadable file simply yields no frontmatter data.
        let data = std::fs::read_to_string(path).ok().and_then(|content| {
            let (yaml, _) = vault_core::frontmatter::split_frontmatter(&content).ok()?;
            serde_yaml::from_str::<Value>(yaml).ok()
        });
        Self {
            event_type: event.event_type().to_string(),
            area,
            path: relative,
            data,
        }
    }
}
/// Client -> Server action
///
/// Deserialized from incoming websocket JSON; the `action` field selects the
/// variant.
#[derive(Debug, Deserialize)]
#[serde(tag = "action")]
pub enum WsAction {
    /// Move a task file between status locations (vault-relative paths).
    #[serde(rename = "move_task")]
    MoveTask {
        from: String,
        to: String,
    },
    /// Fire a cron job immediately by name.
    #[serde(rename = "trigger_cron")]
    TriggerCron {
        name: String,
    },
    /// Queue a task for the named agent, optionally with extra body context.
    #[serde(rename = "trigger_agent")]
    TriggerAgent {
        name: String,
        #[serde(default)]
        context: Option<String>,
    },
    /// Keep-alive message from the client.
    #[serde(rename = "ping")]
    Ping,
}

View file

@ -0,0 +1,14 @@
# vault-core: shared types, frontmatter parsing, filesystem helpers, config,
# prompt composition, validation, and search used by the other vault crates.
[package]
name = "vault-core"
version.workspace = true
edition.workspace = true
# All dependency versions are pinned in the workspace root Cargo.toml.
[dependencies]
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
chrono.workspace = true
thiserror.workspace = true
uuid.workspace = true
tracing.workspace = true
cron.workspace = true

View file

@ -0,0 +1,101 @@
use crate::error::VaultError;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::Path;
/// Root configuration loaded from `.vault/config.yaml`.
///
/// Every section is optional in the YAML; missing sections fall back to
/// their `Default` implementations.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct VaultConfig {
    /// MCP server definitions, keyed by server name.
    #[serde(default)]
    pub mcp_servers: HashMap<String, McpServerConfig>,
    /// Executor backends, keyed by executor name.
    #[serde(default)]
    pub executors: HashMap<String, ExecutorConfig>,
    /// Task-queue tuning (parallelism, timeout, retry delay).
    #[serde(default)]
    pub queue: QueueConfig,
    /// AI assistant settings.
    #[serde(default)]
    pub assistant: AssistantConfig,
}
/// Launch configuration for a single MCP server process.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpServerConfig {
    /// Executable to spawn.
    pub command: String,
    /// Command-line arguments.
    #[serde(default)]
    pub args: Vec<String>,
    /// Extra environment variables for the process.
    #[serde(default)]
    pub env: HashMap<String, String>,
}
/// Configuration for one executor backend.
///
/// NOTE(review): all fields are optional; presumably `command` applies to
/// local-process executors and `base_url`/`default_model` to HTTP backends —
/// confirm against the scheduler's executor code.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutorConfig {
    #[serde(default)]
    pub command: Option<String>,
    #[serde(default)]
    pub base_url: Option<String>,
    #[serde(default)]
    pub default_model: Option<String>,
}
/// Task-queue tuning knobs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueueConfig {
    /// Maximum number of concurrently running tasks (default 4).
    #[serde(default = "default_max_parallel")]
    pub max_parallel: usize,
    /// Per-task timeout in seconds (default 600).
    #[serde(default = "default_timeout")]
    pub default_timeout: u64,
    /// Delay in seconds before retrying a failed task (default 60).
    #[serde(default = "default_retry_delay")]
    pub retry_delay: u64,
}
impl Default for QueueConfig {
fn default() -> Self {
Self {
max_parallel: default_max_parallel(),
default_timeout: default_timeout(),
retry_delay: default_retry_delay(),
}
}
}
/// Serde default for [`QueueConfig::max_parallel`].
fn default_max_parallel() -> usize {
    4
}
/// Serde default for [`QueueConfig::default_timeout`] (seconds).
fn default_timeout() -> u64 {
    600
}
/// Serde default for [`QueueConfig::retry_delay`] (seconds).
fn default_retry_delay() -> u64 {
    60
}
/// AI assistant settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssistantConfig {
    /// Model used when the client does not pick one (default `local/qwen3`).
    #[serde(default = "default_assistant_model")]
    pub default_model: String,
    /// Additional model identifiers offered to the client.
    #[serde(default)]
    pub models: Vec<String>,
}
impl Default for AssistantConfig {
fn default() -> Self {
Self {
default_model: default_assistant_model(),
models: vec![],
}
}
}
/// Serde default for [`AssistantConfig::default_model`].
fn default_assistant_model() -> String {
    String::from("local/qwen3")
}
impl VaultConfig {
    /// Load config from `.vault/config.yaml` in the vault root.
    /// Returns default config if file doesn't exist.
    ///
    /// # Errors
    /// Propagates I/O failures reading the file and YAML parse failures.
    pub fn load(vault_root: &Path) -> Result<Self, VaultError> {
        let config_path = vault_root.join(".vault/config.yaml");
        if !config_path.exists() {
            return Ok(Self::default());
        }
        let raw = std::fs::read_to_string(&config_path)
            .map_err(|e| VaultError::io(e, &config_path))?;
        // `?` converts serde_yaml::Error via the `Yaml` #[from] variant.
        Ok(serde_yaml::from_str(&raw)?)
    }
}

View file

@ -0,0 +1,195 @@
use crate::error::VaultError;
use crate::types::{AgentTaskStatus, TaskStatus};
use serde::{de::DeserializeOwned, Serialize};
use std::path::{Path, PathBuf};
/// A vault entity: parsed frontmatter + markdown body + file path.
#[derive(Debug, Clone)]
pub struct VaultEntity<T> {
    /// Path of the backing markdown file.
    pub path: PathBuf,
    /// Typed, parsed YAML frontmatter.
    pub frontmatter: T,
    /// Markdown body after the closing `---`, preserved byte-for-byte.
    pub body: String,
}
/// The kind of entity inferred from its relative path within the vault.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum EntityKind {
    /// `agents/**`
    Agent,
    /// `skills/**`
    Skill,
    /// `crons/active/**`
    CronActive,
    /// `crons/paused/**`
    CronPaused,
    /// `crons/templates/**`
    CronTemplate,
    /// `todos/harald/<status>/**`, with the status decoded from the directory.
    HumanTask(TaskStatus),
    /// `todos/agent/<status>/**`, with the status decoded from the directory.
    AgentTask(AgentTaskStatus),
    /// `knowledge/**`
    Knowledge,
    /// `views/pages/**`
    ViewPage,
    /// `views/widgets/**`
    ViewWidget,
    /// `views/layouts/**`
    ViewLayout,
    /// `views/custom/**`
    ViewCustom,
    /// `views/notifications/**`
    Notification,
    /// Anything not matching the layouts above.
    Unknown,
}
/// Classify a relative path within the vault to determine entity kind.
///
/// Matching is purely positional on the leading path components; anything
/// unrecognized maps to [`EntityKind::Unknown`].
pub fn classify_path(relative: &Path) -> EntityKind {
    // Non-UTF-8 components are dropped before matching.
    let components: Vec<&str> = relative
        .components()
        .filter_map(|c| c.as_os_str().to_str())
        .collect();
    match components.as_slice() {
        ["agents", ..] => EntityKind::Agent,
        ["skills", ..] => EntityKind::Skill,
        ["crons", "active", ..] => EntityKind::CronActive,
        ["crons", "paused", ..] => EntityKind::CronPaused,
        ["crons", "templates", ..] => EntityKind::CronTemplate,
        // Human tasks live under a per-user directory; "harald" is the only
        // user recognized here.
        ["todos", "harald", status, ..] => {
            EntityKind::HumanTask(task_status_from_dir(status))
        }
        ["todos", "agent", status, ..] => {
            EntityKind::AgentTask(agent_task_status_from_dir(status))
        }
        ["knowledge", ..] => EntityKind::Knowledge,
        ["views", "pages", ..] => EntityKind::ViewPage,
        ["views", "widgets", ..] => EntityKind::ViewWidget,
        ["views", "layouts", ..] => EntityKind::ViewLayout,
        ["views", "custom", ..] => EntityKind::ViewCustom,
        ["views", "notifications", ..] => EntityKind::Notification,
        _ => EntityKind::Unknown,
    }
}
/// Map a status directory name to a [`TaskStatus`]; unknown names fall back
/// to `Open`.
pub fn task_status_from_dir(dir: &str) -> TaskStatus {
    match dir {
        "urgent" => TaskStatus::Urgent,
        "open" => TaskStatus::Open,
        "in-progress" => TaskStatus::InProgress,
        "done" => TaskStatus::Done,
        _ => TaskStatus::Open,
    }
}
/// Map a status directory name to an [`AgentTaskStatus`]; unknown names fall
/// back to `Queued`.
pub fn agent_task_status_from_dir(dir: &str) -> AgentTaskStatus {
    match dir {
        "queued" => AgentTaskStatus::Queued,
        "running" => AgentTaskStatus::Running,
        "done" => AgentTaskStatus::Done,
        "failed" => AgentTaskStatus::Failed,
        _ => AgentTaskStatus::Queued,
    }
}
/// Inverse of [`task_status_from_dir`]: the directory name for a status.
pub fn task_status_dir(status: &TaskStatus) -> &'static str {
    match status {
        TaskStatus::Urgent => "urgent",
        TaskStatus::Open => "open",
        TaskStatus::InProgress => "in-progress",
        TaskStatus::Done => "done",
    }
}
/// Inverse of [`agent_task_status_from_dir`]: the directory name for a status.
pub fn agent_task_status_dir(status: &AgentTaskStatus) -> &'static str {
    match status {
        AgentTaskStatus::Queued => "queued",
        AgentTaskStatus::Running => "running",
        AgentTaskStatus::Done => "done",
        AgentTaskStatus::Failed => "failed",
    }
}
impl<T> VaultEntity<T>
where
    T: DeserializeOwned + Serialize,
{
    /// Parse raw file `content` (frontmatter + body) into a typed entity
    /// located at `path`.
    pub fn from_content(path: PathBuf, content: &str) -> Result<Self, VaultError> {
        let (yaml, body) =
            crate::frontmatter::split_frontmatter_with_path(content, &path)?;
        let frontmatter: T = crate::frontmatter::parse_entity(yaml)?;
        Ok(Self {
            path,
            frontmatter,
            body: body.to_string(),
        })
    }
    /// Render the entity back to full file content (frontmatter + body).
    ///
    /// NOTE(review): this inherent `to_string` shadows the blanket
    /// `ToString::to_string` and returns a `Result`; a rename (e.g. `render`)
    /// would be cleaner but would change the public interface.
    pub fn to_string(&self) -> Result<String, VaultError> {
        crate::frontmatter::write_frontmatter(&self.frontmatter, &self.body)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Each test feeds a representative relative path through `classify_path`
    // and checks the inferred entity kind.
    #[test]
    fn test_classify_agent() {
        assert_eq!(
            classify_path(Path::new("agents/reviewer.md")),
            EntityKind::Agent
        );
    }
    #[test]
    fn test_classify_skill() {
        assert_eq!(
            classify_path(Path::new("skills/vault/read-vault.md")),
            EntityKind::Skill
        );
    }
    #[test]
    fn test_classify_cron() {
        assert_eq!(
            classify_path(Path::new("crons/active/daily-review.md")),
            EntityKind::CronActive
        );
        assert_eq!(
            classify_path(Path::new("crons/paused/old-job.md")),
            EntityKind::CronPaused
        );
    }
    #[test]
    fn test_classify_human_task() {
        // Status is decoded from the third path component.
        assert_eq!(
            classify_path(Path::new("todos/harald/urgent/fix-bug.md")),
            EntityKind::HumanTask(TaskStatus::Urgent)
        );
        assert_eq!(
            classify_path(Path::new("todos/harald/in-progress/feature.md")),
            EntityKind::HumanTask(TaskStatus::InProgress)
        );
    }
    #[test]
    fn test_classify_agent_task() {
        assert_eq!(
            classify_path(Path::new("todos/agent/queued/task-1.md")),
            EntityKind::AgentTask(AgentTaskStatus::Queued)
        );
        assert_eq!(
            classify_path(Path::new("todos/agent/running/task-2.md")),
            EntityKind::AgentTask(AgentTaskStatus::Running)
        );
    }
    #[test]
    fn test_classify_knowledge() {
        assert_eq!(
            classify_path(Path::new("knowledge/notes/rust-tips.md")),
            EntityKind::Knowledge
        );
    }
    #[test]
    fn test_classify_views() {
        assert_eq!(
            classify_path(Path::new("views/pages/home.md")),
            EntityKind::ViewPage
        );
        assert_eq!(
            classify_path(Path::new("views/notifications/alert.md")),
            EntityKind::Notification
        );
    }
}

View file

@ -0,0 +1,34 @@
use std::path::PathBuf;
#[derive(Debug, thiserror::Error)]
pub enum VaultError {
#[error("IO error: {source} (path: {path:?})")]
Io {
source: std::io::Error,
path: PathBuf,
},
#[error("YAML parsing error: {0}")]
Yaml(#[from] serde_yaml::Error),
#[error("Missing frontmatter in {0}")]
MissingFrontmatter(PathBuf),
#[error("Invalid entity at {path}: {reason}")]
InvalidEntity { path: PathBuf, reason: String },
#[error("Not found: {0}")]
NotFound(String),
#[error("Broken reference from {from} to {to}")]
BrokenReference { from: PathBuf, to: String },
}
impl VaultError {
pub fn io(source: std::io::Error, path: impl Into<PathBuf>) -> Self {
Self::Io {
source,
path: path.into(),
}
}
}

View file

@ -0,0 +1,167 @@
use crate::entity::VaultEntity;
use crate::error::VaultError;
use serde::{de::DeserializeOwned, Serialize};
use std::path::{Path, PathBuf};
/// Read and parse a vault entity from a markdown file.
///
/// # Errors
/// Returns an I/O error (with path context) when the file cannot be read,
/// or a parse error from [`VaultEntity::from_content`].
pub fn read_entity<T: DeserializeOwned + Serialize>(path: &Path) -> Result<VaultEntity<T>, VaultError> {
    match std::fs::read_to_string(path) {
        Ok(content) => VaultEntity::from_content(path.to_path_buf(), &content),
        Err(e) => Err(VaultError::io(e, path)),
    }
}
/// Write a vault entity to disk.
///
/// The entity is rendered first (frontmatter + body), then parent
/// directories are created as needed before writing the file.
pub fn write_entity<T: DeserializeOwned + Serialize>(entity: &VaultEntity<T>) -> Result<(), VaultError> {
    let rendered = entity.to_string()?;
    if let Some(dir) = entity.path.parent() {
        std::fs::create_dir_all(dir).map_err(|e| VaultError::io(e, dir))?;
    }
    std::fs::write(&entity.path, rendered).map_err(|e| VaultError::io(e, &entity.path))
}
/// Move a file from one path to another, creating parent dirs as needed.
///
/// Falls back to copy-then-delete when `rename` fails — `rename(2)` cannot
/// cross filesystem boundaries (EXDEV), which matters if parts of the vault
/// are mounted separately. On a failed fallback, the original rename error
/// is reported with the source path for context.
pub fn move_file(from: &Path, to: &Path) -> Result<(), VaultError> {
    if let Some(parent) = to.parent() {
        std::fs::create_dir_all(parent).map_err(|e| VaultError::io(e, parent))?;
    }
    match std::fs::rename(from, to) {
        Ok(()) => Ok(()),
        Err(rename_err) => {
            // Copy the file and remove the source; only report success when
            // both steps succeed.
            if std::fs::copy(from, to).is_ok() && std::fs::remove_file(from).is_ok() {
                Ok(())
            } else {
                Err(VaultError::io(rename_err, from))
            }
        }
    }
}
/// Ensure the standard vault directory structure exists.
///
/// Idempotent: `create_dir_all` succeeds for directories that already exist.
pub fn ensure_vault_structure(vault_root: &Path) -> Result<(), VaultError> {
    const LAYOUT: [&str; 21] = [
        "agents",
        "skills/vault",
        "crons/active",
        "crons/paused",
        "crons/templates",
        "todos/harald/urgent",
        "todos/harald/open",
        "todos/harald/in-progress",
        "todos/harald/done",
        "todos/agent/queued",
        "todos/agent/running",
        "todos/agent/done",
        "todos/agent/failed",
        "knowledge",
        "views/pages",
        "views/widgets",
        "views/layouts",
        "views/custom",
        "views/notifications",
        ".vault/logs",
        ".vault/templates",
    ];
    LAYOUT.iter().try_for_each(|dir| {
        let path = vault_root.join(dir);
        std::fs::create_dir_all(&path).map_err(|e| VaultError::io(e, &path))
    })
}
/// List all .md files in a directory (non-recursive).
///
/// A missing directory yields an empty list rather than an error; results
/// are sorted for deterministic ordering.
pub fn list_md_files(dir: &Path) -> Result<Vec<PathBuf>, VaultError> {
    if !dir.exists() {
        return Ok(Vec::new());
    }
    let mut files: Vec<PathBuf> = Vec::new();
    for entry in std::fs::read_dir(dir).map_err(|e| VaultError::io(e, dir))? {
        let path = entry.map_err(|e| VaultError::io(e, dir))?.path();
        let is_md = path.extension().is_some_and(|ext| ext == "md");
        if is_md && path.is_file() {
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}
/// List all .md files in a directory tree (recursive).
///
/// A missing root yields an empty list; results are sorted.
pub fn list_md_files_recursive(dir: &Path) -> Result<Vec<PathBuf>, VaultError> {
    let mut found = Vec::new();
    if dir.exists() {
        walk_dir_recursive(dir, &mut found)?;
        found.sort();
    }
    Ok(found)
}
/// Recursively collect `.md` files under `dir` into `files`, skipping any
/// directory whose name starts with a dot (e.g. `.vault`).
fn walk_dir_recursive(dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), VaultError> {
    for entry in std::fs::read_dir(dir).map_err(|e| VaultError::io(e, dir))? {
        let path = entry.map_err(|e| VaultError::io(e, dir))?.path();
        let hidden = path
            .file_name()
            .and_then(|n| n.to_str())
            .is_some_and(|name| name.starts_with('.'));
        if path.is_dir() {
            // Only directories are filtered on the dot prefix, mirroring the
            // non-recursive listing which keeps dot-files.
            if !hidden {
                walk_dir_recursive(&path, files)?;
            }
        } else if path.is_file() && path.extension().is_some_and(|e| e == "md") {
            files.push(path);
        }
    }
    Ok(())
}
/// Convert a string to a URL-safe slug.
///
/// Lowercases the input, treats every non-alphanumeric character as a
/// separator, collapses separator runs into a single dash, and strips
/// leading/trailing dashes.
pub fn slugify(s: &str) -> String {
    let mut slug = String::with_capacity(s.len());
    // Primed to true so leading separators emit nothing.
    let mut pending_dash = true;
    for ch in s.to_lowercase().chars() {
        if ch.is_alphanumeric() {
            slug.push(ch);
            pending_dash = false;
        } else if !pending_dash {
            slug.push('-');
            pending_dash = true;
        }
    }
    // A trailing separator run leaves exactly one dash to trim.
    if slug.ends_with('-') {
        slug.pop();
    }
    slug
}
/// Create a timestamped slug: `YYYYMMDD-HHMMSS-slug`
pub fn timestamped_slug(title: &str) -> String {
    let stamp = chrono::Utc::now().format("%Y%m%d-%H%M%S");
    format!("{stamp}-{}", slugify(title))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Slug generation: collapsing, trimming, and punctuation removal.
    #[test]
    fn test_slugify() {
        assert_eq!(slugify("Hello World!"), "hello-world");
        assert_eq!(slugify("Review PR #1234"), "review-pr-1234");
        assert_eq!(slugify("  spaces   everywhere  "), "spaces-everywhere");
    }
    #[test]
    fn test_timestamped_slug() {
        let slug = timestamped_slug("My Task");
        assert!(slug.ends_with("-my-task"));
        // `YYYYMMDD-HHMMSS-` prefix alone is 16 chars.
        assert!(slug.len() > 20);
    }
    #[test]
    fn test_ensure_vault_structure() {
        let tmp = std::env::temp_dir().join("vault-os-test-structure");
        let _ = std::fs::remove_dir_all(&tmp);
        ensure_vault_structure(&tmp).unwrap();
        assert!(tmp.join("agents").is_dir());
        assert!(tmp.join("todos/harald/urgent").is_dir());
        assert!(tmp.join("todos/agent/queued").is_dir());
        assert!(tmp.join(".vault/logs").is_dir());
        let _ = std::fs::remove_dir_all(&tmp);
    }
}

View file

@ -0,0 +1,180 @@
use crate::error::VaultError;
use serde::{de::DeserializeOwned, Serialize};
use std::path::Path;
const DELIMITER: &str = "---";

/// Split a markdown file into frontmatter YAML and body.
/// Returns (yaml_str, body_str). Body preserves original content byte-for-byte.
///
/// # Errors
/// Returns `VaultError::MissingFrontmatter` when the content does not start
/// with a `---` delimiter or no closing delimiter line is found.
pub fn split_frontmatter(content: &str) -> Result<(&str, &str), VaultError> {
    let trimmed = content.trim_start();
    if !trimmed.starts_with(DELIMITER) {
        return Err(VaultError::MissingFrontmatter("<unknown>".into()));
    }
    // Skip the opening delimiter and its line ending (CRLF or LF).
    let after_first = &trimmed[DELIMITER.len()..];
    let after_first = after_first
        .strip_prefix("\r\n")
        .or_else(|| after_first.strip_prefix('\n'))
        .unwrap_or(after_first);
    // Locate the start of the closing delimiter line.
    if let Some(end_pos) = find_closing_delimiter(after_first) {
        let yaml = &after_first[..end_pos];
        // Fix: skip the ENTIRE closing delimiter line. The previous code
        // advanced only `DELIMITER.len()` bytes, which leaked trailing
        // whitespace (e.g. "---  ") or a stray '\r' into the body.
        let after_delim = &after_first[end_pos..];
        let body = match after_delim.find('\n') {
            Some(nl) => &after_delim[nl + 1..],
            None => "",
        };
        Ok((yaml, body))
    } else {
        Err(VaultError::MissingFrontmatter("<unknown>".into()))
    }
}
/// Split frontmatter with path context for error messages.
///
/// Any `MissingFrontmatter` error is re-tagged with the real `path`; other
/// errors pass through untouched.
pub fn split_frontmatter_with_path<'a>(
    content: &'a str,
    path: &Path,
) -> Result<(&'a str, &'a str), VaultError> {
    match split_frontmatter(content) {
        Err(VaultError::MissingFrontmatter(_)) => {
            Err(VaultError::MissingFrontmatter(path.to_path_buf()))
        }
        other => other,
    }
}
/// Find the byte offset (within `s`) of the start of the line that closes
/// the frontmatter, i.e. the first line whose trimmed content is exactly
/// `---`.
///
/// Fix: iterate with `split_inclusive('\n')` so the running offset counts
/// every byte of every line. The previous `lines().map(|l| l.len() + 1)`
/// accounting dropped the `\r` of CRLF line endings, yielding offsets that
/// were short by one byte per preceding line on CRLF files.
fn find_closing_delimiter(s: &str) -> Option<usize> {
    let mut offset = 0;
    for line in s.split_inclusive('\n') {
        // `trim` removes the trailing "\n"/"\r\n" as well as surrounding
        // spaces, matching the original delimiter-line test.
        if line.trim() == DELIMITER {
            return Some(offset);
        }
        offset += line.len();
    }
    None
}
/// Parse frontmatter YAML into a typed struct.
pub fn parse_entity<T: DeserializeOwned>(yaml: &str) -> Result<T, VaultError> {
    // `?` converts serde_yaml::Error through the `Yaml` #[from] variant.
    Ok(serde_yaml::from_str(yaml)?)
}
/// Serialize frontmatter and combine with body, preserving body byte-for-byte.
pub fn write_frontmatter<T: Serialize>(frontmatter: &T, body: &str) -> Result<String, VaultError> {
    let yaml = serde_yaml::to_string(frontmatter).map_err(VaultError::Yaml)?;
    // serde_yaml normally emits a trailing newline; guard anyway so the
    // closing delimiter always starts on its own line.
    let sep = if yaml.ends_with('\n') { "" } else { "\n" };
    let mut out = format!("{DELIMITER}\n{yaml}{sep}{DELIMITER}\n");
    out.push_str(body);
    Ok(out)
}
/// Update specific fields in frontmatter YAML without re-serializing the entire struct.
/// This preserves unknown fields and ordering as much as possible.
///
/// `updates` must be a JSON object; each key is inserted (or overwritten) in
/// the frontmatter mapping. A non-object `updates` or non-mapping frontmatter
/// leaves the YAML content unchanged (it is still re-serialized).
pub fn update_frontmatter_fields(
    content: &str,
    path: &Path,
    updates: &serde_json::Value,
) -> Result<String, VaultError> {
    let (yaml, body) = split_frontmatter_with_path(content, path)?;
    let mut mapping: serde_yaml::Value = serde_yaml::from_str(yaml).map_err(VaultError::Yaml)?;
    if let (serde_yaml::Value::Mapping(ref mut map), serde_json::Value::Object(ref obj)) =
        (&mut mapping, updates)
    {
        for (key, value) in obj {
            let yaml_key = serde_yaml::Value::String(key.clone());
            // JSON -> YAML value via serde transcoding.
            // NOTE(review): a value that fails to transcode is silently
            // written as `null` — consider surfacing this as an error.
            let yaml_value: serde_yaml::Value =
                serde_json::from_value(value.clone()).unwrap_or(serde_yaml::Value::Null);
            map.insert(yaml_key, yaml_value);
        }
    }
    // Re-assemble the file: delimiter, updated YAML, delimiter, original body.
    let yaml_out = serde_yaml::to_string(&mapping).map_err(VaultError::Yaml)?;
    let mut out = String::new();
    out.push_str(DELIMITER);
    out.push('\n');
    out.push_str(&yaml_out);
    if !yaml_out.ends_with('\n') {
        out.push('\n');
    }
    out.push_str(DELIMITER);
    out.push('\n');
    if !body.is_empty() {
        out.push_str(body);
    }
    Ok(out)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::types::Agent;
    #[test]
    fn test_split_frontmatter() {
        let content = "---\nname: test\n---\nHello world\n";
        let (yaml, body) = split_frontmatter(content).unwrap();
        assert_eq!(yaml, "name: test\n");
        assert_eq!(body, "Hello world\n");
    }
    #[test]
    fn test_split_missing_frontmatter() {
        let content = "Hello world\n";
        assert!(split_frontmatter(content).is_err());
    }
    // Write then re-parse an entity; the body must survive byte-for-byte.
    #[test]
    fn test_roundtrip() {
        let original_body = "# System Prompt\n\nYou are a helpful agent.\n\n- Rule 1\n- Rule 2\n";
        let agent = Agent {
            name: "test-agent".into(),
            executable: "claude-code".into(),
            model: Some("sonnet".into()),
            escalate_to: None,
            escalate_when: vec![],
            mcp_servers: vec![],
            skills: vec!["read-vault".into()],
            timeout: 600,
            max_retries: 2,
            env: Default::default(),
        };
        let written = write_frontmatter(&agent, original_body).unwrap();
        let (yaml, body) = split_frontmatter(&written).unwrap();
        let parsed: Agent = parse_entity(yaml).unwrap();
        assert_eq!(parsed.name, "test-agent");
        assert_eq!(parsed.executable, "claude-code");
        assert_eq!(body, original_body);
    }
    // Field merge preserves the body and adds the new keys.
    #[test]
    fn test_update_fields() {
        let content = "---\nname: test\nschedule: '* * * * *'\n---\nBody\n";
        let updates = serde_json::json!({
            "last_run": "2024-01-01T00:00:00Z",
            "run_count": 5
        });
        let result =
            update_frontmatter_fields(content, Path::new("test.md"), &updates).unwrap();
        assert!(result.contains("last_run"));
        assert!(result.contains("run_count"));
        assert!(result.contains("Body\n"));
    }
}

View file

@ -0,0 +1,12 @@
/// Configuration loading (`.vault/config.yaml`).
pub mod config;
/// Entity model and path-based classification.
pub mod entity;
/// Crate-wide error type.
pub mod error;
/// Filesystem helpers: entity read/write, vault layout, slug generation.
pub mod filesystem;
/// YAML frontmatter split/parse/render.
pub mod frontmatter;
/// Agent prompt composition (agent body + skills + task context).
pub mod prompt;
/// Simple term-based and tag-based search.
pub mod search;
/// Shared frontmatter types for all vault entities.
pub mod types;
/// Per-entity frontmatter validation.
pub mod validation;
/// Re-exported for convenient `vault_core::VaultError` access.
pub use error::VaultError;
/// Crate-wide result alias.
pub type Result<T> = std::result::Result<T, VaultError>;

View file

@ -0,0 +1,64 @@
use crate::entity::VaultEntity;
use crate::error::VaultError;
use crate::filesystem;
use crate::types::{Agent, Skill};
use std::path::Path;
/// Resolve a skill name to its file path under the vault's `skills/` directory.
///
/// Lookup order: `skills/{name}.md`, then `skills/vault/{name}.md`, then a
/// recursive scan of `skills/` for any `.md` file whose stem matches.
pub fn resolve_skill_path(vault_root: &Path, skill_name: &str) -> Option<std::path::PathBuf> {
    let file_name = format!("{}.md", skill_name);
    for candidate in [
        vault_root.join("skills").join(&file_name),
        vault_root.join("skills/vault").join(&file_name),
    ] {
        if candidate.exists() {
            return Some(candidate);
        }
    }
    // Fall back to a recursive scan; listing errors resolve to None.
    filesystem::list_md_files_recursive(&vault_root.join("skills"))
        .ok()?
        .into_iter()
        .find(|file| file.file_stem().is_some_and(|stem| stem == skill_name))
}
/// Compose the full prompt for an agent execution.
/// Agent body + skill bodies appended under `## Skills` sections.
///
/// # Errors
/// Propagates failures reading a resolved skill file; an unresolvable skill
/// name is only logged and skipped.
pub fn compose_prompt(
    vault_root: &Path,
    agent: &VaultEntity<Agent>,
    task_context: Option<&str>,
) -> Result<String, VaultError> {
    let mut prompt = agent.body.clone();
    let skills = &agent.frontmatter.skills;
    if !skills.is_empty() {
        prompt.push_str("\n\n## Skills\n");
        for name in skills {
            match resolve_skill_path(vault_root, name) {
                Some(skill_path) => {
                    let skill: VaultEntity<Skill> = filesystem::read_entity(&skill_path)?;
                    prompt.push_str("\n### ");
                    prompt.push_str(&skill.frontmatter.name);
                    prompt.push('\n');
                    prompt.push_str(&skill.body);
                }
                None => tracing::warn!(skill = %name, "Skill not found, skipping"),
            }
        }
    }
    if let Some(ctx) = task_context {
        prompt.push_str("\n\n## Task\n\n");
        prompt.push_str(ctx);
    }
    Ok(prompt)
}

View file

@ -0,0 +1,164 @@
use crate::filesystem::{list_md_files_recursive, read_entity};
use crate::frontmatter::split_frontmatter_with_path;
use crate::types::KnowledgeNote;
use std::path::Path;
/// A single search hit.
#[derive(Debug, Clone, serde::Serialize)]
pub struct SearchResult {
    /// Vault-relative path of the matching file.
    pub path: String,
    /// Frontmatter `title` (or `name`) when present, else the relative path.
    pub title: String,
    /// Short excerpt around the first match (or the start of the file).
    pub snippet: String,
    /// Relevance score; higher is better.
    pub score: f64,
}
/// Search vault files by query string.
/// Matches against frontmatter title, tags, and body content.
///
/// All whitespace-separated terms must match (AND semantics); results are
/// sorted by descending score and capped at 50.
pub fn search_vault(vault_root: &Path, query: &str) -> Vec<SearchResult> {
    let query_lower = query.to_lowercase();
    let terms: Vec<&str> = query_lower.split_whitespace().collect();
    if terms.is_empty() {
        return Vec::new();
    }
    const SEARCH_DIRS: [&str; 5] =
        ["knowledge", "agents", "skills", "todos/harald", "todos/agent"];
    let mut hits: Vec<SearchResult> = SEARCH_DIRS
        .iter()
        .map(|dir| vault_root.join(dir))
        .filter(|dir| dir.exists())
        .filter_map(|dir| list_md_files_recursive(&dir).ok())
        .flatten()
        .filter_map(|path| score_file(&path, vault_root, &terms))
        .collect();
    hits.sort_by(|a, b| b.score.partial_cmp(&a.score).unwrap_or(std::cmp::Ordering::Equal));
    hits.truncate(50);
    hits
}
/// Search specifically by tag.
///
/// Scans knowledge notes only; tag comparison is case-insensitive and every
/// hit gets a fixed score of 1.0.
pub fn search_by_tag(vault_root: &Path, tag: &str) -> Vec<SearchResult> {
    let wanted = tag.to_lowercase();
    let mut hits = Vec::new();
    let files = list_md_files_recursive(&vault_root.join("knowledge")).unwrap_or_default();
    for path in files {
        // Unparseable notes are skipped silently.
        let Ok(note) = read_entity::<KnowledgeNote>(&path) else {
            continue;
        };
        if !note.frontmatter.tags.iter().any(|t| t.to_lowercase() == wanted) {
            continue;
        }
        let relative = path
            .strip_prefix(vault_root)
            .unwrap_or(&path)
            .to_string_lossy()
            .to_string();
        hits.push(SearchResult {
            title: note
                .frontmatter
                .title
                .clone()
                .unwrap_or_else(|| relative.clone()),
            path: relative,
            snippet: note.body.chars().take(120).collect(),
            score: 1.0,
        });
    }
    hits
}
/// Score a single file against all query `terms`.
///
/// Every term must match the path or the content (AND semantics) or the file
/// is rejected. Path matches weigh 3.0 per term; content matches weigh 1.0
/// plus up to 1.0 for repeated occurrences (capped at 5).
///
/// Perf: the frontmatter YAML is now parsed once (it was parsed twice to
/// look up `title` and `name` separately), and the path is lowercased once
/// instead of once per term.
fn score_file(path: &Path, vault_root: &Path, terms: &[&str]) -> Option<SearchResult> {
    let content = std::fs::read_to_string(path).ok()?;
    let content_lower = content.to_lowercase();
    let relative = path
        .strip_prefix(vault_root)
        .unwrap_or(path)
        .to_string_lossy()
        .to_string();
    let relative_lower = relative.to_lowercase();
    let mut score = 0.0;
    for term in terms {
        let mut term_score = 0.0;
        // Path matches carry more weight than body matches.
        if relative_lower.contains(term) {
            term_score += 3.0;
        }
        let count = content_lower.matches(term).count();
        if count > 0 {
            term_score += 1.0 + (count as f64).min(5.0) * 0.2;
        }
        // AND semantics: a term with no match anywhere rejects the file.
        if term_score == 0.0 {
            return None;
        }
        score += term_score;
    }
    if score == 0.0 {
        return None;
    }
    // Prefer an explicit frontmatter `title`, then `name`, then the path.
    let title = split_frontmatter_with_path(&content, path)
        .ok()
        .and_then(|(yaml, _body)| serde_yaml::from_str::<serde_json::Value>(yaml).ok())
        .and_then(|v| {
            v.get("title")
                .and_then(|t| t.as_str())
                .or_else(|| v.get("name").and_then(|t| t.as_str()))
                .map(String::from)
        })
        .unwrap_or_else(|| relative.clone());
    // Snippet is built around the first query term only.
    let snippet = extract_snippet(&content, terms.first().unwrap_or(&""));
    Some(SearchResult {
        path: relative,
        title,
        snippet,
        score,
    })
}
/// Extract a short display snippet around the first (case-insensitive)
/// occurrence of `term`; falls back to the first line when `term` is absent.
///
/// Fix: all byte offsets are clamped to UTF-8 char boundaries before
/// slicing. The previous version sliced `content` at arithmetic offsets
/// (`pos - 60`, `pos + 120`) and at a position found in the *lowercased*
/// copy — any of which can land mid-character on multi-byte text and panic.
fn extract_snippet(content: &str, term: &str) -> String {
    // Round `i` down to the nearest char boundary of `s`.
    fn floor_boundary(s: &str, mut i: usize) -> usize {
        i = i.min(s.len());
        while !s.is_char_boundary(i) {
            i -= 1;
        }
        i
    }
    let lower = content.to_lowercase();
    if let Some(pos) = lower.find(&term.to_lowercase()) {
        // `pos` indexes the lowercased copy; some Unicode lowercasings change
        // byte lengths, so clamp before using it against `content`.
        let pos = floor_boundary(content, pos);
        let start = content[..pos]
            .rfind('\n')
            .map(|p| p + 1)
            .unwrap_or_else(|| floor_boundary(content, pos.saturating_sub(60)));
        let end = content[pos..]
            .find('\n')
            .map(|p| pos + p)
            .unwrap_or_else(|| floor_boundary(content, (pos + 120).min(content.len())));
        content[start..end].chars().take(150).collect()
    } else {
        content.lines().next().unwrap_or("").chars().take(150).collect()
    }
}

View file

@ -0,0 +1,204 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Task priority; serialized lowercase, defaults to `medium`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum Priority {
    Urgent,
    High,
    #[default]
    Medium,
    Low,
}
/// Outcome of a single cron/agent run.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum RunStatus {
    Success,
    Failure,
    Timeout,
}
/// Status of a human task; mirrors the `todos/harald/<status>/` directories.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum TaskStatus {
    Urgent,
    Open,
    #[serde(rename = "in-progress")]
    InProgress,
    Done,
}
/// Status of an agent task; mirrors the `todos/agent/<status>/` directories.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum AgentTaskStatus {
    Queued,
    Running,
    Done,
    Failed,
}
/// Frontmatter for an agent definition under `agents/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Agent {
    pub name: String,
    /// Executor backend: a known executor name or a path to a binary
    /// (see `validation::validate_agent`).
    pub executable: String,
    #[serde(default)]
    pub model: Option<String>,
    // NOTE(review): escalation fields are presumably consumed by the
    // scheduler's retry/handoff logic — confirm against vault-scheduler.
    #[serde(default)]
    pub escalate_to: Option<String>,
    #[serde(default)]
    pub escalate_when: Vec<String>,
    /// MCP server names this agent should have available.
    #[serde(default)]
    pub mcp_servers: Vec<String>,
    /// Skill names resolved against `skills/` at prompt-composition time.
    #[serde(default)]
    pub skills: Vec<String>,
    /// Execution timeout in seconds (default 600).
    #[serde(default = "default_timeout")]
    pub timeout: u64,
    #[serde(default)]
    pub max_retries: u32,
    /// Extra environment variables for the agent process.
    #[serde(default)]
    pub env: HashMap<String, String>,
}
/// Serde default for [`Agent::timeout`] (seconds).
fn default_timeout() -> u64 {
    600
}
/// Frontmatter for a reusable skill under `skills/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Skill {
    pub name: String,
    pub description: String,
    #[serde(default)]
    pub version: Option<u32>,
    /// MCP servers this skill requires.
    #[serde(default)]
    pub requires_mcp: Vec<String>,
    #[serde(default)]
    pub inputs: Vec<String>,
    #[serde(default)]
    pub outputs: Vec<String>,
}
/// Frontmatter for a scheduled job under `crons/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CronJob {
    /// Cron expression; 5-field form is accepted and gets a seconds field
    /// prepended during validation (see `validation::validate_cron`).
    pub schedule: String,
    /// Name of the agent executed on each firing.
    pub agent: String,
    pub title: String,
    #[serde(default = "default_true")]
    pub enabled: bool,
    // Run bookkeeping — presumably updated by the scheduler after each run.
    #[serde(default)]
    pub last_run: Option<DateTime<Utc>>,
    #[serde(default)]
    pub last_status: Option<RunStatus>,
    #[serde(default)]
    pub next_run: Option<DateTime<Utc>>,
    #[serde(default)]
    pub run_count: u64,
}
/// Serde default for [`CronJob::enabled`].
fn default_true() -> bool {
    true
}
/// Frontmatter for a human todo under `todos/harald/<status>/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HumanTask {
    pub title: String,
    #[serde(default)]
    pub priority: Priority,
    /// Free-form origin of the task (e.g. an external system).
    #[serde(default)]
    pub source: Option<String>,
    #[serde(default)]
    pub repo: Option<String>,
    #[serde(default)]
    pub labels: Vec<String>,
    pub created: DateTime<Utc>,
    #[serde(default)]
    pub due: Option<DateTime<Utc>>,
}
/// Frontmatter for an agent task under `todos/agent/<status>/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentTask {
    pub title: String,
    /// Name of the agent that should execute this task.
    pub agent: String,
    #[serde(default)]
    pub priority: Priority,
    /// Free-form task category; serialized as `type`.
    #[serde(default, rename = "type")]
    pub task_type: Option<String>,
    pub created: DateTime<Utc>,
    #[serde(default)]
    pub started: Option<DateTime<Utc>>,
    #[serde(default)]
    pub completed: Option<DateTime<Utc>>,
    /// Retries already attempted.
    #[serde(default)]
    pub retry: u32,
    #[serde(default)]
    pub max_retries: u32,
    /// Arbitrary structured input/output payloads.
    #[serde(default)]
    pub input: Option<serde_json::Value>,
    #[serde(default)]
    pub output: Option<serde_json::Value>,
    /// Last error message, if the task failed.
    #[serde(default)]
    pub error: Option<String>,
}
/// Frontmatter for a note under `knowledge/`; all fields optional.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KnowledgeNote {
    #[serde(default)]
    pub title: Option<String>,
    /// Tags used by `search::search_by_tag` (compared case-insensitively).
    #[serde(default)]
    pub tags: Vec<String>,
    #[serde(default)]
    pub source: Option<String>,
    #[serde(default)]
    pub created: Option<DateTime<Utc>>,
    /// References to related notes; format not enforced here.
    #[serde(default)]
    pub related: Vec<String>,
}
/// Frontmatter shared by all files under `views/`.
///
/// NOTE(review): this is a union type — which optional fields are meaningful
/// presumably depends on `view_type` (page vs widget vs layout); confirm
/// against the dashboard's view loader.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ViewDefinition {
    /// Discriminator; serialized as `type`.
    #[serde(rename = "type")]
    pub view_type: String,
    pub title: Option<String>,
    #[serde(default)]
    pub icon: Option<String>,
    #[serde(default)]
    pub route: Option<String>,
    #[serde(default)]
    pub position: Option<i32>,
    #[serde(default)]
    pub layout: Option<String>,
    /// Layout region name -> widgets placed in that region.
    #[serde(default)]
    pub regions: HashMap<String, Vec<WidgetInstance>>,
    // Widget-specific fields
    #[serde(default)]
    pub name: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub component: Option<String>,
    #[serde(default)]
    pub props_schema: Option<serde_json::Value>,
}
/// A widget placed inside a view region, with its instance props.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WidgetInstance {
    pub widget: String,
    #[serde(default)]
    pub props: serde_json::Value,
}
/// Frontmatter for a notification under `views/notifications/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Notification {
    pub title: String,
    #[serde(default)]
    pub message: Option<String>,
    /// Severity label; values are not constrained by this type.
    #[serde(default)]
    pub level: Option<String>,
    #[serde(default)]
    pub source: Option<String>,
    #[serde(default)]
    pub created: Option<DateTime<Utc>>,
    #[serde(default)]
    pub expires: Option<DateTime<Utc>>,
}

View file

@ -0,0 +1,317 @@
use crate::entity::{classify_path, EntityKind};
use crate::frontmatter::split_frontmatter_with_path;
use crate::types::*;
use std::collections::HashSet;
use std::path::Path;
/// A single validation finding for a vault file.
#[derive(Debug, Clone)]
pub struct ValidationIssue {
    /// Severity of the finding.
    pub level: IssueLevel,
    /// Frontmatter field the finding refers to, when applicable.
    pub field: Option<String>,
    /// Human-readable description.
    pub message: String,
}
/// Severity of a [`ValidationIssue`].
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize)]
#[serde(rename_all = "lowercase")]
pub enum IssueLevel {
    Error,
    Warning,
}
// NOTE(review): this hand-written impl emits the same shape as a derived
// Serialize would; it could probably be replaced by #[derive(serde::Serialize)]
// on the struct.
impl serde::Serialize for ValidationIssue {
    fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        use serde::ser::SerializeStruct;
        let mut st = s.serialize_struct("ValidationIssue", 3)?;
        st.serialize_field("level", &self.level)?;
        st.serialize_field("field", &self.field)?;
        st.serialize_field("message", &self.message)?;
        st.end()
    }
}
/// Validate a vault file given its relative path and raw content.
///
/// The relative path determines which entity schema is applied; unknown
/// kinds only get the frontmatter-presence check.
pub fn validate(relative_path: &Path, content: &str) -> Vec<ValidationIssue> {
    let mut issues = Vec::new();
    let Ok((yaml, _body)) = split_frontmatter_with_path(content, relative_path) else {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: None,
            message: "Missing or malformed frontmatter".into(),
        });
        return issues;
    };
    match classify_path(relative_path) {
        EntityKind::Agent => validate_agent(yaml, &mut issues),
        EntityKind::Skill => validate_skill(yaml, &mut issues),
        EntityKind::CronActive | EntityKind::CronPaused | EntityKind::CronTemplate => {
            validate_cron(yaml, &mut issues)
        }
        EntityKind::HumanTask(_) => validate_human_task(yaml, &mut issues),
        EntityKind::AgentTask(_) => validate_agent_task(yaml, &mut issues),
        _ => {}
    }
    issues
}
/// Validate agent frontmatter: required fields plus an executor sanity check.
fn validate_agent(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let agent: Agent = match serde_yaml::from_str(yaml) {
        Ok(a) => a,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid agent frontmatter: {e}"),
            });
            return;
        }
    };
    if agent.name.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("name".into()),
            message: "Agent name is required".into(),
        });
    }
    if agent.executable.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("executable".into()),
            message: "Agent executable is required".into(),
        });
    }
    // Anything containing a path separator is assumed to be a binary path;
    // otherwise it must be one of the built-in executor names.
    let valid_executables = ["claude-code", "ollama", "openai-compat"];
    let known = valid_executables.contains(&agent.executable.as_str());
    let path_like = agent.executable.starts_with('/') || agent.executable.contains('/');
    if !known && !path_like {
        issues.push(ValidationIssue {
            level: IssueLevel::Warning,
            field: Some("executable".into()),
            message: format!(
                "Executable '{}' is not a known executor. Expected one of: {:?} or an absolute path",
                agent.executable, valid_executables
            ),
        });
    }
}
/// Validate skill frontmatter: name required, description recommended.
fn validate_skill(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let skill: Skill = match serde_yaml::from_str(yaml) {
        Ok(s) => s,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid skill frontmatter: {e}"),
            });
            return;
        }
    };
    if skill.name.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("name".into()),
            message: "Skill name is required".into(),
        });
    }
    // Missing description is only a warning.
    if skill.description.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Warning,
            field: Some("description".into()),
            message: "Skill should have a description".into(),
        });
    }
}
/// Validate cron frontmatter: required fields plus a parseable schedule.
fn validate_cron(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    match serde_yaml::from_str::<CronJob>(yaml) {
        Ok(cron) => {
            if cron.title.is_empty() {
                issues.push(ValidationIssue {
                    level: IssueLevel::Error,
                    field: Some("title".into()),
                    message: "Cron title is required".into(),
                });
            }
            if cron.agent.is_empty() {
                issues.push(ValidationIssue {
                    level: IssueLevel::Error,
                    field: Some("agent".into()),
                    message: "Cron agent is required".into(),
                });
            }
            // The `cron` crate expects a seconds field; prepend `0` to a
            // standard 5-field expression.
            let expr = if cron.schedule.split_whitespace().count() == 5 {
                format!("0 {}", cron.schedule)
            } else {
                cron.schedule.clone()
            };
            // Fix: `cron::Schedule::from_str(..)` requires `std::str::FromStr`
            // to be in scope, which this module never imports and so would
            // not compile. `str::parse` dispatches through the same impl
            // without needing the trait import.
            if expr.parse::<cron::Schedule>().is_err() {
                issues.push(ValidationIssue {
                    level: IssueLevel::Error,
                    field: Some("schedule".into()),
                    message: format!("Invalid cron expression: '{}'", cron.schedule),
                });
            }
        }
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid cron frontmatter: {e}"),
            });
        }
    }
}
/// Validate human-task frontmatter: only the title is mandatory.
fn validate_human_task(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let task: HumanTask = match serde_yaml::from_str(yaml) {
        Ok(parsed) => parsed,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid task frontmatter: {e}"),
            });
            return;
        }
    };
    if task.title.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("title".into()),
            message: "Task title is required".into(),
        });
    }
}
/// Validate agent-task frontmatter: both the title and the target agent
/// name are mandatory.
fn validate_agent_task(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let task: AgentTask = match serde_yaml::from_str(yaml) {
        Ok(parsed) => parsed,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid agent task frontmatter: {e}"),
            });
            return;
        }
    };
    if task.title.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("title".into()),
            message: "Task title is required".into(),
        });
    }
    if task.agent.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("agent".into()),
            message: "Agent name is required for agent tasks".into(),
        });
    }
}
/// Validate that references between entities are valid.
/// Checks that agent skills and cron agents exist.
///
/// Unreadable directories or entities are skipped silently (best effort);
/// dangling references are reported as warnings keyed by the referencing
/// entity's name/title.
pub fn validate_references(
    vault_root: &Path,
    agent_names: &HashSet<String>,
    skill_names: &HashSet<String>,
) -> Vec<(String, ValidationIssue)> {
    let mut issues = Vec::new();
    // Agents may reference skills by name; warn on any that don't resolve.
    if let Ok(agent_files) = crate::filesystem::list_md_files(&vault_root.join("agents")) {
        for agent_path in agent_files {
            let Ok(agent) = crate::filesystem::read_entity::<Agent>(&agent_path) else {
                continue;
            };
            for skill_ref in &agent.frontmatter.skills {
                if skill_names.contains(skill_ref) {
                    continue;
                }
                issues.push((
                    agent.frontmatter.name.clone(),
                    ValidationIssue {
                        level: IssueLevel::Warning,
                        field: Some("skills".into()),
                        message: format!("Referenced skill '{}' not found", skill_ref),
                    },
                ));
            }
        }
    }
    // Active crons must point at an existing agent.
    if let Ok(cron_files) = crate::filesystem::list_md_files(&vault_root.join("crons/active")) {
        for cron_path in cron_files {
            let Ok(cron) = crate::filesystem::read_entity::<CronJob>(&cron_path) else {
                continue;
            };
            if agent_names.contains(&cron.frontmatter.agent) {
                continue;
            }
            issues.push((
                cron.frontmatter.title.clone(),
                ValidationIssue {
                    level: IssueLevel::Warning,
                    field: Some("agent".into()),
                    message: format!(
                        "Referenced agent '{}' not found",
                        cron.frontmatter.agent
                    ),
                },
            ));
        }
    }
    issues
}
use std::str::FromStr;
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;

    /// Run validation over raw file content located at a vault-relative path.
    fn check(path: &str, content: &str) -> Vec<ValidationIssue> {
        validate(Path::new(path), content)
    }

    #[test]
    fn test_validate_valid_agent() {
        let issues = check(
            "agents/test-agent.md",
            "---\nname: test-agent\nexecutable: claude-code\n---\nBody",
        );
        assert!(issues.is_empty(), "Expected no issues: {:?}", issues);
    }

    #[test]
    fn test_validate_agent_missing_name() {
        let issues = check("agents/bad.md", "---\nname: \"\"\nexecutable: claude-code\n---\n");
        assert!(issues.iter().any(|i| i.field.as_deref() == Some("name")));
    }

    #[test]
    fn test_validate_missing_frontmatter() {
        let issues = check("agents/bad.md", "No frontmatter here");
        assert_eq!(issues.len(), 1);
        assert_eq!(issues[0].level, IssueLevel::Error);
    }

    #[test]
    fn test_validate_cron_bad_expression() {
        let issues = check(
            "crons/active/bad.md",
            "---\ntitle: bad\nagent: test\nschedule: \"not a cron\"\n---\n",
        );
        assert!(issues.iter().any(|i| i.field.as_deref() == Some("schedule")));
    }
}

View file

@ -0,0 +1,19 @@
[package]
name = "vault-scheduler"
version.workspace = true
edition.workspace = true
[dependencies]
vault-core.workspace = true
vault-watch.workspace = true
tokio.workspace = true
cron.workspace = true
chrono.workspace = true
tracing.workspace = true
thiserror.workspace = true
async-trait.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
uuid.workspace = true

View file

@ -0,0 +1,206 @@
use chrono::{DateTime, Utc};
use cron::Schedule;
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use vault_core::entity::VaultEntity;
use vault_core::error::VaultError;
use vault_core::filesystem;
use vault_core::frontmatter;
use vault_core::types::CronJob;
/// Errors produced while building or firing the cron schedule.
#[derive(Debug, thiserror::Error)]
pub enum CronError {
    /// The schedule string could not be parsed by the `cron` crate.
    #[error("Invalid cron expression '{expr}': {reason}")]
    InvalidExpression { expr: String, reason: String },
    /// An underlying vault I/O or entity-parsing failure.
    #[error("Vault error: {0}")]
    Vault(#[from] VaultError),
}
/// A single scheduled firing: when, and for which cron file.
///
/// Entries are ordered by `next_fire` with `path` as a tie-break so they can
/// sit in a min-heap (via `Reverse`). The tie-break keeps `Ord` consistent
/// with the derived `PartialEq`/`Eq` — `cmp` only returns `Equal` when the
/// entries are actually equal, as the `Ord` contract requires. (Previously
/// `cmp` looked only at `next_fire`, so two distinct entries sharing a fire
/// time compared `Equal` while `==` said they differed.)
#[derive(Debug, Clone, PartialEq, Eq)]
struct ScheduleEntry {
    next_fire: DateTime<Utc>,
    path: PathBuf,
}
impl PartialOrd for ScheduleEntry {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for ScheduleEntry {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // Primary key: fire time. Secondary key: path, for Ord/Eq coherence.
        self.next_fire
            .cmp(&other.next_fire)
            .then_with(|| self.path.cmp(&other.path))
    }
}
/// Maintains a min-heap of upcoming firings for the cron files under
/// `crons/active/` and turns due crons into queued agent tasks.
pub struct CronEngine {
    // Root of the vault directory tree (contains `crons/`, `todos/`, ...).
    vault_root: PathBuf,
    // Min-heap (via `Reverse`) keyed on each cron file's next fire time.
    schedule: BinaryHeap<Reverse<ScheduleEntry>>,
}
impl CronEngine {
    /// Create an engine with an empty schedule for the given vault root.
    pub fn new(vault_root: PathBuf) -> Self {
        Self {
            vault_root,
            schedule: BinaryHeap::new(),
        }
    }
    /// Rebuild the entire schedule by scanning `crons/active/`.
    ///
    /// Invalid cron files are logged and skipped so one bad file cannot
    /// prevent the rest from being scheduled.
    pub fn rebuild_schedule(&mut self) -> Result<(), CronError> {
        self.schedule.clear();
        let active_dir = self.vault_root.join("crons/active");
        let files = filesystem::list_md_files(&active_dir)?;
        for file in files {
            if let Err(e) = self.add_cron(&file) {
                tracing::warn!(?file, error = %e, "Skipping invalid cron");
            }
        }
        tracing::info!(count = self.schedule.len(), "Rebuilt cron schedule");
        Ok(())
    }
    /// Add or update a cron job in the schedule.
    pub fn upsert_cron(&mut self, path: &Path) -> Result<(), CronError> {
        self.remove_cron(path);
        self.add_cron(path)
    }
    /// Remove a cron job from the schedule.
    ///
    /// `BinaryHeap` has no keyed removal, so the heap is drained and rebuilt
    /// without the matching entries; O(n) in the number of scheduled crons.
    pub fn remove_cron(&mut self, path: &Path) {
        let entries: Vec<_> = self
            .schedule
            .drain()
            .filter(|Reverse(e)| e.path != path)
            .collect();
        self.schedule = entries.into_iter().collect();
    }
    /// Get the next fire time, if any crons are scheduled.
    pub fn next_fire_time(&self) -> Option<DateTime<Utc>> {
        self.schedule.peek().map(|Reverse(e)| e.next_fire)
    }
    /// Pop all crons that are due (fire time <= now).
    pub fn pop_due(&mut self) -> Vec<PathBuf> {
        let now = Utc::now();
        let mut due = Vec::new();
        while let Some(Reverse(entry)) = self.schedule.peek() {
            if entry.next_fire <= now {
                // peek() just confirmed the head exists; pop() cannot fail.
                let Reverse(entry) = self.schedule.pop().unwrap();
                due.push(entry.path);
            } else {
                break;
            }
        }
        due
    }
    /// Fire a cron: create an agent task in queued/, update cron frontmatter.
    /// Returns the path to the created agent task.
    ///
    /// Both filesystem writes are registered with `write_filter` first so the
    /// watcher does not re-process the daemon's own changes.
    #[tracing::instrument(skip(self, write_filter), fields(cron = ?cron_path.file_name()))]
    pub fn fire_cron(
        &mut self,
        cron_path: &Path,
        write_filter: &vault_watch::write_filter::DaemonWriteFilter,
    ) -> Result<PathBuf, CronError> {
        let entity: VaultEntity<CronJob> = filesystem::read_entity(cron_path)?;
        let cron = &entity.frontmatter;
        // Create agent task
        let slug = filesystem::timestamped_slug(&cron.title);
        let task_path = self
            .vault_root
            .join("todos/agent/queued")
            .join(format!("{}.md", slug));
        let now = Utc::now();
        let agent_task = vault_core::types::AgentTask {
            title: cron.title.clone(),
            agent: cron.agent.clone(),
            priority: vault_core::types::Priority::Medium,
            task_type: Some("cron".into()),
            created: now,
            started: None,
            completed: None,
            retry: 0,
            max_retries: 0,
            input: None,
            output: None,
            error: None,
        };
        let task_entity = VaultEntity {
            path: task_path.clone(),
            frontmatter: agent_task,
            // The cron's markdown body becomes the task context.
            body: entity.body.clone(),
        };
        write_filter.register(task_path.clone());
        filesystem::write_entity(&task_entity)?;
        // Update cron frontmatter
        let content = std::fs::read_to_string(cron_path)
            .map_err(|e| VaultError::io(e, cron_path))?;
        let updates = serde_json::json!({
            "last_run": now.to_rfc3339(),
            "last_status": "success",
            "run_count": cron.run_count + 1,
        });
        let updated = frontmatter::update_frontmatter_fields(&content, cron_path, &updates)?;
        write_filter.register(cron_path.to_path_buf());
        std::fs::write(cron_path, updated).map_err(|e| VaultError::io(e, cron_path))?;
        // Re-schedule this cron
        if let Err(e) = self.add_cron(cron_path) {
            tracing::warn!(?cron_path, error = %e, "Failed to reschedule cron");
        }
        tracing::info!(
            cron = %cron.title,
            agent = %cron.agent,
            task = ?task_path,
            "Fired cron job"
        );
        Ok(task_path)
    }
    /// Parse a cron file and push its next firing onto the heap.
    /// Disabled crons are silently skipped.
    fn add_cron(&mut self, path: &Path) -> Result<(), CronError> {
        let entity: VaultEntity<CronJob> = filesystem::read_entity(path)?;
        let cron = &entity.frontmatter;
        if !cron.enabled {
            return Ok(());
        }
        // The cron crate expects 6 or 7 fields (sec min hour dom month dow
        // [year]). Prepend a seconds column only for standard 5-field
        // expressions; 6/7-field expressions pass through unchanged, matching
        // what the validator accepts. (Previously "0 " was always prepended,
        // which silently shifted the meaning of valid 6-field schedules.)
        let expr = if cron.schedule.split_whitespace().count() == 5 {
            format!("0 {}", cron.schedule)
        } else {
            cron.schedule.clone()
        };
        let schedule = Schedule::from_str(&expr).map_err(|e| CronError::InvalidExpression {
            expr: cron.schedule.clone(),
            reason: e.to_string(),
        })?;
        if let Some(next) = schedule.upcoming(Utc).next() {
            self.schedule.push(Reverse(ScheduleEntry {
                next_fire: next,
                path: path.to_path_buf(),
            }));
        }
        Ok(())
    }
    /// Number of crons currently scheduled.
    pub fn scheduled_count(&self) -> usize {
        self.schedule.len()
    }
}

View file

@ -0,0 +1,41 @@
use std::collections::HashMap;
use std::time::Duration;
/// Captured output of a successfully completed execution attempt.
#[derive(Debug, Clone)]
pub struct ExecutionResult {
    /// Text the process wrote to stdout.
    pub stdout: String,
    /// Text the process wrote to stderr.
    pub stderr: String,
    /// Exit code if one was reported (None e.g. when no code was available).
    pub exit_code: Option<i32>,
    /// Wall-clock time the execution took.
    pub duration: Duration,
}
/// Ways an agent execution can fail.
#[derive(Debug, thiserror::Error)]
pub enum ExecutionError {
    /// The execution exceeded its wall-clock limit.
    #[error("Execution timed out after {0:?}")]
    Timeout(Duration),
    /// The process could not be spawned at all (e.g. missing binary).
    #[error("Process failed to start: {0}")]
    SpawnFailed(String),
    /// The process ran but exited non-zero; carries captured stderr.
    #[error("Process exited with code {code}: {stderr}")]
    NonZeroExit { code: i32, stderr: String },
    /// Failure from an HTTP-based executor backend.
    #[error("HTTP error: {0}")]
    Http(String),
    /// Any other I/O failure (e.g. writing the prompt to stdin).
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
}
/// Backend-agnostic interface for running an agent task.
#[async_trait::async_trait]
pub trait Executor: Send + Sync {
    /// Execute `executable` and return its captured result.
    ///
    /// - `model`: optional model identifier passed through to the backend.
    /// - `system_prompt`: the agent's composed system prompt.
    /// - `task_context`: the task body to act on.
    /// - `env`: extra environment variables for the execution.
    /// - `timeout`: hard wall-clock limit for the whole run.
    async fn execute(
        &self,
        executable: &str,
        model: Option<&str>,
        system_prompt: &str,
        task_context: &str,
        env: &HashMap<String, String>,
        timeout: Duration,
    ) -> Result<ExecutionResult, ExecutionError>;
}

View file

@ -0,0 +1 @@
pub mod process;

View file

@ -0,0 +1,132 @@
use crate::executor::{ExecutionError, ExecutionResult, Executor};
use std::collections::HashMap;
use std::time::{Duration, Instant};
use tokio::io::AsyncWriteExt;
use tokio::process::Command;
/// Generic process executor: spawns a child process, pipes prompt to stdin,
/// captures stdout/stderr.
pub struct GenericProcessExecutor {
    // Exposed to child processes via the VAULT_PATH environment variable.
    vault_path: std::path::PathBuf,
}
impl GenericProcessExecutor {
    /// Create an executor rooted at the given vault path.
    pub fn new(vault_path: std::path::PathBuf) -> Self {
        Self { vault_path }
    }
    /// Expand `${VAR}` references in environment variable values.
    ///
    /// Expansion is a single left-to-right pass: each `${NAME}` is replaced
    /// with the process environment value of `NAME` (empty string when
    /// unset) and scanning resumes *after* the substituted text. Replacement
    /// text is deliberately not re-scanned, so a self-referential value such
    /// as `FOO="${FOO}"` cannot loop forever (the previous implementation
    /// restarted the scan from the beginning after every substitution, which
    /// did not terminate for such values). An unterminated `${` is kept
    /// verbatim.
    fn expand_env(value: &str) -> String {
        let mut result = String::with_capacity(value.len());
        let mut rest = value;
        while let Some(start) = rest.find("${") {
            // Copy the literal text preceding the marker.
            result.push_str(&rest[..start]);
            let after_marker = &rest[start + 2..];
            match after_marker.find('}') {
                Some(end) => {
                    let var_name = &after_marker[..end];
                    result.push_str(&std::env::var(var_name).unwrap_or_default());
                    rest = &after_marker[end + 1..];
                }
                None => {
                    // Unterminated `${`: keep the remainder as-is.
                    result.push_str(&rest[start..]);
                    rest = "";
                }
            }
        }
        result.push_str(rest);
        result
    }
}
#[async_trait::async_trait]
impl Executor for GenericProcessExecutor {
    /// Spawn `executable`, feed it the composed prompt, and capture output.
    ///
    /// The special name "claude-code" invokes the `claude` CLI in
    /// non-interactive mode with the prompt as an argument; any other
    /// executable is spawned as-is and receives the prompt on stdin.
    async fn execute(
        &self,
        executable: &str,
        model: Option<&str>,
        system_prompt: &str,
        task_context: &str,
        env: &HashMap<String, String>,
        timeout: Duration,
    ) -> Result<ExecutionResult, ExecutionError> {
        let start = Instant::now();
        // Build the full prompt
        let full_prompt = if task_context.is_empty() {
            system_prompt.to_string()
        } else {
            format!("{}\n\n## Task\n\n{}", system_prompt, task_context)
        };
        // Determine command and args based on executable type
        let (cmd, args) = if executable == "claude-code" {
            (
                "claude".to_string(),
                vec![
                    "--print".to_string(),
                    "--dangerously-skip-permissions".to_string(),
                    full_prompt.clone(),
                ],
            )
        } else {
            (executable.to_string(), vec![])
        };
        let mut command = Command::new(&cmd);
        command.args(&args);
        // Kill the child if its handle is dropped. Without this, hitting the
        // timeout below would drop the wait future and leave the agent
        // process running detached from the scheduler.
        command.kill_on_drop(true);
        // Set environment
        command.env("VAULT_PATH", &self.vault_path);
        for (key, value) in env {
            command.env(key, Self::expand_env(value));
        }
        if let Some(model) = model {
            command.env("MODEL", model);
        }
        // For non-claude executables, pipe prompt via stdin
        if executable != "claude-code" {
            command
                .stdin(std::process::Stdio::piped())
                .stdout(std::process::Stdio::piped())
                .stderr(std::process::Stdio::piped());
        } else {
            command
                .stdout(std::process::Stdio::piped())
                .stderr(std::process::Stdio::piped());
        }
        let mut child = command
            .spawn()
            .map_err(|e| ExecutionError::SpawnFailed(format!("{}: {}", cmd, e)))?;
        // Write prompt to stdin for non-claude executables
        if executable != "claude-code" {
            if let Some(mut stdin) = child.stdin.take() {
                stdin.write_all(full_prompt.as_bytes()).await?;
                // Close stdin so the child sees EOF and can finish.
                drop(stdin);
            }
        }
        // Wait with timeout. On timeout the child handle is dropped, and
        // kill_on_drop (set above) terminates the process.
        let output = match tokio::time::timeout(timeout, child.wait_with_output()).await {
            Ok(result) => result.map_err(|e| ExecutionError::SpawnFailed(e.to_string()))?,
            Err(_) => {
                return Err(ExecutionError::Timeout(timeout));
            }
        };
        let duration = start.elapsed();
        let stdout = String::from_utf8_lossy(&output.stdout).to_string();
        let stderr = String::from_utf8_lossy(&output.stderr).to_string();
        let exit_code = output.status.code();
        if output.status.success() {
            Ok(ExecutionResult {
                stdout,
                stderr,
                exit_code,
                duration,
            })
        } else {
            Err(ExecutionError::NonZeroExit {
                code: exit_code.unwrap_or(-1),
                stderr,
            })
        }
    }
}

View file

@ -0,0 +1,5 @@
pub mod cron_engine;
pub mod executor;
pub mod executors;
pub mod state;
pub mod task_runner;

View file

@ -0,0 +1,36 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::path::Path;
use vault_core::error::VaultError;
/// Daemon runtime counters persisted across restarts in `.vault/state.json`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct RuntimeState {
    /// When the daemon last started, if recorded.
    pub last_startup: Option<DateTime<Utc>>,
    /// When the daemon last shut down cleanly, if recorded.
    pub last_shutdown: Option<DateTime<Utc>>,
    /// Lifetime count of executed agent tasks.
    pub total_tasks_executed: u64,
    /// Lifetime count of cron firings.
    pub total_cron_fires: u64,
}
impl RuntimeState {
    /// Load persisted state from `.vault/state.json`.
    ///
    /// A missing file yields the default state; a corrupt file is likewise
    /// treated as default (best effort — these counters are not critical).
    pub fn load(vault_root: &Path) -> Result<Self, VaultError> {
        let state_path = vault_root.join(".vault/state.json");
        if !state_path.exists() {
            return Ok(Self::default());
        }
        let content = std::fs::read_to_string(&state_path)
            .map_err(|e| VaultError::io(e, &state_path))?;
        let state: RuntimeState =
            serde_json::from_str(&content).unwrap_or_default();
        Ok(state)
    }
    /// Persist state to `.vault/state.json`, creating `.vault/` if needed.
    pub fn save(&self, vault_root: &Path) -> Result<(), VaultError> {
        let state_dir = vault_root.join(".vault");
        // A freshly initialised vault may not have the internal state
        // directory yet; create it so the first save does not fail.
        std::fs::create_dir_all(&state_dir).map_err(|e| VaultError::io(e, &state_dir))?;
        let state_path = state_dir.join("state.json");
        let content = serde_json::to_string_pretty(self)
            .map_err(|e| VaultError::InvalidEntity {
                path: state_path.clone(),
                reason: e.to_string(),
            })?;
        std::fs::write(&state_path, content).map_err(|e| VaultError::io(e, &state_path))
    }
}

View file

@ -0,0 +1,253 @@
use crate::executor::{ExecutionError, Executor};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::sync::Semaphore;
use vault_core::entity::VaultEntity;
use vault_core::error::VaultError;
use vault_core::filesystem;
use vault_core::frontmatter;
use vault_core::types::{Agent, AgentTask};
use vault_watch::write_filter::DaemonWriteFilter;
/// Executes queued agent tasks, moving each file through the
/// `todos/agent/{queued,running,done,failed}` lifecycle while enforcing a
/// global concurrency limit.
pub struct TaskRunner {
    // Root of the vault directory tree.
    vault_root: PathBuf,
    // Caps the number of concurrently executing tasks.
    semaphore: Arc<Semaphore>,
    // Backend that actually runs the agent (process, HTTP, ...).
    executor: Arc<dyn Executor>,
    // Registers daemon-originated writes so the watcher can suppress them.
    write_filter: Arc<DaemonWriteFilter>,
}
impl TaskRunner {
    /// Create a runner that executes at most `max_parallel` tasks at a time.
    pub fn new(
        vault_root: PathBuf,
        max_parallel: usize,
        executor: Arc<dyn Executor>,
        write_filter: Arc<DaemonWriteFilter>,
    ) -> Self {
        Self {
            vault_root,
            semaphore: Arc::new(Semaphore::new(max_parallel)),
            executor,
            write_filter,
        }
    }
    /// Process all currently queued tasks.
    ///
    /// Each task is spawned onto its own tokio task and this method returns
    /// immediately; the concurrency cap is enforced inside `execute_task`
    /// via the shared semaphore. Returns the paths that were scheduled.
    pub async fn process_queued(&self) -> Result<Vec<PathBuf>, VaultError> {
        let queued_dir = self.vault_root.join("todos/agent/queued");
        let files = filesystem::list_md_files(&queued_dir)?;
        let mut spawned = Vec::new();
        for file in files {
            spawned.push(file.clone());
            // Cheap handle-clone of self for the spawned task: every field
            // is an Arc except vault_root (a PathBuf).
            let runner = TaskRunner {
                vault_root: self.vault_root.clone(),
                semaphore: self.semaphore.clone(),
                executor: self.executor.clone(),
                write_filter: self.write_filter.clone(),
            };
            tokio::spawn(async move {
                if let Err(e) = runner.execute_task(&file).await {
                    tracing::error!(task = ?file, error = %e, "Task execution failed");
                }
            });
        }
        Ok(spawned)
    }
    /// Execute a single agent task.
    ///
    /// Lifecycle: acquire a semaphore permit, move the file
    /// queued/ -> running/, stamp `started`, run the agent's executor, then
    /// move to done/ on success, back to queued/ for a retry, or to failed/
    /// once retries are exhausted. Every write and move is registered with
    /// the write filter first so the watcher ignores these daemon-originated
    /// filesystem events.
    #[tracing::instrument(skip(self), fields(task = ?task_path.file_name()))]
    pub async fn execute_task(&self, task_path: &Path) -> Result<(), VaultError> {
        // Concurrency cap: the permit is held for the entire execution.
        let _permit = self
            .semaphore
            .acquire()
            .await
            .map_err(|e| VaultError::InvalidEntity {
                path: task_path.to_path_buf(),
                reason: format!("Semaphore closed: {}", e),
            })?;
        let task_entity: VaultEntity<AgentTask> = filesystem::read_entity(task_path)?;
        let agent_name = &task_entity.frontmatter.agent;
        // Load agent definition
        let agent_path = self.vault_root.join("agents").join(format!("{}.md", agent_name));
        let agent_entity: VaultEntity<Agent> = filesystem::read_entity(&agent_path)?;
        // Move queued -> running
        let running_path = self
            .vault_root
            .join("todos/agent/running")
            .join(task_path.file_name().unwrap());
        self.write_filter.register(running_path.clone());
        filesystem::move_file(task_path, &running_path)?;
        // Update started timestamp
        let content = std::fs::read_to_string(&running_path)
            .map_err(|e| VaultError::io(e, &running_path))?;
        let updates = serde_json::json!({
            "started": chrono::Utc::now().to_rfc3339(),
        });
        let updated = frontmatter::update_frontmatter_fields(&content, &running_path, &updates)?;
        self.write_filter.register(running_path.clone());
        std::fs::write(&running_path, updated).map_err(|e| VaultError::io(e, &running_path))?;
        // Compose prompt
        let system_prompt =
            vault_core::prompt::compose_prompt(&self.vault_root, &agent_entity, None)?;
        let task_context = &task_entity.body;
        let timeout = std::time::Duration::from_secs(agent_entity.frontmatter.timeout);
        // Execute
        let result = self
            .executor
            .execute(
                &agent_entity.frontmatter.executable,
                agent_entity.frontmatter.model.as_deref(),
                &system_prompt,
                task_context,
                &agent_entity.frontmatter.env,
                timeout,
            )
            .await;
        match result {
            Ok(exec_result) => {
                // Move running -> done, after recording completion time and
                // the captured output in the frontmatter.
                let done_path = self
                    .vault_root
                    .join("todos/agent/done")
                    .join(running_path.file_name().unwrap());
                let content = std::fs::read_to_string(&running_path)
                    .map_err(|e| VaultError::io(e, &running_path))?;
                let updates = serde_json::json!({
                    "completed": chrono::Utc::now().to_rfc3339(),
                    "output": {
                        "stdout": exec_result.stdout,
                        "duration_secs": exec_result.duration.as_secs(),
                    },
                });
                let updated =
                    frontmatter::update_frontmatter_fields(&content, &running_path, &updates)?;
                self.write_filter.register(running_path.clone());
                std::fs::write(&running_path, updated)
                    .map_err(|e| VaultError::io(e, &running_path))?;
                self.write_filter.register(done_path.clone());
                filesystem::move_file(&running_path, &done_path)?;
                tracing::info!(task = ?done_path, "Task completed successfully");
            }
            Err(exec_error) => {
                // Re-read the task: the retry counters on disk are the
                // source of truth for the retry decision.
                let task_entity: VaultEntity<AgentTask> = filesystem::read_entity(&running_path)?;
                let retry = task_entity.frontmatter.retry;
                let max_retries = task_entity.frontmatter.max_retries;
                if retry < max_retries {
                    // Re-queue with incremented retry count and a cleared
                    // `started` timestamp so the next attempt starts fresh.
                    let content = std::fs::read_to_string(&running_path)
                        .map_err(|e| VaultError::io(e, &running_path))?;
                    let updates = serde_json::json!({
                        "retry": retry + 1,
                        "started": null,
                        "error": format!("Attempt {}: {}", retry + 1, exec_error),
                    });
                    let updated =
                        frontmatter::update_frontmatter_fields(&content, &running_path, &updates)?;
                    self.write_filter.register(running_path.clone());
                    std::fs::write(&running_path, updated)
                        .map_err(|e| VaultError::io(e, &running_path))?;
                    let queued_path = self
                        .vault_root
                        .join("todos/agent/queued")
                        .join(running_path.file_name().unwrap());
                    self.write_filter.register(queued_path.clone());
                    filesystem::move_file(&running_path, &queued_path)?;
                    tracing::warn!(
                        task = ?queued_path,
                        retry = retry + 1,
                        max_retries,
                        "Task failed, re-queued"
                    );
                } else {
                    // Retries exhausted: move running -> failed with a
                    // human-readable error recorded in the frontmatter.
                    let failed_path = self
                        .vault_root
                        .join("todos/agent/failed")
                        .join(running_path.file_name().unwrap());
                    let content = std::fs::read_to_string(&running_path)
                        .map_err(|e| VaultError::io(e, &running_path))?;
                    let error_msg = match &exec_error {
                        ExecutionError::Timeout(d) => format!("Timed out after {:?}", d),
                        ExecutionError::NonZeroExit { code, stderr } => {
                            format!("Exit code {}: {}", code, stderr)
                        }
                        other => other.to_string(),
                    };
                    let updates = serde_json::json!({
                        "completed": chrono::Utc::now().to_rfc3339(),
                        "error": error_msg,
                    });
                    let updated =
                        frontmatter::update_frontmatter_fields(&content, &running_path, &updates)?;
                    self.write_filter.register(running_path.clone());
                    std::fs::write(&running_path, updated)
                        .map_err(|e| VaultError::io(e, &running_path))?;
                    self.write_filter.register(failed_path.clone());
                    filesystem::move_file(&running_path, &failed_path)?;
                    tracing::error!(
                        task = ?failed_path,
                        error = %exec_error,
                        "Task failed permanently"
                    );
                }
            }
        }
        Ok(())
    }
    /// On startup, recover tasks that were left in running/ (daemon crashed).
    /// Move them back to queued/ for re-execution.
    pub fn recover_running_tasks(&self) -> Result<Vec<PathBuf>, VaultError> {
        let running_dir = self.vault_root.join("todos/agent/running");
        let files = filesystem::list_md_files(&running_dir)?;
        let mut recovered = Vec::new();
        for file in &files {
            let queued_path = self
                .vault_root
                .join("todos/agent/queued")
                .join(file.file_name().unwrap());
            // Reset started timestamp (best effort — a task with a stale
            // frontmatter is still worth re-queuing).
            let content =
                std::fs::read_to_string(file).map_err(|e| VaultError::io(e, file))?;
            let updates = serde_json::json!({
                "started": null,
            });
            if let Ok(updated) = frontmatter::update_frontmatter_fields(&content, file, &updates) {
                self.write_filter.register(file.clone());
                let _ = std::fs::write(file, updated);
            }
            self.write_filter.register(queued_path.clone());
            filesystem::move_file(file, &queued_path)?;
            recovered.push(queued_path);
            tracing::info!(task = ?file, "Recovered running task");
        }
        if !recovered.is_empty() {
            tracing::info!(count = recovered.len(), "Recovered tasks from previous run");
        }
        Ok(recovered)
    }
}

View file

@ -0,0 +1,11 @@
[package]
name = "vault-watch"
version.workspace = true
edition.workspace = true
[dependencies]
vault-core.workspace = true
notify.workspace = true
tokio.workspace = true
tracing.workspace = true
thiserror.workspace = true

View file

@ -0,0 +1,214 @@
use crate::events::VaultEvent;
use notify::event::{CreateKind, ModifyKind, RemoveKind, RenameMode};
use notify::EventKind;
use std::path::{Path, PathBuf};
use vault_core::entity::{classify_path, EntityKind};
/// Classify a raw notify event into typed VaultEvents.
/// Classify a raw notify event into typed VaultEvents.
///
/// Only markdown files are considered entities; dotfiles, temp files and
/// anything under the internal `.vault/` directory are ignored. A rename
/// that carries both endpoints collapses into a single Moved event.
pub fn classify(
    event: &notify::Event,
    vault_root: &Path,
) -> Vec<VaultEvent> {
    // Rename with both endpoints: emit one Moved event classified by the
    // destination path. (The per-path loop below never matches rename
    // kinds, so nothing else would be emitted for this event anyway.)
    if matches!(
        event.kind,
        EventKind::Modify(ModifyKind::Name(RenameMode::Both))
    ) && event.paths.len() == 2
    {
        let (from, to) = (&event.paths[0], &event.paths[1]);
        if to.extension().is_some_and(|e| e == "md") {
            if let Ok(rel_to) = to.strip_prefix(vault_root) {
                return vec![make_moved(classify_path(rel_to), from.clone(), to.clone())];
            }
        }
    }
    event
        .paths
        .iter()
        .filter_map(|path| {
            // Only markdown files are vault entities.
            if path.extension().is_none_or(|e| e != "md") {
                return None;
            }
            // Ignore editor droppings: dotfiles, backups, temp files.
            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                if name.starts_with('.') || name.starts_with('~') || name.ends_with(".tmp") {
                    return None;
                }
            }
            let relative = path.strip_prefix(vault_root).ok()?;
            // Daemon-internal bookkeeping lives under .vault/.
            if relative.starts_with(".vault") {
                return None;
            }
            let kind = classify_path(relative);
            match event.kind {
                EventKind::Create(CreateKind::File) | EventKind::Create(CreateKind::Any) => {
                    Some(make_created(kind, path.clone()))
                }
                EventKind::Modify(ModifyKind::Data(_))
                | EventKind::Modify(ModifyKind::Any)
                | EventKind::Modify(ModifyKind::Metadata(_)) => {
                    Some(make_modified(kind, path.clone()))
                }
                EventKind::Remove(RemoveKind::File) | EventKind::Remove(RemoveKind::Any) => {
                    Some(make_deleted(kind, path.clone()))
                }
                _ => None,
            }
        })
        .collect()
}
fn make_created(kind: EntityKind, path: PathBuf) -> VaultEvent {
match kind {
EntityKind::Agent => VaultEvent::AgentCreated(path),
EntityKind::Skill => VaultEvent::SkillCreated(path),
EntityKind::CronActive | EntityKind::CronPaused | EntityKind::CronTemplate => {
VaultEvent::CronCreated(path)
}
EntityKind::HumanTask(_) => VaultEvent::HumanTaskCreated(path),
EntityKind::AgentTask(_) => VaultEvent::AgentTaskCreated(path),
EntityKind::Knowledge => VaultEvent::KnowledgeCreated(path),
EntityKind::ViewPage | EntityKind::ViewWidget | EntityKind::ViewLayout | EntityKind::ViewCustom => {
VaultEvent::ViewCreated(path)
}
EntityKind::Notification => VaultEvent::NotificationCreated(path),
EntityKind::Unknown => VaultEvent::FileChanged(path),
}
}
fn make_modified(kind: EntityKind, path: PathBuf) -> VaultEvent {
match kind {
EntityKind::Agent => VaultEvent::AgentModified(path),
EntityKind::Skill => VaultEvent::SkillModified(path),
EntityKind::CronActive | EntityKind::CronPaused | EntityKind::CronTemplate => {
VaultEvent::CronModified(path)
}
EntityKind::HumanTask(_) => VaultEvent::HumanTaskModified(path),
EntityKind::AgentTask(_) => VaultEvent::AgentTaskModified(path),
EntityKind::Knowledge => VaultEvent::KnowledgeModified(path),
EntityKind::ViewPage | EntityKind::ViewWidget | EntityKind::ViewLayout | EntityKind::ViewCustom => {
VaultEvent::ViewModified(path)
}
EntityKind::Notification => VaultEvent::NotificationCreated(path),
EntityKind::Unknown => VaultEvent::FileChanged(path),
}
}
fn make_deleted(kind: EntityKind, path: PathBuf) -> VaultEvent {
match kind {
EntityKind::Agent => VaultEvent::AgentDeleted(path),
EntityKind::Skill => VaultEvent::SkillDeleted(path),
EntityKind::CronActive | EntityKind::CronPaused | EntityKind::CronTemplate => {
VaultEvent::CronDeleted(path)
}
EntityKind::HumanTask(_) => VaultEvent::HumanTaskDeleted(path),
EntityKind::AgentTask(_) => VaultEvent::AgentTaskDeleted(path),
EntityKind::Knowledge => VaultEvent::KnowledgeDeleted(path),
EntityKind::ViewPage | EntityKind::ViewWidget | EntityKind::ViewLayout | EntityKind::ViewCustom => {
VaultEvent::ViewDeleted(path)
}
EntityKind::Notification => VaultEvent::NotificationExpired(path),
EntityKind::Unknown => VaultEvent::FileChanged(path),
}
}
/// Map a rename to a Moved event for kinds that support moves; any other
/// kind is treated as a creation at the destination path.
fn make_moved(kind: EntityKind, from: PathBuf, to: PathBuf) -> VaultEvent {
    match kind {
        EntityKind::CronActive | EntityKind::CronPaused => VaultEvent::CronMoved { from, to },
        EntityKind::HumanTask(_) => VaultEvent::HumanTaskMoved { from, to },
        EntityKind::AgentTask(_) => VaultEvent::AgentTaskMoved { from, to },
        other => make_created(other, to),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use notify::event::{CreateKind, DataChange, ModifyKind};

    /// Build a raw notify event with the given kind and paths.
    fn raw(kind: EventKind, paths: Vec<PathBuf>) -> notify::Event {
        notify::Event {
            kind,
            paths,
            attrs: Default::default(),
        }
    }

    #[test]
    fn test_classify_agent_created() {
        let root = PathBuf::from("/vault");
        let events = classify(
            &raw(
                EventKind::Create(CreateKind::File),
                vec![PathBuf::from("/vault/agents/reviewer.md")],
            ),
            &root,
        );
        assert_eq!(events.len(), 1);
        assert!(matches!(events[0], VaultEvent::AgentCreated(_)));
    }

    #[test]
    fn test_skip_non_md() {
        let root = PathBuf::from("/vault");
        let events = classify(
            &raw(
                EventKind::Create(CreateKind::File),
                vec![PathBuf::from("/vault/agents/readme.txt")],
            ),
            &root,
        );
        assert!(events.is_empty());
    }

    #[test]
    fn test_skip_dotfiles() {
        let root = PathBuf::from("/vault");
        let events = classify(
            &raw(
                EventKind::Create(CreateKind::File),
                vec![PathBuf::from("/vault/agents/.hidden.md")],
            ),
            &root,
        );
        assert!(events.is_empty());
    }

    #[test]
    fn test_classify_task_modified() {
        let root = PathBuf::from("/vault");
        let events = classify(
            &raw(
                EventKind::Modify(ModifyKind::Data(DataChange::Content)),
                vec![PathBuf::from("/vault/todos/agent/running/task-1.md")],
            ),
            &root,
        );
        assert_eq!(events.len(), 1);
        assert!(matches!(events[0], VaultEvent::AgentTaskModified(_)));
    }

    #[test]
    fn test_classify_rename() {
        let root = PathBuf::from("/vault");
        let events = classify(
            &raw(
                EventKind::Modify(ModifyKind::Name(RenameMode::Both)),
                vec![
                    PathBuf::from("/vault/todos/agent/queued/task.md"),
                    PathBuf::from("/vault/todos/agent/running/task.md"),
                ],
            ),
            &root,
        );
        assert_eq!(events.len(), 1);
        assert!(matches!(events[0], VaultEvent::AgentTaskMoved { .. }));
    }
}

View file

@ -0,0 +1,108 @@
use std::path::PathBuf;
/// Typed filesystem events over vault entities, produced by the classifier.
#[derive(Debug, Clone)]
pub enum VaultEvent {
    // Agent definitions.
    AgentCreated(PathBuf),
    AgentModified(PathBuf),
    AgentDeleted(PathBuf),
    // Skill definitions.
    SkillCreated(PathBuf),
    SkillModified(PathBuf),
    SkillDeleted(PathBuf),
    // Cron jobs; Moved covers e.g. active <-> paused transitions.
    CronCreated(PathBuf),
    CronModified(PathBuf),
    CronDeleted(PathBuf),
    CronMoved { from: PathBuf, to: PathBuf },
    // Human tasks; Moved covers kanban-state transitions.
    HumanTaskCreated(PathBuf),
    HumanTaskModified(PathBuf),
    HumanTaskMoved { from: PathBuf, to: PathBuf },
    HumanTaskDeleted(PathBuf),
    // Agent tasks; Moved covers queue-state transitions.
    AgentTaskCreated(PathBuf),
    AgentTaskModified(PathBuf),
    AgentTaskMoved { from: PathBuf, to: PathBuf },
    AgentTaskDeleted(PathBuf),
    // Knowledge documents.
    KnowledgeCreated(PathBuf),
    KnowledgeModified(PathBuf),
    KnowledgeDeleted(PathBuf),
    // Dashboard view definitions.
    ViewCreated(PathBuf),
    ViewModified(PathBuf),
    ViewDeleted(PathBuf),
    // Notifications have no "modified" variant; removal means expiry.
    NotificationCreated(PathBuf),
    NotificationExpired(PathBuf),
    // Fallback for markdown files that don't classify as a known entity.
    FileChanged(PathBuf),
}
impl VaultEvent {
    /// Get the primary path associated with this event.
    ///
    /// For Moved variants this is the destination (`to`) path.
    pub fn path(&self) -> &PathBuf {
        match self {
            Self::AgentCreated(p)
            | Self::AgentModified(p)
            | Self::AgentDeleted(p)
            | Self::SkillCreated(p)
            | Self::SkillModified(p)
            | Self::SkillDeleted(p)
            | Self::CronCreated(p)
            | Self::CronModified(p)
            | Self::CronDeleted(p)
            | Self::HumanTaskCreated(p)
            | Self::HumanTaskModified(p)
            | Self::HumanTaskDeleted(p)
            | Self::AgentTaskCreated(p)
            | Self::AgentTaskModified(p)
            | Self::AgentTaskDeleted(p)
            | Self::KnowledgeCreated(p)
            | Self::KnowledgeModified(p)
            | Self::KnowledgeDeleted(p)
            | Self::ViewCreated(p)
            | Self::ViewModified(p)
            | Self::ViewDeleted(p)
            | Self::NotificationCreated(p)
            | Self::NotificationExpired(p)
            | Self::FileChanged(p) => p,
            Self::CronMoved { to, .. }
            | Self::HumanTaskMoved { to, .. }
            | Self::AgentTaskMoved { to, .. } => to,
        }
    }
    /// Return a string event type name for serialization.
    ///
    /// Names are stable snake_case identifiers, one per variant.
    pub fn event_type(&self) -> &'static str {
        match self {
            Self::AgentCreated(_) => "agent_created",
            Self::AgentModified(_) => "agent_modified",
            Self::AgentDeleted(_) => "agent_deleted",
            Self::SkillCreated(_) => "skill_created",
            Self::SkillModified(_) => "skill_modified",
            Self::SkillDeleted(_) => "skill_deleted",
            Self::CronCreated(_) => "cron_created",
            Self::CronModified(_) => "cron_modified",
            Self::CronDeleted(_) => "cron_deleted",
            Self::CronMoved { .. } => "cron_moved",
            Self::HumanTaskCreated(_) => "human_task_created",
            Self::HumanTaskModified(_) => "human_task_modified",
            Self::HumanTaskMoved { .. } => "human_task_moved",
            Self::HumanTaskDeleted(_) => "human_task_deleted",
            Self::AgentTaskCreated(_) => "agent_task_created",
            Self::AgentTaskModified(_) => "agent_task_modified",
            Self::AgentTaskMoved { .. } => "agent_task_moved",
            Self::AgentTaskDeleted(_) => "agent_task_deleted",
            Self::KnowledgeCreated(_) => "knowledge_created",
            Self::KnowledgeModified(_) => "knowledge_modified",
            Self::KnowledgeDeleted(_) => "knowledge_deleted",
            Self::ViewCreated(_) => "view_created",
            Self::ViewModified(_) => "view_modified",
            Self::ViewDeleted(_) => "view_deleted",
            Self::NotificationCreated(_) => "notification_created",
            Self::NotificationExpired(_) => "notification_expired",
            Self::FileChanged(_) => "file_changed",
        }
    }
}

View file

@ -0,0 +1,4 @@
pub mod classifier;
pub mod events;
pub mod watcher;
pub mod write_filter;

View file

@ -0,0 +1,83 @@
use crate::classifier;
use crate::events::VaultEvent;
use crate::write_filter::DaemonWriteFilter;
use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher};
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::mpsc;
/// Errors that can occur while setting up or running the vault watcher.
#[derive(Debug, thiserror::Error)]
pub enum WatchError {
    /// The underlying `notify` watcher failed (creation or installing
    /// the watch on the vault root).
    #[error("Notify error: {0}")]
    Notify(#[from] notify::Error),
    /// The internal event channel was closed.
    #[error("Channel closed")]
    ChannelClosed,
}
/// Watches the vault directory tree and yields classified `VaultEvent`s,
/// with daemon-originated writes filtered out.
pub struct VaultWatcher {
    // Root directory being watched (recursively).
    vault_root: PathBuf,
    // Shared filter used to drop events for files the daemon itself wrote.
    write_filter: Arc<DaemonWriteFilter>,
    // Held only to keep the OS watch alive; dropping it stops the watch.
    _watcher: RecommendedWatcher,
    // Receiving end of the classified, filtered event stream.
    rx: mpsc::Receiver<VaultEvent>,
}
impl VaultWatcher {
pub fn new(
vault_root: PathBuf,
write_filter: Arc<DaemonWriteFilter>,
) -> Result<Self, WatchError> {
let (event_tx, event_rx) = mpsc::channel(256);
let root = vault_root.clone();
let filter = write_filter.clone();
let (notify_tx, mut notify_rx) = mpsc::channel(512);
let mut watcher = RecommendedWatcher::new(
move |res: Result<notify::Event, notify::Error>| {
if let Ok(event) = res {
let _ = notify_tx.blocking_send(event);
}
},
Config::default(),
)?;
watcher.watch(&vault_root, RecursiveMode::Recursive)?;
// Spawn classification task
let tx = event_tx.clone();
tokio::spawn(async move {
while let Some(raw_event) = notify_rx.recv().await {
let vault_events = classifier::classify(&raw_event, &root);
for event in vault_events {
if filter.should_suppress(event.path()) {
tracing::debug!(?event, "Suppressed daemon-originated event");
continue;
}
if tx.send(event).await.is_err() {
return;
}
}
}
});
Ok(Self {
vault_root,
write_filter,
_watcher: watcher,
rx: event_rx,
})
}
pub fn vault_root(&self) -> &PathBuf {
&self.vault_root
}
pub fn write_filter(&self) -> &Arc<DaemonWriteFilter> {
&self.write_filter
}
pub async fn recv(&mut self) -> Option<VaultEvent> {
self.rx.recv().await
}
}

View file

@ -0,0 +1,67 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use std::time::{Duration, Instant};
/// How long a registered path remains eligible for suppression before
/// it is considered stale and discarded.
const WRITE_FILTER_TTL: Duration = Duration::from_secs(5);

/// Filters out filesystem events triggered by daemon-originated writes.
///
/// Before writing a file, register the path. When an event arrives,
/// check if it should be suppressed. Each registration suppresses at
/// most one event and expires after [`WRITE_FILTER_TTL`] if no event
/// ever arrives.
pub struct DaemonWriteFilter {
    // Paths the daemon is about to write, keyed to registration time.
    pending: Mutex<HashMap<PathBuf, Instant>>,
}

impl DaemonWriteFilter {
    /// Create an empty filter.
    pub fn new() -> Self {
        Self {
            pending: Mutex::new(HashMap::new()),
        }
    }

    /// Register a path that the daemon is about to write.
    ///
    /// Re-registering the same path refreshes its timestamp.
    pub fn register(&self, path: PathBuf) {
        let mut pending = self.pending.lock().unwrap();
        pending.insert(path, Instant::now());
    }

    /// Check if an event for this path should be suppressed.
    ///
    /// Returns true if the event should be suppressed (i.e., it was
    /// daemon-originated). The registration is consumed, so a second
    /// call for the same path returns false until it is re-registered.
    //
    // Takes `&Path` rather than `&PathBuf`: callers holding `&PathBuf`
    // still coerce, and `PathBuf: Borrow<Path>` lets the map look up by
    // `&Path` directly.
    pub fn should_suppress(&self, path: &Path) -> bool {
        let mut pending = self.pending.lock().unwrap();
        // Drop stale registrations so abandoned writes cannot suppress
        // unrelated future events.
        pending.retain(|_, ts| ts.elapsed() < WRITE_FILTER_TTL);
        pending.remove(path).is_some()
    }
}

impl Default for DaemonWriteFilter {
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// A registered path is suppressed exactly once.
    #[test]
    fn test_register_and_suppress() {
        let filter = DaemonWriteFilter::new();
        let target = PathBuf::from("/vault/crons/active/test.md");
        filter.register(target.clone());
        assert!(filter.should_suppress(&target));
        // The registration was consumed above, so it no longer suppresses.
        assert!(!filter.should_suppress(&target));
    }

    /// Paths that were never registered pass through untouched.
    #[test]
    fn test_unregistered_not_suppressed() {
        let filter = DaemonWriteFilter::new();
        assert!(!filter.should_suppress(&PathBuf::from("/vault/agents/test.md")));
    }
}