Initial implementation of vault-os

Complete implementation across all 13 phases:

- vault-core: types, YAML frontmatter parsing, entity classification,
  filesystem ops, config, prompt composition, validation, search
- vault-watch: filesystem watcher with daemon write filtering, event
  classification
- vault-scheduler: cron engine, process executor, task runner with
  retry logic and concurrency limiting
- vault-api: Axum REST API (15 route modules), WebSocket with broadcast,
  AI assistant proxy, validation, templates
- Dashboard: React + TypeScript + Tailwind v4 with kanban, CodeMirror
  editor, dynamic view system, AI chat sidebar
- Nix flake with dev shell and NixOS module
- Graceful shutdown, inotify overflow recovery, tracing instrumentation

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Harald Hoyer 2026-03-03 01:21:17 +01:00
commit f820a72b04
123 changed files with 18288 additions and 0 deletions

1
.envrc Normal file
View file

@ -0,0 +1 @@
use flake

9
.gitignore vendored Normal file
View file

@ -0,0 +1,9 @@
/target
.vault/
.direnv/
*.swp
*.swo
*~
.DS_Store
dashboard/node_modules/
dashboard/dist/

2783
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

54
Cargo.toml Normal file
View file

@ -0,0 +1,54 @@
# Workspace root: the library crates under crates/* plus the vault-os binary
# crate declared at the bottom of this file.
[workspace]
members = ["crates/*"]
resolver = "2"
# Metadata inherited by member crates via `<key>.workspace = true`.
[workspace.package]
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
authors = ["Harald Hoyer <harald@hoyer.xyz>"]
# Single source of truth for dependency versions; member crates reference
# these with `<name>.workspace = true`.
[workspace.dependencies]
serde = { version = "1", features = ["derive"] }
serde_yaml = "0.9"
serde_json = "1"
chrono = { version = "0.4", features = ["serde"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
thiserror = "2"
anyhow = "1"
uuid = { version = "1", features = ["v4", "serde"] }
tokio = { version = "1", features = ["full"] }
axum = { version = "0.8", features = ["ws"] }
tower = "0.5"
tower-http = { version = "0.6", features = ["cors", "trace", "fs"] }
notify = "8"
cron = "0.15"
async-trait = "0.1"
reqwest = { version = "0.12", features = ["json"] }
clap = { version = "4", features = ["derive", "env"] }
rust-embed = "8"
pulldown-cmark = "0.12"
# Path dependencies so member crates can depend on each other by name.
vault-core = { path = "crates/vault-core" }
vault-watch = { path = "crates/vault-watch" }
vault-scheduler = { path = "crates/vault-scheduler" }
vault-api = { path = "crates/vault-api" }
# The root package: the vault-os daemon binary that ties the crates together.
[package]
name = "vault-os"
version.workspace = true
edition.workspace = true
[dependencies]
vault-core.workspace = true
vault-watch.workspace = true
vault-scheduler.workspace = true
vault-api.workspace = true
tokio.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
clap.workspace = true
anyhow.workspace = true
axum.workspace = true
chrono.workspace = true

190
LICENSE-APACHE Normal file
View file

@ -0,0 +1,190 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to the Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by the Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding any notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
Copyright 2026 Harald Hoyer <harald@hoyer.xyz>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

21
LICENSE-MIT Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2026 Harald Hoyer <harald@hoyer.xyz>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

260
README.md Normal file
View file

@ -0,0 +1,260 @@
# vault-os
A personal AI operations platform — a single Rust daemon that turns a directory of markdown files with YAML frontmatter into a reactive system: knowledge base, task manager, agent orchestrator, cron scheduler, and web dashboard.
**Everything is markdown.** Status is directory-based. The filesystem is the database.
## Architecture
```
vault-os (binary)
├── vault-core # Types, frontmatter parsing, filesystem ops, validation, search
├── vault-watch # Filesystem watcher (notify/inotify) with daemon write filtering
├── vault-scheduler # Cron engine, process executor, task runner with retry logic
└── vault-api # Axum REST API, WebSocket, embedded dashboard
```
The daemon runs three concurrent event sources via `tokio::select!`:
- **Filesystem events** — file changes trigger cache updates, task execution, cron rescheduling
- **Cron timer** — fires scheduled agent tasks at the right time
- **Shutdown signal** — graceful shutdown waiting for running tasks
## Vault Directory Structure
```
vault/
├── agents/ # Agent definitions
├── skills/ # Reusable skill modules
├── crons/
│ ├── active/ # Enabled cron jobs
│ ├── paused/ # Disabled cron jobs
│ └── templates/ # Cron templates
├── todos/
│ ├── harald/ # Human tasks
│ │ ├── urgent/
│ │ ├── open/
│ │ ├── in-progress/
│ │ └── done/
│ └── agent/ # Agent task queue
│ ├── queued/
│ ├── running/
│ ├── done/
│ └── failed/
├── knowledge/ # Free-form knowledge base
├── views/ # Dashboard view definitions
│ ├── pages/
│ ├── widgets/
│ ├── layouts/
│ └── notifications/
└── .vault/ # Daemon state (git-ignored)
├── config.yaml
└── state.json
```
## Building
### Prerequisites
**With Nix (recommended):**
```sh
nix develop
```
This gives you Rust (stable, latest), rust-analyzer, clippy, Node.js 22, npm, and cargo-watch.
**Without Nix:**
- Rust stable (1.75+)
- Node.js 22+
- npm
- pkg-config, openssl (on Linux)
### Build the daemon
```sh
cargo build --release
```
### Build the dashboard
```sh
cd dashboard
npm install
npm run build
```
### Run tests
```sh
cargo test --workspace
```
### Run clippy
```sh
cargo clippy --workspace
```
## Usage
### Start the daemon
```sh
vault-os --vault /path/to/your/vault
```
The daemon creates the directory structure automatically on first run.
### CLI Options
| Flag | Env Var | Default | Description |
|------|---------|---------|-------------|
| `--vault <PATH>` | `VAULT_PATH` | (required) | Path to vault directory |
| `--port <PORT>` | `VAULT_PORT` | `8080` | HTTP/WebSocket port |
| `--bind <ADDR>` | `VAULT_BIND` | `127.0.0.1` | Bind address |
| `--max-parallel <N>` | `VAULT_MAX_PARALLEL` | `4` | Max concurrent agent executions |
| `--log-level <LEVEL>` | `VAULT_LOG_LEVEL` | `info` | Log level (trace/debug/info/warn/error) |
### Access the dashboard
Open `http://localhost:8080` in your browser.
For development with hot-reload:
```sh
cd dashboard
npm run dev
```
The Vite dev server proxies `/api` and `/ws` to the Rust daemon on port 8080.
## File Format
Every entity is a markdown file with YAML frontmatter. Example agent:
```markdown
---
name: reviewer
executable: claude-code
model: claude-sonnet-4-20250514
skills:
- read-vault
- github-pr-review
timeout: 600
max_retries: 1
env:
GITHUB_TOKEN: ${GITHUB_TOKEN}
---
You are a code reviewer. Review pull requests thoroughly,
focusing on correctness, security, and maintainability.
```
Example cron job:
```markdown
---
title: Daily Inbox Review
schedule: "0 9 * * *"
agent: reviewer
enabled: true
---
Review all open PRs and summarize findings.
```
Example human task (in `todos/harald/open/`):
```markdown
---
title: Fix login bug
priority: high
labels: [bug, auth]
created: 2026-03-01T10:00:00Z
---
The login form throws a 500 when the email contains a plus sign.
```
## API
The REST API is available at `/api`. Key endpoints:
| Endpoint | Description |
|----------|-------------|
| `GET /api/agents` | List all agents |
| `POST /api/agents/:name/trigger` | Trigger an agent |
| `GET /api/crons` | List cron jobs |
| `POST /api/crons/:name/trigger` | Fire a cron manually |
| `GET /api/todos/harald` | List human tasks |
| `PATCH /api/todos/harald/:status/:id/move` | Move task between statuses |
| `GET /api/todos/agent` | List agent tasks |
| `GET /api/knowledge` | Search knowledge base |
| `GET/PUT/DELETE /api/files/*path` | Generic file CRUD |
| `GET /api/tree` | Vault directory tree |
| `GET /api/stats` | Vault statistics |
| `GET /api/health` | Health check |
| `POST /api/assistant/chat` | AI assistant chat |
| `POST /api/validate` | Validate a vault file |
| `GET /api/templates` | List entity templates |
| `WS /ws` | Real-time events |
## Configuration
Create `.vault/config.yaml` in your vault root:
```yaml
# Agent executors
executors:
ollama:
base_url: http://localhost:11434
# Task queue settings
queue:
max_parallel: 4
default_timeout: 600
retry_delay: 60
# Inline AI assistant
assistant:
default_model: local/qwen3
models:
- local/qwen3
- claude-sonnet-4-20250514
```
Set `ANTHROPIC_API_KEY` in your environment to use Claude models with the assistant.
## NixOS Deployment
Add to your NixOS configuration:
```nix
{
inputs.vault-os.url = "github:you/vault-os";
# In your configuration.nix:
imports = [ vault-os.nixosModules.default ];
services.vault-os = {
enable = true;
vaultPath = "/var/lib/vault-os";
port = 8080;
bind = "127.0.0.1";
maxParallel = 4;
environmentFile = "/run/secrets/vault-os.env"; # for API keys
};
}
```
The systemd service runs with hardened settings (NoNewPrivileges, ProtectSystem=strict, PrivateTmp, etc.).
## License
Licensed under either of
- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE))
- MIT License ([LICENSE-MIT](LICENSE-MIT))
at your option.

View file

@ -0,0 +1,24 @@
# Manifest for vault-api: the Axum REST/WebSocket layer of vault-os.
[package]
name = "vault-api"
version.workspace = true
edition.workspace = true
[dependencies]
vault-core.workspace = true
vault-watch.workspace = true
vault-scheduler.workspace = true
axum.workspace = true
tower.workspace = true
tower-http.workspace = true
tokio.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
tracing.workspace = true
thiserror.workspace = true
rust-embed.workspace = true
pulldown-cmark.workspace = true
uuid.workspace = true
chrono.workspace = true
# Declared directly (not via [workspace.dependencies]) — only this manifest
# pins it.
futures-util = "0.3"
reqwest.workspace = true

View file

@ -0,0 +1,44 @@
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use serde_json::json;
/// Errors produced by API handlers.
///
/// Converted into JSON HTTP responses by the `IntoResponse` impl:
/// `NotFound` → 404, `BadRequest` → 400, `Internal` → 500, and `Vault`
/// errors are mapped case-by-case on the inner variant.
#[derive(Debug, thiserror::Error)]
pub enum ApiError {
    /// 404 — the requested entity does not exist.
    #[error("Not found: {0}")]
    NotFound(String),
    /// 400 — the request was malformed or semantically invalid.
    #[error("Bad request: {0}")]
    BadRequest(String),
    /// 500 — unexpected failure inside the daemon.
    #[error("Internal error: {0}")]
    Internal(String),
    /// Wrapped vault-core error (HTTP status depends on the inner variant).
    #[error("Vault error: {0}")]
    Vault(#[from] vault_core::VaultError),
}
impl IntoResponse for ApiError {
    /// Serialize the error as `{"error": <message>, "status": <code>}` with
    /// the matching HTTP status code.
    fn into_response(self) -> Response {
        // Flat match: the two vault-core variants with dedicated statuses
        // come first, then the catch-all for any other vault error.
        let (status, message) = match &self {
            ApiError::Vault(vault_core::VaultError::NotFound(msg)) => {
                (StatusCode::NOT_FOUND, msg.clone())
            }
            ApiError::Vault(vault_core::VaultError::MissingFrontmatter(p)) => {
                (StatusCode::BAD_REQUEST, format!("Missing frontmatter: {:?}", p))
            }
            ApiError::Vault(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
            ApiError::NotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
            ApiError::BadRequest(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
            ApiError::Internal(msg) => (StatusCode::INTERNAL_SERVER_ERROR, msg.clone()),
        };
        let payload = json!({
            "error": message,
            "status": status.as_u16(),
        });
        (status, axum::Json(payload)).into_response()
    }
}

View file

@ -0,0 +1,28 @@
pub mod error;
pub mod routes;
pub mod state;
pub mod ws;
pub mod ws_protocol;
use axum::Router;
use std::sync::Arc;
use tower_http::cors::{Any, CorsLayer};
use tower_http::trace::TraceLayer;
pub use state::AppState;
/// Assemble the full HTTP router: REST API under `/api`, the WebSocket
/// endpoint at `/ws`, plus CORS and request-tracing middleware.
pub fn build_router(state: Arc<AppState>) -> Router {
    // Fully permissive CORS — the dashboard may be served from a separate
    // dev-server origin during development.
    let cors = CorsLayer::new()
        .allow_origin(Any)
        .allow_methods(Any)
        .allow_headers(Any);
    // NOTE: layer order is significant and preserved (cors, then tracing).
    Router::new()
        .nest("/api", routes::api_routes())
        .route("/ws", axum::routing::get(ws::ws_handler))
        .layer(cors)
        .layer(TraceLayer::new_for_http())
        .with_state(state)
}

View file

@ -0,0 +1,111 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::filesystem;
use vault_core::types::AgentTask;
/// Agent endpoints (mounted under `/api`): listing, detail, and manual
/// triggering.
pub fn routes() -> Router<Arc<AppState>> {
    let trigger = axum::routing::post(trigger_agent);
    Router::new()
        .route("/agents", get(list_agents))
        .route("/agents/{name}", get(get_agent))
        .route("/agents/{name}/trigger", trigger)
}
/// GET /api/agents — summary listing of every cached agent definition.
async fn list_agents(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    // Read lock on the in-memory agent cache; held only while building the list.
    let agents = state.agents.read().unwrap();
    let mut summaries = Vec::with_capacity(agents.len());
    for agent in agents.values() {
        summaries.push(json!({
            "name": agent.frontmatter.name,
            "executable": agent.frontmatter.executable,
            "model": agent.frontmatter.model,
            "skills": agent.frontmatter.skills,
            "timeout": agent.frontmatter.timeout,
        }));
    }
    Ok(Json(json!(summaries)))
}
/// GET /api/agents/{name} — full definition of one agent, including its
/// prompt body.
///
/// Returns 404 when no agent with that name is in the in-memory cache.
async fn get_agent(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Read lock on the cached agents map (panics only if the lock is poisoned).
    let agents = state.agents.read().unwrap();
    let agent = agents
        .get(&name)
        .ok_or_else(|| ApiError::NotFound(format!("Agent '{}' not found", name)))?;
    Ok(Json(json!({
        "name": agent.frontmatter.name,
        "executable": agent.frontmatter.executable,
        "model": agent.frontmatter.model,
        "escalate_to": agent.frontmatter.escalate_to,
        "mcp_servers": agent.frontmatter.mcp_servers,
        "skills": agent.frontmatter.skills,
        "timeout": agent.frontmatter.timeout,
        "max_retries": agent.frontmatter.max_retries,
        "env": agent.frontmatter.env,
        // The markdown body is the agent's system prompt.
        "body": agent.body,
    })))
}
/// POST /api/agents/{name}/trigger — enqueue a one-off task for an agent.
///
/// Writes a new task file under `todos/agent/queued/`; the scheduler picks it
/// up from there via the filesystem watcher. Optional JSON body
/// `{ "context": "..." }` becomes the task's markdown body.
/// Returns 404 when the agent is unknown.
async fn trigger_agent(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
    body: Option<Json<Value>>,
) -> Result<Json<Value>, ApiError> {
    // Hold the read lock only long enough to validate the agent name,
    // then drop it explicitly before doing filesystem work.
    let agents = state.agents.read().unwrap();
    if !agents.contains_key(&name) {
        return Err(ApiError::NotFound(format!("Agent '{}' not found", name)));
    }
    drop(agents);
    let context = body
        .and_then(|b| b.get("context").and_then(|c| c.as_str().map(String::from)))
        .unwrap_or_default();
    let title = format!("Manual trigger: {}", name);
    let slug = filesystem::timestamped_slug(&title);
    let task_path = state
        .vault_root
        .join("todos/agent/queued")
        .join(format!("{}.md", slug));
    // max_retries: 0 — a manually triggered task is not retried on failure.
    let task = AgentTask {
        title,
        agent: name,
        priority: vault_core::types::Priority::Medium,
        task_type: Some("manual".into()),
        created: chrono::Utc::now(),
        started: None,
        completed: None,
        retry: 0,
        max_retries: 0,
        input: None,
        output: None,
        error: None,
    };
    let entity = vault_core::entity::VaultEntity {
        path: task_path.clone(),
        frontmatter: task,
        body: context,
    };
    // Register the path with the write filter BEFORE writing so the
    // filesystem watcher ignores the daemon's own write (no feedback loop).
    state.write_filter.register(task_path.clone());
    vault_core::filesystem::write_entity(&entity).map_err(ApiError::Vault)?;
    Ok(Json(json!({
        "status": "queued",
        // Report the path relative to the vault root when possible.
        "task_path": task_path.strip_prefix(&state.vault_root).unwrap_or(&task_path),
    })))
}

View file

@ -0,0 +1,390 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::State;
use axum::routing::{get, post};
use axum::{Json, Router};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
// --- Types ---
/// Request body for POST /api/assistant/chat.
#[derive(Debug, Deserialize)]
pub struct ChatRequest {
    /// Conversation so far, oldest message first.
    pub messages: Vec<ChatMessage>,
    /// Model id; falls back to the configured default when omitted.
    pub model: Option<String>,
    /// Optional path of the file being edited (for context)
    pub file_path: Option<String>,
}
/// One chat turn; `role` is e.g. "system", "user", or "assistant".
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatMessage {
    pub role: String,
    pub content: String,
}
/// Response body for POST /api/assistant/chat.
#[derive(Debug, Serialize)]
pub struct ChatResponse {
    /// The assistant's reply.
    pub message: ChatMessage,
    /// The model that actually served the request.
    pub model: String,
}
/// Request body for POST /api/assistant/apply-diff.
#[derive(Debug, Deserialize)]
pub struct ApplyDiffRequest {
    /// Vault-relative path of the file to patch.
    pub file_path: String,
    /// Unified diff to apply to the file.
    pub diff: String,
}
/// One entry in the GET /api/assistant/models listing.
#[derive(Debug, Serialize)]
pub struct ModelInfo {
    pub id: String,
    pub name: String,
}
// --- Routes ---
/// Assistant endpoints (mounted under `/api`): chat proxy, diff application,
/// and model listing.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/assistant/models", get(list_models))
        .route("/assistant/chat", post(chat))
        .route("/assistant/apply-diff", post(apply_diff))
}
/// POST /api/assistant/chat — proxy chat to configured LLM
async fn chat(
State(state): State<Arc<AppState>>,
Json(req): Json<ChatRequest>,
) -> Result<Json<ChatResponse>, ApiError> {
let model = req
.model
.unwrap_or_else(|| state.config.assistant.default_model.clone());
// Build system prompt with vault context
let mut system_parts = vec![
"You are an AI assistant integrated into vault-os, a personal operations platform.".into(),
"You help the user edit markdown files with YAML frontmatter.".into(),
"When suggesting changes, output unified diffs that can be applied.".into(),
];
// If a file path is provided, include its content as context
if let Some(ref fp) = req.file_path {
let full = state.vault_root.join(fp);
if let Ok(content) = tokio::fs::read_to_string(&full).await {
system_parts.push(format!("\n--- Current file: {} ---\n{}", fp, content));
}
}
let system_prompt = system_parts.join("\n");
// Build messages for the LLM
let mut messages = vec![ChatMessage {
role: "system".into(),
content: system_prompt,
}];
messages.extend(req.messages);
// Determine backend from model string
let response = if model.starts_with("claude") || model.starts_with("anthropic/") {
call_anthropic(&state, &model, &messages).await?
} else {
// Default: OpenAI-compatible API (works with Ollama, vLLM, LM Studio, etc.)
call_openai_compatible(&state, &model, &messages).await?
};
Ok(Json(ChatResponse {
message: response,
model,
}))
}
/// Call Anthropic Messages API
async fn call_anthropic(
_state: &AppState,
model: &str,
messages: &[ChatMessage],
) -> Result<ChatMessage, ApiError> {
let api_key = std::env::var("ANTHROPIC_API_KEY")
.map_err(|_| ApiError::BadRequest("ANTHROPIC_API_KEY not set".into()))?;
// Extract system message
let system = messages
.iter()
.find(|m| m.role == "system")
.map(|m| m.content.clone())
.unwrap_or_default();
let user_messages: Vec<serde_json::Value> = messages
.iter()
.filter(|m| m.role != "system")
.map(|m| {
serde_json::json!({
"role": m.role,
"content": m.content,
})
})
.collect();
let model_id = model.strip_prefix("anthropic/").unwrap_or(model);
let body = serde_json::json!({
"model": model_id,
"max_tokens": 4096,
"system": system,
"messages": user_messages,
});
let client = reqwest::Client::new();
let resp = client
.post("https://api.anthropic.com/v1/messages")
.header("x-api-key", &api_key)
.header("anthropic-version", "2023-06-01")
.header("content-type", "application/json")
.json(&body)
.send()
.await
.map_err(|e| ApiError::Internal(format!("Anthropic request failed: {e}")))?;
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
return Err(ApiError::Internal(format!(
"Anthropic API error {status}: {text}"
)));
}
let json: serde_json::Value = resp
.json()
.await
.map_err(|e| ApiError::Internal(format!("Failed to parse Anthropic response: {e}")))?;
let content = json["content"]
.as_array()
.and_then(|arr| arr.first())
.and_then(|block| block["text"].as_str())
.unwrap_or("")
.to_string();
Ok(ChatMessage {
role: "assistant".into(),
content,
})
}
/// Call OpenAI-compatible API (Ollama, vLLM, LM Studio, etc.)
async fn call_openai_compatible(
state: &AppState,
model: &str,
messages: &[ChatMessage],
) -> Result<ChatMessage, ApiError> {
// Check for configured executor base_url, fall back to Ollama default
let base_url = state
.config
.executors
.values()
.find_map(|e| e.base_url.clone())
.unwrap_or_else(|| "http://localhost:11434".into());
let model_id = model.split('/').next_back().unwrap_or(model);
let body = serde_json::json!({
"model": model_id,
"messages": messages.iter().map(|m| serde_json::json!({
"role": m.role,
"content": m.content,
})).collect::<Vec<_>>(),
});
let client = reqwest::Client::new();
let resp = client
.post(format!("{}/v1/chat/completions", base_url))
.header("content-type", "application/json")
.json(&body)
.send()
.await
.map_err(|e| ApiError::Internal(format!("LLM request failed: {e}")))?;
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
return Err(ApiError::Internal(format!(
"LLM API error {status}: {text}"
)));
}
let json: serde_json::Value = resp
.json()
.await
.map_err(|e| ApiError::Internal(format!("Failed to parse LLM response: {e}")))?;
let content = json["choices"]
.as_array()
.and_then(|arr| arr.first())
.and_then(|choice| choice["message"]["content"].as_str())
.unwrap_or("")
.to_string();
Ok(ChatMessage {
role: "assistant".into(),
content,
})
}
/// POST /api/assistant/apply-diff — apply a unified diff to a file
async fn apply_diff(
State(state): State<Arc<AppState>>,
Json(req): Json<ApplyDiffRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let full_path = state.vault_root.join(&req.file_path);
if !full_path.exists() {
return Err(ApiError::NotFound(format!("File not found: {}", req.file_path)));
}
let original = tokio::fs::read_to_string(&full_path)
.await
.map_err(|e| ApiError::Internal(format!("Failed to read file: {e}")))?;
let patched = apply_unified_diff(&original, &req.diff)
.map_err(|e| ApiError::BadRequest(format!("Failed to apply diff: {e}")))?;
// Register with write filter to prevent feedback loop
state.write_filter.register(full_path.clone());
tokio::fs::write(&full_path, &patched)
.await
.map_err(|e| ApiError::Internal(format!("Failed to write file: {e}")))?;
Ok(Json(serde_json::json!({ "status": "ok", "path": req.file_path })))
}
/// Simple unified diff applier
fn apply_unified_diff(original: &str, diff: &str) -> Result<String, String> {
let mut result_lines: Vec<String> = original.lines().map(String::from).collect();
let mut offset: i64 = 0;
for hunk in parse_hunks(diff) {
let start = ((hunk.old_start as i64) - 1 + offset) as usize;
let end = start + hunk.old_count;
if end > result_lines.len() {
return Err(format!(
"Hunk at line {} extends beyond file (file has {} lines)",
hunk.old_start,
result_lines.len()
));
}
result_lines.splice(start..end, hunk.new_lines);
offset += hunk.new_count as i64 - hunk.old_count as i64;
}
let mut result = result_lines.join("\n");
if original.ends_with('\n') && !result.ends_with('\n') {
result.push('\n');
}
Ok(result)
}
struct Hunk {
old_start: usize,
old_count: usize,
new_count: usize,
new_lines: Vec<String>,
}
/// Parse the hunks of a unified diff into structured `Hunk`s.
///
/// File headers ("---"/"+++") are skipped implicitly because they do not
/// start with "@@". Malformed hunk headers are ignored.
fn parse_hunks(diff: &str) -> Vec<Hunk> {
    let mut hunks = Vec::new();
    let mut lines = diff.lines().peekable();
    while let Some(line) = lines.next() {
        if line.starts_with("@@") {
            // Parse @@ -old_start,old_count +new_start,new_count @@
            // (returned as (old_count, new_count, old_start)).
            if let Some(hunk) = parse_hunk_header(line) {
                let mut old_count = 0;
                let mut new_lines = Vec::new();
                let mut new_count = 0;
                // Consume body lines until both sides of the hunk are satisfied.
                while old_count < hunk.0 || new_count < hunk.1 {
                    match lines.next() {
                        // "\ No newline at end of file" is metadata, not content.
                        // The old code fell through to the context arm and
                        // counted it on both sides, corrupting the hunk.
                        Some(l) if l.starts_with('\\') => {}
                        Some(l) if l.starts_with('-') => {
                            old_count += 1;
                        }
                        Some(l) if l.starts_with('+') => {
                            new_lines.push(l[1..].to_string());
                            new_count += 1;
                        }
                        Some(l) => {
                            // Context line (starts with ' ' or, leniently, no prefix)
                            let content = l.strip_prefix(' ').unwrap_or(l);
                            new_lines.push(content.to_string());
                            old_count += 1;
                            new_count += 1;
                        }
                        None => break,
                    }
                }
                hunks.push(Hunk {
                    old_start: hunk.2,
                    old_count: hunk.0,
                    // Count what we actually collected, tolerating truncation.
                    new_count: new_lines.len(),
                    new_lines,
                });
            }
        }
    }
    hunks
}
/// Parse "@@ -start,count +start,count @@" returning (old_count, new_count, old_start)
fn parse_hunk_header(line: &str) -> Option<(usize, usize, usize)> {
let stripped = line.trim_start_matches("@@").trim_end_matches("@@").trim();
let parts: Vec<&str> = stripped.split_whitespace().collect();
if parts.len() < 2 {
return None;
}
let old_part = parts[0].trim_start_matches('-');
let new_part = parts[1].trim_start_matches('+');
let (old_start, old_count) = parse_range(old_part)?;
let (_new_start, new_count) = parse_range(new_part)?;
Some((old_count, new_count, old_start))
}
/// Parse a diff range spec: "start,count", or a bare "start" meaning count 1.
/// Returns None when either number fails to parse.
fn parse_range(s: &str) -> Option<(usize, usize)> {
    match s.split_once(',') {
        Some((start_str, count_str)) => {
            let start = start_str.parse().ok()?;
            let count = count_str.parse().ok()?;
            Some((start, count))
        }
        None => s.parse().ok().map(|start| (start, 1)),
    }
}
/// GET /api/assistant/models — list available models from config
///
/// Returns every configured model, guaranteeing the configured default is
/// present (prepended and labelled when the list omits it).
async fn list_models(State(state): State<Arc<AppState>>) -> Json<Vec<ModelInfo>> {
    let assistant_cfg = &state.config.assistant;
    let mut models = Vec::with_capacity(assistant_cfg.models.len() + 1);
    for model_id in &assistant_cfg.models {
        models.push(ModelInfo {
            id: model_id.clone(),
            name: model_id.clone(),
        });
    }
    // Always include the default model, placed first if it was missing.
    let default = &assistant_cfg.default_model;
    let default_listed = models.iter().any(|m| m.id == *default);
    if !default_listed {
        let entry = ModelInfo {
            id: default.clone(),
            name: format!("{} (default)", default),
        };
        models.insert(0, entry);
    }
    Json(models)
}

View file

@ -0,0 +1,127 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::{get, post};
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::filesystem;
use vault_core::types::CronJob;
/// Cron-management routes: listing, manual triggering, pause/resume.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/crons", get(list_crons))
        .route("/crons/{name}/trigger", post(trigger_cron))
        .route("/crons/{name}/pause", post(pause_cron))
        .route("/crons/{name}/resume", post(resume_cron))
}
/// GET /crons — summaries of all cron jobs, both active and paused.
///
/// Unreadable cron files are logged and skipped rather than failing the
/// whole listing. A cron only counts as enabled when it lives in active/
/// AND its frontmatter says so.
async fn list_crons(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let mut summaries = Vec::new();
    for status in ["active", "paused"] {
        let dir = state.vault_root.join("crons").join(status);
        for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
            let entity = match filesystem::read_entity::<CronJob>(&file) {
                Ok(entity) => entity,
                Err(e) => {
                    tracing::warn!(path = ?file, error = %e, "Failed to read cron");
                    continue;
                }
            };
            let fm = &entity.frontmatter;
            summaries.push(json!({
                "name": file.file_stem().and_then(|s| s.to_str()),
                "title": fm.title,
                "schedule": fm.schedule,
                "agent": fm.agent,
                "enabled": status == "active" && fm.enabled,
                "status": status,
                "last_run": fm.last_run,
                "last_status": fm.last_status,
                "next_run": fm.next_run,
                "run_count": fm.run_count,
            }));
        }
    }
    Ok(Json(json!(summaries)))
}
/// POST /crons/{name}/trigger — fire an active cron job immediately.
///
/// Returns the vault-relative path of the task file the engine created.
async fn trigger_cron(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let cron_path = state
        .vault_root
        .join("crons/active")
        .join(format!("{}.md", name));
    if !cron_path.exists() {
        return Err(ApiError::NotFound(format!("Cron '{}' not found in active/", name)));
    }
    // Scope the engine lock to the fire call only.
    let task_path = {
        let mut engine = state.cron_engine.lock().unwrap();
        engine
            .fire_cron(&cron_path, &state.write_filter)
            .map_err(|e| ApiError::Internal(e.to_string()))?
    };
    let relative = task_path.strip_prefix(&state.vault_root).unwrap_or(&task_path);
    Ok(Json(json!({
        "status": "fired",
        "task_path": relative,
    })))
}
/// POST /crons/{name}/pause — move a cron file from active/ to paused/ and
/// remove it from the in-memory schedule.
async fn pause_cron(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let from = state
        .vault_root
        .join("crons/active")
        .join(format!("{}.md", name));
    let to = state
        .vault_root
        .join("crons/paused")
        .join(format!("{}.md", name));
    if !from.exists() {
        return Err(ApiError::NotFound(format!("Cron '{}' not found in active/", name)));
    }
    // Register the destination so the watcher ignores our own write.
    // NOTE(review): only `to` is registered — confirm the watcher is meant to
    // see (and broadcast) the removal event for `from`.
    state.write_filter.register(to.clone());
    filesystem::move_file(&from, &to).map_err(ApiError::Vault)?;
    // Unschedule after the move so a scheduler tick cannot re-fire the old path.
    let mut engine = state.cron_engine.lock().unwrap();
    engine.remove_cron(&from);
    Ok(Json(json!({ "status": "paused" })))
}
/// POST /crons/{name}/resume — move a cron file from paused/ back to active/
/// and re-register it with the scheduler.
async fn resume_cron(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let from = state
        .vault_root
        .join("crons/paused")
        .join(format!("{}.md", name));
    let to = state
        .vault_root
        .join("crons/active")
        .join(format!("{}.md", name));
    if !from.exists() {
        return Err(ApiError::NotFound(format!("Cron '{}' not found in paused/", name)));
    }
    // Register the destination so the watcher ignores our own write.
    state.write_filter.register(to.clone());
    filesystem::move_file(&from, &to).map_err(ApiError::Vault)?;
    let mut engine = state.cron_engine.lock().unwrap();
    // Scheduling failure is non-fatal: the file is already active on disk,
    // so the caller still gets "active"; the problem is only logged.
    if let Err(e) = engine.upsert_cron(&to) {
        tracing::warn!(error = %e, "Failed to schedule resumed cron");
    }
    Ok(Json(json!({ "status": "active" })))
}

View file

@ -0,0 +1,126 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde::Deserialize;
use serde_json::{json, Value};
use std::sync::Arc;
/// Raw file CRUD: GET/PUT/PATCH/DELETE on /files/{*path} (vault-relative).
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/files/{*path}", get(read_file).put(write_file).patch(patch_file).delete(delete_file))
}
/// GET /files/{*path} — read a vault file, splitting YAML frontmatter when present.
///
/// Returns `{path, frontmatter, body}`; `frontmatter` is null when the file
/// has none (or when its YAML fails to parse).
async fn read_file(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Wildcard captures are decoded verbatim; reject absolute paths and ".."
    // segments so the read cannot escape the vault root.
    if path.starts_with('/') || path.split('/').any(|seg| seg == "..") {
        return Err(ApiError::BadRequest(format!("Invalid path: {}", path)));
    }
    let file_path = state.vault_root.join(&path);
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("File '{}' not found", path)));
    }
    let content = std::fs::read_to_string(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    // Try to split frontmatter
    if let Ok((yaml, body)) = vault_core::frontmatter::split_frontmatter(&content) {
        let frontmatter: Value = serde_yaml::from_str(yaml).unwrap_or(Value::Null);
        Ok(Json(json!({
            "path": path,
            "frontmatter": frontmatter,
            "body": body,
        })))
    } else {
        Ok(Json(json!({
            "path": path,
            "frontmatter": null,
            "body": content,
        })))
    }
}
/// Request body for `PUT /files/{*path}`.
///
/// Either `raw` (verbatim content, takes precedence) or a combination of
/// `frontmatter` + `body` (serialized as YAML frontmatter above the body).
#[derive(Deserialize)]
struct WriteFileBody {
    // Optional YAML frontmatter, written between "---" fences when present.
    #[serde(default)]
    frontmatter: Option<Value>,
    // Markdown body; defaults to the empty string.
    #[serde(default)]
    body: Option<String>,
    // Verbatim file content; wins over frontmatter/body when set.
    #[serde(default)]
    raw: Option<String>,
}
/// PUT /files/{*path} — create or overwrite a vault file.
///
/// Creates parent directories as needed. Content is `raw` when supplied,
/// otherwise frontmatter (YAML-fenced) + body.
async fn write_file(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
    Json(data): Json<WriteFileBody>,
) -> Result<Json<Value>, ApiError> {
    // Reject absolute paths and ".." segments so writes (and the
    // create_dir_all below) cannot escape the vault root.
    if path.starts_with('/') || path.split('/').any(|seg| seg == "..") {
        return Err(ApiError::BadRequest(format!("Invalid path: {}", path)));
    }
    let file_path = state.vault_root.join(&path);
    if let Some(parent) = file_path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, parent)))?;
    }
    let content = if let Some(raw) = data.raw {
        raw
    } else {
        let body = data.body.unwrap_or_default();
        if let Some(fm) = data.frontmatter {
            // serde_yaml output ends with '\n', so the closing "---" lands
            // on its own line.
            let yaml = serde_yaml::to_string(&fm)
                .map_err(|e| ApiError::Internal(e.to_string()))?;
            format!("---\n{}---\n{}", yaml, body)
        } else {
            body
        }
    };
    // Register before writing so the watcher ignores our own event.
    state.write_filter.register(file_path.clone());
    std::fs::write(&file_path, content)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "written", "path": path })))
}
/// PATCH /files/{*path} — merge the given JSON object into the file's
/// YAML frontmatter, leaving the body untouched.
async fn patch_file(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
    Json(updates): Json<Value>,
) -> Result<Json<Value>, ApiError> {
    // Reject absolute paths and ".." segments so the patch target cannot
    // escape the vault root.
    if path.starts_with('/') || path.split('/').any(|seg| seg == "..") {
        return Err(ApiError::BadRequest(format!("Invalid path: {}", path)));
    }
    let file_path = state.vault_root.join(&path);
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("File '{}' not found", path)));
    }
    let content = std::fs::read_to_string(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    let updated =
        vault_core::frontmatter::update_frontmatter_fields(&content, &file_path, &updates)
            .map_err(ApiError::Vault)?;
    // Register before writing so the watcher ignores our own event.
    state.write_filter.register(file_path.clone());
    std::fs::write(&file_path, updated)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "patched", "path": path })))
}
/// DELETE /files/{*path} — remove a file from the vault.
async fn delete_file(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Reject absolute paths and ".." segments — without this, a request
    // could delete arbitrary files outside the vault root.
    if path.starts_with('/') || path.split('/').any(|seg| seg == "..") {
        return Err(ApiError::BadRequest(format!("Invalid path: {}", path)));
    }
    let file_path = state.vault_root.join(&path);
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("File '{}' not found", path)));
    }
    // NOTE(review): unlike write/patch, the deletion is not registered with
    // write_filter — confirm delete events are meant to reach the watcher.
    std::fs::remove_file(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "deleted", "path": path })))
}

View file

@ -0,0 +1,126 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, Query, State};
use axum::routing::get;
use axum::{Json, Router};
use pulldown_cmark::{html, Parser};
use serde::Deserialize;
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::entity::VaultEntity;
use vault_core::filesystem;
use vault_core::types::KnowledgeNote;
/// Knowledge-base routes: filtered listing and single-note retrieval.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/knowledge", get(list_knowledge))
        .route("/knowledge/{*path}", get(get_knowledge))
}
/// Query parameters for `GET /knowledge`.
#[derive(Deserialize, Default)]
struct SearchQuery {
    // Case-insensitive substring matched against the title and full content.
    #[serde(default)]
    q: Option<String>,
    // Exact-match tag filter.
    #[serde(default)]
    tag: Option<String>,
}
/// GET /knowledge — list knowledge notes, optionally filtered by a text
/// query (`q`, matched against title and full content, case-insensitive)
/// and/or an exact tag (`tag`).
///
/// Notes whose frontmatter fails to parse still appear, with the file stem
/// as title and no tags. Each file's full content is read (needed for the
/// text search); this is O(total vault text) per request.
async fn list_knowledge(
    State(state): State<Arc<AppState>>,
    Query(query): Query<SearchQuery>,
) -> Result<Json<Value>, ApiError> {
    let dir = state.vault_root.join("knowledge");
    let files = filesystem::list_md_files_recursive(&dir).map_err(ApiError::Vault)?;
    let mut notes = Vec::new();
    for file in files {
        // Try parsing with frontmatter
        let content = std::fs::read_to_string(&file)
            .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file)))?;
        let (title, tags) = if let Ok(entity) = VaultEntity::<KnowledgeNote>::from_content(file.clone(), &content) {
            (
                // Fall back to the file stem when the note has no title.
                entity.frontmatter.title.unwrap_or_else(|| {
                    file.file_stem()
                        .and_then(|s| s.to_str())
                        .unwrap_or("untitled")
                        .to_string()
                }),
                entity.frontmatter.tags,
            )
        } else {
            (
                file.file_stem()
                    .and_then(|s| s.to_str())
                    .unwrap_or("untitled")
                    .to_string(),
                vec![],
            )
        };
        // Apply filters
        if let Some(ref q) = query.q {
            let q_lower = q.to_lowercase();
            if !title.to_lowercase().contains(&q_lower)
                && !content.to_lowercase().contains(&q_lower)
            {
                continue;
            }
        }
        if let Some(ref tag) = query.tag {
            if !tags.iter().any(|t| t == tag) {
                continue;
            }
        }
        let relative = file.strip_prefix(&state.vault_root).unwrap_or(&file);
        notes.push(json!({
            "path": relative,
            "title": title,
            "tags": tags,
        }));
    }
    Ok(Json(json!(notes)))
}
/// GET /knowledge/{*path} — a single knowledge note with its frontmatter,
/// raw markdown body, and the body rendered to HTML.
async fn get_knowledge(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Wildcard captures are decoded verbatim; reject absolute paths and ".."
    // segments so the lookup cannot escape the knowledge/ directory.
    if path.starts_with('/') || path.split('/').any(|seg| seg == "..") {
        return Err(ApiError::BadRequest(format!("Invalid path: {}", path)));
    }
    let file_path = state.vault_root.join("knowledge").join(&path);
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("Knowledge note '{}' not found", path)));
    }
    let content = std::fs::read_to_string(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    // Parse failures degrade gracefully: empty frontmatter, whole file as body.
    let (frontmatter, body) = if let Ok(entity) = VaultEntity::<KnowledgeNote>::from_content(file_path.clone(), &content) {
        (
            json!({
                "title": entity.frontmatter.title,
                "tags": entity.frontmatter.tags,
                "source": entity.frontmatter.source,
                "created": entity.frontmatter.created,
                "related": entity.frontmatter.related,
            }),
            entity.body,
        )
    } else {
        (json!({}), content.clone())
    };
    // Render markdown to HTML
    let parser = Parser::new(&body);
    let mut html_output = String::new();
    html::push_html(&mut html_output, parser);
    Ok(Json(json!({
        "path": path,
        "frontmatter": frontmatter,
        "body": body,
        "html": html_output,
    })))
}

View file

@ -0,0 +1,36 @@
pub mod agents;
pub mod assistant;
pub mod crons;
pub mod files;
pub mod knowledge;
pub mod skills;
pub mod stats;
pub mod suggest;
pub mod templates;
pub mod todos_agent;
pub mod todos_human;
pub mod tree;
pub mod validate;
pub mod views;
use crate::state::AppState;
use axum::Router;
use std::sync::Arc;
/// Aggregate every API sub-router into the single router mounted by the
/// server. Merge order matches the module list above and is preserved.
pub fn api_routes() -> Router<Arc<AppState>> {
    let subrouters = [
        agents::routes(),
        skills::routes(),
        crons::routes(),
        todos_human::routes(),
        todos_agent::routes(),
        knowledge::routes(),
        files::routes(),
        tree::routes(),
        suggest::routes(),
        stats::routes(),
        views::routes(),
        assistant::routes(),
        validate::routes(),
        templates::routes(),
    ];
    subrouters
        .into_iter()
        .fold(Router::new(), |router, sub| router.merge(sub))
}

View file

@ -0,0 +1,62 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
/// Skill routes: listing, detail, and reverse lookup of consuming agents.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/skills", get(list_skills))
        .route("/skills/{name}", get(get_skill))
        .route("/skills/{name}/used-by", get(skill_used_by))
}
/// GET /skills — name/description/version summary of every loaded skill.
async fn list_skills(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let skills = state.skills.read().unwrap();
    let mut summaries = Vec::with_capacity(skills.len());
    for skill in skills.values() {
        summaries.push(json!({
            "name": skill.frontmatter.name,
            "description": skill.frontmatter.description,
            "version": skill.frontmatter.version,
        }));
    }
    Ok(Json(json!(summaries)))
}
/// GET /skills/{name} — full detail (frontmatter + body) for one skill.
async fn get_skill(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let skills = state.skills.read().unwrap();
    match skills.get(&name) {
        None => Err(ApiError::NotFound(format!("Skill '{}' not found", name))),
        Some(skill) => Ok(Json(json!({
            "name": skill.frontmatter.name,
            "description": skill.frontmatter.description,
            "version": skill.frontmatter.version,
            "requires_mcp": skill.frontmatter.requires_mcp,
            "inputs": skill.frontmatter.inputs,
            "outputs": skill.frontmatter.outputs,
            "body": skill.body,
        }))),
    }
}
/// GET /skills/{name}/used-by — names of agents whose skill list contains
/// this skill. An unknown name simply yields an empty list.
async fn skill_used_by(
    State(state): State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let agents = state.agents.read().unwrap();
    let mut users = Vec::new();
    for agent in agents.values() {
        if agent.frontmatter.skills.contains(&name) {
            users.push(agent.frontmatter.name.clone());
        }
    }
    Ok(Json(json!(users)))
}

View file

@ -0,0 +1,112 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::State;
use axum::routing::get;
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::filesystem;
/// Observability routes: aggregate stats, recent activity, health probe.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/stats", get(get_stats))
        .route("/activity", get(get_activity))
        .route("/health", get(health_check))
}
/// GET /stats — aggregate counts across the vault.
///
/// Each lock (agents, skills, cron engine, runtime state) is taken briefly
/// and in a fixed order; task counts come from on-disk directory listings,
/// with unreadable directories counted as zero.
async fn get_stats(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let agents_count = state.agents.read().unwrap().len();
    let skills_count = state.skills.read().unwrap().len();
    let crons_scheduled = state.cron_engine.lock().unwrap().scheduled_count();
    // Human task counts, bucketed by status directory.
    let mut task_counts = serde_json::Map::new();
    for status in &["urgent", "open", "in-progress", "done"] {
        let dir = state.vault_root.join("todos/harald").join(status);
        let count = filesystem::list_md_files(&dir)
            .map(|f| f.len())
            .unwrap_or(0);
        task_counts.insert(status.to_string(), json!(count));
    }
    // Agent task counts, bucketed by status directory.
    let mut agent_task_counts = serde_json::Map::new();
    for status in &["queued", "running", "done", "failed"] {
        let dir = state.vault_root.join("todos/agent").join(status);
        let count = filesystem::list_md_files(&dir)
            .map(|f| f.len())
            .unwrap_or(0);
        agent_task_counts.insert(status.to_string(), json!(count));
    }
    let knowledge_count = filesystem::list_md_files_recursive(&state.vault_root.join("knowledge"))
        .map(|f| f.len())
        .unwrap_or(0);
    let runtime_state = state.runtime_state.lock().unwrap();
    Ok(Json(json!({
        "agents": agents_count,
        "skills": skills_count,
        "crons_scheduled": crons_scheduled,
        "human_tasks": task_counts,
        "agent_tasks": agent_task_counts,
        "knowledge_notes": knowledge_count,
        "total_tasks_executed": runtime_state.total_tasks_executed,
        "total_cron_fires": runtime_state.total_cron_fires,
    })))
}
/// GET /activity — the 50 most recently modified vault files, newest first.
///
/// Scans the task and knowledge directories, sorts by filesystem mtime and
/// truncates to 50 entries.
async fn get_activity(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    // Collect recently modified files across the vault as activity items
    let mut activity = Vec::new();
    let dirs = [
        ("todos/harald", "human_task"),
        ("todos/agent", "agent_task"),
        ("knowledge", "knowledge"),
    ];
    for (dir, kind) in &dirs {
        if let Ok(files) = filesystem::list_md_files_recursive(&state.vault_root.join(dir)) {
            // Consider every file. The previous `.rev().take(20)` sampled 20
            // files per directory in *listing* (name) order before sorting by
            // mtime, silently dropping genuinely recent files elsewhere in
            // the listing.
            for file in &files {
                if let Ok(metadata) = std::fs::metadata(file) {
                    if let Ok(modified) = metadata.modified() {
                        let relative = file.strip_prefix(&state.vault_root).unwrap_or(file);
                        activity.push(json!({
                            "path": relative,
                            "kind": kind,
                            "modified": chrono::DateTime::<chrono::Utc>::from(modified),
                            "name": file.file_stem().and_then(|s| s.to_str()),
                        }));
                    }
                }
            }
        }
    }
    // Sort by modification time, newest first. All timestamps are UTC
    // RFC 3339 strings, so string comparison orders them chronologically.
    activity.sort_by(|a, b| {
        let a_time = a.get("modified").and_then(|t| t.as_str()).unwrap_or("");
        let b_time = b.get("modified").and_then(|t| t.as_str()).unwrap_or("");
        b_time.cmp(a_time)
    });
    activity.truncate(50);
    Ok(Json(json!(activity)))
}
/// GET /health — liveness probe with version, uptime, and basic counters.
async fn health_check(State(state): State<Arc<AppState>>) -> Json<Value> {
    // Locks are taken in the same order as elsewhere in this module.
    let runtime = state.runtime_state.lock().unwrap();
    let uptime_secs = (chrono::Utc::now() - state.startup_time).num_seconds();
    let scheduled = state.cron_engine.lock().unwrap().scheduled_count();
    let agent_count = state.agents.read().unwrap().len();
    Json(json!({
        "status": "ok",
        "version": env!("CARGO_PKG_VERSION"),
        "uptime_secs": uptime_secs,
        "agents": agent_count,
        "crons_scheduled": scheduled,
        "total_tasks_executed": runtime.total_tasks_executed,
    }))
}

View file

@ -0,0 +1,141 @@
use crate::state::AppState;
use axum::extract::{Query, State};
use axum::routing::get;
use axum::{Json, Router};
use serde::Deserialize;
use serde_json::{json, Value};
use std::collections::HashSet;
use std::sync::Arc;
use vault_core::filesystem;
use vault_core::types::{HumanTask, KnowledgeNote};
/// Autocomplete/suggestion routes backing dashboard form fields.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/suggest/agents", get(suggest_agents))
        .route("/suggest/skills", get(suggest_skills))
        .route("/suggest/tags", get(suggest_tags))
        .route("/suggest/repos", get(suggest_repos))
        .route("/suggest/labels", get(suggest_labels))
        .route("/suggest/files", get(suggest_files))
        .route("/suggest/models", get(suggest_models))
        .route("/suggest/mcp-servers", get(suggest_mcp_servers))
}
/// GET /suggest/agents — loaded agent names (map iteration order).
async fn suggest_agents(State(state): State<Arc<AppState>>) -> Json<Value> {
    let agents = state.agents.read().unwrap();
    let names: Vec<String> = agents.keys().cloned().collect();
    Json(json!(names))
}
/// GET /suggest/skills — loaded skill names (map iteration order).
async fn suggest_skills(State(state): State<Arc<AppState>>) -> Json<Value> {
    let skills = state.skills.read().unwrap();
    let names: Vec<String> = skills.keys().cloned().collect();
    Json(json!(names))
}
/// GET /suggest/tags — sorted, de-duplicated tags from all knowledge notes.
/// Unreadable files and a missing knowledge/ directory yield an empty list.
async fn suggest_tags(State(state): State<Arc<AppState>>) -> Json<Value> {
    let mut unique = HashSet::new();
    let knowledge_dir = state.vault_root.join("knowledge");
    if let Ok(files) = filesystem::list_md_files_recursive(&knowledge_dir) {
        for file in &files {
            if let Ok(entity) = filesystem::read_entity::<KnowledgeNote>(file) {
                unique.extend(entity.frontmatter.tags.iter().cloned());
            }
        }
    }
    let mut tags: Vec<String> = unique.into_iter().collect();
    tags.sort();
    Json(json!(tags))
}
/// GET /suggest/repos — sorted, de-duplicated repos referenced by human
/// tasks across every status directory.
async fn suggest_repos(State(state): State<Arc<AppState>>) -> Json<Value> {
    let mut unique = HashSet::new();
    for status in ["urgent", "open", "in-progress", "done"] {
        let dir = state.vault_root.join("todos/harald").join(status);
        let Ok(files) = filesystem::list_md_files(&dir) else {
            continue;
        };
        for file in files {
            if let Ok(entity) = filesystem::read_entity::<HumanTask>(&file) {
                if let Some(repo) = &entity.frontmatter.repo {
                    unique.insert(repo.clone());
                }
            }
        }
    }
    let mut repos: Vec<String> = unique.into_iter().collect();
    repos.sort();
    Json(json!(repos))
}
/// GET /suggest/labels — sorted, de-duplicated labels from human tasks
/// across every status directory.
async fn suggest_labels(State(state): State<Arc<AppState>>) -> Json<Value> {
    let mut unique = HashSet::new();
    for status in ["urgent", "open", "in-progress", "done"] {
        let dir = state.vault_root.join("todos/harald").join(status);
        let Ok(files) = filesystem::list_md_files(&dir) else {
            continue;
        };
        for file in files {
            if let Ok(entity) = filesystem::read_entity::<HumanTask>(&file) {
                unique.extend(entity.frontmatter.labels.iter().cloned());
            }
        }
    }
    let mut labels: Vec<String> = unique.into_iter().collect();
    labels.sort();
    Json(json!(labels))
}
/// Query parameters for `GET /suggest/files`.
#[derive(Deserialize, Default)]
struct FileQuery {
    // Case-insensitive substring filter on the vault-relative path.
    #[serde(default)]
    q: Option<String>,
}
/// GET /suggest/files — sorted vault-relative markdown paths, optionally
/// filtered by a case-insensitive substring query. `.vault` internals are
/// excluded.
async fn suggest_files(
    State(state): State<Arc<AppState>>,
    Query(query): Query<FileQuery>,
) -> Json<Value> {
    // Lowercase the query once, not per file (it was loop-invariant).
    let needle = query.q.as_ref().map(|q| q.to_lowercase());
    let mut files = Vec::new();
    if let Ok(all_files) = filesystem::list_md_files_recursive(&state.vault_root) {
        for file in all_files {
            if let Ok(relative) = file.strip_prefix(&state.vault_root) {
                let rel_str = relative.to_string_lossy().to_string();
                // Skip .vault internal files
                if rel_str.starts_with(".vault") {
                    continue;
                }
                if let Some(ref needle) = needle {
                    if !rel_str.to_lowercase().contains(needle.as_str()) {
                        continue;
                    }
                }
                files.push(rel_str);
            }
        }
    }
    files.sort();
    Json(json!(files))
}
/// GET /suggest/models — assistant model ids straight from the config.
async fn suggest_models(State(state): State<Arc<AppState>>) -> Json<Value> {
    Json(json!(state.config.assistant.models))
}
/// GET /suggest/mcp-servers — names of the configured MCP servers.
async fn suggest_mcp_servers(State(state): State<Arc<AppState>>) -> Json<Value> {
    let servers: Vec<&str> = state.config.mcp_servers.keys().map(|s| s.as_str()).collect();
    Json(json!(servers))
}

View file

@ -0,0 +1,144 @@
use crate::state::AppState;
use axum::extract::Path;
use axum::routing::get;
use axum::{Json, Router};
use std::sync::Arc;
/// Template routes: static catalog plus per-name skeleton content.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/templates", get(list_templates))
        .route("/templates/{name}", get(get_template))
}
/// Catalog entry returned by `GET /templates`.
#[derive(serde::Serialize)]
struct TemplateInfo {
    // Template identifier; also the `{name}` accepted by GET /templates/{name}.
    name: String,
    description: String,
    // Vault directory the resulting entity is intended to live in.
    category: String,
}
/// GET /templates — the static catalog of available entity templates.
async fn list_templates() -> Json<Vec<TemplateInfo>> {
    // (name, description, category) for every known template.
    let catalog = [
        ("agent", "New AI agent definition", "agents"),
        ("skill", "New agent skill", "skills"),
        ("cron", "New cron schedule", "crons"),
        ("human-task", "New human task", "todos/harald"),
        ("agent-task", "New agent task", "todos/agent"),
        ("knowledge", "New knowledge note", "knowledge"),
        ("view-page", "New dashboard view page", "views/pages"),
    ];
    let templates = catalog
        .into_iter()
        .map(|(name, description, category)| TemplateInfo {
            name: name.into(),
            description: description.into(),
            category: category.into(),
        })
        .collect();
    Json(templates)
}
/// GET /templates/{name} — skeleton `{frontmatter, body}` content for one
/// template. Timestamped templates embed the current UTC time at request
/// time. Unknown names fall back to an empty template rather than a 404
/// — NOTE(review): confirm that fallback is intended.
async fn get_template(Path(name): Path<String>) -> Json<serde_json::Value> {
    let template = match name.as_str() {
        "agent" => serde_json::json!({
            "frontmatter": {
                "name": "new-agent",
                "executable": "claude-code",
                "model": "",
                "skills": [],
                "mcp_servers": [],
                "timeout": 600,
                "max_retries": 0,
                "env": {}
            },
            "body": "You are an AI agent.\n\nDescribe your agent's purpose and behavior here.\n"
        }),
        "skill" => serde_json::json!({
            "frontmatter": {
                "name": "new-skill",
                "description": "Describe what this skill does",
                "version": 1,
                "inputs": [],
                "outputs": [],
                "requires_mcp": []
            },
            "body": "## Instructions\n\nDescribe the skill instructions here.\n"
        }),
        "cron" => serde_json::json!({
            "frontmatter": {
                "title": "New Cron Job",
                "schedule": "0 9 * * *",
                "agent": "",
                "enabled": true
            },
            "body": "Optional context for the cron job execution.\n"
        }),
        "human-task" => serde_json::json!({
            "frontmatter": {
                "title": "New Task",
                "priority": "medium",
                "labels": [],
                "created": chrono::Utc::now().to_rfc3339()
            },
            "body": "Task description goes here.\n"
        }),
        "agent-task" => serde_json::json!({
            "frontmatter": {
                "title": "New Agent Task",
                "agent": "",
                "priority": "medium",
                "created": chrono::Utc::now().to_rfc3339(),
                "retry": 0,
                "max_retries": 0
            },
            "body": "Task instructions for the agent.\n"
        }),
        "knowledge" => serde_json::json!({
            "frontmatter": {
                "title": "New Note",
                "tags": [],
                "created": chrono::Utc::now().to_rfc3339()
            },
            "body": "Write your knowledge note here.\n"
        }),
        "view-page" => serde_json::json!({
            "frontmatter": {
                "type": "page",
                "title": "New View",
                "icon": "",
                "route": "/view/new-view",
                "position": 10,
                "layout": "single",
                "regions": {
                    "main": []
                }
            },
            "body": ""
        }),
        // Unknown template name: empty skeleton.
        _ => serde_json::json!({
            "frontmatter": {},
            "body": ""
        }),
    };
    Json(template)
}

View file

@ -0,0 +1,149 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde::Deserialize;
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::entity::VaultEntity;
use vault_core::filesystem;
use vault_core::types::AgentTask;
/// Agent-task routes: list/create on the collection, detail lookup by id.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/todos/agent", get(list_all).post(create_task))
        .route("/todos/agent/{id}", get(get_task))
}
/// GET /todos/agent — every agent task across all status directories.
/// Unreadable task files are silently skipped.
async fn list_all(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let mut tasks = Vec::new();
    for status in ["queued", "running", "done", "failed"] {
        let dir = state.vault_root.join("todos/agent").join(status);
        for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
            match filesystem::read_entity::<AgentTask>(&file) {
                Ok(entity) => tasks.push(agent_task_to_json(&entity, status)),
                Err(_) => continue,
            }
        }
    }
    Ok(Json(json!(tasks)))
}
/// GET /todos/agent/{id} — look a task up by id, probing each status
/// directory in order; 404 when no directory contains it.
async fn get_task(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let base = state.vault_root.join("todos/agent");
    for status in ["queued", "running", "done", "failed"] {
        let path = base.join(status).join(format!("{}.md", id));
        if !path.exists() {
            continue;
        }
        let entity = filesystem::read_entity::<AgentTask>(&path).map_err(ApiError::Vault)?;
        return Ok(Json(agent_task_to_json(&entity, status)));
    }
    Err(ApiError::NotFound(format!("Agent task '{}' not found", id)))
}
/// Request body for `POST /todos/agent`.
#[derive(Deserialize)]
struct CreateAgentTaskBody {
    title: String,
    // Must name an agent currently loaded in state; validated in create_task.
    agent: String,
    // "urgent" | "high" | "low"; anything else maps to medium.
    #[serde(default)]
    priority: Option<String>,
    #[serde(default, rename = "type")]
    task_type: Option<String>,
    #[serde(default)]
    max_retries: Option<u32>,
    // Arbitrary JSON forwarded to the agent as task input.
    #[serde(default)]
    input: Option<Value>,
    // Markdown body carrying the task instructions.
    #[serde(default)]
    body: Option<String>,
}
/// POST /todos/agent — create a new agent task file in todos/agent/queued/.
///
/// Validates the agent name against loaded agents, derives a timestamped
/// slug for the filename, and registers the write with the watcher filter
/// before persisting.
async fn create_task(
    State(state): State<Arc<AppState>>,
    Json(body): Json<CreateAgentTaskBody>,
) -> Result<Json<Value>, ApiError> {
    // Verify agent exists
    {
        let agents = state.agents.read().unwrap();
        if !agents.contains_key(&body.agent) {
            return Err(ApiError::BadRequest(format!(
                "Agent '{}' not found",
                body.agent
            )));
        }
    }
    // Unrecognized/missing priority strings default to medium.
    let priority = match body.priority.as_deref() {
        Some("urgent") => vault_core::types::Priority::Urgent,
        Some("high") => vault_core::types::Priority::High,
        Some("low") => vault_core::types::Priority::Low,
        _ => vault_core::types::Priority::Medium,
    };
    let slug = filesystem::timestamped_slug(&body.title);
    let path = state
        .vault_root
        .join("todos/agent/queued")
        .join(format!("{}.md", slug));
    let task = AgentTask {
        title: body.title,
        agent: body.agent,
        priority,
        task_type: body.task_type,
        created: chrono::Utc::now(),
        started: None,
        completed: None,
        retry: 0,
        max_retries: body.max_retries.unwrap_or(0),
        input: body.input,
        output: None,
        error: None,
    };
    let entity = VaultEntity {
        path: path.clone(),
        frontmatter: task,
        body: body.body.unwrap_or_default(),
    };
    // Register before writing so the watcher ignores our own event.
    state.write_filter.register(path.clone());
    filesystem::write_entity(&entity).map_err(ApiError::Vault)?;
    Ok(Json(json!({
        "status": "queued",
        "path": path.strip_prefix(&state.vault_root).unwrap_or(&path),
    })))
}
/// Serialize an agent-task entity, plus its directory-derived status, into
/// the JSON shape returned by the /todos/agent endpoints. The id is the
/// file stem ("unknown" when the path has none).
fn agent_task_to_json(entity: &VaultEntity<AgentTask>, status: &str) -> Value {
    let fm = &entity.frontmatter;
    let id = match entity.path.file_stem().and_then(|stem| stem.to_str()) {
        Some(stem) => stem,
        None => "unknown",
    };
    json!({
        "id": id,
        "title": fm.title,
        "agent": fm.agent,
        "priority": fm.priority,
        "type": fm.task_type,
        "status": status,
        "created": fm.created,
        "started": fm.started,
        "completed": fm.completed,
        "retry": fm.retry,
        "max_retries": fm.max_retries,
        "input": fm.input,
        "output": fm.output,
        "error": fm.error,
        "body": entity.body,
    })
}

View file

@ -0,0 +1,205 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::{get, patch};
use axum::{Json, Router};
use serde::Deserialize;
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::entity::VaultEntity;
use vault_core::filesystem;
use vault_core::types::HumanTask;
/// Human-task routes: list/create, per-status listing, move, delete.
pub fn routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/todos/harald", get(list_all).post(create_task))
        .route("/todos/harald/{status}", get(list_by_status))
        .route("/todos/harald/{status}/{id}/move", patch(move_task))
        .route(
            "/todos/harald/{status}/{id}",
            axum::routing::delete(delete_task),
        )
}
/// GET /todos/harald — every human task across all status directories.
/// Unreadable task files are silently skipped.
async fn list_all(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let mut tasks = Vec::new();
    for status in ["urgent", "open", "in-progress", "done"] {
        let dir = state.vault_root.join("todos/harald").join(status);
        for file in filesystem::list_md_files(&dir).map_err(ApiError::Vault)? {
            match filesystem::read_entity::<HumanTask>(&file) {
                Ok(entity) => tasks.push(task_to_json(&entity, status)),
                Err(_) => continue,
            }
        }
    }
    Ok(Json(json!(tasks)))
}
/// GET /todos/harald/{status} — tasks in one status directory.
async fn list_by_status(
    State(state): State<Arc<AppState>>,
    Path(status): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // `status` becomes a path segment; reject traversal so the listing
    // cannot wander outside todos/harald/.
    if status.contains('/') || status.contains('\\') || status.as_str() == ".." {
        return Err(ApiError::BadRequest(format!("Invalid status '{}'", status)));
    }
    let dir = state
        .vault_root
        .join("todos/harald")
        .join(&status);
    if !dir.exists() {
        return Err(ApiError::NotFound(format!("Status '{}' not found", status)));
    }
    let files = filesystem::list_md_files(&dir).map_err(ApiError::Vault)?;
    let mut tasks = Vec::new();
    for file in files {
        if let Ok(entity) = filesystem::read_entity::<HumanTask>(&file) {
            tasks.push(task_to_json(&entity, &status));
        }
    }
    Ok(Json(json!(tasks)))
}
/// Request body for `POST /todos/harald`.
#[derive(Deserialize)]
struct CreateTaskBody {
    title: String,
    // "urgent" | "high" | "low"; anything else maps to medium. Urgent tasks
    // are filed under urgent/, the rest under open/.
    #[serde(default)]
    priority: Option<String>,
    #[serde(default)]
    labels: Vec<String>,
    #[serde(default)]
    repo: Option<String>,
    // RFC 3339 due date; unparseable values are silently dropped.
    #[serde(default)]
    due: Option<String>,
    // Markdown body with the task description.
    #[serde(default)]
    body: Option<String>,
}
/// POST /todos/harald — create a human task file.
///
/// Urgent-priority tasks land in urgent/, everything else in open/. The
/// filename is a timestamped slug of the title, and the write is registered
/// with the watcher filter before persisting.
async fn create_task(
    State(state): State<Arc<AppState>>,
    Json(body): Json<CreateTaskBody>,
) -> Result<Json<Value>, ApiError> {
    // Unrecognized/missing priority strings default to medium.
    let priority = match body.priority.as_deref() {
        Some("urgent") => vault_core::types::Priority::Urgent,
        Some("high") => vault_core::types::Priority::High,
        Some("low") => vault_core::types::Priority::Low,
        _ => vault_core::types::Priority::Medium,
    };
    let status_dir = match priority {
        vault_core::types::Priority::Urgent => "urgent",
        _ => "open",
    };
    let slug = filesystem::timestamped_slug(&body.title);
    let path = state
        .vault_root
        .join("todos/harald")
        .join(status_dir)
        .join(format!("{}.md", slug));
    // An unparseable due date becomes None rather than an error.
    let due = body
        .due
        .and_then(|d| chrono::DateTime::parse_from_rfc3339(&d).ok())
        .map(|d| d.with_timezone(&chrono::Utc));
    let task = HumanTask {
        title: body.title,
        priority,
        source: Some("dashboard".into()),
        repo: body.repo,
        labels: body.labels,
        created: chrono::Utc::now(),
        due,
    };
    let entity = VaultEntity {
        path: path.clone(),
        frontmatter: task,
        body: body.body.unwrap_or_default(),
    };
    // Register before writing so the watcher ignores our own event.
    state.write_filter.register(path.clone());
    filesystem::write_entity(&entity).map_err(ApiError::Vault)?;
    Ok(Json(json!({
        "status": "created",
        "path": path.strip_prefix(&state.vault_root).unwrap_or(&path),
    })))
}
/// Body for PATCH /todos/harald/{status}/{id}/move: the destination status
/// directory name (e.g. "done").
#[derive(Deserialize)]
struct MoveBody {
    to: String,
}
async fn move_task(
State(state): State<Arc<AppState>>,
Path((status, id)): Path<(String, String)>,
Json(body): Json<MoveBody>,
) -> Result<Json<Value>, ApiError> {
let from = state
.vault_root
.join("todos/harald")
.join(&status)
.join(format!("{}.md", id));
if !from.exists() {
return Err(ApiError::NotFound(format!("Task '{}' not found in {}", id, status)));
}
let to = state
.vault_root
.join("todos/harald")
.join(&body.to)
.join(format!("{}.md", id));
state.write_filter.register(to.clone());
filesystem::move_file(&from, &to).map_err(ApiError::Vault)?;
Ok(Json(json!({
"status": "moved",
"from": status,
"to": body.to,
})))
}
/// Permanently delete a human task file.
///
/// `status` and `id` come from the URL and are joined into a filesystem path
/// under the vault root, so they are validated against traversal first.
async fn delete_task(
    State(state): State<Arc<AppState>>,
    Path((status, id)): Path<(String, String)>,
) -> Result<Json<Value>, ApiError> {
    // Security: reject segments that could escape the vault root.
    for seg in [status.as_str(), id.as_str()] {
        if seg.is_empty() || seg.contains("..") || seg.contains('/') || seg.contains('\\') {
            return Err(ApiError::NotFound(format!("Task '{}' not found", id)));
        }
    }
    let path = state
        .vault_root
        .join("todos/harald")
        .join(&status)
        .join(format!("{}.md", id));
    if !path.exists() {
        return Err(ApiError::NotFound(format!("Task '{}' not found", id)));
    }
    std::fs::remove_file(&path).map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &path)))?;
    Ok(Json(json!({ "status": "deleted" })))
}
/// Serialize a human task entity into the JSON shape the dashboard expects.
/// The `id` is the file stem; `status` is the directory the task lives in.
fn task_to_json(entity: &VaultEntity<HumanTask>, status: &str) -> Value {
    let fm = &entity.frontmatter;
    let id = match entity.path.file_stem().and_then(|s| s.to_str()) {
        Some(stem) => stem,
        None => "unknown",
    };
    json!({
        "id": id,
        "title": fm.title,
        "priority": fm.priority,
        "status": status,
        "source": fm.source,
        "repo": fm.repo,
        "labels": fm.labels,
        "created": fm.created,
        "due": fm.due,
        "body": entity.body,
    })
}

View file

@ -0,0 +1,93 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::{get, post};
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
/// Routes for browsing and mutating the vault directory tree.
pub fn routes() -> Router<Arc<AppState>> {
    let router = Router::new().route("/tree", get(get_tree));
    router.route("/tree/{*path}", post(create_dir).delete(delete_dir))
}
async fn get_tree(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
let tree = build_tree(&state.vault_root, &state.vault_root)?;
Ok(Json(tree))
}
/// Recursively build a JSON tree of the vault below `dir`.
///
/// `root` is used only to compute vault-relative paths in the output.
/// Hidden entries (name starting with '.') are skipped; entries are sorted
/// by file name for stable output.
///
/// NOTE(review): `path.is_dir()` follows symlinks, so a symlink cycle inside
/// the vault would recurse without bound — confirm vaults never contain such
/// links.
fn build_tree(root: &std::path::Path, dir: &std::path::Path) -> Result<Value, ApiError> {
    let mut children = Vec::new();
    let entries = std::fs::read_dir(dir)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, dir)))?;
    // Unreadable directory entries are silently dropped.
    let mut entries: Vec<_> = entries
        .filter_map(|e| e.ok())
        .collect();
    entries.sort_by_key(|e| e.file_name());
    for entry in entries {
        let path = entry.path();
        let name = entry.file_name().to_string_lossy().to_string();
        // Skip hidden files/dirs
        if name.starts_with('.') {
            continue;
        }
        let relative = path.strip_prefix(root).unwrap_or(&path);
        if path.is_dir() {
            let subtree = build_tree(root, &path)?;
            // Only the subtree's children are reused; name/path are rebuilt here.
            children.push(json!({
                "name": name,
                "path": relative,
                "type": "directory",
                "children": subtree.get("children").unwrap_or(&json!([])),
            }));
        } else {
            children.push(json!({
                "name": name,
                "path": relative,
                "type": "file",
            }));
        }
    }
    Ok(json!({
        "name": dir.file_name().and_then(|n| n.to_str()).unwrap_or("vault"),
        "path": dir.strip_prefix(root).unwrap_or(dir),
        "type": "directory",
        "children": children,
    }))
}
/// POST /tree/{*path} — create a directory (and missing parents) in the vault.
async fn create_dir(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Security: `path` is a client-supplied wildcard segment joined under the
    // vault root; reject `..` components so the new directory cannot land
    // outside the vault.
    if std::path::Path::new(&path)
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return Err(ApiError::NotFound(format!("Directory '{}' not found", path)));
    }
    let dir_path = state.vault_root.join(&path);
    std::fs::create_dir_all(&dir_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &dir_path)))?;
    Ok(Json(json!({ "status": "created", "path": path })))
}
/// DELETE /tree/{*path} — recursively delete a directory inside the vault.
async fn delete_dir(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Security: `path` is a client-supplied wildcard and this handler calls
    // remove_dir_all; reject `..` components so the deletion cannot escape
    // the vault root.
    if std::path::Path::new(&path)
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return Err(ApiError::NotFound(format!("Directory '{}' not found", path)));
    }
    let dir_path = state.vault_root.join(&path);
    if !dir_path.exists() {
        return Err(ApiError::NotFound(format!("Directory '{}' not found", path)));
    }
    std::fs::remove_dir_all(&dir_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &dir_path)))?;
    Ok(Json(json!({ "status": "deleted", "path": path })))
}

View file

@ -0,0 +1,78 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::State;
use axum::routing::post;
use axum::{Json, Router};
use serde::Deserialize;
use std::collections::HashSet;
use std::path::Path;
use std::sync::Arc;
use vault_core::validation;
/// Request body for POST /validate.
#[derive(Debug, Deserialize)]
pub struct ValidateRequest {
    /// Relative path within the vault
    pub path: String,
    /// Raw file content to validate (optional; if omitted, reads from disk)
    pub content: Option<String>,
}
/// Route for on-demand entity validation.
pub fn routes() -> Router<Arc<AppState>> {
    let router = Router::new();
    router.route("/validate", post(validate))
}
/// POST /validate — run frontmatter/schema validation on one vault file.
///
/// Validates either the supplied `content` or, if omitted, the file read from
/// disk at `path`. Cross-file reference checks (agent/skill names) are run
/// over the whole vault and appended to the per-file issues. The response is
/// `valid` only when no issue has level "error".
async fn validate(
    State(state): State<Arc<AppState>>,
    Json(req): Json<ValidateRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Security: `req.path` is client-controlled and joined under the vault
    // root below; reject `..` components so arbitrary files cannot be read.
    if Path::new(&req.path)
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return Err(ApiError::NotFound(format!("File not found: {}", req.path)));
    }
    let relative = Path::new(&req.path);
    let content = if let Some(c) = req.content {
        c
    } else {
        let full = state.vault_root.join(&req.path);
        tokio::fs::read_to_string(&full)
            .await
            .map_err(|e| ApiError::NotFound(format!("File not found: {} ({})", req.path, e)))?
    };
    let issues = validation::validate(relative, &content);
    // Reference validation needs the loaded agent/skill name sets.
    // NOTE(review): `.read().unwrap()` panics if a writer poisoned the lock.
    let agent_names: HashSet<String> = state
        .agents
        .read()
        .unwrap()
        .keys()
        .cloned()
        .collect();
    let skill_names: HashSet<String> = state
        .skills
        .read()
        .unwrap()
        .keys()
        .cloned()
        .collect();
    let ref_issues = validation::validate_references(&state.vault_root, &agent_names, &skill_names);
    let mut all_issues: Vec<serde_json::Value> = issues
        .into_iter()
        .map(|i| serde_json::to_value(i).unwrap_or_default())
        .collect();
    // Tag each reference issue with the entity it originated from.
    for (entity, issue) in ref_issues {
        let mut val = serde_json::to_value(&issue).unwrap_or_default();
        if let Some(obj) = val.as_object_mut() {
            obj.insert("entity".into(), serde_json::Value::String(entity));
        }
        all_issues.push(val);
    }
    Ok(Json(serde_json::json!({
        "path": req.path,
        "issues": all_issues,
        "valid": all_issues.iter().all(|i|
            i.get("level").and_then(|l| l.as_str()) != Some("error")
        ),
    })))
}

View file

@ -0,0 +1,214 @@
use crate::error::ApiError;
use crate::state::AppState;
use axum::extract::{Path, State};
use axum::routing::get;
use axum::{Json, Router};
use serde_json::{json, Value};
use std::sync::Arc;
use vault_core::filesystem;
use vault_core::types::{Notification, ViewDefinition};
/// Routes for view definitions (pages/widgets/layouts/custom) and notifications.
pub fn routes() -> Router<Arc<AppState>> {
    let view_routes = Router::new()
        .route("/views/pages", get(list_pages))
        .route("/views/widgets", get(list_widgets))
        .route("/views/layouts", get(list_layouts))
        .route("/views/{*path}", get(get_view).put(put_view).delete(delete_view));
    view_routes
        .route("/notifications", get(list_notifications))
        .route("/notifications/{id}", axum::routing::delete(dismiss_notification))
}
/// GET /views/pages — list page view definitions.
async fn list_pages(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    list_view_dir(&state, "views/pages").await
}
/// GET /views/widgets — list widget view definitions.
async fn list_widgets(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    list_view_dir(&state, "views/widgets").await
}
/// GET /views/layouts — list layout view definitions.
async fn list_layouts(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    list_view_dir(&state, "views/layouts").await
}
/// List all view definitions under `subdir`, sorted by their `position`
/// field (entries without a position sort last, at 999).
///
/// Unparsable files are logged and skipped rather than failing the request.
async fn list_view_dir(state: &AppState, subdir: &str) -> Result<Json<Value>, ApiError> {
    let dir = state.vault_root.join(subdir);
    let files = filesystem::list_md_files(&dir).map_err(ApiError::Vault)?;
    let mut views = Vec::new();
    for file in files {
        match filesystem::read_entity::<ViewDefinition>(&file) {
            Ok(entity) => {
                // The view's id is its file stem.
                let name = file.file_stem().and_then(|s| s.to_str()).unwrap_or("unknown");
                views.push(json!({
                    "name": name,
                    "type": entity.frontmatter.view_type,
                    "title": entity.frontmatter.title,
                    "icon": entity.frontmatter.icon,
                    "route": entity.frontmatter.route,
                    "position": entity.frontmatter.position,
                    "layout": entity.frontmatter.layout,
                    "component": entity.frontmatter.component,
                    "description": entity.frontmatter.description,
                }));
            }
            Err(e) => {
                tracing::warn!(path = ?file, error = %e, "Failed to read view definition");
            }
        }
    }
    views.sort_by_key(|v| v.get("position").and_then(|p| p.as_i64()).unwrap_or(999));
    Ok(Json(json!(views)))
}
/// GET /views/{*path} — return a view definition split into frontmatter + body.
///
/// The `.md` extension may be omitted by the caller. Files without parsable
/// frontmatter are returned with `frontmatter: null` and the raw content.
async fn get_view(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Security: client-controlled wildcard joined below the vault root;
    // reject `..` components so arbitrary files cannot be read.
    if std::path::Path::new(&path)
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return Err(ApiError::NotFound(format!("View '{}' not found", path)));
    }
    let file_path = state.vault_root.join("views").join(&path);
    // Default to the .md extension when the caller omits it.
    let file_path = if file_path.extension().is_none() {
        file_path.with_extension("md")
    } else {
        file_path
    };
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("View '{}' not found", path)));
    }
    let content = std::fs::read_to_string(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    if let Ok((yaml, body)) = vault_core::frontmatter::split_frontmatter(&content) {
        let frontmatter: Value = serde_yaml::from_str(yaml).unwrap_or(Value::Null);
        Ok(Json(json!({
            "path": path,
            "frontmatter": frontmatter,
            "body": body,
        })))
    } else {
        // No frontmatter block: hand back the raw file content.
        Ok(Json(json!({
            "path": path,
            "frontmatter": null,
            "body": content,
        })))
    }
}
/// PUT /views/{*path} — create or overwrite a view definition.
///
/// Accepts either `{"raw": "..."}` (written verbatim) or
/// `{"frontmatter": {...}, "body": "..."}`, which is re-assembled into a
/// frontmatter document. Missing parent directories are created.
async fn put_view(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
    Json(data): Json<Value>,
) -> Result<Json<Value>, ApiError> {
    // Security: client-controlled wildcard joined below the vault root;
    // reject `..` components so writes cannot land outside the vault.
    if std::path::Path::new(&path)
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return Err(ApiError::NotFound(format!("View '{}' not found", path)));
    }
    let file_path = state.vault_root.join("views").join(&path);
    // Default to the .md extension when the caller omits it.
    let file_path = if file_path.extension().is_none() {
        file_path.with_extension("md")
    } else {
        file_path
    };
    if let Some(parent) = file_path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, parent)))?;
    }
    let content = if let Some(raw) = data.get("raw").and_then(|r| r.as_str()) {
        raw.to_string()
    } else {
        let body = data.get("body").and_then(|b| b.as_str()).unwrap_or("");
        if let Some(fm) = data.get("frontmatter") {
            let yaml = serde_yaml::to_string(fm).map_err(|e| ApiError::Internal(e.to_string()))?;
            format!("---\n{}---\n{}", yaml, body)
        } else {
            body.to_string()
        }
    };
    // Register so the vault watcher ignores this daemon-originated write.
    state.write_filter.register(file_path.clone());
    std::fs::write(&file_path, content)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "saved", "path": path })))
}
/// DELETE /views/{*path} — remove a view definition file.
/// The `.md` extension may be omitted by the caller.
async fn delete_view(
    State(state): State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<Json<Value>, ApiError> {
    // Security: client-controlled wildcard joined below the vault root;
    // reject `..` components so the deletion cannot escape the vault.
    if std::path::Path::new(&path)
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return Err(ApiError::NotFound(format!("View '{}' not found", path)));
    }
    let file_path = state.vault_root.join("views").join(&path);
    let file_path = if file_path.extension().is_none() {
        file_path.with_extension("md")
    } else {
        file_path
    };
    if !file_path.exists() {
        return Err(ApiError::NotFound(format!("View '{}' not found", path)));
    }
    std::fs::remove_file(&file_path)
        .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &file_path)))?;
    Ok(Json(json!({ "status": "deleted", "path": path })))
}
/// GET /notifications — list active (non-expired) notifications.
///
/// Expired notification files are opportunistically deleted while listing.
/// NOTE(review): that auto-clean delete is not registered with the daemon
/// write filter, so the vault watcher will observe it as an external change —
/// confirm this is intended.
async fn list_notifications(State(state): State<Arc<AppState>>) -> Result<Json<Value>, ApiError> {
    let dir = state.vault_root.join("views/notifications");
    let files = filesystem::list_md_files(&dir).map_err(ApiError::Vault)?;
    let mut notifications = Vec::new();
    let now = chrono::Utc::now();
    for file in files {
        match filesystem::read_entity::<Notification>(&file) {
            Ok(entity) => {
                // Skip expired notifications
                if let Some(expires) = entity.frontmatter.expires {
                    if expires < now {
                        // Auto-clean expired
                        let _ = std::fs::remove_file(&file);
                        continue;
                    }
                }
                // The notification's id is its file stem.
                let id = file.file_stem().and_then(|s| s.to_str()).unwrap_or("unknown");
                notifications.push(json!({
                    "id": id,
                    "title": entity.frontmatter.title,
                    "message": entity.frontmatter.message,
                    "level": entity.frontmatter.level,
                    "source": entity.frontmatter.source,
                    "created": entity.frontmatter.created,
                    "expires": entity.frontmatter.expires,
                }));
            }
            Err(e) => {
                tracing::warn!(path = ?file, error = %e, "Failed to read notification");
            }
        }
    }
    Ok(Json(json!(notifications)))
}
/// DELETE /notifications/{id} — remove a notification file from the vault.
async fn dismiss_notification(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let file_name = format!("{}.md", id);
    let path = state.vault_root.join("views/notifications").join(file_name);
    if path.exists() {
        std::fs::remove_file(&path)
            .map_err(|e| ApiError::Vault(vault_core::VaultError::io(e, &path)))?;
        Ok(Json(json!({ "status": "dismissed" })))
    } else {
        Err(ApiError::NotFound(format!("Notification '{}' not found", id)))
    }
}

View file

@ -0,0 +1,111 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, Mutex, RwLock};
use vault_core::config::VaultConfig;
use vault_core::entity::VaultEntity;
use vault_core::filesystem;
use vault_core::types::{Agent, Skill};
use vault_scheduler::cron_engine::CronEngine;
use vault_scheduler::executor::Executor;
use vault_scheduler::executors::process::GenericProcessExecutor;
use vault_scheduler::state::RuntimeState;
use vault_scheduler::task_runner::TaskRunner;
use vault_watch::events::VaultEvent;
use vault_watch::write_filter::DaemonWriteFilter;
/// Shared state for the API server; wrapped in `Arc` and handed to every route.
pub struct AppState {
    /// Root directory of the vault on disk.
    pub vault_root: PathBuf,
    /// Parsed vault configuration.
    pub config: VaultConfig,
    /// Cron scheduling engine, guarded by a blocking mutex.
    pub cron_engine: Mutex<CronEngine>,
    /// Marks daemon-originated writes so the watcher can ignore them.
    pub write_filter: Arc<DaemonWriteFilter>,
    /// Broadcast channel fanning vault events out to WebSocket clients.
    pub event_tx: tokio::sync::broadcast::Sender<Arc<VaultEvent>>,
    /// Agent definitions loaded from `agents/`, keyed by agent name.
    pub agents: RwLock<HashMap<String, VaultEntity<Agent>>>,
    /// Skill definitions loaded from `skills/`, keyed by skill name.
    pub skills: RwLock<HashMap<String, VaultEntity<Skill>>>,
    /// Persisted runtime state (loaded/saved via `RuntimeState`).
    pub runtime_state: Mutex<RuntimeState>,
    /// Moment this process instance started.
    pub startup_time: chrono::DateTime<chrono::Utc>,
    /// Executor used to run agent tasks.
    executor: Arc<dyn Executor>,
    /// Concurrency limit handed to each `TaskRunner`.
    max_parallel: usize,
}
impl AppState {
    /// Build the shared application state.
    ///
    /// Creates the event broadcast channel, the daemon write filter, and the
    /// process executor, then loads persisted runtime state from disk and
    /// stamps it with the current startup time.
    pub fn new(vault_root: PathBuf, config: VaultConfig, max_parallel: usize) -> Self {
        // 256-slot buffer; subscribers lagging past it miss events.
        let (event_tx, _) = tokio::sync::broadcast::channel(256);
        let write_filter = Arc::new(DaemonWriteFilter::new());
        let executor: Arc<dyn Executor> =
            Arc::new(GenericProcessExecutor::new(vault_root.clone()));
        let now = chrono::Utc::now();
        // Missing/unreadable persisted state falls back to defaults.
        let mut runtime_state = RuntimeState::load(&vault_root).unwrap_or_default();
        runtime_state.last_startup = Some(now);
        // Persist failure is deliberately ignored; the state is advisory.
        let _ = runtime_state.save(&vault_root);
        Self {
            cron_engine: Mutex::new(CronEngine::new(vault_root.clone())),
            vault_root,
            config,
            write_filter,
            event_tx,
            agents: RwLock::new(HashMap::new()),
            skills: RwLock::new(HashMap::new()),
            runtime_state: Mutex::new(runtime_state),
            startup_time: now,
            executor,
            max_parallel,
        }
    }
    /// Construct a fresh task runner sharing this state's executor and
    /// write filter.
    pub fn task_runner(&self) -> TaskRunner {
        TaskRunner::new(
            self.vault_root.clone(),
            self.max_parallel,
            self.executor.clone(),
            self.write_filter.clone(),
        )
    }
    /// Load all agent and skill definitions from disk.
    ///
    /// Unparsable files are logged and skipped; the in-memory maps are
    /// replaced wholesale on success of the directory listing.
    pub fn reload_definitions(&self) -> Result<(), vault_core::VaultError> {
        // Load agents
        let agent_files = filesystem::list_md_files(&self.vault_root.join("agents"))?;
        let mut agents = HashMap::new();
        for path in agent_files {
            match filesystem::read_entity::<Agent>(&path) {
                Ok(entity) => {
                    agents.insert(entity.frontmatter.name.clone(), entity);
                }
                Err(e) => {
                    tracing::warn!(path = ?path, error = %e, "Failed to load agent");
                }
            }
        }
        tracing::info!(count = agents.len(), "Loaded agents");
        *self.agents.write().unwrap() = agents;
        // Load skills
        let skill_files =
            filesystem::list_md_files_recursive(&self.vault_root.join("skills"))?;
        let mut skills = HashMap::new();
        for path in skill_files {
            match filesystem::read_entity::<Skill>(&path) {
                Ok(entity) => {
                    skills.insert(entity.frontmatter.name.clone(), entity);
                }
                Err(e) => {
                    tracing::warn!(path = ?path, error = %e, "Failed to load skill");
                }
            }
        }
        tracing::info!(count = skills.len(), "Loaded skills");
        *self.skills.write().unwrap() = skills;
        Ok(())
    }
    /// Publish a vault event to all WebSocket subscribers (best-effort:
    /// the send result is ignored when there are no subscribers).
    pub fn broadcast(&self, event: VaultEvent) {
        let _ = self.event_tx.send(Arc::new(event));
    }
    /// Obtain a new receiver for the vault event broadcast channel.
    pub fn subscribe(&self) -> tokio::sync::broadcast::Receiver<Arc<VaultEvent>> {
        self.event_tx.subscribe()
    }
}

129
crates/vault-api/src/ws.rs Normal file
View file

@ -0,0 +1,129 @@
use crate::state::AppState;
use crate::ws_protocol::{WsAction, WsEvent};
use axum::extract::ws::{Message, WebSocket};
use axum::extract::{State, WebSocketUpgrade};
use axum::response::Response;
use std::sync::Arc;
/// HTTP -> WebSocket upgrade endpoint; each accepted connection is driven by
/// its own `handle_socket` future.
pub async fn ws_handler(
    ws: WebSocketUpgrade,
    State(state): State<Arc<AppState>>,
) -> Response {
    ws.on_upgrade(move |socket| handle_socket(socket, state))
}
async fn handle_socket(socket: WebSocket, state: Arc<AppState>) {
let (mut sender, mut receiver) = socket.split();
use futures_util::{SinkExt, StreamExt};
let mut event_rx = state.subscribe();
// Send task: forward vault events to the client
let send_state = state.clone();
let send_task = tokio::spawn(async move {
while let Ok(event) = event_rx.recv().await {
let ws_event = WsEvent::from_vault_event(&event, &send_state.vault_root);
match serde_json::to_string(&ws_event) {
Ok(json) => {
if sender.send(Message::Text(json.into())).await.is_err() {
break;
}
}
Err(e) => {
tracing::warn!(error = %e, "Failed to serialize WS event");
}
}
}
});
// Receive task: handle client actions
let recv_state = state.clone();
let recv_task = tokio::spawn(async move {
while let Some(msg) = receiver.next().await {
match msg {
Ok(Message::Text(text)) => {
match serde_json::from_str::<WsAction>(&text) {
Ok(action) => handle_action(&recv_state, action).await,
Err(e) => {
tracing::warn!(error = %e, text = %text, "Invalid WS action");
}
}
}
Ok(Message::Close(_)) => break,
Err(e) => {
tracing::debug!(error = %e, "WebSocket error");
break;
}
_ => {}
}
}
});
// Wait for either task to finish
tokio::select! {
_ = send_task => {},
_ = recv_task => {},
}
tracing::debug!("WebSocket connection closed");
}
/// Apply a client-initiated WebSocket action to the vault.
///
/// All actions are best-effort: failures are logged, never reported back to
/// the client. Client-supplied paths/names are validated against traversal
/// before being joined under the vault root.
async fn handle_action(state: &AppState, action: WsAction) {
    // Security: `true` when a client string contains a `..` path component
    // that could escape the vault root once joined.
    fn escapes_vault(p: &str) -> bool {
        std::path::Path::new(p)
            .components()
            .any(|c| matches!(c, std::path::Component::ParentDir))
    }
    match action {
        // Move a vault file between two vault-relative paths (kanban drag).
        WsAction::MoveTask { from, to } => {
            if escapes_vault(&from) || escapes_vault(&to) {
                tracing::warn!(%from, %to, "WS move_task rejected: path traversal");
                return;
            }
            let from_path = state.vault_root.join(&from);
            let to_path = state.vault_root.join(&to);
            // Register so the watcher ignores this daemon-originated write.
            state.write_filter.register(to_path.clone());
            if let Err(e) = vault_core::filesystem::move_file(&from_path, &to_path) {
                tracing::error!(error = %e, "WS move_task failed");
            }
        }
        // Fire a cron job immediately, regardless of its schedule.
        WsAction::TriggerCron { name } => {
            if escapes_vault(&name) || name.contains('/') {
                tracing::warn!(%name, "WS trigger_cron rejected: invalid name");
                return;
            }
            let cron_path = state
                .vault_root
                .join("crons/active")
                .join(format!("{}.md", name));
            // NOTE(review): blocking mutex held across fire_cron on an async
            // worker thread — confirm fire_cron is quick, or move it to
            // spawn_blocking.
            let mut engine = state.cron_engine.lock().unwrap();
            if let Err(e) = engine.fire_cron(&cron_path, &state.write_filter) {
                tracing::error!(error = %e, "WS trigger_cron failed");
            }
        }
        // Enqueue a one-off agent task; `context` becomes the task body.
        WsAction::TriggerAgent { name, context } => {
            let title = format!("WS trigger: {}", name);
            let slug = vault_core::filesystem::timestamped_slug(&title);
            let task_path = state
                .vault_root
                .join("todos/agent/queued")
                .join(format!("{}.md", slug));
            let task = vault_core::types::AgentTask {
                title,
                agent: name,
                priority: vault_core::types::Priority::Medium,
                task_type: Some("ws-trigger".into()),
                created: chrono::Utc::now(),
                started: None,
                completed: None,
                retry: 0,
                max_retries: 0,
                input: None,
                output: None,
                error: None,
            };
            let entity = vault_core::entity::VaultEntity {
                path: task_path.clone(),
                frontmatter: task,
                body: context.unwrap_or_default(),
            };
            // Register so the watcher ignores this daemon-originated write.
            state.write_filter.register(task_path.clone());
            if let Err(e) = vault_core::filesystem::write_entity(&entity) {
                tracing::error!(error = %e, "WS trigger_agent failed");
            }
        }
        // Keep-alive; no server-side effect.
        WsAction::Ping => {
            tracing::debug!("WS ping received");
        }
    }
}

View file

@ -0,0 +1,76 @@
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::path::Path;
use vault_watch::events::VaultEvent;
/// Server -> Client event
///
/// Wire form of a vault filesystem event: the event kind string (from
/// `VaultEvent::event_type`), the vault `area`, the vault-relative `path`,
/// and, when available, the file's parsed frontmatter as `data`.
#[derive(Debug, Clone, Serialize)]
pub struct WsEvent {
    #[serde(rename = "type")]
    pub event_type: String,
    pub area: String,
    pub path: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub data: Option<Value>,
}
impl WsEvent {
    /// Convert a filesystem-level vault event into the wire representation.
    ///
    /// The path is made vault-relative; `area` is its first two path
    /// components (e.g. "todos/harald"). If the file still exists, its YAML
    /// frontmatter is attached as `data` on a best-effort basis.
    ///
    /// NOTE(review): the area split assumes '/' separators in the lossy
    /// string form of the path — confirm Windows is out of scope.
    pub fn from_vault_event(event: &VaultEvent, vault_root: &Path) -> Self {
        let path = event.path();
        let relative = path
            .strip_prefix(vault_root)
            .unwrap_or(path)
            .to_string_lossy()
            .to_string();
        // Derive area from relative path (first two components)
        let area = relative
            .split('/')
            .take(2)
            .collect::<Vec<_>>()
            .join("/");
        // Try to read frontmatter data
        let data = if path.exists() {
            std::fs::read_to_string(path)
                .ok()
                .and_then(|content| {
                    vault_core::frontmatter::split_frontmatter(&content)
                        .ok()
                        .and_then(|(yaml, _)| serde_yaml::from_str::<Value>(yaml).ok())
                })
        } else {
            None
        };
        Self {
            event_type: event.event_type().to_string(),
            area,
            path: relative,
            data,
        }
    }
}
/// Client -> Server action
///
/// JSON tagged by an `"action"` field, e.g.
/// `{"action": "move_task", "from": "...", "to": "..."}`.
#[derive(Debug, Deserialize)]
#[serde(tag = "action")]
pub enum WsAction {
    /// Move a vault file between two vault-relative paths.
    #[serde(rename = "move_task")]
    MoveTask {
        from: String,
        to: String,
    },
    /// Fire the named cron job immediately.
    #[serde(rename = "trigger_cron")]
    TriggerCron {
        name: String,
    },
    /// Enqueue a one-off task for the named agent; `context` becomes the body.
    #[serde(rename = "trigger_agent")]
    TriggerAgent {
        name: String,
        #[serde(default)]
        context: Option<String>,
    },
    /// Client keep-alive.
    #[serde(rename = "ping")]
    Ping,
}

View file

@ -0,0 +1,14 @@
# Manifest for vault-core: shared vault types, YAML frontmatter parsing,
# entity classification, and filesystem operations.
[package]
name = "vault-core"
version.workspace = true
edition.workspace = true
[dependencies]
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
chrono.workspace = true
thiserror.workspace = true
uuid.workspace = true
tracing.workspace = true
# NOTE(review): `cron` must be declared under [workspace.dependencies] in the
# root Cargo.toml — confirm it is (not visible in this excerpt).
cron.workspace = true

View file

@ -0,0 +1,101 @@
use crate::error::VaultError;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::Path;
/// Top-level daemon configuration, loaded from `.vault/config.yaml`.
/// Every section defaults, so a missing or partial file still works.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct VaultConfig {
    /// MCP servers available to agents, keyed by server name.
    #[serde(default)]
    pub mcp_servers: HashMap<String, McpServerConfig>,
    /// Task executor backends, keyed by executor name.
    #[serde(default)]
    pub executors: HashMap<String, ExecutorConfig>,
    /// Agent task queue tuning.
    #[serde(default)]
    pub queue: QueueConfig,
    /// AI assistant proxy settings.
    #[serde(default)]
    pub assistant: AssistantConfig,
}
/// Spawn configuration for one MCP server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpServerConfig {
    /// Executable to launch.
    pub command: String,
    /// Command-line arguments.
    #[serde(default)]
    pub args: Vec<String>,
    /// Extra environment variables for the child process.
    #[serde(default)]
    pub env: HashMap<String, String>,
}
/// Configuration for one executor backend. All fields are optional —
/// presumably `command` applies to process executors and
/// `base_url`/`default_model` to HTTP ones; confirm against the scheduler.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutorConfig {
    #[serde(default)]
    pub command: Option<String>,
    #[serde(default)]
    pub base_url: Option<String>,
    #[serde(default)]
    pub default_model: Option<String>,
}
/// Agent task queue tuning.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueueConfig {
    /// Maximum number of tasks run concurrently (default 4).
    #[serde(default = "default_max_parallel")]
    pub max_parallel: usize,
    /// Per-task timeout (default 600; presumably seconds — TODO confirm
    /// against the task runner).
    #[serde(default = "default_timeout")]
    pub default_timeout: u64,
    /// Delay before retrying a failed task (default 60; presumably seconds).
    #[serde(default = "default_retry_delay")]
    pub retry_delay: u64,
}
impl Default for QueueConfig {
    fn default() -> Self {
        Self {
            max_parallel: default_max_parallel(),
            default_timeout: default_timeout(),
            retry_delay: default_retry_delay(),
        }
    }
}
// Serde per-field default helpers; keep in sync with the Default impl above.
fn default_max_parallel() -> usize {
    4
}
fn default_timeout() -> u64 {
    600
}
fn default_retry_delay() -> u64 {
    60
}
/// AI assistant proxy settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssistantConfig {
    /// Model used when a request does not name one (default "local/qwen3").
    #[serde(default = "default_assistant_model")]
    pub default_model: String,
    /// Additional models made available (may be empty).
    #[serde(default)]
    pub models: Vec<String>,
}
impl Default for AssistantConfig {
    fn default() -> Self {
        Self {
            default_model: default_assistant_model(),
            models: vec![],
        }
    }
}
// Serde default helper; keep in sync with AssistantConfig::default.
fn default_assistant_model() -> String {
    "local/qwen3".into()
}
impl VaultConfig {
    /// Load config from `.vault/config.yaml` in the vault root.
    /// Returns default config if file doesn't exist.
    ///
    /// # Errors
    /// `VaultError::Io` if the file exists but cannot be read, or
    /// `VaultError::Yaml` if it does not parse.
    pub fn load(vault_root: &Path) -> Result<Self, VaultError> {
        let config_path = vault_root.join(".vault/config.yaml");
        if !config_path.exists() {
            return Ok(Self::default());
        }
        let content = std::fs::read_to_string(&config_path)
            .map_err(|e| VaultError::io(e, &config_path))?;
        let config: VaultConfig = serde_yaml::from_str(&content)?;
        Ok(config)
    }
}

View file

@ -0,0 +1,195 @@
use crate::error::VaultError;
use crate::types::{AgentTaskStatus, TaskStatus};
use serde::{de::DeserializeOwned, Serialize};
use std::path::{Path, PathBuf};
/// A vault entity: parsed frontmatter + markdown body + file path.
///
/// `T` is the typed frontmatter (e.g. an agent, skill, or task struct);
/// `body` is the markdown below the closing `---`, preserved byte-for-byte.
#[derive(Debug, Clone)]
pub struct VaultEntity<T> {
    /// Location of the backing `.md` file.
    pub path: PathBuf,
    /// Typed YAML frontmatter.
    pub frontmatter: T,
    /// Markdown body following the frontmatter block.
    pub body: String,
}
/// The kind of entity inferred from its relative path within the vault.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum EntityKind {
    Agent,
    Skill,
    CronActive,
    CronPaused,
    CronTemplate,
    /// Human task, carrying the status implied by its directory.
    HumanTask(TaskStatus),
    /// Agent task, carrying the status implied by its directory.
    AgentTask(AgentTaskStatus),
    Knowledge,
    ViewPage,
    ViewWidget,
    ViewLayout,
    ViewCustom,
    Notification,
    /// Path does not match any known vault area.
    Unknown,
}
/// Classify a relative path within the vault to determine entity kind.
///
/// Matching is on leading path components (separator handling delegated to
/// `std::path::Components`), so files nested deeper inside an area classify
/// the same as top-level ones.
pub fn classify_path(relative: &Path) -> EntityKind {
    let components: Vec<&str> = relative
        .components()
        .filter_map(|c| c.as_os_str().to_str())
        .collect();
    match components.as_slice() {
        ["agents", ..] => EntityKind::Agent,
        ["skills", ..] => EntityKind::Skill,
        ["crons", "active", ..] => EntityKind::CronActive,
        ["crons", "paused", ..] => EntityKind::CronPaused,
        ["crons", "templates", ..] => EntityKind::CronTemplate,
        // Task status is encoded by the third path component.
        ["todos", "harald", status, ..] => {
            EntityKind::HumanTask(task_status_from_dir(status))
        }
        ["todos", "agent", status, ..] => {
            EntityKind::AgentTask(agent_task_status_from_dir(status))
        }
        ["knowledge", ..] => EntityKind::Knowledge,
        ["views", "pages", ..] => EntityKind::ViewPage,
        ["views", "widgets", ..] => EntityKind::ViewWidget,
        ["views", "layouts", ..] => EntityKind::ViewLayout,
        ["views", "custom", ..] => EntityKind::ViewCustom,
        ["views", "notifications", ..] => EntityKind::Notification,
        _ => EntityKind::Unknown,
    }
}
/// Map a `todos/harald/<dir>` directory name to a human task status.
/// Unknown directories fall back to `Open`.
pub fn task_status_from_dir(dir: &str) -> TaskStatus {
    match dir {
        "urgent" => TaskStatus::Urgent,
        "open" => TaskStatus::Open,
        "in-progress" => TaskStatus::InProgress,
        "done" => TaskStatus::Done,
        _ => TaskStatus::Open,
    }
}
/// Map a `todos/agent/<dir>` directory name to an agent task status.
/// Unknown directories fall back to `Queued`.
pub fn agent_task_status_from_dir(dir: &str) -> AgentTaskStatus {
    match dir {
        "queued" => AgentTaskStatus::Queued,
        "running" => AgentTaskStatus::Running,
        "done" => AgentTaskStatus::Done,
        "failed" => AgentTaskStatus::Failed,
        _ => AgentTaskStatus::Queued,
    }
}
/// Inverse of `task_status_from_dir`: status -> directory name.
pub fn task_status_dir(status: &TaskStatus) -> &'static str {
    match status {
        TaskStatus::Urgent => "urgent",
        TaskStatus::Open => "open",
        TaskStatus::InProgress => "in-progress",
        TaskStatus::Done => "done",
    }
}
/// Inverse of `agent_task_status_from_dir`: status -> directory name.
pub fn agent_task_status_dir(status: &AgentTaskStatus) -> &'static str {
    match status {
        AgentTaskStatus::Queued => "queued",
        AgentTaskStatus::Running => "running",
        AgentTaskStatus::Done => "done",
        AgentTaskStatus::Failed => "failed",
    }
}
impl<T> VaultEntity<T>
where
    T: DeserializeOwned + Serialize,
{
    /// Parse raw markdown content into a typed entity.
    ///
    /// # Errors
    /// `MissingFrontmatter` if no `---` block is present, or a YAML error if
    /// the frontmatter does not deserialize into `T`.
    pub fn from_content(path: PathBuf, content: &str) -> Result<Self, VaultError> {
        let (yaml, body) =
            crate::frontmatter::split_frontmatter_with_path(content, &path)?;
        let frontmatter: T = crate::frontmatter::parse_entity(yaml)?;
        Ok(Self {
            path,
            frontmatter,
            body: body.to_string(),
        })
    }
    /// Serialize back to a frontmatter + body markdown document.
    /// NOTE(review): this fallible method shadows the blanket
    /// `std::string::ToString::to_string` — consider a distinct name.
    pub fn to_string(&self) -> Result<String, VaultError> {
        crate::frontmatter::write_frontmatter(&self.frontmatter, &self.body)
    }
}
// Unit tests: path-based classification for every known vault area.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_classify_agent() {
        assert_eq!(
            classify_path(Path::new("agents/reviewer.md")),
            EntityKind::Agent
        );
    }
    #[test]
    fn test_classify_skill() {
        assert_eq!(
            classify_path(Path::new("skills/vault/read-vault.md")),
            EntityKind::Skill
        );
    }
    #[test]
    fn test_classify_cron() {
        assert_eq!(
            classify_path(Path::new("crons/active/daily-review.md")),
            EntityKind::CronActive
        );
        assert_eq!(
            classify_path(Path::new("crons/paused/old-job.md")),
            EntityKind::CronPaused
        );
    }
    #[test]
    fn test_classify_human_task() {
        // Status is carried inside the variant, derived from the directory.
        assert_eq!(
            classify_path(Path::new("todos/harald/urgent/fix-bug.md")),
            EntityKind::HumanTask(TaskStatus::Urgent)
        );
        assert_eq!(
            classify_path(Path::new("todos/harald/in-progress/feature.md")),
            EntityKind::HumanTask(TaskStatus::InProgress)
        );
    }
    #[test]
    fn test_classify_agent_task() {
        assert_eq!(
            classify_path(Path::new("todos/agent/queued/task-1.md")),
            EntityKind::AgentTask(AgentTaskStatus::Queued)
        );
        assert_eq!(
            classify_path(Path::new("todos/agent/running/task-2.md")),
            EntityKind::AgentTask(AgentTaskStatus::Running)
        );
    }
    #[test]
    fn test_classify_knowledge() {
        assert_eq!(
            classify_path(Path::new("knowledge/notes/rust-tips.md")),
            EntityKind::Knowledge
        );
    }
    #[test]
    fn test_classify_views() {
        assert_eq!(
            classify_path(Path::new("views/pages/home.md")),
            EntityKind::ViewPage
        );
        assert_eq!(
            classify_path(Path::new("views/notifications/alert.md")),
            EntityKind::Notification
        );
    }
}

View file

@ -0,0 +1,34 @@
use std::path::PathBuf;
/// Unified error type for vault-core operations.
#[derive(Debug, thiserror::Error)]
pub enum VaultError {
    /// Filesystem failure, annotated with the path being accessed.
    #[error("IO error: {source} (path: {path:?})")]
    Io {
        source: std::io::Error,
        path: PathBuf,
    },
    /// YAML (de)serialization failure.
    #[error("YAML parsing error: {0}")]
    Yaml(#[from] serde_yaml::Error),
    /// File has no parsable `---` frontmatter block.
    #[error("Missing frontmatter in {0}")]
    MissingFrontmatter(PathBuf),
    /// Entity parsed but violates an invariant described by `reason`.
    #[error("Invalid entity at {path}: {reason}")]
    InvalidEntity { path: PathBuf, reason: String },
    /// Generic lookup failure; the message describes what was missing.
    #[error("Not found: {0}")]
    NotFound(String),
    /// An entity references another entity that does not exist.
    #[error("Broken reference from {from} to {to}")]
    BrokenReference { from: PathBuf, to: String },
}
impl VaultError {
    /// Convenience constructor attaching a path to an `std::io::Error`.
    pub fn io(source: std::io::Error, path: impl Into<PathBuf>) -> Self {
        Self::Io {
            source,
            path: path.into(),
        }
    }
}

View file

@ -0,0 +1,167 @@
use crate::entity::VaultEntity;
use crate::error::VaultError;
use serde::{de::DeserializeOwned, Serialize};
use std::path::{Path, PathBuf};
/// Read and parse a vault entity from a markdown file.
///
/// # Errors
/// `VaultError::Io` if the file cannot be read; frontmatter/YAML errors are
/// propagated from `VaultEntity::from_content`.
pub fn read_entity<T: DeserializeOwned + Serialize>(path: &Path) -> Result<VaultEntity<T>, VaultError> {
    let content =
        std::fs::read_to_string(path).map_err(|e| VaultError::io(e, path))?;
    VaultEntity::from_content(path.to_path_buf(), &content)
}
/// Write a vault entity to disk.
/// Creates missing parent directories; overwrites any existing file.
pub fn write_entity<T: DeserializeOwned + Serialize>(entity: &VaultEntity<T>) -> Result<(), VaultError> {
    let content = entity.to_string()?;
    if let Some(parent) = entity.path.parent() {
        std::fs::create_dir_all(parent).map_err(|e| VaultError::io(e, parent))?;
    }
    std::fs::write(&entity.path, content).map_err(|e| VaultError::io(e, &entity.path))
}
/// Move a file from one path to another, creating parent dirs as needed.
///
/// Falls back to copy + remove when `rename` fails, so moves across
/// filesystem boundaries (EXDEV — e.g. a vault spanning a bind mount)
/// succeed instead of erroring.
pub fn move_file(from: &Path, to: &Path) -> Result<(), VaultError> {
    if let Some(parent) = to.parent() {
        std::fs::create_dir_all(parent).map_err(|e| VaultError::io(e, parent))?;
    }
    match std::fs::rename(from, to) {
        Ok(()) => Ok(()),
        Err(_) => {
            // rename cannot cross filesystems; retry as copy + delete.
            // (A genuinely missing source fails the copy with the same error.)
            std::fs::copy(from, to).map_err(|e| VaultError::io(e, from))?;
            std::fs::remove_file(from).map_err(|e| VaultError::io(e, from))
        }
    }
}
/// Ensure the standard vault directory structure exists.
///
/// Idempotent: creates every expected subdirectory (including missing
/// parents) under `vault_root`.
pub fn ensure_vault_structure(vault_root: &Path) -> Result<(), VaultError> {
    let dirs: &[&str] = &[
        "agents",
        "skills/vault",
        "crons/active",
        "crons/paused",
        "crons/templates",
        "todos/harald/urgent",
        "todos/harald/open",
        "todos/harald/in-progress",
        "todos/harald/done",
        "todos/agent/queued",
        "todos/agent/running",
        "todos/agent/done",
        "todos/agent/failed",
        "knowledge",
        "views/pages",
        "views/widgets",
        "views/layouts",
        "views/custom",
        "views/notifications",
        ".vault/logs",
        ".vault/templates",
    ];
    dirs.iter().try_for_each(|dir| {
        let path = vault_root.join(dir);
        std::fs::create_dir_all(&path).map_err(|e| VaultError::io(e, &path))
    })
}
/// List all .md files in a directory (non-recursive).
/// Returns an empty list if `dir` does not exist; results are sorted.
pub fn list_md_files(dir: &Path) -> Result<Vec<PathBuf>, VaultError> {
    if !dir.exists() {
        return Ok(vec![]);
    }
    let mut files = Vec::new();
    let entries = std::fs::read_dir(dir).map_err(|e| VaultError::io(e, dir))?;
    for entry in entries {
        let entry = entry.map_err(|e| VaultError::io(e, dir))?;
        let path = entry.path();
        if path.is_file() && path.extension().is_some_and(|e| e == "md") {
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}
/// List all .md files in a directory tree (recursive).
/// Returns an empty list if `dir` does not exist; results are sorted.
pub fn list_md_files_recursive(dir: &Path) -> Result<Vec<PathBuf>, VaultError> {
    if !dir.exists() {
        return Ok(vec![]);
    }
    let mut files = Vec::new();
    walk_dir_recursive(dir, &mut files)?;
    files.sort();
    Ok(files)
}
/// Depth-first walk collecting .md files; dot-directories (e.g. `.vault`)
/// are skipped entirely.
fn walk_dir_recursive(dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), VaultError> {
    let entries = std::fs::read_dir(dir).map_err(|e| VaultError::io(e, dir))?;
    for entry in entries {
        let entry = entry.map_err(|e| VaultError::io(e, dir))?;
        let path = entry.path();
        if path.is_dir() {
            // Skip dotfiles/dirs
            if path
                .file_name()
                .is_some_and(|n| n.to_str().is_some_and(|s| s.starts_with('.')))
            {
                continue;
            }
            walk_dir_recursive(&path, files)?;
        } else if path.is_file() && path.extension().is_some_and(|e| e == "md") {
            files.push(path);
        }
    }
    Ok(())
}
/// Convert a string to a URL-safe slug.
///
/// Lowercases the input, keeps alphanumeric characters, and collapses any
/// run of other characters into a single '-'. No leading or trailing dash
/// is produced.
pub fn slugify(s: &str) -> String {
    let mut slug = String::with_capacity(s.len());
    let mut gap = false;
    for ch in s.to_lowercase().chars() {
        if ch.is_alphanumeric() {
            // Flush one separator for the preceding run of junk characters,
            // but never at the very start of the slug.
            if gap && !slug.is_empty() {
                slug.push('-');
            }
            slug.push(ch);
            gap = false;
        } else {
            gap = true;
        }
    }
    slug
}
/// Create a timestamped slug: `YYYYMMDD-HHMMSS-slug` (UTC).
/// Produces sortable, reasonably unique file names for new tasks.
pub fn timestamped_slug(title: &str) -> String {
    let now = chrono::Utc::now();
    format!("{}-{}", now.format("%Y%m%d-%H%M%S"), slugify(title))
}
// Unit tests for slug generation and vault directory scaffolding.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_slugify() {
        assert_eq!(slugify("Hello World!"), "hello-world");
        assert_eq!(slugify("Review PR #1234"), "review-pr-1234");
        assert_eq!(slugify(" spaces everywhere "), "spaces-everywhere");
    }
    #[test]
    fn test_timestamped_slug() {
        let slug = timestamped_slug("My Task");
        assert!(slug.ends_with("-my-task"));
        assert!(slug.len() > 20);
    }
    #[test]
    fn test_ensure_vault_structure() {
        // Use a per-process temp dir so concurrent test runs (e.g. two
        // checkouts on one machine) cannot race on the same fixed path.
        let tmp = std::env::temp_dir()
            .join(format!("vault-os-test-structure-{}", std::process::id()));
        let _ = std::fs::remove_dir_all(&tmp);
        ensure_vault_structure(&tmp).unwrap();
        assert!(tmp.join("agents").is_dir());
        assert!(tmp.join("todos/harald/urgent").is_dir());
        assert!(tmp.join("todos/agent/queued").is_dir());
        assert!(tmp.join(".vault/logs").is_dir());
        let _ = std::fs::remove_dir_all(&tmp);
    }
}

View file

@ -0,0 +1,180 @@
use crate::error::VaultError;
use serde::{de::DeserializeOwned, Serialize};
use std::path::Path;
// YAML frontmatter fence marker.
const DELIMITER: &str = "---";
/// Split a markdown file into frontmatter YAML and body.
/// Returns (yaml_str, body_str). Body preserves original content byte-for-byte.
///
/// The error carries a `<unknown>` placeholder path; callers that know the
/// file's path should use `split_frontmatter_with_path` instead.
pub fn split_frontmatter(content: &str) -> Result<(&str, &str), VaultError> {
    // Leading whitespace before the opening fence is tolerated.
    let trimmed = content.trim_start();
    if !trimmed.starts_with(DELIMITER) {
        return Err(VaultError::MissingFrontmatter(
            "<unknown>".into(),
        ));
    }
    // Find the opening delimiter
    let after_first = &trimmed[DELIMITER.len()..];
    // Drop the line terminator right after the opening fence (LF or CRLF).
    let after_first = after_first.strip_prefix('\n').unwrap_or(
        after_first.strip_prefix("\r\n").unwrap_or(after_first),
    );
    // Find the closing delimiter
    if let Some(end_pos) = find_closing_delimiter(after_first) {
        let yaml = &after_first[..end_pos];
        let rest = &after_first[end_pos + DELIMITER.len()..];
        // Skip the newline after closing ---
        let body = rest
            .strip_prefix('\n')
            .unwrap_or(rest.strip_prefix("\r\n").unwrap_or(rest));
        Ok((yaml, body))
    } else {
        Err(VaultError::MissingFrontmatter(
            "<unknown>".into(),
        ))
    }
}
/// Split frontmatter with path context for error messages.
///
/// Identical to [`split_frontmatter`], but a `MissingFrontmatter` error is
/// rewritten to carry the real file path instead of the `<unknown>` placeholder.
pub fn split_frontmatter_with_path<'a>(
    content: &'a str,
    path: &Path,
) -> Result<(&'a str, &'a str), VaultError> {
    match split_frontmatter(content) {
        Err(VaultError::MissingFrontmatter(_)) => {
            Err(VaultError::MissingFrontmatter(path.to_path_buf()))
        }
        other => other,
    }
}
/// Locate the closing `---` fence inside `s`, returning the byte offset of the
/// start of that line, or `None` when no closing fence exists.
///
/// Iterates with `split_inclusive('\n')` so the running offset counts the real
/// line terminators. The previous `lines()`-based sum (`len() + 1` per line)
/// assumed a one-byte `\n` and drifted by one byte per line on CRLF files,
/// making the caller slice the YAML/body at the wrong position.
fn find_closing_delimiter(s: &str) -> Option<usize> {
    let mut offset = 0;
    for line in s.split_inclusive('\n') {
        // trim() strips the terminator (and any stray whitespace) before comparing.
        if line.trim() == DELIMITER {
            return Some(offset);
        }
        offset += line.len();
    }
    None
}
/// Parse frontmatter YAML into a typed struct.
///
/// Thin wrapper over `serde_yaml::from_str` that lifts the error into
/// `VaultError::Yaml`.
pub fn parse_entity<T: DeserializeOwned>(yaml: &str) -> Result<T, VaultError> {
    serde_yaml::from_str(yaml).map_err(VaultError::Yaml)
}
/// Serialize frontmatter and combine with body, preserving body byte-for-byte.
pub fn write_frontmatter<T: Serialize>(frontmatter: &T, body: &str) -> Result<String, VaultError> {
    let yaml = serde_yaml::to_string(frontmatter).map_err(VaultError::Yaml)?;
    // serde_yaml normally emits a trailing newline; insert one if it did not,
    // so the closing fence always sits on its own line.
    let separator = if yaml.ends_with('\n') { "" } else { "\n" };
    let mut document = format!("{DELIMITER}\n{yaml}{separator}{DELIMITER}\n");
    if !body.is_empty() {
        document.push_str(body);
    }
    Ok(document)
}
/// Update specific fields in frontmatter YAML without re-serializing the entire struct.
/// This preserves unknown fields and ordering as much as possible.
pub fn update_frontmatter_fields(
content: &str,
path: &Path,
updates: &serde_json::Value,
) -> Result<String, VaultError> {
let (yaml, body) = split_frontmatter_with_path(content, path)?;
let mut mapping: serde_yaml::Value = serde_yaml::from_str(yaml).map_err(VaultError::Yaml)?;
if let (serde_yaml::Value::Mapping(ref mut map), serde_json::Value::Object(ref obj)) =
(&mut mapping, updates)
{
for (key, value) in obj {
let yaml_key = serde_yaml::Value::String(key.clone());
let yaml_value: serde_yaml::Value =
serde_json::from_value(value.clone()).unwrap_or(serde_yaml::Value::Null);
map.insert(yaml_key, yaml_value);
}
}
let yaml_out = serde_yaml::to_string(&mapping).map_err(VaultError::Yaml)?;
let mut out = String::new();
out.push_str(DELIMITER);
out.push('\n');
out.push_str(&yaml_out);
if !yaml_out.ends_with('\n') {
out.push('\n');
}
out.push_str(DELIMITER);
out.push('\n');
if !body.is_empty() {
out.push_str(body);
}
Ok(out)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::types::Agent;
    // Basic split: YAML between the fences, body after the closing fence.
    #[test]
    fn test_split_frontmatter() {
        let content = "---\nname: test\n---\nHello world\n";
        let (yaml, body) = split_frontmatter(content).unwrap();
        assert_eq!(yaml, "name: test\n");
        assert_eq!(body, "Hello world\n");
    }
    // Files without a leading fence must be rejected.
    #[test]
    fn test_split_missing_frontmatter() {
        let content = "Hello world\n";
        assert!(split_frontmatter(content).is_err());
    }
    // write -> split -> parse must preserve the body byte-for-byte and
    // round-trip the frontmatter fields.
    #[test]
    fn test_roundtrip() {
        let original_body = "# System Prompt\n\nYou are a helpful agent.\n\n- Rule 1\n- Rule 2\n";
        let agent = Agent {
            name: "test-agent".into(),
            executable: "claude-code".into(),
            model: Some("sonnet".into()),
            escalate_to: None,
            escalate_when: vec![],
            mcp_servers: vec![],
            skills: vec!["read-vault".into()],
            timeout: 600,
            max_retries: 2,
            env: Default::default(),
        };
        let written = write_frontmatter(&agent, original_body).unwrap();
        let (yaml, body) = split_frontmatter(&written).unwrap();
        let parsed: Agent = parse_entity(yaml).unwrap();
        assert_eq!(parsed.name, "test-agent");
        assert_eq!(parsed.executable, "claude-code");
        assert_eq!(body, original_body);
    }
    // Field updates merge into the mapping and leave the body intact.
    #[test]
    fn test_update_fields() {
        let content = "---\nname: test\nschedule: '* * * * *'\n---\nBody\n";
        let updates = serde_json::json!({
            "last_run": "2024-01-01T00:00:00Z",
            "run_count": 5
        });
        let result =
            update_frontmatter_fields(content, Path::new("test.md"), &updates).unwrap();
        assert!(result.contains("last_run"));
        assert!(result.contains("run_count"));
        assert!(result.contains("Body\n"));
    }
}

View file

@ -0,0 +1,12 @@
//! vault-core: shared types and vault-file primitives used by the other crates.
pub mod config;
pub mod entity;
pub mod error;
pub mod filesystem;
pub mod frontmatter;
pub mod prompt;
pub mod search;
pub mod types;
pub mod validation;
pub use error::VaultError;
/// Convenience alias defaulting the error type to [`VaultError`].
pub type Result<T> = std::result::Result<T, VaultError>;

View file

@ -0,0 +1,64 @@
use crate::entity::VaultEntity;
use crate::error::VaultError;
use crate::filesystem;
use crate::types::{Agent, Skill};
use std::path::Path;
/// Resolve a skill name to its file path under the vault's `skills/` directory.
///
/// Checks `skills/{name}.md`, then `skills/vault/{name}.md`, and finally falls
/// back to a recursive scan of `skills/` matching on the file stem.
pub fn resolve_skill_path(vault_root: &Path, skill_name: &str) -> Option<std::path::PathBuf> {
    let file_name = format!("{}.md", skill_name);
    // Fast paths: the two conventional locations.
    for dir in ["skills", "skills/vault"] {
        let candidate = vault_root.join(dir).join(&file_name);
        if candidate.exists() {
            return Some(candidate);
        }
    }
    // Slow path: walk skills/ recursively and match on the file stem.
    filesystem::list_md_files_recursive(&vault_root.join("skills"))
        .ok()?
        .into_iter()
        .find(|file| {
            file.file_stem()
                .map(|stem| stem == skill_name)
                .unwrap_or(false)
        })
}
/// Compose the full prompt for an agent execution.
/// Agent body + skill bodies appended under `## Skills` sections.
///
/// Unresolvable skills are logged and skipped; an unreadable skill file is a
/// hard error. The optional task context is appended last under `## Task`.
pub fn compose_prompt(
    vault_root: &Path,
    agent: &VaultEntity<Agent>,
    task_context: Option<&str>,
) -> Result<String, VaultError> {
    let mut prompt = agent.body.clone();
    let skills = &agent.frontmatter.skills;
    if !skills.is_empty() {
        prompt.push_str("\n\n## Skills\n");
        for name in skills {
            match resolve_skill_path(vault_root, name) {
                Some(path) => {
                    let skill: VaultEntity<Skill> = filesystem::read_entity(&path)?;
                    prompt.push_str(&format!("\n### {}\n", skill.frontmatter.name));
                    prompt.push_str(&skill.body);
                }
                None => tracing::warn!(skill = %name, "Skill not found, skipping"),
            }
        }
    }
    if let Some(ctx) = task_context {
        prompt.push_str("\n\n## Task\n\n");
        prompt.push_str(ctx);
    }
    Ok(prompt)
}

View file

@ -0,0 +1,164 @@
use crate::filesystem::{list_md_files_recursive, read_entity};
use crate::frontmatter::split_frontmatter_with_path;
use crate::types::KnowledgeNote;
use std::path::Path;
/// A single serializable search hit.
#[derive(Debug, Clone, serde::Serialize)]
pub struct SearchResult {
    /// Path relative to the vault root.
    pub path: String,
    /// Frontmatter `title` (or `name`), falling back to the relative path.
    pub title: String,
    /// Short excerpt around the first match.
    pub snippet: String,
    /// Relevance score; higher is better.
    pub score: f64,
}
/// Search vault files by query string.
/// Matches against frontmatter title, tags, and body content.
///
/// The query is lowercased and split on whitespace; a file must match every
/// term (AND semantics, see `score_file`). At most 50 results are returned,
/// best score first.
pub fn search_vault(vault_root: &Path, query: &str) -> Vec<SearchResult> {
    let lowered = query.to_lowercase();
    let terms: Vec<&str> = lowered.split_whitespace().collect();
    if terms.is_empty() {
        return Vec::new();
    }
    // Directories considered searchable.
    const SEARCH_DIRS: [&str; 5] = ["knowledge", "agents", "skills", "todos/harald", "todos/agent"];
    let mut hits: Vec<SearchResult> = SEARCH_DIRS
        .iter()
        .map(|dir| vault_root.join(dir))
        .filter(|dir| dir.exists())
        .filter_map(|dir| list_md_files_recursive(&dir).ok())
        .flatten()
        .filter_map(|path| score_file(&path, vault_root, &terms))
        .collect();
    hits.sort_by(|a, b| b.score.partial_cmp(&a.score).unwrap_or(std::cmp::Ordering::Equal));
    hits.truncate(50);
    hits
}
/// Search specifically by tag.
///
/// Scans `knowledge/` for notes whose frontmatter `tags` contain `tag`
/// (case-insensitive). Unreadable files are skipped best-effort.
pub fn search_by_tag(vault_root: &Path, tag: &str) -> Vec<SearchResult> {
    let wanted = tag.to_lowercase();
    let files = match list_md_files_recursive(&vault_root.join("knowledge")) {
        Ok(files) => files,
        Err(_) => return Vec::new(),
    };
    let mut matches = Vec::new();
    for path in files {
        let Ok(entity) = read_entity::<KnowledgeNote>(&path) else {
            continue;
        };
        let tagged = entity
            .frontmatter
            .tags
            .iter()
            .any(|t| t.to_lowercase() == wanted);
        if !tagged {
            continue;
        }
        let relative = path
            .strip_prefix(vault_root)
            .unwrap_or(&path)
            .to_string_lossy()
            .to_string();
        matches.push(SearchResult {
            title: entity
                .frontmatter
                .title
                .unwrap_or_else(|| relative.clone()),
            path: relative,
            snippet: entity.body.chars().take(120).collect(),
            score: 1.0,
        });
    }
    matches
}
/// Score a single file against the query terms (AND semantics).
///
/// Each term scores +3 for a path match and up to +2 for body occurrences
/// (1.0 base plus 0.2 per occurrence, capped at five). A file that misses any
/// term is rejected. Returns `None` for unreadable or non-matching files.
fn score_file(path: &Path, vault_root: &Path, terms: &[&str]) -> Option<SearchResult> {
    let content = std::fs::read_to_string(path).ok()?;
    let content_lower = content.to_lowercase();
    let relative = path
        .strip_prefix(vault_root)
        .unwrap_or(path)
        .to_string_lossy()
        .to_string();
    // Hoisted: previously recomputed for every term.
    let relative_lower = relative.to_lowercase();
    let mut score = 0.0;
    for term in terms {
        let mut term_score = 0.0;
        // Path/title matches carry higher weight.
        if relative_lower.contains(term) {
            term_score += 3.0;
        }
        // Body/content matches.
        let count = content_lower.matches(term).count();
        if count > 0 {
            term_score += 1.0 + (count as f64).min(5.0) * 0.2;
        }
        if term_score == 0.0 {
            // AND semantics: every term must match somewhere.
            return None;
        }
        score += term_score;
    }
    if score == 0.0 {
        return None;
    }
    // Extract a display title from the frontmatter. Parse the YAML once and
    // try `title` then `name` (previously the YAML was parsed twice).
    let title = split_frontmatter_with_path(&content, path)
        .ok()
        .and_then(|(yaml, _body)| {
            let fm: serde_json::Value = serde_yaml::from_str(yaml).ok()?;
            fm.get("title")
                .and_then(|t| t.as_str())
                .or_else(|| fm.get("name").and_then(|t| t.as_str()))
                .map(String::from)
        })
        .unwrap_or_else(|| relative.clone());
    // Snippet around the first query term.
    let snippet = extract_snippet(&content, terms.first().unwrap_or(&""));
    Some(SearchResult {
        path: relative,
        title,
        snippet,
        score,
    })
}
/// Extract a one-line snippet (max 150 chars) around the first
/// case-insensitive occurrence of `term`, or the first line when absent.
///
/// The match position comes from the *lowercased* text, but the slice is taken
/// from the original. `to_lowercase()` can change byte lengths (e.g. 'İ' →
/// "i̇"), and `pos ± N` arithmetic can land inside a multi-byte character, so
/// every computed index is clamped into range and snapped to a char boundary
/// to avoid slice panics; the previous code indexed `content` directly.
fn extract_snippet(content: &str, term: &str) -> String {
    let lower = content.to_lowercase();
    let Some(raw_pos) = lower.find(&term.to_lowercase()) else {
        return content.lines().next().unwrap_or("").chars().take(150).collect();
    };
    let pos = floor_char_boundary(content, raw_pos.min(content.len()));
    // Prefer the enclosing line; otherwise back up ~60 bytes.
    let start = content[..pos]
        .rfind('\n')
        .map(|p| p + 1)
        .unwrap_or_else(|| floor_char_boundary(content, pos.saturating_sub(60)));
    let end = content[pos..]
        .find('\n')
        .map(|p| pos + p)
        .unwrap_or_else(|| floor_char_boundary(content, (pos + 120).min(content.len())));
    content[start..end].chars().take(150).collect()
}

/// Largest byte index `<= i` that is a valid char boundary in `s`.
fn floor_char_boundary(s: &str, mut i: usize) -> usize {
    while i > 0 && !s.is_char_boundary(i) {
        i -= 1;
    }
    i
}

View file

@ -0,0 +1,204 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Task priority; serialized as lowercase strings in frontmatter.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum Priority {
    Urgent,
    High,
    /// Default when frontmatter omits a priority.
    #[default]
    Medium,
    Low,
}
/// Outcome recorded for a finished run (e.g. a cron's `last_status`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum RunStatus {
    Success,
    Failure,
    Timeout,
}
/// Lifecycle states for human tasks.
// NOTE(review): variants appear to mirror the todos/harald/* folder names
// (e.g. "urgent") — confirm against the entity classifier.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum TaskStatus {
    Urgent,
    Open,
    /// Explicit rename: the lowercase rule alone would give "inprogress".
    #[serde(rename = "in-progress")]
    InProgress,
    Done,
}
/// Lifecycle states for agent tasks (todos/agent/* queues).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum AgentTaskStatus {
    Queued,
    Running,
    Done,
    Failed,
}
/// Frontmatter for an agent definition under `agents/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Agent {
    pub name: String,
    /// Executor selector: "claude-code", "ollama", "openai-compat", or a
    /// filesystem path (see validation.rs).
    pub executable: String,
    #[serde(default)]
    pub model: Option<String>,
    // Escalation target/conditions — consumed elsewhere, not visible here.
    #[serde(default)]
    pub escalate_to: Option<String>,
    #[serde(default)]
    pub escalate_when: Vec<String>,
    #[serde(default)]
    pub mcp_servers: Vec<String>,
    /// Skill names resolved against `skills/` when composing the prompt.
    #[serde(default)]
    pub skills: Vec<String>,
    /// Per-run timeout, presumably seconds (default 600) — confirm where it is
    /// converted to a Duration.
    #[serde(default = "default_timeout")]
    pub timeout: u64,
    #[serde(default)]
    pub max_retries: u32,
    /// Extra environment for the spawned executor; values may use `${VAR}`
    /// expansion (see GenericProcessExecutor::expand_env).
    #[serde(default)]
    pub env: HashMap<String, String>,
}
/// Serde default for `Agent::timeout`.
fn default_timeout() -> u64 {
    600
}
/// Frontmatter for a reusable skill under `skills/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Skill {
    pub name: String,
    pub description: String,
    #[serde(default)]
    pub version: Option<u32>,
    /// MCP servers this skill depends on.
    #[serde(default)]
    pub requires_mcp: Vec<String>,
    #[serde(default)]
    pub inputs: Vec<String>,
    #[serde(default)]
    pub outputs: Vec<String>,
}
/// Frontmatter for a scheduled job under `crons/` (5-field cron syntax;
/// validation.rs / the cron engine prepend a seconds field for the `cron` crate).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CronJob {
    pub schedule: String,
    /// Name of the agent to run when the cron fires.
    pub agent: String,
    pub title: String,
    /// Disabled crons are skipped by the scheduler.
    #[serde(default = "default_true")]
    pub enabled: bool,
    // Bookkeeping fields written back by the scheduler on each fire.
    #[serde(default)]
    pub last_run: Option<DateTime<Utc>>,
    #[serde(default)]
    pub last_status: Option<RunStatus>,
    #[serde(default)]
    pub next_run: Option<DateTime<Utc>>,
    #[serde(default)]
    pub run_count: u64,
}
/// Serde default helper: crons are enabled unless stated otherwise.
fn default_true() -> bool {
    true
}
/// Frontmatter for a human todo under `todos/harald/`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HumanTask {
    pub title: String,
    #[serde(default)]
    pub priority: Priority,
    #[serde(default)]
    pub source: Option<String>,
    #[serde(default)]
    pub repo: Option<String>,
    #[serde(default)]
    pub labels: Vec<String>,
    pub created: DateTime<Utc>,
    #[serde(default)]
    pub due: Option<DateTime<Utc>>,
}
/// Frontmatter for an agent task under `todos/agent/` (the cron engine creates
/// these with `task_type: "cron"`; the task runner executes them).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentTask {
    pub title: String,
    pub agent: String,
    #[serde(default)]
    pub priority: Priority,
    /// Origin tag; serialized as `type` ("type" is a Rust keyword).
    #[serde(default, rename = "type")]
    pub task_type: Option<String>,
    pub created: DateTime<Utc>,
    #[serde(default)]
    pub started: Option<DateTime<Utc>>,
    #[serde(default)]
    pub completed: Option<DateTime<Utc>>,
    // Retry bookkeeping — semantics live in the task runner (not visible here).
    #[serde(default)]
    pub retry: u32,
    #[serde(default)]
    pub max_retries: u32,
    #[serde(default)]
    pub input: Option<serde_json::Value>,
    #[serde(default)]
    pub output: Option<serde_json::Value>,
    #[serde(default)]
    pub error: Option<String>,
}
/// Frontmatter for a note under `knowledge/`; every field is optional.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KnowledgeNote {
    #[serde(default)]
    pub title: Option<String>,
    #[serde(default)]
    pub tags: Vec<String>,
    #[serde(default)]
    pub source: Option<String>,
    #[serde(default)]
    pub created: Option<DateTime<Utc>>,
    #[serde(default)]
    pub related: Vec<String>,
}
/// Frontmatter for a dashboard view or widget definition.
// NOTE(review): one struct serves both views and widgets (see the
// widget-specific fields below) — confirm this shared shape is intended.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ViewDefinition {
    /// Discriminator; serialized as `type`.
    #[serde(rename = "type")]
    pub view_type: String,
    pub title: Option<String>,
    #[serde(default)]
    pub icon: Option<String>,
    #[serde(default)]
    pub route: Option<String>,
    #[serde(default)]
    pub position: Option<i32>,
    #[serde(default)]
    pub layout: Option<String>,
    /// Layout region name -> widgets placed in that region.
    #[serde(default)]
    pub regions: HashMap<String, Vec<WidgetInstance>>,
    // Widget-specific fields
    #[serde(default)]
    pub name: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub component: Option<String>,
    #[serde(default)]
    pub props_schema: Option<serde_json::Value>,
}
/// A widget placed in a view region, with free-form props.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WidgetInstance {
    pub widget: String,
    #[serde(default)]
    pub props: serde_json::Value,
}
/// Frontmatter for a notification entry.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Notification {
    pub title: String,
    #[serde(default)]
    pub message: Option<String>,
    #[serde(default)]
    pub level: Option<String>,
    #[serde(default)]
    pub source: Option<String>,
    #[serde(default)]
    pub created: Option<DateTime<Utc>>,
    #[serde(default)]
    pub expires: Option<DateTime<Utc>>,
}

View file

@ -0,0 +1,317 @@
use crate::entity::{classify_path, EntityKind};
use crate::frontmatter::split_frontmatter_with_path;
use crate::types::*;
use std::collections::HashSet;
use std::path::Path;
/// A single validation finding for a vault file.
///
/// `Serialize` is now derived: the previous hand-written impl emitted the same
/// struct ("ValidationIssue" with fields level/field/message in that order),
/// so the derive is byte-for-byte equivalent output with less boilerplate.
#[derive(Debug, Clone, serde::Serialize)]
pub struct ValidationIssue {
    pub level: IssueLevel,
    /// Frontmatter field the issue refers to, when attributable.
    pub field: Option<String>,
    pub message: String,
}
/// Severity of a [`ValidationIssue`]; serialized lowercase.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize)]
#[serde(rename_all = "lowercase")]
pub enum IssueLevel {
    Error,
    Warning,
}
/// Validate a vault file given its relative path and raw content.
///
/// Missing/malformed frontmatter is a single hard error; otherwise the file is
/// dispatched to the validator matching its classified entity kind. Kinds with
/// no validator produce no issues.
pub fn validate(relative_path: &Path, content: &str) -> Vec<ValidationIssue> {
    let mut issues = Vec::new();
    let yaml = match split_frontmatter_with_path(content, relative_path) {
        Ok((yaml, _body)) => yaml,
        Err(_) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: "Missing or malformed frontmatter".into(),
            });
            return issues;
        }
    };
    match classify_path(relative_path) {
        EntityKind::Agent => validate_agent(yaml, &mut issues),
        EntityKind::Skill => validate_skill(yaml, &mut issues),
        EntityKind::CronActive | EntityKind::CronPaused | EntityKind::CronTemplate => {
            validate_cron(yaml, &mut issues)
        }
        EntityKind::HumanTask(_) => validate_human_task(yaml, &mut issues),
        EntityKind::AgentTask(_) => validate_agent_task(yaml, &mut issues),
        _ => {}
    }
    issues
}
/// Validate agent frontmatter: name/executable required; unknown executors
/// that are not paths draw a warning.
fn validate_agent(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let agent: Agent = match serde_yaml::from_str(yaml) {
        Ok(parsed) => parsed,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid agent frontmatter: {e}"),
            });
            return;
        }
    };
    if agent.name.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("name".into()),
            message: "Agent name is required".into(),
        });
    }
    if agent.executable.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("executable".into()),
            message: "Agent executable is required".into(),
        });
    }
    const KNOWN_EXECUTORS: [&str; 3] = ["claude-code", "ollama", "openai-compat"];
    let exe = agent.executable.as_str();
    let looks_like_path = exe.starts_with('/') || exe.contains('/');
    if !KNOWN_EXECUTORS.contains(&exe) && !looks_like_path {
        issues.push(ValidationIssue {
            level: IssueLevel::Warning,
            field: Some("executable".into()),
            message: format!(
                "Executable '{}' is not a known executor. Expected one of: {:?} or an absolute path",
                exe, KNOWN_EXECUTORS
            ),
        });
    }
}
/// Validate skill frontmatter: name is required; a missing description is only
/// a warning.
fn validate_skill(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let skill: Skill = match serde_yaml::from_str(yaml) {
        Ok(parsed) => parsed,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid skill frontmatter: {e}"),
            });
            return;
        }
    };
    if skill.name.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("name".into()),
            message: "Skill name is required".into(),
        });
    }
    if skill.description.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Warning,
            field: Some("description".into()),
            message: "Skill should have a description".into(),
        });
    }
}
/// Validate cron frontmatter: title/agent required, and the schedule must be a
/// parseable cron expression.
fn validate_cron(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let job: CronJob = match serde_yaml::from_str(yaml) {
        Ok(parsed) => parsed,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid cron frontmatter: {e}"),
            });
            return;
        }
    };
    if job.title.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("title".into()),
            message: "Cron title is required".into(),
        });
    }
    if job.agent.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("agent".into()),
            message: "Cron agent is required".into(),
        });
    }
    // The `cron` crate wants a seconds field; normalize standard 5-field
    // expressions by prepending "0 ".
    let expr = if job.schedule.split_whitespace().count() == 5 {
        format!("0 {}", job.schedule)
    } else {
        job.schedule.clone()
    };
    if cron::Schedule::from_str(&expr).is_err() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("schedule".into()),
            message: format!("Invalid cron expression: '{}'", job.schedule),
        });
    }
}
/// Validate human-task frontmatter: only the title is mandatory.
fn validate_human_task(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let task: HumanTask = match serde_yaml::from_str(yaml) {
        Ok(parsed) => parsed,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid task frontmatter: {e}"),
            });
            return;
        }
    };
    if task.title.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("title".into()),
            message: "Task title is required".into(),
        });
    }
}
/// Validate agent-task frontmatter: title and agent are both required.
fn validate_agent_task(yaml: &str, issues: &mut Vec<ValidationIssue>) {
    let task: AgentTask = match serde_yaml::from_str(yaml) {
        Ok(parsed) => parsed,
        Err(e) => {
            issues.push(ValidationIssue {
                level: IssueLevel::Error,
                field: None,
                message: format!("Invalid agent task frontmatter: {e}"),
            });
            return;
        }
    };
    if task.title.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("title".into()),
            message: "Task title is required".into(),
        });
    }
    if task.agent.is_empty() {
        issues.push(ValidationIssue {
            level: IssueLevel::Error,
            field: Some("agent".into()),
            message: "Agent name is required for agent tasks".into(),
        });
    }
}
/// Validate that references between entities are valid.
/// Checks that agent skills and cron agents exist.
///
/// Returns `(entity display name, issue)` pairs; all findings are warnings.
/// Unreadable files are skipped best-effort.
pub fn validate_references(
    vault_root: &Path,
    agent_names: &HashSet<String>,
    skill_names: &HashSet<String>,
) -> Vec<(String, ValidationIssue)> {
    let mut issues = Vec::new();
    // Agents must only reference skills that exist.
    let agent_files =
        crate::filesystem::list_md_files(&vault_root.join("agents")).unwrap_or_default();
    for path in agent_files {
        let Ok(agent) = crate::filesystem::read_entity::<Agent>(&path) else {
            continue;
        };
        for skill in &agent.frontmatter.skills {
            if skill_names.contains(skill) {
                continue;
            }
            issues.push((
                agent.frontmatter.name.clone(),
                ValidationIssue {
                    level: IssueLevel::Warning,
                    field: Some("skills".into()),
                    message: format!("Referenced skill '{}' not found", skill),
                },
            ));
        }
    }
    // Active crons must reference a known agent.
    let cron_files =
        crate::filesystem::list_md_files(&vault_root.join("crons/active")).unwrap_or_default();
    for path in cron_files {
        let Ok(cron) = crate::filesystem::read_entity::<CronJob>(&path) else {
            continue;
        };
        if agent_names.contains(&cron.frontmatter.agent) {
            continue;
        }
        issues.push((
            cron.frontmatter.title.clone(),
            ValidationIssue {
                level: IssueLevel::Warning,
                field: Some("agent".into()),
                message: format!(
                    "Referenced agent '{}' not found",
                    cron.frontmatter.agent
                ),
            },
        ));
    }
    issues
}
use std::str::FromStr;
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;
    // A well-formed agent file yields no issues.
    #[test]
    fn test_validate_valid_agent() {
        let content = "---\nname: test-agent\nexecutable: claude-code\n---\nBody";
        let issues = validate(Path::new("agents/test-agent.md"), content);
        assert!(issues.is_empty(), "Expected no issues: {:?}", issues);
    }
    // An explicitly empty name must be flagged on the "name" field.
    #[test]
    fn test_validate_agent_missing_name() {
        let content = "---\nname: \"\"\nexecutable: claude-code\n---\n";
        let issues = validate(Path::new("agents/bad.md"), content);
        assert!(issues.iter().any(|i| i.field.as_deref() == Some("name")));
    }
    // No frontmatter fence at all -> exactly one hard error.
    #[test]
    fn test_validate_missing_frontmatter() {
        let content = "No frontmatter here";
        let issues = validate(Path::new("agents/bad.md"), content);
        assert_eq!(issues.len(), 1);
        assert_eq!(issues[0].level, IssueLevel::Error);
    }
    // An unparseable schedule must be flagged on the "schedule" field.
    #[test]
    fn test_validate_cron_bad_expression() {
        let content = "---\ntitle: bad\nagent: test\nschedule: \"not a cron\"\n---\n";
        let issues = validate(Path::new("crons/active/bad.md"), content);
        assert!(issues
            .iter()
            .any(|i| i.field.as_deref() == Some("schedule")));
    }
}

View file

@ -0,0 +1,19 @@
# vault-scheduler: cron engine, executors, and task runner.
# Versions are pinned in the workspace-level Cargo.toml.
[package]
name = "vault-scheduler"
version.workspace = true
edition.workspace = true
[dependencies]
vault-core.workspace = true
vault-watch.workspace = true
tokio.workspace = true
cron.workspace = true
chrono.workspace = true
tracing.workspace = true
thiserror.workspace = true
async-trait.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
uuid.workspace = true

View file

@ -0,0 +1,206 @@
use chrono::{DateTime, Utc};
use cron::Schedule;
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use vault_core::entity::VaultEntity;
use vault_core::error::VaultError;
use vault_core::filesystem;
use vault_core::frontmatter;
use vault_core::types::CronJob;
/// Errors raised while building or firing the cron schedule.
#[derive(Debug, thiserror::Error)]
pub enum CronError {
    /// The schedule string could not be parsed by the `cron` crate.
    #[error("Invalid cron expression '{expr}': {reason}")]
    InvalidExpression { expr: String, reason: String },
    /// Underlying vault I/O or parse failure.
    #[error("Vault error: {0}")]
    Vault(#[from] VaultError),
}
/// Heap entry pairing a cron file with its next computed fire time.
#[derive(Debug, Clone, PartialEq, Eq)]
struct ScheduleEntry {
    next_fire: DateTime<Utc>,
    path: PathBuf,
}
impl PartialOrd for ScheduleEntry {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for ScheduleEntry {
    /// Order by fire time, tie-breaking on path.
    ///
    /// The previous impl compared `next_fire` alone, so two entries with equal
    /// fire times but different paths compared `Equal` while `==` (derived on
    /// both fields) said they differed — a violation of the `Ord` contract,
    /// which requires consistency with `Eq`.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.next_fire
            .cmp(&other.next_fire)
            .then_with(|| self.path.cmp(&other.path))
    }
}
/// Maintains the fire-time schedule for all active cron files in the vault.
pub struct CronEngine {
    vault_root: PathBuf,
    // Min-heap via Reverse: the earliest next_fire is always at the top.
    schedule: BinaryHeap<Reverse<ScheduleEntry>>,
}
impl CronEngine {
    /// Create an empty engine; call `rebuild_schedule` to populate it.
    pub fn new(vault_root: PathBuf) -> Self {
        Self {
            vault_root,
            schedule: BinaryHeap::new(),
        }
    }
    /// Rebuild the entire schedule by scanning `crons/active/`.
    /// Invalid cron files are logged and skipped rather than aborting the scan.
    pub fn rebuild_schedule(&mut self) -> Result<(), CronError> {
        self.schedule.clear();
        let active_dir = self.vault_root.join("crons/active");
        let files = filesystem::list_md_files(&active_dir)?;
        for file in files {
            if let Err(e) = self.add_cron(&file) {
                tracing::warn!(?file, error = %e, "Skipping invalid cron");
            }
        }
        tracing::info!(count = self.schedule.len(), "Rebuilt cron schedule");
        Ok(())
    }
    /// Add or update a cron job in the schedule.
    pub fn upsert_cron(&mut self, path: &Path) -> Result<(), CronError> {
        self.remove_cron(path);
        self.add_cron(path)
    }
    /// Remove a cron job from the schedule.
    // BinaryHeap has no remove-by-key, so drain, filter, and rebuild (O(n)).
    pub fn remove_cron(&mut self, path: &Path) {
        let entries: Vec<_> = self
            .schedule
            .drain()
            .filter(|Reverse(e)| e.path != path)
            .collect();
        self.schedule = entries.into_iter().collect();
    }
    /// Get the next fire time, if any crons are scheduled.
    pub fn next_fire_time(&self) -> Option<DateTime<Utc>> {
        self.schedule.peek().map(|Reverse(e)| e.next_fire)
    }
    /// Pop all crons that are due (fire time <= now).
    pub fn pop_due(&mut self) -> Vec<PathBuf> {
        let now = Utc::now();
        let mut due = Vec::new();
        // Reverse turns the max-heap into a min-heap, so the earliest entry is
        // always at the top; stop at the first entry in the future.
        while let Some(Reverse(entry)) = self.schedule.peek() {
            if entry.next_fire <= now {
                let Reverse(entry) = self.schedule.pop().unwrap();
                due.push(entry.path);
            } else {
                break;
            }
        }
        due
    }
    /// Fire a cron: create an agent task in queued/, update cron frontmatter.
    /// Returns the path to the created agent task.
    #[tracing::instrument(skip(self, write_filter), fields(cron = ?cron_path.file_name()))]
    pub fn fire_cron(
        &mut self,
        cron_path: &Path,
        write_filter: &vault_watch::write_filter::DaemonWriteFilter,
    ) -> Result<PathBuf, CronError> {
        let entity: VaultEntity<CronJob> = filesystem::read_entity(cron_path)?;
        let cron = &entity.frontmatter;
        // Create agent task
        let slug = filesystem::timestamped_slug(&cron.title);
        let task_path = self
            .vault_root
            .join("todos/agent/queued")
            .join(format!("{}.md", slug));
        let now = Utc::now();
        let agent_task = vault_core::types::AgentTask {
            title: cron.title.clone(),
            agent: cron.agent.clone(),
            priority: vault_core::types::Priority::Medium,
            task_type: Some("cron".into()),
            created: now,
            started: None,
            completed: None,
            retry: 0,
            max_retries: 0,
            input: None,
            output: None,
            error: None,
        };
        // The cron file's markdown body becomes the task body.
        let task_entity = VaultEntity {
            path: task_path.clone(),
            frontmatter: agent_task,
            body: entity.body.clone(),
        };
        // Registered before writing — presumably so the filesystem watcher
        // ignores the daemon's own write; confirm against DaemonWriteFilter.
        write_filter.register(task_path.clone());
        filesystem::write_entity(&task_entity)?;
        // Update cron frontmatter
        let content = std::fs::read_to_string(cron_path)
            .map_err(|e| VaultError::io(e, cron_path))?;
        // NOTE(review): last_status is set to "success" at fire time, before
        // the task has actually run — confirm it is meant to record "the cron
        // fired", not "the task succeeded".
        let updates = serde_json::json!({
            "last_run": now.to_rfc3339(),
            "last_status": "success",
            "run_count": cron.run_count + 1,
        });
        let updated = frontmatter::update_frontmatter_fields(&content, cron_path, &updates)?;
        write_filter.register(cron_path.to_path_buf());
        std::fs::write(cron_path, updated).map_err(|e| VaultError::io(e, cron_path))?;
        // Re-schedule this cron
        if let Err(e) = self.add_cron(cron_path) {
            tracing::warn!(?cron_path, error = %e, "Failed to reschedule cron");
        }
        tracing::info!(
            cron = %cron.title,
            agent = %cron.agent,
            task = ?task_path,
            "Fired cron job"
        );
        Ok(task_path)
    }
    /// Parse a cron file and push its next fire time onto the heap.
    /// Disabled crons are silently ignored.
    fn add_cron(&mut self, path: &Path) -> Result<(), CronError> {
        let entity: VaultEntity<CronJob> = filesystem::read_entity(path)?;
        let cron = &entity.frontmatter;
        if !cron.enabled {
            return Ok(());
        }
        // cron crate expects 6 or 7 fields (sec min hour dom month dow [year])
        // Standard 5-field cron: prepend "0 " for seconds
        // NOTE(review): validate_cron only prepends "0 " for 5-field
        // expressions, but this prepends unconditionally — a 6-field schedule
        // that passes validation would fail here. Consider matching that logic.
        let expr = format!("0 {}", cron.schedule);
        let schedule = Schedule::from_str(&expr).map_err(|e| CronError::InvalidExpression {
            expr: cron.schedule.clone(),
            reason: e.to_string(),
        })?;
        if let Some(next) = schedule.upcoming(Utc).next() {
            self.schedule.push(Reverse(ScheduleEntry {
                next_fire: next,
                path: path.to_path_buf(),
            }));
        }
        Ok(())
    }
    /// Number of entries currently in the schedule heap.
    pub fn scheduled_count(&self) -> usize {
        self.schedule.len()
    }
}

View file

@ -0,0 +1,41 @@
use std::collections::HashMap;
use std::time::Duration;
/// Captured output of one completed execution.
#[derive(Debug, Clone)]
pub struct ExecutionResult {
    pub stdout: String,
    pub stderr: String,
    /// Process exit code; `None` when the process was terminated by a signal.
    pub exit_code: Option<i32>,
    /// Wall-clock time the execution took.
    pub duration: Duration,
}
/// Failure modes of an [`Executor`] run.
#[derive(Debug, thiserror::Error)]
pub enum ExecutionError {
    /// The child outlived the configured timeout.
    #[error("Execution timed out after {0:?}")]
    Timeout(Duration),
    #[error("Process failed to start: {0}")]
    SpawnFailed(String),
    /// The process ran but exited non-zero; stderr is carried for diagnostics.
    #[error("Process exited with code {code}: {stderr}")]
    NonZeroExit { code: i32, stderr: String },
    /// HTTP-level failure reported by an executor.
    #[error("HTTP error: {0}")]
    Http(String),
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
}
/// Abstraction over how an agent run is executed (e.g. process spawn).
#[async_trait::async_trait]
pub trait Executor: Send + Sync {
    /// Run `executable` with the composed `system_prompt` and `task_context`,
    /// returning captured output or an [`ExecutionError`] on spawn failure,
    /// non-zero exit, or timeout.
    async fn execute(
        &self,
        executable: &str,
        model: Option<&str>,
        system_prompt: &str,
        task_context: &str,
        env: &HashMap<String, String>,
        timeout: Duration,
    ) -> Result<ExecutionResult, ExecutionError>;
}

View file

@ -0,0 +1 @@
/// Process-spawning executor (`GenericProcessExecutor`).
pub mod process;

View file

@ -0,0 +1,132 @@
use crate::executor::{ExecutionError, ExecutionResult, Executor};
use std::collections::HashMap;
use std::time::{Duration, Instant};
use tokio::io::AsyncWriteExt;
use tokio::process::Command;
/// Generic process executor: spawns a child process, pipes prompt to stdin,
/// captures stdout/stderr.
pub struct GenericProcessExecutor {
    vault_path: std::path::PathBuf,
}
impl GenericProcessExecutor {
    /// Build an executor rooted at the given vault path (exported to children
    /// as `VAULT_PATH`).
    pub fn new(vault_path: std::path::PathBuf) -> Self {
        Self { vault_path }
    }
    /// Expand `${VAR}` references in environment variable values using the
    /// daemon's own process environment; unknown variables expand to the
    /// empty string. The string is re-scanned after each substitution, so a
    /// replacement containing further `${...}` references is expanded too.
    fn expand_env(value: &str) -> String {
        let mut expanded = value.to_string();
        loop {
            let Some(open) = expanded.find("${") else { break };
            // No closing brace: leave the remainder untouched.
            let Some(close_rel) = expanded[open..].find('}') else { break };
            let var_name = expanded[open + 2..open + close_rel].to_string();
            let substitution = std::env::var(&var_name).unwrap_or_default();
            expanded.replace_range(open..open + close_rel + 1, &substitution);
        }
        expanded
    }
}
#[async_trait::async_trait]
impl Executor for GenericProcessExecutor {
    /// Spawn `executable`, deliver the composed prompt, and capture output.
    ///
    /// "claude-code" is special-cased to run the `claude` CLI with the prompt
    /// passed as an argument; every other executable receives the prompt on
    /// stdin. Returns `Timeout` if the child outlives `timeout`, and
    /// `NonZeroExit` (with stderr) on failure exit codes.
    async fn execute(
        &self,
        executable: &str,
        model: Option<&str>,
        system_prompt: &str,
        task_context: &str,
        env: &HashMap<String, String>,
        timeout: Duration,
    ) -> Result<ExecutionResult, ExecutionError> {
        let start = Instant::now();
        // Build the full prompt
        let full_prompt = if task_context.is_empty() {
            system_prompt.to_string()
        } else {
            format!("{}\n\n## Task\n\n{}", system_prompt, task_context)
        };
        // Determine command and args based on executable type
        let (cmd, args) = if executable == "claude-code" {
            (
                "claude".to_string(),
                vec![
                    "--print".to_string(),
                    "--dangerously-skip-permissions".to_string(),
                    full_prompt.clone(),
                ],
            )
        } else {
            (executable.to_string(), vec![])
        };
        let mut command = Command::new(&cmd);
        command.args(&args);
        // Set environment
        command.env("VAULT_PATH", &self.vault_path);
        for (key, value) in env {
            // Agent-declared env values may reference ${VARS} of the daemon's
            // own environment.
            command.env(key, Self::expand_env(value));
        }
        if let Some(model) = model {
            command.env("MODEL", model);
        }
        // For non-claude executables, pipe prompt via stdin
        if executable != "claude-code" {
            command
                .stdin(std::process::Stdio::piped())
                .stdout(std::process::Stdio::piped())
                .stderr(std::process::Stdio::piped());
        } else {
            command
                .stdout(std::process::Stdio::piped())
                .stderr(std::process::Stdio::piped());
        }
        let mut child = command
            .spawn()
            .map_err(|e| ExecutionError::SpawnFailed(format!("{}: {}", cmd, e)))?;
        // Write prompt to stdin for non-claude executables
        // NOTE(review): the whole prompt is written before stdout is drained;
        // a child that fills its stdout pipe while we are still writing a very
        // large prompt could deadlock — confirm prompt sizes stay below the
        // pipe buffer or interleave write and read.
        if executable != "claude-code" {
            if let Some(mut stdin) = child.stdin.take() {
                stdin.write_all(full_prompt.as_bytes()).await?;
                drop(stdin);
            }
        }
        // Wait with timeout
        let output = match tokio::time::timeout(timeout, child.wait_with_output()).await {
            Ok(result) => result.map_err(|e| ExecutionError::SpawnFailed(e.to_string()))?,
            Err(_) => {
                // NOTE(review): the child is not explicitly killed on timeout
                // here — verify kill-on-drop behavior for this Command setup.
                return Err(ExecutionError::Timeout(timeout));
            }
        };
        let duration = start.elapsed();
        let stdout = String::from_utf8_lossy(&output.stdout).to_string();
        let stderr = String::from_utf8_lossy(&output.stderr).to_string();
        let exit_code = output.status.code();
        if output.status.success() {
            Ok(ExecutionResult {
                stdout,
                stderr,
                exit_code,
                duration,
            })
        } else {
            Err(ExecutionError::NonZeroExit {
                code: exit_code.unwrap_or(-1),
                stderr,
            })
        }
    }
}

View file

@ -0,0 +1,5 @@
//! vault-scheduler: cron scheduling and agent-task execution for vault-os.
pub mod cron_engine; // cron schedule evaluation (contents not shown here)
pub mod executor; // `Executor` trait plus execution result/error types
pub mod executors; // concrete `Executor` implementations (process spawning)
pub mod state; // `RuntimeState` counters persisted in .vault/state.json
pub mod task_runner; // `TaskRunner`: drives queued agent tasks to done/failed

View file

@ -0,0 +1,36 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::path::Path;
use vault_core::error::VaultError;
/// Daemon runtime counters persisted across restarts in `.vault/state.json`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct RuntimeState {
    // Timestamp of the most recent daemon startup, if any.
    pub last_startup: Option<DateTime<Utc>>,
    // Timestamp of the most recent recorded shutdown, if any.
    pub last_shutdown: Option<DateTime<Utc>>,
    // Lifetime count of agent tasks executed.
    pub total_tasks_executed: u64,
    // Lifetime count of cron firings.
    pub total_cron_fires: u64,
}
impl RuntimeState {
    /// Load persisted state from `<vault_root>/.vault/state.json`.
    ///
    /// A missing file yields `RuntimeState::default()`. A file that exists
    /// but does not parse is also treated as default — corrupt state must not
    /// prevent the daemon from starting. Only a read error is propagated.
    pub fn load(vault_root: &Path) -> Result<Self, VaultError> {
        let state_path = vault_root.join(".vault/state.json");
        if !state_path.exists() {
            return Ok(Self::default());
        }
        let content = std::fs::read_to_string(&state_path)
            .map_err(|e| VaultError::io(e, &state_path))?;
        // Deliberately swallow parse errors and fall back to defaults.
        let state: RuntimeState = serde_json::from_str(&content).unwrap_or_default();
        Ok(state)
    }

    /// Persist the state as pretty-printed JSON to `.vault/state.json`.
    ///
    /// Fix: ensure `.vault/` exists before writing — previously the first
    /// save on a freshly created vault failed with a NotFound I/O error.
    pub fn save(&self, vault_root: &Path) -> Result<(), VaultError> {
        let state_path = vault_root.join(".vault/state.json");
        if let Some(parent) = state_path.parent() {
            std::fs::create_dir_all(parent).map_err(|e| VaultError::io(e, &state_path))?;
        }
        let content =
            serde_json::to_string_pretty(self).map_err(|e| VaultError::InvalidEntity {
                path: state_path.clone(),
                reason: e.to_string(),
            })?;
        std::fs::write(&state_path, content).map_err(|e| VaultError::io(e, &state_path))
    }
}

View file

@ -0,0 +1,253 @@
use crate::executor::{ExecutionError, Executor};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::sync::Semaphore;
use vault_core::entity::VaultEntity;
use vault_core::error::VaultError;
use vault_core::filesystem;
use vault_core::frontmatter;
use vault_core::types::{Agent, AgentTask};
use vault_watch::write_filter::DaemonWriteFilter;
/// Executes queued agent tasks with bounded concurrency, moving each task
/// file through the todos/agent/{queued,running,done,failed} directories.
pub struct TaskRunner {
    // Root of the vault tree; all task/agent paths are joined onto this.
    vault_root: PathBuf,
    // Caps concurrent task executions (size set by `new`'s `max_parallel`).
    semaphore: Arc<Semaphore>,
    // Backend that actually runs the agent executable.
    executor: Arc<dyn Executor>,
    // Registers daemon-originated writes so the watcher can suppress them.
    write_filter: Arc<DaemonWriteFilter>,
}
impl TaskRunner {
    /// Create a runner rooted at `vault_root` that executes at most
    /// `max_parallel` tasks concurrently via the given `executor`.
    pub fn new(
        vault_root: PathBuf,
        max_parallel: usize,
        executor: Arc<dyn Executor>,
        write_filter: Arc<DaemonWriteFilter>,
    ) -> Self {
        Self {
            vault_root,
            semaphore: Arc::new(Semaphore::new(max_parallel)),
            executor,
            write_filter,
        }
    }
    /// Process all currently queued tasks.
    ///
    /// Spawns one detached tokio task per file in `todos/agent/queued` and
    /// returns the spawned paths. Concurrency is bounded by the semaphore
    /// acquired inside `execute_task`; failures are logged, not propagated.
    pub async fn process_queued(&self) -> Result<Vec<PathBuf>, VaultError> {
        let queued_dir = self.vault_root.join("todos/agent/queued");
        let files = filesystem::list_md_files(&queued_dir)?;
        let mut spawned = Vec::new();
        for file in files {
            spawned.push(file.clone());
            // Hand the spawned task its own runner (all fields are Arc-shared
            // or cheap clones) so it can outlive this borrow of `self`.
            let runner = TaskRunner {
                vault_root: self.vault_root.clone(),
                semaphore: self.semaphore.clone(),
                executor: self.executor.clone(),
                write_filter: self.write_filter.clone(),
            };
            tokio::spawn(async move {
                if let Err(e) = runner.execute_task(&file).await {
                    tracing::error!(task = ?file, error = %e, "Task execution failed");
                }
            });
        }
        Ok(spawned)
    }
    /// Execute a single agent task.
    ///
    /// Lifecycle: queued -> running (with `started` stamped) -> executor ->
    /// done on success. On failure the task is re-queued with an incremented
    /// `retry` while attempts remain, otherwise moved to failed/. Every
    /// daemon write or move is registered with the write filter first so the
    /// watcher ignores the resulting filesystem events.
    #[tracing::instrument(skip(self), fields(task = ?task_path.file_name()))]
    pub async fn execute_task(&self, task_path: &Path) -> Result<(), VaultError> {
        // Global concurrency cap; the permit is held until this fn returns.
        let _permit = self
            .semaphore
            .acquire()
            .await
            .map_err(|e| VaultError::InvalidEntity {
                path: task_path.to_path_buf(),
                reason: format!("Semaphore closed: {}", e),
            })?;
        let task_entity: VaultEntity<AgentTask> = filesystem::read_entity(task_path)?;
        let agent_name = &task_entity.frontmatter.agent;
        // Load agent definition
        let agent_path = self.vault_root.join("agents").join(format!("{}.md", agent_name));
        let agent_entity: VaultEntity<Agent> = filesystem::read_entity(&agent_path)?;
        // Move queued -> running
        // NOTE(review): file_name().unwrap() assumes a normal file path — it
        // would panic on a path ending in "..". Queued paths come from
        // list_md_files, so this presumably holds; confirm.
        let running_path = self
            .vault_root
            .join("todos/agent/running")
            .join(task_path.file_name().unwrap());
        self.write_filter.register(running_path.clone());
        filesystem::move_file(task_path, &running_path)?;
        // Update started timestamp
        let content = std::fs::read_to_string(&running_path)
            .map_err(|e| VaultError::io(e, &running_path))?;
        let updates = serde_json::json!({
            "started": chrono::Utc::now().to_rfc3339(),
        });
        let updated = frontmatter::update_frontmatter_fields(&content, &running_path, &updates)?;
        // Register again: each daemon write produces its own watcher event,
        // and a registration is consumed per event.
        self.write_filter.register(running_path.clone());
        std::fs::write(&running_path, updated).map_err(|e| VaultError::io(e, &running_path))?;
        // Compose prompt
        let system_prompt =
            vault_core::prompt::compose_prompt(&self.vault_root, &agent_entity, None)?;
        let task_context = &task_entity.body;
        let timeout = std::time::Duration::from_secs(agent_entity.frontmatter.timeout);
        // Execute
        let result = self
            .executor
            .execute(
                &agent_entity.frontmatter.executable,
                agent_entity.frontmatter.model.as_deref(),
                &system_prompt,
                task_context,
                &agent_entity.frontmatter.env,
                timeout,
            )
            .await;
        match result {
            Ok(exec_result) => {
                // Move running -> done
                let done_path = self
                    .vault_root
                    .join("todos/agent/done")
                    .join(running_path.file_name().unwrap());
                // Stamp completion metadata before moving, so the done/ copy
                // already carries its output summary.
                let content = std::fs::read_to_string(&running_path)
                    .map_err(|e| VaultError::io(e, &running_path))?;
                let updates = serde_json::json!({
                    "completed": chrono::Utc::now().to_rfc3339(),
                    "output": {
                        "stdout": exec_result.stdout,
                        "duration_secs": exec_result.duration.as_secs(),
                    },
                });
                let updated =
                    frontmatter::update_frontmatter_fields(&content, &running_path, &updates)?;
                self.write_filter.register(running_path.clone());
                std::fs::write(&running_path, updated)
                    .map_err(|e| VaultError::io(e, &running_path))?;
                self.write_filter.register(done_path.clone());
                filesystem::move_file(&running_path, &done_path)?;
                tracing::info!(task = ?done_path, "Task completed successfully");
            }
            Err(exec_error) => {
                // Re-read from running/: the file was rewritten above with
                // `started`, so the original parse is stale.
                let task_entity: VaultEntity<AgentTask> = filesystem::read_entity(&running_path)?;
                let retry = task_entity.frontmatter.retry;
                let max_retries = task_entity.frontmatter.max_retries;
                if retry < max_retries {
                    // Re-queue with incremented retry count
                    let content = std::fs::read_to_string(&running_path)
                        .map_err(|e| VaultError::io(e, &running_path))?;
                    let updates = serde_json::json!({
                        "retry": retry + 1,
                        "started": null,
                        "error": format!("Attempt {}: {}", retry + 1, exec_error),
                    });
                    let updated =
                        frontmatter::update_frontmatter_fields(&content, &running_path, &updates)?;
                    self.write_filter.register(running_path.clone());
                    std::fs::write(&running_path, updated)
                        .map_err(|e| VaultError::io(e, &running_path))?;
                    let queued_path = self
                        .vault_root
                        .join("todos/agent/queued")
                        .join(running_path.file_name().unwrap());
                    self.write_filter.register(queued_path.clone());
                    filesystem::move_file(&running_path, &queued_path)?;
                    tracing::warn!(
                        task = ?queued_path,
                        retry = retry + 1,
                        max_retries,
                        "Task failed, re-queued"
                    );
                } else {
                    // Move running -> failed
                    let failed_path = self
                        .vault_root
                        .join("todos/agent/failed")
                        .join(running_path.file_name().unwrap());
                    let content = std::fs::read_to_string(&running_path)
                        .map_err(|e| VaultError::io(e, &running_path))?;
                    // Render a human-readable failure reason for frontmatter.
                    let error_msg = match &exec_error {
                        ExecutionError::Timeout(d) => format!("Timed out after {:?}", d),
                        ExecutionError::NonZeroExit { code, stderr } => {
                            format!("Exit code {}: {}", code, stderr)
                        }
                        other => other.to_string(),
                    };
                    let updates = serde_json::json!({
                        "completed": chrono::Utc::now().to_rfc3339(),
                        "error": error_msg,
                    });
                    let updated =
                        frontmatter::update_frontmatter_fields(&content, &running_path, &updates)?;
                    self.write_filter.register(running_path.clone());
                    std::fs::write(&running_path, updated)
                        .map_err(|e| VaultError::io(e, &running_path))?;
                    self.write_filter.register(failed_path.clone());
                    filesystem::move_file(&running_path, &failed_path)?;
                    tracing::error!(
                        task = ?failed_path,
                        error = %exec_error,
                        "Task failed permanently"
                    );
                }
            }
        }
        Ok(())
    }
    /// On startup, recover tasks that were left in running/ (daemon crashed).
    /// Move them back to queued/ for re-execution.
    pub fn recover_running_tasks(&self) -> Result<Vec<PathBuf>, VaultError> {
        let running_dir = self.vault_root.join("todos/agent/running");
        let files = filesystem::list_md_files(&running_dir)?;
        let mut recovered = Vec::new();
        for file in &files {
            let queued_path = self
                .vault_root
                .join("todos/agent/queued")
                .join(file.file_name().unwrap());
            // Reset started timestamp
            let content =
                std::fs::read_to_string(file).map_err(|e| VaultError::io(e, file))?;
            let updates = serde_json::json!({
                "started": null,
            });
            // Best effort: a task whose frontmatter fails to update is still
            // re-queued; only the move itself is allowed to fail hard.
            if let Ok(updated) = frontmatter::update_frontmatter_fields(&content, file, &updates) {
                self.write_filter.register(file.clone());
                let _ = std::fs::write(file, updated);
            }
            self.write_filter.register(queued_path.clone());
            filesystem::move_file(file, &queued_path)?;
            recovered.push(queued_path);
            tracing::info!(task = ?file, "Recovered running task");
        }
        if !recovered.is_empty() {
            tracing::info!(count = recovered.len(), "Recovered tasks from previous run");
        }
        Ok(recovered)
    }
}

View file

@ -0,0 +1,11 @@
[package]
name = "vault-watch"
version.workspace = true
edition.workspace = true
[dependencies]
# Vault entity types and path classification (classify_path, EntityKind).
vault-core.workspace = true
# Cross-platform filesystem notification backend.
notify.workspace = true
tokio.workspace = true
tracing.workspace = true
thiserror.workspace = true

View file

@ -0,0 +1,214 @@
use crate::events::VaultEvent;
use notify::event::{CreateKind, ModifyKind, RemoveKind, RenameMode};
use notify::EventKind;
use std::path::{Path, PathBuf};
use vault_core::entity::{classify_path, EntityKind};
/// Classify a raw notify event into typed VaultEvents.
///
/// Filters out non-markdown files, dotfiles/temp files, paths outside the
/// vault root, and `.vault/` internals. Create/modify/remove events map to
/// one event per affected path; a two-path rename maps to a single event.
pub fn classify(
    event: &notify::Event,
    vault_root: &Path,
) -> Vec<VaultEvent> {
    let mut vault_events = Vec::new();
    for path in &event.paths {
        if !is_md_candidate(path) {
            continue;
        }
        let relative = match path.strip_prefix(vault_root) {
            Ok(r) => r,
            Err(_) => continue,
        };
        // Skip .vault/ internal files
        if relative.starts_with(".vault") {
            continue;
        }
        let kind = classify_path(relative);
        match event.kind {
            EventKind::Create(CreateKind::File) | EventKind::Create(CreateKind::Any) => {
                vault_events.push(make_created(kind, path.clone()));
            }
            EventKind::Modify(ModifyKind::Data(_))
            | EventKind::Modify(ModifyKind::Any)
            | EventKind::Modify(ModifyKind::Metadata(_)) => {
                vault_events.push(make_modified(kind, path.clone()));
            }
            EventKind::Remove(RemoveKind::File) | EventKind::Remove(RemoveKind::Any) => {
                vault_events.push(make_deleted(kind, path.clone()));
            }
            _ => {}
        }
    }
    // Handle renames (two paths: from, to). The per-path loop above emits
    // nothing for Modify(Name) kinds, so replacing the vec is safe.
    if matches!(event.kind, EventKind::Modify(ModifyKind::Name(RenameMode::Both)))
        && event.paths.len() == 2
    {
        let from = &event.paths[0];
        let to = &event.paths[1];
        if to.extension().is_some_and(|e| e == "md") {
            if let Ok(rel_to) = to.strip_prefix(vault_root) {
                // Fix: a rename INTO .vault/ must not surface as a move.
                if !rel_to.starts_with(".vault") {
                    let kind_to = classify_path(rel_to);
                    vault_events.clear();
                    vault_events.push(make_moved(kind_to, from.clone(), to.clone()));
                }
            }
        } else if from.extension().is_some_and(|e| e == "md") {
            // Fix: previously a rename AWAY from .md (e.g. task.md ->
            // task.md.bak) emitted nothing, so the entity silently vanished.
            // Treat it as a deletion of the source path.
            if let Ok(rel_from) = from.strip_prefix(vault_root) {
                if !rel_from.starts_with(".vault") {
                    let kind_from = classify_path(rel_from);
                    vault_events.clear();
                    vault_events.push(make_deleted(kind_from, from.clone()));
                }
            }
        }
    }
    vault_events
}

/// True if `path` is a real .md file (not a dotfile, backup, or temp file).
fn is_md_candidate(path: &Path) -> bool {
    if path.extension().is_none_or(|e| e != "md") {
        return false;
    }
    match path.file_name().and_then(|n| n.to_str()) {
        Some(name) => {
            !(name.starts_with('.') || name.starts_with('~') || name.ends_with(".tmp"))
        }
        // Non-UTF-8 names were not skipped by the original filter either.
        None => true,
    }
}
/// Map a creation of `p` to the event variant matching its entity kind.
fn make_created(kind: EntityKind, p: PathBuf) -> VaultEvent {
    match kind {
        EntityKind::Agent => VaultEvent::AgentCreated(p),
        EntityKind::Skill => VaultEvent::SkillCreated(p),
        EntityKind::HumanTask(_) => VaultEvent::HumanTaskCreated(p),
        EntityKind::AgentTask(_) => VaultEvent::AgentTaskCreated(p),
        EntityKind::Knowledge => VaultEvent::KnowledgeCreated(p),
        EntityKind::Notification => VaultEvent::NotificationCreated(p),
        // All cron states share one creation event.
        EntityKind::CronActive | EntityKind::CronPaused | EntityKind::CronTemplate => {
            VaultEvent::CronCreated(p)
        }
        // All view flavors share one creation event.
        EntityKind::ViewPage
        | EntityKind::ViewWidget
        | EntityKind::ViewLayout
        | EntityKind::ViewCustom => VaultEvent::ViewCreated(p),
        // Unclassified paths fall back to a generic change notification.
        EntityKind::Unknown => VaultEvent::FileChanged(p),
    }
}
/// Map a modification of `p` to the event variant matching its entity kind.
fn make_modified(kind: EntityKind, p: PathBuf) -> VaultEvent {
    match kind {
        EntityKind::Agent => VaultEvent::AgentModified(p),
        EntityKind::Skill => VaultEvent::SkillModified(p),
        EntityKind::HumanTask(_) => VaultEvent::HumanTaskModified(p),
        EntityKind::AgentTask(_) => VaultEvent::AgentTaskModified(p),
        EntityKind::Knowledge => VaultEvent::KnowledgeModified(p),
        // NOTE(review): VaultEvent has no NotificationModified variant, so a
        // modified notification is surfaced as Created — confirm intended.
        EntityKind::Notification => VaultEvent::NotificationCreated(p),
        EntityKind::CronActive | EntityKind::CronPaused | EntityKind::CronTemplate => {
            VaultEvent::CronModified(p)
        }
        EntityKind::ViewPage
        | EntityKind::ViewWidget
        | EntityKind::ViewLayout
        | EntityKind::ViewCustom => VaultEvent::ViewModified(p),
        EntityKind::Unknown => VaultEvent::FileChanged(p),
    }
}
/// Map a removal of `p` to the event variant matching its entity kind.
fn make_deleted(kind: EntityKind, p: PathBuf) -> VaultEvent {
    match kind {
        EntityKind::Agent => VaultEvent::AgentDeleted(p),
        EntityKind::Skill => VaultEvent::SkillDeleted(p),
        EntityKind::HumanTask(_) => VaultEvent::HumanTaskDeleted(p),
        EntityKind::AgentTask(_) => VaultEvent::AgentTaskDeleted(p),
        EntityKind::Knowledge => VaultEvent::KnowledgeDeleted(p),
        // A removed notification counts as expired.
        EntityKind::Notification => VaultEvent::NotificationExpired(p),
        EntityKind::CronActive | EntityKind::CronPaused | EntityKind::CronTemplate => {
            VaultEvent::CronDeleted(p)
        }
        EntityKind::ViewPage
        | EntityKind::ViewWidget
        | EntityKind::ViewLayout
        | EntityKind::ViewCustom => VaultEvent::ViewDeleted(p),
        EntityKind::Unknown => VaultEvent::FileChanged(p),
    }
}
/// Map a rename to a Moved event for the kinds that track transitions
/// (tasks, crons); anything else is reported as a creation at `to`.
fn make_moved(kind: EntityKind, from: PathBuf, to: PathBuf) -> VaultEvent {
    match kind {
        EntityKind::HumanTask(_) => VaultEvent::HumanTaskMoved { from, to },
        EntityKind::AgentTask(_) => VaultEvent::AgentTaskMoved { from, to },
        EntityKind::CronActive | EntityKind::CronPaused => VaultEvent::CronMoved { from, to },
        other => make_created(other, to),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use notify::event::{CreateKind, DataChange, ModifyKind};

    /// Build a raw notify event with the given kind and paths.
    fn raw(kind: EventKind, paths: Vec<PathBuf>) -> notify::Event {
        notify::Event {
            kind,
            paths,
            attrs: Default::default(),
        }
    }

    #[test]
    fn test_classify_agent_created() {
        let vault = PathBuf::from("/vault");
        let ev = raw(
            EventKind::Create(CreateKind::File),
            vec![PathBuf::from("/vault/agents/reviewer.md")],
        );
        let out = classify(&ev, &vault);
        assert_eq!(out.len(), 1);
        assert!(matches!(out[0], VaultEvent::AgentCreated(_)));
    }

    #[test]
    fn test_skip_non_md() {
        // Non-markdown files must never produce events.
        let vault = PathBuf::from("/vault");
        let ev = raw(
            EventKind::Create(CreateKind::File),
            vec![PathBuf::from("/vault/agents/readme.txt")],
        );
        assert!(classify(&ev, &vault).is_empty());
    }

    #[test]
    fn test_skip_dotfiles() {
        // Hidden files are filtered even with a .md extension.
        let vault = PathBuf::from("/vault");
        let ev = raw(
            EventKind::Create(CreateKind::File),
            vec![PathBuf::from("/vault/agents/.hidden.md")],
        );
        assert!(classify(&ev, &vault).is_empty());
    }

    #[test]
    fn test_classify_task_modified() {
        let vault = PathBuf::from("/vault");
        let ev = raw(
            EventKind::Modify(ModifyKind::Data(DataChange::Content)),
            vec![PathBuf::from("/vault/todos/agent/running/task-1.md")],
        );
        let out = classify(&ev, &vault);
        assert_eq!(out.len(), 1);
        assert!(matches!(out[0], VaultEvent::AgentTaskModified(_)));
    }

    #[test]
    fn test_classify_rename() {
        // A queued -> running rename becomes a single Moved event.
        let vault = PathBuf::from("/vault");
        let ev = raw(
            EventKind::Modify(ModifyKind::Name(RenameMode::Both)),
            vec![
                PathBuf::from("/vault/todos/agent/queued/task.md"),
                PathBuf::from("/vault/todos/agent/running/task.md"),
            ],
        );
        let out = classify(&ev, &vault);
        assert_eq!(out.len(), 1);
        assert!(matches!(out[0], VaultEvent::AgentTaskMoved { .. }));
    }
}

View file

@ -0,0 +1,108 @@
use std::path::PathBuf;
/// Typed vault-level events emitted by the classifier. Each variant carries
/// the affected path; `*Moved` variants carry both source and destination.
#[derive(Debug, Clone)]
pub enum VaultEvent {
    // Agent definition files.
    AgentCreated(PathBuf),
    AgentModified(PathBuf),
    AgentDeleted(PathBuf),
    // Skill definition files.
    SkillCreated(PathBuf),
    SkillModified(PathBuf),
    SkillDeleted(PathBuf),
    // Cron job files; Moved presumably covers active <-> paused transitions
    // (see classifier's make_moved) — confirm.
    CronCreated(PathBuf),
    CronModified(PathBuf),
    CronDeleted(PathBuf),
    CronMoved { from: PathBuf, to: PathBuf },
    // Human task files (todos/harald-style status directories).
    HumanTaskCreated(PathBuf),
    HumanTaskModified(PathBuf),
    HumanTaskMoved { from: PathBuf, to: PathBuf },
    HumanTaskDeleted(PathBuf),
    // Agent task files (todos/agent status directories).
    AgentTaskCreated(PathBuf),
    AgentTaskModified(PathBuf),
    AgentTaskMoved { from: PathBuf, to: PathBuf },
    AgentTaskDeleted(PathBuf),
    // Knowledge notes.
    KnowledgeCreated(PathBuf),
    KnowledgeModified(PathBuf),
    KnowledgeDeleted(PathBuf),
    // View definitions (pages/widgets/layouts/custom).
    ViewCreated(PathBuf),
    ViewModified(PathBuf),
    ViewDeleted(PathBuf),
    // Notifications: no Modified variant exists; deletion maps to Expired.
    NotificationCreated(PathBuf),
    NotificationExpired(PathBuf),
    // Fallback for markdown files that match no known entity kind.
    FileChanged(PathBuf),
}
impl VaultEvent {
    /// Get the primary path associated with this event.
    ///
    /// For `*Moved` variants this is the destination; match on the variant
    /// directly when the source path is needed.
    pub fn path(&self) -> &PathBuf {
        match self {
            Self::AgentCreated(p)
            | Self::AgentModified(p)
            | Self::AgentDeleted(p)
            | Self::SkillCreated(p)
            | Self::SkillModified(p)
            | Self::SkillDeleted(p)
            | Self::CronCreated(p)
            | Self::CronModified(p)
            | Self::CronDeleted(p)
            | Self::HumanTaskCreated(p)
            | Self::HumanTaskModified(p)
            | Self::HumanTaskDeleted(p)
            | Self::AgentTaskCreated(p)
            | Self::AgentTaskModified(p)
            | Self::AgentTaskDeleted(p)
            | Self::KnowledgeCreated(p)
            | Self::KnowledgeModified(p)
            | Self::KnowledgeDeleted(p)
            | Self::ViewCreated(p)
            | Self::ViewModified(p)
            | Self::ViewDeleted(p)
            | Self::NotificationCreated(p)
            | Self::NotificationExpired(p)
            | Self::FileChanged(p) => p,
            // Move events: report the destination.
            Self::CronMoved { to, .. }
            | Self::HumanTaskMoved { to, .. }
            | Self::AgentTaskMoved { to, .. } => to,
        }
    }
    /// Return a string event type name for serialization.
    ///
    /// Names are stable snake_case identifiers consumed by downstream
    /// serializers; do not rename without coordinating with consumers.
    pub fn event_type(&self) -> &'static str {
        match self {
            Self::AgentCreated(_) => "agent_created",
            Self::AgentModified(_) => "agent_modified",
            Self::AgentDeleted(_) => "agent_deleted",
            Self::SkillCreated(_) => "skill_created",
            Self::SkillModified(_) => "skill_modified",
            Self::SkillDeleted(_) => "skill_deleted",
            Self::CronCreated(_) => "cron_created",
            Self::CronModified(_) => "cron_modified",
            Self::CronDeleted(_) => "cron_deleted",
            Self::CronMoved { .. } => "cron_moved",
            Self::HumanTaskCreated(_) => "human_task_created",
            Self::HumanTaskModified(_) => "human_task_modified",
            Self::HumanTaskMoved { .. } => "human_task_moved",
            Self::HumanTaskDeleted(_) => "human_task_deleted",
            Self::AgentTaskCreated(_) => "agent_task_created",
            Self::AgentTaskModified(_) => "agent_task_modified",
            Self::AgentTaskMoved { .. } => "agent_task_moved",
            Self::AgentTaskDeleted(_) => "agent_task_deleted",
            Self::KnowledgeCreated(_) => "knowledge_created",
            Self::KnowledgeModified(_) => "knowledge_modified",
            Self::KnowledgeDeleted(_) => "knowledge_deleted",
            Self::ViewCreated(_) => "view_created",
            Self::ViewModified(_) => "view_modified",
            Self::ViewDeleted(_) => "view_deleted",
            Self::NotificationCreated(_) => "notification_created",
            Self::NotificationExpired(_) => "notification_expired",
            Self::FileChanged(_) => "file_changed",
        }
    }
}

View file

@ -0,0 +1,4 @@
//! vault-watch: filesystem watching for the vault, with daemon-write filtering.
pub mod classifier; // maps raw notify events to typed VaultEvents
pub mod events; // the VaultEvent enum and its accessors
pub mod watcher; // VaultWatcher: recursive notify-based watcher
pub mod write_filter; // DaemonWriteFilter: suppresses self-originated events

View file

@ -0,0 +1,83 @@
use crate::classifier;
use crate::events::VaultEvent;
use crate::write_filter::DaemonWriteFilter;
use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher};
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::mpsc;
/// Errors that can occur while setting up or running the vault watcher.
#[derive(Debug, thiserror::Error)]
pub enum WatchError {
    // Underlying notify backend failure (watch registration etc.).
    #[error("Notify error: {0}")]
    Notify(#[from] notify::Error),
    // The internal event channel was closed.
    #[error("Channel closed")]
    ChannelClosed,
}
/// Recursive filesystem watcher over a vault root that yields typed
/// `VaultEvent`s, suppressing events caused by the daemon's own writes.
pub struct VaultWatcher {
    // Root directory being watched.
    vault_root: PathBuf,
    // Shared filter consulted before forwarding each event.
    write_filter: Arc<DaemonWriteFilter>,
    // Held only to keep the OS watch alive for the watcher's lifetime.
    _watcher: RecommendedWatcher,
    // Receiver end of the classified, filtered event stream.
    rx: mpsc::Receiver<VaultEvent>,
}
impl VaultWatcher {
pub fn new(
vault_root: PathBuf,
write_filter: Arc<DaemonWriteFilter>,
) -> Result<Self, WatchError> {
let (event_tx, event_rx) = mpsc::channel(256);
let root = vault_root.clone();
let filter = write_filter.clone();
let (notify_tx, mut notify_rx) = mpsc::channel(512);
let mut watcher = RecommendedWatcher::new(
move |res: Result<notify::Event, notify::Error>| {
if let Ok(event) = res {
let _ = notify_tx.blocking_send(event);
}
},
Config::default(),
)?;
watcher.watch(&vault_root, RecursiveMode::Recursive)?;
// Spawn classification task
let tx = event_tx.clone();
tokio::spawn(async move {
while let Some(raw_event) = notify_rx.recv().await {
let vault_events = classifier::classify(&raw_event, &root);
for event in vault_events {
if filter.should_suppress(event.path()) {
tracing::debug!(?event, "Suppressed daemon-originated event");
continue;
}
if tx.send(event).await.is_err() {
return;
}
}
}
});
Ok(Self {
vault_root,
write_filter,
_watcher: watcher,
rx: event_rx,
})
}
pub fn vault_root(&self) -> &PathBuf {
&self.vault_root
}
pub fn write_filter(&self) -> &Arc<DaemonWriteFilter> {
&self.write_filter
}
pub async fn recv(&mut self) -> Option<VaultEvent> {
self.rx.recv().await
}
}

View file

@ -0,0 +1,67 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Mutex;
use std::time::{Duration, Instant};
// How long a registered write remains eligible for suppression.
const WRITE_FILTER_TTL: Duration = Duration::from_secs(5);
/// Filters out filesystem events triggered by daemon-originated writes.
/// Before writing a file, register the path. When an event arrives,
/// check if it should be suppressed.
///
/// NOTE(review): a registration is consumed by the first matching event, but
/// one write can yield multiple notify events (create + modify) — confirm
/// that follow-up events for the same write are acceptable downstream.
pub struct DaemonWriteFilter {
    // Registered paths awaiting their filesystem event, keyed to the
    // registration time (entries expire after WRITE_FILTER_TTL).
    pending: Mutex<HashMap<PathBuf, Instant>>,
}
impl DaemonWriteFilter {
    /// Create an empty filter.
    pub fn new() -> Self {
        Self {
            pending: Mutex::new(HashMap::new()),
        }
    }
    /// Record that the daemon is about to write `path`.
    pub fn register(&self, path: PathBuf) {
        self.pending.lock().unwrap().insert(path, Instant::now());
    }
    /// Consume any pending registration for `path`, returning whether the
    /// corresponding event should be suppressed (i.e. it was daemon-made).
    /// Registrations older than the TTL are discarded first.
    pub fn should_suppress(&self, path: &PathBuf) -> bool {
        let mut guard = self.pending.lock().unwrap();
        guard.retain(|_, registered_at| registered_at.elapsed() < WRITE_FILTER_TTL);
        guard.remove(path).is_some()
    }
}
impl Default for DaemonWriteFilter {
    /// Equivalent to [`DaemonWriteFilter::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_register_and_suppress() {
        let filter = DaemonWriteFilter::new();
        let path = PathBuf::from("/vault/crons/active/test.md");
        filter.register(path.clone());
        // A registered path suppresses exactly one event.
        assert!(filter.should_suppress(&path));
        // Second check should not suppress (already consumed)
        assert!(!filter.should_suppress(&path));
    }
    #[test]
    fn test_unregistered_not_suppressed() {
        // Paths never registered pass straight through.
        let filter = DaemonWriteFilter::new();
        let path = PathBuf::from("/vault/agents/test.md");
        assert!(!filter.should_suppress(&path));
    }
}

24
dashboard/.gitignore vendored Normal file
View file

@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

73
dashboard/README.md Normal file
View file

@ -0,0 +1,73 @@
# React + TypeScript + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
Currently, two official plugins are available:
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) (or [oxc](https://oxc.rs) when used in [rolldown-vite](https://vite.dev/guide/rolldown)) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
## React Compiler
The React Compiler is not enabled on this template because of its impact on dev & build performance. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
## Expanding the ESLint configuration
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
```js
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Remove tseslint.configs.recommended and replace with this
tseslint.configs.recommendedTypeChecked,
// Alternatively, use this for stricter rules
tseslint.configs.strictTypeChecked,
// Optionally, add this for stylistic rules
tseslint.configs.stylisticTypeChecked,
// Other configs...
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
```js
// eslint.config.js
import reactX from 'eslint-plugin-react-x'
import reactDom from 'eslint-plugin-react-dom'
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Enable lint rules for React
reactX.configs['recommended-typescript'],
// Enable lint rules for React DOM
reactDom.configs.recommended,
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```

View file

@ -0,0 +1,23 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
import { defineConfig, globalIgnores } from 'eslint/config'
// Flat ESLint config: TS/TSX sources get the recommended JS and TS rule
// sets plus React Hooks and Vite fast-refresh checks; build output ignored.
export default defineConfig([
  globalIgnores(['dist']),
  {
    files: ['**/*.{ts,tsx}'],
    extends: [
      js.configs.recommended,
      tseslint.configs.recommended,
      reactHooks.configs.flat.recommended,
      reactRefresh.configs.vite,
    ],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
    },
  },
])

13
dashboard/index.html Normal file
View file

@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>dashboard</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

4235
dashboard/package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

43
dashboard/package.json Normal file
View file

@ -0,0 +1,43 @@
{
"name": "dashboard",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"@codemirror/autocomplete": "^6.20.1",
"@codemirror/commands": "^6.10.2",
"@codemirror/lang-markdown": "^6.5.0",
"@codemirror/language": "^6.12.2",
"@codemirror/state": "^6.5.4",
"@codemirror/theme-one-dark": "^6.1.3",
"@codemirror/view": "^6.39.16",
"@hello-pangea/dnd": "^18.0.1",
"@tailwindcss/vite": "^4.2.1",
"@tanstack/react-query": "^5.90.21",
"codemirror": "^6.0.2",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-router-dom": "^7.13.1",
"tailwindcss": "^4.2.1"
},
"devDependencies": {
"@eslint/js": "^9.39.1",
"@types/node": "^24.10.1",
"@types/react": "^19.2.7",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^5.1.1",
"eslint": "^9.39.1",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.24",
"globals": "^16.5.0",
"typescript": "~5.9.3",
"typescript-eslint": "^8.48.0",
"vite": "^7.3.1"
}
}

47
dashboard/src/App.tsx Normal file
View file

@ -0,0 +1,47 @@
import { BrowserRouter, Routes, Route } from 'react-router-dom';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import { Layout } from './components/Layout';
import { TasksPage } from './pages/TasksPage';
import { AgentsPage } from './pages/AgentsPage';
import { CronsPage } from './pages/CronsPage';
import { AgentQueuePage } from './pages/AgentQueuePage';
import { KnowledgePage } from './pages/KnowledgePage';
import { EditorPage } from './pages/EditorPage';
import { ViewPage } from './pages/ViewPage';
import { CommandPalette } from './components/CommandPalette';
import { useWebSocket } from './hooks/useWebSocket';
// App-wide React Query client: data is considered fresh for 5s and failed
// queries retry once before surfacing an error.
const queryClient = new QueryClient({
  defaultOptions: {
    queries: { staleTime: 5000, retry: 1 },
  },
});
// Inner app shell: mounts the WebSocket hook once for the whole tree, then
// declares the routed pages inside the shared Layout.
function AppInner() {
  // Side-effect hook; return value unused here.
  useWebSocket();
  return (
    <BrowserRouter>
      {/* Global command palette overlay, available on every route. */}
      <CommandPalette />
      <Routes>
        <Route element={<Layout />}>
          <Route path="/" element={<TasksPage />} />
          <Route path="/agents" element={<AgentsPage />} />
          <Route path="/crons" element={<CronsPage />} />
          <Route path="/queue" element={<AgentQueuePage />} />
          <Route path="/knowledge" element={<KnowledgePage />} />
          <Route path="/editor" element={<EditorPage />} />
          {/* Wildcard: view pages are addressed by vault-relative path. */}
          <Route path="/view/*" element={<ViewPage />} />
        </Route>
      </Routes>
    </BrowserRouter>
  );
}
// Root component: provides the shared React Query client to the app.
export default function App() {
  return (
    <QueryClientProvider client={queryClient}>
      <AppInner />
    </QueryClientProvider>
  );
}

152
dashboard/src/api/client.ts Normal file
View file

@ -0,0 +1,152 @@
import type {
Agent,
AgentTask,
CronJob,
HumanTask,
KnowledgeNote,
Skill,
TreeNode,
VaultStats,
HealthStatus,
ViewPageDef,
ViewDetail,
NotificationItem,
} from './types';
// All endpoints are served under this prefix (proxied by the dev server).
const BASE = '/api';

/**
 * Thin JSON fetch wrapper for the vault API.
 *
 * Sends `Content-Type: application/json`, merges any caller-supplied
 * headers on top, and throws an `Error` carrying the server's `error`
 * field (or the HTTP status text) on non-2xx responses.
 *
 * Fix: `...init` was previously spread AFTER the `headers:` key, so a
 * caller passing `init.headers` replaced the merged object entirely and
 * silently dropped the JSON content type. Spread `init` first.
 */
async function fetchJson<T>(url: string, init?: RequestInit): Promise<T> {
  const res = await fetch(url, {
    ...init,
    headers: { 'Content-Type': 'application/json', ...init?.headers },
  });
  if (!res.ok) {
    const body = await res.json().catch(() => ({ error: res.statusText }));
    throw new Error(body.error || res.statusText);
  }
  return res.json();
}
// Agents
export const listAgents = () => fetchJson<Agent[]>(`${BASE}/agents`);
export const getAgent = (name: string) => fetchJson<Agent>(`${BASE}/agents/${name}`);
export const triggerAgent = (name: string, context?: string) =>
fetchJson<{ status: string }>(`${BASE}/agents/${name}/trigger`, {
method: 'POST',
body: JSON.stringify({ context }),
});
// Skills
export const listSkills = () => fetchJson<Skill[]>(`${BASE}/skills`);
export const getSkill = (name: string) => fetchJson<Skill>(`${BASE}/skills/${name}`);
export const skillUsedBy = (name: string) => fetchJson<string[]>(`${BASE}/skills/${name}/used-by`);
// Crons
export const listCrons = () => fetchJson<CronJob[]>(`${BASE}/crons`);
export const triggerCron = (name: string) =>
fetchJson<{ status: string }>(`${BASE}/crons/${name}/trigger`, { method: 'POST' });
export const pauseCron = (name: string) =>
fetchJson<{ status: string }>(`${BASE}/crons/${name}/pause`, { method: 'POST' });
export const resumeCron = (name: string) =>
fetchJson<{ status: string }>(`${BASE}/crons/${name}/resume`, { method: 'POST' });
// Human Tasks
export const listHumanTasks = () => fetchJson<HumanTask[]>(`${BASE}/todos/harald`);
export const listHumanTasksByStatus = (status: string) =>
fetchJson<HumanTask[]>(`${BASE}/todos/harald/${status}`);
export const createHumanTask = (task: {
title: string;
priority?: string;
labels?: string[];
body?: string;
}) =>
fetchJson<{ status: string; path: string }>(`${BASE}/todos/harald`, {
method: 'POST',
body: JSON.stringify(task),
});
export const moveHumanTask = (status: string, id: string, to: string) =>
fetchJson<{ status: string }>(`${BASE}/todos/harald/${status}/${id}/move`, {
method: 'PATCH',
body: JSON.stringify({ to }),
});
export const deleteHumanTask = (status: string, id: string) =>
fetchJson<{ status: string }>(`${BASE}/todos/harald/${status}/${id}`, { method: 'DELETE' });
// Agent Tasks
export const listAgentTasks = () => fetchJson<AgentTask[]>(`${BASE}/todos/agent`);
export const getAgentTask = (id: string) => fetchJson<AgentTask>(`${BASE}/todos/agent/${id}`);
export const createAgentTask = (task: {
title: string;
agent: string;
priority?: string;
body?: string;
}) =>
fetchJson<{ status: string; path: string }>(`${BASE}/todos/agent`, {
method: 'POST',
body: JSON.stringify(task),
});
// Knowledge
export const listKnowledge = (q?: string, tag?: string) => {
const params = new URLSearchParams();
if (q) params.set('q', q);
if (tag) params.set('tag', tag);
const qs = params.toString();
return fetchJson<KnowledgeNote[]>(`${BASE}/knowledge${qs ? `?${qs}` : ''}`);
};
export const getKnowledge = (path: string) =>
fetchJson<{ path: string; frontmatter: unknown; body: string; html: string }>(
`${BASE}/knowledge/${path}`,
);
// Files
export const readFile = (path: string) =>
fetchJson<{ path: string; frontmatter: unknown; body: string }>(`${BASE}/files/${path}`);
export const writeFile = (path: string, data: { frontmatter?: unknown; body?: string; raw?: string }) =>
fetchJson<{ status: string }>(`${BASE}/files/${path}`, {
method: 'PUT',
body: JSON.stringify(data),
});
export const patchFile = (path: string, updates: Record<string, unknown>) =>
fetchJson<{ status: string }>(`${BASE}/files/${path}`, {
method: 'PATCH',
body: JSON.stringify(updates),
});
export const deleteFile = (path: string) =>
fetchJson<{ status: string }>(`${BASE}/files/${path}`, { method: 'DELETE' });
// Tree
/** Fetch the whole vault directory tree (root node with nested children). */
export const getTree = () => fetchJson<TreeNode>(`${BASE}/tree`);
// Suggest
// Autocomplete sources for form fields.
export const suggestAgents = () => fetchJson<string[]>(`${BASE}/suggest/agents`);
export const suggestSkills = () => fetchJson<string[]>(`${BASE}/suggest/skills`);
export const suggestTags = () => fetchJson<string[]>(`${BASE}/suggest/tags`);
/** File-path suggestions, optionally filtered by a query substring. */
export const suggestFiles = (q?: string) =>
  fetchJson<string[]>(`${BASE}/suggest/files${q ? `?q=${encodeURIComponent(q)}` : ''}`);
export const suggestModels = () => fetchJson<string[]>(`${BASE}/suggest/models`);
export const suggestMcpServers = () => fetchJson<string[]>(`${BASE}/suggest/mcp-servers`);
// Stats
/** Aggregate vault counters for the dashboard overview. */
export const getStats = () => fetchJson<VaultStats>(`${BASE}/stats`);
/** Recently modified files — presumably newest first; confirm server ordering. */
export const getActivity = () =>
  fetchJson<{ path: string; kind: string; modified: string; name: string }[]>(`${BASE}/activity`);
/** Daemon liveness, version, and basic counters. */
export const getHealth = () => fetchJson<HealthStatus>(`${BASE}/health`);
// Views
/** List user-defined dashboard pages / widgets / layouts. */
export const listViewPages = () => fetchJson<ViewPageDef[]>(`${BASE}/views/pages`);
export const listViewWidgets = () => fetchJson<ViewPageDef[]>(`${BASE}/views/widgets`);
export const listViewLayouts = () => fetchJson<ViewPageDef[]>(`${BASE}/views/layouts`);
/** Fetch a single view definition. `path` is unencoded (may contain slashes). */
export const getView = (path: string) => fetchJson<ViewDetail>(`${BASE}/views/${path}`);
/** Create or replace a view; accepts {frontmatter, body} or a raw document. */
export const putView = (path: string, data: { frontmatter?: unknown; body?: string; raw?: string }) =>
  fetchJson<{ status: string }>(`${BASE}/views/${path}`, {
    method: 'PUT',
    body: JSON.stringify(data),
  });
export const deleteView = (path: string) =>
  fetchJson<{ status: string }>(`${BASE}/views/${path}`, { method: 'DELETE' });
// Notifications
/** List currently active notifications. */
export const listNotifications = () => fetchJson<NotificationItem[]>(`${BASE}/notifications`);
/** Dismiss (delete) a notification by id. */
export const dismissNotification = (id: string) =>
  fetchJson<{ status: string }>(`${BASE}/notifications/${id}`, { method: 'DELETE' });

161
dashboard/src/api/types.ts Normal file
View file

@ -0,0 +1,161 @@
/** Task priority levels, highest first. */
export type Priority = 'urgent' | 'high' | 'medium' | 'low';
/** Kanban columns for human tasks. */
export type TaskStatus = 'urgent' | 'open' | 'in-progress' | 'done';
/** Lifecycle states of an agent-executed task. */
export type AgentTaskStatus = 'queued' | 'running' | 'done' | 'failed';
/** Outcome of a single run (cron fire or agent task). */
export type RunStatus = 'success' | 'failure' | 'timeout';
/** Agent definition as exposed by the API (mirrors the agent file's frontmatter). */
export interface Agent {
  name: string;
  executable: string;
  model?: string;
  escalate_to?: string;
  mcp_servers: string[];
  skills: string[];
  timeout: number; // seconds (rendered as "timeout: Ns" in AgentCard)
  max_retries: number;
  env: Record<string, string>;
  body?: string; // markdown body of the agent file, if requested
}
/** Reusable skill definition an agent can reference. */
export interface Skill {
  name: string;
  description: string;
  version?: number;
  requires_mcp: string[];
  inputs: string[];
  outputs: string[];
  body?: string;
}
/** Scheduled job that triggers an agent on a cron expression. */
export interface CronJob {
  name: string;
  title: string;
  schedule: string; // cron expression, shown verbatim in the UI
  agent: string;
  enabled: boolean;
  status: 'active' | 'paused';
  last_run?: string;
  last_status?: RunStatus;
  next_run?: string;
  run_count: number;
}
/** A task on the human (kanban) board. */
export interface HumanTask {
  id: string;
  title: string;
  priority: Priority;
  status: TaskStatus;
  source?: string; // origin of the task, rendered as "via {source}"
  repo?: string;
  labels: string[];
  created: string; // ISO timestamp
  due?: string;
  body: string;
}
/** A task queued for automated execution by an agent. */
export interface AgentTask {
  id: string;
  title: string;
  agent: string;
  priority: Priority;
  type?: string;
  status: AgentTaskStatus;
  created: string;
  started?: string;
  completed?: string;
  retry: number;
  max_retries: number;
  input?: unknown;
  output?: unknown;
  error?: string;
  body: string;
}
/** Lightweight listing entry for a knowledge note. */
export interface KnowledgeNote {
  path: string;
  title: string;
  tags: string[];
}
/** Node in the vault file tree; `children` only present on directories. */
export interface TreeNode {
  name: string;
  path: string;
  type: 'file' | 'directory';
  children?: TreeNode[];
}
/** Aggregate counters shown on the dashboard. */
export interface VaultStats {
  agents: number;
  skills: number;
  crons_scheduled: number;
  human_tasks: Record<TaskStatus, number>;
  agent_tasks: Record<AgentTaskStatus, number>;
  knowledge_notes: number;
  total_tasks_executed: number;
  total_cron_fires: number;
}
/** Event pushed over the WebSocket; `type` is used for listener dispatch. */
export interface WsEvent {
  type: string;
  area: string;
  path: string;
  data?: Record<string, unknown>;
}
/** Response of the /health endpoint. */
export interface HealthStatus {
  status: string;
  version: string;
  uptime_secs: number;
  agents: number;
  crons_scheduled: number;
  total_tasks_executed: number;
}
// View system types
/** Summary of a user-defined view (page, widget, or layout). */
export interface ViewPageDef {
  name: string;
  type: string;
  title?: string;
  icon?: string;
  route?: string;
  position?: number; // sort key for navigation ordering (lower = earlier)
  layout?: string;
  component?: string;
  description?: string;
}
/** A widget placed in a layout region, with optional props. */
export interface WidgetInstanceDef {
  widget: string;
  props?: Record<string, unknown>;
}
/** Maps a layout region name to the widgets rendered there. */
export interface ViewRegions {
  [region: string]: WidgetInstanceDef[];
}
/** Full view document: parsed frontmatter (null if absent) plus markdown body. */
export interface ViewDetail {
  path: string;
  frontmatter: {
    type: string;
    title?: string;
    icon?: string;
    route?: string;
    position?: number;
    layout?: string;
    regions?: ViewRegions;
    name?: string;
    description?: string;
    component?: string;
  } | null;
  body: string;
}
/** A notification banner entry. */
export interface NotificationItem {
  id: string;
  title: string;
  message?: string;
  level?: string; // one of info/warning/error/success — presumably; confirm server values
  source?: string;
  created?: string;
  expires?: string;
}

80
dashboard/src/api/ws.ts Normal file
View file

@ -0,0 +1,80 @@
import type { WsEvent } from './types';
type Listener = (event: WsEvent) => void;
/**
 * Auto-reconnecting WebSocket wrapper with per-event-type and global listeners.
 * Reconnects 3 s after an unexpected close; `disconnect()` stops reconnection.
 */
export class VaultWebSocket {
  private ws: WebSocket | null = null;
  private listeners: Map<string, Set<Listener>> = new Map();
  private globalListeners: Set<Listener> = new Set();
  private reconnectTimer: ReturnType<typeof setTimeout> | null = null;
  private url: string;
  constructor(url?: string) {
    const proto = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
    this.url = url || `${proto}//${window.location.host}/ws`;
  }
  /** Open the socket unless one is already open or in the middle of connecting. */
  connect() {
    // Guard CONNECTING as well as OPEN: without it, two rapid connect() calls
    // (e.g. reconnect timer racing a manual call) leak an extra socket.
    if (
      this.ws?.readyState === WebSocket.OPEN ||
      this.ws?.readyState === WebSocket.CONNECTING
    ) {
      return;
    }
    this.ws = new WebSocket(this.url);
    this.ws.onmessage = (msg) => {
      try {
        const event: WsEvent = JSON.parse(msg.data);
        this.globalListeners.forEach((fn) => fn(event));
        this.listeners.get(event.type)?.forEach((fn) => fn(event));
      } catch {
        // ignore malformed messages
      }
    };
    this.ws.onclose = () => {
      this.scheduleReconnect();
    };
    this.ws.onerror = () => {
      this.ws?.close();
    };
  }
  /** Close the socket and cancel any pending reconnect. */
  disconnect() {
    if (this.reconnectTimer) {
      clearTimeout(this.reconnectTimer);
      this.reconnectTimer = null;
    }
    if (this.ws) {
      // Detach onclose BEFORE closing: close() fires it asynchronously, which
      // would otherwise schedule a fresh reconnect right after we cancelled
      // the timer, defeating an intentional disconnect.
      this.ws.onclose = null;
      this.ws.close();
      this.ws = null;
    }
  }
  /** Listen to a specific event type; returns an unsubscribe function. */
  on(type: string, fn: Listener): () => void {
    if (!this.listeners.has(type)) {
      this.listeners.set(type, new Set());
    }
    this.listeners.get(type)!.add(fn);
    return () => this.listeners.get(type)?.delete(fn);
  }
  /** Listen to all events; returns an unsubscribe function. */
  onAny(fn: Listener): () => void {
    this.globalListeners.add(fn);
    return () => this.globalListeners.delete(fn);
  }
  /** Send a JSON action if the socket is open; silently dropped otherwise. */
  send(action: Record<string, unknown>) {
    if (this.ws?.readyState === WebSocket.OPEN) {
      this.ws.send(JSON.stringify(action));
    }
  }
  /** Arm a single 3 s reconnect timer (no-op if one is already pending). */
  private scheduleReconnect() {
    if (this.reconnectTimer) return;
    this.reconnectTimer = setTimeout(() => {
      this.reconnectTimer = null;
      this.connect();
    }, 3000);
  }
}
export const vaultWs = new VaultWebSocket();

View file

@ -0,0 +1,39 @@
import { useActivity } from '../hooks/useApi';
/** Compact feed of the 20 most recently modified vault files. */
export function ActivityFeed() {
  const { data: activity, isLoading } = useActivity();
  if (isLoading) return <div className="p-4 text-sm text-text-muted">Loading...</div>;
  if (!activity?.length) return <div className="p-4 text-sm text-text-muted">No recent activity</div>;
  return (
    <div className="space-y-1">
      {/* Key by path (stable identity) rather than array index so rows are not
          mis-reused by React when the feed reorders on refresh. */}
      {activity.slice(0, 20).map((item) => (
        <div key={item.path} className="flex items-center gap-2 px-4 py-1.5 text-xs">
          <span className={`h-1.5 w-1.5 rounded-full ${kindColor(item.kind)}`} />
          <span className="flex-1 truncate text-text-secondary">{item.name}</span>
          <span className="text-text-muted">{timeAgo(item.modified)}</span>
        </div>
      ))}
    </div>
  );
}
/** Map an activity kind to the colour class of the feed's status dot. */
function kindColor(kind: string): string {
  const dotColors: Record<string, string> = {
    human_task: 'bg-accent',
    agent_task: 'bg-warning',
    knowledge: 'bg-success',
  };
  return dotColors[kind] ?? 'bg-text-muted';
}
/** Compact relative age from an ISO timestamp: "now", "5m", "3h", "2d". */
function timeAgo(iso: string): string {
  const elapsedMins = Math.floor((Date.now() - new Date(iso).getTime()) / 60000);
  if (elapsedMins < 1) return 'now';
  if (elapsedMins < 60) return `${elapsedMins}m`;
  const elapsedHrs = Math.floor(elapsedMins / 60);
  return elapsedHrs < 24 ? `${elapsedHrs}h` : `${Math.floor(elapsedHrs / 24)}d`;
}

View file

@ -0,0 +1,42 @@
import type { Agent } from '../api/types';
interface Props {
  agent: Agent;
  // Called with the agent's name when the Trigger button is clicked.
  onTrigger: (name: string) => void;
}
/** Card summarising one agent: executable/model, skill chips, limits, trigger button. */
export function AgentCard({ agent, onTrigger }: Props) {
  return (
    <div className="rounded-lg border border-border bg-surface-raised p-4 transition-colors hover:border-border-hover">
      <div className="mb-2 flex items-center justify-between">
        <h3 className="text-sm font-semibold text-text-primary">{agent.name}</h3>
        <button
          onClick={() => onTrigger(agent.name)}
          className="rounded bg-accent/15 px-2 py-0.5 text-xs font-medium text-accent transition-colors hover:bg-accent/25"
        >
          Trigger
        </button>
      </div>
      <div className="mb-2 text-xs text-text-secondary">
        <span className="mr-3">{agent.executable}</span>
        {agent.model && <span className="text-text-muted">{agent.model}</span>}
      </div>
      {agent.skills.length > 0 && (
        <div className="flex flex-wrap gap-1">
          {agent.skills.map((s) => (
            <span key={s} className="rounded bg-surface-overlay px-1.5 py-0.5 text-xs text-text-secondary">
              {s}
            </span>
          ))}
        </div>
      )}
      <div className="mt-2 flex items-center gap-3 text-xs text-text-muted">
        <span>timeout: {agent.timeout}s</span>
        {agent.max_retries > 0 && <span>retries: {agent.max_retries}</span>}
      </div>
    </div>
  );
}

View file

@ -0,0 +1,94 @@
import { useState, useEffect, useRef } from 'react';
import { useNavigate } from 'react-router-dom';
interface Command {
  id: string;
  label: string;
  action: () => void;
}
/**
 * Cmd/Ctrl+K command palette: fuzzy-free substring filter over a fixed list of
 * create/navigate commands. Enter runs the first match; Escape closes.
 */
export function CommandPalette() {
  const [open, setOpen] = useState(false);
  const [query, setQuery] = useState('');
  const inputRef = useRef<HTMLInputElement>(null);
  const navigate = useNavigate();
  // Rebuilt each render; cheap, and keeps `navigate` closures fresh.
  const commands: Command[] = [
    { id: 'new-task', label: 'New Human Task', action: () => navigate('/editor?new=todos/harald/open') },
    { id: 'new-agent-task', label: 'New Agent Task', action: () => navigate('/editor?new=todos/agent/queued') },
    { id: 'new-agent', label: 'New Agent', action: () => navigate('/editor?new=agents') },
    { id: 'new-skill', label: 'New Skill', action: () => navigate('/editor?new=skills') },
    { id: 'new-cron', label: 'New Cron Job', action: () => navigate('/editor?new=crons/active') },
    { id: 'new-note', label: 'New Knowledge Note', action: () => navigate('/editor?new=knowledge') },
    { id: 'nav-tasks', label: 'Go to Tasks', action: () => navigate('/') },
    { id: 'nav-agents', label: 'Go to Agents', action: () => navigate('/agents') },
    { id: 'nav-crons', label: 'Go to Crons', action: () => navigate('/crons') },
    { id: 'nav-queue', label: 'Go to Agent Queue', action: () => navigate('/queue') },
    { id: 'nav-knowledge', label: 'Go to Knowledge', action: () => navigate('/knowledge') },
    { id: 'nav-editor', label: 'Open Editor', action: () => navigate('/editor') },
  ];
  // Case-insensitive substring match; empty query shows everything.
  const filtered = query
    ? commands.filter((c) => c.label.toLowerCase().includes(query.toLowerCase()))
    : commands;
  // Global hotkeys: Cmd/Ctrl+K toggles (resetting the query), Escape closes.
  useEffect(() => {
    const handler = (e: KeyboardEvent) => {
      if ((e.metaKey || e.ctrlKey) && e.key === 'k') {
        e.preventDefault();
        setOpen((prev) => !prev);
        setQuery('');
      }
      if (e.key === 'Escape') setOpen(false);
    };
    window.addEventListener('keydown', handler);
    return () => window.removeEventListener('keydown', handler);
  }, []);
  // Focus the search box whenever the palette opens.
  useEffect(() => {
    if (open) inputRef.current?.focus();
  }, [open]);
  if (!open) return null;
  return (
    <div className="fixed inset-0 z-50 flex items-start justify-center pt-[20vh]" onClick={() => setOpen(false)}>
      <div className="fixed inset-0 bg-black/50" />
      <div
        className="relative w-full max-w-md rounded-lg border border-border bg-surface-raised shadow-2xl"
        onClick={(e) => e.stopPropagation()}
      >
        <input
          ref={inputRef}
          className="w-full rounded-t-lg border-b border-border bg-transparent px-4 py-3 text-sm text-text-primary outline-none"
          placeholder="Type a command..."
          value={query}
          onChange={(e) => setQuery(e.target.value)}
          onKeyDown={(e) => {
            if (e.key === 'Enter' && filtered.length > 0) {
              filtered[0].action();
              setOpen(false);
            }
          }}
        />
        <div className="max-h-64 overflow-auto py-1">
          {filtered.map((cmd) => (
            <button
              key={cmd.id}
              className="block w-full px-4 py-2 text-left text-sm text-text-secondary transition-colors hover:bg-surface-overlay hover:text-text-primary"
              onClick={() => {
                cmd.action();
                setOpen(false);
              }}
            >
              {cmd.label}
            </button>
          ))}
          {filtered.length === 0 && (
            <div className="px-4 py-2 text-sm text-text-muted">No matching commands</div>
          )}
        </div>
      </div>
    </div>
  );
}

View file

@ -0,0 +1,44 @@
import type { CronJob } from '../api/types';
import { StatusBadge } from './StatusBadge';
interface Props {
  cron: CronJob;
  // Fire the cron immediately.
  onTrigger: (name: string) => void;
  // Toggle pause/resume; receives the CURRENT active state (true = currently active).
  onToggle: (name: string, active: boolean) => void;
}
/** Table row for one cron job: schedule, agent, run count, status badges, actions. */
export function CronRow({ cron, onTrigger, onToggle }: Props) {
  const isActive = cron.status === 'active';
  return (
    <div className="flex items-center gap-4 border-b border-border px-4 py-3 last:border-b-0">
      <div className="flex-1">
        <div className="text-sm font-medium text-text-primary">{cron.title}</div>
        <div className="mt-0.5 flex items-center gap-3 text-xs text-text-secondary">
          <code className="rounded bg-surface-overlay px-1.5 py-0.5">{cron.schedule}</code>
          <span>agent: {cron.agent}</span>
          <span>runs: {cron.run_count}</span>
        </div>
      </div>
      <div className="flex items-center gap-3">
        {cron.last_status && <StatusBadge value={cron.last_status} />}
        <StatusBadge value={cron.status} />
        <button
          onClick={() => onToggle(cron.name, isActive)}
          className="rounded px-2 py-1 text-xs text-text-secondary transition-colors hover:bg-surface-overlay"
        >
          {isActive ? 'Pause' : 'Resume'}
        </button>
        <button
          onClick={() => onTrigger(cron.name)}
          className="rounded bg-accent/15 px-2 py-1 text-xs font-medium text-accent transition-colors hover:bg-accent/25"
        >
          Fire
        </button>
      </div>
    </div>
  );
}

View file

@ -0,0 +1,89 @@
import { useState } from 'react';
import type { TreeNode } from '../api/types';
interface Props {
  tree: TreeNode;
  selectedPath?: string;
  onSelect: (path: string) => void;
  // Invoked with the directory path on right-click (context menu) of a directory.
  onCreateFile?: (dir: string) => void;
}
/** Recursive vault file tree. Renders the root's children, not the root itself. */
export function FileTree({ tree, selectedPath, onSelect, onCreateFile }: Props) {
  return (
    <div className="text-sm">
      {tree.children?.map((node) => (
        <TreeItem
          key={node.path}
          node={node}
          depth={0}
          selectedPath={selectedPath}
          onSelect={onSelect}
          onCreateFile={onCreateFile}
        />
      ))}
    </div>
  );
}
/**
 * One tree row. Directories toggle open on click (top level starts expanded)
 * and offer "create file here" via right-click; files invoke onSelect.
 */
function TreeItem({
  node,
  depth,
  selectedPath,
  onSelect,
  onCreateFile,
}: {
  node: TreeNode;
  depth: number;
  selectedPath?: string;
  onSelect: (path: string) => void;
  onCreateFile?: (dir: string) => void;
}) {
  // Only depth 0 is expanded initially.
  const [expanded, setExpanded] = useState(depth < 1);
  const isDir = node.type === 'directory';
  const isSelected = node.path === selectedPath;
  // 12px indent per nesting level plus base padding.
  const pad = `${depth * 12 + 8}px`;
  if (isDir) {
    return (
      <div>
        <div
          className="group flex cursor-pointer items-center py-0.5 pr-2 text-text-secondary hover:bg-surface-overlay"
          style={{ paddingLeft: pad }}
          onClick={() => setExpanded(!expanded)}
          onContextMenu={(e) => {
            e.preventDefault();
            onCreateFile?.(node.path);
          }}
        >
          <span className="mr-1 text-xs text-text-muted">{expanded ? '\u25BE' : '\u25B8'}</span>
          <span className="truncate">{node.name}</span>
        </div>
        {expanded &&
          node.children?.map((child) => (
            <TreeItem
              key={child.path}
              node={child}
              depth={depth + 1}
              selectedPath={selectedPath}
              onSelect={onSelect}
              onCreateFile={onCreateFile}
            />
          ))}
      </div>
    );
  }
  return (
    <div
      className={`cursor-pointer truncate py-0.5 pr-2 transition-colors ${
        isSelected
          ? 'bg-accent/15 text-accent'
          : 'text-text-secondary hover:bg-surface-overlay hover:text-text-primary'
      }`}
      style={{ paddingLeft: `${depth * 12 + 20}px` }}
      onClick={() => onSelect(node.path)}
    >
      {node.name}
    </div>
  );
}

View file

@ -0,0 +1,77 @@
import {
DragDropContext,
Droppable,
Draggable,
type DropResult,
} from '@hello-pangea/dnd';
import type { HumanTask, TaskStatus } from '../api/types';
import { TaskCard } from './TaskCard';
// Column order and labels for the human-task board.
const COLUMNS: { id: TaskStatus; label: string }[] = [
  { id: 'urgent', label: 'Urgent' },
  { id: 'open', label: 'Open' },
  { id: 'in-progress', label: 'In Progress' },
  { id: 'done', label: 'Done' },
];
interface Props {
  tasks: HumanTask[];
  // Fired when a card is dropped into a different column; same-column drops are ignored.
  onMove: (id: string, fromStatus: string, toStatus: string) => void;
}
/** Drag-and-drop kanban board for human tasks, one droppable column per status. */
export function Kanban({ tasks, onMove }: Props) {
  const byStatus = (status: TaskStatus) => tasks.filter((t) => t.status === status);
  // Column ids double as droppableIds, task ids as draggableIds.
  const handleDragEnd = (result: DropResult) => {
    if (!result.destination) return;
    const fromStatus = result.source.droppableId;
    const toStatus = result.destination.droppableId;
    if (fromStatus === toStatus) return;
    onMove(result.draggableId, fromStatus, toStatus);
  };
  return (
    <DragDropContext onDragEnd={handleDragEnd}>
      <div className="flex gap-4 p-4">
        {COLUMNS.map((col) => {
          const items = byStatus(col.id);
          return (
            <div key={col.id} className="flex w-72 shrink-0 flex-col">
              <div className="mb-2 flex items-center justify-between px-1">
                <h3 className="text-sm font-semibold text-text-secondary">{col.label}</h3>
                <span className="text-xs text-text-muted">{items.length}</span>
              </div>
              <Droppable droppableId={col.id}>
                {(provided, snapshot) => (
                  <div
                    ref={provided.innerRef}
                    {...provided.droppableProps}
                    className={`flex min-h-[200px] flex-col gap-2 rounded-lg border border-border p-2 transition-colors ${
                      snapshot.isDraggingOver ? 'border-accent/40 bg-accent/5' : 'bg-surface'
                    }`}
                  >
                    {items.map((task, idx) => (
                      <Draggable key={task.id} draggableId={task.id} index={idx}>
                        {(prov) => (
                          <div
                            ref={prov.innerRef}
                            {...prov.draggableProps}
                            {...prov.dragHandleProps}
                          >
                            <TaskCard task={task} />
                          </div>
                        )}
                      </Draggable>
                    ))}
                    {provided.placeholder}
                  </div>
                )}
              </Droppable>
            </div>
          );
        })}
      </div>
    </DragDropContext>
  );
}

View file

@ -0,0 +1,13 @@
import { Outlet } from 'react-router-dom';
import { NavigationSidebar } from './NavigationSidebar';
/** App shell: fixed navigation sidebar beside a scrollable routed outlet. */
export function Layout() {
  return (
    <div className="flex h-screen">
      <NavigationSidebar />
      <main className="flex-1 overflow-auto">
        <Outlet />
      </main>
    </div>
  );
}

View file

@ -0,0 +1,96 @@
import { NavLink } from 'react-router-dom';
import { useHealth } from '../hooks/useApi';
import { useViewPages } from '../views/ViewRenderer';
/** Static built-in navigation entries */
const builtinNav = [
  // NOTE(review): icon '/' on the first entry looks like a placeholder — and
  // builtinNav icons are never rendered below; confirm intent.
  { to: '/', label: 'Tasks', icon: '/' },
  { to: '/agents', label: 'Agents' },
  { to: '/crons', label: 'Crons' },
  { to: '/queue', label: 'Queue' },
  { to: '/knowledge', label: 'Knowledge' },
  { to: '/editor', label: 'Editor' },
];
/** Left sidebar: built-in routes, user-defined view pages, and daemon health footer. */
export function NavigationSidebar() {
  const { data: health } = useHealth();
  const { data: viewPages } = useViewPages();
  // Build dynamic nav entries from view pages, sorted by position
  const dynamicNav = (viewPages || [])
    .filter((p) => p.route && p.title)
    .sort((a, b) => (a.position ?? 100) - (b.position ?? 100))
    .map((p) => ({
      to: p.route!.startsWith('/') ? p.route! : `/${p.route}`,
      label: p.title || p.name,
      icon: p.icon,
    }));
  return (
    <aside className="flex w-52 shrink-0 flex-col border-r border-border bg-surface-raised">
      <div className="flex items-center gap-2 border-b border-border px-4 py-3">
        <span className="text-lg font-bold text-accent">vault:os</span>
      </div>
      <nav className="flex-1 overflow-auto px-2 py-3">
        <div className="space-y-0.5">
          {builtinNav.map((item) => (
            <NavLink
              key={item.to}
              to={item.to}
              end={item.to === '/'}
              className={({ isActive }) =>
                `block rounded-md px-3 py-1.5 text-sm transition-colors ${
                  isActive
                    ? 'bg-accent/15 text-accent font-medium'
                    : 'text-text-secondary hover:bg-surface-overlay hover:text-text-primary'
                }`
              }
            >
              {item.label}
            </NavLink>
          ))}
        </div>
        {dynamicNav.length > 0 && (
          <>
            <div className="my-3 border-t border-border" />
            <div className="mb-1 px-3 text-[10px] font-semibold uppercase tracking-wider text-text-muted">
              Views
            </div>
            <div className="space-y-0.5">
              {dynamicNav.map((item) => (
                <NavLink
                  key={item.to}
                  to={item.to}
                  className={({ isActive }) =>
                    `block rounded-md px-3 py-1.5 text-sm transition-colors ${
                      isActive
                        ? 'bg-accent/15 text-accent font-medium'
                        : 'text-text-secondary hover:bg-surface-overlay hover:text-text-primary'
                    }`
                  }
                >
                  {item.icon && <span className="mr-1.5">{item.icon}</span>}
                  {item.label}
                </NavLink>
              ))}
            </div>
          </>
        )}
      </nav>
      <div className="border-t border-border px-4 py-3 text-xs text-text-muted">
        {health ? (
          <>
            <div>v{health.version}</div>
            <div>{health.agents} agents</div>
            <div>{health.crons_scheduled} crons</div>
          </>
        ) : (
          <div>connecting...</div>
        )}
      </div>
    </aside>
  );
}

View file

@ -0,0 +1,49 @@
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import { listNotifications, dismissNotification } from '../api/client';
// Banner colour classes per notification level; unknown levels fall back to info.
const LEVEL_STYLES: Record<string, string> = {
  info: 'bg-accent/10 border-accent/30 text-accent',
  warning: 'bg-warning/10 border-warning/30 text-warning',
  error: 'bg-danger/10 border-danger/30 text-danger',
  success: 'bg-success/10 border-success/30 text-success',
};
/** Stack of dismissible notification banners, polled every 30 s. */
export function NotificationBanner() {
  const queryClient = useQueryClient();
  const { data: notifications } = useQuery({
    queryKey: ['notifications'],
    queryFn: listNotifications,
    refetchInterval: 30000,
  });
  // Dismissing invalidates the list so the banner disappears immediately.
  const dismiss = useMutation({
    mutationFn: dismissNotification,
    onSuccess: () => queryClient.invalidateQueries({ queryKey: ['notifications'] }),
  });
  if (!notifications?.length) return null;
  return (
    <div className="space-y-2 px-6 pt-4">
      {notifications.map((n) => (
        <div
          key={n.id}
          className={`flex items-start gap-3 rounded-md border px-3 py-2 text-sm ${
            LEVEL_STYLES[n.level || 'info'] || LEVEL_STYLES.info
          }`}
        >
          <div className="flex-1">
            <div className="font-medium">{n.title}</div>
            {n.message && <div className="mt-0.5 text-xs opacity-80">{n.message}</div>}
          </div>
          <button
            onClick={() => dismiss.mutate(n.id)}
            className="shrink-0 text-xs opacity-60 hover:opacity-100"
          >
            dismiss
          </button>
        </div>
      ))}
    </div>
  );
}

View file

@ -0,0 +1,26 @@
// Badge colour classes keyed by every status/priority value used across the app
// (priorities, task statuses, agent-task statuses, cron statuses, run outcomes).
const styles: Record<string, string> = {
  urgent: 'bg-urgent/20 text-urgent',
  high: 'bg-danger/20 text-danger',
  medium: 'bg-warning/20 text-warning',
  low: 'bg-text-muted/20 text-text-secondary',
  open: 'bg-accent/20 text-accent',
  'in-progress': 'bg-warning/20 text-warning',
  done: 'bg-success/20 text-success',
  queued: 'bg-text-muted/20 text-text-secondary',
  running: 'bg-accent/20 text-accent',
  failed: 'bg-danger/20 text-danger',
  active: 'bg-success/20 text-success',
  paused: 'bg-text-muted/20 text-text-secondary',
  success: 'bg-success/20 text-success',
  failure: 'bg-danger/20 text-danger',
  timeout: 'bg-warning/20 text-warning',
};
/** Small pill showing a status/priority value with its mapped colour; greys out unknowns. */
export function StatusBadge({ value }: { value: string }) {
  const cls = styles[value] || 'bg-surface-overlay text-text-secondary';
  return (
    <span className={`inline-block rounded px-2 py-0.5 text-xs font-medium ${cls}`}>
      {value}
    </span>
  );
}

View file

@ -0,0 +1,35 @@
import type { HumanTask } from '../api/types';
import { StatusBadge } from './StatusBadge';
/** Kanban card for a human task: title, priority badge, labels, age, source. */
export function TaskCard({ task }: { task: HumanTask }) {
  const age = timeAgo(task.created);
  return (
    <div className="rounded-lg border border-border bg-surface-raised p-3 transition-colors hover:border-border-hover">
      <div className="mb-1.5 text-sm font-medium text-text-primary">{task.title}</div>
      <div className="flex flex-wrap items-center gap-1.5">
        <StatusBadge value={task.priority} />
        {task.labels.map((l) => (
          <span key={l} className="rounded bg-surface-overlay px-1.5 py-0.5 text-xs text-text-secondary">
            {l}
          </span>
        ))}
      </div>
      <div className="mt-2 flex items-center justify-between text-xs text-text-muted">
        <span>{age}</span>
        {task.source && <span>via {task.source}</span>}
      </div>
    </div>
  );
}
/** Human-friendly relative age: "just now", "5m ago", "3h ago", "2d ago". */
function timeAgo(iso: string): string {
  const minutes = Math.floor((Date.now() - new Date(iso).getTime()) / 60000);
  if (minutes < 1) return 'just now';
  if (minutes < 60) return `${minutes}m ago`;
  const hours = Math.floor(minutes / 60);
  if (hours < 24) return `${hours}h ago`;
  return `${Math.floor(hours / 24)}d ago`;
}

View file

@ -0,0 +1,185 @@
import { useState, useRef, useEffect } from 'react';
import { ModelSelector } from './ModelSelector';
import { DiffView } from './DiffView';
// One turn of the chat conversation.
interface Message {
  role: 'user' | 'assistant';
  content: string;
}
interface Props {
  // Current editor file; sent to the assistant as context and used as the diff target.
  filePath?: string;
  onClose: () => void;
}
/** Extract unified diff blocks from markdown-formatted assistant response */
function extractDiffs(content: string): string[] {
  const fencedBlock = /```(?:diff)?\n([\s\S]*?)```/g;
  const found: string[] = [];
  for (const m of content.matchAll(fencedBlock)) {
    const candidate = m[1].trim();
    // Only keep blocks that actually look like unified diffs.
    const looksLikeDiff =
      candidate.includes('@@') || candidate.startsWith('---') || candidate.startsWith('diff ');
    if (looksLikeDiff) found.push(candidate);
  }
  return found;
}
/**
 * AI assistant sidebar. Keeps the conversation client-side and resends it in
 * full on every request; assistant replies containing fenced diff blocks get
 * apply/reject widgets targeting `filePath`.
 */
export function ChatSidebar({ filePath, onClose }: Props) {
  const [messages, setMessages] = useState<Message[]>([]);
  const [input, setInput] = useState('');
  const [model, setModel] = useState('local/qwen3');
  const [loading, setLoading] = useState(false);
  // Diff currently being applied, so exactly that widget can show "Applying...".
  const [applyingDiff, setApplyingDiff] = useState<string | null>(null);
  const scrollRef = useRef<HTMLDivElement>(null);
  // Keep the newest message in view.
  useEffect(() => {
    scrollRef.current?.scrollTo({ top: scrollRef.current.scrollHeight });
  }, [messages]);
  // POST the full conversation to the assistant proxy; errors become
  // assistant-role messages rather than throwing.
  const sendMessage = async () => {
    const text = input.trim();
    if (!text || loading) return;
    const userMsg: Message = { role: 'user', content: text };
    const newMessages = [...messages, userMsg];
    setMessages(newMessages);
    setInput('');
    setLoading(true);
    try {
      const res = await fetch('/api/assistant/chat', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          messages: newMessages.map((m) => ({ role: m.role, content: m.content })),
          model,
          file_path: filePath,
        }),
      });
      if (!res.ok) {
        const err = await res.json().catch(() => ({ error: 'Request failed' }));
        setMessages([...newMessages, { role: 'assistant', content: `Error: ${err.error}` }]);
        return;
      }
      const data = await res.json();
      setMessages([...newMessages, { role: 'assistant', content: data.message.content }]);
    } catch (e) {
      setMessages([
        ...newMessages,
        { role: 'assistant', content: `Error: ${e instanceof Error ? e.message : 'Unknown error'}` },
      ]);
    } finally {
      setLoading(false);
    }
  };
  // Ask the server to apply a unified diff to the current file; failures alert.
  const applyDiff = async (diff: string) => {
    if (!filePath) return;
    setApplyingDiff(diff);
    try {
      const res = await fetch('/api/assistant/apply-diff', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ file_path: filePath, diff }),
      });
      if (!res.ok) {
        const err = await res.json().catch(() => ({ error: 'Apply failed' }));
        alert(`Failed to apply diff: ${err.error}`);
      }
    } finally {
      setApplyingDiff(null);
    }
  };
  // NOTE(review): intentionally a no-op today — rejecting leaves the diff
  // visible in the transcript; confirm whether it should hide the widget.
  const removeDiff = (diff: string) => {
    // Remove the diff from the last assistant message display (user chose to reject)
    void diff;
  };
  return (
    <div className="flex h-full w-80 shrink-0 flex-col border-l border-border bg-surface-raised">
      {/* Header */}
      <div className="flex items-center justify-between border-b border-border px-3 py-2">
        <span className="text-sm font-medium text-text-primary">Assistant</span>
        <div className="flex items-center gap-2">
          <ModelSelector value={model} onChange={setModel} />
          <button onClick={onClose} className="text-text-muted hover:text-text-primary">
            &times;
          </button>
        </div>
      </div>
      {/* Messages */}
      <div ref={scrollRef} className="flex-1 space-y-3 overflow-auto p-3">
        {messages.length === 0 && (
          <div className="text-xs text-text-muted">
            Ask about the current file or request edits. The assistant will suggest diffs you can
            apply directly.
          </div>
        )}
        {messages.map((msg, i) => (
          <div key={i}>
            <div
              className={`rounded-md px-3 py-2 text-sm ${
                msg.role === 'user'
                  ? 'ml-4 bg-accent/10 text-text-primary'
                  : 'mr-4 bg-surface-overlay text-text-secondary'
              }`}
            >
              <div className="whitespace-pre-wrap">{msg.content}</div>
            </div>
            {/* Render extractable diffs as apply/reject widgets */}
            {msg.role === 'assistant' &&
              extractDiffs(msg.content).map((diff, di) => (
                <div key={di} className="mt-2">
                  <DiffView
                    diff={diff}
                    onApply={() => applyDiff(diff)}
                    onReject={() => removeDiff(diff)}
                    applying={applyingDiff === diff}
                  />
                </div>
              ))}
          </div>
        ))}
        {loading && (
          <div className="mr-4 rounded-md bg-surface-overlay px-3 py-2 text-sm text-text-muted">
            Thinking...
          </div>
        )}
      </div>
      {/* Input */}
      <div className="border-t border-border p-3">
        <div className="flex gap-2">
          <input
            value={input}
            onChange={(e) => setInput(e.target.value)}
            onKeyDown={(e) => {
              if (e.key === 'Enter' && !e.shiftKey) {
                e.preventDefault();
                sendMessage();
              }
            }}
            placeholder="Ask about this file..."
            className="flex-1 rounded border border-border bg-surface-base px-2 py-1.5 text-sm text-text-primary outline-none placeholder:text-text-muted focus:border-accent"
          />
          <button
            onClick={sendMessage}
            disabled={loading || !input.trim()}
            className="rounded bg-accent px-3 py-1.5 text-sm text-white hover:bg-accent/80 disabled:opacity-50"
          >
            Send
          </button>
        </div>
        {filePath && (
          <div className="mt-1.5 truncate text-[10px] text-text-muted">Context: {filePath}</div>
        )}
      </div>
    </div>
  );
}

View file

@ -0,0 +1,51 @@
interface Props {
  // Unified diff text to render.
  diff: string;
  onApply: () => void;
  onReject: () => void;
  // When true, buttons are disabled and Apply shows a progress label.
  applying?: boolean;
}
/** Read-only coloured rendering of a unified diff with Apply/Reject actions. */
export function DiffView({ diff, onApply, onReject, applying }: Props) {
  const lines = diff.split('\n');
  return (
    <div className="rounded border border-border bg-surface-overlay">
      <div className="flex items-center justify-between border-b border-border px-3 py-1.5">
        <span className="text-xs font-medium text-text-secondary">Suggested Changes</span>
        <div className="flex gap-1.5">
          <button
            onClick={onReject}
            disabled={applying}
            className="rounded px-2 py-0.5 text-xs text-text-muted hover:bg-danger/10 hover:text-danger"
          >
            Reject
          </button>
          <button
            onClick={onApply}
            disabled={applying}
            className="rounded bg-accent px-2 py-0.5 text-xs text-white hover:bg-accent/80 disabled:opacity-50"
          >
            {applying ? 'Applying...' : 'Apply'}
          </button>
        </div>
      </div>
      <pre className="overflow-auto p-2 text-xs leading-relaxed">
        {lines.map((line, i) => {
          // Colour additions/removals but not the +++/--- file headers; hunk
          // headers (@@) get the accent colour.
          let cls = 'text-text-secondary';
          if (line.startsWith('+') && !line.startsWith('+++')) {
            cls = 'text-success bg-success/10';
          } else if (line.startsWith('-') && !line.startsWith('---')) {
            cls = 'text-danger bg-danger/10';
          } else if (line.startsWith('@@')) {
            cls = 'text-accent';
          }
          return (
            <div key={i} className={cls}>
              {line}
            </div>
          );
        })}
      </pre>
    </div>
  );
}

View file

@ -0,0 +1,31 @@
import { useQuery } from '@tanstack/react-query';
/** Fetch the model list exposed by the assistant proxy. */
async function fetchModels() {
  const response = await fetch('/api/assistant/models');
  if (!response.ok) throw new Error('Failed to fetch models');
  return (await response.json()) as { id: string; name: string }[];
}
interface Props {
  // Currently selected model id.
  value: string;
  onChange: (model: string) => void;
}
/** Dropdown of available assistant models; falls back to the current value
 *  as a lone option while the list is loading or empty. */
export function ModelSelector({ value, onChange }: Props) {
  const { data: models } = useQuery({ queryKey: ['assistant-models'], queryFn: fetchModels });
  return (
    <select
      value={value}
      onChange={(e) => onChange(e.target.value)}
      className="rounded border border-border bg-surface-raised px-2 py-1 text-xs text-text-secondary outline-none focus:border-accent"
    >
      {(models || []).map((m) => (
        <option key={m.id} value={m.id}>
          {m.name}
        </option>
      ))}
      {!models?.length && <option value={value}>{value}</option>}
    </select>
  );
}

View file

@ -0,0 +1,254 @@
import { useState, useEffect } from 'react';
import { MarkdownEditor } from './MarkdownEditor';
import { MarkdownPreview, renderMarkdown } from './MarkdownPreview';
import { AgentForm } from '../forms/AgentForm';
import { CronForm } from '../forms/CronForm';
import { HumanTaskForm } from '../forms/HumanTaskForm';
import { AgentTaskForm } from '../forms/AgentTaskForm';
import { SkillForm } from '../forms/SkillForm';
import { KnowledgeForm } from '../forms/KnowledgeForm';
import { readFile, writeFile } from '../../api/client';
type ViewMode = 'edit' | 'preview' | 'split';
type FmMode = 'form' | 'yaml';

interface Props {
  /** Vault-relative path of the markdown file being edited. */
  path: string;
  /** Invoked after a successful save (e.g. so the parent can refresh its file tree). */
  onSaved?: () => void;
  /** When provided, renders an "AI" button that toggles the assistant sidebar. */
  onToggleAssistant?: () => void;
}

/**
 * Markdown file editor: a frontmatter panel (structured form or raw YAML),
 * a body editor with edit/split/preview view modes, and Ctrl/Cmd+S saving.
 * Entity type (and hence which form to show) is inferred from the path.
 */
export function FileEditor({ path, onSaved, onToggleAssistant }: Props) {
  const [frontmatter, setFrontmatter] = useState<Record<string, unknown>>({});
  const [body, setBody] = useState('');
  // Raw YAML mirror of `frontmatter`, used when fmMode === 'yaml'.
  const [rawYaml, setRawYaml] = useState('');
  const [viewMode, setViewMode] = useState<ViewMode>('edit');
  const [fmMode, setFmMode] = useState<FmMode>('form');
  const [dirty, setDirty] = useState(false);
  const [saving, setSaving] = useState(false);
  const [error, setError] = useState<string>();
  const entityType = detectEntityType(path);
  // Load file contents whenever the selected path changes.
  useEffect(() => {
    if (!path) return;
    setError(undefined);
    readFile(path)
      .then((data) => {
        const fm = (data.frontmatter as Record<string, unknown>) || {};
        setFrontmatter(fm);
        setBody(data.body || '');
        // Keep the YAML mirror in sync so toggling to YAML mode shows current values.
        setRawYaml(toYaml(fm));
        setDirty(false);
      })
      .catch((e) => setError(e.message));
  }, [path]);
  // Form edits update both the structured object and its YAML mirror.
  const handleFmChange = (values: Record<string, unknown>) => {
    setFrontmatter(values);
    setRawYaml(toYaml(values));
    setDirty(true);
  };
  const handleYamlChange = (yaml: string) => {
    setRawYaml(yaml);
    try {
      // We don't parse YAML client-side; just track the raw value
      // NOTE(review): nothing in this try can throw — the catch below is dead code.
      setDirty(true);
    } catch {
      // ignore parse errors during editing
    }
  };
  const handleBodyChange = (value: string) => {
    setBody(value);
    setDirty(true);
  };
  // Persist the file. In YAML mode the raw frontmatter text is sent verbatim
  // (wrapped in --- fences); in form mode the structured object is sent.
  const handleSave = async () => {
    setSaving(true);
    setError(undefined);
    try {
      if (fmMode === 'yaml') {
        // Send raw content
        const raw = rawYaml.trim()
          ? `---\n${rawYaml.trimEnd()}\n---\n${body}`
          : body;
        await writeFile(path, { raw });
      } else {
        await writeFile(path, { frontmatter, body });
      }
      setDirty(false);
      onSaved?.();
    } catch (e: unknown) {
      setError(e instanceof Error ? e.message : 'Save failed');
    } finally {
      setSaving(false);
    }
  };
  // Ctrl+S / Cmd+S
  // NOTE(review): no dependency array — the listener is re-registered on every
  // render, which keeps `dirty`/`handleSave` fresh but is worth a deps cleanup.
  useEffect(() => {
    const handler = (e: KeyboardEvent) => {
      if ((e.metaKey || e.ctrlKey) && e.key === 's') {
        e.preventDefault();
        if (dirty) handleSave();
      }
    };
    window.addEventListener('keydown', handler);
    return () => window.removeEventListener('keydown', handler);
  });
  return (
    <div className="flex h-full flex-col">
      {/* Toolbar */}
      <div className="flex items-center justify-between border-b border-border px-4 py-2">
        <div className="flex items-center gap-2">
          <span className="truncate text-sm text-text-secondary">{path}</span>
          {dirty && <span className="text-xs text-warning">unsaved</span>}
        </div>
        <div className="flex items-center gap-2">
          {/* View mode toggle */}
          {(['edit', 'split', 'preview'] as ViewMode[]).map((m) => (
            <button
              key={m}
              onClick={() => setViewMode(m)}
              className={`rounded px-2 py-1 text-xs transition-colors ${
                viewMode === m
                  ? 'bg-accent/15 text-accent'
                  : 'text-text-muted hover:text-text-secondary'
              }`}
            >
              {m}
            </button>
          ))}
          <span className="mx-1 text-border">|</span>
          {/* Frontmatter mode toggle */}
          <button
            onClick={() => setFmMode(fmMode === 'form' ? 'yaml' : 'form')}
            className="rounded px-2 py-1 text-xs text-text-muted hover:text-text-secondary"
          >
            {fmMode === 'form' ? 'YAML' : 'Form'}
          </button>
          {onToggleAssistant && (
            <button
              onClick={onToggleAssistant}
              className="rounded px-2 py-1 text-xs text-text-muted hover:text-accent"
              title="Toggle AI Assistant"
            >
              AI
            </button>
          )}
          <button
            onClick={handleSave}
            disabled={!dirty || saving}
            className="rounded bg-accent px-3 py-1 text-xs font-medium text-white transition-colors hover:bg-accent-hover disabled:opacity-50"
          >
            {saving ? 'Saving...' : 'Save'}
          </button>
        </div>
      </div>
      {error && (
        <div className="border-b border-danger/30 bg-danger/10 px-4 py-2 text-xs text-danger">{error}</div>
      )}
      <div className="flex min-h-0 flex-1">
        {/* Frontmatter panel */}
        <div className="w-72 shrink-0 overflow-auto border-r border-border">
          {fmMode === 'form' ? (
            renderEntityForm(entityType, frontmatter, handleFmChange)
          ) : (
            <div className="h-full">
              <MarkdownEditor value={rawYaml} onChange={handleYamlChange} placeholder="YAML frontmatter..." />
            </div>
          )}
        </div>
        {/* Body editor / preview */}
        <div className="flex min-w-0 flex-1">
          {(viewMode === 'edit' || viewMode === 'split') && (
            <div className={`${viewMode === 'split' ? 'w-1/2' : 'w-full'} min-w-0`}>
              <MarkdownEditor value={body} onChange={handleBodyChange} placeholder="Write markdown..." />
            </div>
          )}
          {(viewMode === 'preview' || viewMode === 'split') && (
            <div
              className={`${viewMode === 'split' ? 'w-1/2 border-l border-border' : 'w-full'} overflow-auto`}
            >
              <MarkdownPreview html={renderMarkdown(body)} />
            </div>
          )}
        </div>
      </div>
    </div>
  );
}
type EntityType = 'agent' | 'skill' | 'cron' | 'human-task' | 'agent-task' | 'knowledge' | 'generic';

/**
 * Map a vault-relative file path to its entity type based on the
 * directory it lives in. Unrecognized locations are 'generic'.
 */
function detectEntityType(path: string): EntityType {
  const byPrefix: [string, EntityType][] = [
    ['agents/', 'agent'],
    ['skills/', 'skill'],
    ['crons/', 'cron'],
    ['todos/harald/', 'human-task'],
    ['todos/agent/', 'agent-task'],
    ['knowledge/', 'knowledge'],
  ];
  const hit = byPrefix.find(([prefix]) => path.startsWith(prefix));
  return hit ? hit[1] : 'generic';
}
/**
 * Render the structured frontmatter form matching an entity type.
 * Generic files get a hint to switch to raw YAML mode instead.
 */
function renderEntityForm(
  type: EntityType,
  values: Record<string, unknown>,
  onChange: (v: Record<string, unknown>) => void,
) {
  switch (type) {
    case 'agent':
      return <AgentForm values={values} onChange={onChange} />;
    case 'skill':
      return <SkillForm values={values} onChange={onChange} />;
    case 'cron':
      return <CronForm values={values} onChange={onChange} />;
    case 'human-task':
      return <HumanTaskForm values={values} onChange={onChange} />;
    case 'agent-task':
      return <AgentTaskForm values={values} onChange={onChange} />;
    case 'knowledge':
      return <KnowledgeForm values={values} onChange={onChange} />;
    default:
      return (
        <div className="p-4 text-xs text-text-muted">
          No structured form for this file type. Switch to YAML mode.
        </div>
      );
  }
}
/**
 * Minimal YAML serializer for the form -> YAML toggle.
 * Supports scalars, string arrays, multi-line strings (block scalars) and
 * falls back to JSON flow syntax for nested objects.
 *
 * Strings that a YAML parser would misread as plain scalars (containing
 * ':' or '#', starting with an indicator character, looking like a
 * boolean/null/number, or with leading/trailing whitespace) are emitted
 * double-quoted so the output round-trips safely.
 */
function toYaml(obj: Record<string, unknown>): string {
  const lines: string[] = [];
  for (const [key, value] of Object.entries(obj)) {
    if (value === undefined || value === null) continue;
    if (Array.isArray(value)) {
      if (value.length === 0) {
        lines.push(`${key}: []`);
      } else {
        lines.push(`${key}:`);
        for (const item of value) {
          lines.push(`  - ${typeof item === 'string' ? yamlScalar(item) : JSON.stringify(item)}`);
        }
      }
    } else if (typeof value === 'object') {
      // Nested objects: JSON is valid YAML flow style.
      lines.push(`${key}: ${JSON.stringify(value)}`);
    } else if (typeof value === 'string' && value.includes('\n')) {
      // Block scalar keeps multi-line strings readable.
      lines.push(`${key}: |`);
      for (const line of value.split('\n')) {
        lines.push(`  ${line}`);
      }
    } else if (typeof value === 'string') {
      lines.push(`${key}: ${yamlScalar(value)}`);
    } else {
      // number | boolean
      lines.push(`${key}: ${value}`);
    }
  }
  return lines.join('\n') + '\n';
}

/** Emit a single-line string, double-quoting it when bare YAML would change its meaning. */
function yamlScalar(s: string): string {
  const safePlain =
    s !== '' &&
    !/^\s|\s$/.test(s) &&
    !/[:#]/.test(s) &&
    !/^[-?&*!|>'"%@`{}[\],]/.test(s) &&
    !/^(true|false|null|~|yes|no|on|off)$/i.test(s) &&
    !/^[+-]?[\d.]/.test(s);
  // JSON double-quoted strings are valid YAML double-quoted scalars.
  return safePlain ? s : JSON.stringify(s);
}

View file

@ -0,0 +1,190 @@
import { useState } from 'react';
interface Props {
  /** Field schema describing what inputs to render. */
  fields: FieldDef[];
  /** Current frontmatter values keyed by field name. */
  values: Record<string, unknown>;
  /** Called with the full updated values object on any field change. */
  onChange: (values: Record<string, unknown>) => void;
}

/** Declarative description of one frontmatter field and its input widget. */
export interface FieldDef {
  name: string;
  label: string;
  type: 'text' | 'textarea' | 'number' | 'select' | 'tags' | 'datetime' | 'checkbox' | 'json';
  /** Choices for type === 'select'. */
  options?: string[];
  placeholder?: string;
  /** Purely visual: renders a required marker; no validation is performed here. */
  required?: boolean;
}

/**
 * Schema-driven form for editing YAML frontmatter as structured fields.
 * Each change merges the single updated field into the values object.
 */
export function FrontmatterForm({ fields, values, onChange }: Props) {
  const update = (name: string, value: unknown) => {
    onChange({ ...values, [name]: value });
  };
  return (
    <div className="space-y-3 p-4">
      {fields.map((field) => (
        <div key={field.name}>
          <label className="mb-1 block text-xs font-medium text-text-secondary">
            {field.label}
            {field.required && <span className="text-danger"> *</span>}
          </label>
          <FieldInput field={field} value={values[field.name]} onChange={(v) => update(field.name, v)} />
        </div>
      ))}
    </div>
  );
}
/**
 * Render the input widget for a single field based on its declared type.
 * Values are passed through as `unknown` and cast per widget; empty
 * numeric/datetime inputs propagate `undefined` so the key can be dropped.
 */
function FieldInput({
  field,
  value,
  onChange,
}: {
  field: FieldDef;
  value: unknown;
  onChange: (v: unknown) => void;
}) {
  const cls =
    'w-full rounded border border-border bg-surface px-3 py-1.5 text-sm text-text-primary outline-none focus:border-accent';
  switch (field.type) {
    case 'text':
      return (
        <input
          className={cls}
          value={(value as string) || ''}
          onChange={(e) => onChange(e.target.value)}
          placeholder={field.placeholder}
        />
      );
    case 'textarea':
      return (
        <textarea
          className={`${cls} min-h-[60px] resize-y`}
          value={(value as string) || ''}
          onChange={(e) => onChange(e.target.value)}
          placeholder={field.placeholder}
        />
      );
    case 'number':
      return (
        <input
          type="number"
          className={cls}
          value={(value as number) ?? ''}
          // Empty input clears the field (undefined) rather than storing NaN/0.
          onChange={(e) => onChange(e.target.value ? Number(e.target.value) : undefined)}
          placeholder={field.placeholder}
        />
      );
    case 'select':
      return (
        <select className={cls} value={(value as string) || ''} onChange={(e) => onChange(e.target.value)}>
          <option value=""></option>
          {field.options?.map((o) => (
            <option key={o} value={o}>
              {o}
            </option>
          ))}
        </select>
      );
    case 'tags':
      return <TagsInput value={(value as string[]) || []} onChange={onChange} />;
    case 'datetime':
      return (
        <input
          type="datetime-local"
          className={cls}
          value={toDatetimeLocal((value as string) || '')}
          // Store as an ISO-8601 UTC string; empty input clears the field.
          onChange={(e) => onChange(e.target.value ? new Date(e.target.value).toISOString() : undefined)}
        />
      );
    case 'checkbox':
      return (
        <input
          type="checkbox"
          className="h-4 w-4 rounded border-border"
          checked={!!value}
          onChange={(e) => onChange(e.target.checked)}
        />
      );
    case 'json':
      return (
        <textarea
          className={`${cls} min-h-[60px] resize-y font-mono text-xs`}
          value={typeof value === 'string' ? value : JSON.stringify(value, null, 2) || ''}
          onChange={(e) => {
            // Keep the raw string while the JSON is incomplete; store the
            // parsed object as soon as the text becomes valid JSON.
            try {
              onChange(JSON.parse(e.target.value));
            } catch {
              onChange(e.target.value);
            }
          }}
          placeholder={field.placeholder || '{}'}
        />
      );
    default:
      return null;
  }
}
/**
 * Tag list editor: existing tags render as removable chips; Enter in the
 * text input appends a new (trimmed, deduplicated) tag.
 */
function TagsInput({ value, onChange }: { value: string[]; onChange: (v: string[]) => void }) {
  const [input, setInput] = useState('');
  const add = () => {
    const tag = input.trim();
    if (tag && !value.includes(tag)) {
      onChange([...value, tag]);
    }
    // Clear the input even when the tag was empty or a duplicate.
    setInput('');
  };
  return (
    <div>
      <div className="mb-1 flex flex-wrap gap-1">
        {value.map((tag) => (
          <span
            key={tag}
            className="inline-flex items-center gap-1 rounded bg-surface-overlay px-2 py-0.5 text-xs text-text-secondary"
          >
            {tag}
            <button
              onClick={() => onChange(value.filter((t) => t !== tag))}
              className="text-text-muted hover:text-danger"
            >
              x
            </button>
          </span>
        ))}
      </div>
      <input
        className="w-full rounded border border-border bg-surface px-3 py-1.5 text-sm text-text-primary outline-none focus:border-accent"
        value={input}
        onChange={(e) => setInput(e.target.value)}
        onKeyDown={(e) => {
          if (e.key === 'Enter') {
            // Prevent the surrounding form's default submit behavior.
            e.preventDefault();
            add();
          }
        }}
        placeholder="Add tag..."
      />
    </div>
  );
}
/**
 * Convert an ISO-8601 timestamp to the `YYYY-MM-DDTHH:mm` format expected
 * by `<input type="datetime-local">`. Returns '' for empty/invalid input.
 *
 * Note: datetime-local values are LOCAL wall-clock time, so we must format
 * from local date components — `toISOString().slice(0, 16)` would display
 * the UTC time, shifted from the user's timezone.
 */
function toDatetimeLocal(iso: string): string {
  if (!iso) return '';
  const d = new Date(iso);
  if (Number.isNaN(d.getTime())) return '';
  const pad = (n: number) => String(n).padStart(2, '0');
  return `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}T${pad(d.getHours())}:${pad(d.getMinutes())}`;
}

View file

@ -0,0 +1,92 @@
import { useEffect, useRef } from 'react';
import { EditorState } from '@codemirror/state';
import { EditorView, keymap, placeholder as phPlugin } from '@codemirror/view';
import { defaultKeymap, history, historyKeymap } from '@codemirror/commands';
import { markdown } from '@codemirror/lang-markdown';
import { oneDark } from '@codemirror/theme-one-dark';
import {
autocompletion,
type CompletionContext,
type CompletionResult,
} from '@codemirror/autocomplete';
import { suggestFiles } from '../../api/client';
interface Props {
  /** Current document text (kept in sync with the external state). */
  value: string;
  /** Fired on every document change with the full new text. */
  onChange: (value: string) => void;
  placeholder?: string;
}

/**
 * Wiki-link `[[...]]` async autocompletion source.
 * Matches an unclosed `[[` before the cursor, queries the API for file
 * suggestions (failures degrade to an empty list), and completes each
 * option with a closing `]]`.
 */
async function wikiLinkCompletion(ctx: CompletionContext): Promise<CompletionResult | null> {
  const before = ctx.matchBefore(/\[\[[^\]]*$/);
  if (!before) return null;
  const query = before.text.slice(2); // strip [[
  const files = await suggestFiles(query).catch(() => [] as string[]);
  return {
    from: before.from + 2,
    // Server already filtered; don't let CodeMirror re-filter the options.
    filter: false,
    options: files.map((f) => ({
      label: f,
      apply: f + ']]',
    })),
  };
}
/**
 * CodeMirror 6 markdown editor. The editor instance is created once on
 * mount; external `value` changes are synced into the existing document,
 * and `onChange` is read through a ref so the update listener never goes
 * stale without re-creating the editor.
 */
export function MarkdownEditor({ value, onChange, placeholder }: Props) {
  const containerRef = useRef<HTMLDivElement>(null);
  const viewRef = useRef<EditorView | null>(null);
  // Latest onChange, readable from the (once-created) update listener.
  const onChangeRef = useRef(onChange);
  onChangeRef.current = onChange;
  useEffect(() => {
    if (!containerRef.current) return;
    const state = EditorState.create({
      doc: value,
      extensions: [
        keymap.of([...defaultKeymap, ...historyKeymap]),
        history(),
        markdown(),
        oneDark,
        // [[wiki-link]] completion backed by the API.
        autocompletion({ override: [wikiLinkCompletion] }),
        EditorView.updateListener.of((update) => {
          if (update.docChanged) {
            onChangeRef.current(update.state.doc.toString());
          }
        }),
        EditorView.theme({
          '&': { height: '100%', fontSize: '14px' },
          '.cm-scroller': { overflow: 'auto' },
          '.cm-content': { fontFamily: "'JetBrains Mono', 'Fira Code', monospace" },
        }),
        ...(placeholder ? [phPlugin(placeholder)] : []),
      ],
    });
    const view = new EditorView({ state, parent: containerRef.current });
    viewRef.current = view;
    return () => {
      view.destroy();
      viewRef.current = null;
    };
    // Only create editor once
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);
  // Sync external value changes (e.g., file load) without re-creating editor
  useEffect(() => {
    const view = viewRef.current;
    if (!view) return;
    const current = view.state.doc.toString();
    if (current !== value) {
      // Replace the whole document in one transaction.
      view.dispatch({
        changes: { from: 0, to: current.length, insert: value },
      });
    }
  }, [value]);
  return <div ref={containerRef} className="h-full" />;
}

View file

@ -0,0 +1,27 @@
interface Props {
  /** Pre-rendered HTML to display; MUST already be sanitized/escaped. */
  html: string;
}

/**
 * Renders pre-built HTML inside a prose-styled article.
 * NOTE(review): uses dangerouslySetInnerHTML — safety depends entirely on
 * the producer of `html` escaping untrusted input.
 */
export function MarkdownPreview({ html }: Props) {
  return (
    <article
      className="prose prose-invert max-w-none p-4"
      dangerouslySetInnerHTML={{ __html: html }}
    />
  );
}
/**
 * Render markdown to HTML client-side (basic preview only — real rendering
 * is done server-side). Supports headings, bold/italic, inline code,
 * [[wiki-links]], list items, and line breaks.
 *
 * Input is HTML-escaped FIRST: this output is injected via
 * dangerouslySetInnerHTML, so raw `<`/`>`/`&` in user markdown must not
 * reach the DOM as markup (XSS).
 */
export function renderMarkdown(md: string): string {
  return escapeHtml(md)
    .replace(/^### (.+)$/gm, '<h3>$1</h3>')
    .replace(/^## (.+)$/gm, '<h2>$1</h2>')
    .replace(/^# (.+)$/gm, '<h1>$1</h1>')
    .replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>')
    .replace(/\*(.+?)\*/g, '<em>$1</em>')
    .replace(/`(.+?)`/g, '<code>$1</code>')
    .replace(/\[\[(.+?)\]\]/g, '<a href="#" class="text-accent">$1</a>')
    .replace(/^- (.+)$/gm, '<li>$1</li>')
    .replace(/\n/g, '<br/>');
}

/** Escape the HTML-significant characters so raw input cannot inject markup. */
function escapeHtml(s: string): string {
  return s
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');
}

View file

@ -0,0 +1,21 @@
import { FrontmatterForm, type FieldDef } from '../editor/FrontmatterForm';
// Field schema for agent definition frontmatter (agents/*.md).
const fields: FieldDef[] = [
  { name: 'name', label: 'Name', type: 'text', required: true, placeholder: 'my-agent' },
  { name: 'executable', label: 'Executable', type: 'select', required: true, options: ['claude-code', 'ollama', 'custom'] },
  { name: 'model', label: 'Model', type: 'text', placeholder: 'sonnet' },
  { name: 'escalate_to', label: 'Escalate To', type: 'text', placeholder: 'opus' },
  { name: 'skills', label: 'Skills', type: 'tags' },
  { name: 'mcp_servers', label: 'MCP Servers', type: 'tags' },
  { name: 'timeout', label: 'Timeout (seconds)', type: 'number', placeholder: '600' },
  { name: 'max_retries', label: 'Max Retries', type: 'number', placeholder: '0' },
];

interface Props {
  values: Record<string, unknown>;
  onChange: (values: Record<string, unknown>) => void;
}

/** Structured frontmatter form for agent entity files. */
export function AgentForm({ values, onChange }: Props) {
  return <FrontmatterForm fields={fields} values={values} onChange={onChange} />;
}

View file

@ -0,0 +1,19 @@
import { FrontmatterForm, type FieldDef } from '../editor/FrontmatterForm';
// Field schema for agent task frontmatter (todos/agent/*.md).
const fields: FieldDef[] = [
  { name: 'title', label: 'Title', type: 'text', required: true },
  { name: 'agent', label: 'Agent', type: 'text', required: true },
  { name: 'priority', label: 'Priority', type: 'select', options: ['urgent', 'high', 'medium', 'low'] },
  { name: 'type', label: 'Task Type', type: 'text', placeholder: 'review' },
  { name: 'max_retries', label: 'Max Retries', type: 'number', placeholder: '0' },
  { name: 'input', label: 'Input (JSON)', type: 'json' },
];

interface Props {
  values: Record<string, unknown>;
  onChange: (values: Record<string, unknown>) => void;
}

/** Structured frontmatter form for agent task files. */
export function AgentTaskForm({ values, onChange }: Props) {
  return <FrontmatterForm fields={fields} values={values} onChange={onChange} />;
}

View file

@ -0,0 +1,17 @@
import { FrontmatterForm, type FieldDef } from '../editor/FrontmatterForm';
// Field schema for cron job frontmatter (crons/*.md).
const fields: FieldDef[] = [
  { name: 'title', label: 'Title', type: 'text', required: true, placeholder: 'Daily code review' },
  { name: 'schedule', label: 'Schedule (cron)', type: 'text', required: true, placeholder: '0 9 * * *' },
  { name: 'agent', label: 'Agent', type: 'text', required: true, placeholder: 'reviewer' },
  { name: 'enabled', label: 'Enabled', type: 'checkbox' },
];

interface Props {
  values: Record<string, unknown>;
  onChange: (values: Record<string, unknown>) => void;
}

/** Structured frontmatter form for cron job files. */
export function CronForm({ values, onChange }: Props) {
  return <FrontmatterForm fields={fields} values={values} onChange={onChange} />;
}

View file

@ -0,0 +1,19 @@
import { FrontmatterForm, type FieldDef } from '../editor/FrontmatterForm';
// Field schema for human task frontmatter (todos/harald/*.md).
const fields: FieldDef[] = [
  { name: 'title', label: 'Title', type: 'text', required: true },
  { name: 'priority', label: 'Priority', type: 'select', options: ['urgent', 'high', 'medium', 'low'] },
  { name: 'labels', label: 'Labels', type: 'tags' },
  { name: 'repo', label: 'Repository', type: 'text', placeholder: 'owner/repo' },
  { name: 'due', label: 'Due Date', type: 'datetime' },
  { name: 'source', label: 'Source', type: 'text', placeholder: 'self' },
];

interface Props {
  values: Record<string, unknown>;
  onChange: (values: Record<string, unknown>) => void;
}

/** Structured frontmatter form for human task files. */
export function HumanTaskForm({ values, onChange }: Props) {
  return <FrontmatterForm fields={fields} values={values} onChange={onChange} />;
}

View file

@ -0,0 +1,17 @@
import { FrontmatterForm, type FieldDef } from '../editor/FrontmatterForm';
// Field schema for knowledge note frontmatter (knowledge/*.md).
const fields: FieldDef[] = [
  { name: 'title', label: 'Title', type: 'text' },
  { name: 'tags', label: 'Tags', type: 'tags' },
  { name: 'source', label: 'Source', type: 'text', placeholder: 'agent-name or self' },
  { name: 'related', label: 'Related Files', type: 'tags' },
];

interface Props {
  values: Record<string, unknown>;
  onChange: (values: Record<string, unknown>) => void;
}

/** Structured frontmatter form for knowledge note files. */
export function KnowledgeForm({ values, onChange }: Props) {
  return <FrontmatterForm fields={fields} values={values} onChange={onChange} />;
}

View file

@ -0,0 +1,19 @@
import { FrontmatterForm, type FieldDef } from '../editor/FrontmatterForm';
// Field schema for skill definition frontmatter (skills/*.md).
const fields: FieldDef[] = [
  { name: 'name', label: 'Name', type: 'text', required: true, placeholder: 'read-vault' },
  { name: 'description', label: 'Description', type: 'textarea', required: true },
  { name: 'version', label: 'Version', type: 'number', placeholder: '1' },
  { name: 'requires_mcp', label: 'Requires MCP', type: 'tags' },
  { name: 'inputs', label: 'Inputs', type: 'tags' },
  { name: 'outputs', label: 'Outputs', type: 'tags' },
];

interface Props {
  values: Record<string, unknown>;
  onChange: (values: Record<string, unknown>) => void;
}

/** Structured frontmatter form for skill entity files. */
export function SkillForm({ values, onChange }: Props) {
  return <FrontmatterForm fields={fields} values={values} onChange={onChange} />;
}

View file

@ -0,0 +1,98 @@
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import * as api from '../api/client';
/** List all agent definitions. */
export function useAgents() {
  return useQuery({ queryKey: ['agents'], queryFn: api.listAgents });
}

/** Fetch a single agent by name; disabled while `name` is empty. */
export function useAgent(name: string) {
  return useQuery({ queryKey: ['agents', name], queryFn: () => api.getAgent(name), enabled: !!name });
}

/** List all skills. */
export function useSkills() {
  return useQuery({ queryKey: ['skills'], queryFn: api.listSkills });
}

/** List all cron jobs. */
export function useCrons() {
  return useQuery({ queryKey: ['crons'], queryFn: api.listCrons });
}

/** List human tasks (kanban items). */
export function useHumanTasks() {
  return useQuery({ queryKey: ['humanTasks'], queryFn: api.listHumanTasks });
}

/** List queued/running/finished agent tasks. */
export function useAgentTasks() {
  return useQuery({ queryKey: ['agentTasks'], queryFn: api.listAgentTasks });
}

/** Search knowledge notes, optionally filtered by free-text query and tag. */
export function useKnowledge(q?: string, tag?: string) {
  return useQuery({
    queryKey: ['knowledge', q, tag],
    queryFn: () => api.listKnowledge(q, tag),
  });
}

/** Vault statistics, refreshed every 30s. */
export function useStats() {
  return useQuery({ queryKey: ['stats'], queryFn: api.getStats, refetchInterval: 30000 });
}

/** Recent activity feed. */
export function useActivity() {
  return useQuery({ queryKey: ['activity'], queryFn: api.getActivity });
}

/** Service health, refreshed every 15s. */
export function useHealth() {
  return useQuery({ queryKey: ['health'], queryFn: api.getHealth, refetchInterval: 15000 });
}

/** Move a human task between kanban columns; refetches the task list on success. */
export function useMoveHumanTask() {
  const qc = useQueryClient();
  return useMutation({
    mutationFn: ({ status, id, to }: { status: string; id: string; to: string }) =>
      api.moveHumanTask(status, id, to),
    onSuccess: () => qc.invalidateQueries({ queryKey: ['humanTasks'] }),
  });
}

/** Create a human task; refetches the task list on success. */
export function useCreateHumanTask() {
  const qc = useQueryClient();
  return useMutation({
    mutationFn: api.createHumanTask,
    onSuccess: () => qc.invalidateQueries({ queryKey: ['humanTasks'] }),
  });
}

/** Manually trigger an agent run; refetches the agent task queue. */
export function useTriggerAgent() {
  const qc = useQueryClient();
  return useMutation({
    mutationFn: ({ name, context }: { name: string; context?: string }) =>
      api.triggerAgent(name, context),
    onSuccess: () => qc.invalidateQueries({ queryKey: ['agentTasks'] }),
  });
}

/** Run a cron immediately; a run can enqueue agent tasks, so both caches refresh. */
export function useTriggerCron() {
  const qc = useQueryClient();
  return useMutation({
    mutationFn: (name: string) => api.triggerCron(name),
    onSuccess: () => {
      qc.invalidateQueries({ queryKey: ['crons'] });
      qc.invalidateQueries({ queryKey: ['agentTasks'] });
    },
  });
}

/** Pause a cron job. */
export function usePauseCron() {
  const qc = useQueryClient();
  return useMutation({
    mutationFn: (name: string) => api.pauseCron(name),
    onSuccess: () => qc.invalidateQueries({ queryKey: ['crons'] }),
  });
}

/** Resume a paused cron job. */
export function useResumeCron() {
  const qc = useQueryClient();
  return useMutation({
    mutationFn: (name: string) => api.resumeCron(name),
    onSuccess: () => qc.invalidateQueries({ queryKey: ['crons'] }),
  });
}

View file

@ -0,0 +1,47 @@
import { useEffect, useRef } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { vaultWs } from '../api/ws';
import type { WsEvent } from '../api/types';
/** Connect/disconnect the global WebSocket with the component lifecycle. */
export function useWebSocket() {
  const queryClient = useQueryClient();
  useEffect(() => {
    vaultWs.connect();
    // Map incoming event-type prefixes onto React Query cache invalidations
    // so server-side changes refresh the relevant views.
    const unsub = vaultWs.onAny((event: WsEvent) => {
      // Invalidate relevant queries based on event area
      if (event.type.startsWith('agent_task'))
        queryClient.invalidateQueries({ queryKey: ['agentTasks'] });
      if (event.type.startsWith('human_task'))
        queryClient.invalidateQueries({ queryKey: ['humanTasks'] });
      // 'agent_*' but not 'agent_task*' — the latter is handled above.
      if (event.type.startsWith('agent_') && !event.type.startsWith('agent_task'))
        queryClient.invalidateQueries({ queryKey: ['agents'] });
      if (event.type.startsWith('skill_'))
        queryClient.invalidateQueries({ queryKey: ['skills'] });
      if (event.type.startsWith('cron_'))
        queryClient.invalidateQueries({ queryKey: ['crons'] });
      if (event.type.startsWith('knowledge_'))
        queryClient.invalidateQueries({ queryKey: ['knowledge'] });
      // Always invalidate stats/activity on any change
      queryClient.invalidateQueries({ queryKey: ['stats'] });
      queryClient.invalidateQueries({ queryKey: ['activity'] });
    });
    return () => {
      unsub();
      vaultWs.disconnect();
    };
  }, [queryClient]);
}

/** Subscribe to a specific event type. */
export function useVaultEvent(type: string, handler: (event: WsEvent) => void) {
  // Ref keeps the latest handler without re-subscribing on every render.
  const handlerRef = useRef(handler);
  handlerRef.current = handler;
  useEffect(() => {
    return vaultWs.on(type, (e) => handlerRef.current(e));
  }, [type]);
}

24
dashboard/src/index.css Normal file
View file

@ -0,0 +1,24 @@
/* Tailwind v4 entry point. */
@import "tailwindcss";

/* Dark design tokens, exposed as Tailwind utilities
   (bg-surface, text-text-primary, border-border, text-accent, ...). */
@theme {
  --color-surface: #0f1117;
  --color-surface-raised: #1a1d27;
  --color-surface-overlay: #252833;
  --color-border: #2e3241;
  --color-border-hover: #3d4254;
  --color-text-primary: #e1e4ed;
  --color-text-secondary: #8b90a0;
  --color-text-muted: #5c6070;
  --color-accent: #6c8cff;
  --color-accent-hover: #8aa3ff;
  --color-success: #4ade80;
  --color-warning: #fbbf24;
  --color-danger: #f87171;
  --color-urgent: #ef4444;
}

/* Global defaults: dark background, primary text color, Inter font stack. */
body {
  @apply bg-surface text-text-primary;
  margin: 0;
  font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
}

10
dashboard/src/main.tsx Normal file
View file

@ -0,0 +1,10 @@
// Application entry point: mounts <App /> under React StrictMode.
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import App from './App.tsx'

// #root is provided by index.html; the non-null assertion reflects that.
createRoot(document.getElementById('root')!).render(
  <StrictMode>
    <App />
  </StrictMode>,
)

View file

@ -0,0 +1,70 @@
import { useAgentTasks } from '../hooks/useApi';
import { StatusBadge } from '../components/StatusBadge';
import type { AgentTask, AgentTaskStatus } from '../api/types';
// Kanban column order for the agent task lifecycle.
const COLUMNS: { id: AgentTaskStatus; label: string }[] = [
  { id: 'queued', label: 'Queued' },
  { id: 'running', label: 'Running' },
  { id: 'done', label: 'Done' },
  { id: 'failed', label: 'Failed' },
];

/** Read-only board of agent tasks grouped into status columns. */
export function AgentQueuePage() {
  const { data: tasks, isLoading } = useAgentTasks();
  const byStatus = (s: AgentTaskStatus) => (tasks || []).filter((t) => t.status === s);
  return (
    <div className="p-6">
      <h1 className="mb-4 text-lg font-semibold">Agent Queue</h1>
      {isLoading ? (
        <div className="text-text-muted">Loading...</div>
      ) : (
        <div className="flex gap-4">
          {COLUMNS.map((col) => {
            const items = byStatus(col.id);
            return (
              <div key={col.id} className="w-72 shrink-0">
                <div className="mb-2 flex items-center justify-between px-1">
                  <h3 className="text-sm font-semibold text-text-secondary">{col.label}</h3>
                  <span className="text-xs text-text-muted">{items.length}</span>
                </div>
                <div className="space-y-2 rounded-lg border border-border bg-surface p-2">
                  {items.length === 0 ? (
                    <div className="py-4 text-center text-xs text-text-muted">Empty</div>
                  ) : (
                    items.map((task) => <AgentTaskCard key={task.id} task={task} />)
                  )}
                </div>
              </div>
            );
          })}
        </div>
      )}
    </div>
  );
}

/** Single card: title, agent, priority badge, optional error and start time. */
function AgentTaskCard({ task }: { task: AgentTask }) {
  return (
    <div className="rounded-lg border border-border bg-surface-raised p-3">
      <div className="mb-1 text-sm font-medium text-text-primary">{task.title}</div>
      <div className="flex items-center gap-2 text-xs text-text-secondary">
        <span>{task.agent}</span>
        <StatusBadge value={task.priority} />
        {task.type && <span className="text-text-muted">{task.type}</span>}
      </div>
      {task.error && (
        <div className="mt-2 truncate rounded bg-danger/10 px-2 py-1 text-xs text-danger">
          {task.error}
        </div>
      )}
      {task.started && (
        <div className="mt-1 text-xs text-text-muted">
          started: {new Date(task.started).toLocaleTimeString()}
        </div>
      )}
    </div>
  );
}

View file

@ -0,0 +1,29 @@
import { AgentCard } from '../components/AgentCard';
import { useAgents, useTriggerAgent } from '../hooks/useApi';
/** Grid of agent cards with a manual "trigger run" action per agent. */
export function AgentsPage() {
  const { data: agents, isLoading } = useAgents();
  const triggerMut = useTriggerAgent();
  return (
    <div className="p-6">
      <h1 className="mb-4 text-lg font-semibold">Agents</h1>
      {isLoading ? (
        <div className="text-text-muted">Loading agents...</div>
      ) : !agents?.length ? (
        <div className="text-text-muted">No agents defined. Create markdown files in agents/</div>
      ) : (
        <div className="grid grid-cols-1 gap-4 md:grid-cols-2 xl:grid-cols-3">
          {agents.map((agent) => (
            <AgentCard
              key={agent.name}
              agent={agent}
              onTrigger={(name) => triggerMut.mutate({ name })}
            />
          ))}
        </div>
      )}
    </div>
  );
}

View file

@ -0,0 +1,40 @@
import { CronRow } from '../components/CronRow';
import { useCrons, useTriggerCron, usePauseCron, useResumeCron } from '../hooks/useApi';
/** List of cron jobs with trigger-now and pause/resume toggling. */
export function CronsPage() {
  const { data: crons, isLoading } = useCrons();
  const triggerMut = useTriggerCron();
  const pauseMut = usePauseCron();
  const resumeMut = useResumeCron();
  // Active crons get paused; inactive ones get resumed.
  const handleToggle = (name: string, isActive: boolean) => {
    if (isActive) {
      pauseMut.mutate(name);
    } else {
      resumeMut.mutate(name);
    }
  };
  return (
    <div className="p-6">
      <h1 className="mb-4 text-lg font-semibold">Cron Jobs</h1>
      {isLoading ? (
        <div className="text-text-muted">Loading crons...</div>
      ) : !crons?.length ? (
        <div className="text-text-muted">No cron jobs defined. Create markdown files in crons/active/</div>
      ) : (
        <div className="rounded-lg border border-border bg-surface-raised">
          {crons.map((cron) => (
            <CronRow
              key={cron.name}
              cron={cron}
              onTrigger={(name) => triggerMut.mutate(name)}
              onToggle={handleToggle}
            />
          ))}
        </div>
      )}
    </div>
  );
}

View file

@ -0,0 +1,93 @@
import { useState, useEffect } from 'react';
import { useSearchParams } from 'react-router-dom';
import { useQuery } from '@tanstack/react-query';
import { FileTree } from '../components/FileTree';
import { FileEditor } from '../components/editor/FileEditor';
import { ChatSidebar } from '../components/assistant/ChatSidebar';
import { getTree, writeFile } from '../api/client';
import { slugify } from '../utils';
/**
 * File-editor workspace: file tree sidebar, the FileEditor for the selected
 * markdown file, and an optional AI assistant sidebar. Supports deep links
 * via ?file= and a ?new=<dir> query param that prompts for a new file name.
 */
export function EditorPage() {
  const [searchParams, setSearchParams] = useSearchParams();
  const [selectedPath, setSelectedPath] = useState(searchParams.get('file') || '');
  const [showAssistant, setShowAssistant] = useState(false);
  const { data: tree, refetch: refetchTree } = useQuery({
    queryKey: ['tree'],
    queryFn: getTree,
  });
  // Handle ?new=path for creating new files
  // NOTE(review): prompt() inside an effect will fire twice under React
  // StrictMode in dev, and writeFile errors here are unhandled — confirm.
  useEffect(() => {
    const newDir = searchParams.get('new');
    if (newDir) {
      const name = prompt('File name (without .md):');
      if (name) {
        const slug = slugify(name);
        const path = `${newDir}/${slug}.md`;
        writeFile(path, { frontmatter: { title: name }, body: '' }).then(() => {
          setSelectedPath(path);
          // Clear the query param so the prompt doesn't re-trigger.
          setSearchParams({});
          refetchTree();
        });
      } else {
        setSearchParams({});
      }
    }
  }, [searchParams, setSearchParams, refetchTree]);
  // Only markdown files are editable; other tree entries are ignored.
  const handleSelect = (path: string) => {
    if (path.endsWith('.md')) {
      setSelectedPath(path);
    }
  };
  const handleCreateFile = (dir: string) => {
    const name = prompt('New file name (without .md):');
    if (name) {
      const slug = slugify(name);
      const path = `${dir}/${slug}.md`;
      writeFile(path, { frontmatter: {}, body: '' }).then(() => {
        setSelectedPath(path);
        refetchTree();
      });
    }
  };
  return (
    <div className="flex h-full">
      {/* File tree sidebar */}
      <div className="w-56 shrink-0 overflow-auto border-r border-border bg-surface-raised py-2">
        {tree ? (
          <FileTree
            tree={tree}
            selectedPath={selectedPath}
            onSelect={handleSelect}
            onCreateFile={handleCreateFile}
          />
        ) : (
          <div className="p-4 text-xs text-text-muted">Loading...</div>
        )}
      </div>
      {/* Editor */}
      <div className="min-w-0 flex-1">
        {selectedPath ? (
          <FileEditor
            path={selectedPath}
            onSaved={() => refetchTree()}
            onToggleAssistant={() => setShowAssistant((v) => !v)}
          />
        ) : (
          <div className="flex h-full items-center justify-center text-text-muted">
            Select a file or press Cmd+K to create one
          </div>
        )}
      </div>
      {/* AI Assistant sidebar */}
      {showAssistant && (
        <ChatSidebar filePath={selectedPath} onClose={() => setShowAssistant(false)} />
      )}
    </div>
  );
}

View file

@ -0,0 +1,96 @@
import { useState } from 'react';
import { useKnowledge } from '../hooks/useApi';
import { getKnowledge } from '../api/client';
/**
 * Knowledge browser: searchable, tag-filterable note list on the left,
 * server-rendered HTML of the selected note on the right.
 */
export function KnowledgePage() {
  const [search, setSearch] = useState('');
  const [selectedTag, setSelectedTag] = useState<string>();
  const [selectedNote, setSelectedNote] = useState<{
    path: string;
    html: string;
    body: string;
    frontmatter: unknown;
  } | null>(null);
  const { data: notes, isLoading } = useKnowledge(search || undefined, selectedTag);
  // Tag cloud is derived from the currently visible (already filtered) notes.
  const allTags = [...new Set((notes || []).flatMap((n) => n.tags))].sort();
  // NOTE(review): getKnowledge failures are unhandled here (unhandled
  // rejection); consider a .catch with user feedback.
  const openNote = async (path: string) => {
    const data = await getKnowledge(path.replace(/^knowledge\//, ''));
    setSelectedNote(data);
  };
  return (
    <div className="flex h-full">
      <div className="w-80 shrink-0 border-r border-border">
        <div className="border-b border-border p-4">
          <h1 className="mb-3 text-lg font-semibold">Knowledge</h1>
          <input
            value={search}
            onChange={(e) => setSearch(e.target.value)}
            placeholder="Search..."
            className="w-full rounded border border-border bg-surface px-3 py-1.5 text-sm text-text-primary outline-none focus:border-accent"
          />
          {allTags.length > 0 && (
            <div className="mt-2 flex flex-wrap gap-1">
              {allTags.map((tag) => (
                <button
                  key={tag}
                  onClick={() => setSelectedTag(selectedTag === tag ? undefined : tag)}
                  className={`rounded px-2 py-0.5 text-xs transition-colors ${
                    selectedTag === tag
                      ? 'bg-accent/20 text-accent'
                      : 'bg-surface-overlay text-text-secondary hover:text-text-primary'
                  }`}
                >
                  {tag}
                </button>
              ))}
            </div>
          )}
        </div>
        <div className="overflow-auto">
          {isLoading ? (
            <div className="p-4 text-sm text-text-muted">Loading...</div>
          ) : !notes?.length ? (
            <div className="p-4 text-sm text-text-muted">No notes found</div>
          ) : (
            notes.map((note) => (
              <button
                key={note.path}
                onClick={() => openNote(note.path)}
                // NOTE(review): assumes the API returns selectedNote.path
                // WITHOUT the 'knowledge/' prefix — verify against the server.
                className={`block w-full border-b border-border px-4 py-2.5 text-left transition-colors hover:bg-surface-overlay ${
                  selectedNote?.path === note.path.replace(/^knowledge\//, '')
                    ? 'bg-surface-overlay'
                    : ''
                }`}
              >
                <div className="text-sm font-medium text-text-primary">{note.title}</div>
                {note.tags.length > 0 && (
                  <div className="mt-0.5 flex gap-1">
                    {note.tags.map((t) => (
                      <span key={t} className="text-xs text-text-muted">#{t}</span>
                    ))}
                  </div>
                )}
              </button>
            ))
          )}
        </div>
      </div>
      <div className="flex-1 overflow-auto p-6">
        {selectedNote ? (
          <article
            className="prose prose-invert max-w-none"
            dangerouslySetInnerHTML={{ __html: selectedNote.html }}
          />
        ) : (
          <div className="text-text-muted">Select a note to view</div>
        )}
      </div>
    </div>
  );
}

View file

@ -0,0 +1,84 @@
import { Kanban } from '../components/Kanban';
import { ActivityFeed } from '../components/ActivityFeed';
import { useHumanTasks, useMoveHumanTask, useCreateHumanTask, useStats } from '../hooks/useApi';
import { StatusBadge } from '../components/StatusBadge';
import { useState } from 'react';
/**
 * Human-task kanban page: a board of tasks (draggable between status
 * columns) plus a sidebar showing per-status counts and recent activity.
 */
export function TasksPage() {
  const { data: tasks, isLoading } = useHumanTasks();
  const { data: stats } = useStats();
  const moveTask = useMoveHumanTask();
  const createTask = useCreateHumanTask();
  const [creating, setCreating] = useState(false);
  const [draftTitle, setDraftTitle] = useState('');

  // Kanban drag handler: persist a single task's status transition.
  const onMove = (id: string, fromStatus: string, toStatus: string) => {
    moveTask.mutate({ status: fromStatus, id, to: toStatus });
  };

  // Create a task from the inline form; reset and hide the form on success.
  const submitNewTask = () => {
    const title = draftTitle.trim();
    if (!title) return;
    createTask.mutate(
      { title },
      {
        onSuccess: () => {
          setDraftTitle('');
          setCreating(false);
        },
      },
    );
  };

  let board;
  if (isLoading) {
    board = <div className="p-6 text-text-muted">Loading tasks...</div>;
  } else {
    board = <Kanban tasks={tasks || []} onMove={onMove} />;
  }

  return (
    <div className="flex h-full">
      <div className="flex-1 overflow-auto">
        <div className="flex items-center justify-between border-b border-border px-6 py-3">
          <h1 className="text-lg font-semibold">Tasks</h1>
          <button
            onClick={() => setCreating(!creating)}
            className="rounded-md bg-accent px-3 py-1.5 text-xs font-medium text-white transition-colors hover:bg-accent-hover"
          >
            New Task
          </button>
        </div>
        {creating && (
          <div className="flex items-center gap-2 border-b border-border px-6 py-2">
            <input
              value={draftTitle}
              onChange={(e) => setDraftTitle(e.target.value)}
              onKeyDown={(e) => {
                if (e.key === 'Enter') submitNewTask();
              }}
              placeholder="Task title..."
              className="flex-1 rounded border border-border bg-surface px-3 py-1.5 text-sm text-text-primary outline-none focus:border-accent"
              autoFocus
            />
            <button onClick={submitNewTask} className="rounded bg-accent px-3 py-1.5 text-xs text-white">
              Create
            </button>
          </div>
        )}
        {board}
      </div>
      <aside className="w-64 shrink-0 border-l border-border bg-surface-raised">
        <div className="border-b border-border px-4 py-3">
          <h2 className="text-sm font-semibold text-text-secondary">Overview</h2>
          {stats && (
            <div className="mt-2 space-y-1">
              {Object.entries(stats.human_tasks).map(([status, count]) => (
                <div key={status} className="flex items-center justify-between text-xs">
                  <StatusBadge value={status} />
                  <span className="text-text-muted">{count}</span>
                </div>
              ))}
            </div>
          )}
        </div>
        <div className="px-4 py-3">
          <h2 className="mb-2 text-sm font-semibold text-text-secondary">Recent Activity</h2>
          <ActivityFeed />
        </div>
      </aside>
    </div>
  );
}

View file

@ -0,0 +1,18 @@
import { useParams } from 'react-router-dom';
import { ViewRenderer } from '../views/ViewRenderer';
import { NotificationBanner } from '../components/NotificationBanner';
/**
 * Renders a vault-defined view page. The catch-all route param ('*') is the
 * view path relative to the vault's pages/ directory; without it there is
 * nothing to render.
 */
export function ViewPage() {
  const params = useParams();
  const viewPath = params['*'];
  return viewPath ? (
    <>
      <NotificationBanner />
      <ViewRenderer viewPath={`pages/${viewPath}`} />
    </>
  ) : (
    <div className="p-6 text-sm text-text-muted">No view specified</div>
  );
}

6
dashboard/src/utils.ts Normal file
View file

@ -0,0 +1,6 @@
/**
 * Convert an arbitrary string into a URL-safe slug: lowercased, with each
 * run of non-alphanumeric characters collapsed to a single hyphen, and with
 * leading/trailing hyphens removed.
 */
export function slugify(s: string): string {
  const lowered = s.toLowerCase();
  const hyphenated = lowered.replace(/[^a-z0-9]+/g, '-');
  return hyphenated.replace(/^-+/, '').replace(/-+$/, '');
}

View file

@ -0,0 +1,52 @@
import type { ViewRegions, WidgetInstanceDef } from '../api/types';
import { WidgetRenderer } from './WidgetRenderer';
/** Props for LayoutRenderer: a layout name plus the view's named widget regions. */
interface Props {
  /** Layout identifier ('single' | 'two-column' | 'dashboard' | 'grid'); unknown or missing values fall back to single-column. */
  layout?: string;
  /** Widget lists keyed by region name (e.g. 'main', 'left', 'right', 'sidebar'). */
  regions?: ViewRegions;
}
function renderWidgets(widgets: WidgetInstanceDef[]) {
return widgets.map((w, i) => <WidgetRenderer key={`${w.widget}-${i}`} instance={w} />);
}
/** Single-column layout: the "main" region's widgets stacked vertically. */
function SingleColumn({ regions }: { regions: ViewRegions }) {
  const widgets = regions.main || [];
  return <div className="space-y-4">{renderWidgets(widgets)}</div>;
}
/**
 * Two-column layout: a wide primary column ("left", falling back to "main")
 * beside a narrower secondary column ("right", falling back to "sidebar").
 */
function TwoColumn({ regions }: { regions: ViewRegions }) {
  const primary = regions.left || regions.main || [];
  const secondary = regions.right || regions.sidebar || [];
  return (
    <div className="grid grid-cols-3 gap-4">
      <div className="col-span-2 space-y-4">{renderWidgets(primary)}</div>
      <div className="space-y-4">{renderWidgets(secondary)}</div>
    </div>
  );
}
/** Dashboard layout: widgets from every region pooled into one responsive grid. */
function DashboardGrid({ regions }: { regions: ViewRegions }) {
  const pooled = Object.values(regions).flat();
  return (
    <div className="grid grid-cols-1 gap-4 sm:grid-cols-2 lg:grid-cols-3">
      {renderWidgets(pooled)}
    </div>
  );
}
/**
 * Dispatch a named layout to its renderer component. Any unrecognized
 * layout name (including the explicit 'single') renders the single-column
 * fallback.
 */
export function LayoutRenderer({ layout = 'single', regions = {} }: Props) {
  if (layout === 'two-column') {
    return <TwoColumn regions={regions} />;
  }
  if (layout === 'dashboard' || layout === 'grid') {
    return <DashboardGrid regions={regions} />;
  }
  return <SingleColumn regions={regions} />;
}

Some files were not shown because too many files have changed in this diff Show more