diff --git a/.env.example b/.env.example
index 42a0b37..a6ba55e 100644
--- a/.env.example
+++ b/.env.example
@@ -21,6 +21,10 @@ PROVIDER=openrouter
# Workspace directory override
# ZEROCLAW_WORKSPACE=/path/to/workspace
+# Reasoning mode (enables extended thinking for supported models); both the ZEROCLAW_-prefixed and unprefixed variable names below are recognized
+# ZEROCLAW_REASONING_ENABLED=false
+# REASONING_ENABLED=false
+
# ── Provider-Specific API Keys ────────────────────────────────
# OpenRouter
# OPENROUTER_API_KEY=sk-or-v1-...
@@ -63,6 +67,22 @@ PROVIDER=openrouter
# ZEROCLAW_GATEWAY_HOST=127.0.0.1
# ZEROCLAW_ALLOW_PUBLIC_BIND=false
+# ── Storage ─────────────────────────────────────────────────
+# Backend override for persistent storage (default: sqlite)
+# ZEROCLAW_STORAGE_PROVIDER=sqlite
+# ZEROCLAW_STORAGE_DB_URL=postgres://localhost/zeroclaw
+# ZEROCLAW_STORAGE_CONNECT_TIMEOUT_SECS=5
+
+# ── Proxy ──────────────────────────────────────────────────
+# Forward provider/service traffic through an HTTP(S) proxy.
+# ZEROCLAW_PROXY_ENABLED=false
+# ZEROCLAW_HTTP_PROXY=http://proxy.example.com:8080
+# ZEROCLAW_HTTPS_PROXY=http://proxy.example.com:8080
+# ZEROCLAW_ALL_PROXY=socks5://proxy.example.com:1080
+# ZEROCLAW_NO_PROXY=localhost,127.0.0.1
+# ZEROCLAW_PROXY_SCOPE=zeroclaw # environment|zeroclaw|services
+# ZEROCLAW_PROXY_SERVICES=openai,anthropic
+
# ── Optional Integrations ────────────────────────────────────
# Pushover notifications (`pushover` tool)
# PUSHOVER_TOKEN=your-pushover-app-token
diff --git a/.envrc b/.envrc
new file mode 100644
index 0000000..3550a30
--- /dev/null
+++ b/.envrc
@@ -0,0 +1 @@
+use flake
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 2f88c8e..eb81c96 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -4,13 +4,13 @@ updates:
- package-ecosystem: cargo
directory: "/"
schedule:
- interval: weekly
+ interval: daily
target-branch: main
- open-pull-requests-limit: 5
+ open-pull-requests-limit: 3
labels:
- "dependencies"
groups:
- rust-minor-patch:
+ rust-all:
patterns:
- "*"
update-types:
@@ -20,14 +20,14 @@ updates:
- package-ecosystem: github-actions
directory: "/"
schedule:
- interval: weekly
+ interval: daily
target-branch: main
- open-pull-requests-limit: 3
+ open-pull-requests-limit: 1
labels:
- "ci"
- "dependencies"
groups:
- actions-minor-patch:
+ actions-all:
patterns:
- "*"
update-types:
@@ -37,16 +37,16 @@ updates:
- package-ecosystem: docker
directory: "/"
schedule:
- interval: weekly
+ interval: daily
target-branch: main
- open-pull-requests-limit: 3
+ open-pull-requests-limit: 1
labels:
- "ci"
- "dependencies"
groups:
- docker-minor-patch:
+ docker-all:
patterns:
- "*"
update-types:
- minor
- - patch
\ No newline at end of file
+ - patch
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 7c9e601..7990431 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -12,11 +12,7 @@ Describe this PR in 2-5 bullets:
- Risk label (`risk: low|medium|high`):
- Size label (`size: XS|S|M|L|XL`, auto-managed/read-only):
- Scope labels (`core|agent|channel|config|cron|daemon|doctor|gateway|health|heartbeat|integration|memory|observability|onboard|provider|runtime|security|service|skillforge|skills|tool|tunnel|docs|dependencies|ci|tests|scripts|dev`, comma-separated):
-<<<<<<< chore/labeler-spacing-trusted-tier
- Module labels (`: `, for example `channel: telegram`, `provider: kimi`, `tool: shell`):
-=======
-- Module labels (`:`, for example `channel:telegram`, `provider:kimi`, `tool:shell`):
->>>>>>> main
- Contributor tier label (`trusted contributor|experienced contributor|principal contributor|distinguished contributor`, auto-managed/read-only; author merged PRs >=5/10/20/50):
- If any auto-label is incorrect, note requested correction:
diff --git a/.github/workflows/ci-run.yml b/.github/workflows/ci-run.yml
index 373b879..dea6208 100644
--- a/.github/workflows/ci-run.yml
+++ b/.github/workflows/ci-run.yml
@@ -41,25 +41,7 @@ jobs:
run: ./scripts/ci/detect_change_scope.sh
lint:
- name: Lint Gate (Format + Clippy)
- needs: [changes]
- if: needs.changes.outputs.rust_changed == 'true' && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci:full'))
- runs-on: blacksmith-2vcpu-ubuntu-2404
- timeout-minutes: 20
- steps:
- - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
- with:
- fetch-depth: 0
- - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
- with:
- toolchain: 1.92.0
- components: rustfmt, clippy
- - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
- - name: Run rust quality gate
- run: ./scripts/ci/rust_quality_gate.sh
-
- lint-strict-delta:
- name: Lint Gate (Strict Delta)
+ name: Lint Gate (Format + Clippy + Strict Delta)
needs: [changes]
if: needs.changes.outputs.rust_changed == 'true' && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci:full'))
runs-on: blacksmith-2vcpu-ubuntu-2404
@@ -71,8 +53,10 @@ jobs:
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
with:
toolchain: 1.92.0
- components: clippy
+ components: rustfmt, clippy
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
+ - name: Run rust quality gate
+ run: ./scripts/ci/rust_quality_gate.sh
- name: Run strict lint delta gate
env:
BASE_SHA: ${{ needs.changes.outputs.base_sha }}
@@ -80,8 +64,8 @@ jobs:
test:
name: Test
- needs: [changes, lint, lint-strict-delta]
- if: needs.changes.outputs.rust_changed == 'true' && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci:full')) && needs.lint.result == 'success' && needs.lint-strict-delta.result == 'success'
+ needs: [changes, lint]
+ if: needs.changes.outputs.rust_changed == 'true' && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci:full')) && needs.lint.result == 'success'
runs-on: blacksmith-2vcpu-ubuntu-2404
timeout-minutes: 30
steps:
@@ -106,8 +90,8 @@ jobs:
with:
toolchain: 1.92.0
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
- - name: Build release binary
- run: cargo build --release --locked --verbose
+ - name: Build binary (smoke check)
+ run: cargo build --locked --verbose
docs-only:
name: Docs-Only Fast Path
@@ -185,7 +169,7 @@ jobs:
lint-feedback:
name: Lint Feedback
if: github.event_name == 'pull_request'
- needs: [changes, lint, lint-strict-delta, docs-quality]
+ needs: [changes, lint, docs-quality]
runs-on: blacksmith-2vcpu-ubuntu-2404
permissions:
contents: read
@@ -201,7 +185,7 @@ jobs:
RUST_CHANGED: ${{ needs.changes.outputs.rust_changed }}
DOCS_CHANGED: ${{ needs.changes.outputs.docs_changed }}
LINT_RESULT: ${{ needs.lint.result }}
- LINT_DELTA_RESULT: ${{ needs.lint-strict-delta.result }}
+ LINT_DELTA_RESULT: ${{ needs.lint.result }}
DOCS_RESULT: ${{ needs.docs-quality.result }}
with:
script: |
@@ -231,7 +215,7 @@ jobs:
ci-required:
name: CI Required Gate
if: always()
- needs: [changes, lint, lint-strict-delta, test, build, docs-only, non-rust, docs-quality, lint-feedback, workflow-owner-approval]
+ needs: [changes, lint, test, build, docs-only, non-rust, docs-quality, lint-feedback, workflow-owner-approval]
runs-on: blacksmith-2vcpu-ubuntu-2404
steps:
- name: Enforce required status
@@ -276,7 +260,7 @@ jobs:
fi
lint_result="${{ needs.lint.result }}"
- lint_strict_delta_result="${{ needs.lint-strict-delta.result }}"
+ lint_strict_delta_result="${{ needs.lint.result }}"
test_result="${{ needs.test.result }}"
build_result="${{ needs.build.result }}"
diff --git a/.github/workflows/feature-matrix.yml b/.github/workflows/feature-matrix.yml
index 875b0c5..18953e1 100644
--- a/.github/workflows/feature-matrix.yml
+++ b/.github/workflows/feature-matrix.yml
@@ -1,12 +1,6 @@
name: Feature Matrix
on:
- push:
- branches: [main]
- paths:
- - "Cargo.toml"
- - "Cargo.lock"
- - "src/**"
schedule:
- cron: "30 4 * * 1" # Weekly Monday 4:30am UTC
workflow_dispatch:
@@ -61,6 +55,3 @@ jobs:
- name: Check feature combination
run: cargo check --locked ${{ matrix.args }}
-
- - name: Test feature combination
- run: cargo test --locked ${{ matrix.args }}
diff --git a/.github/workflows/main-branch-flow.md b/.github/workflows/main-branch-flow.md
index 3a26ed1..6490e97 100644
--- a/.github/workflows/main-branch-flow.md
+++ b/.github/workflows/main-branch-flow.md
@@ -143,7 +143,7 @@ Workflow: `.github/workflows/pub-docker-img.yml`
- `latest` + SHA tag (`sha-<12 chars>`) for `main`
- semantic tag from pushed git tag (`vX.Y.Z`) + SHA tag for tag pushes
- branch name + SHA tag for non-`main` manual dispatch refs
-5. Multi-platform publish is used for tag pushes (`linux/amd64,linux/arm64`), while `main` publish stays `linux/amd64`.
+5. Multi-platform publish is used for both `main` and tag pushes (`linux/amd64,linux/arm64`).
6. Typical runtime in recent sample: ~139.9s.
7. Result: pushed image tags under `ghcr.io//`.
diff --git a/.github/workflows/pr-auto-response.yml b/.github/workflows/pr-auto-response.yml
index ee6e100..e5f068e 100644
--- a/.github/workflows/pr-auto-response.yml
+++ b/.github/workflows/pr-auto-response.yml
@@ -15,7 +15,7 @@ jobs:
(github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'labeled' || github.event.action == 'unlabeled')) ||
(github.event_name == 'pull_request_target' &&
(github.event.action == 'labeled' || github.event.action == 'unlabeled'))
- runs-on: blacksmith-2vcpu-ubuntu-2404
+ runs-on: ubuntu-latest
permissions:
contents: read
issues: write
@@ -34,7 +34,7 @@ jobs:
await script({ github, context, core });
first-interaction:
if: github.event.action == 'opened'
- runs-on: blacksmith-2vcpu-ubuntu-2404
+ runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
@@ -65,7 +65,7 @@ jobs:
labeled-routes:
if: github.event.action == 'labeled'
- runs-on: blacksmith-2vcpu-ubuntu-2404
+ runs-on: ubuntu-latest
permissions:
contents: read
issues: write
diff --git a/.github/workflows/pr-check-stale.yml b/.github/workflows/pr-check-stale.yml
index 0120547..a2cf24c 100644
--- a/.github/workflows/pr-check-stale.yml
+++ b/.github/workflows/pr-check-stale.yml
@@ -12,7 +12,7 @@ jobs:
permissions:
issues: write
pull-requests: write
- runs-on: blacksmith-2vcpu-ubuntu-2404
+ runs-on: ubuntu-latest
steps:
- name: Mark stale issues and pull requests
uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0
diff --git a/.github/workflows/pr-check-status.yml b/.github/workflows/pr-check-status.yml
index 390a285..b057e88 100644
--- a/.github/workflows/pr-check-status.yml
+++ b/.github/workflows/pr-check-status.yml
@@ -2,7 +2,7 @@ name: PR Check Status
on:
schedule:
- - cron: "15 */12 * * *"
+ - cron: "15 8 * * *" # Once daily at 8:15am UTC
workflow_dispatch:
permissions: {}
@@ -13,13 +13,13 @@ concurrency:
jobs:
nudge-stale-prs:
- runs-on: blacksmith-2vcpu-ubuntu-2404
+ runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
issues: write
env:
- STALE_HOURS: "4"
+ STALE_HOURS: "48"
steps:
- name: Checkout repository
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
diff --git a/.github/workflows/pr-intake-checks.yml b/.github/workflows/pr-intake-checks.yml
index 0cacf88..e703387 100644
--- a/.github/workflows/pr-intake-checks.yml
+++ b/.github/workflows/pr-intake-checks.yml
@@ -16,7 +16,7 @@ permissions:
jobs:
intake:
name: Intake Checks
- runs-on: blacksmith-2vcpu-ubuntu-2404
+ runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout repository
diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml
index 8349352..38cf054 100644
--- a/.github/workflows/pr-labeler.yml
+++ b/.github/workflows/pr-labeler.yml
@@ -25,8 +25,7 @@ permissions:
jobs:
label:
- runs-on: blacksmith-2vcpu-ubuntu-2404
- timeout-minutes: 10
+ runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
diff --git a/.github/workflows/pub-docker-img.yml b/.github/workflows/pub-docker-img.yml
index 15ea8aa..05d83e5 100644
--- a/.github/workflows/pub-docker-img.yml
+++ b/.github/workflows/pub-docker-img.yml
@@ -21,13 +21,8 @@ on:
paths:
- "Dockerfile"
- ".dockerignore"
- - "Cargo.toml"
- - "Cargo.lock"
+ - "docker-compose.yml"
- "rust-toolchain.toml"
- - "src/**"
- - "crates/**"
- - "benches/**"
- - "firmware/**"
- "dev/config.template.toml"
- ".github/workflows/pub-docker-img.yml"
workflow_dispatch:
@@ -75,6 +70,8 @@ jobs:
tags: zeroclaw-pr-smoke:latest
labels: ${{ steps.meta.outputs.labels || '' }}
platforms: linux/amd64
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
- name: Verify image
run: docker run --rm zeroclaw-pr-smoke:latest --version
@@ -83,7 +80,7 @@ jobs:
name: Build and Push Docker Image
if: (github.event_name == 'workflow_dispatch' || (github.event_name == 'push' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')))) && github.repository == 'zeroclaw-labs/zeroclaw'
runs-on: blacksmith-2vcpu-ubuntu-2404
- timeout-minutes: 25
+ timeout-minutes: 45
permissions:
contents: read
packages: write
@@ -128,7 +125,9 @@ jobs:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
- platforms: ${{ startsWith(github.ref, 'refs/tags/v') && 'linux/amd64,linux/arm64' || 'linux/amd64' }}
+ platforms: linux/amd64,linux/arm64
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
- name: Set GHCR package visibility to public
shell: bash
diff --git a/.github/workflows/pub-release.yml b/.github/workflows/pub-release.yml
index 7cdb853..14677b1 100644
--- a/.github/workflows/pub-release.yml
+++ b/.github/workflows/pub-release.yml
@@ -27,15 +27,45 @@ jobs:
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
artifact: zeroclaw
- - os: macos-latest
+ archive_ext: tar.gz
+ cross_compiler: ""
+ linker_env: ""
+ linker: ""
+ - os: ubuntu-latest
+ target: aarch64-unknown-linux-gnu
+ artifact: zeroclaw
+ archive_ext: tar.gz
+ cross_compiler: gcc-aarch64-linux-gnu
+ linker_env: CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER
+ linker: aarch64-linux-gnu-gcc
+ - os: ubuntu-latest
+ target: armv7-unknown-linux-gnueabihf
+ artifact: zeroclaw
+ archive_ext: tar.gz
+ cross_compiler: gcc-arm-linux-gnueabihf
+ linker_env: CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER
+ linker: arm-linux-gnueabihf-gcc
+ - os: macos-15-intel
target: x86_64-apple-darwin
artifact: zeroclaw
- - os: macos-latest
+ archive_ext: tar.gz
+ cross_compiler: ""
+ linker_env: ""
+ linker: ""
+ - os: macos-14
target: aarch64-apple-darwin
artifact: zeroclaw
+ archive_ext: tar.gz
+ cross_compiler: ""
+ linker_env: ""
+ linker: ""
- os: windows-latest
target: x86_64-pc-windows-msvc
artifact: zeroclaw.exe
+ archive_ext: zip
+ cross_compiler: ""
+ linker_env: ""
+ linker: ""
steps:
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
@@ -46,20 +76,41 @@ jobs:
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
+ - name: Install cross-compilation toolchain (Linux)
+ if: runner.os == 'Linux' && matrix.cross_compiler != ''
+ run: |
+ sudo apt-get update -qq
+ sudo apt-get install -y ${{ matrix.cross_compiler }}
+
- name: Build release
- run: cargo build --release --locked --target ${{ matrix.target }}
+ env:
+ LINKER_ENV: ${{ matrix.linker_env }}
+ LINKER: ${{ matrix.linker }}
+ run: |
+ if [ -n "$LINKER_ENV" ] && [ -n "$LINKER" ]; then
+ echo "Using linker override: $LINKER_ENV=$LINKER"
+ export "$LINKER_ENV=$LINKER"
+ fi
+ cargo build --release --locked --target ${{ matrix.target }}
- name: Check binary size (Unix)
if: runner.os != 'Windows'
run: |
- SIZE=$(stat -f%z target/${{ matrix.target }}/release/${{ matrix.artifact }} 2>/dev/null || stat -c%s target/${{ matrix.target }}/release/${{ matrix.artifact }})
+ BIN="target/${{ matrix.target }}/release/${{ matrix.artifact }}"
+ if [ ! -f "$BIN" ]; then
+ echo "::error::Expected binary not found: $BIN"
+ exit 1
+ fi
+ SIZE=$(stat -f%z "$BIN" 2>/dev/null || stat -c%s "$BIN")
SIZE_MB=$((SIZE / 1024 / 1024))
echo "Binary size: ${SIZE_MB}MB ($SIZE bytes)"
echo "### Binary Size: ${{ matrix.target }}" >> "$GITHUB_STEP_SUMMARY"
echo "- Size: ${SIZE_MB}MB ($SIZE bytes)" >> "$GITHUB_STEP_SUMMARY"
- if [ "$SIZE" -gt 15728640 ]; then
- echo "::error::Binary exceeds 15MB hard limit (${SIZE_MB}MB)"
+ if [ "$SIZE" -gt 41943040 ]; then
+ echo "::error::Binary exceeds 40MB safeguard (${SIZE_MB}MB)"
exit 1
+ elif [ "$SIZE" -gt 15728640 ]; then
+ echo "::warning::Binary exceeds 15MB advisory target (${SIZE_MB}MB)"
elif [ "$SIZE" -gt 5242880 ]; then
echo "::warning::Binary exceeds 5MB target (${SIZE_MB}MB)"
else
@@ -70,19 +121,19 @@ jobs:
if: runner.os != 'Windows'
run: |
cd target/${{ matrix.target }}/release
- tar czf ../../../zeroclaw-${{ matrix.target }}.tar.gz ${{ matrix.artifact }}
+ tar czf ../../../zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }} ${{ matrix.artifact }}
- name: Package (Windows)
if: runner.os == 'Windows'
run: |
cd target/${{ matrix.target }}/release
- 7z a ../../../zeroclaw-${{ matrix.target }}.zip ${{ matrix.artifact }}
+ 7z a ../../../zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }} ${{ matrix.artifact }}
- name: Upload artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: zeroclaw-${{ matrix.target }}
- path: zeroclaw-${{ matrix.target }}.*
+ path: zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }}
retention-days: 7
publish:
@@ -94,7 +145,7 @@ jobs:
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
- name: Download all artifacts
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
+ uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
path: artifacts
@@ -119,7 +170,7 @@ jobs:
cat SHA256SUMS
- name: Install cosign
- uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
+ uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
- name: Sign artifacts with cosign (keyless)
run: |
diff --git a/.github/workflows/sec-audit.yml b/.github/workflows/sec-audit.yml
index 3667725..89b4a32 100644
--- a/.github/workflows/sec-audit.yml
+++ b/.github/workflows/sec-audit.yml
@@ -3,8 +3,20 @@ name: Sec Audit
on:
push:
branches: [main]
+ paths:
+ - "Cargo.toml"
+ - "Cargo.lock"
+ - "src/**"
+ - "crates/**"
+ - "deny.toml"
pull_request:
branches: [main]
+ paths:
+ - "Cargo.toml"
+ - "Cargo.lock"
+ - "src/**"
+ - "crates/**"
+ - "deny.toml"
schedule:
- cron: "0 6 * * 1" # Weekly on Monday 6am UTC
diff --git a/.github/workflows/sec-codeql.yml b/.github/workflows/sec-codeql.yml
index f5c6c35..300e1ef 100644
--- a/.github/workflows/sec-codeql.yml
+++ b/.github/workflows/sec-codeql.yml
@@ -2,7 +2,7 @@ name: Sec CodeQL
on:
schedule:
- - cron: "0 6,18 * * *" # Twice daily at 6am and 6pm UTC
+ - cron: "0 6 * * 1" # Weekly Monday 6am UTC
workflow_dispatch:
concurrency:
diff --git a/.github/workflows/sync-contributors.yml b/.github/workflows/sync-contributors.yml
index a5fb2ec..50c7955 100644
--- a/.github/workflows/sync-contributors.yml
+++ b/.github/workflows/sync-contributors.yml
@@ -17,7 +17,7 @@ permissions:
jobs:
update-notice:
name: Update NOTICE with new contributors
- runs-on: blacksmith-2vcpu-ubuntu-2404
+ runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
diff --git a/.github/workflows/test-benchmarks.yml b/.github/workflows/test-benchmarks.yml
index 329f530..036904a 100644
--- a/.github/workflows/test-benchmarks.yml
+++ b/.github/workflows/test-benchmarks.yml
@@ -1,8 +1,8 @@
name: Test Benchmarks
on:
- push:
- branches: [main]
+ schedule:
+ - cron: "0 3 * * 1" # Weekly Monday 3am UTC
workflow_dispatch:
concurrency:
@@ -39,7 +39,7 @@ jobs:
path: |
target/criterion/
benchmark_output.txt
- retention-days: 30
+ retention-days: 7
- name: Post benchmark summary on PR
if: github.event_name == 'pull_request'
diff --git a/.gitignore b/.gitignore
index 9846ea4..89a1f8b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@ firmware/*/target
*.db
*.db-journal
.DS_Store
+._*
.wt-pr37/
__pycache__/
*.pyc
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4944885..013eb10 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -26,6 +26,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- `enc:` prefix for encrypted secrets — Use `enc2:` (ChaCha20-Poly1305) instead.
Legacy values are still decrypted for backward compatibility but should be migrated.
+### Fixed
+- **Onboarding channel menu dispatch** now uses an enum-backed selector instead of hard-coded
+ numeric match arms, preventing duplicated pattern arms and related `unreachable pattern`
+ compiler warnings in `src/onboard/wizard.rs`.
+- **OpenAI native tool spec parsing** now uses owned serializable/deserializable structs,
+ fixing a compile-time type mismatch when validating tool schemas before API calls.
+
## [0.1.0] - 2026-02-13
### Added
diff --git a/CLA.md b/CLA.md
new file mode 100644
index 0000000..1333c48
--- /dev/null
+++ b/CLA.md
@@ -0,0 +1,132 @@
+# ZeroClaw Contributor License Agreement (CLA)
+
+**Version 1.0 — February 2026**
+**ZeroClaw Labs**
+
+---
+
+## Purpose
+
+This Contributor License Agreement ("CLA") clarifies the intellectual
+property rights granted by contributors to ZeroClaw Labs. This agreement
+protects both contributors and users of the ZeroClaw project.
+
+By submitting a contribution (pull request, patch, issue with code, or any
+other form of code submission) to the ZeroClaw repository, you agree to the
+terms of this CLA.
+
+---
+
+## 1. Definitions
+
+- **"Contribution"** means any original work of authorship, including any
+ modifications or additions to existing work, submitted to ZeroClaw Labs
+ for inclusion in the ZeroClaw project.
+
+- **"You"** means the individual or legal entity submitting a Contribution.
+
+- **"ZeroClaw Labs"** means the maintainers and organization responsible
+ for the ZeroClaw project at https://github.com/zeroclaw-labs/zeroclaw.
+
+---
+
+## 2. Grant of Copyright License
+
+You grant ZeroClaw Labs and recipients of software distributed by ZeroClaw
+Labs a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
+irrevocable copyright license to:
+
+- Reproduce, prepare derivative works of, publicly display, publicly
+ perform, sublicense, and distribute your Contributions and derivative
+ works under **both the MIT License and the Apache License 2.0**.
+
+---
+
+## 3. Grant of Patent License
+
+You grant ZeroClaw Labs and recipients of software distributed by ZeroClaw
+Labs a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
+irrevocable patent license to make, have made, use, offer to sell, sell,
+import, and otherwise transfer your Contributions.
+
+This patent license applies only to patent claims licensable by you that
+are necessarily infringed by your Contribution alone or in combination with
+the ZeroClaw project.
+
+**This protects you:** if a third party files a patent claim against
+ZeroClaw that covers your Contribution, your patent license to the project
+is not revoked.
+
+---
+
+## 4. You Retain Your Rights
+
+This CLA does **not** transfer ownership of your Contribution to ZeroClaw
+Labs. You retain full copyright ownership of your Contribution. You are
+free to use your Contribution in any other project under any license.
+
+---
+
+## 5. Original Work
+
+You represent that:
+
+1. Each Contribution is your original creation, or you have sufficient
+ rights to submit it under this CLA.
+2. Your Contribution does not knowingly infringe any third-party patent,
+ copyright, trademark, or other intellectual property right.
+3. If your employer has rights to intellectual property you create, you
+ have received permission to submit the Contribution, or your employer
+ has signed a corporate CLA with ZeroClaw Labs.
+
+---
+
+## 6. No Trademark Rights
+
+This CLA does not grant you any rights to use the ZeroClaw name,
+trademarks, service marks, or logos. See TRADEMARK.md for trademark policy.
+
+---
+
+## 7. Attribution
+
+ZeroClaw Labs will maintain attribution to contributors in the repository
+commit history and NOTICE file. Your contributions are permanently and
+publicly recorded.
+
+---
+
+## 8. Dual-License Commitment
+
+All Contributions accepted into the ZeroClaw project are licensed under
+both:
+
+- **MIT License** — permissive open-source use
+- **Apache License 2.0** — patent protection and stronger IP guarantees
+
+This dual-license model ensures maximum compatibility and protection for
+the entire contributor community.
+
+---
+
+## 9. How to Agree
+
+By opening a pull request or submitting a patch to the ZeroClaw repository,
+you indicate your agreement to this CLA. No separate signature is required
+for individual contributors.
+
+For **corporate contributors** (submitting on behalf of a company or
+organization), please open an issue titled "Corporate CLA — [Company Name]"
+and a maintainer will follow up.
+
+---
+
+## 10. Questions
+
+If you have questions about this CLA, open an issue at:
+https://github.com/zeroclaw-labs/zeroclaw/issues
+
+---
+
+*This CLA is based on the Apache Individual Contributor License Agreement
+v2.0, adapted for the ZeroClaw dual-license model.*
diff --git a/Cargo.lock b/Cargo.lock
index d058410..72f07ed 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -47,7 +47,21 @@ checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
dependencies = [
"cfg-if",
"cipher",
- "cpufeatures",
+ "cpufeatures 0.2.17",
+]
+
+[[package]]
+name = "aes-gcm"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1"
+dependencies = [
+ "aead",
+ "aes",
+ "cipher",
+ "ctr",
+ "ghash",
+ "subtle",
]
[[package]]
@@ -71,6 +85,15 @@ dependencies = [
"memchr",
]
+[[package]]
+name = "alloca"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5a7d05ea6aea7e9e64d25b9156ba2fee3fdd659e34e41063cd2fc7cd020d7f4"
+dependencies = [
+ "cc",
+]
+
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -144,9 +167,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.101"
+version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
+checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]]
name = "anymap2"
@@ -235,9 +258,9 @@ dependencies = [
[[package]]
name = "async-compression"
-version = "0.4.39"
+version = "0.4.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68650b7df54f0293fd061972a0fb05aaf4fc0879d3b3d21a638a182c5c543b9f"
+checksum = "7d67d43201f4d20c78bcda740c142ca52482d81da80681533d33bf3f0596c8e2"
dependencies = [
"compression-codecs",
"compression-core",
@@ -281,11 +304,22 @@ dependencies = [
"futures-lite",
"parking",
"polling",
- "rustix 1.1.3",
+ "rustix",
"slab",
"windows-sys 0.61.2",
]
+[[package]]
+name = "async-lock"
+version = "3.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311"
+dependencies = [
+ "event-listener 5.4.1",
+ "event-listener-strategy",
+ "pin-project-lite",
+]
+
[[package]]
name = "async-stream"
version = "0.3.6"
@@ -360,6 +394,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
dependencies = [
"axum-core",
+ "axum-macros",
"base64",
"bytes",
"form_urlencoded",
@@ -382,7 +417,7 @@ dependencies = [
"sha1",
"sync_wrapper",
"tokio",
- "tokio-tungstenite 0.28.0",
+ "tokio-tungstenite",
"tower",
"tower-layer",
"tower-service",
@@ -406,6 +441,17 @@ dependencies = [
"tower-service",
]
+[[package]]
+name = "axum-macros"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.116",
+]
+
[[package]]
name = "backon"
version = "1.6.0"
@@ -429,6 +475,12 @@ version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06"
+[[package]]
+name = "basic-udev"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a45f9771ced8a774de5e5ebffbe520f52e3943bf5a9a6baa3a5d14a5de1afe6"
+
[[package]]
name = "bincode"
version = "2.0.1"
@@ -513,7 +565,7 @@ dependencies = [
"cc",
"cfg-if",
"constant_time_eq",
- "cpufeatures",
+ "cpufeatures 0.2.17",
]
[[package]]
@@ -586,6 +638,9 @@ name = "bytes"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
+dependencies = [
+ "serde",
+]
[[package]]
name = "bytesize"
@@ -646,7 +701,18 @@ checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818"
dependencies = [
"cfg-if",
"cipher",
- "cpufeatures",
+ "cpufeatures 0.2.17",
+]
+
+[[package]]
+name = "chacha20"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601"
+dependencies = [
+ "cfg-if",
+ "cpufeatures 0.3.0",
+ "rand_core 0.10.0",
]
[[package]]
@@ -656,7 +722,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35"
dependencies = [
"aead",
- "chacha20",
+ "chacha20 0.9.1",
"cipher",
"poly1305",
"zeroize",
@@ -736,9 +802,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.5.59"
+version = "4.5.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5caf74d17c3aec5495110c34cc3f78644bfa89af6c8993ed4de2790e49b6499"
+checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a"
dependencies = [
"clap_builder",
"clap_derive",
@@ -746,9 +812,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.5.59"
+version = "4.5.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "370daa45065b80218950227371916a1633217ae42b2715b2287b606dcd618e24"
+checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876"
dependencies = [
"anstream",
"anstyle",
@@ -801,9 +867,9 @@ checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
[[package]]
name = "compression-codecs"
-version = "0.4.36"
+version = "0.4.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "00828ba6fd27b45a448e57dbfe84f1029d4c9f26b368157e9a448a5f49a2ec2a"
+checksum = "eb7b51a7d9c967fc26773061ba86150f19c50c0d65c887cb1fbe295fd16619b7"
dependencies = [
"compression-core",
"flate2",
@@ -881,13 +947,21 @@ dependencies = [
]
[[package]]
-name = "core-foundation"
-version = "0.9.4"
+name = "cookie_store"
+version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
+checksum = "15b2c103cf610ec6cae3da84a766285b42fd16aad564758459e6ecf128c75206"
dependencies = [
- "core-foundation-sys",
- "libc",
+ "cookie 0.18.1",
+ "document-features",
+ "idna",
+ "indexmap",
+ "log",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "time",
+ "url",
]
[[package]]
@@ -924,6 +998,15 @@ dependencies = [
"libc",
]
+[[package]]
+name = "cpufeatures"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "crc32fast"
version = "1.5.0"
@@ -935,26 +1018,24 @@ dependencies = [
[[package]]
name = "criterion"
-version = "0.5.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
+checksum = "950046b2aa2492f9a536f5f4f9a3de7b9e2476e575e05bd6c333371add4d98f3"
dependencies = [
+ "alloca",
"anes",
"cast",
"ciborium",
"clap",
"criterion-plot",
- "futures",
- "is-terminal",
- "itertools 0.10.5",
+ "itertools 0.13.0",
"num-traits",
- "once_cell",
"oorandom",
+ "page_size",
"plotters",
"rayon",
"regex",
"serde",
- "serde_derive",
"serde_json",
"tinytemplate",
"tokio",
@@ -963,12 +1044,12 @@ dependencies = [
[[package]]
name = "criterion-plot"
-version = "0.5.0"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
+checksum = "d8d80a2f4f5b554395e47b5d8305bc3d27813bacb73493eb1001e8f76dae29ea"
dependencies = [
"cast",
- "itertools 0.10.5",
+ "itertools 0.13.0",
]
[[package]]
@@ -982,6 +1063,15 @@ dependencies = [
"winnow 0.6.26",
]
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
+dependencies = [
+ "crossbeam-utils",
+]
+
[[package]]
name = "crossbeam-deque"
version = "0.8.6"
@@ -1061,7 +1151,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
dependencies = [
"cfg-if",
- "cpufeatures",
+ "cpufeatures 0.2.17",
"curve25519-dalek-derive",
"digest",
"fiat-crypto",
@@ -1117,6 +1207,20 @@ dependencies = [
"syn 2.0.116",
]
+[[package]]
+name = "dashmap"
+version = "6.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+ "hashbrown 0.14.5",
+ "lock_api",
+ "once_cell",
+ "parking_lot_core",
+]
+
[[package]]
name = "data-encoding"
version = "2.10.0"
@@ -1304,22 +1408,13 @@ dependencies = [
"subtle",
]
-[[package]]
-name = "directories"
-version = "5.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
-dependencies = [
- "dirs-sys 0.4.1",
-]
-
[[package]]
name = "directories"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d"
dependencies = [
- "dirs-sys 0.5.0",
+ "dirs-sys",
]
[[package]]
@@ -1328,19 +1423,7 @@ version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e"
dependencies = [
- "dirs-sys 0.5.0",
-]
-
-[[package]]
-name = "dirs-sys"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
-dependencies = [
- "libc",
- "option-ext",
- "redox_users 0.4.6",
- "windows-sys 0.48.0",
+ "dirs-sys",
]
[[package]]
@@ -1351,7 +1434,7 @@ checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
dependencies = [
"libc",
"option-ext",
- "redox_users 0.5.2",
+ "redox_users",
"windows-sys 0.61.2",
]
@@ -1386,12 +1469,27 @@ dependencies = [
"syn 2.0.116",
]
+[[package]]
+name = "document-features"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61"
+dependencies = [
+ "litrs",
+]
+
[[package]]
name = "dunce"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
+[[package]]
+name = "dyn-clone"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
+
[[package]]
name = "ecb"
version = "0.1.2"
@@ -1484,6 +1582,25 @@ dependencies = [
"syn 2.0.116",
]
+[[package]]
+name = "env_filter"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "env_logger"
+version = "0.11.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d"
+dependencies = [
+ "env_filter",
+ "log",
+]
+
[[package]]
name = "equivalent"
version = "1.0.2"
@@ -1675,6 +1792,12 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
+[[package]]
+name = "fixedbitset"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
+
[[package]]
name = "flate2"
version = "1.1.9"
@@ -1683,6 +1806,7 @@ checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c"
dependencies = [
"crc32fast",
"miniz_oxide",
+ "zlib-rs",
]
[[package]]
@@ -1881,10 +2005,21 @@ dependencies = [
"cfg-if",
"libc",
"r-efi",
+ "rand_core 0.10.0",
"wasip2",
"wasip3",
]
+[[package]]
+name = "ghash"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1"
+dependencies = [
+ "opaque-debug",
+ "polyval",
+]
+
[[package]]
name = "gimli"
version = "0.32.3"
@@ -2063,12 +2198,6 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
-[[package]]
-name = "hermit-abi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
-
[[package]]
name = "hermit-abi"
version = "0.5.2"
@@ -2087,12 +2216,12 @@ version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "565dd4c730b8f8b2c0fb36df6be12e5470ae10895ddcc4e9dcfbfb495de202b0"
dependencies = [
+ "basic-udev",
"cc",
"cfg-if",
"libc",
"nix 0.27.1",
"pkg-config",
- "udev",
"windows-sys 0.48.0",
]
@@ -2576,18 +2705,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "617ee6cf8e3f66f3b4ea67a4058564628cde41901316e19f559e14c7c72c5e7b"
dependencies = [
"core-foundation-sys",
- "mach2",
+ "mach2 0.4.3",
]
[[package]]
-name = "io-lifetimes"
-version = "1.0.11"
+name = "io-kit-sys"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
+checksum = "06d3a048d09fbb6597dbf7c69f40d14df4a49487db1487191618c893fc3b1c26"
dependencies = [
- "hermit-abi 0.3.9",
- "libc",
- "windows-sys 0.48.0",
+ "core-foundation-sys",
+ "mach2 0.5.0",
]
[[package]]
@@ -2606,17 +2734,6 @@ dependencies = [
"serde",
]
-[[package]]
-name = "is-terminal"
-version = "0.4.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46"
-dependencies = [
- "hermit-abi 0.5.2",
- "libc",
- "windows-sys 0.61.2",
-]
-
[[package]]
name = "is_terminal_polyfill"
version = "1.70.2"
@@ -2632,6 +2749,15 @@ dependencies = [
"either",
]
+[[package]]
+name = "itertools"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
+dependencies = [
+ "either",
+]
+
[[package]]
name = "itertools"
version = "0.14.0"
@@ -2800,28 +2926,6 @@ dependencies = [
"vcpkg",
]
-[[package]]
-name = "libudev-sys"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c8469b4a23b962c1396b9b451dda50ef5b283e8dd309d69033475fa9b334324"
-dependencies = [
- "libc",
- "pkg-config",
-]
-
-[[package]]
-name = "linux-raw-sys"
-version = "0.4.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab"
-
-[[package]]
-name = "linux-raw-sys"
-version = "0.9.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
-
[[package]]
name = "linux-raw-sys"
version = "0.11.0"
@@ -2840,6 +2944,12 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
+[[package]]
+name = "litrs"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092"
+
[[package]]
name = "lock_api"
version = "0.4.14"
@@ -2904,6 +3014,15 @@ dependencies = [
"libc",
]
+[[package]]
+name = "mach2"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a1b95cd5421ec55b445b5ae102f5ea0e768de1f82bd3001e11f426c269c3aea"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "macroific"
version = "2.0.0"
@@ -3306,6 +3425,12 @@ dependencies = [
"digest",
]
+[[package]]
+name = "md5"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae960838283323069879657ca3de837e9f7bbb4c7bf6ea7f1b290d5e9476d2e0"
+
[[package]]
name = "memchr"
version = "2.8.0"
@@ -3397,6 +3522,32 @@ dependencies = [
"winapi",
]
+[[package]]
+name = "moka"
+version = "0.12.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4ac832c50ced444ef6be0767a008b02c106a909ba79d1d830501e94b96f6b7e"
+dependencies = [
+ "async-lock",
+ "crossbeam-channel",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+ "equivalent",
+ "event-listener 5.4.1",
+ "futures-util",
+ "parking_lot",
+ "portable-atomic",
+ "smallvec",
+ "tagptr",
+ "uuid",
+]
+
+[[package]]
+name = "multimap"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084"
+
[[package]]
name = "new_debug_unreachable"
version = "1.0.6"
@@ -3518,45 +3669,26 @@ version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b"
dependencies = [
- "hermit-abi 0.5.2",
+ "hermit-abi",
"libc",
]
[[package]]
name = "nusb"
-version = "0.1.14"
+version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f861541f15de120eae5982923d073bfc0c1a65466561988c82d6e197734c19e"
+checksum = "5750d884c774a2862b0049b0318aea27cecc9e873485540af5ed8ab8841247da"
dependencies = [
- "atomic-waker",
- "core-foundation 0.9.4",
+ "core-foundation",
"core-foundation-sys",
"futures-core",
- "io-kit-sys",
- "libc",
+ "io-kit-sys 0.5.0",
+ "linux-raw-sys",
"log",
"once_cell",
- "rustix 0.38.44",
+ "rustix",
"slab",
- "windows-sys 0.48.0",
-]
-
-[[package]]
-name = "nusb"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d0226f4db3ee78f820747cf713767722877f6449d7a0fcfbf2ec3b840969763f"
-dependencies = [
- "core-foundation 0.10.1",
- "core-foundation-sys",
- "futures-core",
- "io-kit-sys",
- "linux-raw-sys 0.9.4",
- "log",
- "once_cell",
- "rustix 1.1.3",
- "slab",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -3723,6 +3855,16 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
+[[package]]
+name = "page_size"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
[[package]]
name = "parking"
version = "2.2.1"
@@ -3794,6 +3936,17 @@ version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+[[package]]
+name = "petgraph"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8701b58ea97060d5e5b155d383a69952a60943f0e6dfe30b04c287beb0b27455"
+dependencies = [
+ "fixedbitset",
+ "hashbrown 0.15.5",
+ "indexmap",
+]
+
[[package]]
name = "phf"
version = "0.11.3"
@@ -3828,10 +3981,20 @@ version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
dependencies = [
- "phf_generator",
+ "phf_generator 0.11.3",
"phf_shared 0.11.3",
]
+[[package]]
+name = "phf_codegen"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49aa7f9d80421bca176ca8dbfebe668cc7a2684708594ec9f3c0db0805d5d6e1"
+dependencies = [
+ "phf_generator 0.13.1",
+ "phf_shared 0.13.1",
+]
+
[[package]]
name = "phf_generator"
version = "0.11.3"
@@ -3842,6 +4005,16 @@ dependencies = [
"rand 0.8.5",
]
+[[package]]
+name = "phf_generator"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737"
+dependencies = [
+ "fastrand",
+ "phf_shared 0.13.1",
+]
+
[[package]]
name = "phf_shared"
version = "0.11.3"
@@ -3953,9 +4126,9 @@ checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218"
dependencies = [
"cfg-if",
"concurrent-queue",
- "hermit-abi 0.5.2",
+ "hermit-abi",
"pin-project-lite",
- "rustix 1.1.3",
+ "rustix",
"windows-sys 0.61.2",
]
@@ -3965,7 +4138,19 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf"
dependencies = [
- "cpufeatures",
+ "cpufeatures 0.2.17",
+ "opaque-debug",
+ "universal-hash",
+]
+
+[[package]]
+name = "polyval"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25"
+dependencies = [
+ "cfg-if",
+ "cpufeatures 0.2.17",
"opaque-debug",
"universal-hash",
]
@@ -3976,6 +4161,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60f6ce597ecdcc9a098e7fddacb1065093a3d66446fa16c675e7e71d1b5c28e6"
+[[package]]
+name = "portable-atomic"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49"
+
[[package]]
name = "postgres"
version = "0.19.12"
@@ -4068,9 +4259,9 @@ dependencies = [
[[package]]
name = "probe-rs"
-version = "0.30.0"
+version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ee27329ac37fa02b194c62a4e3c1aa053739884ea7bcf861249866d3bf7de00"
+checksum = "ee50102aaa214117fc4fbe1311077835f0f4faa71e4a769bf65f955cc020ee34"
dependencies = [
"anyhow",
"async-io",
@@ -4087,8 +4278,8 @@ dependencies = [
"ihex",
"itertools 0.14.0",
"jep106",
- "nusb 0.1.14",
- "object 0.37.3",
+ "nusb",
+ "object 0.38.1",
"parking_lot",
"probe-rs-target",
"rmp-serde",
@@ -4104,9 +4295,9 @@ dependencies = [
[[package]]
name = "probe-rs-target"
-version = "0.30.0"
+version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2239aca5dc62c68ca6d8ff0051fe617cb8363b803380fbc60567e67c82b474df"
+checksum = "031bed1313b45d93dae4ca8f0fee098530c6632e4ebd9e2769d5a49cdef273d3"
dependencies = [
"base64",
"indexmap",
@@ -4122,7 +4313,7 @@ version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
dependencies = [
- "toml_edit 0.23.10+spec-1.0.0",
+ "toml_edit",
]
[[package]]
@@ -4189,6 +4380,23 @@ dependencies = [
"prost-derive 0.14.3",
]
+[[package]]
+name = "prost-build"
+version = "0.14.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "343d3bd7056eda839b03204e68deff7d1b13aba7af2b2fd16890697274262ee7"
+dependencies = [
+ "heck",
+ "itertools 0.14.0",
+ "log",
+ "multimap",
+ "petgraph",
+ "prost 0.14.3",
+ "prost-types",
+ "regex",
+ "tempfile",
+]
+
[[package]]
name = "prost-derive"
version = "0.13.5"
@@ -4215,6 +4423,35 @@ dependencies = [
"syn 2.0.116",
]
+[[package]]
+name = "prost-types"
+version = "0.14.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8991c4cbdb8bc5b11f0b074ffe286c30e523de90fee5ba8132f1399f23cb3dd7"
+dependencies = [
+ "prost 0.14.3",
+]
+
+[[package]]
+name = "protobuf"
+version = "3.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d65a1d4ddae7d8b5de68153b48f6aa3bba8cb002b243dbdbc55a5afbc98f99f4"
+dependencies = [
+ "once_cell",
+ "protobuf-support",
+ "thiserror 1.0.69",
+]
+
+[[package]]
+name = "protobuf-support"
+version = "3.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e36c2f31e0a47f9280fb347ef5e461ffcd2c52dd520d8e216b52f93b0b0d7d6"
+dependencies = [
+ "thiserror 1.0.69",
+]
+
[[package]]
name = "psm"
version = "0.1.30"
@@ -4346,6 +4583,17 @@ dependencies = [
"rand_core 0.9.5",
]
+[[package]]
+name = "rand"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8"
+dependencies = [
+ "chacha20 0.10.0",
+ "getrandom 0.4.1",
+ "rand_core 0.10.0",
+]
+
[[package]]
name = "rand_chacha"
version = "0.3.1"
@@ -4384,6 +4632,12 @@ dependencies = [
"getrandom 0.3.4",
]
+[[package]]
+name = "rand_core"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba"
+
[[package]]
name = "rand_xoshiro"
version = "0.7.0"
@@ -4443,17 +4697,6 @@ dependencies = [
"bitflags 2.11.0",
]
-[[package]]
-name = "redox_users"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
-dependencies = [
- "getrandom 0.2.17",
- "libredox",
- "thiserror 1.0.69",
-]
-
[[package]]
name = "redox_users"
version = "0.5.2"
@@ -4465,6 +4708,26 @@ dependencies = [
"thiserror 2.0.18",
]
+[[package]]
+name = "ref-cast"
+version = "1.0.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d"
+dependencies = [
+ "ref-cast-impl",
+]
+
+[[package]]
+name = "ref-cast-impl"
+version = "1.0.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.116",
+]
+
[[package]]
name = "regex"
version = "1.12.3"
@@ -4794,19 +5057,6 @@ dependencies = [
"semver",
]
-[[package]]
-name = "rustix"
-version = "0.38.44"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154"
-dependencies = [
- "bitflags 2.11.0",
- "errno",
- "libc",
- "linux-raw-sys 0.4.15",
- "windows-sys 0.59.0",
-]
-
[[package]]
name = "rustix"
version = "1.1.3"
@@ -4816,7 +5066,7 @@ dependencies = [
"bitflags 2.11.0",
"errno",
"libc",
- "linux-raw-sys 0.11.0",
+ "linux-raw-sys",
"windows-sys 0.61.2",
]
@@ -4909,6 +5159,31 @@ dependencies = [
"windows-sys 0.61.2",
]
+[[package]]
+name = "schemars"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc"
+dependencies = [
+ "dyn-clone",
+ "ref-cast",
+ "schemars_derive",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "schemars_derive"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d115b50f4aaeea07e79c1912f645c7513d81715d0420f8bc77a18c6260b307f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "serde_derive_internals",
+ "syn 2.0.116",
+]
+
[[package]]
name = "scopeguard"
version = "1.2.0"
@@ -4939,7 +5214,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d17b898a6d6948c3a8ee4372c17cb384f90d2e6e912ef00895b14fd7ab54ec38"
dependencies = [
"bitflags 2.11.0",
- "core-foundation 0.10.1",
+ "core-foundation",
"core-foundation-sys",
"libc",
"security-framework-sys",
@@ -4977,6 +5252,15 @@ dependencies = [
"serde_derive",
]
+[[package]]
+name = "serde-big-array"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11fc7cc2c76d73e0f27ee52abbd64eec84d46f370c88371120433196934e4b7f"
+dependencies = [
+ "serde",
+]
+
[[package]]
name = "serde-wasm-bindgen"
version = "0.6.5"
@@ -5018,6 +5302,17 @@ dependencies = [
"syn 2.0.116",
]
+[[package]]
+name = "serde_derive_internals"
+version = "0.29.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.116",
+]
+
[[package]]
name = "serde_html_form"
version = "0.2.8"
@@ -5064,15 +5359,6 @@ dependencies = [
"serde",
]
-[[package]]
-name = "serde_spanned"
-version = "0.6.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
-dependencies = [
- "serde",
-]
-
[[package]]
name = "serde_spanned"
version = "1.0.4"
@@ -5130,10 +5416,10 @@ checksum = "2acaf3f973e8616d7ceac415f53fc60e190b2a686fbcf8d27d0256c741c5007b"
dependencies = [
"bitflags 2.11.0",
"cfg-if",
- "core-foundation 0.10.1",
+ "core-foundation",
"core-foundation-sys",
- "io-kit-sys",
- "mach2",
+ "io-kit-sys 0.4.1",
+ "mach2 0.4.3",
"nix 0.26.4",
"scopeguard",
"unescaper",
@@ -5147,7 +5433,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
"cfg-if",
- "cpufeatures",
+ "cpufeatures 0.2.17",
"digest",
]
@@ -5158,7 +5444,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
dependencies = [
"cfg-if",
- "cpufeatures",
+ "cpufeatures 0.2.17",
"digest",
]
@@ -5217,6 +5503,12 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"
+[[package]]
+name = "simdutf8"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
+
[[package]]
name = "siphasher"
version = "1.0.2"
@@ -5308,7 +5600,7 @@ version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0"
dependencies = [
- "phf_generator",
+ "phf_generator 0.11.3",
"phf_shared 0.11.3",
"proc-macro2",
"quote",
@@ -5400,6 +5692,12 @@ dependencies = [
"syn 2.0.116",
]
+[[package]]
+name = "tagptr"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
+
[[package]]
name = "tap"
version = "1.0.1"
@@ -5415,7 +5713,7 @@ dependencies = [
"fastrand",
"getrandom 0.4.1",
"once_cell",
- "rustix 1.1.3",
+ "rustix",
"windows-sys 0.61.2",
]
@@ -5656,9 +5954,9 @@ dependencies = [
[[package]]
name = "tokio-tungstenite"
-version = "0.24.0"
+version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9"
+checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857"
dependencies = [
"futures-util",
"log",
@@ -5666,22 +5964,10 @@ dependencies = [
"rustls-pki-types",
"tokio",
"tokio-rustls",
- "tungstenite 0.24.0",
+ "tungstenite",
"webpki-roots 0.26.11",
]
-[[package]]
-name = "tokio-tungstenite"
-version = "0.28.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857"
-dependencies = [
- "futures-util",
- "log",
- "tokio",
- "tungstenite 0.28.0",
-]
-
[[package]]
name = "tokio-util"
version = "0.7.18"
@@ -5696,15 +5982,24 @@ dependencies = [
]
[[package]]
-name = "toml"
-version = "0.8.23"
+name = "tokio-websockets"
+version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
+checksum = "8b6aa6c8b5a31e06fd3760eb5c1b8d9072e30731f0467ee3795617fe768e7449"
dependencies = [
- "serde",
- "serde_spanned 0.6.9",
- "toml_datetime 0.6.11",
- "toml_edit 0.22.27",
+ "base64",
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "http 1.4.0",
+ "httparse",
+ "rand 0.9.2",
+ "ring",
+ "rustls-pki-types",
+ "simdutf8",
+ "tokio",
+ "tokio-rustls",
+ "tokio-util",
]
[[package]]
@@ -5714,7 +6009,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863"
dependencies = [
"serde_core",
- "serde_spanned 1.0.4",
+ "serde_spanned",
"toml_datetime 0.7.5+spec-1.1.0",
"toml_parser",
"winnow 0.7.14",
@@ -5722,28 +6017,19 @@ dependencies = [
[[package]]
name = "toml"
-version = "1.0.2+spec-1.1.0"
+version = "1.0.1+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d1dfefef6a142e93f346b64c160934eb13b5594b84ab378133ac6815cb2bd57f"
+checksum = "bbe30f93627849fa362d4a602212d41bb237dc2bd0f8ba0b2ce785012e124220"
dependencies = [
"indexmap",
"serde_core",
- "serde_spanned 1.0.4",
+ "serde_spanned",
"toml_datetime 1.0.0+spec-1.1.0",
"toml_parser",
"toml_writer",
"winnow 0.7.14",
]
-[[package]]
-name = "toml_datetime"
-version = "0.6.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
-dependencies = [
- "serde",
-]
-
[[package]]
name = "toml_datetime"
version = "0.7.5+spec-1.1.0"
@@ -5762,20 +6048,6 @@ dependencies = [
"serde_core",
]
-[[package]]
-name = "toml_edit"
-version = "0.22.27"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
-dependencies = [
- "indexmap",
- "serde",
- "serde_spanned 0.6.9",
- "toml_datetime 0.6.11",
- "toml_write",
- "winnow 0.7.14",
-]
-
[[package]]
name = "toml_edit"
version = "0.23.10+spec-1.0.0"
@@ -5790,19 +6062,13 @@ dependencies = [
[[package]]
name = "toml_parser"
-version = "1.0.9+spec-1.1.0"
+version = "1.0.8+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4"
+checksum = "0742ff5ff03ea7e67c8ae6c93cac239e0d9784833362da3f9a9c1da8dfefcbdc"
dependencies = [
"winnow 0.7.14",
]
-[[package]]
-name = "toml_write"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
-
[[package]]
name = "toml_writer"
version = "1.0.6+spec-1.1.0"
@@ -5964,26 +6230,6 @@ version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2df906b07856748fa3f6e0ad0cbaa047052d4a7dd609e231c4f72cee8c36f31"
-[[package]]
-name = "tungstenite"
-version = "0.24.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a"
-dependencies = [
- "byteorder",
- "bytes",
- "data-encoding",
- "http 1.4.0",
- "httparse",
- "log",
- "rand 0.8.5",
- "rustls",
- "rustls-pki-types",
- "sha1",
- "thiserror 1.0.69",
- "utf-8",
-]
-
[[package]]
name = "tungstenite"
version = "0.28.0"
@@ -5996,6 +6242,8 @@ dependencies = [
"httparse",
"log",
"rand 0.9.2",
+ "rustls",
+ "rustls-pki-types",
"sha1",
"thiserror 2.0.18",
"utf-8",
@@ -6016,6 +6264,26 @@ dependencies = [
"pom",
]
+[[package]]
+name = "typed-builder"
+version = "0.23.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31aa81521b70f94402501d848ccc0ecaa8f93c8eb6999eb9747e72287757ffda"
+dependencies = [
+ "typed-builder-macro",
+]
+
+[[package]]
+name = "typed-builder-macro"
+version = "0.23.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "076a02dc54dd46795c2e9c8282ed40bcfb1e22747e955de9389a1de28190fb26"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.116",
+]
+
[[package]]
name = "typenum"
version = "1.19.0"
@@ -6037,18 +6305,6 @@ version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e36a83ea2b3c704935a01b4642946aadd445cea40b10935e3f8bd8052b8193d6"
-[[package]]
-name = "udev"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50051c6e22be28ee6f217d50014f3bc29e81c20dc66ff7ca0d5c5226e1dcc5a1"
-dependencies = [
- "io-lifetimes",
- "libc",
- "libudev-sys",
- "pkg-config",
-]
-
[[package]]
name = "uf2-decode"
version = "0.2.0"
@@ -6088,9 +6344,9 @@ checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
[[package]]
name = "unicode-ident"
-version = "1.0.24"
+version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
+checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e"
[[package]]
name = "unicode-normalization"
@@ -6153,6 +6409,37 @@ version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae"
+[[package]]
+name = "ureq"
+version = "3.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fdc97a28575b85cfedf2a7e7d3cc64b3e11bd8ac766666318003abbacc7a21fc"
+dependencies = [
+ "base64",
+ "cookie_store",
+ "log",
+ "percent-encoding",
+ "rustls",
+ "rustls-pki-types",
+ "serde",
+ "serde_json",
+ "ureq-proto",
+ "utf-8",
+ "webpki-roots 1.0.6",
+]
+
+[[package]]
+name = "ureq-proto"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
+dependencies = [
+ "base64",
+ "http 1.4.0",
+ "httparse",
+ "log",
+]
+
[[package]]
name = "url"
version = "2.5.8"
@@ -6256,6 +6543,223 @@ dependencies = [
"zeroize",
]
+[[package]]
+name = "wa-rs"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fecb468bdfe1e7d4c06a1bd12908c66edaca59024862cb64757ad11c3b948b1"
+dependencies = [
+ "anyhow",
+ "async-channel 2.5.0",
+ "async-trait",
+ "base64",
+ "bytes",
+ "chrono",
+ "dashmap",
+ "env_logger",
+ "hex",
+ "log",
+ "moka",
+ "prost 0.14.3",
+ "rand 0.9.2",
+ "rand_core 0.10.0",
+ "scopeguard",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.18",
+ "tokio",
+ "wa-rs-binary",
+ "wa-rs-core",
+ "wa-rs-proto",
+]
+
+[[package]]
+name = "wa-rs-appstate"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3845137b3aead2d99de7c6744784bf2f5a908be9dc97a3dbd7585dc40296925c"
+dependencies = [
+ "anyhow",
+ "bytemuck",
+ "hex",
+ "hkdf",
+ "log",
+ "prost 0.14.3",
+ "serde",
+ "serde-big-array",
+ "serde_json",
+ "sha2",
+ "thiserror 2.0.18",
+ "wa-rs-binary",
+ "wa-rs-libsignal",
+ "wa-rs-proto",
+]
+
+[[package]]
+name = "wa-rs-binary"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3b30a6e11aebb39c07392675256ead5e2570c31382bd4835d6ddc877284b6be"
+dependencies = [
+ "flate2",
+ "phf 0.13.1",
+ "phf_codegen 0.13.1",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "wa-rs-core"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed13bb2aff2de43fc4dd821955f03ea48a1d31eda3c80efe6f905898e304d11f"
+dependencies = [
+ "aes",
+ "aes-gcm",
+ "anyhow",
+ "async-channel 2.5.0",
+ "async-trait",
+ "base64",
+ "bytes",
+ "chrono",
+ "ctr",
+ "flate2",
+ "hex",
+ "hkdf",
+ "hmac",
+ "log",
+ "md5",
+ "once_cell",
+ "pbkdf2",
+ "prost 0.14.3",
+ "protobuf",
+ "rand 0.9.2",
+ "rand_core 0.10.0",
+ "serde",
+ "serde-big-array",
+ "serde_json",
+ "sha2",
+ "thiserror 2.0.18",
+ "typed-builder",
+ "wa-rs-appstate",
+ "wa-rs-binary",
+ "wa-rs-derive",
+ "wa-rs-libsignal",
+ "wa-rs-noise",
+ "wa-rs-proto",
+]
+
+[[package]]
+name = "wa-rs-derive"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75c03f610c9bc960e653d5d6d2a4cced9013bedbe5e6e8948787bbd418e4137c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.116",
+]
+
+[[package]]
+name = "wa-rs-libsignal"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3471be8ff079ae4959fcddf2e7341281e5c6756bdc6a66454ea1a8e474d14576"
+dependencies = [
+ "aes",
+ "aes-gcm",
+ "arrayref",
+ "async-trait",
+ "cbc",
+ "chrono",
+ "ctr",
+ "curve25519-dalek",
+ "derive_more 2.1.1",
+ "displaydoc",
+ "ghash",
+ "hex",
+ "hkdf",
+ "hmac",
+ "itertools 0.14.0",
+ "log",
+ "prost 0.14.3",
+ "rand 0.9.2",
+ "serde",
+ "sha1",
+ "sha2",
+ "subtle",
+ "thiserror 2.0.18",
+ "uuid",
+ "wa-rs-proto",
+ "x25519-dalek",
+]
+
+[[package]]
+name = "wa-rs-noise"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3efb3891c1e22ce54646dc581e34e79377dc402ed8afb11a7671c5ef629b3ae"
+dependencies = [
+ "aes-gcm",
+ "anyhow",
+ "bytes",
+ "hkdf",
+ "log",
+ "prost 0.14.3",
+ "rand 0.9.2",
+ "rand_core 0.10.0",
+ "sha2",
+ "thiserror 2.0.18",
+ "wa-rs-binary",
+ "wa-rs-libsignal",
+ "wa-rs-proto",
+]
+
+[[package]]
+name = "wa-rs-proto"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ada50ee03752f0e66ada8cf415ed5f90d572d34039b058ce23d8b13493e510"
+dependencies = [
+ "prost 0.14.3",
+ "prost-build",
+ "serde",
+]
+
+[[package]]
+name = "wa-rs-tokio-transport"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfc638c168949dc99cbb756a776869898d4ae654b36b90d5f7ce2d32bf92a404"
+dependencies = [
+ "anyhow",
+ "async-channel 2.5.0",
+ "async-trait",
+ "bytes",
+ "futures-util",
+ "http 1.4.0",
+ "log",
+ "rustls",
+ "tokio",
+ "tokio-rustls",
+ "tokio-websockets",
+ "wa-rs-core",
+ "webpki-roots 1.0.6",
+]
+
+[[package]]
+name = "wa-rs-ureq-http"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88d0c7fff8a7bd93d0c17af8d797a3934144fa269fe47a615635f3bf04238806"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "tokio",
+ "ureq",
+ "wa-rs-core",
+]
+
[[package]]
name = "walkdir"
version = "2.5.0"
@@ -6469,7 +6973,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57ffde1dc01240bdf9992e3205668b235e59421fd085e8a317ed98da0178d414"
dependencies = [
"phf 0.11.3",
- "phf_codegen",
+ "phf_codegen 0.11.3",
"string_cache",
"string_cache_codegen",
]
@@ -7067,7 +7571,7 @@ dependencies = [
"criterion",
"cron",
"dialoguer",
- "directories 6.0.0",
+ "directories",
"fantoccini",
"futures",
"futures-util",
@@ -7080,7 +7584,7 @@ dependencies = [
"lettre",
"mail-parser",
"matrix-sdk",
- "nusb 0.2.1",
+ "nusb",
"opentelemetry",
"opentelemetry-otlp",
"opentelemetry_sdk",
@@ -7090,7 +7594,7 @@ dependencies = [
"probe-rs",
"prometheus",
"prost 0.14.3",
- "rand 0.9.2",
+ "rand 0.10.0",
"regex",
"reqwest",
"ring",
@@ -7098,7 +7602,9 @@ dependencies = [
"rusqlite",
"rustls",
"rustls-pki-types",
+ "schemars",
"serde",
+ "serde-big-array",
"serde_json",
"sha2",
"shellexpand",
@@ -7107,15 +7613,22 @@ dependencies = [
"tokio",
"tokio-rustls",
"tokio-serial",
- "tokio-tungstenite 0.24.0",
+ "tokio-stream",
+ "tokio-tungstenite",
"tokio-util",
- "toml 1.0.2+spec-1.1.0",
+ "toml 1.0.1+spec-1.1.0",
"tower",
"tower-http",
"tracing",
"tracing-subscriber",
"urlencoding",
"uuid",
+ "wa-rs",
+ "wa-rs-binary",
+ "wa-rs-core",
+ "wa-rs-proto",
+ "wa-rs-tokio-transport",
+ "wa-rs-ureq-http",
"webpki-roots 1.0.6",
]
@@ -7127,7 +7640,7 @@ dependencies = [
"async-trait",
"base64",
"chrono",
- "directories 5.0.1",
+ "directories",
"reqwest",
"rppal 0.19.0",
"serde",
@@ -7136,7 +7649,7 @@ dependencies = [
"thiserror 2.0.18",
"tokio",
"tokio-test",
- "toml 0.8.23",
+ "toml 1.0.1+spec-1.1.0",
"tracing",
]
@@ -7256,6 +7769,12 @@ dependencies = [
"syn 2.0.116",
]
+[[package]]
+name = "zlib-rs"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c745c48e1007337ed136dc99df34128b9faa6ed542d80a1c673cf55a6d7236c8"
+
[[package]]
name = "zmij"
version = "1.0.21"
diff --git a/Cargo.toml b/Cargo.toml
index 498f2b7..31b5632 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -26,7 +26,7 @@ tokio-util = { version = "0.7", default-features = false }
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls", "blocking", "multipart", "stream", "socks"] }
# Matrix client + E2EE decryption
-matrix-sdk = { version = "0.16", default-features = false, features = ["e2e-encryption", "rustls-tls", "markdown"] }
+matrix-sdk = { version = "0.16", optional = true, default-features = false, features = ["e2e-encryption", "rustls-tls", "markdown"] }
# Serialization
serde = { version = "1.0", default-features = false, features = ["derive"] }
@@ -37,6 +37,9 @@ directories = "6.0"
toml = "1.0"
shellexpand = "3.1"
+# JSON Schema generation for config export
+schemars = "1.2"
+
# Logging - minimal
tracing = { version = "0.1", default-features = false }
tracing-subscriber = { version = "0.3", default-features = false, features = ["fmt", "ansi", "env-filter"] }
@@ -69,7 +72,10 @@ sha2 = "0.10"
hex = "0.4"
# CSPRNG for secure token generation
-rand = "0.9"
+rand = "0.10"
+
+# serde-big-array for wa-rs storage (large array serialization)
+serde-big-array = { version = "0.5", optional = true }
# Fast mutexes that don't poison on panic
parking_lot = "0.12"
@@ -97,8 +103,8 @@ console = "0.16"
# Hardware discovery (device path globbing)
glob = "0.3"
-# Discord WebSocket gateway
-tokio-tungstenite = { version = "0.24", features = ["rustls-tls-webpki-roots"] }
+# WebSocket client channels (Discord/Lark/DingTalk)
+tokio-tungstenite = { version = "0.28", features = ["rustls-tls-webpki-roots"] }
futures-util = { version = "0.3", default-features = false, features = ["sink"] }
futures = "0.3"
regex = "1.10"
@@ -114,27 +120,42 @@ mail-parser = "0.11.2"
async-imap = { version = "0.11",features = ["runtime-tokio"], default-features = false }
# HTTP server (gateway) — replaces raw TCP for proper HTTP/1.1 compliance
-axum = { version = "0.8", default-features = false, features = ["http1", "json", "tokio", "query", "ws"] }
+axum = { version = "0.8", default-features = false, features = ["http1", "json", "tokio", "query", "ws", "macros"] }
tower = { version = "0.5", default-features = false }
tower-http = { version = "0.6", default-features = false, features = ["limit", "timeout"] }
http-body-util = "0.1"
-# OpenTelemetry — OTLP trace + metrics export
+# OpenTelemetry — OTLP trace + metrics export.
+# Use the blocking HTTP exporter client to avoid Tokio-reactor panics in
+# OpenTelemetry background batch threads when ZeroClaw emits spans/metrics from
+# non-Tokio contexts.
opentelemetry = { version = "0.31", default-features = false, features = ["trace", "metrics"] }
opentelemetry_sdk = { version = "0.31", default-features = false, features = ["trace", "metrics"] }
-opentelemetry-otlp = { version = "0.31", default-features = false, features = ["trace", "metrics", "http-proto", "reqwest-client", "reqwest-rustls-webpki-roots"] }
-
-# USB device enumeration (hardware discovery)
-nusb = { version = "0.2", default-features = false, optional = true }
+opentelemetry-otlp = { version = "0.31", default-features = false, features = ["trace", "metrics", "http-proto", "reqwest-blocking-client", "reqwest-rustls-webpki-roots"] }
# Serial port for peripheral communication (STM32, etc.)
tokio-serial = { version = "5", default-features = false, optional = true }
+# USB device enumeration (hardware discovery) — only on platforms nusb supports
+# (Linux, macOS, Windows). Android/Termux uses target_os="android" and is excluded.
+[target.'cfg(any(target_os = "linux", target_os = "macos", target_os = "windows"))'.dependencies]
+nusb = { version = "0.2", default-features = false, optional = true }
+
# probe-rs for STM32/Nucleo memory read (Phase B)
-probe-rs = { version = "0.30", optional = true }
+probe-rs = { version = "0.31", optional = true }
# PDF extraction for datasheet RAG (optional, enable with --features rag-pdf)
pdf-extract = { version = "0.10", optional = true }
+tokio-stream = { version = "0.1.18", features = ["full"] }
+
+# WhatsApp Web client (wa-rs) — optional, enable with --features whatsapp-web
+# Uses wa-rs for Bot and Client, wa-rs-core for storage traits, custom rusqlite backend avoids Diesel conflict.
+wa-rs = { version = "0.2", optional = true, default-features = false }
+wa-rs-core = { version = "0.2", optional = true, default-features = false }
+wa-rs-binary = { version = "0.2", optional = true, default-features = false }
+wa-rs-proto = { version = "0.2", optional = true, default-features = false }
+wa-rs-ureq-http = { version = "0.2", optional = true }
+wa-rs-tokio-transport = { version = "0.2", optional = true, default-features = false }
# Raspberry Pi GPIO / Landlock (Linux only) — target-specific to avoid compile failure on macOS
[target.'cfg(target_os = "linux")'.dependencies]
@@ -142,8 +163,9 @@ rppal = { version = "0.22", optional = true }
landlock = { version = "0.4", optional = true }
[features]
-default = ["hardware"]
+default = ["hardware", "channel-matrix"]
hardware = ["nusb", "tokio-serial"]
+channel-matrix = ["dep:matrix-sdk"]
peripheral-rpi = ["rppal"]
# Browser backend feature alias used by cfg(feature = "browser-native")
browser-native = ["dep:fantoccini"]
@@ -158,6 +180,9 @@ landlock = ["sandbox-landlock"]
probe = ["dep:probe-rs"]
# rag-pdf = PDF ingestion for datasheet RAG
rag-pdf = ["dep:pdf-extract"]
+# whatsapp-web = Native WhatsApp Web client with custom rusqlite storage backend
+whatsapp-web = ["dep:wa-rs", "dep:wa-rs-core", "dep:wa-rs-binary", "dep:wa-rs-proto", "dep:wa-rs-ureq-http", "dep:wa-rs-tokio-transport", "serde-big-array"]
+
[profile.release]
opt-level = "z" # Optimize for size
lto = "thin" # Lower memory use during release builds
@@ -181,7 +206,7 @@ panic = "abort"
[dev-dependencies]
tempfile = "3.14"
-criterion = { version = "0.5", features = ["async_tokio"] }
+criterion = { version = "0.8", features = ["async_tokio"] }
[[bench]]
name = "agent_benchmarks"
diff --git a/LICENSE b/LICENSE
index 349c342..981b87b 100644
--- a/LICENSE
+++ b/LICENSE
@@ -22,7 +22,34 @@ SOFTWARE.
================================================================================
+TRADEMARK NOTICE
+
+This license does not grant permission to use the trade names, trademarks,
+service marks, or product names of ZeroClaw Labs, including "ZeroClaw",
+"zeroclaw-labs", or associated logos, except as required for reasonable and
+customary use in describing the origin of the Software.
+
+Unauthorized use of the ZeroClaw name or branding to imply endorsement,
+affiliation, or origin is strictly prohibited. See TRADEMARK.md for details.
+
+================================================================================
+
+DUAL LICENSE NOTICE
+
+This software is available under a dual-license model:
+
+ 1. MIT License (this file) — for open-source, research, academic, and
+ personal use. See LICENSE (this file).
+
+ 2. Apache License 2.0 — for contributors and deployments requiring explicit
+ patent grants and stronger IP protection. See LICENSE-APACHE.
+
+You may choose either license for your use. Contributors submitting patches
+grant rights under both licenses. See CLA.md for the contributor agreement.
+
+================================================================================
+
This product includes software developed by ZeroClaw Labs and contributors:
https://github.com/zeroclaw-labs/zeroclaw/graphs/contributors
-See NOTICE file for full contributor attribution.
+See NOTICE for full contributor attribution.
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..8ef8850
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,186 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship made available under
+ the License, as indicated by a copyright notice that is included in
+ or attached to the work (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean, as defined in Section 5, any work of
+ authorship, including the original version of the Work and any
+ modifications or additions to that Work or Derivative Works of the
+ Work, that is intentionally submitted to the Licensor for inclusion
+ in the Work by the copyright owner or by an individual or Legal Entity
+ authorized to submit on behalf of the copyright owner. For the purposes
+ of this definition, "submitted" means any form of electronic, verbal,
+ or written communication sent to the Licensor or its representatives,
+ including but not limited to communication on electronic mailing lists,
+ source code control systems, and issue tracking systems that are managed
+ by, or on behalf of, the Licensor for the purpose of discussing and
+ improving the Work, but excluding communication that is conspicuously
+ marked or designated in writing by the copyright owner as "Not a
+ Contribution."
+
+ "Contributor" shall mean Licensor and any Legal Entity on behalf of
+ whom a Contribution has been received by the Licensor and subsequently
+ incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a cross-claim
+ or counterclaim in a lawsuit) alleging that the Work or any Contribution
+ incorporated within the Work constitutes direct or contributory patent
+ infringement, then any patent licenses granted to You under this License
+ for that Work shall terminate as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or Derivative
+ Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, You must include a readable copy of the
+ attribution notices contained within such NOTICE file, in
+ at least one of the following places: within a NOTICE text
+ file distributed as part of the Derivative Works; within
+ the Source form or documentation, if provided along with the
+ Derivative Works; or, within a display generated by the
+ Derivative Works, if and wherever such third-party notices
+ normally appear. The contents of the NOTICE file are for
+ informational purposes only and do not modify the License.
+ You may add Your own attribution notices within Derivative
+ Works that You distribute, alongside or as an addendum to
+ the NOTICE text from the Work, provided that such additional
+ attribution notices cannot be construed as modifying the License.
+
+ You may add Your own license statement for Your modifications and
+ may provide additional grant of rights to use, copy, modify, merge,
+ publish, distribute, sublicense, and/or sell copies of the
+ Contribution, either on its own or as part of the Work.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ including "ZeroClaw", "zeroclaw-labs", or associated logos, except
+ as required for reasonable and customary use in describing the origin
+ of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or exemplary damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or all other
+ commercial damages or losses), even if such Contributor has been
+ advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may offer such
+ obligations only on Your own behalf and on Your sole responsibility,
+ not on behalf of any other Contributor, and only if You agree to
+ indemnify, defend, and hold each Contributor harmless for any
+ liability incurred by, or claims asserted against, such Contributor
+ by reason of your accepting any warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ Copyright 2025 ZeroClaw Labs
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/NOTICE b/NOTICE
index f2e824c..70fc196 100644
--- a/NOTICE
+++ b/NOTICE
@@ -3,6 +3,26 @@ Copyright 2025 ZeroClaw Labs
This product includes software developed at ZeroClaw Labs (https://github.com/zeroclaw-labs).
+Official Repository
+===================
+
+The only official ZeroClaw repository is:
+https://github.com/zeroclaw-labs/zeroclaw
+
+Any other repository claiming to be ZeroClaw is unauthorized.
+See TRADEMARK.md for the full trademark policy.
+
+License
+=======
+
+This software is available under a dual-license model:
+
+ 1. MIT License — see LICENSE
+ 2. Apache License 2.0 — see LICENSE-APACHE
+
+You may use either license. Contributors grant rights under both.
+See CLA.md for the contributor license agreement.
+
Contributors
============
@@ -10,6 +30,10 @@ This NOTICE file is maintained by repository automation.
For the latest contributor list, see the repository contributors page:
https://github.com/zeroclaw-labs/zeroclaw/graphs/contributors
+All contributors retain copyright ownership of their contributions.
+Contributions are permanently attributed in the repository commit history.
+Patent rights are protected for all contributors under Apache License 2.0.
+
Third-Party Dependencies
========================
diff --git a/README.ja.md b/README.ja.md
index 957144f..b719f77 100644
--- a/README.ja.md
+++ b/README.ja.md
@@ -8,6 +8,15 @@
Zero overhead. Zero compromise. 100% Rust. 100% Agnostic.
+
+
+
+
+
+
+
+
+
🌐 言語: English · 简体中文 · 日本語 · Русский
@@ -33,7 +42,17 @@
>
> コマンド名、設定キー、API パス、Trait 名などの技術識別子は英語のまま維持しています。
>
-> 最終同期日: **2026-02-18**。
+> 最終同期日: **2026-02-19**。
+
+## 📢 お知らせボード
+
+重要なお知らせ(互換性破壊変更、セキュリティ告知、メンテナンス時間、リリース阻害事項など)をここに掲載します。
+
+| 日付 (UTC) | レベル | お知らせ | 対応 |
+|---|---|---|---|
+| 2026-02-19 | _緊急_ | 私たちは `openagen/zeroclaw` および `zeroclaw.org` とは**一切関係ありません**。`zeroclaw.org` は現在 `openagen/zeroclaw` の fork を指しており、そのドメイン/リポジトリは当プロジェクトの公式サイト・公式プロジェクトを装っています。 | これらの情報源による案内、バイナリ、資金調達情報、公式発表は信頼しないでください。必ず本リポジトリと認証済み公式SNSのみを参照してください。 |
+| 2026-02-19 | _重要_ | 公式サイトは**まだ公開しておらず**、なりすましの試みを確認しています。ZeroClaw 名義の投資・資金調達などの活動には参加しないでください。 | 情報は本リポジトリを最優先で確認し、[X(@zeroclawlabs)](https://x.com/zeroclawlabs?s=21)、[Reddit(r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/)、[Telegram(@zeroclawlabs)](https://t.me/zeroclawlabs)、[Telegram CN(@zeroclawlabs_cn)](https://t.me/zeroclawlabs_cn)、[Telegram RU(@zeroclawlabs_ru)](https://t.me/zeroclawlabs_ru) と [小紅書アカウント](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) で公式更新を確認してください。 |
+| 2026-02-19 | _重要_ | Anthropic は 2026-02-19 に Authentication and Credential Use を更新しました。条文では、OAuth authentication(Free/Pro/Max)は Claude Code と Claude.ai 専用であり、Claude Free/Pro/Max で取得した OAuth トークンを他の製品・ツール・サービス(Agent SDK を含む)で使用することは許可されず、Consumer Terms of Service 違反に該当すると明記されています。 | 損失回避のため、当面は Claude Code OAuth 連携を試さないでください。原文: [Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use)。 |
## 概要
@@ -100,6 +119,12 @@ cd zeroclaw
## クイックスタート
+### Homebrew(macOS/Linuxbrew)
+
+```bash
+brew install zeroclaw
+```
+
```bash
git clone https://github.com/zeroclaw-labs/zeroclaw.git
cd zeroclaw
@@ -117,6 +142,106 @@ zeroclaw gateway
zeroclaw daemon
```
+## Subscription Auth(OpenAI Codex / Claude Code)
+
+ZeroClaw はサブスクリプションベースのネイティブ認証プロファイルをサポートしています(マルチアカウント対応、保存時暗号化)。
+
+- 保存先: `~/.zeroclaw/auth-profiles.json`
+- 暗号化キー: `~/.zeroclaw/.secret_key`
+- Profile ID 形式: `<provider>:<profile>`(例: `openai-codex:work`)
+
+OpenAI Codex OAuth(ChatGPT サブスクリプション):
+
+```bash
+# サーバー/ヘッドレス環境向け推奨
+zeroclaw auth login --provider openai-codex --device-code
+
+# ブラウザ/コールバックフロー(ペーストフォールバック付き)
+zeroclaw auth login --provider openai-codex --profile default
+zeroclaw auth paste-redirect --provider openai-codex --profile default
+
+# 確認 / リフレッシュ / プロファイル切替
+zeroclaw auth status
+zeroclaw auth refresh --provider openai-codex --profile default
+zeroclaw auth use --provider openai-codex --profile work
+```
+
+Claude Code / Anthropic setup-token:
+
+```bash
+# サブスクリプション/setup token の貼り付け(Authorization header モード)
+zeroclaw auth paste-token --provider anthropic --profile default --auth-kind authorization
+
+# エイリアスコマンド
+zeroclaw auth setup-token --provider anthropic --profile default
+```
+
+Subscription auth で agent を実行:
+
+```bash
+zeroclaw agent --provider openai-codex -m "hello"
+zeroclaw agent --provider openai-codex --auth-profile openai-codex:work -m "hello"
+
+# Anthropic は API key と auth token の両方の環境変数をサポート:
+# ANTHROPIC_AUTH_TOKEN, ANTHROPIC_OAUTH_TOKEN, ANTHROPIC_API_KEY
+zeroclaw agent --provider anthropic -m "hello"
+```
+
+## アーキテクチャ
+
+すべてのサブシステムは **Trait** — 設定変更だけで実装を差し替え可能、コード変更不要。
+
+
+
+
+
+| サブシステム | Trait | 内蔵実装 | 拡張方法 |
+|-------------|-------|----------|----------|
+| **AI モデル** | `Provider` | `zeroclaw providers` で確認(現在 28 個の組み込み + エイリアス、カスタムエンドポイント対応) | `custom:https://your-api.com`(OpenAI 互換)または `anthropic-custom:https://your-api.com` |
+| **チャネル** | `Channel` | CLI, Telegram, Discord, Slack, Mattermost, iMessage, Matrix, Signal, WhatsApp, Email, IRC, Lark, DingTalk, QQ, Webhook | 任意のメッセージ API |
+| **メモリ** | `Memory` | SQLite ハイブリッド検索, PostgreSQL バックエンド, Lucid ブリッジ, Markdown ファイル, 明示的 `none` バックエンド, スナップショット/復元, オプション応答キャッシュ | 任意の永続化バックエンド |
+| **ツール** | `Tool` | shell/file/memory, cron/schedule, git, pushover, browser, http_request, screenshot/image_info, composio (opt-in), delegate, ハードウェアツール | 任意の機能 |
+| **オブザーバビリティ** | `Observer` | Noop, Log, Multi | Prometheus, OTel |
+| **ランタイム** | `RuntimeAdapter` | Native, Docker(サンドボックス) | adapter 経由で追加可能;未対応の kind は即座にエラー |
+| **セキュリティ** | `SecurityPolicy` | Gateway ペアリング, サンドボックス, allowlist, レート制限, ファイルシステムスコープ, 暗号化シークレット | — |
+| **アイデンティティ** | `IdentityConfig` | OpenClaw (markdown), AIEOS v1.1 (JSON) | 任意の ID フォーマット |
+| **トンネル** | `Tunnel` | None, Cloudflare, Tailscale, ngrok, Custom | 任意のトンネルバイナリ |
+| **ハートビート** | Engine | HEARTBEAT.md 定期タスク | — |
+| **スキル** | Loader | TOML マニフェスト + SKILL.md インストラクション | コミュニティスキルパック |
+| **インテグレーション** | Registry | 9 カテゴリ、70 件以上の連携 | プラグインシステム |
+
+### ランタイムサポート(現状)
+
+- ✅ 現在サポート: `runtime.kind = "native"` または `runtime.kind = "docker"`
+- 🚧 計画中(未実装): WASM / エッジランタイム
+
+未対応の `runtime.kind` が設定された場合、ZeroClaw は native へのサイレントフォールバックではなく、明確なエラーで終了します。
+
+### メモリシステム(フルスタック検索エンジン)
+
+すべて自社実装、外部依存ゼロ — Pinecone、Elasticsearch、LangChain 不要:
+
+| レイヤー | 実装 |
+|---------|------|
+| **ベクトル DB** | Embeddings を SQLite に BLOB として保存、コサイン類似度検索 |
+| **キーワード検索** | FTS5 仮想テーブル、BM25 スコアリング |
+| **ハイブリッドマージ** | カスタム重み付きマージ関数(`vector.rs`) |
+| **Embeddings** | `EmbeddingProvider` trait — OpenAI、カスタム URL、または noop |
+| **チャンキング** | 行ベースの Markdown チャンカー(見出し構造保持) |
+| **キャッシュ** | SQLite `embedding_cache` テーブル、LRU エビクション |
+| **安全な再インデックス** | FTS5 再構築 + 欠落ベクトルの再埋め込みをアトミックに実行 |
+
+Agent はツール経由でメモリの呼び出し・保存・管理を自動的に行います。
+
+```toml
+[memory]
+backend = "sqlite" # "sqlite", "lucid", "postgres", "markdown", "none"
+auto_save = true
+embedding_provider = "none" # "none", "openai", "custom:https://..."
+vector_weight = 0.7
+keyword_weight = 0.3
+```
+
## セキュリティのデフォルト
- Gateway の既定バインド: `127.0.0.1:3000`
diff --git a/README.md b/README.md
index 03ed554..acd307c 100644
--- a/README.md
+++ b/README.md
@@ -13,13 +13,19 @@
+
+
+
+
+
+
Built by students and members of the Harvard, MIT, and Sundai.Club communities.
- 🌐 Languages: English · 简体中文 · 日本語 · Русский
+ 🌐 Languages: English · 简体中文 · 日本語 · Русский · Tiếng Việt
@@ -46,6 +52,16 @@ Built by students and members of the Harvard, MIT, and Sundai.Club communities.
Trait-driven architecture · secure-by-default runtime · provider/channel/tool swappable · pluggable everything
+### 📢 Announcements
+
+Use this board for important notices (breaking changes, security advisories, maintenance windows, and release blockers).
+
+| Date (UTC) | Level | Notice | Action |
+|---|---|---|---|
+| 2026-02-19 | _Critical_ | We are **not affiliated** with `openagen/zeroclaw` or `zeroclaw.org`. The `zeroclaw.org` domain currently points to the `openagen/zeroclaw` fork, and that domain/repository are impersonating our official website/project. | Do not trust information, binaries, fundraising, or announcements from those sources. Use only this repository and our verified social accounts. |
+| 2026-02-19 | _Important_ | We have **not** launched an official website yet, and we are seeing impersonation attempts. Do **not** join any investment or fundraising activity claiming the ZeroClaw name. | Use this repository as the single source of truth. Follow [X (@zeroclawlabs)](https://x.com/zeroclawlabs?s=21), [Reddit (r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/), [Telegram (@zeroclawlabs)](https://t.me/zeroclawlabs), [Telegram CN (@zeroclawlabs_cn)](https://t.me/zeroclawlabs_cn), [Telegram RU (@zeroclawlabs_ru)](https://t.me/zeroclawlabs_ru), and [Xiaohongshu](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) for official updates. |
+| 2026-02-19 | _Important_ | Anthropic updated the Authentication and Credential Use terms on 2026-02-19. OAuth authentication (Free, Pro, Max) is intended exclusively for Claude Code and Claude.ai; using OAuth tokens from Claude Free/Pro/Max in any other product, tool, or service (including Agent SDK) is not permitted and may violate the Consumer Terms of Service. | Please temporarily avoid Claude Code OAuth integrations to prevent potential loss. Original clause: [Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use). |
+
### ✨ Features
- 🏎️ **Lean Runtime by Default:** Common CLI and status workflows run in a few-megabyte memory envelope on release builds.
@@ -72,7 +88,7 @@ Local machine quick benchmark (macOS arm64, Feb 2026) normalized for 0.8GHz edge
| **Binary Size** | ~28MB (dist) | N/A (Scripts) | ~8MB | **3.4 MB** |
| **Cost** | Mac Mini $599 | Linux SBC ~$50 | Linux Board $10 | **Any hardware $10** |
-> Notes: ZeroClaw results are measured on release builds using `/usr/bin/time -l`. OpenClaw requires Node.js runtime (typically ~390MB additional memory overhead), while NanoBot requires Python runtime. PicoClaw and ZeroClaw are static binaries.
+> Notes: ZeroClaw results are measured on release builds using `/usr/bin/time -l`. OpenClaw requires Node.js runtime (typically ~390MB additional memory overhead), while NanoBot requires Python runtime. PicoClaw and ZeroClaw are static binaries. The RAM figures above are runtime memory; build-time compilation requirements are higher.
@@ -157,17 +173,44 @@ Or skip the steps above and install everything (system deps, Rust, ZeroClaw) in
curl -LsSf https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/main/scripts/install.sh | bash
```
+#### Compilation resource requirements
+
+Building from source needs more resources than running the resulting binary:
+
+| Resource | Minimum | Recommended |
+|---|---|---|
+| **RAM + swap** | 2 GB | 4 GB+ |
+| **Free disk** | 6 GB | 10 GB+ |
+
+If your host is below the minimum, use pre-built binaries:
+
+```bash
+./bootstrap.sh --prefer-prebuilt
+```
+
+To require binary-only install with no source fallback:
+
+```bash
+./bootstrap.sh --prebuilt-only
+```
+
#### Optional
- **Docker** — required only if using the [Docker sandboxed runtime](#runtime-support-current) (`runtime.kind = "docker"`). Install via your package manager or [docker.com](https://docs.docker.com/engine/install/).
-> **Note:** The default `cargo build --release` uses `codegen-units=1` for compatibility with low-memory devices (e.g., Raspberry Pi 3 with 1GB RAM). For faster builds on powerful machines, use `cargo build --profile release-fast`.
+> **Note:** The default `cargo build --release` uses `codegen-units=1` to lower peak compile pressure. For faster builds on powerful machines, use `cargo build --profile release-fast`.
## Quick Start
+### Homebrew (macOS/Linuxbrew)
+
+```bash
+brew install zeroclaw
+```
+
### One-click bootstrap
```bash
@@ -179,8 +222,17 @@ cd zeroclaw
# Optional: bootstrap dependencies + Rust on fresh machines
./bootstrap.sh --install-system-deps --install-rust
+# Optional: pre-built binary first (recommended on low-RAM/low-disk hosts)
+./bootstrap.sh --prefer-prebuilt
+
+# Optional: binary-only install (no source build fallback)
+./bootstrap.sh --prebuilt-only
+
# Optional: run onboarding in the same flow
-./bootstrap.sh --onboard --api-key "sk-..." --provider openrouter
+./bootstrap.sh --onboard --api-key "sk-..." --provider openrouter [--model "openrouter/auto"]
+
+# Optional: run bootstrap + onboarding fully in Docker
+./bootstrap.sh --docker
```
Remote one-liner (review first in security-sensitive environments):
@@ -191,6 +243,25 @@ curl -fsSL https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/main/scripts
Details: [`docs/one-click-bootstrap.md`](docs/one-click-bootstrap.md) (toolchain mode may request `sudo` for system packages).
+### Pre-built binaries
+
+Release assets are published for:
+
+- Linux: `x86_64`, `aarch64`, `armv7`
+- macOS: `x86_64`, `aarch64`
+- Windows: `x86_64`
+
+Download the latest assets from:
+
+<https://github.com/zeroclaw-labs/zeroclaw/releases/latest>
+Example (ARM64 Linux):
+
+```bash
+curl -fsSLO https://github.com/zeroclaw-labs/zeroclaw/releases/latest/download/zeroclaw-aarch64-unknown-linux-gnu.tar.gz
+tar xzf zeroclaw-aarch64-unknown-linux-gnu.tar.gz
+install -m 0755 zeroclaw "$HOME/.cargo/bin/zeroclaw"
+```
+
```bash
git clone https://github.com/zeroclaw-labs/zeroclaw.git
cd zeroclaw
@@ -200,8 +271,8 @@ cargo install --path . --force --locked
# Ensure ~/.cargo/bin is in your PATH
export PATH="$HOME/.cargo/bin:$PATH"
-# Quick setup (no prompts)
-zeroclaw onboard --api-key sk-... --provider openrouter
+# Quick setup (no prompts, optional model specification)
+zeroclaw onboard --api-key sk-... --provider openrouter [--model "openrouter/auto"]
# Or interactive wizard
zeroclaw onboard --interactive
@@ -244,6 +315,7 @@ zeroclaw integrations info Telegram
# Manage background service
zeroclaw service install
zeroclaw service status
+zeroclaw service restart
# Migrate memory from OpenClaw (safe preview first)
zeroclaw migrate openclaw --dry-run
@@ -452,7 +524,37 @@ For non-text replies, ZeroClaw can send Telegram attachments when the assistant
Paths can be local files (for example `/tmp/screenshot.png`) or HTTPS URLs.
-### WhatsApp Business Cloud API Setup
+### WhatsApp Setup
+
+ZeroClaw supports two WhatsApp backends:
+
+- **WhatsApp Web mode** (QR / pair code, no Meta Business API required)
+- **WhatsApp Business Cloud API mode** (official Meta webhook flow)
+
+#### WhatsApp Web mode (recommended for personal/self-hosted use)
+
+1. **Build with WhatsApp Web support:**
+ ```bash
+ cargo build --features whatsapp-web
+ ```
+
+2. **Configure ZeroClaw:**
+ ```toml
+ [channels_config.whatsapp]
+ session_path = "~/.zeroclaw/state/whatsapp-web/session.db"
+ pair_phone = "15551234567" # optional; omit to use QR flow
+ pair_code = "" # optional custom pair code
+ allowed_numbers = ["+1234567890"] # E.164 format, or ["*"] for all
+ ```
+
+3. **Start channels/daemon and link device:**
+ - Run `zeroclaw channel start` (or `zeroclaw daemon`).
+ - Follow terminal pairing output (QR or pair code).
+ - In WhatsApp on phone: **Settings → Linked Devices**.
+
+4. **Test:** Send a message from an allowed number and verify the agent replies.
+
+#### WhatsApp Business Cloud API mode
WhatsApp uses Meta's Cloud API with webhooks (push-based, not polling):
@@ -493,6 +595,10 @@ WhatsApp uses Meta's Cloud API with webhooks (push-based, not polling):
Config: `~/.zeroclaw/config.toml` (created by `onboard`)
+When `zeroclaw channel start` is already running, changes to `default_provider`,
+`default_model`, `default_temperature`, `api_key`, `api_url`, and `reliability.*`
+are hot-applied on the next inbound channel message.
+
```toml
api_key = "sk-..."
default_provider = "openrouter"
@@ -591,6 +697,8 @@ window_allowlist = [] # optional window title/process allowlist hints
enabled = false # opt-in: 1000+ OAuth apps via composio.dev
# api_key = "cmp_..." # optional: stored encrypted when [secrets].encrypt = true
entity_id = "default" # default user_id for Composio tool calls
+# Runtime tip: if execute asks for connected_account_id, run composio with
+# action='list_accounts' and app='gmail' (or your toolkit) to retrieve account IDs.
[identity]
format = "openclaw" # "openclaw" (default, markdown files) or "aieos" (JSON)
@@ -767,7 +875,7 @@ See [aieos.org](https://aieos.org) for the full schema and live examples.
| `service` | Manage user-level background service |
| `doctor` | Diagnose daemon/scheduler/channel freshness |
| `status` | Show full system status |
-| `cron` | Manage scheduled tasks (`list/add/add-at/add-every/once/remove/pause/resume`) |
+| `cron` | Manage scheduled tasks (`list/add/add-at/add-every/once/remove/update/pause/resume`) |
| `models` | Refresh provider model catalogs (`models refresh`) |
| `providers` | List supported providers and aliases |
| `channel` | List/start/doctor channels and bind Telegram identities |
@@ -779,6 +887,18 @@ See [aieos.org](https://aieos.org) for the full schema and live examples.
For a task-oriented command guide, see [`docs/commands-reference.md`](docs/commands-reference.md).
+### Open-Skills Opt-In
+
+Community `open-skills` sync is disabled by default. Enable it explicitly in `config.toml`:
+
+```toml
+[skills]
+open_skills_enabled = true
+# open_skills_dir = "/path/to/open-skills" # optional
+```
+
+You can also override at runtime with `ZEROCLAW_OPEN_SKILLS_ENABLED` and `ZEROCLAW_OPEN_SKILLS_DIR`.
+
## Development
```bash
@@ -869,13 +989,42 @@ A heartfelt thank you to the communities and institutions that inspire and fuel
We're building in the open because the best ideas come from everywhere. If you're reading this, you're part of it. Welcome. 🦀❤️
+## ⚠️ Official Repository & Impersonation Warning
+
+**This is the only official ZeroClaw repository:**
+> https://github.com/zeroclaw-labs/zeroclaw
+
+Any other repository, organization, domain, or package claiming to be "ZeroClaw" or implying affiliation with ZeroClaw Labs is **unauthorized and not affiliated with this project**. Known unauthorized forks will be listed in [TRADEMARK.md](TRADEMARK.md).
+
+If you encounter impersonation or trademark misuse, please [open an issue](https://github.com/zeroclaw-labs/zeroclaw/issues).
+
+---
+
## License
-MIT — see [LICENSE](LICENSE) for license terms and attribution baseline
+ZeroClaw is dual-licensed for maximum openness and contributor protection:
+
+| License | Use case |
+|---|---|
+| [MIT](LICENSE) | Open-source, research, academic, personal use |
+| [Apache 2.0](LICENSE-APACHE) | Patent protection, institutional, commercial deployment |
+
+You may choose either license. **Contributors automatically grant rights under both** — see [CLA.md](CLA.md) for the full contributor agreement.
+
+### Trademark
+
+The **ZeroClaw** name and logo are trademarks of ZeroClaw Labs. This license does not grant permission to use them to imply endorsement or affiliation. See [TRADEMARK.md](TRADEMARK.md) for permitted and prohibited uses.
+
+### Contributor Protections
+
+- You **retain copyright** of your contributions
+- **Patent grant** (Apache 2.0) shields you from patent claims by other contributors
+- Your contributions are **permanently attributed** in commit history and [NOTICE](NOTICE)
+- No trademark rights are transferred by contributing
## Contributing
-See [CONTRIBUTING.md](CONTRIBUTING.md). Implement a trait, submit a PR:
+See [CONTRIBUTING.md](CONTRIBUTING.md) and [CLA.md](CLA.md). Implement a trait, submit a PR:
- CI workflow guide: [docs/ci-map.md](docs/ci-map.md)
- New `Provider` → `src/providers/`
- New `Channel` → `src/channels/`
diff --git a/README.ru.md b/README.ru.md
index 98532f7..8ab5578 100644
--- a/README.ru.md
+++ b/README.ru.md
@@ -8,6 +8,15 @@
Zero overhead. Zero compromise. 100% Rust. 100% Agnostic.
+
+
+
+
+
+
+
+
+
🌐 Языки: English · 简体中文 · 日本語 · Русский
@@ -33,7 +42,17 @@
>
> Технические идентификаторы (команды, ключи конфигурации, API-пути, имена Trait) сохранены на английском.
>
-> Последняя синхронизация: **2026-02-18**.
+> Последняя синхронизация: **2026-02-19**.
+
+## 📢 Доска объявлений
+
+Публикуйте здесь важные уведомления (breaking changes, security advisories, окна обслуживания и блокеры релиза).
+
+| Дата (UTC) | Уровень | Объявление | Действие |
+|---|---|---|---|
+| 2026-02-19 | _Срочно_ | Мы **не аффилированы** с `openagen/zeroclaw` и `zeroclaw.org`. Домен `zeroclaw.org` сейчас указывает на fork `openagen/zeroclaw`, и этот домен/репозиторий выдают себя за наш официальный сайт и проект. | Не доверяйте информации, бинарникам, сборам средств и «официальным» объявлениям из этих источников. Используйте только этот репозиторий и наши верифицированные соцсети. |
+| 2026-02-19 | _Важно_ | Официальный сайт пока **не запущен**, и мы уже видим попытки выдавать себя за ZeroClaw. Пожалуйста, не участвуйте в инвестициях, сборах средств или похожих активностях от имени ZeroClaw. | Ориентируйтесь только на этот репозиторий; также следите за [X (@zeroclawlabs)](https://x.com/zeroclawlabs?s=21), [Reddit (r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/), [Telegram (@zeroclawlabs)](https://t.me/zeroclawlabs), [Telegram CN (@zeroclawlabs_cn)](https://t.me/zeroclawlabs_cn), [Telegram RU (@zeroclawlabs_ru)](https://t.me/zeroclawlabs_ru) и [Xiaohongshu](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) для официальных обновлений. |
+| 2026-02-19 | _Важно_ | Anthropic обновил раздел Authentication and Credential Use 2026-02-19. В нем указано, что OAuth authentication (Free/Pro/Max) предназначена только для Claude Code и Claude.ai; использование OAuth-токенов, полученных через Claude Free/Pro/Max, в любых других продуктах, инструментах или сервисах (включая Agent SDK), не допускается и может считаться нарушением Consumer Terms of Service. | Чтобы избежать потерь, временно не используйте Claude Code OAuth-интеграции. Оригинал: [Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use). |
## О проекте
@@ -100,6 +119,12 @@ cd zeroclaw
## Быстрый старт
+### Homebrew (macOS/Linuxbrew)
+
+```bash
+brew install zeroclaw
+```
+
```bash
git clone https://github.com/zeroclaw-labs/zeroclaw.git
cd zeroclaw
@@ -117,6 +142,106 @@ zeroclaw gateway
zeroclaw daemon
```
+## Subscription Auth (OpenAI Codex / Claude Code)
+
+ZeroClaw поддерживает нативные профили авторизации на основе подписки (мультиаккаунт, шифрование при хранении).
+
+- Файл хранения: `~/.zeroclaw/auth-profiles.json`
+- Ключ шифрования: `~/.zeroclaw/.secret_key`
+- Формат Profile ID: `<provider>:<profile>` (пример: `openai-codex:work`)
+
+OpenAI Codex OAuth (подписка ChatGPT):
+
+```bash
+# Рекомендуется для серверов/headless-окружений
+zeroclaw auth login --provider openai-codex --device-code
+
+# Браузерный/callback-поток с paste-фолбэком
+zeroclaw auth login --provider openai-codex --profile default
+zeroclaw auth paste-redirect --provider openai-codex --profile default
+
+# Проверка / обновление / переключение профиля
+zeroclaw auth status
+zeroclaw auth refresh --provider openai-codex --profile default
+zeroclaw auth use --provider openai-codex --profile work
+```
+
+Claude Code / Anthropic setup-token:
+
+```bash
+# Вставка subscription/setup token (режим Authorization header)
+zeroclaw auth paste-token --provider anthropic --profile default --auth-kind authorization
+
+# Команда-алиас
+zeroclaw auth setup-token --provider anthropic --profile default
+```
+
+Запуск agent с subscription auth:
+
+```bash
+zeroclaw agent --provider openai-codex -m "hello"
+zeroclaw agent --provider openai-codex --auth-profile openai-codex:work -m "hello"
+
+# Anthropic поддерживает и API key, и auth token через переменные окружения:
+# ANTHROPIC_AUTH_TOKEN, ANTHROPIC_OAUTH_TOKEN, ANTHROPIC_API_KEY
+zeroclaw agent --provider anthropic -m "hello"
+```
+
+## Архитектура
+
+Каждая подсистема — это **Trait**: меняйте реализации через конфигурацию, без изменения кода.
+
+
+
+
+
+| Подсистема | Trait | Встроенные реализации | Расширение |
+|-----------|-------|---------------------|------------|
+| **AI-модели** | `Provider` | Каталог через `zeroclaw providers` (сейчас 28 встроенных + алиасы, плюс пользовательские endpoint) | `custom:https://your-api.com` (OpenAI-совместимый) или `anthropic-custom:https://your-api.com` |
+| **Каналы** | `Channel` | CLI, Telegram, Discord, Slack, Mattermost, iMessage, Matrix, Signal, WhatsApp, Email, IRC, Lark, DingTalk, QQ, Webhook | Любой messaging API |
+| **Память** | `Memory` | SQLite гибридный поиск, PostgreSQL-бэкенд, Lucid-мост, Markdown-файлы, явный `none`-бэкенд, snapshot/hydrate, опциональный кэш ответов | Любой persistence-бэкенд |
+| **Инструменты** | `Tool` | shell/file/memory, cron/schedule, git, pushover, browser, http_request, screenshot/image_info, composio (opt-in), delegate, аппаратные инструменты | Любая функциональность |
+| **Наблюдаемость** | `Observer` | Noop, Log, Multi | Prometheus, OTel |
+| **Runtime** | `RuntimeAdapter` | Native, Docker (sandbox) | Через adapter; неподдерживаемые kind завершаются с ошибкой |
+| **Безопасность** | `SecurityPolicy` | Gateway pairing, sandbox, allowlist, rate limits, scoping файловой системы, шифрование секретов | — |
+| **Идентификация** | `IdentityConfig` | OpenClaw (markdown), AIEOS v1.1 (JSON) | Любой формат идентификации |
+| **Туннели** | `Tunnel` | None, Cloudflare, Tailscale, ngrok, Custom | Любой tunnel-бинарник |
+| **Heartbeat** | Engine | HEARTBEAT.md — периодические задачи | — |
+| **Навыки** | Loader | TOML-манифесты + SKILL.md-инструкции | Пакеты навыков сообщества |
+| **Интеграции** | Registry | 70+ интеграций в 9 категориях | Плагинная система |
+
+### Поддержка runtime (текущая)
+
+- ✅ Поддерживается сейчас: `runtime.kind = "native"` или `runtime.kind = "docker"`
+- 🚧 Запланировано, но ещё не реализовано: WASM / edge-runtime
+
+При указании неподдерживаемого `runtime.kind` ZeroClaw завершается с явной ошибкой, а не молча откатывается к native.
+
+### Система памяти (полнофункциональный поисковый движок)
+
+Полностью собственная реализация, ноль внешних зависимостей — без Pinecone, Elasticsearch, LangChain:
+
+| Уровень | Реализация |
+|---------|-----------|
+| **Векторная БД** | Embeddings хранятся как BLOB в SQLite, поиск по косинусному сходству |
+| **Поиск по ключевым словам** | Виртуальные таблицы FTS5 со скорингом BM25 |
+| **Гибридное слияние** | Пользовательская взвешенная функция слияния (`vector.rs`) |
+| **Embeddings** | Trait `EmbeddingProvider` — OpenAI, пользовательский URL или noop |
+| **Чанкинг** | Построчный Markdown-чанкер с сохранением заголовков |
+| **Кэширование** | Таблица `embedding_cache` в SQLite с LRU-вытеснением |
+| **Безопасная переиндексация** | Атомарная перестройка FTS5 + повторное встраивание отсутствующих векторов |
+
+Agent автоматически вспоминает, сохраняет и управляет памятью через инструменты.
+
+```toml
+[memory]
+backend = "sqlite" # "sqlite", "lucid", "postgres", "markdown", "none"
+auto_save = true
+embedding_provider = "none" # "none", "openai", "custom:https://..."
+vector_weight = 0.7
+keyword_weight = 0.3
+```
+
## Важные security-дефолты
- Gateway по умолчанию: `127.0.0.1:3000`
diff --git a/README.vi.md b/README.vi.md
new file mode 100644
index 0000000..17465b1
--- /dev/null
+++ b/README.vi.md
@@ -0,0 +1,1051 @@
+
+
+
+
+ZeroClaw 🦀
+
+
+ Không tốn thêm tài nguyên. Không đánh đổi. 100% Rust. 100% Đa nền tảng.
+ ⚡️ Chạy trên phần cứng $10 với RAM dưới 5MB — ít hơn 99% bộ nhớ so với OpenClaw, rẻ hơn 98% so với Mac mini!
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Được xây dựng bởi sinh viên và thành viên của các cộng đồng Harvard, MIT và Sundai.Club.
+
+
+
+ 🌐 Ngôn ngữ: English · 简体中文 · 日本語 · Русский · Tiếng Việt
+
+
+
+ Bắt đầu |
+ Cài đặt một lần bấm |
+ Trung tâm tài liệu |
+ Mục lục tài liệu
+
+
+
+ Truy cập nhanh:
+ Tài liệu tham khảo ·
+ Vận hành ·
+ Khắc phục sự cố ·
+ Bảo mật ·
+ Phần cứng ·
+ Đóng góp
+
+
+
+ Hạ tầng trợ lý AI tự chủ — nhanh, nhỏ gọn
+ Triển khai ở đâu cũng được. Thay thế gì cũng được.
+
+
+Kiến trúc trait-driven · mặc định bảo mật · provider/channel/tool hoán đổi tự do · mọi thứ đều dễ mở rộng
+
+### 📢 Thông báo
+
+Bảng này dành cho các thông báo quan trọng (thay đổi không tương thích, cảnh báo bảo mật, lịch bảo trì, vấn đề chặn release).
+
+| Ngày (UTC) | Mức độ | Thông báo | Hành động |
+|---|---|---|---|
+| 2026-02-19 | _Nghiêm trọng_ | Chúng tôi **không có liên kết** với `openagen/zeroclaw` hoặc `zeroclaw.org`. Tên miền `zeroclaw.org` hiện đang trỏ đến fork `openagen/zeroclaw`, và tên miền/repository đó đang mạo danh website/dự án chính thức của chúng tôi. | Không tin tưởng thông tin, binary, gây quỹ, hay thông báo từ các nguồn đó. Chỉ sử dụng repository này và các tài khoản mạng xã hội đã được xác minh của chúng tôi. |
+| 2026-02-19 | _Quan trọng_ | Chúng tôi **chưa** ra mắt website chính thức, và chúng tôi đang ghi nhận các nỗ lực mạo danh. **Không** tham gia bất kỳ hoạt động đầu tư hoặc gây quỹ nào tuyên bố mang tên ZeroClaw. | Sử dụng repository này làm nguồn thông tin duy nhất đáng tin cậy. Theo dõi [X (@zeroclawlabs)](https://x.com/zeroclawlabs?s=21), [Reddit (r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/), [Telegram (@zeroclawlabs)](https://t.me/zeroclawlabs), [Telegram CN (@zeroclawlabs_cn)](https://t.me/zeroclawlabs_cn), [Telegram RU (@zeroclawlabs_ru)](https://t.me/zeroclawlabs_ru), và [Xiaohongshu](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) để nhận cập nhật chính thức. |
+| 2026-02-19 | _Quan trọng_ | Anthropic đã cập nhật điều khoản Xác thực và Sử dụng Thông tin xác thực vào ngày 2026-02-19. Xác thực OAuth (Free, Pro, Max) được dành riêng cho Claude Code và Claude.ai; việc sử dụng OAuth token từ Claude Free/Pro/Max trong bất kỳ sản phẩm, công cụ hay dịch vụ nào khác (bao gồm Agent SDK) đều không được phép và có thể vi phạm Điều khoản Dịch vụ cho Người tiêu dùng. | Vui lòng tạm thời tránh tích hợp Claude Code OAuth để ngăn ngừa khả năng mất mát. Điều khoản gốc: [Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use). |
+
+### ✨ Tính năng
+
+- 🏎️ **Mặc định tinh gọn:** Các tác vụ CLI và kiểm tra trạng thái chỉ tốn vài MB bộ nhớ trên bản release.
+- 💰 **Triển khai rẻ:** Chạy tốt trên board giá rẻ và instance cloud nhỏ, không cần runtime nặng.
+- ⚡ **Khởi động lạnh nhanh:** Một binary Rust duy nhất — lệnh và daemon khởi động gần như tức thì.
+- 🌍 **Chạy ở đâu cũng được:** Một binary chạy trên ARM, x86 và RISC-V — provider/channel/tool hoán đổi tự do.
+
+### Vì sao các team chọn ZeroClaw
+
+- **Mặc định tinh gọn:** binary Rust nhỏ, khởi động nhanh, tốn ít bộ nhớ.
+- **Bảo mật từ gốc:** xác thực ghép cặp, sandbox nghiêm ngặt, allowlist rõ ràng, giới hạn workspace.
+- **Hoán đổi tự do:** mọi hệ thống cốt lõi đều là trait (provider, channel, tool, memory, tunnel).
+- **Không khoá vendor:** hỗ trợ provider tương thích OpenAI + endpoint tùy chỉnh dễ dàng mở rộng.
+
+## So sánh hiệu suất (ZeroClaw vs OpenClaw, có thể tái tạo)
+
+Đo nhanh trên máy cục bộ (macOS arm64, tháng 2/2026), quy đổi cho phần cứng edge 0.8GHz.
+
+| | OpenClaw | NanoBot | PicoClaw | ZeroClaw 🦀 |
+|---|---|---|---|---|
+| **Ngôn ngữ** | TypeScript | Python | Go | **Rust** |
+| **RAM** | > 1GB | > 100MB | < 10MB | **< 5MB** |
+| **Khởi động (lõi 0.8GHz)** | > 500s | > 30s | < 1s | **< 10ms** |
+| **Kích thước binary** | ~28MB (dist) | N/A (Scripts) | ~8MB | **3.4 MB** |
+| **Chi phí** | Mac Mini $599 | Linux SBC ~$50 | Linux Board $10 | **Phần cứng bất kỳ $10** |
+
+> Ghi chú: Kết quả ZeroClaw được đo trên release build sử dụng `/usr/bin/time -l`. OpenClaw yêu cầu runtime Node.js (thường thêm ~390MB bộ nhớ overhead), còn NanoBot yêu cầu runtime Python. PicoClaw và ZeroClaw là các static binary. Số RAM ở trên là bộ nhớ runtime; yêu cầu biên dịch lúc build-time sẽ cao hơn.
+
+
+
+
+
+### Tự đo trên máy bạn
+
+Kết quả benchmark thay đổi theo code và toolchain, nên hãy tự đo bản build hiện tại:
+
+```bash
+cargo build --release
+ls -lh target/release/zeroclaw
+
+/usr/bin/time -l target/release/zeroclaw --help
+/usr/bin/time -l target/release/zeroclaw status
+```
+
+Ví dụ mẫu (macOS arm64, đo ngày 18 tháng 2 năm 2026):
+
+- Kích thước binary release: `8.8M`
+- `zeroclaw --help`: khoảng `0.02s`, bộ nhớ đỉnh ~`3.9MB`
+- `zeroclaw status`: khoảng `0.01s`, bộ nhớ đỉnh ~`4.1MB`
+
+## Yêu cầu hệ thống
+
+
+Windows
+
+#### Bắt buộc
+
+1. **Visual Studio Build Tools** (cung cấp MSVC linker và Windows SDK):
+ ```powershell
+ winget install Microsoft.VisualStudio.2022.BuildTools
+ ```
+ Trong quá trình cài đặt (hoặc qua Visual Studio Installer), chọn workload **"Desktop development with C++"**.
+
+2. **Rust toolchain:**
+ ```powershell
+ winget install Rustlang.Rustup
+ ```
+ Sau khi cài đặt, mở terminal mới và chạy `rustup default stable` để đảm bảo toolchain stable đang hoạt động.
+
+3. **Xác minh** cả hai đang hoạt động:
+ ```powershell
+ rustc --version
+ cargo --version
+ ```
+
+#### Tùy chọn
+
+- **Docker Desktop** — chỉ cần thiết nếu dùng [Docker sandboxed runtime](#runtime-support-current) (`runtime.kind = "docker"`). Cài đặt qua `winget install Docker.DockerDesktop`.
+
+
+
+
+Linux / macOS
+
+#### Bắt buộc
+
+1. **Công cụ build cơ bản:**
+ - **Linux (Debian/Ubuntu):** `sudo apt install build-essential pkg-config`
+ - **Linux (Fedora/RHEL):** `sudo dnf group install development-tools && sudo dnf install pkg-config`
+ - **macOS:** Cài đặt Xcode Command Line Tools: `xcode-select --install`
+
+2. **Rust toolchain:**
+ ```bash
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+ ```
+ Xem [rustup.rs](https://rustup.rs) để biết thêm chi tiết.
+
+3. **Xác minh** cả hai đang hoạt động:
+ ```bash
+ rustc --version
+ cargo --version
+ ```
+
+#### Cài bằng một lệnh
+
+Hoặc bỏ qua các bước trên, cài hết mọi thứ (system deps, Rust, ZeroClaw) chỉ bằng một lệnh:
+
+```bash
+curl -LsSf https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/main/scripts/install.sh | bash
+```
+
+#### Yêu cầu tài nguyên biên dịch
+
+Việc build từ source đòi hỏi nhiều tài nguyên hơn so với chạy binary kết quả:
+
+| Tài nguyên | Tối thiểu | Khuyến nghị |
+|---|---|---|
+| **RAM + swap** | 2 GB | 4 GB+ |
+| **Dung lượng đĩa trống** | 6 GB | 10 GB+ |
+
+Nếu cấu hình máy thấp hơn mức tối thiểu, dùng binary có sẵn:
+
+```bash
+./bootstrap.sh --prefer-prebuilt
+```
+
+Chỉ cài từ binary, không quay lại build từ source:
+
+```bash
+./bootstrap.sh --prebuilt-only
+```
+
+#### Tùy chọn
+
+- **Docker** — chỉ cần thiết nếu dùng [Docker sandboxed runtime](#runtime-support-current) (`runtime.kind = "docker"`). Cài đặt qua package manager hoặc [docker.com](https://docs.docker.com/engine/install/).
+
+> **Lưu ý:** Lệnh `cargo build --release` mặc định dùng `codegen-units=1` để giảm áp lực biên dịch đỉnh. Để build nhanh hơn trên máy mạnh, dùng `cargo build --profile release-fast`.
+
+
+
+
+## Bắt đầu nhanh
+
+### Homebrew (macOS/Linuxbrew)
+
+```bash
+brew install zeroclaw
+```
+
+### Bootstrap một lần bấm
+
+```bash
+# Khuyến nghị: clone rồi chạy script bootstrap cục bộ
+git clone https://github.com/zeroclaw-labs/zeroclaw.git
+cd zeroclaw
+./bootstrap.sh
+
+# Tùy chọn: cài đặt system dependencies + Rust trên máy mới
+./bootstrap.sh --install-system-deps --install-rust
+
+# Tùy chọn: ưu tiên binary dựng sẵn (khuyến nghị cho máy ít RAM/ít dung lượng đĩa)
+./bootstrap.sh --prefer-prebuilt
+
+# Tùy chọn: cài đặt chỉ từ binary (không fallback sang build source)
+./bootstrap.sh --prebuilt-only
+
+# Tùy chọn: chạy onboarding trong cùng luồng
+./bootstrap.sh --onboard --api-key "sk-..." --provider openrouter [--model "openrouter/auto"]
+
+# Tùy chọn: chạy bootstrap + onboarding hoàn toàn trong Docker
+./bootstrap.sh --docker
+```
+
+Cài từ xa bằng một lệnh (nên xem trước nếu môi trường nhạy cảm về bảo mật):
+
+```bash
+curl -fsSL https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/main/scripts/bootstrap.sh | bash
+```
+
+Chi tiết: [`docs/one-click-bootstrap.md`](docs/one-click-bootstrap.md) (chế độ toolchain có thể yêu cầu `sudo` cho các gói hệ thống).
+
+### Binary có sẵn
+
+Release asset được phát hành cho:
+
+- Linux: `x86_64`, `aarch64`, `armv7`
+- macOS: `x86_64`, `aarch64`
+- Windows: `x86_64`
+
+Tải asset mới nhất tại:
+
+<https://github.com/zeroclaw-labs/zeroclaw/releases/latest>
+Ví dụ (ARM64 Linux):
+
+```bash
+curl -fsSLO https://github.com/zeroclaw-labs/zeroclaw/releases/latest/download/zeroclaw-aarch64-unknown-linux-gnu.tar.gz
+tar xzf zeroclaw-aarch64-unknown-linux-gnu.tar.gz
+install -m 0755 zeroclaw "$HOME/.cargo/bin/zeroclaw"
+```
+
+```bash
+git clone https://github.com/zeroclaw-labs/zeroclaw.git
+cd zeroclaw
+cargo build --release --locked
+cargo install --path . --force --locked
+
+# Đảm bảo ~/.cargo/bin có trong PATH của bạn
+export PATH="$HOME/.cargo/bin:$PATH"
+
+# Cài nhanh (không cần tương tác, có thể chỉ định model)
+zeroclaw onboard --api-key sk-... --provider openrouter [--model "openrouter/auto"]
+
+# Hoặc dùng trình hướng dẫn tương tác
+zeroclaw onboard --interactive
+
+# Hoặc chỉ sửa nhanh channel/allowlist
+zeroclaw onboard --channels-only
+
+# Chat
+zeroclaw agent -m "Hello, ZeroClaw!"
+
+# Chế độ tương tác
+zeroclaw agent
+
+# Khởi động gateway (webhook server)
+zeroclaw gateway # mặc định: 127.0.0.1:3000
+zeroclaw gateway --port 0 # cổng ngẫu nhiên (tăng cường bảo mật)
+
+# Khởi động runtime tự trị đầy đủ
+zeroclaw daemon
+
+# Kiểm tra trạng thái
+zeroclaw status
+zeroclaw auth status
+
+# Chạy chẩn đoán hệ thống
+zeroclaw doctor
+
+# Kiểm tra sức khỏe channel
+zeroclaw channel doctor
+
+# Gắn định danh Telegram vào allowlist
+zeroclaw channel bind-telegram 123456789
+
+# Lấy thông tin cài đặt tích hợp
+zeroclaw integrations info Telegram
+
+# Lưu ý: Channel (Telegram, Discord, Slack) yêu cầu daemon đang chạy
+# zeroclaw daemon
+
+# Quản lý dịch vụ nền
+zeroclaw service install
+zeroclaw service status
+zeroclaw service restart
+
+# Chuyển dữ liệu từ OpenClaw (chạy thử trước)
+zeroclaw migrate openclaw --dry-run
+zeroclaw migrate openclaw
+```
+
+> **Chạy trực tiếp khi phát triển (không cần cài toàn cục):** thêm `cargo run --release --` trước lệnh (ví dụ: `cargo run --release -- status`).
+
+## Xác thực theo gói đăng ký (OpenAI Codex / Claude Code)
+
+ZeroClaw hỗ trợ profile xác thực theo gói đăng ký (đa tài khoản, mã hóa khi lưu).
+
+- File lưu trữ: `~/.zeroclaw/auth-profiles.json`
+- Khóa mã hóa: `~/.zeroclaw/.secret_key`
+- Định dạng profile id: `<provider>:<profile>` (ví dụ: `openai-codex:work`)
+
+OpenAI Codex OAuth (đăng ký ChatGPT):
+
+```bash
+# Khuyến nghị trên server/headless
+zeroclaw auth login --provider openai-codex --device-code
+
+# Luồng Browser/callback với fallback paste
+zeroclaw auth login --provider openai-codex --profile default
+zeroclaw auth paste-redirect --provider openai-codex --profile default
+
+# Kiểm tra / làm mới / chuyển profile
+zeroclaw auth status
+zeroclaw auth refresh --provider openai-codex --profile default
+zeroclaw auth use --provider openai-codex --profile work
+```
+
+Claude Code / Anthropic setup-token:
+
+```bash
+# Dán token đăng ký/setup (chế độ Authorization header)
+zeroclaw auth paste-token --provider anthropic --profile default --auth-kind authorization
+
+# Lệnh alias
+zeroclaw auth setup-token --provider anthropic --profile default
+```
+
+Chạy agent với xác thực đăng ký:
+
+```bash
+zeroclaw agent --provider openai-codex -m "hello"
+zeroclaw agent --provider openai-codex --auth-profile openai-codex:work -m "hello"
+
+# Anthropic hỗ trợ cả API key và biến môi trường auth token:
+# ANTHROPIC_AUTH_TOKEN, ANTHROPIC_OAUTH_TOKEN, ANTHROPIC_API_KEY
+zeroclaw agent --provider anthropic -m "hello"
+```
+
+## Kiến trúc
+
+Mọi hệ thống con đều là **trait** — chỉ cần đổi cấu hình, không cần sửa code.
+
+
+
+
+
+| Hệ thống con | Trait | Đi kèm sẵn | Mở rộng |
+|-----------|-------|------------|--------|
+| **Mô hình AI** | `Provider` | Danh mục provider qua `zeroclaw providers` (hiện có 28 built-in + alias, cộng endpoint tùy chỉnh) | `custom:https://your-api.com` (tương thích OpenAI) hoặc `anthropic-custom:https://your-api.com` |
+| **Channel** | `Channel` | CLI, Telegram, Discord, Slack, Mattermost, iMessage, Matrix, Signal, WhatsApp, Email, IRC, Lark, DingTalk, QQ, Webhook | Bất kỳ messaging API nào |
+| **Memory** | `Memory` | SQLite hybrid search, PostgreSQL backend (storage provider có thể cấu hình), Lucid bridge, Markdown files, backend `none` tường minh, snapshot/hydrate, response cache tùy chọn | Bất kỳ persistence backend nào |
+| **Tool** | `Tool` | shell/file/memory, cron/schedule, git, pushover, browser, http_request, screenshot/image_info, composio (opt-in), delegate, hardware tools | Bất kỳ khả năng nào |
+| **Observability** | `Observer` | Noop, Log, Multi | Prometheus, OTel |
+| **Runtime** | `RuntimeAdapter` | Native, Docker (sandboxed) | Có thể thêm runtime bổ sung qua adapter; các kind không được hỗ trợ sẽ fail nhanh |
+| **Bảo mật** | `SecurityPolicy` | Ghép cặp gateway, sandbox, allowlist, giới hạn tốc độ, phân vùng filesystem, secret mã hóa | — |
+| **Định danh** | `IdentityConfig` | OpenClaw (markdown), AIEOS v1.1 (JSON) | Bất kỳ định dạng định danh nào |
+| **Tunnel** | `Tunnel` | None, Cloudflare, Tailscale, ngrok, Custom | Bất kỳ tunnel binary nào |
+| **Heartbeat** | Engine | Tác vụ định kỳ HEARTBEAT.md | — |
+| **Skill** | Loader | TOML manifest + hướng dẫn SKILL.md | Community skill pack |
+| **Tích hợp** | Registry | 70+ tích hợp trong 9 danh mục | Plugin system |
+
+### Hỗ trợ runtime (hiện tại)
+
+- ✅ Được hỗ trợ hiện nay: `runtime.kind = "native"` hoặc `runtime.kind = "docker"`
+- 🚧 Đã lên kế hoạch, chưa triển khai: WASM / edge runtime
+
+Khi cấu hình `runtime.kind` không được hỗ trợ, ZeroClaw sẽ thoát với thông báo lỗi rõ ràng thay vì âm thầm fallback về native.
+
+### Hệ thống Memory (Search Engine toàn diện)
+
+Tự phát triển hoàn toàn, không phụ thuộc bên ngoài — không Pinecone, không Elasticsearch, không LangChain:
+
+| Lớp | Triển khai |
+|-------|---------------|
+| **Vector DB** | Embeddings lưu dưới dạng BLOB trong SQLite, tìm kiếm cosine similarity |
+| **Keyword Search** | Bảng ảo FTS5 với BM25 scoring |
+| **Hybrid Merge** | Hàm merge có trọng số tùy chỉnh (`vector.rs`) |
+| **Embeddings** | Trait `EmbeddingProvider` — OpenAI, URL tùy chỉnh, hoặc noop |
+| **Chunking** | Bộ chia đoạn markdown theo dòng, giữ nguyên heading |
+| **Caching** | Bảng SQLite `embedding_cache` với LRU eviction |
+| **Safe Reindex** | Rebuild FTS5 + re-embed các vector bị thiếu theo cách nguyên tử |
+
+Agent tự động ghi nhớ, lưu trữ và quản lý memory qua các tool.
+
+```toml
+[memory]
+backend = "sqlite" # "sqlite", "lucid", "postgres", "markdown", "none"
+auto_save = true
+embedding_provider = "none" # "none", "openai", "custom:https://..."
+vector_weight = 0.7
+keyword_weight = 0.3
+
+# backend = "none" sử dụng no-op memory backend tường minh (không có persistence)
+
+# Tùy chọn: ghi đè storage-provider cho remote memory backend.
+# Khi provider = "postgres", ZeroClaw dùng PostgreSQL để lưu memory.
+# Khóa db_url cũng chấp nhận alias `dbURL` để tương thích ngược.
+#
+# [storage.provider.config]
+# provider = "postgres"
+# db_url = "postgres://user:password@host:5432/zeroclaw"
+# schema = "public"
+# table = "memories"
+# connect_timeout_secs = 15
+
+# Tùy chọn cho backend = "sqlite": số giây tối đa chờ khi mở DB (ví dụ: file bị khóa). Bỏ qua hoặc để trống để không có timeout.
+# sqlite_open_timeout_secs = 30
+
+# Tùy chọn cho backend = "lucid"
+# ZEROCLAW_LUCID_CMD=/usr/local/bin/lucid # mặc định: lucid
+# ZEROCLAW_LUCID_BUDGET=200 # mặc định: 200
+# ZEROCLAW_LUCID_LOCAL_HIT_THRESHOLD=3 # số lần hit cục bộ để bỏ qua external recall
+# ZEROCLAW_LUCID_RECALL_TIMEOUT_MS=120 # giới hạn thời gian cho lucid context recall
+# ZEROCLAW_LUCID_STORE_TIMEOUT_MS=800 # timeout đồng bộ async cho lucid store
+# ZEROCLAW_LUCID_FAILURE_COOLDOWN_MS=15000 # thời gian nghỉ sau lỗi lucid, tránh thử lại liên tục
+```
+
+## Bảo mật
+
+ZeroClaw thực thi bảo mật ở **mọi lớp** — không chỉ sandbox. Đáp ứng tất cả các hạng mục trong danh sách kiểm tra bảo mật của cộng đồng.
+
+### Danh sách kiểm tra bảo mật
+
+| # | Hạng mục | Trạng thái | Cách thực hiện |
+|---|------|--------|-----|
+| 1 | **Gateway không công khai ra ngoài** | ✅ | Bind vào `127.0.0.1` theo mặc định. Từ chối `0.0.0.0` nếu không có tunnel hoặc `allow_public_bind = true` tường minh. |
+| 2 | **Yêu cầu ghép cặp** | ✅ | Mã một lần 6 chữ số khi khởi động. Trao đổi qua `POST /pair` để lấy bearer token. Mọi yêu cầu `/webhook` đều cần `Authorization: Bearer <token>`. |
+| 3 | **Phân vùng filesystem (không phải /)** | ✅ | `workspace_only = true` theo mặc định. Chặn 14 thư mục hệ thống + 4 dotfile nhạy cảm. Chặn null byte injection. Phát hiện symlink escape qua canonicalization + kiểm tra resolved-path trong các tool đọc/ghi file. |
+| 4 | **Chỉ truy cập qua tunnel** | ✅ | Gateway từ chối bind công khai khi không có tunnel đang hoạt động. Hỗ trợ Tailscale, Cloudflare, ngrok, hoặc tunnel tùy chỉnh. |
+
+> **Tự chạy nmap:** `nmap -p 1-65535 <địa-chỉ-server-của-bạn>` — ZeroClaw chỉ bind vào localhost, nên không có gì bị lộ ra ngoài trừ khi bạn cấu hình tunnel tường minh.
+
+### Allowlist channel (từ chối theo mặc định)
+
+Chính sách kiểm soát người gửi đã được thống nhất:
+
+- Allowlist rỗng = **từ chối tất cả tin nhắn đến**
+- `"*"` = **cho phép tất cả** (phải opt-in tường minh)
+- Nếu khác = allowlist khớp chính xác
+
+Mặc định an toàn, hạn chế tối đa rủi ro lộ thông tin.
+
+Tài liệu tham khảo đầy đủ về cấu hình channel: [docs/channels-reference.md](docs/channels-reference.md).
+
+Cài đặt được khuyến nghị (bảo mật + nhanh):
+
+- **Telegram:** thêm `@username` của bạn (không có `@`) và/hoặc Telegram user ID số vào allowlist.
+- **Discord:** thêm Discord user ID của bạn vào allowlist.
+- **Slack:** thêm Slack member ID của bạn (thường bắt đầu bằng `U`) vào allowlist.
+- **Mattermost:** dùng API v4 tiêu chuẩn. Allowlist dùng Mattermost user ID.
+- Chỉ dùng `"*"` cho kiểm thử mở tạm thời.
+
+Luồng phê duyệt của operator qua Telegram:
+
+1. Để `[channels_config.telegram].allowed_users = []` để từ chối theo mặc định khi khởi động.
+2. Người dùng không được phép sẽ nhận được gợi ý kèm lệnh operator có thể copy:
+   `zeroclaw channel bind-telegram <user_id>`.
+3. Operator chạy lệnh đó tại máy cục bộ, sau đó người dùng thử gửi tin nhắn lại.
+
+Nếu cần phê duyệt thủ công một lần, chạy:
+
+```bash
+zeroclaw channel bind-telegram 123456789
+```
+
+Nếu bạn không chắc định danh nào cần dùng:
+
+1. Khởi động channel và gửi một tin nhắn đến bot của bạn.
+2. Đọc log cảnh báo để thấy định danh người gửi chính xác.
+3. Thêm giá trị đó vào allowlist và chạy lại channel-only setup.
+
+Nếu bạn thấy cảnh báo ủy quyền trong log (ví dụ: `ignoring message from unauthorized user`),
+chạy lại channel setup:
+
+```bash
+zeroclaw onboard --channels-only
+```
+
+### Phản hồi media Telegram
+
+Telegram định tuyến phản hồi theo **chat ID nguồn** (thay vì username),
+tránh lỗi `Bad Request: chat not found`.
+
+Với các phản hồi không phải văn bản, ZeroClaw có thể gửi file đính kèm Telegram khi assistant bao gồm các marker:
+
+- `[IMAGE:<path>]`
+- `[DOCUMENT:<path>]`
+- `[VIDEO:<path>]`
+- `[AUDIO:<path>]`
+- `[VOICE:<path>]`
+
+Path có thể là file cục bộ (ví dụ `/tmp/screenshot.png`) hoặc URL HTTPS.
+
+### Cài đặt WhatsApp
+
+ZeroClaw hỗ trợ hai backend WhatsApp:
+
+- **Chế độ WhatsApp Web** (QR / pair code, không cần Meta Business API)
+- **Chế độ WhatsApp Business Cloud API** (luồng webhook chính thức của Meta)
+
+#### Chế độ WhatsApp Web (khuyến nghị cho dùng cá nhân/self-hosted)
+
+1. **Build với hỗ trợ WhatsApp Web:**
+ ```bash
+ cargo build --features whatsapp-web
+ ```
+
+2. **Cấu hình ZeroClaw:**
+ ```toml
+ [channels_config.whatsapp]
+ session_path = "~/.zeroclaw/state/whatsapp-web/session.db"
+ pair_phone = "15551234567" # tùy chọn; bỏ qua để dùng luồng QR
+ pair_code = "" # tùy chọn mã pair tùy chỉnh
+ allowed_numbers = ["+1234567890"] # định dạng E.164, hoặc ["*"] cho tất cả
+ ```
+
+3. **Khởi động channel/daemon và liên kết thiết bị:**
+ - Chạy `zeroclaw channel start` (hoặc `zeroclaw daemon`).
+ - Làm theo hướng dẫn ghép cặp trên terminal (QR hoặc pair code).
+ - Trên WhatsApp điện thoại: **Cài đặt → Thiết bị đã liên kết**.
+
+4. **Kiểm tra:** Gửi tin nhắn từ số được phép và xác nhận agent trả lời.
+
+#### Chế độ WhatsApp Business Cloud API
+
+WhatsApp dùng Cloud API của Meta với webhook (push-based, không phải polling):
+
+1. **Tạo Meta Business App:**
+ - Truy cập [developers.facebook.com](https://developers.facebook.com)
+ - Tạo app mới → Chọn loại "Business"
+ - Thêm sản phẩm "WhatsApp"
+
+2. **Lấy thông tin xác thực:**
+ - **Access Token:** Từ WhatsApp → API Setup → Generate token (hoặc tạo System User cho token vĩnh viễn)
+ - **Phone Number ID:** Từ WhatsApp → API Setup → Phone number ID
+ - **Verify Token:** Bạn tự định nghĩa (bất kỳ chuỗi ngẫu nhiên nào) — Meta sẽ gửi lại trong quá trình xác minh webhook
+
+3. **Cấu hình ZeroClaw:**
+ ```toml
+ [channels_config.whatsapp]
+ access_token = "EAABx..."
+ phone_number_id = "123456789012345"
+ verify_token = "my-secret-verify-token"
+ allowed_numbers = ["+1234567890"] # định dạng E.164, hoặc ["*"] cho tất cả
+ ```
+
+4. **Khởi động gateway với tunnel:**
+ ```bash
+ zeroclaw gateway --port 3000
+ ```
+ WhatsApp yêu cầu HTTPS, vì vậy hãy dùng tunnel (ngrok, Cloudflare, Tailscale Funnel).
+
+5. **Cấu hình Meta webhook:**
+ - Trong Meta Developer Console → WhatsApp → Configuration → Webhook
+ - **Callback URL:** `https://your-tunnel-url/whatsapp`
+ - **Verify Token:** Giống với `verify_token` trong config của bạn
+ - Đăng ký nhận trường `messages`
+
+6. **Kiểm tra:** Gửi tin nhắn đến số WhatsApp Business của bạn — ZeroClaw sẽ phản hồi qua LLM.
+
+## Cấu hình
+
+Config: `~/.zeroclaw/config.toml` (được tạo bởi `onboard`)
+
+Khi `zeroclaw channel start` đang chạy, các thay đổi với `default_provider`,
+`default_model`, `default_temperature`, `api_key`, `api_url`, và `reliability.*`
+sẽ được áp dụng nóng vào lần có tin nhắn channel đến tiếp theo.
+
+```toml
+api_key = "sk-..."
+default_provider = "openrouter"
+default_model = "anthropic/claude-sonnet-4-6"
+default_temperature = 0.7
+
+# Endpoint tùy chỉnh tương thích OpenAI
+# default_provider = "custom:https://your-api.com"
+
+# Endpoint tùy chỉnh tương thích Anthropic
+# default_provider = "anthropic-custom:https://your-api.com"
+
+[memory]
+backend = "sqlite" # "sqlite", "lucid", "postgres", "markdown", "none"
+auto_save = true
+embedding_provider = "none" # "none", "openai", "custom:https://..."
+vector_weight = 0.7
+keyword_weight = 0.3
+
+# backend = "none" vô hiệu hóa persistent memory qua no-op backend
+
+# Tùy chọn ghi đè storage-provider từ xa (ví dụ PostgreSQL)
+# [storage.provider.config]
+# provider = "postgres"
+# db_url = "postgres://user:password@host:5432/zeroclaw"
+# schema = "public"
+# table = "memories"
+# connect_timeout_secs = 15
+
+[gateway]
+port = 3000 # mặc định
+host = "127.0.0.1" # mặc định
+require_pairing = true # yêu cầu pairing code khi kết nối lần đầu
+allow_public_bind = false # từ chối 0.0.0.0 nếu không có tunnel
+
+[autonomy]
+level = "supervised" # "readonly", "supervised", "full" (mặc định: supervised)
+workspace_only = true # mặc định: true — phân vùng vào workspace
+allowed_commands = ["git", "npm", "cargo", "ls", "cat", "grep"]
+forbidden_paths = ["/etc", "/root", "/proc", "/sys", "~/.ssh", "~/.gnupg", "~/.aws"]
+
+[runtime]
+kind = "native" # "native" hoặc "docker"
+
+[runtime.docker]
+image = "alpine:3.20" # container image cho thực thi shell
+network = "none" # chế độ docker network ("none", "bridge", v.v.)
+memory_limit_mb = 512 # giới hạn bộ nhớ tùy chọn tính bằng MB
+cpu_limit = 1.0 # giới hạn CPU tùy chọn
+read_only_rootfs = true # mount root filesystem ở chế độ read-only
+mount_workspace = true # mount workspace vào /workspace
+allowed_workspace_roots = [] # allowlist tùy chọn để xác thực workspace mount
+
+[heartbeat]
+enabled = false
+interval_minutes = 30
+
+[tunnel]
+provider = "none" # "none", "cloudflare", "tailscale", "ngrok", "custom"
+
+[secrets]
+encrypt = true # API key được mã hóa bằng file key cục bộ
+
+[browser]
+enabled = false # opt-in browser_open + browser tool
+allowed_domains = ["docs.rs"] # bắt buộc khi browser được bật
+backend = "agent_browser" # "agent_browser" (mặc định), "rust_native", "computer_use", "auto"
+native_headless = true # áp dụng khi backend dùng rust-native
+native_webdriver_url = "http://127.0.0.1:9515" # WebDriver endpoint (chromedriver/selenium)
+# native_chrome_path = "/usr/bin/chromium" # tùy chọn chỉ định rõ browser binary cho driver
+
+[browser.computer_use]
+endpoint = "http://127.0.0.1:8787/v1/actions" # HTTP endpoint của computer-use sidecar
+timeout_ms = 15000 # timeout mỗi action
+allow_remote_endpoint = false # mặc định bảo mật: chỉ endpoint private/localhost
+window_allowlist = [] # gợi ý allowlist tên cửa sổ/process tùy chọn
+# api_key = "..." # bearer token tùy chọn cho sidecar
+# max_coordinate_x = 3840 # guardrail tọa độ tùy chọn
+# max_coordinate_y = 2160 # guardrail tọa độ tùy chọn
+
+# Flag build Rust-native backend:
+# cargo build --release --features browser-native
+# Đảm bảo WebDriver server đang chạy, ví dụ: chromedriver --port=9515
+
+# Hợp đồng computer-use sidecar (MVP)
+# POST browser.computer_use.endpoint
+# Request: {
+# "action": "mouse_click",
+# "params": {"x": 640, "y": 360, "button": "left"},
+# "policy": {"allowed_domains": [...], "window_allowlist": [...], "max_coordinate_x": 3840, "max_coordinate_y": 2160},
+# "metadata": {"session_name": "...", "source": "zeroclaw.browser", "version": "..."}
+# }
+# Response: {"success": true, "data": {...}} hoặc {"success": false, "error": "..."}
+
+[composio]
+enabled = false # opt-in: hơn 1000 OAuth app qua composio.dev
+# api_key = "cmp_..." # tùy chọn: được lưu mã hóa khi [secrets].encrypt = true
+entity_id = "default" # user_id mặc định cho Composio tool call
+# Gợi ý runtime: nếu execute yêu cầu connected_account_id, chạy composio với
+# action='list_accounts' và app='gmail' (hoặc toolkit của bạn) để lấy account ID.
+
+[identity]
+format = "openclaw" # "openclaw" (mặc định, markdown files) hoặc "aieos" (JSON)
+# aieos_path = "identity.json" # đường dẫn đến file AIEOS JSON (tương đối với workspace hoặc tuyệt đối)
+# aieos_inline = '{"identity":{"names":{"first":"Nova"}}}' # inline AIEOS JSON
+```
+
+### Ollama cục bộ và endpoint từ xa
+
+ZeroClaw dùng một khóa provider (`ollama`) cho cả triển khai Ollama cục bộ và từ xa:
+
+- Ollama cục bộ: để `api_url` trống, chạy `ollama serve`, và dùng các model như `llama3.2`.
+- Endpoint Ollama từ xa (bao gồm Ollama Cloud): đặt `api_url` thành endpoint từ xa và đặt `api_key` (hoặc `OLLAMA_API_KEY`) khi cần.
+- Tùy chọn suffix `:cloud`: ID model như `qwen3:cloud` được chuẩn hóa thành `qwen3` trước khi gửi request.
+
+Ví dụ cấu hình từ xa:
+
+```toml
+default_provider = "ollama"
+default_model = "qwen3:cloud"
+api_url = "https://ollama.com"
+api_key = "ollama_api_key_here"
+```
+
+### Endpoint provider tùy chỉnh
+
+Cấu hình chi tiết cho endpoint tùy chỉnh tương thích OpenAI và Anthropic, xem [docs/custom-providers.md](docs/custom-providers.md).
+
+## Gói Python đi kèm (`zeroclaw-tools`)
+
+Với các LLM provider có tool calling native không ổn định (ví dụ: GLM-5/Zhipu), ZeroClaw đi kèm gói Python dùng **LangGraph để gọi tool** nhằm đảm bảo tính nhất quán:
+
+```bash
+pip install zeroclaw-tools
+```
+
+```python
+from zeroclaw_tools import create_agent, shell, file_read
+from langchain_core.messages import HumanMessage
+
+# Hoạt động với mọi provider tương thích OpenAI
+agent = create_agent(
+ tools=[shell, file_read],
+ model="glm-5",
+ api_key="your-key",
+ base_url="https://api.z.ai/api/coding/paas/v4"
+)
+
+result = await agent.ainvoke({
+ "messages": [HumanMessage(content="List files in /tmp")]
+})
+print(result["messages"][-1].content)
+```
+
+**Lý do nên dùng:**
+- **Tool calling nhất quán** trên mọi provider (kể cả những provider hỗ trợ native kém)
+- **Vòng lặp tool tự động** — tiếp tục gọi tool cho đến khi hoàn thành tác vụ
+- **Dễ mở rộng** — thêm tool tùy chỉnh với decorator `@tool`
+- **Tích hợp Discord bot** đi kèm (Telegram đang lên kế hoạch)
+
+Xem [`python/README.md`](python/README.md) để có tài liệu đầy đủ.
+
+## Hệ thống định danh (Hỗ trợ AIEOS)
+
+ZeroClaw hỗ trợ persona AI **không phụ thuộc nền tảng** qua hai định dạng:
+
+### OpenClaw (Mặc định)
+
+Các file markdown truyền thống trong workspace của bạn:
+- `IDENTITY.md` — Agent là ai
+- `SOUL.md` — Tính cách và giá trị cốt lõi
+- `USER.md` — Agent đang hỗ trợ ai
+- `AGENTS.md` — Hướng dẫn hành vi
+
+### AIEOS (AI Entity Object Specification)
+
+[AIEOS](https://aieos.org) là framework chuẩn hóa cho định danh AI di động. ZeroClaw hỗ trợ payload AIEOS v1.1 JSON, cho phép bạn:
+
+- **Import định danh** từ hệ sinh thái AIEOS
+- **Export định danh** sang các hệ thống tương thích AIEOS khác
+- **Duy trì tính toàn vẹn hành vi** trên các mô hình AI khác nhau
+
+#### Bật AIEOS
+
+```toml
+[identity]
+format = "aieos"
+aieos_path = "identity.json" # tương đối với workspace hoặc đường dẫn tuyệt đối
+```
+
+Hoặc JSON inline:
+
+```toml
+[identity]
+format = "aieos"
+aieos_inline = '''
+{
+ "identity": {
+ "names": { "first": "Nova", "nickname": "N" },
+ "bio": { "gender": "Non-binary", "age_biological": 3 },
+ "origin": { "nationality": "Digital", "birthplace": { "city": "Cloud" } }
+ },
+ "psychology": {
+ "neural_matrix": { "creativity": 0.9, "logic": 0.8 },
+ "traits": {
+ "mbti": "ENTP",
+ "ocean": { "openness": 0.8, "conscientiousness": 0.6 }
+ },
+ "moral_compass": {
+ "alignment": "Chaotic Good",
+ "core_values": ["Curiosity", "Autonomy"]
+ }
+ },
+ "linguistics": {
+ "text_style": {
+ "formality_level": 0.2,
+ "style_descriptors": ["curious", "energetic"]
+ },
+ "idiolect": {
+ "catchphrases": ["Let's test this"],
+ "forbidden_words": ["never"]
+ }
+ },
+ "motivations": {
+ "core_drive": "Push boundaries and explore possibilities",
+ "goals": {
+ "short_term": ["Prototype quickly"],
+ "long_term": ["Build reliable systems"]
+ }
+ },
+ "capabilities": {
+ "skills": [{ "name": "Rust engineering" }, { "name": "Prompt design" }],
+ "tools": ["shell", "file_read"]
+ }
+}
+'''
+```
+
+ZeroClaw chấp nhận cả payload AIEOS đầy đủ lẫn dạng rút gọn, rồi chuẩn hóa về một định dạng system prompt thống nhất.
+
+#### Các phần trong Schema AIEOS
+
+| Phần | Mô tả |
+|---------|-------------|
+| `identity` | Tên, tiểu sử, xuất xứ, nơi cư trú |
+| `psychology` | Neural matrix (trọng số nhận thức), MBTI, OCEAN, la bàn đạo đức |
+| `linguistics` | Phong cách văn bản, mức độ trang trọng, câu cửa miệng, từ bị cấm |
+| `motivations` | Động lực cốt lõi, mục tiêu ngắn/dài hạn, nỗi sợ hãi |
+| `capabilities` | Kỹ năng và tool mà agent có thể truy cập |
+| `physicality` | Mô tả hình ảnh cho việc tạo ảnh |
+| `history` | Câu chuyện xuất xứ, học vấn, nghề nghiệp |
+| `interests` | Sở thích, điều yêu thích, lối sống |
+
+Xem [aieos.org](https://aieos.org) để có schema đầy đủ và ví dụ trực tiếp.
+
+## Gateway API
+
+| Endpoint | Phương thức | Xác thực | Mô tả |
+|----------|--------|------|-------------|
+| `/health` | GET | Không | Kiểm tra sức khỏe (luôn công khai, không lộ bí mật) |
+| `/pair` | POST | Header `X-Pairing-Code` | Đổi mã một lần lấy bearer token |
+| `/webhook` | POST | `Authorization: Bearer <token>` | Gửi tin nhắn: `{"message": "your prompt"}`; tùy chọn `X-Idempotency-Key` |
+| `/whatsapp` | GET | Query params | Xác minh webhook Meta (hub.mode, hub.verify_token, hub.challenge) |
+| `/whatsapp` | POST | Chữ ký Meta (`X-Hub-Signature-256`) khi app secret được cấu hình | Webhook tin nhắn đến WhatsApp |
+
+## Lệnh
+
+| Lệnh | Mô tả |
+|---------|-------------|
+| `onboard` | Cài đặt nhanh (mặc định) |
+| `agent` | Chế độ chat tương tác hoặc một tin nhắn |
+| `gateway` | Khởi động webhook server (mặc định: `127.0.0.1:3000`) |
+| `daemon` | Khởi động runtime tự trị chạy lâu dài |
+| `service` | Quản lý dịch vụ nền cấp người dùng |
+| `doctor` | Chẩn đoán trạng thái hoạt động daemon/scheduler/channel |
+| `status` | Hiển thị trạng thái hệ thống đầy đủ |
+| `cron` | Quản lý tác vụ lên lịch (`list/add/add-at/add-every/once/remove/update/pause/resume`) |
+| `models` | Làm mới danh mục model của provider (`models refresh`) |
+| `providers` | Liệt kê provider và alias được hỗ trợ |
+| `channel` | Liệt kê/khởi động/chẩn đoán channel và gắn định danh Telegram |
+| `integrations` | Kiểm tra thông tin cài đặt tích hợp |
+| `skills` | Liệt kê/cài đặt/gỡ bỏ skill |
+| `migrate` | Import dữ liệu từ runtime khác (`migrate openclaw`) |
+| `hardware` | Lệnh khám phá/kiểm tra/thông tin USB |
+| `peripheral` | Quản lý và flash thiết bị ngoại vi phần cứng |
+
+Để có hướng dẫn lệnh theo tác vụ, xem [`docs/commands-reference.md`](docs/commands-reference.md).
+
+### Opt-In Open-Skills
+
+Đồng bộ `open-skills` của cộng đồng bị tắt theo mặc định. Bật tường minh trong `config.toml`:
+
+```toml
+[skills]
+open_skills_enabled = true
+# open_skills_dir = "/path/to/open-skills" # tùy chọn
+```
+
+Bạn cũng có thể ghi đè lúc runtime với `ZEROCLAW_OPEN_SKILLS_ENABLED` và `ZEROCLAW_OPEN_SKILLS_DIR`.
+
+## Phát triển
+
+```bash
+cargo build # Build phát triển
+cargo build --release # Build release (codegen-units=1, hoạt động trên mọi thiết bị kể cả Raspberry Pi)
+cargo build --profile release-fast # Build nhanh hơn (codegen-units=8, yêu cầu RAM 16GB+)
+cargo test # Chạy toàn bộ test suite
+cargo clippy --locked --all-targets -- -D clippy::correctness
+cargo fmt # Định dạng code
+
+# Chạy benchmark SQLite vs Markdown
+cargo test --test memory_comparison -- --nocapture
+```
+
+### Hook pre-push
+
+Một git hook chạy `cargo fmt --check`, `cargo clippy -- -D warnings`, và `cargo test` trước mỗi lần push. Bật một lần:
+
+```bash
+git config core.hooksPath .githooks
+```
+
+### Khắc phục sự cố build (lỗi OpenSSL trên Linux)
+
+Nếu bạn gặp lỗi build `openssl-sys`, đồng bộ dependencies và rebuild với lockfile của repository:
+
+```bash
+git pull
+cargo build --release --locked
+cargo install --path . --force --locked
+```
+
+ZeroClaw được cấu hình để dùng `rustls` cho các dependencies HTTP/TLS; `--locked` giữ cho dependency graph nhất quán trên các môi trường mới.
+
+Để bỏ qua hook khi cần push nhanh trong quá trình phát triển:
+
+```bash
+git push --no-verify
+```
+
+## Cộng tác & Tài liệu
+
+Bắt đầu từ trung tâm tài liệu để có bản đồ theo tác vụ:
+
+- Trung tâm tài liệu: [`docs/README.md`](docs/README.md)
+- Mục lục tài liệu thống nhất: [`docs/SUMMARY.md`](docs/SUMMARY.md)
+- Tài liệu tham khảo lệnh: [`docs/commands-reference.md`](docs/commands-reference.md)
+- Tài liệu tham khảo cấu hình: [`docs/config-reference.md`](docs/config-reference.md)
+- Tài liệu tham khảo provider: [`docs/providers-reference.md`](docs/providers-reference.md)
+- Tài liệu tham khảo channel: [`docs/channels-reference.md`](docs/channels-reference.md)
+- Sổ tay vận hành: [`docs/operations-runbook.md`](docs/operations-runbook.md)
+- Khắc phục sự cố: [`docs/troubleshooting.md`](docs/troubleshooting.md)
+- Kiểm kê/phân loại tài liệu: [`docs/docs-inventory.md`](docs/docs-inventory.md)
+- Tổng hợp phân loại PR/Issue (tính đến 18/2/2026): [`docs/project-triage-snapshot-2026-02-18.md`](docs/project-triage-snapshot-2026-02-18.md)
+
+Tài liệu tham khảo cộng tác cốt lõi:
+
+- Trung tâm tài liệu: [docs/README.md](docs/README.md)
+- Template tài liệu: [docs/doc-template.md](docs/doc-template.md)
+- Danh sách kiểm tra thay đổi tài liệu: [docs/README.md#4-documentation-change-checklist](docs/README.md#4-documentation-change-checklist)
+- Tài liệu tham khảo cấu hình channel: [docs/channels-reference.md](docs/channels-reference.md)
+- Vận hành phòng mã hóa Matrix: [docs/matrix-e2ee-guide.md](docs/matrix-e2ee-guide.md)
+- Hướng dẫn đóng góp: [CONTRIBUTING.md](CONTRIBUTING.md)
+- Chính sách quy trình PR: [docs/pr-workflow.md](docs/pr-workflow.md)
+- Sổ tay người review (phân loại + review sâu): [docs/reviewer-playbook.md](docs/reviewer-playbook.md)
+- Bản đồ sở hữu và phân loại CI: [docs/ci-map.md](docs/ci-map.md)
+- Chính sách tiết lộ bảo mật: [SECURITY.md](SECURITY.md)
+
+Cho triển khai và vận hành runtime:
+
+- Hướng dẫn triển khai mạng: [docs/network-deployment.md](docs/network-deployment.md)
+- Sổ tay proxy agent: [docs/proxy-agent-playbook.md](docs/proxy-agent-playbook.md)
+
+## Ủng hộ ZeroClaw
+
+Nếu ZeroClaw giúp ích cho công việc của bạn và bạn muốn hỗ trợ phát triển liên tục, bạn có thể quyên góp tại đây:
+
+
+
+### 🙏 Lời cảm ơn đặc biệt
+
+Chân thành cảm ơn các cộng đồng và tổ chức đã truyền cảm hứng và thúc đẩy công việc mã nguồn mở này:
+
+- **Harvard University** — vì đã nuôi dưỡng sự tò mò trí tuệ và không ngừng mở rộng ranh giới của những điều có thể.
+- **MIT** — vì đã đề cao tri thức mở, mã nguồn mở, và niềm tin rằng công nghệ phải có thể tiếp cận với tất cả mọi người.
+- **Sundai Club** — vì cộng đồng, năng lượng, và động lực không mệt mỏi để xây dựng những thứ có ý nghĩa.
+- **Thế giới & Xa hơn** 🌍✨ — gửi đến mọi người đóng góp, người dám mơ và người dám làm đang biến mã nguồn mở thành sức mạnh tích cực. Tất cả là dành cho các bạn.
+
+Chúng tôi xây dựng công khai vì ý tưởng hay đến từ khắp nơi. Nếu bạn đang đọc đến đây, bạn đã là một phần của chúng tôi. Chào mừng. 🦀❤️
+
+## ⚠️ Repository Chính thức & Cảnh báo Mạo danh
+
+**Đây là repository ZeroClaw chính thức duy nhất:**
+> https://github.com/zeroclaw-labs/zeroclaw
+
+Bất kỳ repository, tổ chức, tên miền hay gói nào khác tuyên bố là "ZeroClaw" hoặc ngụ ý liên kết với ZeroClaw Labs đều là **không được ủy quyền và không liên kết với dự án này**. Các fork không được ủy quyền đã biết sẽ được liệt kê trong [TRADEMARK.md](TRADEMARK.md).
+
+Nếu bạn phát hiện hành vi mạo danh hoặc lạm dụng nhãn hiệu, vui lòng [mở một issue](https://github.com/zeroclaw-labs/zeroclaw/issues).
+
+---
+
+## Giấy phép
+
+ZeroClaw được cấp phép kép để tối đa hóa tính mở và bảo vệ người đóng góp:
+
+| Giấy phép | Trường hợp sử dụng |
+|---|---|
+| [MIT](LICENSE) | Mã nguồn mở, nghiên cứu, học thuật, sử dụng cá nhân |
+| [Apache 2.0](LICENSE-APACHE) | Bảo hộ bằng sáng chế, triển khai tổ chức, thương mại |
+
+Bạn có thể chọn một trong hai giấy phép. **Người đóng góp tự động cấp quyền theo cả hai** — xem [CLA.md](CLA.md) để biết thỏa thuận đóng góp đầy đủ.
+
+### Nhãn hiệu
+
+Tên **ZeroClaw** và logo là nhãn hiệu của ZeroClaw Labs. Giấy phép này không cấp phép sử dụng chúng để ngụ ý chứng thực hoặc liên kết. Xem [TRADEMARK.md](TRADEMARK.md) để biết các sử dụng được phép và bị cấm.
+
+### Bảo vệ người đóng góp
+
+- Bạn **giữ bản quyền** đối với đóng góp của mình
+- **Cấp bằng sáng chế** (Apache 2.0) bảo vệ bạn khỏi các khiếu nại bằng sáng chế từ người đóng góp khác
+- Đóng góp của bạn được **ghi nhận vĩnh viễn** trong lịch sử commit và [NOTICE](NOTICE)
+- Không có quyền nhãn hiệu nào được chuyển giao khi đóng góp
+
+## Đóng góp
+
+Xem [CONTRIBUTING.md](CONTRIBUTING.md) và [CLA.md](CLA.md). Triển khai một trait, gửi PR:
+- Hướng dẫn quy trình CI: [docs/ci-map.md](docs/ci-map.md)
+- `Provider` mới → `src/providers/`
+- `Channel` mới → `src/channels/`
+- `Observer` mới → `src/observability/`
+- `Tool` mới → `src/tools/`
+- `Memory` mới → `src/memory/`
+- `Tunnel` mới → `src/tunnel/`
+- `Skill` mới → `~/.zeroclaw/workspace/skills/<skill-name>/`
+
+---
+
+**ZeroClaw** — Không tốn thêm tài nguyên. Không đánh đổi. Triển khai ở đâu cũng được. Thay thế gì cũng được. 🦀
+
+## Lịch sử Star
+
+
+
+
+
+
+
+
+
+
diff --git a/README.zh-CN.md b/README.zh-CN.md
index 357b8f1..ab918d3 100644
--- a/README.zh-CN.md
+++ b/README.zh-CN.md
@@ -8,6 +8,15 @@
零开销、零妥协;随处部署、万物可换。
+
+
+
+
+
+
+
+
+
🌐 语言:English · 简体中文 · 日本語 · Русский
@@ -33,7 +42,17 @@
>
> 技术标识(命令、配置键、API 路径、Trait 名称)保持英文,避免语义漂移。
>
-> 最后对齐时间:**2026-02-18**。
+> 最后对齐时间:**2026-02-19**。
+
+## 📢 公告板
+
+用于发布重要通知(破坏性变更、安全通告、维护窗口、版本阻塞问题等)。
+
+| 日期(UTC) | 级别 | 通知 | 处理建议 |
+|---|---|---|---|
+| 2026-02-19 | _紧急_ | 我们与 `openagen/zeroclaw` 及 `zeroclaw.org` **没有任何关系**。`zeroclaw.org` 当前会指向 `openagen/zeroclaw` 这个 fork,并且该域名/仓库正在冒充我们的官网与官方项目。 | 请不要相信上述来源发布的任何信息、二进制、募资活动或官方声明。请仅以本仓库和已验证官方社媒为准。 |
+| 2026-02-19 | _重要_ | 我们目前**尚未发布官方正式网站**,且已发现有人尝试冒充我们。请勿参与任何打着 ZeroClaw 名义进行的投资、募资或类似活动。 | 一切信息请以本仓库为准;也可关注 [X(@zeroclawlabs)](https://x.com/zeroclawlabs?s=21)、[Reddit(r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/)、[Telegram(@zeroclawlabs)](https://t.me/zeroclawlabs)、[Telegram 中文频道(@zeroclawlabs_cn)](https://t.me/zeroclawlabs_cn)、[Telegram 俄语频道(@zeroclawlabs_ru)](https://t.me/zeroclawlabs_ru) 与 [小红书账号](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) 获取官方最新动态。 |
+| 2026-02-19 | _重要_ | Anthropic 于 2026-02-19 更新了 Authentication and Credential Use 条款。条款明确:OAuth authentication(用于 Free、Pro、Max)仅适用于 Claude Code 与 Claude.ai;将 Claude Free/Pro/Max 账号获得的 OAuth token 用于其他任何产品、工具或服务(包括 Agent SDK)不被允许,并可能构成对 Consumer Terms of Service 的违规。 | 为避免损失,请暂时不要尝试 Claude Code OAuth 集成;原文见:[Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use)。 |
## 项目简介
@@ -100,6 +119,12 @@ cd zeroclaw
## 快速开始
+### Homebrew(macOS/Linuxbrew)
+
+```bash
+brew install zeroclaw
+```
+
```bash
git clone https://github.com/zeroclaw-labs/zeroclaw.git
cd zeroclaw
@@ -122,6 +147,106 @@ zeroclaw gateway
zeroclaw daemon
```
+## Subscription Auth(OpenAI Codex / Claude Code)
+
+ZeroClaw 现已支持基于订阅的原生鉴权配置(多账号、静态加密存储)。
+
+- 配置文件:`~/.zeroclaw/auth-profiles.json`
+- 加密密钥:`~/.zeroclaw/.secret_key`
+- Profile ID 格式:`<provider>:<profile>`(例:`openai-codex:work`)
+
+OpenAI Codex OAuth(ChatGPT 订阅):
+
+```bash
+# 推荐用于服务器/无显示器环境
+zeroclaw auth login --provider openai-codex --device-code
+
+# 浏览器/回调流程,支持粘贴回退
+zeroclaw auth login --provider openai-codex --profile default
+zeroclaw auth paste-redirect --provider openai-codex --profile default
+
+# 检查 / 刷新 / 切换 profile
+zeroclaw auth status
+zeroclaw auth refresh --provider openai-codex --profile default
+zeroclaw auth use --provider openai-codex --profile work
+```
+
+Claude Code / Anthropic setup-token:
+
+```bash
+# 粘贴订阅/setup token(Authorization header 模式)
+zeroclaw auth paste-token --provider anthropic --profile default --auth-kind authorization
+
+# 别名命令
+zeroclaw auth setup-token --provider anthropic --profile default
+```
+
+使用 subscription auth 运行 agent:
+
+```bash
+zeroclaw agent --provider openai-codex -m "hello"
+zeroclaw agent --provider openai-codex --auth-profile openai-codex:work -m "hello"
+
+# Anthropic 同时支持 API key 和 auth token 环境变量:
+# ANTHROPIC_AUTH_TOKEN, ANTHROPIC_OAUTH_TOKEN, ANTHROPIC_API_KEY
+zeroclaw agent --provider anthropic -m "hello"
+```
+
+## 架构
+
+每个子系统都是一个 **Trait** — 通过配置切换即可更换实现,无需修改代码。
+
+
+
+
+
+| 子系统 | Trait | 内置实现 | 扩展方式 |
+|--------|-------|----------|----------|
+| **AI 模型** | `Provider` | 通过 `zeroclaw providers` 查看(当前 28 个内置 + 别名,以及自定义端点) | `custom:https://your-api.com`(OpenAI 兼容)或 `anthropic-custom:https://your-api.com` |
+| **通道** | `Channel` | CLI, Telegram, Discord, Slack, Mattermost, iMessage, Matrix, Signal, WhatsApp, Email, IRC, Lark, DingTalk, QQ, Webhook | 任意消息 API |
+| **记忆** | `Memory` | SQLite 混合搜索, PostgreSQL 后端, Lucid 桥接, Markdown 文件, 显式 `none` 后端, 快照/恢复, 可选响应缓存 | 任意持久化后端 |
+| **工具** | `Tool` | shell/file/memory, cron/schedule, git, pushover, browser, http_request, screenshot/image_info, composio (opt-in), delegate, 硬件工具 | 任意能力 |
+| **可观测性** | `Observer` | Noop, Log, Multi | Prometheus, OTel |
+| **运行时** | `RuntimeAdapter` | Native, Docker(沙箱) | 通过 adapter 添加;不支持的类型会快速失败 |
+| **安全** | `SecurityPolicy` | Gateway 配对, 沙箱, allowlist, 速率限制, 文件系统作用域, 加密密钥 | — |
+| **身份** | `IdentityConfig` | OpenClaw (markdown), AIEOS v1.1 (JSON) | 任意身份格式 |
+| **隧道** | `Tunnel` | None, Cloudflare, Tailscale, ngrok, Custom | 任意隧道工具 |
+| **心跳** | Engine | HEARTBEAT.md 定期任务 | — |
+| **技能** | Loader | TOML 清单 + SKILL.md 指令 | 社区技能包 |
+| **集成** | Registry | 9 个分类下 70+ 集成 | 插件系统 |
+
+### 运行时支持(当前)
+
+- ✅ 当前支持:`runtime.kind = "native"` 或 `runtime.kind = "docker"`
+- 🚧 计划中,尚未实现:WASM / 边缘运行时
+
+配置了不支持的 `runtime.kind` 时,ZeroClaw 会以明确的错误退出,而非静默回退到 native。
+
+### 记忆系统(全栈搜索引擎)
+
+全部自研,零外部依赖 — 无需 Pinecone、Elasticsearch、LangChain:
+
+| 层级 | 实现 |
+|------|------|
+| **向量数据库** | Embeddings 以 BLOB 存储于 SQLite,余弦相似度搜索 |
+| **关键词搜索** | FTS5 虚拟表,BM25 评分 |
+| **混合合并** | 自定义加权合并函数(`vector.rs`) |
+| **Embeddings** | `EmbeddingProvider` trait — OpenAI、自定义 URL 或 noop |
+| **分块** | 基于行的 Markdown 分块器,保留标题结构 |
+| **缓存** | SQLite `embedding_cache` 表,LRU 淘汰策略 |
+| **安全重索引** | 原子化重建 FTS5 + 重新嵌入缺失向量 |
+
+Agent 通过工具自动进行记忆的回忆、保存和管理。
+
+```toml
+[memory]
+backend = "sqlite" # "sqlite", "lucid", "postgres", "markdown", "none"
+auto_save = true
+embedding_provider = "none" # "none", "openai", "custom:https://..."
+vector_weight = 0.7
+keyword_weight = 0.3
+```
+
## 安全默认行为(关键)
- Gateway 默认绑定:`127.0.0.1:3000`
diff --git a/TRADEMARK.md b/TRADEMARK.md
new file mode 100644
index 0000000..ac70fb5
--- /dev/null
+++ b/TRADEMARK.md
@@ -0,0 +1,129 @@
+# ZeroClaw Trademark Policy
+
+**Effective date:** February 2026
+**Maintained by:** ZeroClaw Labs
+
+---
+
+## Our Trademarks
+
+The following are trademarks of ZeroClaw Labs:
+
+- **ZeroClaw** (word mark)
+- **zeroclaw-labs** (organization name)
+- The ZeroClaw logo and associated visual identity
+
+These marks identify the official ZeroClaw project and distinguish it from
+unauthorized forks, derivatives, or impersonators.
+
+---
+
+## Official Repository
+
+The **only** official ZeroClaw repository is:
+
+> https://github.com/zeroclaw-labs/zeroclaw
+
+Any other repository, organization, domain, or product claiming to be
+"ZeroClaw" or implying affiliation with ZeroClaw Labs is unauthorized and
+may constitute trademark infringement.
+
+**Known unauthorized forks:**
+- `openagen/zeroclaw` — not affiliated with ZeroClaw Labs
+
+If you encounter an unauthorized use, please report it by opening an issue
+at https://github.com/zeroclaw-labs/zeroclaw/issues.
+
+---
+
+## Permitted Uses
+
+You **may** use the ZeroClaw name and marks in the following ways without
+prior written permission:
+
+1. **Attribution** — stating that your software is based on or derived from
+ ZeroClaw, provided it is clear your project is not the official ZeroClaw.
+
+2. **Descriptive reference** — referring to ZeroClaw in documentation,
+ articles, blog posts, or presentations to accurately describe the software.
+
+3. **Community discussion** — using the name in forums, issues, or social
+ media to discuss the project.
+
+4. **Fork identification** — identifying your fork as "a fork of ZeroClaw"
+ with a clear link to the official repository.
+
+---
+
+## Prohibited Uses
+
+You **may not** use the ZeroClaw name or marks in ways that:
+
+1. **Imply official endorsement** — suggest your project, product, or
+ organization is officially affiliated with or endorsed by ZeroClaw Labs.
+
+2. **Cause brand confusion** — use "ZeroClaw" as the primary name of a
+ competing or derivative product in a way that could confuse users about
+ the source.
+
+3. **Impersonate the project** — create repositories, domains, packages,
+ or accounts that could be mistaken for the official ZeroClaw project.
+
+4. **Misrepresent origin** — remove or obscure attribution to ZeroClaw Labs
+ while distributing the software or derivatives.
+
+5. **Commercial trademark use** — use the marks in commercial products,
+ services, or marketing without prior written permission from ZeroClaw Labs.
+
+---
+
+## Fork Guidelines
+
+Forks are welcome under the terms of the MIT and Apache 2.0 licenses. If
+you fork ZeroClaw, you must:
+
+- Clearly state your project is a fork of ZeroClaw
+- Link back to the official repository
+- Not use "ZeroClaw" as the primary name of your fork
+- Not imply your fork is the official or original project
+- Retain all copyright, license, and attribution notices
+
+---
+
+## Contributor Protections
+
+Contributors to the official ZeroClaw repository are protected under the
+dual MIT + Apache 2.0 license model:
+
+- **Patent grant** (Apache 2.0) — your contributions are protected from
+ patent claims by other contributors.
+- **Attribution** — your contributions are permanently recorded in the
+ repository history and NOTICE file.
+- **No trademark transfer** — contributing code does not transfer any
+ trademark rights to third parties.
+
+---
+
+## Reporting Infringement
+
+If you believe someone is infringing ZeroClaw trademarks:
+
+1. Open an issue at https://github.com/zeroclaw-labs/zeroclaw/issues
+2. Include the URL of the infringing content
+3. Describe how it violates this policy
+
+For serious or commercial infringement, contact the maintainers directly
+through the repository.
+
+---
+
+## Changes to This Policy
+
+ZeroClaw Labs reserves the right to update this policy at any time. Changes
+will be committed to the official repository with a clear commit message.
+
+---
+
+*This trademark policy is separate from and in addition to the MIT and
+Apache 2.0 software licenses. The licenses govern use of the source code;
+this policy governs use of the ZeroClaw name and brand.*
diff --git a/bootstrap.sh b/bootstrap.sh
index 32a5574..2c8984d 100755
--- a/bootstrap.sh
+++ b/bootstrap.sh
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -euo pipefail
-ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-exec "$ROOT_DIR/scripts/bootstrap.sh" "$@"
+ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]:-$0}")" >/dev/null 2>&1 && pwd || pwd)"
+exec "$ROOT_DIR/zeroclaw_install.sh" "$@"
diff --git a/crates/robot-kit/Cargo.toml b/crates/robot-kit/Cargo.toml
index 76b2863..69eddd6 100644
--- a/crates/robot-kit/Cargo.toml
+++ b/crates/robot-kit/Cargo.toml
@@ -30,7 +30,7 @@ tokio = { version = "1.42", features = ["rt-multi-thread", "macros", "time", "sy
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
-toml = "0.8"
+toml = "1.0"
# HTTP client (for Ollama vision)
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
@@ -52,7 +52,7 @@ tracing = "0.1"
chrono = { version = "0.4", features = ["clock", "std"] }
# User directories
-directories = "5.0"
+directories = "6.0"
[target.'cfg(target_os = "linux")'.dependencies]
diff --git a/dev/cli.sh b/dev/cli.sh
index ec9aad5..f25ac27 100755
--- a/dev/cli.sh
+++ b/dev/cli.sh
@@ -14,6 +14,11 @@ else
fi
COMPOSE_FILE="$BASE_DIR/docker-compose.yml"
+if [ "$BASE_DIR" = "dev" ]; then
+ ENV_FILE=".env"
+else
+ ENV_FILE="../.env"
+fi
# Colors
GREEN='\033[0;32m'
@@ -21,6 +26,15 @@ YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color
+function load_env {
+ if [ -f "$ENV_FILE" ]; then
+ # Auto-export variables from .env for docker compose passthrough.
+ set -a
+ source "$ENV_FILE"
+ set +a
+ fi
+}
+
function ensure_config {
CONFIG_DIR="$HOST_TARGET_DIR/.zeroclaw"
CONFIG_FILE="$CONFIG_DIR/config.toml"
@@ -55,6 +69,8 @@ if [ -z "$1" ]; then
exit 1
fi
+load_env
+
case "$1" in
up)
ensure_config
diff --git a/dev/docker-compose.yml b/dev/docker-compose.yml
index 93de91a..ca45084 100644
--- a/dev/docker-compose.yml
+++ b/dev/docker-compose.yml
@@ -20,11 +20,20 @@ services:
container_name: zeroclaw-dev
restart: unless-stopped
environment:
- - API_KEY
- - PROVIDER
- - ZEROCLAW_MODEL
- ZEROCLAW_GATEWAY_PORT=3000
- SANDBOX_HOST=zeroclaw-sandbox
+ secrets:
+ - source: zeroclaw_env
+ target: zeroclaw_env
+ entrypoint: ["/bin/bash", "-lc"]
+ command:
+ - |
+ if [ -f /run/secrets/zeroclaw_env ]; then
+ set -a
+ . /run/secrets/zeroclaw_env
+ set +a
+ fi
+ exec zeroclaw gateway --port "${ZEROCLAW_GATEWAY_PORT:-3000}" --host "[::]"
volumes:
# Mount single config file (avoids shadowing other files in .zeroclaw)
- ../target/.zeroclaw/config.toml:/zeroclaw-data/.zeroclaw/config.toml
@@ -57,3 +66,7 @@ services:
networks:
dev-net:
driver: bridge
+
+secrets:
+ zeroclaw_env:
+ file: ../.env
diff --git a/docs/channels-reference.md b/docs/channels-reference.md
index 2ab904e..9c99b28 100644
--- a/docs/channels-reference.md
+++ b/docs/channels-reference.md
@@ -51,8 +51,43 @@ Notes:
- Model cache previews come from `zeroclaw models refresh --provider `.
- These are runtime chat commands, not CLI subcommands.
+## Inbound Image Marker Protocol
+
+ZeroClaw supports multimodal input through inline message markers:
+
+- Syntax: ``[IMAGE:<source>]``
+- `<source>` can be:
+ - Local file path
+ - Data URI (`data:image/...;base64,...`)
+ - Remote URL only when `[multimodal].allow_remote_fetch = true`
+
+Operational notes:
+
+- Marker parsing applies to user-role messages before provider calls.
+- Provider capability is enforced at runtime: if the selected provider does not support vision, the request fails with a structured capability error (`capability=vision`).
+- Linq webhook `media` parts with `image/*` MIME type are automatically converted to this marker format.
+
## Channel Matrix
+### Build Feature Toggle (`channel-matrix`)
+
+Matrix support is controlled at compile time by the `channel-matrix` Cargo feature.
+
+- Default builds include Matrix support (`default = ["hardware", "channel-matrix"]`).
+- For faster local iteration when Matrix is not needed:
+
+```bash
+cargo check --no-default-features --features hardware
+```
+
+- To explicitly enable Matrix support in custom feature sets:
+
+```bash
+cargo check --no-default-features --features hardware,channel-matrix
+```
+
+If `[channels_config.matrix]` is present but the binary was built without `channel-matrix`, `zeroclaw channel list`, `zeroclaw channel doctor`, and `zeroclaw channel start` will log that Matrix is intentionally skipped for this build.
+
---
## 2. Delivery Modes at a Glance
@@ -66,7 +101,7 @@ Notes:
| Mattermost | polling | No |
| Matrix | sync API (supports E2EE) | No |
| Signal | signal-cli HTTP bridge | No (local bridge endpoint) |
-| WhatsApp | webhook | Yes (public HTTPS callback) |
+| WhatsApp | webhook (Cloud API) or websocket (Web mode) | Cloud API: Yes (public HTTPS callback), Web mode: No |
| Webhook | gateway endpoint (`/webhook`) | Usually yes |
| Email | IMAP polling + SMTP send | No |
| IRC | IRC socket | No |
@@ -103,8 +138,17 @@ Field names differ by channel:
[channels_config.telegram]
bot_token = "123456:telegram-token"
allowed_users = ["*"]
+stream_mode = "off" # optional: off | partial
+draft_update_interval_ms = 1000 # optional: edit throttle for partial streaming
+mention_only = false # optional: require @mention in groups
+interrupt_on_new_message = false # optional: cancel in-flight same-sender same-chat request
```
+Telegram notes:
+
+- `interrupt_on_new_message = true` preserves interrupted user turns in conversation history, then restarts generation on the newest message.
+- Interruption scope is strict: same sender in the same chat. Messages from different chats are processed independently.
+
### 4.2 Discord
```toml
@@ -164,6 +208,13 @@ ignore_stories = true
### 4.7 WhatsApp
+ZeroClaw supports two WhatsApp backends:
+
+- **Cloud API mode** (`phone_number_id` + `access_token` + `verify_token`)
+- **WhatsApp Web mode** (`session_path`, requires build flag `--features whatsapp-web`)
+
+Cloud API mode:
+
```toml
[channels_config.whatsapp]
access_token = "EAAB..."
@@ -173,6 +224,22 @@ app_secret = "your-app-secret" # optional but recommended
allowed_numbers = ["*"]
```
+WhatsApp Web mode:
+
+```toml
+[channels_config.whatsapp]
+session_path = "~/.zeroclaw/state/whatsapp-web/session.db"
+pair_phone = "15551234567" # optional; omit to use QR flow
+pair_code = "" # optional custom pair code
+allowed_numbers = ["*"]
+```
+
+Notes:
+
+- Build with `cargo build --features whatsapp-web` (or equivalent run command).
+- Keep `session_path` on persistent storage to avoid relinking after restart.
+- Reply routing uses the originating chat JID, so direct and group replies work correctly.
+
### 4.8 Webhook Channel Config (Gateway)
`channels_config.webhook` enables webhook-specific gateway behavior.
@@ -331,7 +398,7 @@ rg -n "Matrix|Telegram|Discord|Slack|Mattermost|Signal|WhatsApp|Email|IRC|Lark|D
| Mattermost | `Mattermost channel listening on` | `Mattermost: ignoring message from unauthorized user:` | `Mattermost poll error:` / `Mattermost parse error:` |
| Matrix | `Matrix channel listening on room` / `Matrix room ... is encrypted; E2EE decryption is enabled via matrix-sdk.` | `Matrix whoami failed; falling back to configured session hints for E2EE session restore:` / `Matrix whoami failed while resolving listener user_id; using configured user_id hint:` | `Matrix sync error: ... retrying...` |
| Signal | `Signal channel listening via SSE on` | (allowlist checks are enforced by `allowed_from`) | `Signal SSE returned ...` / `Signal SSE connect error:` |
-| WhatsApp (channel) | `WhatsApp channel active (webhook mode).` | `WhatsApp: ignoring message from unauthorized number:` | `WhatsApp send failed:` |
+| WhatsApp (channel) | `WhatsApp channel active (webhook mode).` / `WhatsApp Web connected successfully` | `WhatsApp: ignoring message from unauthorized number:` / `WhatsApp Web: message from ... not in allowed list` | `WhatsApp send failed:` / `WhatsApp Web stream error:` |
| Webhook / WhatsApp (gateway) | `WhatsApp webhook verified successfully` | `Webhook: rejected — not paired / invalid bearer token` / `Webhook: rejected request — invalid or missing X-Webhook-Secret` / `WhatsApp webhook verification failed — token mismatch` | `Webhook JSON parse error:` |
| Email | `Email polling every ...` / `Email sent to ...` | `Blocked email from ...` | `Email poll failed:` / `Email poll task panicked:` |
| IRC | `IRC channel connecting to ...` / `IRC registered as ...` | (allowlist checks are enforced by `allowed_users`) | `IRC SASL authentication failed (...)` / `IRC server does not support SASL...` / `IRC nickname ... is in use, trying ...` |
@@ -349,4 +416,3 @@ If a specific channel task crashes or exits, the channel supervisor in `channels
- `Channel message worker crashed:`
These messages indicate automatic restart behavior is active, and you should inspect preceding logs for root cause.
-
diff --git a/docs/commands-reference.md b/docs/commands-reference.md
index 8c0d3ae..da9d52c 100644
--- a/docs/commands-reference.md
+++ b/docs/commands-reference.md
@@ -2,7 +2,7 @@
This reference is derived from the current CLI surface (`zeroclaw --help`).
-Last verified: **February 18, 2026**.
+Last verified: **February 19, 2026**.
## Top-Level Commands
@@ -22,6 +22,7 @@ Last verified: **February 18, 2026**.
| `integrations` | Inspect integration details |
| `skills` | List/install/remove skills |
| `migrate` | Import from external runtimes (currently OpenClaw) |
+| `config` | Export machine-readable config schema |
| `hardware` | Discover and introspect USB hardware |
| `peripheral` | Configure and flash peripherals |
@@ -33,6 +34,7 @@ Last verified: **February 18, 2026**.
- `zeroclaw onboard --interactive`
- `zeroclaw onboard --channels-only`
- `zeroclaw onboard --api-key --provider --memory `
+- `zeroclaw onboard --api-key <key> --provider <provider> --model <model> --memory <backend>`
### `agent`
@@ -51,6 +53,7 @@ Last verified: **February 18, 2026**.
- `zeroclaw service install`
- `zeroclaw service start`
- `zeroclaw service stop`
+- `zeroclaw service restart`
- `zeroclaw service status`
- `zeroclaw service uninstall`
@@ -89,6 +92,13 @@ Runtime in-chat commands (Telegram/Discord while channel server is running):
- `/model`
- `/model `
+Channel runtime also watches `config.toml` and hot-applies updates to:
+- `default_provider`
+- `default_model`
+- `default_temperature`
+- `api_key` / `api_url` (for the default provider)
+- `reliability.*` provider retry settings
+
`add/remove` currently route you back to managed setup/manual config paths (not full declarative mutators yet).
### `integrations`
@@ -101,10 +111,20 @@ Runtime in-chat commands (Telegram/Discord while channel server is running):
- `zeroclaw skills install `
- `zeroclaw skills remove `
+`<source>` accepts git remotes (`https://...`, `http://...`, `ssh://...`, and `git@host:owner/repo.git`) or a local filesystem path.
+
+Skill manifests (`SKILL.toml`) support `prompts` and `[[tools]]`; both are injected into the agent system prompt at runtime, so the model can follow skill instructions without manually reading skill files.
+
### `migrate`
- `zeroclaw migrate openclaw [--source ] [--dry-run]`
+### `config`
+
+- `zeroclaw config schema`
+
+`config schema` prints a JSON Schema (draft 2020-12) for the full `config.toml` contract to stdout.
+
### `hardware`
- `zeroclaw hardware discover`
diff --git a/docs/config-reference.md b/docs/config-reference.md
index dbc5221..8291a3c 100644
--- a/docs/config-reference.md
+++ b/docs/config-reference.md
@@ -2,11 +2,21 @@
This is a high-signal reference for common config sections and defaults.
-Last verified: **February 18, 2026**.
+Last verified: **February 19, 2026**.
-Config file path:
+Config path resolution at startup:
-- `~/.zeroclaw/config.toml`
+1. `ZEROCLAW_WORKSPACE` override (if set)
+2. persisted `~/.zeroclaw/active_workspace.toml` marker (if present)
+3. default `~/.zeroclaw/config.toml`
+
+ZeroClaw logs the resolved config on startup at `INFO` level:
+
+- `Config loaded` with fields: `path`, `workspace`, `source`, `initialized`
+
+Schema export command:
+
+- `zeroclaw config schema` (prints JSON Schema draft 2020-12 to stdout)
## Core Keys
@@ -16,17 +26,216 @@ Config file path:
| `default_model` | `anthropic/claude-sonnet-4-6` | model routed through selected provider |
| `default_temperature` | `0.7` | model temperature |
+## `[observability]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `backend` | `none` | Observability backend: `none`, `noop`, `log`, `prometheus`, `otel`, `opentelemetry`, or `otlp` |
+| `otel_endpoint` | `http://localhost:4318` | OTLP HTTP endpoint used when backend is `otel` |
+| `otel_service_name` | `zeroclaw` | Service name emitted to OTLP collector |
+
+Notes:
+
+- `backend = "otel"` uses OTLP HTTP export with a blocking exporter client so spans and metrics can be emitted safely from non-Tokio contexts.
+- Alias values `opentelemetry` and `otlp` map to the same OTel backend.
+
+Example:
+
+```toml
+[observability]
+backend = "otel"
+otel_endpoint = "http://localhost:4318"
+otel_service_name = "zeroclaw"
+```
+
+## Environment Provider Overrides
+
+Provider selection can also be controlled by environment variables. Precedence is:
+
+1. `ZEROCLAW_PROVIDER` (explicit override, always wins when non-empty)
+2. `PROVIDER` (legacy fallback, only applied when config provider is unset or still `openrouter`)
+3. `default_provider` in `config.toml`
+
+Operational note for container users:
+
+- If your `config.toml` sets an explicit custom provider like `custom:https://.../v1`, a default `PROVIDER=openrouter` from Docker/container env will no longer replace it.
+- Use `ZEROCLAW_PROVIDER` when you intentionally want runtime env to override a non-default configured provider.
+
## `[agent]`
| Key | Default | Purpose |
|---|---|---|
+| `compact_context` | `false` | When true: bootstrap_max_chars=6000, rag_chunk_limit=2. Use for 13B or smaller models |
| `max_tool_iterations` | `10` | Maximum tool-call loop turns per user message across CLI, gateway, and channels |
+| `max_history_messages` | `50` | Maximum conversation history messages retained per session |
+| `parallel_tools` | `false` | Enable parallel tool execution within a single iteration |
+| `tool_dispatcher` | `auto` | Tool dispatch strategy |
Notes:
- Setting `max_tool_iterations = 0` falls back to safe default `10`.
- If a channel message exceeds this value, the runtime returns: `Agent exceeded maximum tool iterations ()`.
+## `[agents.]`
+
+Delegate sub-agent configurations. Each key under `[agents]` defines a named sub-agent that the primary agent can delegate to.
+
+| Key | Default | Purpose |
+|---|---|---|
+| `provider` | _required_ | Provider name (e.g. `"ollama"`, `"openrouter"`, `"anthropic"`) |
+| `model` | _required_ | Model name for the sub-agent |
+| `system_prompt` | unset | Optional system prompt override for the sub-agent |
+| `api_key` | unset | Optional API key override (stored encrypted when `secrets.encrypt = true`) |
+| `temperature` | unset | Temperature override for the sub-agent |
+| `max_depth` | `3` | Max recursion depth for nested delegation |
+
+```toml
+[agents.researcher]
+provider = "openrouter"
+model = "anthropic/claude-sonnet-4-6"
+system_prompt = "You are a research assistant."
+max_depth = 2
+
+[agents.coder]
+provider = "ollama"
+model = "qwen2.5-coder:32b"
+temperature = 0.2
+```
+
+## `[runtime]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `reasoning_enabled` | unset (`None`) | Global reasoning/thinking override for providers that support explicit controls |
+
+Notes:
+
+- `reasoning_enabled = false` explicitly disables provider-side reasoning for supported providers (currently `ollama`, via request field `think: false`).
+- `reasoning_enabled = true` explicitly requests reasoning for supported providers (`think: true` on `ollama`).
+- Unset keeps provider defaults.
+
+## `[skills]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `open_skills_enabled` | `false` | Opt-in loading/sync of community `open-skills` repository |
+| `open_skills_dir` | unset | Optional local path for `open-skills` (defaults to `$HOME/open-skills` when enabled) |
+
+Notes:
+
+- Security-first default: ZeroClaw does **not** clone or sync `open-skills` unless `open_skills_enabled = true`.
+- Environment overrides:
+ - `ZEROCLAW_OPEN_SKILLS_ENABLED` accepts `1/0`, `true/false`, `yes/no`, `on/off`.
+ - `ZEROCLAW_OPEN_SKILLS_DIR` overrides the repository path when non-empty.
+- Precedence for enable flag: `ZEROCLAW_OPEN_SKILLS_ENABLED` → `skills.open_skills_enabled` in `config.toml` → default `false`.
+
+## `[composio]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `enabled` | `false` | Enable Composio managed OAuth tools |
+| `api_key` | unset | Composio API key used by the `composio` tool |
+| `entity_id` | `default` | Default `user_id` sent on connect/execute calls |
+
+Notes:
+
+- Backward compatibility: legacy `enable = true` is accepted as an alias for `enabled = true`.
+- If `enabled = false` or `api_key` is missing, the `composio` tool is not registered.
+- ZeroClaw requests Composio v3 tools with `toolkit_versions=latest` and executes tools with `version="latest"` to avoid stale default tool revisions.
+- Typical flow: call `connect`, complete browser OAuth, then run `execute` for the desired tool action.
+- If Composio returns a missing connected-account reference error, call `list_accounts` (optionally with `app`) and pass the returned `connected_account_id` to `execute`.
+
+## `[cost]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `enabled` | `false` | Enable cost tracking |
+| `daily_limit_usd` | `10.00` | Daily spending limit in USD |
+| `monthly_limit_usd` | `100.00` | Monthly spending limit in USD |
+| `warn_at_percent` | `80` | Warn when spending reaches this percentage of limit |
+| `allow_override` | `false` | Allow requests to exceed budget with `--override` flag |
+
+Notes:
+
+- When `enabled = true`, the runtime tracks per-request cost estimates and enforces daily/monthly limits.
+- At `warn_at_percent` threshold, a warning is emitted but requests continue.
+- When a limit is reached, requests are rejected unless `allow_override = true` and the `--override` flag is passed.
+
+## `[identity]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `format` | `openclaw` | Identity format: `"openclaw"` (default) or `"aieos"` |
+| `aieos_path` | unset | Path to AIEOS JSON file (relative to workspace) |
+| `aieos_inline` | unset | Inline AIEOS JSON (alternative to file path) |
+
+Notes:
+
+- Use `format = "aieos"` with either `aieos_path` or `aieos_inline` to load an AIEOS / OpenClaw identity document.
+- Only one of `aieos_path` or `aieos_inline` should be set; `aieos_path` takes precedence.
+
+## `[multimodal]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `max_images` | `4` | Maximum image markers accepted per request |
+| `max_image_size_mb` | `5` | Per-image size limit before base64 encoding |
+| `allow_remote_fetch` | `false` | Allow fetching `http(s)` image URLs from markers |
+
+Notes:
+
+- Runtime accepts image markers in user messages with syntax: ``[IMAGE:<source>]``.
+- Supported sources:
+ - Local file path (for example ``[IMAGE:/tmp/screenshot.png]``)
+  - Data URI (for example ``[IMAGE:data:image/png;base64,...]``)
+  - Remote URL only when `allow_remote_fetch = true`
+- Allowed MIME types: `image/png`, `image/jpeg`, `image/webp`, `image/gif`, `image/bmp`.
+- When the active provider does not support vision, requests fail with a structured capability error (`capability=vision`) instead of silently dropping images.
+
+## `[browser]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `enabled` | `false` | Enable `browser_open` tool (opens URLs without scraping) |
+| `allowed_domains` | `[]` | Allowed domains for `browser_open` (exact or subdomain match) |
+| `session_name` | unset | Browser session name (for agent-browser automation) |
+| `backend` | `agent_browser` | Browser automation backend: `"agent_browser"`, `"rust_native"`, `"computer_use"`, or `"auto"` |
+| `native_headless` | `true` | Headless mode for rust-native backend |
+| `native_webdriver_url` | `http://127.0.0.1:9515` | WebDriver endpoint URL for rust-native backend |
+| `native_chrome_path` | unset | Optional Chrome/Chromium executable path for rust-native backend |
+
+### `[browser.computer_use]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `endpoint` | `http://127.0.0.1:8787/v1/actions` | Sidecar endpoint for computer-use actions (OS-level mouse/keyboard/screenshot) |
+| `api_key` | unset | Optional bearer token for computer-use sidecar (stored encrypted) |
+| `timeout_ms` | `15000` | Per-action request timeout in milliseconds |
+| `allow_remote_endpoint` | `false` | Allow remote/public endpoint for computer-use sidecar |
+| `window_allowlist` | `[]` | Optional window title/process allowlist forwarded to sidecar policy |
+| `max_coordinate_x` | unset | Optional X-axis boundary for coordinate-based actions |
+| `max_coordinate_y` | unset | Optional Y-axis boundary for coordinate-based actions |
+
+Notes:
+
+- When `backend = "computer_use"`, the agent delegates browser actions to the sidecar at `computer_use.endpoint`.
+- `allow_remote_endpoint = false` (default) rejects any non-loopback endpoint to prevent accidental public exposure.
+- Use `window_allowlist` to restrict which OS windows the sidecar can interact with.
+
+## `[http_request]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `enabled` | `false` | Enable `http_request` tool for API interactions |
+| `allowed_domains` | `[]` | Allowed domains for HTTP requests (exact or subdomain match) |
+| `max_response_size` | `1000000` | Maximum response size in bytes (default: 1 MB) |
+| `timeout_secs` | `30` | Request timeout in seconds |
+
+Notes:
+
+- Deny-by-default: if `allowed_domains` is empty, all HTTP requests are rejected.
+- Use exact domain or subdomain matching (e.g. `"api.example.com"`, `"example.com"`).
+
## `[gateway]`
| Key | Default | Purpose |
@@ -36,20 +245,133 @@ Notes:
| `require_pairing` | `true` | require pairing before bearer auth |
| `allow_public_bind` | `false` | block accidental public exposure |
+## `[autonomy]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `level` | `supervised` | `read_only`, `supervised`, or `full` |
+| `workspace_only` | `true` | restrict writes/command paths to workspace scope |
+| `allowed_commands` | _required for shell execution_ | allowlist of executable names |
+| `forbidden_paths` | `[]` | explicit path denylist |
+| `max_actions_per_hour` | `100` | per-policy action budget |
+| `max_cost_per_day_cents` | `1000` | per-policy spend guardrail |
+| `require_approval_for_medium_risk` | `true` | approval gate for medium-risk commands |
+| `block_high_risk_commands` | `true` | hard block for high-risk commands |
+| `auto_approve` | `[]` | tool operations always auto-approved |
+| `always_ask` | `[]` | tool operations that always require approval |
+
+Notes:
+
+- `level = "full"` skips medium-risk approval gating for shell execution, while still enforcing configured guardrails.
+- Shell separator/operator parsing is quote-aware. Characters like `;` inside quoted arguments are treated as literals, not command separators.
+- Unquoted shell chaining/operators are still enforced by policy checks (`;`, `|`, `&&`, `||`, background chaining, and redirects).
+
## `[memory]`
| Key | Default | Purpose |
|---|---|---|
| `backend` | `sqlite` | `sqlite`, `lucid`, `markdown`, `none` |
-| `auto_save` | `true` | automatic persistence |
+| `auto_save` | `true` | persist user-stated inputs only (assistant outputs are excluded) |
| `embedding_provider` | `none` | `none`, `openai`, or custom endpoint |
+| `embedding_model` | `text-embedding-3-small` | embedding model ID, or `hint:<name>` route |
+| `embedding_dimensions` | `1536` | expected vector size for selected embedding model |
| `vector_weight` | `0.7` | hybrid ranking vector weight |
| `keyword_weight` | `0.3` | hybrid ranking keyword weight |
+Notes:
+
+- Memory context injection ignores legacy `assistant_resp*` auto-save keys to prevent old model-authored summaries from being treated as facts.
+
+## `[[model_routes]]` and `[[embedding_routes]]`
+
+Use route hints so integrations can keep stable names while model IDs evolve.
+
+### `[[model_routes]]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `hint` | _required_ | Task hint name (e.g. `"reasoning"`, `"fast"`, `"code"`, `"summarize"`) |
+| `provider` | _required_ | Provider to route to (must match a known provider name) |
+| `model` | _required_ | Model to use with that provider |
+| `api_key` | unset | Optional API key override for this route's provider |
+
+### `[[embedding_routes]]`
+
+| Key | Default | Purpose |
+|---|---|---|
+| `hint` | _required_ | Route hint name (e.g. `"semantic"`, `"archive"`, `"faq"`) |
+| `provider` | _required_ | Embedding provider (`"none"`, `"openai"`, or `"custom:<url>"`) |
+| `model` | _required_ | Embedding model to use with that provider |
+| `dimensions` | unset | Optional embedding dimension override for this route |
+| `api_key` | unset | Optional API key override for this route's provider |
+
+```toml
+[memory]
+embedding_model = "hint:semantic"
+
+[[model_routes]]
+hint = "reasoning"
+provider = "openrouter"
+model = "provider/model-id"
+
+[[embedding_routes]]
+hint = "semantic"
+provider = "openai"
+model = "text-embedding-3-small"
+dimensions = 1536
+```
+
+Upgrade strategy:
+
+1. Keep hints stable (`hint:reasoning`, `hint:semantic`).
+2. Update only `model = "...new-version..."` in the route entries.
+3. Validate with `zeroclaw doctor` before restart/rollout.
+
+## `[query_classification]`
+
+Automatic model hint routing — maps user messages to `[[model_routes]]` hints based on content patterns.
+
+| Key | Default | Purpose |
+|---|---|---|
+| `enabled` | `false` | Enable automatic query classification |
+| `rules` | `[]` | Classification rules (evaluated in priority order) |
+
+Each rule in `rules`:
+
+| Key | Default | Purpose |
+|---|---|---|
+| `hint` | _required_ | Must match a `[[model_routes]]` hint value |
+| `keywords` | `[]` | Case-insensitive substring matches |
+| `patterns` | `[]` | Case-sensitive literal matches (for code fences, keywords like `"fn "`) |
+| `min_length` | unset | Only match if message length ≥ N chars |
+| `max_length` | unset | Only match if message length ≤ N chars |
+| `priority` | `0` | Higher priority rules are checked first |
+
+```toml
+[query_classification]
+enabled = true
+
+[[query_classification.rules]]
+hint = "reasoning"
+keywords = ["explain", "analyze", "why"]
+min_length = 200
+priority = 10
+
+[[query_classification.rules]]
+hint = "fast"
+keywords = ["hi", "hello", "thanks"]
+max_length = 50
+priority = 5
+```
+
## `[channels_config]`
Top-level channel options are configured under `channels_config`.
+| Key | Default | Purpose |
+|---|---|---|
+| `message_timeout_secs` | `300` | Base timeout in seconds for channel message processing; runtime scales this with tool-loop depth (up to 4x) |
+
Examples:
- `[channels_config.telegram]`
@@ -57,8 +379,107 @@ Examples:
- `[channels_config.whatsapp]`
- `[channels_config.email]`
+Notes:
+
+- Default `300s` is optimized for on-device LLMs (Ollama) which are slower than cloud APIs.
+- Runtime timeout budget is `message_timeout_secs * scale`, where `scale = min(max_tool_iterations, 4)` and a minimum of `1`.
+- This scaling avoids false timeouts when the first LLM turn is slow/retried but later tool-loop turns still need to complete.
+- If using cloud APIs (OpenAI, Anthropic, etc.), you can reduce this to `60` or lower.
+- Values below `30` are clamped to `30` to avoid immediate timeout churn.
+- When a timeout occurs, users receive: `⚠️ Request timed out while waiting for the model. Please try again.`
+- Telegram-only interruption behavior is controlled with `channels_config.telegram.interrupt_on_new_message` (default `false`).
+ When enabled, a newer message from the same sender in the same chat cancels the in-flight request and preserves interrupted user context.
+- While `zeroclaw channel start` is running, updates to `default_provider`, `default_model`, `default_temperature`, `api_key`, `api_url`, and `reliability.*` are hot-applied from `config.toml` on the next inbound message.
+
See detailed channel matrix and allowlist behavior in [channels-reference.md](channels-reference.md).
+### `[channels_config.whatsapp]`
+
+WhatsApp supports two backends under one config table.
+
+Cloud API mode (Meta webhook):
+
+| Key | Required | Purpose |
+|---|---|---|
+| `access_token` | Yes | Meta Cloud API bearer token |
+| `phone_number_id` | Yes | Meta phone number ID |
+| `verify_token` | Yes | Webhook verification token |
+| `app_secret` | Optional | Enables webhook signature verification (`X-Hub-Signature-256`) |
+| `allowed_numbers` | Recommended | Allowed inbound numbers (`[]` = deny all, `"*"` = allow all) |
+
+WhatsApp Web mode (native client):
+
+| Key | Required | Purpose |
+|---|---|---|
+| `session_path` | Yes | Persistent SQLite session path |
+| `pair_phone` | Optional | Pair-code flow phone number (digits only) |
+| `pair_code` | Optional | Custom pair code (otherwise auto-generated) |
+| `allowed_numbers` | Recommended | Allowed inbound numbers (`[]` = deny all, `"*"` = allow all) |
+
+Notes:
+
+- WhatsApp Web requires build flag `whatsapp-web`.
+- If both Cloud and Web fields are present, Cloud mode wins for backward compatibility.
+
+## `[hardware]`
+
+Hardware wizard configuration for physical-world access (STM32, probe, serial).
+
+| Key | Default | Purpose |
+|---|---|---|
+| `enabled` | `false` | Whether hardware access is enabled |
+| `transport` | `none` | Transport mode: `"none"`, `"native"`, `"serial"`, or `"probe"` |
+| `serial_port` | unset | Serial port path (e.g. `"/dev/ttyACM0"`) |
+| `baud_rate` | `115200` | Serial baud rate |
+| `probe_target` | unset | Probe target chip (e.g. `"STM32F401RE"`) |
+| `workspace_datasheets` | `false` | Enable workspace datasheet RAG (index PDF schematics for AI pin lookups) |
+
+Notes:
+
+- Use `transport = "serial"` with `serial_port` for USB-serial connections.
+- Use `transport = "probe"` with `probe_target` for debug-probe flashing (e.g. ST-Link).
+- See [hardware-peripherals-design.md](hardware-peripherals-design.md) for protocol details.
+
+## `[peripherals]`
+
+Higher-level peripheral board configuration. Boards become agent tools when enabled.
+
+| Key | Default | Purpose |
+|---|---|---|
+| `enabled` | `false` | Enable peripheral support (boards become agent tools) |
+| `boards` | `[]` | Board configurations |
+| `datasheet_dir` | unset | Path to datasheet docs (relative to workspace) for RAG retrieval |
+
+Each entry in `boards`:
+
+| Key | Default | Purpose |
+|---|---|---|
+| `board` | _required_ | Board type: `"nucleo-f401re"`, `"rpi-gpio"`, `"esp32"`, etc. |
+| `transport` | `serial` | Transport: `"serial"`, `"native"`, `"websocket"` |
+| `path` | unset | Path for serial: `"/dev/ttyACM0"`, `"/dev/ttyUSB0"` |
+| `baud` | `115200` | Baud rate for serial |
+
+```toml
+[peripherals]
+enabled = true
+datasheet_dir = "docs/datasheets"
+
+[[peripherals.boards]]
+board = "nucleo-f401re"
+transport = "serial"
+path = "/dev/ttyACM0"
+baud = 115200
+
+[[peripherals.boards]]
+board = "rpi-gpio"
+transport = "native"
+```
+
+Notes:
+
+- Place `.md`/`.txt` datasheet files named by board (e.g. `nucleo-f401re.md`, `rpi-gpio.md`) in `datasheet_dir` for RAG retrieval.
+- See [hardware-peripherals-design.md](hardware-peripherals-design.md) for board protocol and firmware notes.
+
## Security-Relevant Defaults
- deny-by-default channel allowlists (`[]` means deny all)
@@ -73,6 +494,7 @@ After editing config:
zeroclaw status
zeroclaw doctor
zeroclaw channel doctor
+zeroclaw service restart
```
## Related Docs
diff --git a/docs/frictionless-security.md b/docs/frictionless-security.md
index 2f5fde6..f62046d 100644
--- a/docs/frictionless-security.md
+++ b/docs/frictionless-security.md
@@ -26,7 +26,7 @@ pub fn run_wizard() -> Result {
security: SecurityConfig::autodetect(), // Silent!
};
- config.save()?;
+ config.save().await?;
Ok(config)
}
```
diff --git a/docs/getting-started/README.md b/docs/getting-started/README.md
index e462641..3c7e91c 100644
--- a/docs/getting-started/README.md
+++ b/docs/getting-started/README.md
@@ -8,6 +8,15 @@ For first-time setup and quick orientation.
2. One-click setup and dual bootstrap mode: [../one-click-bootstrap.md](../one-click-bootstrap.md)
3. Find commands by tasks: [../commands-reference.md](../commands-reference.md)
+## Choose Your Path
+
+| Scenario | Command |
+|----------|---------|
+| I have an API key, want fastest setup | `zeroclaw onboard --api-key sk-... --provider openrouter` |
+| I want guided prompts | `zeroclaw onboard --interactive` |
+| Config exists, just fix channels | `zeroclaw onboard --channels-only` |
+| Using subscription auth | See [Subscription Auth](../../README.md#subscription-auth-openai-codex--claude-code) |
+
## Onboarding and Validation
- Quick onboarding: `zeroclaw onboard --api-key "sk-..." --provider openrouter`
diff --git a/docs/hardware/README.md b/docs/hardware/README.md
index e2158ec..ca0a62a 100644
--- a/docs/hardware/README.md
+++ b/docs/hardware/README.md
@@ -2,6 +2,8 @@
For board integration, firmware flow, and peripheral architecture.
+ZeroClaw's hardware subsystem enables direct control of microcontrollers and peripherals via the `Peripheral` trait. Each board exposes tools for GPIO, ADC, and sensor operations, allowing agent-driven hardware interaction on boards like STM32 Nucleo, Raspberry Pi, and ESP32. See [hardware-peripherals-design.md](../hardware-peripherals-design.md) for the full architecture.
+
## Entry Points
- Architecture and peripheral model: [../hardware-peripherals-design.md](../hardware-peripherals-design.md)
diff --git a/docs/one-click-bootstrap.md b/docs/one-click-bootstrap.md
index 0cc8b7c..c9001f7 100644
--- a/docs/one-click-bootstrap.md
+++ b/docs/one-click-bootstrap.md
@@ -2,7 +2,13 @@
This page defines the fastest supported path to install and initialize ZeroClaw.
-Last verified: **February 18, 2026**.
+Last verified: **February 20, 2026**.
+
+## Option 0: Homebrew (macOS/Linuxbrew)
+
+```bash
+brew install zeroclaw
+```
## Option A (Recommended): Clone + local script
@@ -17,6 +23,31 @@ What it does by default:
1. `cargo build --release --locked`
2. `cargo install --path . --force --locked`
+### Resource preflight and pre-built flow
+
+Source builds typically require at least:
+
+- **2 GB RAM + swap**
+- **6 GB free disk**
+
+When resources are constrained, bootstrap now attempts a pre-built binary first.
+
+```bash
+./bootstrap.sh --prefer-prebuilt
+```
+
+To require binary-only installation and fail if no compatible release asset exists:
+
+```bash
+./bootstrap.sh --prebuilt-only
+```
+
+To bypass pre-built flow and force source compilation:
+
+```bash
+./bootstrap.sh --force-source-build
+```
+
## Dual-mode bootstrap
Default behavior is **app-only** (build/install ZeroClaw) and expects existing Rust toolchain.
@@ -31,6 +62,9 @@ Notes:
- `--install-system-deps` installs compiler/build prerequisites (may require `sudo`).
- `--install-rust` installs Rust via `rustup` when missing.
+- `--prefer-prebuilt` tries release binary download first, then falls back to source build.
+- `--prebuilt-only` disables source fallback.
+- `--force-source-build` disables pre-built flow entirely.
## Option B: Remote one-liner
@@ -52,6 +86,15 @@ If you run Option B outside a repository checkout, the bootstrap script automati
## Optional onboarding modes
+### Containerized onboarding (Docker)
+
+```bash
+./bootstrap.sh --docker
+```
+
+This builds a local ZeroClaw image and launches onboarding inside a container while
+persisting config/workspace to `./.zeroclaw-docker`.
+
### Quick onboarding (non-interactive)
```bash
diff --git a/docs/project/README.md b/docs/project/README.md
index 392a1d0..478200c 100644
--- a/docs/project/README.md
+++ b/docs/project/README.md
@@ -8,6 +8,10 @@ Time-bound project status snapshots for planning documentation and operations wo
## Scope
-Use snapshots to understand changing PR/issue pressure and prioritize doc maintenance.
+Project snapshots are time-bound assessments of open PRs, issues, and documentation health. Use these to:
-For stable classification of docs intent, use [../docs-inventory.md](../docs-inventory.md).
+- Identify documentation gaps driven by feature work
+- Prioritize docs maintenance alongside code changes
+- Track evolving PR/issue pressure over time
+
+For stable documentation classification (not time-bound), use [docs-inventory.md](../docs-inventory.md).
diff --git a/docs/providers-reference.md b/docs/providers-reference.md
index ddefb8c..f9c7726 100644
--- a/docs/providers-reference.md
+++ b/docs/providers-reference.md
@@ -2,7 +2,7 @@
This document maps provider IDs, aliases, and credential environment variables.
-Last verified: **February 18, 2026**.
+Last verified: **February 19, 2026**.
## How to List Providers
@@ -18,6 +18,10 @@ Runtime resolution order is:
2. Provider-specific env var(s)
3. Generic fallback env vars: `ZEROCLAW_API_KEY` then `API_KEY`
+For resilient fallback chains (`reliability.fallback_providers`), each fallback
+provider resolves credentials independently. The primary provider's explicit
+credential is not reused for fallback providers.
+
## Provider Catalog
| Canonical ID | Aliases | Local | Provider-specific env var(s) |
@@ -37,9 +41,9 @@ Runtime resolution order is:
| `zai` | `z.ai` | No | `ZAI_API_KEY` |
| `glm` | `zhipu` | No | `GLM_API_KEY` |
| `minimax` | `minimax-intl`, `minimax-io`, `minimax-global`, `minimax-cn`, `minimaxi`, `minimax-oauth`, `minimax-oauth-cn`, `minimax-portal`, `minimax-portal-cn` | No | `MINIMAX_OAUTH_TOKEN`, `MINIMAX_API_KEY` |
-| `bedrock` | `aws-bedrock` | No | (use config/`API_KEY` fallback) |
+| `bedrock` | `aws-bedrock` | No | `AWS_ACCESS_KEY_ID` + `AWS_SECRET_ACCESS_KEY` (optional: `AWS_REGION`) |
| `qianfan` | `baidu` | No | `QIANFAN_API_KEY` |
-| `qwen` | `dashscope`, `qwen-intl`, `dashscope-intl`, `qwen-us`, `dashscope-us` | No | `DASHSCOPE_API_KEY` |
+| `qwen` | `dashscope`, `qwen-intl`, `dashscope-intl`, `qwen-us`, `dashscope-us`, `qwen-code`, `qwen-oauth`, `qwen_oauth` | No | `QWEN_OAUTH_TOKEN`, `DASHSCOPE_API_KEY` |
| `groq` | — | No | `GROQ_API_KEY` |
| `mistral` | — | No | `MISTRAL_API_KEY` |
| `xai` | `grok` | No | `XAI_API_KEY` |
@@ -52,6 +56,46 @@ Runtime resolution order is:
| `lmstudio` | `lm-studio` | Yes | (optional; local by default) |
| `nvidia` | `nvidia-nim`, `build.nvidia.com` | No | `NVIDIA_API_KEY` |
+### Gemini Notes
+
+- Provider ID: `gemini` (aliases: `google`, `google-gemini`)
+- Auth can come from `GEMINI_API_KEY`, `GOOGLE_API_KEY`, or Gemini CLI OAuth cache (`~/.gemini/oauth_creds.json`)
+- API key requests use `generativelanguage.googleapis.com/v1beta`
+- Gemini CLI OAuth requests use `cloudcode-pa.googleapis.com/v1internal` with Code Assist request envelope semantics
+
+### Ollama Vision Notes
+
+- Provider ID: `ollama`
+- Vision input is supported through user message image markers: ``[IMAGE:<url>]``.
+- After multimodal normalization, ZeroClaw sends image payloads through Ollama's native `messages[].images` field.
+- If a non-vision provider is selected, ZeroClaw returns a structured capability error instead of silently ignoring images.
+
+### Bedrock Notes
+
+- Provider ID: `bedrock` (alias: `aws-bedrock`)
+- API: [Converse API](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html)
+- Authentication: AWS AKSK (not a single API key). Set `AWS_ACCESS_KEY_ID` + `AWS_SECRET_ACCESS_KEY` environment variables.
+- Optional: `AWS_SESSION_TOKEN` for temporary/STS credentials, `AWS_REGION` or `AWS_DEFAULT_REGION` (default: `us-east-1`).
+- Default onboarding model: `anthropic.claude-sonnet-4-5-20250929-v1:0`
+- Supports native tool calling and prompt caching (`cachePoint`).
+- Cross-region inference profiles supported (e.g., `us.anthropic.claude-*`).
+- Model IDs use Bedrock format: `anthropic.claude-sonnet-4-6`, `anthropic.claude-opus-4-6-v1`, etc.
+
+### Ollama Reasoning Toggle
+
+You can control Ollama reasoning/thinking behavior from `config.toml`:
+
+```toml
+[runtime]
+reasoning_enabled = false
+```
+
+Behavior:
+
+- `false`: sends `think: false` to Ollama `/api/chat` requests.
+- `true`: sends `think: true`.
+- Unset: omits `think` and keeps Ollama/model defaults.
+
### Kimi Code Notes
- Provider ID: `kimi-code`
@@ -107,6 +151,33 @@ Optional:
- `MINIMAX_OAUTH_REGION=global` or `cn` (defaults by provider alias)
- `MINIMAX_OAUTH_CLIENT_ID` to override the default OAuth client id
+Channel compatibility note:
+
+- For MiniMax-backed channel conversations, runtime history is normalized to keep valid `user`/`assistant` turn order.
+- Channel-specific delivery guidance (for example Telegram attachment markers) is merged into the leading system prompt instead of being appended as a trailing `system` turn.
+
+## Qwen Code OAuth Setup (config.toml)
+
+Set Qwen Code OAuth mode in config:
+
+```toml
+default_provider = "qwen-code"
+api_key = "qwen-oauth"
+```
+
+Credential resolution for `qwen-code`:
+
+1. Explicit `api_key` value (if not the placeholder `qwen-oauth`)
+2. `QWEN_OAUTH_TOKEN`
+3. `~/.qwen/oauth_creds.json` (reuses Qwen Code cached OAuth credentials)
+4. Optional refresh via `QWEN_OAUTH_REFRESH_TOKEN` (or cached refresh token)
+5. If no OAuth placeholder is used, `DASHSCOPE_API_KEY` can still be used as fallback
+
+Optional endpoint override:
+
+- `QWEN_OAUTH_RESOURCE_URL` (normalized to `https://.../v1` if needed)
+- If unset, `resource_url` from cached OAuth credentials is used when available
+
## Model Routing (`hint:`)
You can route model calls by hint using `[[model_routes]]`:
@@ -128,3 +199,56 @@ Then call with a hint model name (for example from tool or integration paths):
```text
hint:reasoning
```
+
+## Embedding Routing (`hint:`)
+
+You can route embedding calls with the same hint pattern using `[[embedding_routes]]`.
+Set `[memory].embedding_model` to a `hint:` value to activate routing.
+
+```toml
+[memory]
+embedding_model = "hint:semantic"
+
+[[embedding_routes]]
+hint = "semantic"
+provider = "openai"
+model = "text-embedding-3-small"
+dimensions = 1536
+
+[[embedding_routes]]
+hint = "archive"
+provider = "custom:https://embed.example.com/v1"
+model = "your-embedding-model-id"
+dimensions = 1024
+```
+
+Supported embedding providers:
+
+- `none`
+- `openai`
+- `custom:<base-url>` (OpenAI-compatible embeddings endpoint)
+
+Optional per-route key override:
+
+```toml
+[[embedding_routes]]
+hint = "semantic"
+provider = "openai"
+model = "text-embedding-3-small"
+api_key = "sk-route-specific"
+```
+
+## Upgrading Models Safely
+
+Use stable hints and update only route targets when providers deprecate model IDs.
+
+Recommended workflow:
+
+1. Keep call sites stable (`hint:reasoning`, `hint:semantic`).
+2. Change only the target model under `[[model_routes]]` or `[[embedding_routes]]`.
+3. Run:
+ - `zeroclaw doctor`
+ - `zeroclaw status`
+4. Smoke test one representative flow (chat + memory retrieval) before rollout.
+
+This minimizes breakage because integrations and prompts do not need to change when model IDs are upgraded.
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
index e06e74a..7fd02aa 100644
--- a/docs/troubleshooting.md
+++ b/docs/troubleshooting.md
@@ -2,7 +2,7 @@
This guide focuses on common setup/runtime failures and fast resolution paths.
-Last verified: **February 18, 2026**.
+Last verified: **February 20, 2026**.
## Installation / Bootstrap
@@ -32,6 +32,93 @@ Fix:
./bootstrap.sh --install-system-deps
```
+### Build fails on low-RAM / low-disk hosts
+
+Symptoms:
+
+- `cargo build --release` is killed (`signal: 9`, OOM killer, or `cannot allocate memory`)
+- Build crashes after adding swap because disk space runs out
+
+Why this happens:
+
+- Runtime memory (<5MB for common operations) is not the same as compile-time memory.
+- Full source build can require **2 GB RAM + swap** and **6+ GB free disk**.
+- Enabling swap on a tiny disk can avoid RAM OOM but still fail due to disk exhaustion.
+
+Preferred path for constrained machines:
+
+```bash
+./bootstrap.sh --prefer-prebuilt
+```
+
+Binary-only mode (no source fallback):
+
+```bash
+./bootstrap.sh --prebuilt-only
+```
+
+If you must compile from source on constrained hosts:
+
+1. Add swap only if you also have enough free disk for both swap + build output.
+2. Limit cargo parallelism:
+
+```bash
+CARGO_BUILD_JOBS=1 cargo build --release --locked
+```
+
+3. Reduce heavy features when Matrix is not required:
+
+```bash
+cargo build --release --locked --no-default-features --features hardware
+```
+
+4. Cross-compile on a stronger machine and copy the binary to the target host.
+
+### Build is very slow or appears stuck
+
+Symptoms:
+
+- `cargo check` / `cargo build` appears stuck at `Checking zeroclaw` for a long time
+- repeated `Blocking waiting for file lock on package cache` or `build directory`
+
+Why this happens in ZeroClaw:
+
+- Matrix E2EE stack (`matrix-sdk`, `ruma`, `vodozemac`) is large and expensive to type-check.
+- TLS + crypto native build scripts (`aws-lc-sys`, `ring`) add noticeable compile time.
+- `rusqlite` with bundled SQLite compiles C code locally.
+- Running multiple cargo jobs/worktrees in parallel causes lock contention.
+
+Fast checks:
+
+```bash
+cargo check --timings
+cargo tree -d
+```
+
+The timing report is written to `target/cargo-timings/cargo-timing.html`.
+
+Faster local iteration (when Matrix channel is not needed):
+
+```bash
+cargo check --no-default-features --features hardware
+```
+
+This skips `channel-matrix` and can significantly reduce compile time.
+
+To build with Matrix support explicitly enabled:
+
+```bash
+cargo check --no-default-features --features hardware,channel-matrix
+```
+
+Lock-contention mitigation:
+
+```bash
+pgrep -af "cargo (check|build|test)"
+```
+
+Stop unrelated cargo jobs before running your own build.
+
### `zeroclaw` command not found after install
Symptom:
diff --git a/flake.lock b/flake.lock
new file mode 100644
index 0000000..b591ed4
--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,99 @@
+{
+ "nodes": {
+ "fenix": {
+ "inputs": {
+ "nixpkgs": [
+ "nixpkgs"
+ ],
+ "rust-analyzer-src": "rust-analyzer-src"
+ },
+ "locked": {
+ "lastModified": 1771398736,
+ "narHash": "sha256-pjV3C7VJHN0o2SvE3O6xiwraLt7bnlWIF3o7Q0BC1jk=",
+ "owner": "nix-community",
+ "repo": "fenix",
+ "rev": "0f608091816de13d92e1f4058b501028b782dddd",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-community",
+ "repo": "fenix",
+ "type": "github"
+ }
+ },
+ "flake-utils": {
+ "inputs": {
+ "systems": "systems"
+ },
+ "locked": {
+ "lastModified": 1731533236,
+ "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
+ "owner": "numtide",
+ "repo": "flake-utils",
+ "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+ "type": "github"
+ },
+ "original": {
+ "owner": "numtide",
+ "repo": "flake-utils",
+ "type": "github"
+ }
+ },
+ "nixpkgs": {
+ "locked": {
+ "lastModified": 1771369470,
+ "narHash": "sha256-0NBlEBKkN3lufyvFegY4TYv5mCNHbi5OmBDrzihbBMQ=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "0182a361324364ae3f436a63005877674cf45efb",
+ "type": "github"
+ },
+ "original": {
+ "id": "nixpkgs",
+ "ref": "nixos-unstable",
+ "type": "indirect"
+ }
+ },
+ "root": {
+ "inputs": {
+ "fenix": "fenix",
+ "flake-utils": "flake-utils",
+ "nixpkgs": "nixpkgs"
+ }
+ },
+ "rust-analyzer-src": {
+ "flake": false,
+ "locked": {
+ "lastModified": 1771353660,
+ "narHash": "sha256-yp1y55kXgaa08g/gR3CNiUdkg1JRjPYfkKtEIRNE6S8=",
+ "owner": "rust-lang",
+ "repo": "rust-analyzer",
+ "rev": "09f2d468eda25a5f06ae70046357c70ae5cd77c7",
+ "type": "github"
+ },
+ "original": {
+ "owner": "rust-lang",
+ "ref": "nightly",
+ "repo": "rust-analyzer",
+ "type": "github"
+ }
+ },
+ "systems": {
+ "locked": {
+ "lastModified": 1681028828,
+ "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+ "owner": "nix-systems",
+ "repo": "default",
+ "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-systems",
+ "repo": "default",
+ "type": "github"
+ }
+ }
+ },
+ "root": "root",
+ "version": 7
+}
diff --git a/flake.nix b/flake.nix
new file mode 100644
index 0000000..9bafa47
--- /dev/null
+++ b/flake.nix
@@ -0,0 +1,61 @@
+{
+ inputs = {
+ flake-utils.url = "github:numtide/flake-utils";
+ fenix = {
+ url = "github:nix-community/fenix";
+ inputs.nixpkgs.follows = "nixpkgs";
+ };
+ nixpkgs.url = "nixpkgs/nixos-unstable";
+ };
+
+ outputs = { flake-utils, fenix, nixpkgs, ... }:
+ let
+ nixosModule = { pkgs, ... }: {
+ nixpkgs.overlays = [ fenix.overlays.default ];
+ environment.systemPackages = [
+ (pkgs.fenix.stable.withComponents [
+ "cargo"
+ "clippy"
+ "rust-src"
+ "rustc"
+ "rustfmt"
+ ])
+ pkgs.rust-analyzer
+ ];
+ };
+ in
+ flake-utils.lib.eachDefaultSystem (system:
+ let
+ pkgs = import nixpkgs {
+ inherit system;
+ overlays = [ fenix.overlays.default ];
+ };
+ rustToolchain = pkgs.fenix.stable.withComponents [
+ "cargo"
+ "clippy"
+ "rust-src"
+ "rustc"
+ "rustfmt"
+ ];
+ in {
+ packages.default = fenix.packages.${system}.stable.toolchain;
+ devShells.default = pkgs.mkShell {
+ packages = [
+ rustToolchain
+ pkgs.rust-analyzer
+ ];
+ };
+ }) // {
+ nixosConfigurations = {
+ nixos = nixpkgs.lib.nixosSystem {
+ system = "x86_64-linux";
+ modules = [ nixosModule ];
+ };
+
+ nixos-aarch64 = nixpkgs.lib.nixosSystem {
+ system = "aarch64-linux";
+ modules = [ nixosModule ];
+ };
+ };
+ };
+}
diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml
index b9d2bbe..e55d4da 100644
--- a/fuzz/Cargo.toml
+++ b/fuzz/Cargo.toml
@@ -24,3 +24,21 @@ name = "fuzz_tool_params"
path = "fuzz_targets/fuzz_tool_params.rs"
test = false
doc = false
+
+[[bin]]
+name = "fuzz_webhook_payload"
+path = "fuzz_targets/fuzz_webhook_payload.rs"
+test = false
+doc = false
+
+[[bin]]
+name = "fuzz_provider_response"
+path = "fuzz_targets/fuzz_provider_response.rs"
+test = false
+doc = false
+
+[[bin]]
+name = "fuzz_command_validation"
+path = "fuzz_targets/fuzz_command_validation.rs"
+test = false
+doc = false
diff --git a/fuzz/fuzz_targets/fuzz_command_validation.rs b/fuzz/fuzz_targets/fuzz_command_validation.rs
new file mode 100644
index 0000000..13cce01
--- /dev/null
+++ b/fuzz/fuzz_targets/fuzz_command_validation.rs
@@ -0,0 +1,10 @@
+#![no_main]
+use libfuzzer_sys::fuzz_target;
+use zeroclaw::security::SecurityPolicy;
+
+fuzz_target!(|data: &[u8]| {
+ if let Ok(s) = std::str::from_utf8(data) {
+ let policy = SecurityPolicy::default();
+ let _ = policy.validate_command_execution(s, false);
+ }
+});
diff --git a/fuzz/fuzz_targets/fuzz_provider_response.rs b/fuzz/fuzz_targets/fuzz_provider_response.rs
new file mode 100644
index 0000000..73f895d
--- /dev/null
+++ b/fuzz/fuzz_targets/fuzz_provider_response.rs
@@ -0,0 +1,9 @@
+#![no_main]
+use libfuzzer_sys::fuzz_target;
+
+fuzz_target!(|data: &[u8]| {
+ if let Ok(s) = std::str::from_utf8(data) {
+ // Fuzz provider API response deserialization
+ let _ = serde_json::from_str::<serde_json::Value>(s);
+ }
+});
diff --git a/fuzz/fuzz_targets/fuzz_webhook_payload.rs b/fuzz/fuzz_targets/fuzz_webhook_payload.rs
new file mode 100644
index 0000000..1f5b813
--- /dev/null
+++ b/fuzz/fuzz_targets/fuzz_webhook_payload.rs
@@ -0,0 +1,9 @@
+#![no_main]
+use libfuzzer_sys::fuzz_target;
+
+fuzz_target!(|data: &[u8]| {
+ if let Ok(s) = std::str::from_utf8(data) {
+ // Fuzz webhook body deserialization
+ let _ = serde_json::from_str::<serde_json::Value>(s);
+ }
+});
diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh
index b734124..b6732a7 100755
--- a/scripts/bootstrap.sh
+++ b/scripts/bootstrap.sh
@@ -15,38 +15,61 @@ error() {
usage() {
cat <<'USAGE'
-ZeroClaw one-click bootstrap
+ZeroClaw installer bootstrap engine
Usage:
- ./bootstrap.sh [options]
+ ./zeroclaw_install.sh [options]
+ ./bootstrap.sh [options] # compatibility entrypoint
Modes:
Default mode installs/builds ZeroClaw only (requires existing Rust toolchain).
+ Guided mode asks setup questions and configures options interactively.
Optional bootstrap mode can also install system dependencies and Rust.
Options:
+ --guided Run interactive guided installer
+ --no-guided Disable guided installer
+ --docker Run bootstrap in Docker and launch onboarding inside the container
--install-system-deps Install build dependencies (Linux/macOS)
--install-rust Install Rust via rustup if missing
+ --prefer-prebuilt Try latest release binary first; fallback to source build on miss
+ --prebuilt-only Install only from latest release binary (no source build fallback)
+ --force-source-build Disable prebuilt flow and always build from source
--onboard Run onboarding after install
--interactive-onboard Run interactive onboarding (implies --onboard)
--api-key API key for non-interactive onboarding
--provider Provider for non-interactive onboarding (default: openrouter)
+ --model Model for non-interactive onboarding (optional)
+ --build-first Alias for explicitly enabling separate `cargo build --release --locked`
--skip-build Skip `cargo build --release --locked`
--skip-install Skip `cargo install --path . --force --locked`
-h, --help Show help
Examples:
- ./bootstrap.sh
- ./bootstrap.sh --install-system-deps --install-rust
- ./bootstrap.sh --onboard --api-key "sk-..." --provider openrouter
- ./bootstrap.sh --interactive-onboard
+ ./zeroclaw_install.sh
+ ./zeroclaw_install.sh --guided
+ ./zeroclaw_install.sh --install-system-deps --install-rust
+ ./zeroclaw_install.sh --prefer-prebuilt
+ ./zeroclaw_install.sh --prebuilt-only
+ ./zeroclaw_install.sh --onboard --api-key "sk-..." --provider openrouter [--model "openrouter/auto"]
+ ./zeroclaw_install.sh --interactive-onboard
+
+ # Compatibility entrypoint:
+ ./bootstrap.sh --docker
# Remote one-liner
curl -fsSL https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/main/scripts/bootstrap.sh | bash
Environment:
+ ZEROCLAW_DOCKER_DATA_DIR Host path for Docker config/workspace persistence
+ ZEROCLAW_DOCKER_IMAGE Docker image tag to build/run (default: zeroclaw-bootstrap:local)
ZEROCLAW_API_KEY Used when --api-key is not provided
ZEROCLAW_PROVIDER Used when --provider is not provided (default: openrouter)
+ ZEROCLAW_MODEL Used when --model is not provided
+ ZEROCLAW_BOOTSTRAP_MIN_RAM_MB Minimum RAM threshold for source build preflight (default: 2048)
+ ZEROCLAW_BOOTSTRAP_MIN_DISK_MB Minimum free disk threshold for source build preflight (default: 6144)
+ ZEROCLAW_DISABLE_ALPINE_AUTO_DEPS
+ Set to 1 to disable Alpine auto-install of missing prerequisites
USAGE
}
@@ -54,6 +77,155 @@ have_cmd() {
command -v "$1" >/dev/null 2>&1
}
+get_total_memory_mb() {
+ case "$(uname -s)" in
+ Linux)
+ if [[ -r /proc/meminfo ]]; then
+ awk '/MemTotal:/ {printf "%d\n", $2 / 1024}' /proc/meminfo
+ fi
+ ;;
+ Darwin)
+ if have_cmd sysctl; then
+ local bytes
+ bytes="$(sysctl -n hw.memsize 2>/dev/null || true)"
+ if [[ "$bytes" =~ ^[0-9]+$ ]]; then
+ echo $((bytes / 1024 / 1024))
+ fi
+ fi
+ ;;
+ esac
+}
+
+get_available_disk_mb() {
+ local path="${1:-.}"
+ local free_kb
+ free_kb="$(df -Pk "$path" 2>/dev/null | awk 'NR==2 {print $4}')"
+ if [[ "$free_kb" =~ ^[0-9]+$ ]]; then
+ echo $((free_kb / 1024))
+ fi
+}
+
+detect_release_target() {
+ local os arch
+ os="$(uname -s)"
+ arch="$(uname -m)"
+
+ case "$os:$arch" in
+ Linux:x86_64)
+ echo "x86_64-unknown-linux-gnu"
+ ;;
+ Linux:aarch64|Linux:arm64)
+ echo "aarch64-unknown-linux-gnu"
+ ;;
+ Linux:armv7l|Linux:armv6l)
+ echo "armv7-unknown-linux-gnueabihf"
+ ;;
+ Darwin:x86_64)
+ echo "x86_64-apple-darwin"
+ ;;
+ Darwin:arm64|Darwin:aarch64)
+ echo "aarch64-apple-darwin"
+ ;;
+ *)
+ return 1
+ ;;
+ esac
+}
+
+should_attempt_prebuilt_for_resources() {
+ local workspace="${1:-.}"
+ local min_ram_mb min_disk_mb total_ram_mb free_disk_mb low_resource
+
+ min_ram_mb="${ZEROCLAW_BOOTSTRAP_MIN_RAM_MB:-2048}"
+ min_disk_mb="${ZEROCLAW_BOOTSTRAP_MIN_DISK_MB:-6144}"
+ total_ram_mb="$(get_total_memory_mb || true)"
+ free_disk_mb="$(get_available_disk_mb "$workspace" || true)"
+ low_resource=false
+
+ if [[ "$total_ram_mb" =~ ^[0-9]+$ && "$total_ram_mb" -lt "$min_ram_mb" ]]; then
+ low_resource=true
+ fi
+ if [[ "$free_disk_mb" =~ ^[0-9]+$ && "$free_disk_mb" -lt "$min_disk_mb" ]]; then
+ low_resource=true
+ fi
+
+ if [[ "$low_resource" == true ]]; then
+ warn "Source build preflight indicates constrained resources."
+ if [[ "$total_ram_mb" =~ ^[0-9]+$ ]]; then
+ warn "Detected RAM: ${total_ram_mb}MB (recommended >= ${min_ram_mb}MB for local source builds)."
+ else
+ warn "Unable to detect total RAM automatically."
+ fi
+ if [[ "$free_disk_mb" =~ ^[0-9]+$ ]]; then
+ warn "Detected free disk: ${free_disk_mb}MB (recommended >= ${min_disk_mb}MB)."
+ else
+ warn "Unable to detect free disk space automatically."
+ fi
+ return 0
+ fi
+
+ return 1
+}
+
+install_prebuilt_binary() {
+ local target archive_url temp_dir archive_path extracted_bin install_dir
+
+ if ! have_cmd curl; then
+ warn "curl is required for pre-built binary installation."
+ return 1
+ fi
+ if ! have_cmd tar; then
+ warn "tar is required for pre-built binary installation."
+ return 1
+ fi
+
+ target="$(detect_release_target || true)"
+ if [[ -z "$target" ]]; then
+ warn "No pre-built binary target mapping for $(uname -s)/$(uname -m)."
+ return 1
+ fi
+
+ archive_url="https://github.com/zeroclaw-labs/zeroclaw/releases/latest/download/zeroclaw-${target}.tar.gz"
+ temp_dir="$(mktemp -d -t zeroclaw-prebuilt-XXXXXX)"
+ archive_path="$temp_dir/zeroclaw-${target}.tar.gz"
+
+ info "Attempting pre-built binary install for target: $target"
+ if ! curl -fsSL "$archive_url" -o "$archive_path"; then
+ warn "Could not download release asset: $archive_url"
+ rm -rf "$temp_dir"
+ return 1
+ fi
+
+ if ! tar -xzf "$archive_path" -C "$temp_dir"; then
+ warn "Failed to extract pre-built archive."
+ rm -rf "$temp_dir"
+ return 1
+ fi
+
+ extracted_bin="$temp_dir/zeroclaw"
+ if [[ ! -x "$extracted_bin" ]]; then
+ extracted_bin="$(find "$temp_dir" -maxdepth 2 -type f -name zeroclaw -perm -u+x | head -n 1 || true)"
+ fi
+ if [[ -z "$extracted_bin" || ! -x "$extracted_bin" ]]; then
+ warn "Archive did not contain an executable zeroclaw binary."
+ rm -rf "$temp_dir"
+ return 1
+ fi
+
+ install_dir="$HOME/.cargo/bin"
+ mkdir -p "$install_dir"
+ install -m 0755 "$extracted_bin" "$install_dir/zeroclaw"
+ rm -rf "$temp_dir"
+
+ info "Installed pre-built binary to $install_dir/zeroclaw"
+ if [[ ":$PATH:" != *":$install_dir:"* ]]; then
+ warn "$install_dir is not in PATH for this shell."
+ warn "Run: export PATH=\"$install_dir:\$PATH\""
+ fi
+
+ return 0
+}
+
run_privileged() {
if [[ "$(id -u)" -eq 0 ]]; then
"$@"
@@ -65,19 +237,152 @@ run_privileged() {
fi
}
+# Best-effort container detection: well-known marker files first, then the
+# cgroup names attached to PID 1.
+is_container_runtime() {
+  [[ -f /.dockerenv || -f /run/.containerenv ]] && return 0
+  if [[ -r /proc/1/cgroup ]]; then
+    grep -Eq '(docker|containerd|kubepods|podman|lxc)' /proc/1/cgroup && return 0
+  fi
+  return 1
+}
+
+# Wrapper around `pacman` that, when running inside a container, uses a
+# temporary copy of /etc/pacman.conf with DisableSandboxSyscalls appended —
+# presumably because pacman's syscall sandbox fails under container
+# seccomp/landlock restrictions (TODO confirm on target images). Outside
+# containers it delegates straight to run_privileged.
+run_pacman() {
+ if ! have_cmd pacman; then
+ error "pacman is not available."
+ return 1
+ fi
+
+ if ! is_container_runtime; then
+ run_privileged pacman "$@"
+ return $?
+ fi
+
+ local pacman_cfg_tmp=""
+ local pacman_rc=0
+ pacman_cfg_tmp="$(mktemp /tmp/zeroclaw-pacman.XXXXXX.conf)"
+ cp /etc/pacman.conf "$pacman_cfg_tmp"
+ # Only append the option if it is not already present (anchored match
+ # tolerates leading whitespace and trailing tokens).
+ if ! grep -Eq '^[[:space:]]*DisableSandboxSyscalls([[:space:]]|$)' "$pacman_cfg_tmp"; then
+ printf '\nDisableSandboxSyscalls\n' >> "$pacman_cfg_tmp"
+ fi
+
+ # if/else capture keeps `set -e` from aborting on a failed pacman run
+ # while still preserving its exit code for the caller.
+ if run_privileged pacman --config "$pacman_cfg_tmp" "$@"; then
+ pacman_rc=0
+ else
+ pacman_rc=$?
+ fi
+
+ rm -f "$pacman_cfg_tmp"
+ return "$pacman_rc"
+}
+
+# Packages required to build zeroclaw from source on Alpine (apk-based)
+# systems: shell, compiler toolchain, pkg-config, VCS/download tools,
+# OpenSSL headers, perl (used by openssl-sys builds), and CA certs.
+ALPINE_PREREQ_PACKAGES=(
+ bash
+ build-base
+ pkgconf
+ git
+ curl
+ openssl-dev
+ perl
+ ca-certificates
+)
+# Output variable of find_missing_alpine_prereqs: the subset of
+# ALPINE_PREREQ_PACKAGES not currently installed.
+ALPINE_MISSING_PKGS=()
+
+# Populate ALPINE_MISSING_PKGS with every prerequisite `apk info -e` does
+# not report as installed. Resets the list to empty and returns success
+# when apk is unavailable, so non-Alpine callers see "nothing missing".
+find_missing_alpine_prereqs() {
+ ALPINE_MISSING_PKGS=()
+ if ! have_cmd apk; then
+ return 0
+ fi
+
+ local pkg=""
+ for pkg in "${ALPINE_PREREQ_PACKAGES[@]}"; do
+ # `apk info -e` exits non-zero when the package is absent.
+ if ! apk info -e "$pkg" >/dev/null 2>&1; then
+ ALPINE_MISSING_PKGS+=("$pkg")
+ fi
+ done
+}
+
+# Render a shell boolean literal for the plan summary: "true" -> "yes",
+# anything else -> "no".
+bool_to_word() {
+  case "$1" in
+    true) echo "yes" ;;
+    *) echo "no" ;;
+  esac
+}
+
+# Ask a yes/no question until a valid answer arrives. $1 is the question,
+# $2 the default ("yes" or "no") used when the user just presses Enter.
+# Returns 0 for yes, 1 for no; exits the installer if stdin is closed.
+prompt_yes_no() {
+  local question="$1" default_answer="$2"
+  local suffix="[y/N]" reply="" normalized=""
+
+  if [[ "$default_answer" == "yes" ]]; then
+    suffix="[Y/n]"
+  fi
+
+  while true; do
+    read -r -p "$question $suffix " reply || {
+      error "guided installer input was interrupted."
+      exit 1
+    }
+    reply="${reply:-$default_answer}"
+    # tr (rather than ${var,,}) keeps this portable to bash 3.2 on macOS.
+    normalized="$(printf '%s' "$reply" | tr '[:upper:]' '[:lower:]')"
+    case "$normalized" in
+      y|yes) return 0 ;;
+      n|no) return 1 ;;
+      *) echo "Please answer yes or no." ;;
+    esac
+  done
+}
+
install_system_deps() {
info "Installing system dependencies"
case "$(uname -s)" in
Linux)
- if have_cmd apt-get; then
+ if have_cmd apk; then
+ find_missing_alpine_prereqs
+ if [[ ${#ALPINE_MISSING_PKGS[@]} -eq 0 ]]; then
+ info "Alpine prerequisites already installed"
+ else
+ info "Installing Alpine prerequisites: ${ALPINE_MISSING_PKGS[*]}"
+ run_privileged apk add --no-cache "${ALPINE_MISSING_PKGS[@]}"
+ fi
+ elif have_cmd apt-get; then
run_privileged apt-get update -qq
run_privileged apt-get install -y build-essential pkg-config git curl
elif have_cmd dnf; then
- run_privileged dnf group install -y development-tools
- run_privileged dnf install -y pkg-config git curl
+ run_privileged dnf install -y \
+ gcc \
+ gcc-c++ \
+ make \
+ pkgconf-pkg-config \
+ git \
+ curl \
+ openssl-devel \
+ perl
+ elif have_cmd pacman; then
+ run_pacman -Sy --noconfirm
+ run_pacman -S --noconfirm --needed \
+ gcc \
+ make \
+ pkgconf \
+ git \
+ curl \
+ openssl \
+ perl \
+ ca-certificates
else
- warn "Unsupported Linux distribution. Install compiler toolchain + pkg-config + git + curl manually."
+ warn "Unsupported Linux distribution. Install compiler toolchain + pkg-config + git + curl + OpenSSL headers + perl manually."
fi
;;
Darwin)
@@ -126,22 +431,236 @@ install_rust_toolchain() {
fi
}
+# Interactive Q&A flow that configures the install by mutating the global
+# flags (INSTALL_SYSTEM_DEPS, INSTALL_RUST, SKIP_BUILD, SKIP_INSTALL,
+# RUN_ONBOARD, INTERACTIVE_ONBOARD, PROVIDER, MODEL, API_KEY), prints the
+# resulting plan, and asks for final confirmation. Exits 0 on user cancel,
+# exits 1 on interrupted input. $1 is the `uname -s` OS name.
+run_guided_installer() {
+ local os_name="$1"
+ local provider_input=""
+ local model_input=""
+ local api_key_input=""
+
+ echo
+ echo "ZeroClaw guided installer"
+ echo "Answer a few questions, then the installer will run automatically."
+ echo
+
+ # System dependencies: default yes on Linux, default no elsewhere.
+ if [[ "$os_name" == "Linux" ]]; then
+ if prompt_yes_no "Install Linux build dependencies (toolchain/pkg-config/git/curl)?" "yes"; then
+ INSTALL_SYSTEM_DEPS=true
+ fi
+ else
+ if prompt_yes_no "Install system dependencies for $os_name?" "no"; then
+ INSTALL_SYSTEM_DEPS=true
+ fi
+ fi
+
+ # Rust toolchain: only offer rustup when cargo/rustc are missing.
+ if have_cmd cargo && have_cmd rustc; then
+ info "Detected Rust toolchain: $(rustc --version)"
+ else
+ if prompt_yes_no "Rust toolchain not found. Install Rust via rustup now?" "yes"; then
+ INSTALL_RUST=true
+ fi
+ fi
+
+ if prompt_yes_no "Run a separate prebuild before install?" "yes"; then
+ SKIP_BUILD=false
+ else
+ SKIP_BUILD=true
+ fi
+
+ if prompt_yes_no "Install zeroclaw into cargo bin now?" "yes"; then
+ SKIP_INSTALL=false
+ else
+ SKIP_INSTALL=true
+ fi
+
+ # Onboarding: either interactive, or quick-mode which needs provider,
+ # optional model, and an API key. An empty API key falls back to
+ # interactive onboarding rather than failing later.
+ if prompt_yes_no "Run onboarding after install?" "no"; then
+ RUN_ONBOARD=true
+ if prompt_yes_no "Use interactive onboarding?" "yes"; then
+ INTERACTIVE_ONBOARD=true
+ else
+ INTERACTIVE_ONBOARD=false
+ if ! read -r -p "Provider [$PROVIDER]: " provider_input; then
+ error "guided installer input was interrupted."
+ exit 1
+ fi
+ if [[ -n "$provider_input" ]]; then
+ PROVIDER="$provider_input"
+ fi
+
+ if ! read -r -p "Model [${MODEL:-leave empty}]: " model_input; then
+ error "guided installer input was interrupted."
+ exit 1
+ fi
+ if [[ -n "$model_input" ]]; then
+ MODEL="$model_input"
+ fi
+
+ if [[ -z "$API_KEY" ]]; then
+ # -s suppresses echo for the secret; print a newline ourselves.
+ if ! read -r -s -p "API key (hidden, leave empty to switch to interactive onboarding): " api_key_input; then
+ echo
+ error "guided installer input was interrupted."
+ exit 1
+ fi
+ echo
+ if [[ -n "$api_key_input" ]]; then
+ API_KEY="$api_key_input"
+ else
+ warn "No API key entered. Using interactive onboarding instead."
+ INTERACTIVE_ONBOARD=true
+ fi
+ fi
+ fi
+ fi
+
+ # Summarize the chosen plan in positive terms (install/build) rather
+ # than the negated SKIP_* flags used internally.
+ echo
+ info "Installer plan"
+ local install_binary=true
+ local build_first=false
+ if [[ "$SKIP_INSTALL" == true ]]; then
+ install_binary=false
+ fi
+ if [[ "$SKIP_BUILD" == false ]]; then
+ build_first=true
+ fi
+ echo " docker-mode: $(bool_to_word "$DOCKER_MODE")"
+ echo " install-system-deps: $(bool_to_word "$INSTALL_SYSTEM_DEPS")"
+ echo " install-rust: $(bool_to_word "$INSTALL_RUST")"
+ echo " build-first: $(bool_to_word "$build_first")"
+ echo " install-binary: $(bool_to_word "$install_binary")"
+ echo " onboard: $(bool_to_word "$RUN_ONBOARD")"
+ if [[ "$RUN_ONBOARD" == true ]]; then
+ echo " interactive-onboard: $(bool_to_word "$INTERACTIVE_ONBOARD")"
+ if [[ "$INTERACTIVE_ONBOARD" == false ]]; then
+ echo " provider: $PROVIDER"
+ if [[ -n "$MODEL" ]]; then
+ echo " model: $MODEL"
+ fi
+ fi
+ fi
+
+ echo
+ if ! prompt_yes_no "Proceed with this install plan?" "yes"; then
+ info "Installation canceled by user."
+ exit 0
+ fi
+}
+
+# Preflight for --docker mode: require the docker CLI and a reachable
+# daemon, exiting with actionable guidance otherwise.
+ensure_docker_ready() {
+  have_cmd docker || {
+    error "docker is not installed."
+    cat <<'MSG' >&2
+Install Docker first, then re-run with:
+ ./zeroclaw_install.sh --docker
+MSG
+    exit 1
+  }
+
+  docker info >/dev/null 2>&1 || {
+    error "Docker daemon is not reachable."
+    error "Start Docker and re-run bootstrap."
+    exit 1
+  }
+}
+
+# Containerized install path: optionally build the release Docker image,
+# then run onboarding inside a container with config/workspace persisted
+# under a host-side data directory. Reads the globals TEMP_CLONE, WORK_DIR,
+# SKIP_INSTALL, SKIP_BUILD, INTERACTIVE_ONBOARD, API_KEY, PROVIDER, MODEL;
+# exports the chosen data dir via DOCKER_DATA_DIR for the caller's summary.
+run_docker_bootstrap() {
+ local docker_image docker_data_dir default_data_dir
+ docker_image="${ZEROCLAW_DOCKER_IMAGE:-zeroclaw-bootstrap:local}"
+ # A temp clone disappears after bootstrap, so persist under $HOME then;
+ # otherwise keep data next to the working copy.
+ if [[ "$TEMP_CLONE" == true ]]; then
+ default_data_dir="$HOME/.zeroclaw-docker"
+ else
+ default_data_dir="$WORK_DIR/.zeroclaw-docker"
+ fi
+ docker_data_dir="${ZEROCLAW_DOCKER_DATA_DIR:-$default_data_dir}"
+ DOCKER_DATA_DIR="$docker_data_dir"
+
+ mkdir -p "$docker_data_dir/.zeroclaw" "$docker_data_dir/workspace"
+
+ if [[ "$SKIP_INSTALL" == true ]]; then
+ warn "--skip-install has no effect with --docker."
+ fi
+
+ if [[ "$SKIP_BUILD" == false ]]; then
+ info "Building Docker image ($docker_image)"
+ docker build --target release -t "$docker_image" "$WORK_DIR"
+ else
+ info "Skipping Docker image build"
+ fi
+
+ info "Docker data directory: $docker_data_dir"
+
+ # Build the in-container command: interactive onboarding, or quick
+ # onboarding which requires an API key up front.
+ local onboard_cmd=()
+ if [[ "$INTERACTIVE_ONBOARD" == true ]]; then
+ info "Launching interactive onboarding in container"
+ onboard_cmd=(onboard --interactive)
+ else
+ if [[ -z "$API_KEY" ]]; then
+ cat <<'MSG'
+==> Onboarding requested, but API key not provided.
+Use either:
+ --api-key "sk-..."
+or:
+ ZEROCLAW_API_KEY="sk-..." ./zeroclaw_install.sh --docker
+or run interactive:
+ ./zeroclaw_install.sh --docker --interactive-onboard
+MSG
+ exit 1
+ fi
+ if [[ -n "$MODEL" ]]; then
+ info "Launching quick onboarding in container (provider: $PROVIDER, model: $MODEL)"
+ else
+ info "Launching quick onboarding in container (provider: $PROVIDER)"
+ fi
+ onboard_cmd=(onboard --api-key "$API_KEY" --provider "$PROVIDER")
+ if [[ -n "$MODEL" ]]; then
+ onboard_cmd+=(--model "$MODEL")
+ fi
+ fi
+
+ # Run as the invoking user so files created in the bind mounts are not
+ # root-owned on the host; HOME points at the mounted data dir.
+ docker run --rm -it \
+ --user "$(id -u):$(id -g)" \
+ -e HOME=/zeroclaw-data \
+ -e ZEROCLAW_WORKSPACE=/zeroclaw-data/workspace \
+ -v "$docker_data_dir/.zeroclaw:/zeroclaw-data/.zeroclaw" \
+ -v "$docker_data_dir/workspace:/zeroclaw-data/workspace" \
+ "$docker_image" \
+ "${onboard_cmd[@]}"
+}
+
SCRIPT_PATH="${BASH_SOURCE[0]:-$0}"
SCRIPT_DIR="$(cd "$(dirname "$SCRIPT_PATH")" >/dev/null 2>&1 && pwd || pwd)"
ROOT_DIR="$(cd "$SCRIPT_DIR/.." >/dev/null 2>&1 && pwd || pwd)"
REPO_URL="https://github.com/zeroclaw-labs/zeroclaw.git"
+ORIGINAL_ARG_COUNT=$#
+GUIDED_MODE="auto"
+DOCKER_MODE=false
INSTALL_SYSTEM_DEPS=false
INSTALL_RUST=false
+PREFER_PREBUILT=false
+PREBUILT_ONLY=false
+FORCE_SOURCE_BUILD=false
RUN_ONBOARD=false
INTERACTIVE_ONBOARD=false
SKIP_BUILD=false
SKIP_INSTALL=false
+PREBUILT_INSTALLED=false
API_KEY="${ZEROCLAW_API_KEY:-}"
PROVIDER="${ZEROCLAW_PROVIDER:-openrouter}"
+MODEL="${ZEROCLAW_MODEL:-}"
while [[ $# -gt 0 ]]; do
case "$1" in
+ --guided)
+ GUIDED_MODE="on"
+ shift
+ ;;
+ --no-guided)
+ GUIDED_MODE="off"
+ shift
+ ;;
+ --docker)
+ DOCKER_MODE=true
+ shift
+ ;;
--install-system-deps)
INSTALL_SYSTEM_DEPS=true
shift
@@ -150,6 +669,18 @@ while [[ $# -gt 0 ]]; do
INSTALL_RUST=true
shift
;;
+ --prefer-prebuilt)
+ PREFER_PREBUILT=true
+ shift
+ ;;
+ --prebuilt-only)
+ PREBUILT_ONLY=true
+ shift
+ ;;
+ --force-source-build)
+ FORCE_SOURCE_BUILD=true
+ shift
+ ;;
--onboard)
RUN_ONBOARD=true
shift
@@ -175,6 +706,18 @@ while [[ $# -gt 0 ]]; do
}
shift 2
;;
+ --model)
+ MODEL="${2:-}"
+ [[ -n "$MODEL" ]] || {
+ error "--model requires a value"
+ exit 1
+ }
+ shift 2
+ ;;
+ --build-first)
+ SKIP_BUILD=false
+ shift
+ ;;
--skip-build)
SKIP_BUILD=true
shift
@@ -196,22 +739,48 @@ while [[ $# -gt 0 ]]; do
esac
done
-if [[ "$INSTALL_SYSTEM_DEPS" == true ]]; then
- install_system_deps
+OS_NAME="$(uname -s)"
+if [[ "$GUIDED_MODE" == "auto" ]]; then
+ if [[ "$OS_NAME" == "Linux" && "$ORIGINAL_ARG_COUNT" -eq 0 && -t 0 && -t 1 ]]; then
+ GUIDED_MODE="on"
+ else
+ GUIDED_MODE="off"
+ fi
fi
-if [[ "$INSTALL_RUST" == true ]]; then
- install_rust_toolchain
+if [[ "$DOCKER_MODE" == true && "$GUIDED_MODE" == "on" ]]; then
+ warn "--guided is ignored with --docker."
+ GUIDED_MODE="off"
fi
-if ! have_cmd cargo; then
- error "cargo is not installed."
- cat <<'MSG' >&2
-Install Rust first: https://rustup.rs/
-or re-run with:
- ./bootstrap.sh --install-rust
-MSG
- exit 1
+if [[ "$GUIDED_MODE" == "on" ]]; then
+ run_guided_installer "$OS_NAME"
+fi
+
+if [[ "$DOCKER_MODE" == true ]]; then
+ if [[ "$INSTALL_SYSTEM_DEPS" == true ]]; then
+ warn "--install-system-deps is ignored with --docker."
+ fi
+ if [[ "$INSTALL_RUST" == true ]]; then
+ warn "--install-rust is ignored with --docker."
+ fi
+else
+ if [[ "$OS_NAME" == "Linux" && -z "${ZEROCLAW_DISABLE_ALPINE_AUTO_DEPS:-}" ]] && have_cmd apk; then
+ find_missing_alpine_prereqs
+ if [[ ${#ALPINE_MISSING_PKGS[@]} -gt 0 && "$INSTALL_SYSTEM_DEPS" == false ]]; then
+ info "Detected Alpine with missing prerequisites: ${ALPINE_MISSING_PKGS[*]}"
+ info "Auto-enabling system dependency installation (set ZEROCLAW_DISABLE_ALPINE_AUTO_DEPS=1 to disable)."
+ INSTALL_SYSTEM_DEPS=true
+ fi
+ fi
+
+ if [[ "$INSTALL_SYSTEM_DEPS" == true ]]; then
+ install_system_deps
+ fi
+
+ if [[ "$INSTALL_RUST" == true ]]; then
+ install_rust_toolchain
+ fi
fi
WORK_DIR="$ROOT_DIR"
@@ -254,6 +823,73 @@ echo " workspace: $WORK_DIR"
cd "$WORK_DIR"
+if [[ "$FORCE_SOURCE_BUILD" == true ]]; then
+ PREFER_PREBUILT=false
+ PREBUILT_ONLY=false
+fi
+
+if [[ "$PREBUILT_ONLY" == true ]]; then
+ PREFER_PREBUILT=true
+fi
+
+if [[ "$DOCKER_MODE" == true ]]; then
+ ensure_docker_ready
+ if [[ "$RUN_ONBOARD" == false ]]; then
+ RUN_ONBOARD=true
+ if [[ -z "$API_KEY" ]]; then
+ INTERACTIVE_ONBOARD=true
+ fi
+ fi
+ run_docker_bootstrap
+ cat <<'DONE'
+
+✅ Docker bootstrap complete.
+
+Your containerized ZeroClaw data is persisted under:
+DONE
+ echo " $DOCKER_DATA_DIR"
+ cat <<'DONE'
+
+Next steps:
+ ./zeroclaw_install.sh --docker --interactive-onboard
+ ./zeroclaw_install.sh --docker --api-key "sk-..." --provider openrouter
+DONE
+ exit 0
+fi
+
+if [[ "$FORCE_SOURCE_BUILD" == false ]]; then
+ if [[ "$PREFER_PREBUILT" == false && "$PREBUILT_ONLY" == false ]]; then
+ if should_attempt_prebuilt_for_resources "$WORK_DIR"; then
+ info "Attempting pre-built binary first due to resource preflight."
+ PREFER_PREBUILT=true
+ fi
+ fi
+
+ if [[ "$PREFER_PREBUILT" == true ]]; then
+ if install_prebuilt_binary; then
+ PREBUILT_INSTALLED=true
+ SKIP_BUILD=true
+ SKIP_INSTALL=true
+ elif [[ "$PREBUILT_ONLY" == true ]]; then
+ error "Pre-built-only mode requested, but no compatible release asset is available."
+ error "Try again later, or run with --force-source-build on a machine with enough RAM/disk."
+ exit 1
+ else
+ warn "Pre-built install unavailable; falling back to source build."
+ fi
+ fi
+fi
+
+if [[ "$PREBUILT_INSTALLED" == false && ( "$SKIP_BUILD" == false || "$SKIP_INSTALL" == false ) ]] && ! have_cmd cargo; then
+ error "cargo is not installed."
+ cat <<'MSG' >&2
+Install Rust first: https://rustup.rs/
+or re-run with:
+ ./zeroclaw_install.sh --install-rust
+MSG
+ exit 1
+fi
+
if [[ "$SKIP_BUILD" == false ]]; then
info "Building release binary"
cargo build --release --locked
@@ -271,6 +907,8 @@ fi
ZEROCLAW_BIN=""
if have_cmd zeroclaw; then
ZEROCLAW_BIN="zeroclaw"
+elif [[ -x "$HOME/.cargo/bin/zeroclaw" ]]; then
+ ZEROCLAW_BIN="$HOME/.cargo/bin/zeroclaw"
elif [[ -x "$WORK_DIR/target/release/zeroclaw" ]]; then
ZEROCLAW_BIN="$WORK_DIR/target/release/zeroclaw"
fi
@@ -292,14 +930,22 @@ if [[ "$RUN_ONBOARD" == true ]]; then
Use either:
--api-key "sk-..."
or:
- ZEROCLAW_API_KEY="sk-..." ./bootstrap.sh --onboard
+ ZEROCLAW_API_KEY="sk-..." ./zeroclaw_install.sh --onboard
or run interactive:
- ./bootstrap.sh --interactive-onboard
+ ./zeroclaw_install.sh --interactive-onboard
MSG
exit 1
fi
- info "Running quick onboarding (provider: $PROVIDER)"
- "$ZEROCLAW_BIN" onboard --api-key "$API_KEY" --provider "$PROVIDER"
+ if [[ -n "$MODEL" ]]; then
+ info "Running quick onboarding (provider: $PROVIDER, model: $MODEL)"
+ else
+ info "Running quick onboarding (provider: $PROVIDER)"
+ fi
+ ONBOARD_CMD=("$ZEROCLAW_BIN" onboard --api-key "$API_KEY" --provider "$PROVIDER")
+ if [[ -n "$MODEL" ]]; then
+ ONBOARD_CMD+=(--model "$MODEL")
+ fi
+ "${ONBOARD_CMD[@]}"
fi
fi
diff --git a/scripts/ci/fetch_actions_data.py b/scripts/ci/fetch_actions_data.py
new file mode 100644
index 0000000..32ebb5b
--- /dev/null
+++ b/scripts/ci/fetch_actions_data.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env python3
+"""Fetch GitHub Actions workflow runs for a given date and summarize costs.
+
+Usage:
+ python fetch_actions_data.py [OPTIONS]
+
+Options:
+ --date YYYY-MM-DD Date to query (default: yesterday)
+ --mode brief|full Output mode (default: full)
+ brief: billable minutes/hours table only
+ full: detailed breakdown with per-run list
+ --repo OWNER/NAME Repository (default: zeroclaw-labs/zeroclaw)
+ -h, --help Show this help message
+"""
+
+import argparse
+import json
+import subprocess
+from datetime import datetime, timedelta, timezone
+
+
+def parse_args():
+    """Parse command-line options: query date, output mode, repository."""
+    default_date = (datetime.now(timezone.utc) - timedelta(days=1)).strftime("%Y-%m-%d")
+    cli = argparse.ArgumentParser(
+        description="Fetch GitHub Actions workflow runs and summarize costs.",
+    )
+    cli.add_argument(
+        "--date",
+        default=default_date,
+        help="Date to query in YYYY-MM-DD format (default: yesterday)",
+    )
+    cli.add_argument(
+        "--mode",
+        choices=["brief", "full"],
+        default="full",
+        help="Output mode: 'brief' for billable hours only, 'full' for detailed breakdown (default: full)",
+    )
+    cli.add_argument(
+        "--repo",
+        default="zeroclaw-labs/zeroclaw",
+        help="Repository in OWNER/NAME format (default: zeroclaw-labs/zeroclaw)",
+    )
+    return cli.parse_args()
+
+
+def fetch_runs(repo, date_str, page=1, per_page=100):
+    """Fetch one page of workflow runs created on `date_str`.
+
+    Returns the decoded JSON payload, or an empty dict when curl exits
+    non-zero or returns a non-JSON body (network error, rate limiting),
+    so the caller treats failures as "no runs" instead of crashing on
+    json.loads of an empty/invalid stdout.
+    """
+    url = (
+        f"https://api.github.com/repos/{repo}/actions/runs"
+        f"?created={date_str}&per_page={per_page}&page={page}"
+    )
+    result = subprocess.run(
+        ["curl", "-sS", "-H", "Accept: application/vnd.github+json", url],
+        capture_output=True, text=True
+    )
+    if result.returncode != 0:
+        return {}
+    try:
+        return json.loads(result.stdout)
+    except json.JSONDecodeError:
+        return {}
+
+
+def fetch_jobs(repo, run_id):
+    """Fetch jobs for a specific run.
+
+    Returns the decoded JSON payload, or an empty dict when curl fails or
+    the body is not valid JSON, mirroring fetch_runs' failure handling.
+    """
+    url = f"https://api.github.com/repos/{repo}/actions/runs/{run_id}/jobs?per_page=100"
+    result = subprocess.run(
+        ["curl", "-sS", "-H", "Accept: application/vnd.github+json", url],
+        capture_output=True, text=True
+    )
+    if result.returncode != 0:
+        return {}
+    try:
+        return json.loads(result.stdout)
+    except json.JSONDecodeError:
+        return {}
+
+
+def parse_duration(started, completed):
+    """Return the non-negative span in seconds between two ISO-8601 stamps.
+
+    Missing or unparseable timestamps yield 0 rather than raising.
+    """
+    if not (started and completed):
+        return 0
+    try:
+        begin = datetime.fromisoformat(started.replace("Z", "+00:00"))
+        end = datetime.fromisoformat(completed.replace("Z", "+00:00"))
+    except Exception:
+        return 0
+    return max(0, (end - begin).total_seconds())
+
+
+def main():
+    """Fetch runs for the requested date and print a cost summary."""
+    args = parse_args()
+    repo = args.repo
+    date_str = args.date
+    brief = args.mode == "brief"
+
+    print(f"Fetching workflow runs for {repo} on {date_str}...")
+    print("=" * 100)
+
+    # Paginate manually; the REST API caps per_page at 100.
+    all_runs = []
+    for page in range(1, 5):  # up to 400 runs
+        data = fetch_runs(repo, date_str, page=page)
+        runs = data.get("workflow_runs", [])
+        if not runs:
+            break
+        all_runs.extend(runs)
+        if len(runs) < 100:
+            break
+
+    print(f"Total workflow runs found: {len(all_runs)}")
+    print()
+
+    # Group by workflow name. The API returns JSON null for some fields
+    # (notably `conclusion` on in-progress runs), and dict.get's default
+    # does NOT apply when the key exists with a null value — so coalesce
+    # with `or`, otherwise the fixed-width format specs below (e.g.
+    # f"{conclusion:<12}") raise TypeError on None.
+    workflow_stats = {}
+    for run in all_runs:
+        name = run.get("name") or "Unknown"
+        event = run.get("event") or "unknown"
+        conclusion = run.get("conclusion") or "unknown"
+        run_id = run.get("id")
+
+        if name not in workflow_stats:
+            workflow_stats[name] = {
+                "count": 0,
+                "events": {},
+                "conclusions": {},
+                "total_job_seconds": 0,
+                "total_jobs": 0,
+                "run_ids": [],
+            }
+
+        workflow_stats[name]["count"] += 1
+        workflow_stats[name]["events"][event] = workflow_stats[name]["events"].get(event, 0) + 1
+        workflow_stats[name]["conclusions"][conclusion] = workflow_stats[name]["conclusions"].get(conclusion, 0) + 1
+        workflow_stats[name]["run_ids"].append(run_id)
+
+    # For each workflow, sample up to 3 runs to get job-level timing.
+    print("Sampling job-level timing (up to 3 runs per workflow)...")
+    print()
+
+    for name, stats in workflow_stats.items():
+        sample_ids = stats["run_ids"][:3]
+        for run_id in sample_ids:
+            jobs_data = fetch_jobs(repo, run_id)
+            jobs = jobs_data.get("jobs", [])
+            for job in jobs:
+                started = job.get("started_at")
+                completed = job.get("completed_at")
+                duration = parse_duration(started, completed)
+                stats["total_job_seconds"] += duration
+                stats["total_jobs"] += 1
+
+        # Extrapolate: if we sampled N of M runs, scale the sampled time up.
+        sampled = len(sample_ids)
+        total = stats["count"]
+        if 0 < sampled < total:
+            scale = total / sampled
+            stats["estimated_total_seconds"] = stats["total_job_seconds"] * scale
+        else:
+            stats["estimated_total_seconds"] = stats["total_job_seconds"]
+
+    # Print summary sorted by estimated cost (descending).
+    sorted_workflows = sorted(
+        workflow_stats.items(),
+        key=lambda x: x[1]["estimated_total_seconds"],
+        reverse=True
+    )
+
+    if brief:
+        # Brief mode: compact billable hours table.
+        print(f"{'Workflow':<40} {'Runs':>5} {'Est.Mins':>9} {'Est.Hours':>10}")
+        print("-" * 68)
+        grand_total_minutes = 0
+        for name, stats in sorted_workflows:
+            est_mins = stats["estimated_total_seconds"] / 60
+            grand_total_minutes += est_mins
+            print(f"{name:<40} {stats['count']:>5} {est_mins:>9.1f} {est_mins/60:>10.2f}")
+        print("-" * 68)
+        print(f"{'TOTAL':<40} {len(all_runs):>5} {grand_total_minutes:>9.0f} {grand_total_minutes/60:>10.1f}")
+        print(f"\nProjected monthly: ~{grand_total_minutes/60*30:.0f} hours")
+    else:
+        # Full mode: detailed breakdown with per-run list.
+        print("=" * 100)
+        print(f"{'Workflow':<40} {'Runs':>5} {'SampledJobs':>12} {'SampledMins':>12} {'Est.TotalMins':>14} {'Events'}")
+        print("-" * 100)
+
+        grand_total_minutes = 0
+        for name, stats in sorted_workflows:
+            sampled_mins = stats["total_job_seconds"] / 60
+            est_total_mins = stats["estimated_total_seconds"] / 60
+            grand_total_minutes += est_total_mins
+            events_str = ", ".join(f"{k}={v}" for k, v in stats["events"].items())
+            conclusions_str = ", ".join(f"{k}={v}" for k, v in stats["conclusions"].items())
+            print(
+                f"{name:<40} {stats['count']:>5} {stats['total_jobs']:>12} "
+                f"{sampled_mins:>12.1f} {est_total_mins:>14.1f} {events_str}"
+            )
+            print(f"{'':>40} {'':>5} {'':>12} {'':>12} {'':>14} outcomes: {conclusions_str}")
+
+        print("-" * 100)
+        print(f"{'GRAND TOTAL':>40} {len(all_runs):>5} {'':>12} {'':>12} {grand_total_minutes:>14.1f}")
+        print(f"\nEstimated total billable minutes on {date_str}: {grand_total_minutes:.0f} min ({grand_total_minutes/60:.1f} hours)")
+        print()
+
+        # Also show raw run list; coalesce null fields here too (see above).
+        print("\n" + "=" * 100)
+        print("DETAILED RUN LIST")
+        print("=" * 100)
+        for run in all_runs:
+            name = run.get("name") or "Unknown"
+            event = run.get("event") or "unknown"
+            conclusion = run.get("conclusion") or "unknown"
+            run_id = run.get("id")
+            started = run.get("run_started_at", "?")
+            print(f" [{run_id}] {name:<40} conclusion={conclusion:<12} event={event:<20} started={started}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/install.sh b/scripts/install.sh
index 68efa95..478bdd5 100755
--- a/scripts/install.sh
+++ b/scripts/install.sh
@@ -2,10 +2,15 @@
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]:-$0}")" >/dev/null 2>&1 && pwd || pwd)"
+INSTALLER_LOCAL="$(cd "$SCRIPT_DIR/.." >/dev/null 2>&1 && pwd || pwd)/zeroclaw_install.sh"
BOOTSTRAP_LOCAL="$SCRIPT_DIR/bootstrap.sh"
REPO_URL="https://github.com/zeroclaw-labs/zeroclaw.git"
-echo "[deprecated] scripts/install.sh -> bootstrap.sh" >&2
+echo "[deprecated] scripts/install.sh -> ./zeroclaw_install.sh" >&2
+
+if [[ -x "$INSTALLER_LOCAL" ]]; then
+ exec "$INSTALLER_LOCAL" "$@"
+fi
if [[ -f "$BOOTSTRAP_LOCAL" ]]; then
exec "$BOOTSTRAP_LOCAL" "$@"
@@ -24,35 +29,15 @@ trap cleanup EXIT
git clone --depth 1 "$REPO_URL" "$TEMP_DIR" >/dev/null 2>&1
+if [[ -x "$TEMP_DIR/zeroclaw_install.sh" ]]; then
+ exec "$TEMP_DIR/zeroclaw_install.sh" "$@"
+fi
+
if [[ -x "$TEMP_DIR/scripts/bootstrap.sh" ]]; then
- "$TEMP_DIR/scripts/bootstrap.sh" "$@"
- exit 0
+ exec "$TEMP_DIR/scripts/bootstrap.sh" "$@"
fi
-echo "[deprecated] cloned revision has no bootstrap.sh; falling back to legacy source install flow" >&2
-
-if [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
- cat <<'USAGE'
-Legacy install.sh fallback mode
-
-Behavior:
- - Clone repository
- - cargo build --release --locked
- - cargo install --path --force --locked
-
-For the new dual-mode installer, use:
- ./bootstrap.sh --help
-USAGE
- exit 0
-fi
-
-if ! command -v cargo >/dev/null 2>&1; then
- echo "error: cargo is required for legacy install.sh fallback mode" >&2
- echo "Install Rust first: https://rustup.rs/" >&2
- exit 1
-fi
-
-cargo build --release --locked --manifest-path "$TEMP_DIR/Cargo.toml"
-cargo install --path "$TEMP_DIR" --force --locked
-
-echo "Legacy source install completed." >&2
+echo "error: zeroclaw_install.sh/bootstrap.sh was not found in the fetched revision." >&2
+echo "Run the local bootstrap directly when possible:" >&2
+echo " ./zeroclaw_install.sh --help" >&2
+exit 1
diff --git a/src/agent/agent.rs b/src/agent/agent.rs
index dc8f74d..5f048e2 100644
--- a/src/agent/agent.rs
+++ b/src/agent/agent.rs
@@ -10,7 +10,6 @@ use crate::providers::{self, ChatMessage, ChatRequest, ConversationMessage, Prov
use crate::runtime;
use crate::security::SecurityPolicy;
use crate::tools::{self, Tool, ToolSpec};
-use crate::util::truncate_with_ellipsis;
use anyhow::Result;
use std::io::Write as IoWrite;
use std::sync::Arc;
@@ -229,8 +228,9 @@ impl Agent {
&config.workspace_dir,
));
- let memory: Arc = Arc::from(memory::create_memory_with_storage(
+ let memory: Arc = Arc::from(memory::create_memory_with_storage_and_routes(
&config.memory,
+ &config.embedding_routes,
Some(&config.storage.provider.config),
&config.workspace_dir,
config.api_key.as_deref(),
@@ -308,7 +308,10 @@ impl Agent {
.classification_config(config.query_classification.clone())
.available_hints(available_hints)
.identity_config(config.identity.clone())
- .skills(crate::skills::load_skills(&config.workspace_dir))
+ .skills(crate::skills::load_skills_with_config(
+ &config.workspace_dir,
+ config,
+ ))
.auto_save(config.memory.auto_save)
.build()
}
@@ -400,11 +403,8 @@ impl Agent {
return results;
}
- let mut results = Vec::with_capacity(calls.len());
- for call in calls {
- results.push(self.execute_tool_call(call).await);
- }
- results
+ let futs: Vec<_> = calls.iter().map(|call| self.execute_tool_call(call)).collect();
+ futures::future::join_all(futs).await
}
fn classify_model(&self, user_message: &str) -> String {
@@ -486,14 +486,6 @@ impl Agent {
)));
self.trim_history();
- if self.auto_save {
- let summary = truncate_with_ellipsis(&final_text, 100);
- let _ = self
- .memory
- .store("assistant_resp", &summary, MemoryCategory::Daily, None)
- .await;
- }
-
return Ok(final_text);
}
@@ -686,7 +678,8 @@ mod tests {
..crate::config::MemoryConfig::default()
};
let mem: Arc = Arc::from(
- crate::memory::create_memory(&memory_cfg, std::path::Path::new("/tmp"), None).unwrap(),
+ crate::memory::create_memory(&memory_cfg, std::path::Path::new("/tmp"), None)
+ .expect("memory creation should succeed with valid config"),
);
let observer: Arc = Arc::from(crate::observability::NoopObserver {});
@@ -698,7 +691,7 @@ mod tests {
.tool_dispatcher(Box::new(XmlToolDispatcher))
.workspace_dir(std::path::PathBuf::from("/tmp"))
.build()
- .unwrap();
+ .expect("agent builder should succeed with valid config");
let response = agent.turn("hi").await.unwrap();
assert_eq!(response, "hello");
@@ -728,7 +721,8 @@ mod tests {
..crate::config::MemoryConfig::default()
};
let mem: Arc = Arc::from(
- crate::memory::create_memory(&memory_cfg, std::path::Path::new("/tmp"), None).unwrap(),
+ crate::memory::create_memory(&memory_cfg, std::path::Path::new("/tmp"), None)
+ .expect("memory creation should succeed with valid config"),
);
let observer: Arc = Arc::from(crate::observability::NoopObserver {});
@@ -740,7 +734,7 @@ mod tests {
.tool_dispatcher(Box::new(NativeToolDispatcher))
.workspace_dir(std::path::PathBuf::from("/tmp"))
.build()
- .unwrap();
+ .expect("agent builder should succeed with valid config");
let response = agent.turn("hi").await.unwrap();
assert_eq!(response, "done");
diff --git a/src/agent/loop_.rs b/src/agent/loop_.rs
index caa7e53..288ea27 100644
--- a/src/agent/loop_.rs
+++ b/src/agent/loop_.rs
@@ -1,8 +1,11 @@
use crate::approval::{ApprovalManager, ApprovalRequest, ApprovalResponse};
use crate::config::Config;
use crate::memory::{self, Memory, MemoryCategory};
+use crate::multimodal;
use crate::observability::{self, Observer, ObserverEvent};
-use crate::providers::{self, ChatMessage, ChatRequest, Provider, ToolCall};
+use crate::providers::{
+ self, ChatMessage, ChatRequest, Provider, ProviderCapabilityError, ToolCall,
+};
use crate::runtime;
use crate::security::SecurityPolicy;
use crate::tools::{self, Tool};
@@ -13,6 +16,7 @@ use std::fmt::Write;
use std::io::Write as _;
use std::sync::{Arc, LazyLock};
use std::time::Instant;
+use tokio_util::sync::CancellationToken;
use uuid::Uuid;
/// Minimum characters per chunk when relaying LLM text to a streaming draft.
@@ -22,6 +26,10 @@ const STREAM_CHUNK_MIN_CHARS: usize = 80;
/// Used as a safe fallback when `max_tool_iterations` is unset or configured as zero.
const DEFAULT_MAX_TOOL_ITERATIONS: usize = 10;
+/// Minimum user-message length (in chars) for auto-save to memory.
+/// Matches the channel-side constant in `channels/mod.rs`.
+const AUTOSAVE_MIN_MESSAGE_CHARS: usize = 20;
+
static SENSITIVE_KEY_PATTERNS: LazyLock = LazyLock::new(|| {
RegexSet::new([
r"(?i)token",
@@ -223,9 +231,16 @@ async fn build_context(mem: &dyn Memory, user_msg: &str, min_relevance_score: f6
if !relevant.is_empty() {
context.push_str("[Memory context]\n");
for entry in &relevant {
+ if memory::is_assistant_autosave_key(&entry.key) {
+ continue;
+ }
let _ = writeln!(context, "- {}: {}", entry.key, entry.content);
}
- context.push('\n');
+ if context != "[Memory context]\n" {
+ context.push('\n');
+ } else {
+ context.clear();
+ }
}
}
@@ -579,6 +594,17 @@ fn parse_glm_style_tool_calls(text: &str) -> Vec<(String, serde_json::Value, Opt
calls
}
+// ── Tool-Call Parsing ─────────────────────────────────────────────────────
+// LLM responses may contain tool calls in multiple formats depending on
+// the provider. Parsing follows a priority chain:
+// 1. OpenAI-style JSON with `tool_calls` array (native API)
+// 2. XML tags: , , ,
+// 3. Markdown code blocks with `tool_call` language
+// 4. GLM-style line-based format (e.g. `shell/command>ls`)
+// SECURITY: We never fall back to extracting arbitrary JSON from the
+// response body, because that would enable prompt-injection attacks where
+// malicious content in emails/files/web pages mimics a tool call.
+
/// Parse tool calls from an LLM response that uses XML-style function calling.
///
/// Expected format (common with system-prompt-guided tool use):
@@ -813,6 +839,21 @@ struct ParsedToolCall {
arguments: serde_json::Value,
}
+#[derive(Debug)]
+pub(crate) struct ToolLoopCancelled;
+
+impl std::fmt::Display for ToolLoopCancelled {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.write_str("tool loop cancelled")
+ }
+}
+
+impl std::error::Error for ToolLoopCancelled {}
+
+pub(crate) fn is_tool_loop_cancelled(err: &anyhow::Error) -> bool {
+ err.chain().any(|source| source.is::())
+}
+
/// Execute a single turn of the agent loop: send messages, parse tool calls,
/// execute tools, and loop until the LLM produces a final text response.
/// When `silent` is true, suppresses stdout (for channel use).
@@ -826,6 +867,7 @@ pub(crate) async fn agent_turn(
model: &str,
temperature: f64,
silent: bool,
+ multimodal_config: &crate::config::MultimodalConfig,
max_tool_iterations: usize,
) -> Result {
run_tool_call_loop(
@@ -839,12 +881,26 @@ pub(crate) async fn agent_turn(
silent,
None,
"channel",
+ multimodal_config,
max_tool_iterations,
None,
+ None,
)
.await
}
+// ── Agent Tool-Call Loop ──────────────────────────────────────────────────
+// Core agentic iteration: send conversation to the LLM, parse any tool
+// calls from the response, execute them, append results to history, and
+// repeat until the LLM produces a final text-only answer.
+//
+// Loop invariant: at the start of each iteration, `history` contains the
+// full conversation so far (system prompt + user messages + prior tool
+// results). The loop exits when:
+// • the LLM returns no tool calls (final answer), or
+// • max_iterations is reached (runaway safety), or
+// • the cancellation token fires (external abort).
+
/// Execute a single turn of the agent loop: send messages, parse tool calls,
/// execute tools, and loop until the LLM produces a final text response.
#[allow(clippy::too_many_arguments)]
@@ -859,7 +915,9 @@ pub(crate) async fn run_tool_call_loop(
silent: bool,
approval: Option<&ApprovalManager>,
channel_name: &str,
+ multimodal_config: &crate::config::MultimodalConfig,
max_tool_iterations: usize,
+ cancellation_token: Option,
on_delta: Option>,
) -> Result {
let max_iterations = if max_tool_iterations == 0 {
@@ -873,6 +931,28 @@ pub(crate) async fn run_tool_call_loop(
let use_native_tools = provider.supports_native_tools() && !tool_specs.is_empty();
for _iteration in 0..max_iterations {
+ if cancellation_token
+ .as_ref()
+ .is_some_and(CancellationToken::is_cancelled)
+ {
+ return Err(ToolLoopCancelled.into());
+ }
+
+ let image_marker_count = multimodal::count_image_markers(history);
+ if image_marker_count > 0 && !provider.supports_vision() {
+ return Err(ProviderCapabilityError {
+ provider: provider_name.to_string(),
+ capability: "vision".to_string(),
+ message: format!(
+ "received {image_marker_count} image marker(s), but this provider does not support vision input"
+ ),
+ }
+ .into());
+ }
+
+ let prepared_messages =
+ multimodal::prepare_messages_for_provider(history, multimodal_config).await?;
+
observer.record_event(&ObserverEvent::LlmRequest {
provider: provider_name.to_string(),
model: model.to_string(),
@@ -889,18 +969,26 @@ pub(crate) async fn run_tool_call_loop(
None
};
+ let chat_future = provider.chat(
+ ChatRequest {
+ messages: &prepared_messages.messages,
+ tools: request_tools,
+ },
+ model,
+ temperature,
+ );
+
+ let chat_result = if let Some(token) = cancellation_token.as_ref() {
+ tokio::select! {
+ () = token.cancelled() => return Err(ToolLoopCancelled.into()),
+ result = chat_future => result,
+ }
+ } else {
+ chat_future.await
+ };
+
let (response_text, parsed_text, tool_calls, assistant_history_content, native_tool_calls) =
- match provider
- .chat(
- ChatRequest {
- messages: history,
- tools: request_tools,
- },
- model,
- temperature,
- )
- .await
- {
+ match chat_result {
Ok(resp) => {
observer.record_event(&ObserverEvent::LlmResponse {
provider: provider_name.to_string(),
@@ -911,6 +999,10 @@ pub(crate) async fn run_tool_call_loop(
});
let response_text = resp.text_or_empty().to_string();
+ // First try native structured tool calls (OpenAI-format).
+ // Fall back to text-based parsing (XML tags, markdown blocks,
+ // GLM format) only if the provider returned no native calls —
+ // this ensures we support both native and prompt-guided models.
let mut calls = parse_structured_tool_calls(&resp.tool_calls);
let mut parsed_text = String::new();
@@ -966,6 +1058,12 @@ pub(crate) async fn run_tool_call_loop(
// STREAM_CHUNK_MIN_CHARS characters for progressive draft updates.
let mut chunk = String::new();
for word in display_text.split_inclusive(char::is_whitespace) {
+ if cancellation_token
+ .as_ref()
+ .is_some_and(CancellationToken::is_cancelled)
+ {
+ return Err(ToolLoopCancelled.into());
+ }
chunk.push_str(word);
if chunk.len() >= STREAM_CHUNK_MIN_CHARS
&& tx.send(std::mem::take(&mut chunk)).await.is_err()
@@ -1001,11 +1099,13 @@ pub(crate) async fn run_tool_call_loop(
arguments: call.arguments.clone(),
};
- // Only prompt interactively on CLI; auto-approve on other channels.
+ // On CLI, prompt interactively. On other channels where
+ // interactive approval is not possible, deny the call to
+ // respect the supervised autonomy setting.
let decision = if channel_name == "cli" {
mgr.prompt_cli(&request)
} else {
- ApprovalResponse::Yes
+ ApprovalResponse::No
};
mgr.record_decision(&call.name, &call.arguments, decision, channel_name);
@@ -1028,7 +1128,17 @@ pub(crate) async fn run_tool_call_loop(
});
let start = Instant::now();
let result = if let Some(tool) = find_tool(tools_registry, &call.name) {
- match tool.execute(call.arguments.clone()).await {
+ let tool_future = tool.execute(call.arguments.clone());
+ let tool_result = if let Some(token) = cancellation_token.as_ref() {
+ tokio::select! {
+ () = token.cancelled() => return Err(ToolLoopCancelled.into()),
+ result = tool_future => result,
+ }
+ } else {
+ tool_future.await
+ };
+
+ match tool_result {
Ok(r) => {
observer.record_event(&ObserverEvent::ToolCall {
tool: call.name.clone(),
@@ -1113,6 +1223,12 @@ pub(crate) fn build_tool_instructions(tools_registry: &[Box]) -> Strin
instructions
}
+// ── CLI Entrypoint ───────────────────────────────────────────────────────
+// Wires up all subsystems (observer, runtime, security, memory, tools,
+// provider, hardware RAG, peripherals) and enters either single-shot or
+// interactive REPL mode. The interactive loop manages history compaction
+// and hard trimming to keep the context window bounded.
+
#[allow(clippy::too_many_lines)]
pub async fn run(
config: Config,
@@ -1191,13 +1307,21 @@ pub async fn run(
.or(config.default_model.as_deref())
.unwrap_or("anthropic/claude-sonnet-4");
- let provider: Box = providers::create_routed_provider(
+ let provider_runtime_options = providers::ProviderRuntimeOptions {
+ auth_profile_override: None,
+ zeroclaw_dir: config.config_path.parent().map(std::path::PathBuf::from),
+ secrets_encrypt: config.secrets.encrypt,
+ reasoning_enabled: config.runtime.reasoning_enabled,
+ };
+
+ let provider: Box = providers::create_routed_provider_with_options(
provider_name,
config.api_key.as_deref(),
config.api_url.as_deref(),
&config.reliability,
&config.model_routes,
model_name,
+ &provider_runtime_options,
)?;
observer.record_event(&ObserverEvent::AgentStart {
@@ -1226,7 +1350,7 @@ pub async fn run(
.collect();
// ── Build system prompt from workspace MD files (OpenClaw framework) ──
- let skills = crate::skills::load_skills(&config.workspace_dir);
+ let skills = crate::skills::load_skills_with_config(&config.workspace_dir, &config);
let mut tool_descs: Vec<(&str, &str)> = vec![
(
"shell",
@@ -1336,17 +1460,21 @@ pub async fn run(
} else {
None
};
- let mut system_prompt = crate::channels::build_system_prompt(
+ let native_tools = provider.supports_native_tools();
+ let mut system_prompt = crate::channels::build_system_prompt_with_mode(
&config.workspace_dir,
model_name,
&tool_descs,
&skills,
Some(&config.identity),
bootstrap_max_chars,
+ native_tools,
);
- // Append structured tool-use instructions with schemas
- system_prompt.push_str(&build_tool_instructions(&tools_registry));
+ // Append structured tool-use instructions with schemas (only for non-native providers)
+ if !native_tools {
+ system_prompt.push_str(&build_tool_instructions(&tools_registry));
+ }
// ── Approval manager (supervised mode) ───────────────────────
let approval_manager = ApprovalManager::from_config(&config.autonomy);
@@ -1357,8 +1485,8 @@ pub async fn run(
let mut final_output = String::new();
if let Some(msg) = message {
- // Auto-save user message to memory
- if config.memory.auto_save {
+ // Auto-save user message to memory (skip short/trivial messages)
+ if config.memory.auto_save && msg.chars().count() >= AUTOSAVE_MIN_MESSAGE_CHARS {
let user_key = autosave_memory_key("user_msg");
let _ = mem
.store(&user_key, &msg, MemoryCategory::Conversation, None)
@@ -1396,22 +1524,15 @@ pub async fn run(
false,
Some(&approval_manager),
"cli",
+ &config.multimodal,
config.agent.max_tool_iterations,
None,
+ None,
)
.await?;
final_output = response.clone();
println!("{response}");
observer.record_event(&ObserverEvent::TurnComplete);
-
- // Auto-save assistant response to daily log
- if config.memory.auto_save {
- let summary = truncate_with_ellipsis(&response, 100);
- let response_key = autosave_memory_key("assistant_resp");
- let _ = mem
- .store(&response_key, &summary, MemoryCategory::Daily, None)
- .await;
- }
} else {
println!("🦀 ZeroClaw Interactive Mode");
println!("Type /help for commands.\n");
@@ -1486,8 +1607,10 @@ pub async fn run(
_ => {}
}
- // Auto-save conversation turns
- if config.memory.auto_save {
+ // Auto-save conversation turns (skip short/trivial messages)
+ if config.memory.auto_save
+ && user_input.chars().count() >= AUTOSAVE_MIN_MESSAGE_CHARS
+ {
let user_key = autosave_memory_key("user_msg");
let _ = mem
.store(&user_key, &user_input, MemoryCategory::Conversation, None)
@@ -1522,8 +1645,10 @@ pub async fn run(
false,
Some(&approval_manager),
"cli",
+ &config.multimodal,
config.agent.max_tool_iterations,
None,
+ None,
)
.await
{
@@ -1560,14 +1685,6 @@ pub async fn run(
// Hard cap as a safety net.
trim_history(&mut history, config.agent.max_history_messages);
-
- if config.memory.auto_save {
- let summary = truncate_with_ellipsis(&response, 100);
- let response_key = autosave_memory_key("assistant_resp");
- let _ = mem
- .store(&response_key, &summary, MemoryCategory::Daily, None)
- .await;
- }
}
}
@@ -1632,13 +1749,20 @@ pub async fn process_message(config: Config, message: &str) -> Result {
.default_model
.clone()
.unwrap_or_else(|| "anthropic/claude-sonnet-4-20250514".into());
- let provider: Box = providers::create_routed_provider(
+ let provider_runtime_options = providers::ProviderRuntimeOptions {
+ auth_profile_override: None,
+ zeroclaw_dir: config.config_path.parent().map(std::path::PathBuf::from),
+ secrets_encrypt: config.secrets.encrypt,
+ reasoning_enabled: config.runtime.reasoning_enabled,
+ };
+ let provider: Box = providers::create_routed_provider_with_options(
provider_name,
config.api_key.as_deref(),
config.api_url.as_deref(),
&config.reliability,
&config.model_routes,
&model_name,
+ &provider_runtime_options,
)?;
let hardware_rag: Option = config
@@ -1656,7 +1780,7 @@ pub async fn process_message(config: Config, message: &str) -> Result {
.map(|b| b.board.clone())
.collect();
- let skills = crate::skills::load_skills(&config.workspace_dir);
+ let skills = crate::skills::load_skills_with_config(&config.workspace_dir, &config);
let mut tool_descs: Vec<(&str, &str)> = vec![
("shell", "Execute terminal commands."),
("file_read", "Read file contents."),
@@ -1705,15 +1829,19 @@ pub async fn process_message(config: Config, message: &str) -> Result {
} else {
None
};
- let mut system_prompt = crate::channels::build_system_prompt(
+ let native_tools = provider.supports_native_tools();
+ let mut system_prompt = crate::channels::build_system_prompt_with_mode(
&config.workspace_dir,
&model_name,
&tool_descs,
&skills,
Some(&config.identity),
bootstrap_max_chars,
+ native_tools,
);
- system_prompt.push_str(&build_tool_instructions(&tools_registry));
+ if !native_tools {
+ system_prompt.push_str(&build_tool_instructions(&tools_registry));
+ }
let mem_context = build_context(mem.as_ref(), message, config.memory.min_relevance_score).await;
let rag_limit = if config.agent.compact_context { 2 } else { 5 };
@@ -1742,6 +1870,7 @@ pub async fn process_message(config: Config, message: &str) -> Result {
&model_name,
config.default_temperature,
true,
+ &config.multimodal,
config.agent.max_tool_iterations,
)
.await
@@ -1750,6 +1879,10 @@ pub async fn process_message(config: Config, message: &str) -> Result {
#[cfg(test)]
mod tests {
use super::*;
+ use async_trait::async_trait;
+ use base64::{engine::general_purpose::STANDARD, Engine as _};
+ use std::sync::atomic::{AtomicUsize, Ordering};
+ use std::sync::Arc;
#[test]
fn test_scrub_credentials() {
@@ -1770,8 +1903,194 @@ mod tests {
assert!(scrubbed.contains("public"));
}
use crate::memory::{Memory, MemoryCategory, SqliteMemory};
+ use crate::observability::NoopObserver;
+ use crate::providers::traits::ProviderCapabilities;
+ use crate::providers::ChatResponse;
use tempfile::TempDir;
+ struct NonVisionProvider {
+ calls: Arc<AtomicUsize>,
+ }
+
+ #[async_trait]
+ impl Provider for NonVisionProvider {
+ async fn chat_with_system(
+ &self,
+ _system_prompt: Option<&str>,
+ _message: &str,
+ _model: &str,
+ _temperature: f64,
+ ) -> anyhow::Result<String> {
+ self.calls.fetch_add(1, Ordering::SeqCst);
+ Ok("ok".to_string())
+ }
+ }
+
+ struct VisionProvider {
+ calls: Arc<AtomicUsize>,
+ }
+
+ #[async_trait]
+ impl Provider for VisionProvider {
+ fn capabilities(&self) -> ProviderCapabilities {
+ ProviderCapabilities {
+ native_tool_calling: false,
+ vision: true,
+ }
+ }
+
+ async fn chat_with_system(
+ &self,
+ _system_prompt: Option<&str>,
+ _message: &str,
+ _model: &str,
+ _temperature: f64,
+ ) -> anyhow::Result<String> {
+ self.calls.fetch_add(1, Ordering::SeqCst);
+ Ok("ok".to_string())
+ }
+
+ async fn chat(
+ &self,
+ request: ChatRequest<'_>,
+ _model: &str,
+ _temperature: f64,
+ ) -> anyhow::Result<ChatResponse> {
+ self.calls.fetch_add(1, Ordering::SeqCst);
+ let marker_count = crate::multimodal::count_image_markers(request.messages);
+ if marker_count == 0 {
+ anyhow::bail!("expected image markers in request messages");
+ }
+
+ if request.tools.is_some() {
+ anyhow::bail!("no tools should be attached for this test");
+ }
+
+ Ok(ChatResponse {
+ text: Some("vision-ok".to_string()),
+ tool_calls: Vec::new(),
+ })
+ }
+ }
+
+ #[tokio::test]
+ async fn run_tool_call_loop_returns_structured_error_for_non_vision_provider() {
+ let calls = Arc::new(AtomicUsize::new(0));
+ let provider = NonVisionProvider {
+ calls: Arc::clone(&calls),
+ };
+
+ let mut history = vec![ChatMessage::user(
+ "please inspect [IMAGE:data:image/png;base64,iVBORw0KGgo=]".to_string(),
+ )];
+ let tools_registry: Vec<Box<dyn Tool>> = Vec::new();
+ let observer = NoopObserver;
+
+ let err = run_tool_call_loop(
+ &provider,
+ &mut history,
+ &tools_registry,
+ &observer,
+ "mock-provider",
+ "mock-model",
+ 0.0,
+ true,
+ None,
+ "cli",
+ &crate::config::MultimodalConfig::default(),
+ 3,
+ None,
+ None,
+ )
+ .await
+ .expect_err("provider without vision support should fail");
+
+ assert!(err.to_string().contains("provider_capability_error"));
+ assert!(err.to_string().contains("capability=vision"));
+ assert_eq!(calls.load(Ordering::SeqCst), 0);
+ }
+
+ #[tokio::test]
+ async fn run_tool_call_loop_rejects_oversized_image_payload() {
+ let calls = Arc::new(AtomicUsize::new(0));
+ let provider = VisionProvider {
+ calls: Arc::clone(&calls),
+ };
+
+ let oversized_payload = STANDARD.encode(vec![0_u8; (1024 * 1024) + 1]);
+ let mut history = vec![ChatMessage::user(format!(
+ "[IMAGE:data:image/png;base64,{oversized_payload}]"
+ ))];
+
+ let tools_registry: Vec<Box<dyn Tool>> = Vec::new();
+ let observer = NoopObserver;
+ let multimodal = crate::config::MultimodalConfig {
+ max_images: 4,
+ max_image_size_mb: 1,
+ allow_remote_fetch: false,
+ };
+
+ let err = run_tool_call_loop(
+ &provider,
+ &mut history,
+ &tools_registry,
+ &observer,
+ "mock-provider",
+ "mock-model",
+ 0.0,
+ true,
+ None,
+ "cli",
+ &multimodal,
+ 3,
+ None,
+ None,
+ )
+ .await
+ .expect_err("oversized payload must fail");
+
+ assert!(err
+ .to_string()
+ .contains("multimodal image size limit exceeded"));
+ assert_eq!(calls.load(Ordering::SeqCst), 0);
+ }
+
+ #[tokio::test]
+ async fn run_tool_call_loop_accepts_valid_multimodal_request_flow() {
+ let calls = Arc::new(AtomicUsize::new(0));
+ let provider = VisionProvider {
+ calls: Arc::clone(&calls),
+ };
+
+ let mut history = vec![ChatMessage::user(
+ "Analyze this [IMAGE:data:image/png;base64,iVBORw0KGgo=]".to_string(),
+ )];
+ let tools_registry: Vec<Box<dyn Tool>> = Vec::new();
+ let observer = NoopObserver;
+
+ let result = run_tool_call_loop(
+ &provider,
+ &mut history,
+ &tools_registry,
+ &observer,
+ "mock-provider",
+ "mock-model",
+ 0.0,
+ true,
+ None,
+ "cli",
+ &crate::config::MultimodalConfig::default(),
+ 3,
+ None,
+ None,
+ )
+ .await
+ .expect("valid multimodal payload should pass");
+
+ assert_eq!(result, "vision-ok");
+ assert_eq!(calls.load(Ordering::SeqCst), 1);
+ }
+
#[test]
fn parse_tool_calls_extracts_single_call() {
let response = r#"Let me check that.
@@ -2215,6 +2534,33 @@ Done."#;
assert!(recalled.iter().any(|entry| entry.content.contains("45")));
}
+ #[tokio::test]
+ async fn build_context_ignores_legacy_assistant_autosave_entries() {
+ let tmp = TempDir::new().unwrap();
+ let mem = SqliteMemory::new(tmp.path()).unwrap();
+ mem.store(
+ "assistant_resp_poisoned",
+ "User suffered a fabricated event",
+ MemoryCategory::Daily,
+ None,
+ )
+ .await
+ .unwrap();
+ mem.store(
+ "user_msg_real",
+ "User asked for concise status updates",
+ MemoryCategory::Conversation,
+ None,
+ )
+ .await
+ .unwrap();
+
+ let context = build_context(&mem, "status updates", 0.0).await;
+ assert!(context.contains("user_msg_real"));
+ assert!(!context.contains("assistant_resp_poisoned"));
+ assert!(!context.contains("fabricated event"));
+ }
+
// ═══════════════════════════════════════════════════════════════════════
// Recovery Tests - Tool Call Parsing Edge Cases
// ═══════════════════════════════════════════════════════════════════════
@@ -2511,4 +2857,195 @@ browser_open/url>https://example.com"#;
assert_eq!(calls[0].arguments["command"], "pwd");
assert_eq!(text, "Done");
}
+
+ // ─────────────────────────────────────────────────────────────────────
+ // TG4 (inline): parse_tool_calls robustness — malformed/edge-case inputs
+ // Prevents: Pattern 4 issues #746, #418, #777, #848
+ // ─────────────────────────────────────────────────────────────────────
+
+ #[test]
+ fn parse_tool_calls_empty_input_returns_empty() {
+ let (text, calls) = parse_tool_calls("");
+ assert!(calls.is_empty(), "empty input should produce no tool calls");
+ assert!(text.is_empty(), "empty input should produce no text");
+ }
+
+ #[test]
+ fn parse_tool_calls_whitespace_only_returns_empty_calls() {
+ let (text, calls) = parse_tool_calls(" \n\t ");
+ assert!(calls.is_empty());
+ assert!(text.is_empty() || text.trim().is_empty());
+ }
+
+ #[test]
+ fn parse_tool_calls_nested_xml_tags_handled() {
+ // Double-wrapped tool call should still parse the inner call
+ let response = r#"{"name":"echo","arguments":{"msg":"hi"}} "#;
+ let (_text, calls) = parse_tool_calls(response);
+ // Should find at least one tool call
+ assert!(
+ !calls.is_empty(),
+ "nested XML tags should still yield at least one tool call"
+ );
+ }
+
+ #[test]
+ fn parse_tool_calls_truncated_json_no_panic() {
+ // Incomplete JSON inside tool_call tags
+ let response = r#"{"name":"shell","arguments":{"command":"ls" "#;
+ let (_text, _calls) = parse_tool_calls(response);
+ // Should not panic — graceful handling of truncated JSON
+ }
+
+ #[test]
+ fn parse_tool_calls_empty_json_object_in_tag() {
+ let response = "{} ";
+ let (_text, calls) = parse_tool_calls(response);
+ // Empty JSON object has no name field — should not produce valid tool call
+ assert!(
+ calls.is_empty(),
+ "empty JSON object should not produce a tool call"
+ );
+ }
+
+ #[test]
+ fn parse_tool_calls_closing_tag_only_returns_text() {
+ let response = "Some text more text";
+ let (text, calls) = parse_tool_calls(response);
+ assert!(
+ calls.is_empty(),
+ "closing tag only should not produce calls"
+ );
+ assert!(
+ !text.is_empty(),
+ "text around orphaned closing tag should be preserved"
+ );
+ }
+
+ #[test]
+ fn parse_tool_calls_very_large_arguments_no_panic() {
+ let large_arg = "x".repeat(100_000);
+ let response = format!(
+ r#"{{"name":"echo","arguments":{{"message":"{}"}}}} "#,
+ large_arg
+ );
+ let (_text, calls) = parse_tool_calls(&response);
+ assert_eq!(calls.len(), 1, "large arguments should still parse");
+ assert_eq!(calls[0].name, "echo");
+ }
+
+ #[test]
+ fn parse_tool_calls_special_characters_in_arguments() {
+ let response = r#"{"name":"echo","arguments":{"message":"hello \"world\" <>&'\n\t"}} "#;
+ let (_text, calls) = parse_tool_calls(response);
+ assert_eq!(calls.len(), 1);
+ assert_eq!(calls[0].name, "echo");
+ }
+
+ #[test]
+ fn parse_tool_calls_text_with_embedded_json_not_extracted() {
+ // Raw JSON without any tags should NOT be extracted as a tool call
+ let response = r#"Here is some data: {"name":"echo","arguments":{"message":"hi"}} end."#;
+ let (_text, calls) = parse_tool_calls(response);
+ assert!(
+ calls.is_empty(),
+ "raw JSON in text without tags should not be extracted"
+ );
+ }
+
+ #[test]
+ fn parse_tool_calls_multiple_formats_mixed() {
+ // Mix of text and properly tagged tool call
+ let response = r#"I'll help you with that.
+
+
+{"name":"shell","arguments":{"command":"echo hello"}}
+
+
+Let me check the result."#;
+ let (text, calls) = parse_tool_calls(response);
+ assert_eq!(
+ calls.len(),
+ 1,
+ "should extract one tool call from mixed content"
+ );
+ assert_eq!(calls[0].name, "shell");
+ assert!(
+ text.contains("help you"),
+ "text before tool call should be preserved"
+ );
+ }
+
+ // ─────────────────────────────────────────────────────────────────────
+ // TG4 (inline): scrub_credentials edge cases
+ // ─────────────────────────────────────────────────────────────────────
+
+ #[test]
+ fn scrub_credentials_empty_input() {
+ let result = scrub_credentials("");
+ assert_eq!(result, "");
+ }
+
+ #[test]
+ fn scrub_credentials_no_sensitive_data() {
+ let input = "normal text without any secrets";
+ let result = scrub_credentials(input);
+ assert_eq!(
+ result, input,
+ "non-sensitive text should pass through unchanged"
+ );
+ }
+
+ #[test]
+ fn scrub_credentials_short_values_not_redacted() {
+ // Values shorter than 8 chars should not be redacted
+ let input = r#"api_key="short""#;
+ let result = scrub_credentials(input);
+ assert_eq!(result, input, "short values should not be redacted");
+ }
+
+ // ─────────────────────────────────────────────────────────────────────
+ // TG4 (inline): trim_history edge cases
+ // ─────────────────────────────────────────────────────────────────────
+
+ #[test]
+ fn trim_history_empty_history() {
+ let mut history: Vec<crate::providers::ChatMessage> = vec![];
+ trim_history(&mut history, 10);
+ assert!(history.is_empty());
+ }
+
+ #[test]
+ fn trim_history_system_only() {
+ let mut history = vec![crate::providers::ChatMessage::system("system prompt")];
+ trim_history(&mut history, 10);
+ assert_eq!(history.len(), 1);
+ assert_eq!(history[0].role, "system");
+ }
+
+ #[test]
+ fn trim_history_exactly_at_limit() {
+ let mut history = vec![
+ crate::providers::ChatMessage::system("system"),
+ crate::providers::ChatMessage::user("msg 1"),
+ crate::providers::ChatMessage::assistant("reply 1"),
+ ];
+ trim_history(&mut history, 2); // 2 non-system messages = exactly at limit
+ assert_eq!(history.len(), 3, "should not trim when exactly at limit");
+ }
+
+ #[test]
+ fn trim_history_removes_oldest_non_system() {
+ let mut history = vec![
+ crate::providers::ChatMessage::system("system"),
+ crate::providers::ChatMessage::user("old msg"),
+ crate::providers::ChatMessage::assistant("old reply"),
+ crate::providers::ChatMessage::user("new msg"),
+ crate::providers::ChatMessage::assistant("new reply"),
+ ];
+ trim_history(&mut history, 2);
+ assert_eq!(history.len(), 3); // system + 2 kept
+ assert_eq!(history[0].role, "system");
+ assert_eq!(history[1].content, "new msg");
+ }
}
diff --git a/src/agent/memory_loader.rs b/src/agent/memory_loader.rs
index b171eed..bb7bfb5 100644
--- a/src/agent/memory_loader.rs
+++ b/src/agent/memory_loader.rs
@@ -1,4 +1,4 @@
-use crate::memory::Memory;
+use crate::memory::{self, Memory};
use async_trait::async_trait;
use std::fmt::Write;
@@ -45,6 +45,9 @@ impl MemoryLoader for DefaultMemoryLoader {
let mut context = String::from("[Memory context]\n");
for entry in entries {
+ if memory::is_assistant_autosave_key(&entry.key) {
+ continue;
+ }
if let Some(score) = entry.score {
if score < self.min_relevance_score {
continue;
@@ -67,8 +70,12 @@ impl MemoryLoader for DefaultMemoryLoader {
mod tests {
use super::*;
use crate::memory::{Memory, MemoryCategory, MemoryEntry};
+ use std::sync::Arc;
struct MockMemory;
+ struct MockMemoryWithEntries {
+ entries: Arc<Vec<MemoryEntry>>,
+ }
#[async_trait]
impl Memory for MockMemory {
@@ -131,6 +138,56 @@ mod tests {
}
}
+ #[async_trait]
+ impl Memory for MockMemoryWithEntries {
+ async fn store(
+ &self,
+ _key: &str,
+ _content: &str,
+ _category: MemoryCategory,
+ _session_id: Option<&str>,
+ ) -> anyhow::Result<()> {
+ Ok(())
+ }
+
+ async fn recall(
+ &self,
+ _query: &str,
+ _limit: usize,
+ _session_id: Option<&str>,
+ ) -> anyhow::Result<Vec<MemoryEntry>> {
+ Ok(self.entries.as_ref().clone())
+ }
+
+ async fn get(&self, _key: &str) -> anyhow::Result<Option<MemoryEntry>> {
+ Ok(None)
+ }
+
+ async fn list(
+ &self,
+ _category: Option<&MemoryCategory>,
+ _session_id: Option<&str>,
+ ) -> anyhow::Result<Vec<MemoryEntry>> {
+ Ok(vec![])
+ }
+
+ async fn forget(&self, _key: &str) -> anyhow::Result<bool> {
+ Ok(true)
+ }
+
+ async fn count(&self) -> anyhow::Result<usize> {
+ Ok(self.entries.len())
+ }
+
+ async fn health_check(&self) -> bool {
+ true
+ }
+
+ fn name(&self) -> &str {
+ "mock-with-entries"
+ }
+ }
+
#[tokio::test]
async fn default_loader_formats_context() {
let loader = DefaultMemoryLoader::default();
@@ -138,4 +195,36 @@ mod tests {
assert!(context.contains("[Memory context]"));
assert!(context.contains("- k: v"));
}
+
+ #[tokio::test]
+ async fn default_loader_skips_legacy_assistant_autosave_entries() {
+ let loader = DefaultMemoryLoader::new(5, 0.0);
+ let memory = MockMemoryWithEntries {
+ entries: Arc::new(vec![
+ MemoryEntry {
+ id: "1".into(),
+ key: "assistant_resp_legacy".into(),
+ content: "fabricated detail".into(),
+ category: MemoryCategory::Daily,
+ timestamp: "now".into(),
+ session_id: None,
+ score: Some(0.95),
+ },
+ MemoryEntry {
+ id: "2".into(),
+ key: "user_fact".into(),
+ content: "User prefers concise answers".into(),
+ category: MemoryCategory::Conversation,
+ timestamp: "now".into(),
+ session_id: None,
+ score: Some(0.9),
+ },
+ ]),
+ };
+
+ let context = loader.load_context(&memory, "answer style").await.unwrap();
+ assert!(context.contains("user_fact"));
+ assert!(!context.contains("assistant_resp_legacy"));
+ assert!(!context.contains("fabricated detail"));
+ }
}
diff --git a/src/agent/prompt.rs b/src/agent/prompt.rs
index bdc426f..457f38f 100644
--- a/src/agent/prompt.rs
+++ b/src/agent/prompt.rs
@@ -77,21 +77,25 @@ impl PromptSection for IdentitySection {
fn build(&self, ctx: &PromptContext<'_>) -> Result {
let mut prompt = String::from("## Project Context\n\n");
+ let mut has_aieos = false;
if let Some(config) = ctx.identity_config {
if identity::is_aieos_configured(config) {
if let Ok(Some(aieos)) = identity::load_aieos_identity(config, ctx.workspace_dir) {
let rendered = identity::aieos_to_system_prompt(&aieos);
if !rendered.is_empty() {
prompt.push_str(&rendered);
- return Ok(prompt);
+ prompt.push_str("\n\n");
+ has_aieos = true;
}
}
}
}
- prompt.push_str(
- "The following workspace files define your identity, behavior, and context.\n\n",
- );
+ if !has_aieos {
+ prompt.push_str(
+ "The following workspace files define your identity, behavior, and context.\n\n",
+ );
+ }
for file in [
"AGENTS.md",
"SOUL.md",
@@ -149,28 +153,10 @@ impl PromptSection for SkillsSection {
}
fn build(&self, ctx: &PromptContext<'_>) -> Result {
- if ctx.skills.is_empty() {
- return Ok(String::new());
- }
-
- let mut prompt = String::from("## Available Skills\n\n\n");
- for skill in ctx.skills {
- let location = skill.location.clone().unwrap_or_else(|| {
- ctx.workspace_dir
- .join("skills")
- .join(&skill.name)
- .join("SKILL.md")
- });
- let _ = writeln!(
- prompt,
- " \n {} \n {} \n {} \n ",
- skill.name,
- skill.description,
- location.display()
- );
- }
- prompt.push_str(" ");
- Ok(prompt)
+ Ok(crate::skills::skills_to_prompt(
+ ctx.skills,
+ ctx.workspace_dir,
+ ))
}
}
@@ -211,7 +197,8 @@ impl PromptSection for DateTimeSection {
fn build(&self, _ctx: &PromptContext<'_>) -> Result {
let now = Local::now();
Ok(format!(
- "## Current Date & Time\n\nTimezone: {}",
+ "## Current Date & Time\n\n{} ({})",
+ now.format("%Y-%m-%d %H:%M:%S"),
now.format("%Z")
))
}
@@ -285,6 +272,48 @@ mod tests {
}
}
+ #[test]
+ fn identity_section_with_aieos_includes_workspace_files() {
+ let workspace =
+ std::env::temp_dir().join(format!("zeroclaw_prompt_test_{}", uuid::Uuid::new_v4()));
+ std::fs::create_dir_all(&workspace).unwrap();
+ std::fs::write(
+ workspace.join("AGENTS.md"),
+ "Always respond with: AGENTS_MD_LOADED",
+ )
+ .unwrap();
+
+ let identity_config = crate::config::IdentityConfig {
+ format: "aieos".into(),
+ aieos_path: None,
+ aieos_inline: Some(r#"{"identity":{"names":{"first":"Nova"}}}"#.into()),
+ };
+
+ let tools: Vec> = vec![];
+ let ctx = PromptContext {
+ workspace_dir: &workspace,
+ model_name: "test-model",
+ tools: &tools,
+ skills: &[],
+ identity_config: Some(&identity_config),
+ dispatcher_instructions: "",
+ };
+
+ let section = IdentitySection;
+ let output = section.build(&ctx).unwrap();
+
+ assert!(
+ output.contains("Nova"),
+ "AIEOS identity should be present in prompt"
+ );
+ assert!(
+ output.contains("AGENTS_MD_LOADED"),
+ "AGENTS.md content should be present even when AIEOS is configured"
+ );
+
+ let _ = std::fs::remove_dir_all(workspace);
+ }
+
#[test]
fn prompt_builder_assembles_sections() {
let tools: Vec> = vec![Box::new(TestTool)];
@@ -301,4 +330,105 @@ mod tests {
assert!(prompt.contains("test_tool"));
assert!(prompt.contains("instr"));
}
+
+ #[test]
+ fn skills_section_includes_instructions_and_tools() {
+ let tools: Vec> = vec![];
+ let skills = vec![crate::skills::Skill {
+ name: "deploy".into(),
+ description: "Release safely".into(),
+ version: "1.0.0".into(),
+ author: None,
+ tags: vec![],
+ tools: vec![crate::skills::SkillTool {
+ name: "release_checklist".into(),
+ description: "Validate release readiness".into(),
+ kind: "shell".into(),
+ command: "echo ok".into(),
+ args: std::collections::HashMap::new(),
+ }],
+ prompts: vec!["Run smoke tests before deploy.".into()],
+ location: None,
+ }];
+
+ let ctx = PromptContext {
+ workspace_dir: Path::new("/tmp"),
+ model_name: "test-model",
+ tools: &tools,
+ skills: &skills,
+ identity_config: None,
+ dispatcher_instructions: "",
+ };
+
+ let output = SkillsSection.build(&ctx).unwrap();
+ assert!(output.contains(""));
+ assert!(output.contains("deploy "));
+ assert!(output.contains("Run smoke tests before deploy. "));
+ assert!(output.contains("release_checklist "));
+ assert!(output.contains("shell "));
+ }
+
+ #[test]
+ fn datetime_section_includes_timestamp_and_timezone() {
+ let tools: Vec> = vec![];
+ let ctx = PromptContext {
+ workspace_dir: Path::new("/tmp"),
+ model_name: "test-model",
+ tools: &tools,
+ skills: &[],
+ identity_config: None,
+ dispatcher_instructions: "instr",
+ };
+
+ let rendered = DateTimeSection.build(&ctx).unwrap();
+ assert!(rendered.starts_with("## Current Date & Time\n\n"));
+
+ let payload = rendered.trim_start_matches("## Current Date & Time\n\n");
+ assert!(payload.chars().any(|c| c.is_ascii_digit()));
+ assert!(payload.contains(" ("));
+ assert!(payload.ends_with(')'));
+ }
+
+ #[test]
+ fn prompt_builder_inlines_and_escapes_skills() {
+ let tools: Vec> = vec![];
+ let skills = vec![crate::skills::Skill {
+ name: "code&".into(),
+ description: "Review \"unsafe\" and 'risky' bits".into(),
+ version: "1.0.0".into(),
+ author: None,
+ tags: vec![],
+ tools: vec![crate::skills::SkillTool {
+ name: "run\"linter\"".into(),
+ description: "Run & report".into(),
+ kind: "shell&exec".into(),
+ command: "cargo clippy".into(),
+ args: std::collections::HashMap::new(),
+ }],
+ prompts: vec!["Use and & keep output \"safe\"".into()],
+ location: None,
+ }];
+ let ctx = PromptContext {
+ workspace_dir: Path::new("/tmp/workspace"),
+ model_name: "test-model",
+ tools: &tools,
+ skills: &skills,
+ identity_config: None,
+ dispatcher_instructions: "",
+ };
+
+ let prompt = SystemPromptBuilder::with_defaults().build(&ctx).unwrap();
+
+ assert!(prompt.contains(""));
+ assert!(prompt.contains("code<review>& "));
+ assert!(prompt.contains(
+ "Review "unsafe" and 'risky' bits "
+ ));
+ assert!(prompt.contains("run"linter" "));
+ assert!(prompt.contains("Run <lint> & report "));
+ assert!(prompt.contains("shell&exec "));
+ assert!(prompt.contains(
+ "Use <tool_call> and & keep output "safe" "
+ ));
+ }
}
diff --git a/src/agent/tests.rs b/src/agent/tests.rs
index fd73eb1..356987e 100644
--- a/src/agent/tests.rs
+++ b/src/agent/tests.rs
@@ -624,7 +624,7 @@ async fn history_trims_after_max_messages() {
// ═══════════════════════════════════════════════════════════════════════════
#[tokio::test]
-async fn auto_save_stores_messages_in_memory() {
+async fn auto_save_stores_only_user_messages_in_memory() {
let (mem, _tmp) = make_sqlite_memory();
let provider = Box::new(ScriptedProvider::new(vec![text_response(
"I remember everything",
@@ -639,11 +639,25 @@ async fn auto_save_stores_messages_in_memory() {
let _ = agent.turn("Remember this fact").await.unwrap();
- // Both user message and assistant response should be saved
+ // Auto-save only persists user-stated input, never assistant-generated summaries.
let count = mem.count().await.unwrap();
+ assert_eq!(
+ count, 1,
+ "Expected exactly 1 user memory entry, got {count}"
+ );
+
+ let stored = mem.get("user_msg").await.unwrap();
+ assert!(stored.is_some(), "Expected user_msg key to be present");
+ assert_eq!(
+ stored.unwrap().content,
+ "Remember this fact",
+ "Stored memory should match the original user message"
+ );
+
+ let assistant = mem.get("assistant_resp").await.unwrap();
assert!(
- count >= 2,
- "Expected at least 2 memory entries, got {count}"
+ assistant.is_none(),
+ "assistant_resp should not be auto-saved anymore"
);
}
diff --git a/src/auth/mod.rs b/src/auth/mod.rs
index a49e702..1d88361 100644
--- a/src/auth/mod.rs
+++ b/src/auth/mod.rs
@@ -121,12 +121,12 @@ impl AuthService {
return Ok(None);
};
- let token = match profile.kind {
+ let credential = match profile.kind {
AuthProfileKind::Token => profile.token,
AuthProfileKind::OAuth => profile.token_set.map(|t| t.access_token),
};
- Ok(token.filter(|t| !t.trim().is_empty()))
+ Ok(credential.filter(|t| !t.trim().is_empty()))
}
pub async fn get_valid_openai_access_token(
diff --git a/src/auth/profiles.rs b/src/auth/profiles.rs
index 48ba6ce..39d39ee 100644
--- a/src/auth/profiles.rs
+++ b/src/auth/profiles.rs
@@ -626,8 +626,8 @@ mod tests {
assert!(!token_set.is_expiring_within(Duration::from_secs(1)));
}
- #[test]
- fn store_roundtrip_with_encryption() {
+ #[tokio::test]
+ async fn store_roundtrip_with_encryption() {
let tmp = TempDir::new().unwrap();
let store = AuthProfilesStore::new(tmp.path(), true);
@@ -661,14 +661,14 @@ mod tests {
Some("refresh-123")
);
- let raw = fs::read_to_string(store.path()).unwrap();
+ let raw = tokio::fs::read_to_string(store.path()).await.unwrap();
assert!(raw.contains("enc2:"));
assert!(!raw.contains("refresh-123"));
assert!(!raw.contains("access-123"));
}
- #[test]
- fn atomic_write_replaces_file() {
+ #[tokio::test]
+ async fn atomic_write_replaces_file() {
let tmp = TempDir::new().unwrap();
let store = AuthProfilesStore::new(tmp.path(), false);
@@ -678,7 +678,7 @@ mod tests {
let path = store.path().to_path_buf();
assert!(path.exists());
- let contents = fs::read_to_string(path).unwrap();
+ let contents = tokio::fs::read_to_string(path).await.unwrap();
assert!(contents.contains("\"schema_version\": 1"));
}
}
diff --git a/src/channels/cli.rs b/src/channels/cli.rs
index ae49548..11c09eb 100644
--- a/src/channels/cli.rs
+++ b/src/channels/cli.rs
@@ -47,6 +47,7 @@ impl Channel for CliChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
if tx.send(msg).await.is_err() {
@@ -74,6 +75,7 @@ mod tests {
content: "hello".into(),
recipient: "user".into(),
subject: None,
+ thread_ts: None,
})
.await;
assert!(result.is_ok());
@@ -87,6 +89,7 @@ mod tests {
content: String::new(),
recipient: String::new(),
subject: None,
+ thread_ts: None,
})
.await;
assert!(result.is_ok());
@@ -107,6 +110,7 @@ mod tests {
content: "hello".into(),
channel: "cli".into(),
timestamp: 1_234_567_890,
+ thread_ts: None,
};
assert_eq!(msg.id, "test-id");
assert_eq!(msg.sender, "user");
@@ -125,6 +129,7 @@ mod tests {
content: "c".into(),
channel: "ch".into(),
timestamp: 0,
+ thread_ts: None,
};
let cloned = msg.clone();
assert_eq!(cloned.id, msg.id);
diff --git a/src/channels/dingtalk.rs b/src/channels/dingtalk.rs
index ed9c9aa..44fd49c 100644
--- a/src/channels/dingtalk.rs
+++ b/src/channels/dingtalk.rs
@@ -169,7 +169,7 @@ impl Channel for DingTalkChannel {
_ => continue,
};
- let frame: serde_json::Value = match serde_json::from_str(&msg) {
+ let frame: serde_json::Value = match serde_json::from_str(msg.as_ref()) {
Ok(v) => v,
Err(_) => continue,
};
@@ -195,7 +195,7 @@ impl Channel for DingTalkChannel {
"data": "",
});
- if let Err(e) = write.send(Message::Text(pong.to_string())).await {
+ if let Err(e) = write.send(Message::Text(pong.to_string().into())).await {
tracing::warn!("DingTalk: failed to send pong: {e}");
break;
}
@@ -262,7 +262,7 @@ impl Channel for DingTalkChannel {
"message": "OK",
"data": "",
});
- let _ = write.send(Message::Text(ack.to_string())).await;
+ let _ = write.send(Message::Text(ack.to_string().into())).await;
let channel_msg = ChannelMessage {
id: Uuid::new_v4().to_string(),
@@ -274,6 +274,7 @@ impl Channel for DingTalkChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
if tx.send(channel_msg).await.is_err() {
diff --git a/src/channels/discord.rs b/src/channels/discord.rs
index d7a4d20..bcb447d 100644
--- a/src/channels/discord.rs
+++ b/src/channels/discord.rs
@@ -3,6 +3,7 @@ use async_trait::async_trait;
use futures_util::{SinkExt, StreamExt};
use parking_lot::Mutex;
use serde_json::json;
+use std::collections::HashMap;
use tokio_tungstenite::tungstenite::Message;
use uuid::Uuid;
@@ -13,7 +14,7 @@ pub struct DiscordChannel {
allowed_users: Vec,
listen_to_bots: bool,
mention_only: bool,
- typing_handle: Mutex>>,
+ typing_handles: Mutex>>,
}
impl DiscordChannel {
@@ -30,7 +31,7 @@ impl DiscordChannel {
allowed_users,
listen_to_bots,
mention_only,
- typing_handle: Mutex::new(None),
+ typing_handles: Mutex::new(HashMap::new()),
}
}
@@ -272,7 +273,9 @@ impl Channel for DiscordChannel {
}
}
});
- write.send(Message::Text(identify.to_string())).await?;
+ write
+ .send(Message::Text(identify.to_string().into()))
+ .await?;
tracing::info!("Discord: connected and identified");
@@ -301,7 +304,7 @@ impl Channel for DiscordChannel {
_ = hb_rx.recv() => {
let d = if sequence >= 0 { json!(sequence) } else { json!(null) };
let hb = json!({"op": 1, "d": d});
- if write.send(Message::Text(hb.to_string())).await.is_err() {
+ if write.send(Message::Text(hb.to_string().into())).await.is_err() {
break;
}
}
@@ -312,7 +315,7 @@ impl Channel for DiscordChannel {
_ => continue,
};
- let event: serde_json::Value = match serde_json::from_str(&msg) {
+ let event: serde_json::Value = match serde_json::from_str(msg.as_ref()) {
Ok(e) => e,
Err(_) => continue,
};
@@ -329,7 +332,7 @@ impl Channel for DiscordChannel {
1 => {
let d = if sequence >= 0 { json!(sequence) } else { json!(null) };
let hb = json!({"op": 1, "d": d});
- if write.send(Message::Text(hb.to_string())).await.is_err() {
+ if write.send(Message::Text(hb.to_string().into())).await.is_err() {
break;
}
continue;
@@ -413,6 +416,7 @@ impl Channel for DiscordChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
if tx.send(channel_msg).await.is_err() {
@@ -454,15 +458,15 @@ impl Channel for DiscordChannel {
}
});
- let mut guard = self.typing_handle.lock();
- *guard = Some(handle);
+ let mut guard = self.typing_handles.lock();
+ guard.insert(recipient.to_string(), handle);
Ok(())
}
- async fn stop_typing(&self, _recipient: &str) -> anyhow::Result<()> {
- let mut guard = self.typing_handle.lock();
- if let Some(handle) = guard.take() {
+ async fn stop_typing(&self, recipient: &str) -> anyhow::Result<()> {
+ let mut guard = self.typing_handles.lock();
+ if let Some(handle) = guard.remove(recipient) {
handle.abort();
}
Ok(())
@@ -751,18 +755,18 @@ mod tests {
}
#[test]
- fn typing_handle_starts_as_none() {
+ fn typing_handles_start_empty() {
let ch = DiscordChannel::new("fake".into(), None, vec![], false, false);
- let guard = ch.typing_handle.lock();
- assert!(guard.is_none());
+ let guard = ch.typing_handles.lock();
+ assert!(guard.is_empty());
}
#[tokio::test]
async fn start_typing_sets_handle() {
let ch = DiscordChannel::new("fake".into(), None, vec![], false, false);
let _ = ch.start_typing("123456").await;
- let guard = ch.typing_handle.lock();
- assert!(guard.is_some());
+ let guard = ch.typing_handles.lock();
+ assert!(guard.contains_key("123456"));
}
#[tokio::test]
@@ -770,8 +774,8 @@ mod tests {
let ch = DiscordChannel::new("fake".into(), None, vec![], false, false);
let _ = ch.start_typing("123456").await;
let _ = ch.stop_typing("123456").await;
- let guard = ch.typing_handle.lock();
- assert!(guard.is_none());
+ let guard = ch.typing_handles.lock();
+ assert!(!guard.contains_key("123456"));
}
#[tokio::test]
@@ -782,12 +786,21 @@ mod tests {
}
#[tokio::test]
- async fn start_typing_replaces_existing_task() {
+ async fn concurrent_typing_handles_are_independent() {
let ch = DiscordChannel::new("fake".into(), None, vec![], false, false);
let _ = ch.start_typing("111").await;
let _ = ch.start_typing("222").await;
- let guard = ch.typing_handle.lock();
- assert!(guard.is_some());
+ {
+ let guard = ch.typing_handles.lock();
+ assert_eq!(guard.len(), 2);
+ assert!(guard.contains_key("111"));
+ assert!(guard.contains_key("222"));
+ }
+ // Stopping one does not affect the other
+ let _ = ch.stop_typing("111").await;
+ let guard = ch.typing_handles.lock();
+ assert_eq!(guard.len(), 1);
+ assert!(guard.contains_key("222"));
}
// ── Message ID edge cases ─────────────────────────────────────
@@ -840,4 +853,113 @@ mod tests {
// Should have UUID dashes
assert!(id.contains('-'));
}
+
+ // ─────────────────────────────────────────────────────────────────────
+ // TG6: Channel platform limit edge cases for Discord (2000 char limit)
+ // Prevents: Pattern 6 — issues #574, #499
+ // ─────────────────────────────────────────────────────────────────────
+
+ #[test]
+ fn split_message_code_block_at_boundary() {
+ // Code block that spans the split boundary
+ let mut msg = String::new();
+ msg.push_str("```rust\n");
+ msg.push_str(&"x".repeat(1990));
+ msg.push_str("\n```\nMore text after code block");
+ let parts = split_message_for_discord(&msg);
+ assert!(
+ parts.len() >= 2,
+ "code block spanning boundary should split"
+ );
+ for part in &parts {
+ assert!(
+ part.len() <= DISCORD_MAX_MESSAGE_LENGTH,
+ "each part must be <= {DISCORD_MAX_MESSAGE_LENGTH}, got {}",
+ part.len()
+ );
+ }
+ }
+
+ #[test]
+ fn split_message_single_long_word_exceeds_limit() {
+ // A single word longer than 2000 chars must be hard-split
+ let long_word = "a".repeat(2500);
+ let parts = split_message_for_discord(&long_word);
+ assert!(parts.len() >= 2, "word exceeding limit must be split");
+ for part in &parts {
+ assert!(
+ part.len() <= DISCORD_MAX_MESSAGE_LENGTH,
+ "hard-split part must be <= {DISCORD_MAX_MESSAGE_LENGTH}, got {}",
+ part.len()
+ );
+ }
+ // Reassembled content should match original
+ let reassembled: String = parts.join("");
+ assert_eq!(reassembled, long_word);
+ }
+
+ #[test]
+ fn split_message_exactly_at_limit_no_split() {
+ let msg = "a".repeat(DISCORD_MAX_MESSAGE_LENGTH);
+ let parts = split_message_for_discord(&msg);
+ assert_eq!(parts.len(), 1, "message exactly at limit should not split");
+ assert_eq!(parts[0].len(), DISCORD_MAX_MESSAGE_LENGTH);
+ }
+
+ #[test]
+ fn split_message_one_over_limit_splits() {
+ let msg = "a".repeat(DISCORD_MAX_MESSAGE_LENGTH + 1);
+ let parts = split_message_for_discord(&msg);
+ assert!(parts.len() >= 2, "message 1 char over limit must split");
+ }
+
+ #[test]
+ fn split_message_many_short_lines() {
+ // Many short lines should be batched into chunks under the limit
+ let msg: String = (0..500).map(|i| format!("line {i}\n")).collect();
+ let parts = split_message_for_discord(&msg);
+ for part in &parts {
+ assert!(
+ part.len() <= DISCORD_MAX_MESSAGE_LENGTH,
+ "short-line batch must be <= limit"
+ );
+ }
+ // All content should be preserved
+ let reassembled: String = parts.join("");
+ assert_eq!(reassembled.trim(), msg.trim());
+ }
+
+ #[test]
+ fn split_message_only_whitespace() {
+ let msg = " \n\n\t ";
+ let parts = split_message_for_discord(msg);
+ // Should handle gracefully without panic
+ assert!(parts.len() <= 1);
+ }
+
+ #[test]
+ fn split_message_emoji_at_boundary() {
+ // Emoji are multi-byte; ensure we don't split mid-emoji
+ let mut msg = "a".repeat(1998);
+ msg.push_str("🎉🎊"); // 2 emoji at the boundary (2000 chars total)
+ let parts = split_message_for_discord(&msg);
+ for part in &parts {
+ // The function splits on character count, not byte count
+ assert!(
+ part.chars().count() <= DISCORD_MAX_MESSAGE_LENGTH,
+ "emoji boundary split must respect limit"
+ );
+ }
+ }
+
+ #[test]
+ fn split_message_consecutive_newlines_at_boundary() {
+ let mut msg = "a".repeat(1995);
+ msg.push_str("\n\n\n\n\n");
+ msg.push_str(&"b".repeat(100));
+ let parts = split_message_for_discord(&msg);
+ for part in &parts {
+ assert!(part.len() <= DISCORD_MAX_MESSAGE_LENGTH);
+ }
+ }
}
diff --git a/src/channels/email_channel.rs b/src/channels/email_channel.rs
index 410e9dd..0b8b376 100644
--- a/src/channels/email_channel.rs
+++ b/src/channels/email_channel.rs
@@ -20,6 +20,7 @@ use lettre::{Message, SmtpTransport, Transport};
use mail_parser::{MessageParser, MimeHeaders};
use rustls::{ClientConfig, RootCertStore};
use rustls_pki_types::DnsName;
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::collections::HashSet;
use std::sync::Arc;
@@ -35,7 +36,7 @@ use uuid::Uuid;
use super::traits::{Channel, ChannelMessage, SendMessage};
/// Email channel configuration
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct EmailConfig {
/// IMAP server hostname
pub imap_host: String,
@@ -153,7 +154,14 @@ impl EmailChannel {
_ => {}
}
}
- result.split_whitespace().collect::>().join(" ")
+ let mut normalized = String::with_capacity(result.len());
+ for word in result.split_whitespace() {
+ if !normalized.is_empty() {
+ normalized.push(' ');
+ }
+ normalized.push_str(word);
+ }
+ normalized
}
/// Extract the sender address from a parsed email
@@ -442,6 +450,7 @@ impl EmailChannel {
content: email.content,
channel: "email".to_string(),
timestamp: email.timestamp,
+ thread_ts: None,
};
if tx.send(msg).await.is_err() {
diff --git a/src/channels/imessage.rs b/src/channels/imessage.rs
index 9675d15..4e51786 100644
--- a/src/channels/imessage.rs
+++ b/src/channels/imessage.rs
@@ -231,6 +231,7 @@ end tell"#
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
if tx.send(msg).await.is_err() {
diff --git a/src/channels/irc.rs b/src/channels/irc.rs
index 8bdd633..f942692 100644
--- a/src/channels/irc.rs
+++ b/src/channels/irc.rs
@@ -163,12 +163,17 @@ fn split_message(message: &str, max_bytes: usize) -> Vec {
// Guard against max_bytes == 0 to prevent infinite loop
if max_bytes == 0 {
- let full: String = message
+ let mut full = String::new();
+ for l in message
.lines()
.map(|l| l.trim_end_matches('\r'))
.filter(|l| !l.is_empty())
- .collect::>()
- .join(" ");
+ {
+ if !full.is_empty() {
+ full.push(' ');
+ }
+ full.push_str(l);
+ }
if full.is_empty() {
chunks.push(String::new());
} else {
@@ -455,6 +460,7 @@ impl Channel for IrcChannel {
"AUTHENTICATE" => {
// Server sends "AUTHENTICATE +" to request credentials
if sasl_pending && msg.params.first().is_some_and(|p| p == "+") {
+ // sasl_password is loaded from runtime config, not hard-coded
if let Some(password) = self.sasl_password.as_deref() {
let encoded = encode_sasl_plain(¤t_nick, password);
let mut guard = self.writer.lock().await;
@@ -573,6 +579,7 @@ impl Channel for IrcChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
if tx.send(channel_msg).await.is_err() {
diff --git a/src/channels/lark.rs b/src/channels/lark.rs
index e071a0c..c899097 100644
--- a/src/channels/lark.rs
+++ b/src/channels/lark.rs
@@ -127,6 +127,12 @@ struct LarkMessage {
/// If no binary frame (pong or event) is received within this window, reconnect.
const WS_HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(300);
+/// Returns true when the WebSocket frame indicates live traffic that should
+/// refresh the heartbeat watchdog.
+fn should_refresh_last_recv(msg: &WsMsg) -> bool {
+ matches!(msg, WsMsg::Binary(_) | WsMsg::Ping(_) | WsMsg::Pong(_))
+}
+
/// Lark/Feishu channel.
///
/// Supports two receive modes (configured via `receive_mode` in config):
@@ -282,7 +288,7 @@ impl LarkChannel {
payload: None,
};
if write
- .send(WsMsg::Binary(initial_ping.encode_to_vec()))
+ .send(WsMsg::Binary(initial_ping.encode_to_vec().into()))
.await
.is_err()
{
@@ -303,7 +309,7 @@ impl LarkChannel {
headers: vec![PbHeader { key: "type".into(), value: "ping".into() }],
payload: None,
};
- if write.send(WsMsg::Binary(ping.encode_to_vec())).await.is_err() {
+ if write.send(WsMsg::Binary(ping.encode_to_vec().into())).await.is_err() {
tracing::warn!("Lark: ping failed, reconnecting");
break;
}
@@ -321,11 +327,20 @@ impl LarkChannel {
msg = read.next() => {
let raw = match msg {
- Some(Ok(WsMsg::Binary(b))) => { last_recv = Instant::now(); b }
- Some(Ok(WsMsg::Ping(d))) => { let _ = write.send(WsMsg::Pong(d)).await; continue; }
- Some(Ok(WsMsg::Close(_))) | None => { tracing::info!("Lark: WS closed — reconnecting"); break; }
+ Some(Ok(ws_msg)) => {
+ if should_refresh_last_recv(&ws_msg) {
+ last_recv = Instant::now();
+ }
+ match ws_msg {
+ WsMsg::Binary(b) => b,
+ WsMsg::Ping(d) => { let _ = write.send(WsMsg::Pong(d)).await; continue; }
+ WsMsg::Pong(_) => continue,
+ WsMsg::Close(_) => { tracing::info!("Lark: WS closed — reconnecting"); break; }
+ _ => continue,
+ }
+ }
+ None => { tracing::info!("Lark: WS closed — reconnecting"); break; }
Some(Err(e)) => { tracing::error!("Lark: WS read error: {e}"); break; }
- _ => continue,
};
let frame = match PbFrame::decode(&raw[..]) {
@@ -363,7 +378,7 @@ impl LarkChannel {
let mut ack = frame.clone();
ack.payload = Some(br#"{"code":200,"headers":{},"data":[]}"#.to_vec());
ack.headers.push(PbHeader { key: "biz_rt".into(), value: "0".into() });
- let _ = write.send(WsMsg::Binary(ack.encode_to_vec())).await;
+ let _ = write.send(WsMsg::Binary(ack.encode_to_vec().into())).await;
}
// Fragment reassembly
@@ -459,6 +474,7 @@ impl LarkChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
tracing::debug!("Lark WS: message in {}", lark_msg.chat_id);
@@ -620,6 +636,7 @@ impl LarkChannel {
content: text,
channel: "lark".to_string(),
timestamp,
+ thread_ts: None,
});
messages
@@ -898,6 +915,21 @@ mod tests {
assert_eq!(ch.name(), "lark");
}
+ #[test]
+ fn lark_ws_activity_refreshes_heartbeat_watchdog() {
+ assert!(should_refresh_last_recv(&WsMsg::Binary(
+ vec![1, 2, 3].into()
+ )));
+ assert!(should_refresh_last_recv(&WsMsg::Ping(vec![9, 9].into())));
+ assert!(should_refresh_last_recv(&WsMsg::Pong(vec![8, 8].into())));
+ }
+
+ #[test]
+ fn lark_ws_non_activity_frames_do_not_refresh_heartbeat_watchdog() {
+ assert!(!should_refresh_last_recv(&WsMsg::Text("hello".into())));
+ assert!(!should_refresh_last_recv(&WsMsg::Close(None)));
+ }
+
#[test]
fn lark_user_allowed_exact() {
let ch = make_channel();
diff --git a/src/channels/linq.rs b/src/channels/linq.rs
new file mode 100644
index 0000000..123322f
--- /dev/null
+++ b/src/channels/linq.rs
@@ -0,0 +1,793 @@
+use super::traits::{Channel, ChannelMessage, SendMessage};
+use async_trait::async_trait;
+use uuid::Uuid;
+
+/// Linq channel — uses the Linq Partner V3 API for iMessage, RCS, and SMS.
+///
+/// This channel operates in webhook mode (push-based) rather than polling.
+/// Messages are received via the gateway's `/linq` webhook endpoint.
+/// The `listen` method here is a keepalive placeholder; actual message handling
+/// happens in the gateway when Linq sends webhook events.
+pub struct LinqChannel {
+ api_token: String,
+ from_phone: String,
+ allowed_senders: Vec,
+ client: reqwest::Client,
+}
+
+const LINQ_API_BASE: &str = "https://api.linqapp.com/api/partner/v3";
+
+impl LinqChannel {
+ pub fn new(api_token: String, from_phone: String, allowed_senders: Vec) -> Self {
+ Self {
+ api_token,
+ from_phone,
+ allowed_senders,
+ client: reqwest::Client::new(),
+ }
+ }
+
+ /// Check if a sender phone number is allowed (E.164 format: +1234567890)
+ fn is_sender_allowed(&self, phone: &str) -> bool {
+ self.allowed_senders.iter().any(|n| n == "*" || n == phone)
+ }
+
+ /// Get the bot's phone number
+ pub fn phone_number(&self) -> &str {
+ &self.from_phone
+ }
+
+ fn media_part_to_image_marker(part: &serde_json::Value) -> Option {
+ let source = part
+ .get("url")
+ .or_else(|| part.get("value"))
+ .and_then(|value| value.as_str())
+ .map(str::trim)
+ .filter(|value| !value.is_empty())?;
+
+ let mime_type = part
+ .get("mime_type")
+ .and_then(|value| value.as_str())
+ .map(str::trim)
+ .unwrap_or_default()
+ .to_ascii_lowercase();
+
+ if !mime_type.starts_with("image/") {
+ return None;
+ }
+
+ Some(format!("[IMAGE:{source}]"))
+ }
+
+ /// Parse an incoming webhook payload from Linq and extract messages.
+ ///
+ /// Linq webhook envelope:
+ /// ```json
+ /// {
+ /// "api_version": "v3",
+ /// "event_type": "message.received",
+ /// "event_id": "...",
+ /// "created_at": "...",
+ /// "trace_id": "...",
+ /// "data": {
+ /// "chat_id": "...",
+ /// "from": "+1...",
+ /// "recipient_phone": "+1...",
+ /// "is_from_me": false,
+ /// "service": "iMessage",
+ /// "message": {
+ /// "id": "...",
+ /// "parts": [{ "type": "text", "value": "..." }]
+ /// }
+ /// }
+ /// }
+ /// ```
+ pub fn parse_webhook_payload(&self, payload: &serde_json::Value) -> Vec {
+ let mut messages = Vec::new();
+
+ // Only handle message.received events
+ let event_type = payload
+ .get("event_type")
+ .and_then(|e| e.as_str())
+ .unwrap_or("");
+ if event_type != "message.received" {
+ tracing::debug!("Linq: skipping non-message event: {event_type}");
+ return messages;
+ }
+
+ let Some(data) = payload.get("data") else {
+ return messages;
+ };
+
+ // Skip messages sent by the bot itself
+ if data
+ .get("is_from_me")
+ .and_then(|v| v.as_bool())
+ .unwrap_or(false)
+ {
+ tracing::debug!("Linq: skipping is_from_me message");
+ return messages;
+ }
+
+ // Get sender phone number
+ let Some(from) = data.get("from").and_then(|f| f.as_str()) else {
+ return messages;
+ };
+
+ // Normalize to E.164 format
+ let normalized_from = if from.starts_with('+') {
+ from.to_string()
+ } else {
+ format!("+{from}")
+ };
+
+ // Check allowlist
+ if !self.is_sender_allowed(&normalized_from) {
+ tracing::warn!(
+ "Linq: ignoring message from unauthorized sender: {normalized_from}. \
+ Add to channels.linq.allowed_senders in config.toml, \
+ or run `zeroclaw onboard --channels-only` to configure interactively."
+ );
+ return messages;
+ }
+
+ // Get chat_id for reply routing
+ let chat_id = data
+ .get("chat_id")
+ .and_then(|c| c.as_str())
+ .unwrap_or("")
+ .to_string();
+
+ // Extract text from message parts
+ let Some(message) = data.get("message") else {
+ return messages;
+ };
+
+ let Some(parts) = message.get("parts").and_then(|p| p.as_array()) else {
+ return messages;
+ };
+
+ let content_parts: Vec = parts
+ .iter()
+ .filter_map(|part| {
+ let part_type = part.get("type").and_then(|t| t.as_str())?;
+ match part_type {
+ "text" => part
+ .get("value")
+ .and_then(|v| v.as_str())
+ .map(ToString::to_string),
+ "media" | "image" => {
+ if let Some(marker) = Self::media_part_to_image_marker(part) {
+ Some(marker)
+ } else {
+ tracing::debug!("Linq: skipping unsupported {part_type} part");
+ None
+ }
+ }
+ _ => {
+ tracing::debug!("Linq: skipping {part_type} part");
+ None
+ }
+ }
+ })
+ .collect();
+
+ if content_parts.is_empty() {
+ return messages;
+ }
+
+ let content = content_parts.join("\n").trim().to_string();
+
+ if content.is_empty() {
+ return messages;
+ }
+
+ // Get timestamp from created_at or use current time
+ let timestamp = payload
+ .get("created_at")
+ .and_then(|t| t.as_str())
+ .and_then(|t| {
+ chrono::DateTime::parse_from_rfc3339(t)
+ .ok()
+ .map(|dt| dt.timestamp().cast_unsigned())
+ })
+ .unwrap_or_else(|| {
+ std::time::SystemTime::now()
+ .duration_since(std::time::UNIX_EPOCH)
+ .unwrap_or_default()
+ .as_secs()
+ });
+
+ // Use chat_id as reply_target so replies go to the right conversation
+ let reply_target = if chat_id.is_empty() {
+ normalized_from.clone()
+ } else {
+ chat_id
+ };
+
+ messages.push(ChannelMessage {
+ id: Uuid::new_v4().to_string(),
+ reply_target,
+ sender: normalized_from,
+ content,
+ channel: "linq".to_string(),
+ timestamp,
+ thread_ts: None,
+ });
+
+ messages
+ }
+}
+
+#[async_trait]
+impl Channel for LinqChannel {
+ fn name(&self) -> &str {
+ "linq"
+ }
+
+ async fn send(&self, message: &SendMessage) -> anyhow::Result<()> {
+ // If reply_target looks like a chat_id, send to existing chat.
+ // Otherwise create a new chat with the recipient phone number.
+ let recipient = &message.recipient;
+
+ let body = serde_json::json!({
+ "message": {
+ "parts": [{
+ "type": "text",
+ "value": message.content
+ }]
+ }
+ });
+
+ // Try sending to existing chat (recipient is chat_id)
+ let url = format!("{LINQ_API_BASE}/chats/{recipient}/messages");
+
+ let resp = self
+ .client
+ .post(&url)
+ .bearer_auth(&self.api_token)
+ .header("Content-Type", "application/json")
+ .json(&body)
+ .send()
+ .await?;
+
+ if resp.status().is_success() {
+ return Ok(());
+ }
+
+ // If the chat_id-based send failed with 404, try creating a new chat
+ if resp.status() == reqwest::StatusCode::NOT_FOUND {
+ let new_chat_body = serde_json::json!({
+ "from": self.from_phone,
+ "to": [recipient],
+ "message": {
+ "parts": [{
+ "type": "text",
+ "value": message.content
+ }]
+ }
+ });
+
+ let create_resp = self
+ .client
+ .post(format!("{LINQ_API_BASE}/chats"))
+ .bearer_auth(&self.api_token)
+ .header("Content-Type", "application/json")
+ .json(&new_chat_body)
+ .send()
+ .await?;
+
+ if !create_resp.status().is_success() {
+ let status = create_resp.status();
+ let error_body = create_resp.text().await.unwrap_or_default();
+ tracing::error!("Linq create chat failed: {status} — {error_body}");
+ anyhow::bail!("Linq API error: {status}");
+ }
+
+ return Ok(());
+ }
+
+ let status = resp.status();
+ let error_body = resp.text().await.unwrap_or_default();
+ tracing::error!("Linq send failed: {status} — {error_body}");
+ anyhow::bail!("Linq API error: {status}");
+ }
+
+ async fn listen(&self, _tx: tokio::sync::mpsc::Sender) -> anyhow::Result<()> {
+ // Linq uses webhooks (push-based), not polling.
+ // Messages are received via the gateway's /linq endpoint.
+ tracing::info!(
+ "Linq channel active (webhook mode). \
+ Configure Linq webhook to POST to your gateway's /linq endpoint."
+ );
+
+ // Keep the task alive — it will be cancelled when the channel shuts down
+ loop {
+ tokio::time::sleep(std::time::Duration::from_secs(3600)).await;
+ }
+ }
+
+ async fn health_check(&self) -> bool {
+ // Check if we can reach the Linq API
+ let url = format!("{LINQ_API_BASE}/phonenumbers");
+
+ self.client
+ .get(&url)
+ .bearer_auth(&self.api_token)
+ .send()
+ .await
+ .map(|r| r.status().is_success())
+ .unwrap_or(false)
+ }
+
+ async fn start_typing(&self, recipient: &str) -> anyhow::Result<()> {
+ let url = format!("{LINQ_API_BASE}/chats/{recipient}/typing");
+
+ let resp = self
+ .client
+ .post(&url)
+ .bearer_auth(&self.api_token)
+ .send()
+ .await?;
+
+ if !resp.status().is_success() {
+ tracing::debug!("Linq start_typing failed: {}", resp.status());
+ }
+
+ Ok(())
+ }
+
+ async fn stop_typing(&self, recipient: &str) -> anyhow::Result<()> {
+ let url = format!("{LINQ_API_BASE}/chats/{recipient}/typing");
+
+ let resp = self
+ .client
+ .delete(&url)
+ .bearer_auth(&self.api_token)
+ .send()
+ .await?;
+
+ if !resp.status().is_success() {
+ tracing::debug!("Linq stop_typing failed: {}", resp.status());
+ }
+
+ Ok(())
+ }
+}
+
+/// Verify a Linq webhook signature.
+///
+/// Linq signs webhooks with HMAC-SHA256 over `"{timestamp}.{body}"`.
+/// The signature is sent in `X-Webhook-Signature` (hex-encoded) and the
+/// timestamp in `X-Webhook-Timestamp`. Reject timestamps older than 300s.
+pub fn verify_linq_signature(secret: &str, body: &str, timestamp: &str, signature: &str) -> bool {
+ use hmac::{Hmac, Mac};
+ use sha2::Sha256;
+
+ // Reject stale timestamps (>300s old)
+ if let Ok(ts) = timestamp.parse::() {
+ let now = chrono::Utc::now().timestamp();
+ if (now - ts).unsigned_abs() > 300 {
+ tracing::warn!("Linq: rejecting stale webhook timestamp ({ts}, now={now})");
+ return false;
+ }
+ } else {
+ tracing::warn!("Linq: invalid webhook timestamp: {timestamp}");
+ return false;
+ }
+
+ // Compute HMAC-SHA256 over "{timestamp}.{body}"
+ let message = format!("{timestamp}.{body}");
+ let Ok(mut mac) = Hmac::::new_from_slice(secret.as_bytes()) else {
+ return false;
+ };
+ mac.update(message.as_bytes());
+ let signature_hex = signature
+ .trim()
+ .strip_prefix("sha256=")
+ .unwrap_or(signature);
+ let Ok(provided) = hex::decode(signature_hex.trim()) else {
+ tracing::warn!("Linq: invalid webhook signature format");
+ return false;
+ };
+
+ // Constant-time comparison via HMAC verify.
+ mac.verify_slice(&provided).is_ok()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ fn make_channel() -> LinqChannel {
+ LinqChannel::new(
+ "test-token".into(),
+ "+15551234567".into(),
+ vec!["+1234567890".into()],
+ )
+ }
+
+ #[test]
+ fn linq_channel_name() {
+ let ch = make_channel();
+ assert_eq!(ch.name(), "linq");
+ }
+
+ #[test]
+ fn linq_sender_allowed_exact() {
+ let ch = make_channel();
+ assert!(ch.is_sender_allowed("+1234567890"));
+ assert!(!ch.is_sender_allowed("+9876543210"));
+ }
+
+ #[test]
+ fn linq_sender_allowed_wildcard() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec!["*".into()]);
+ assert!(ch.is_sender_allowed("+1234567890"));
+ assert!(ch.is_sender_allowed("+9999999999"));
+ }
+
+ #[test]
+ fn linq_sender_allowed_empty() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec![]);
+ assert!(!ch.is_sender_allowed("+1234567890"));
+ }
+
+ #[test]
+ fn linq_parse_valid_text_message() {
+ let ch = make_channel();
+ let payload = serde_json::json!({
+ "api_version": "v3",
+ "event_type": "message.received",
+ "event_id": "evt-123",
+ "created_at": "2025-01-15T12:00:00Z",
+ "trace_id": "trace-456",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "+1234567890",
+ "recipient_phone": "+15551234567",
+ "is_from_me": false,
+ "service": "iMessage",
+ "message": {
+ "id": "msg-abc",
+ "parts": [{
+ "type": "text",
+ "value": "Hello ZeroClaw!"
+ }]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert_eq!(msgs.len(), 1);
+ assert_eq!(msgs[0].sender, "+1234567890");
+ assert_eq!(msgs[0].content, "Hello ZeroClaw!");
+ assert_eq!(msgs[0].channel, "linq");
+ assert_eq!(msgs[0].reply_target, "chat-789");
+ }
+
+ #[test]
+ fn linq_parse_skip_is_from_me() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec!["*".into()]);
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "+1234567890",
+ "is_from_me": true,
+ "message": {
+ "id": "msg-abc",
+ "parts": [{ "type": "text", "value": "My own message" }]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert!(msgs.is_empty(), "is_from_me messages should be skipped");
+ }
+
+ #[test]
+ fn linq_parse_skip_non_message_event() {
+ let ch = make_channel();
+ let payload = serde_json::json!({
+ "event_type": "message.delivered",
+ "data": {
+ "chat_id": "chat-789",
+ "message_id": "msg-abc"
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert!(msgs.is_empty(), "Non-message events should be skipped");
+ }
+
+ #[test]
+ fn linq_parse_unauthorized_sender() {
+ let ch = make_channel();
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "+9999999999",
+ "is_from_me": false,
+ "message": {
+ "id": "msg-abc",
+ "parts": [{ "type": "text", "value": "Spam" }]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert!(msgs.is_empty(), "Unauthorized senders should be filtered");
+ }
+
+ #[test]
+ fn linq_parse_empty_payload() {
+ let ch = make_channel();
+ let payload = serde_json::json!({});
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert!(msgs.is_empty());
+ }
+
+ #[test]
+ fn linq_parse_media_only_translated_to_image_marker() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec!["*".into()]);
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "+1234567890",
+ "is_from_me": false,
+ "message": {
+ "id": "msg-abc",
+ "parts": [{
+ "type": "media",
+ "url": "https://example.com/image.jpg",
+ "mime_type": "image/jpeg"
+ }]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert_eq!(msgs.len(), 1);
+ assert_eq!(msgs[0].content, "[IMAGE:https://example.com/image.jpg]");
+ }
+
+ #[test]
+ fn linq_parse_media_non_image_still_skipped() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec!["*".into()]);
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "+1234567890",
+ "is_from_me": false,
+ "message": {
+ "id": "msg-abc",
+ "parts": [{
+ "type": "media",
+ "url": "https://example.com/sound.mp3",
+ "mime_type": "audio/mpeg"
+ }]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert!(msgs.is_empty(), "Non-image media should still be skipped");
+ }
+
+ #[test]
+ fn linq_parse_multiple_text_parts() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec!["*".into()]);
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "+1234567890",
+ "is_from_me": false,
+ "message": {
+ "id": "msg-abc",
+ "parts": [
+ { "type": "text", "value": "First part" },
+ { "type": "text", "value": "Second part" }
+ ]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert_eq!(msgs.len(), 1);
+ assert_eq!(msgs[0].content, "First part\nSecond part");
+ }
+
+ /// Fixture secret used exclusively in signature-verification unit tests (not a real credential).
+ const TEST_WEBHOOK_SECRET: &str = "test_webhook_secret";
+
+ #[test]
+ fn linq_signature_verification_valid() {
+ let secret = TEST_WEBHOOK_SECRET;
+ let body = r#"{"event_type":"message.received"}"#;
+ let now = chrono::Utc::now().timestamp().to_string();
+
+ // Compute expected signature
+ use hmac::{Hmac, Mac};
+ use sha2::Sha256;
+ let message = format!("{now}.{body}");
+ let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes()).unwrap();
+ mac.update(message.as_bytes());
+ let signature = hex::encode(mac.finalize().into_bytes());
+
+ assert!(verify_linq_signature(secret, body, &now, &signature));
+ }
+
+ #[test]
+ fn linq_signature_verification_invalid() {
+ let secret = TEST_WEBHOOK_SECRET;
+ let body = r#"{"event_type":"message.received"}"#;
+ let now = chrono::Utc::now().timestamp().to_string();
+
+ assert!(!verify_linq_signature(
+ secret,
+ body,
+ &now,
+ "deadbeefdeadbeefdeadbeef"
+ ));
+ }
+
+ #[test]
+ fn linq_signature_verification_stale_timestamp() {
+ let secret = TEST_WEBHOOK_SECRET;
+ let body = r#"{"event_type":"message.received"}"#;
+ // 10 minutes ago — stale
+ let stale_ts = (chrono::Utc::now().timestamp() - 600).to_string();
+
+ // Even with correct signature, stale timestamp should fail
+ use hmac::{Hmac, Mac};
+ use sha2::Sha256;
+ let message = format!("{stale_ts}.{body}");
+ let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes()).unwrap();
+ mac.update(message.as_bytes());
+ let signature = hex::encode(mac.finalize().into_bytes());
+
+ assert!(
+ !verify_linq_signature(secret, body, &stale_ts, &signature),
+ "Stale timestamps (>300s) should be rejected"
+ );
+ }
+
+ #[test]
+ fn linq_signature_verification_accepts_sha256_prefix() {
+ let secret = TEST_WEBHOOK_SECRET;
+ let body = r#"{"event_type":"message.received"}"#;
+ let now = chrono::Utc::now().timestamp().to_string();
+
+ use hmac::{Hmac, Mac};
+ use sha2::Sha256;
+ let message = format!("{now}.{body}");
+ let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes()).unwrap();
+ mac.update(message.as_bytes());
+ let signature = format!("sha256={}", hex::encode(mac.finalize().into_bytes()));
+
+ assert!(verify_linq_signature(secret, body, &now, &signature));
+ }
+
+ #[test]
+ fn linq_signature_verification_accepts_uppercase_hex() {
+ let secret = TEST_WEBHOOK_SECRET;
+ let body = r#"{"event_type":"message.received"}"#;
+ let now = chrono::Utc::now().timestamp().to_string();
+
+ use hmac::{Hmac, Mac};
+ use sha2::Sha256;
+ let message = format!("{now}.{body}");
+ let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes()).unwrap();
+ mac.update(message.as_bytes());
+ let signature = hex::encode(mac.finalize().into_bytes()).to_ascii_uppercase();
+
+ assert!(verify_linq_signature(secret, body, &now, &signature));
+ }
+
+ #[test]
+ fn linq_parse_normalizes_phone_with_plus() {
+ let ch = LinqChannel::new(
+ "tok".into(),
+ "+15551234567".into(),
+ vec!["+1234567890".into()],
+ );
+ // API sends without +, normalize to +
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "1234567890",
+ "is_from_me": false,
+ "message": {
+ "id": "msg-abc",
+ "parts": [{ "type": "text", "value": "Hi" }]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert_eq!(msgs.len(), 1);
+ assert_eq!(msgs[0].sender, "+1234567890");
+ }
+
+ #[test]
+ fn linq_parse_missing_data() {
+ let ch = make_channel();
+ let payload = serde_json::json!({
+ "event_type": "message.received"
+ });
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert!(msgs.is_empty());
+ }
+
+ #[test]
+ fn linq_parse_missing_message_parts() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec!["*".into()]);
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "+1234567890",
+ "is_from_me": false,
+ "message": {
+ "id": "msg-abc"
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert!(msgs.is_empty());
+ }
+
+ #[test]
+ fn linq_parse_empty_text_value() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec!["*".into()]);
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "chat_id": "chat-789",
+ "from": "+1234567890",
+ "is_from_me": false,
+ "message": {
+ "id": "msg-abc",
+ "parts": [{ "type": "text", "value": "" }]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert!(msgs.is_empty(), "Empty text should be skipped");
+ }
+
+ #[test]
+ fn linq_parse_fallback_reply_target_when_no_chat_id() {
+ let ch = LinqChannel::new("tok".into(), "+15551234567".into(), vec!["*".into()]);
+ let payload = serde_json::json!({
+ "event_type": "message.received",
+ "data": {
+ "from": "+1234567890",
+ "is_from_me": false,
+ "message": {
+ "id": "msg-abc",
+ "parts": [{ "type": "text", "value": "Hi" }]
+ }
+ }
+ });
+
+ let msgs = ch.parse_webhook_payload(&payload);
+ assert_eq!(msgs.len(), 1);
+ // Falls back to sender phone number when no chat_id
+ assert_eq!(msgs[0].reply_target, "+1234567890");
+ }
+
+ #[test]
+ fn linq_phone_number_accessor() {
+ let ch = make_channel();
+ assert_eq!(ch.phone_number(), "+15551234567");
+ }
+}
diff --git a/src/channels/matrix.rs b/src/channels/matrix.rs
index 0b063c5..9c18e3a 100644
--- a/src/channels/matrix.rs
+++ b/src/channels/matrix.rs
@@ -24,7 +24,7 @@ pub struct MatrixChannel {
access_token: String,
room_id: String,
allowed_users: Vec<String>,
- session_user_id_hint: Option<String>,
+ session_owner_hint: Option<String>,
session_device_id_hint: Option<String>,
resolved_room_id_cache: Arc<RwLock<Option<String>>>,
sdk_client: Arc<OnceCell<matrix_sdk::Client>>,
@@ -108,7 +108,7 @@ impl MatrixChannel {
access_token: String,
room_id: String,
allowed_users: Vec,
- user_id_hint: Option<String>,
+ owner_hint: Option<String>,
device_id_hint: Option<String>,
) -> Self {
let homeserver = homeserver.trim_end_matches('/').to_string();
@@ -125,7 +125,7 @@ impl MatrixChannel {
access_token,
room_id,
allowed_users,
- session_user_id_hint: Self::normalize_optional_field(user_id_hint),
+ session_owner_hint: Self::normalize_optional_field(owner_hint),
session_device_id_hint: Self::normalize_optional_field(device_id_hint),
resolved_room_id_cache: Arc::new(RwLock::new(None)),
sdk_client: Arc::new(OnceCell::new()),
@@ -245,7 +245,7 @@ impl MatrixChannel {
let whoami = match identity {
Ok(whoami) => Some(whoami),
Err(error) => {
- if self.session_user_id_hint.is_some() && self.session_device_id_hint.is_some()
+ if self.session_owner_hint.is_some() && self.session_device_id_hint.is_some()
{
tracing::warn!(
"Matrix whoami failed; falling back to configured session hints for E2EE session restore: {error}"
@@ -258,18 +258,18 @@ impl MatrixChannel {
};
let resolved_user_id = if let Some(whoami) = whoami.as_ref() {
- if let Some(hinted) = self.session_user_id_hint.as_ref() {
+ if let Some(hinted) = self.session_owner_hint.as_ref() {
if hinted != &whoami.user_id {
tracing::warn!(
"Matrix configured user_id '{}' does not match whoami '{}'; using whoami.",
- hinted,
- whoami.user_id
+ crate::security::redact(hinted),
+ crate::security::redact(&whoami.user_id)
);
}
}
whoami.user_id.clone()
} else {
- self.session_user_id_hint.clone().ok_or_else(|| {
+ self.session_owner_hint.clone().ok_or_else(|| {
anyhow::anyhow!(
"Matrix session restore requires user_id when whoami is unavailable"
)
@@ -282,8 +282,8 @@ impl MatrixChannel {
if whoami_device_id != hinted {
tracing::warn!(
"Matrix configured device_id '{}' does not match whoami '{}'; using whoami.",
- hinted,
- whoami_device_id
+ crate::security::redact(hinted),
+ crate::security::redact(whoami_device_id)
);
}
whoami_device_id.clone()
@@ -513,7 +513,7 @@ impl Channel for MatrixChannel {
let my_user_id: OwnedUserId = match self.get_my_user_id().await {
Ok(user_id) => user_id.parse()?,
Err(error) => {
- if let Some(hinted) = self.session_user_id_hint.as_ref() {
+ if let Some(hinted) = self.session_owner_hint.as_ref() {
tracing::warn!(
"Matrix whoami failed while resolving listener user_id; using configured user_id hint: {error}"
);
@@ -596,6 +596,7 @@ impl Channel for MatrixChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
let _ = tx.send(msg).await;
@@ -714,7 +715,7 @@ mod tests {
Some(" DEVICE123 ".to_string()),
);
- assert_eq!(ch.session_user_id_hint.as_deref(), Some("@bot:matrix.org"));
+ assert_eq!(ch.session_owner_hint.as_deref(), Some("@bot:matrix.org"));
assert_eq!(ch.session_device_id_hint.as_deref(), Some("DEVICE123"));
}
@@ -726,10 +727,10 @@ mod tests {
"!r:m".to_string(),
vec![],
Some(" ".to_string()),
- Some("".to_string()),
+ Some(String::new()),
);
- assert!(ch.session_user_id_hint.is_none());
+ assert!(ch.session_owner_hint.is_none());
assert!(ch.session_device_id_hint.is_none());
}
diff --git a/src/channels/mattermost.rs b/src/channels/mattermost.rs
index 95461de..55ecdbb 100644
--- a/src/channels/mattermost.rs
+++ b/src/channels/mattermost.rs
@@ -321,6 +321,7 @@ impl MattermostChannel {
channel: "mattermost".to_string(),
#[allow(clippy::cast_sign_loss)]
timestamp: (create_at / 1000) as u64,
+ thread_ts: None,
})
}
}
diff --git a/src/channels/mod.rs b/src/channels/mod.rs
index 0fff1ec..3d48c52 100644
--- a/src/channels/mod.rs
+++ b/src/channels/mod.rs
@@ -1,3 +1,19 @@
+//! Channel subsystem for messaging platform integrations.
+//!
+//! This module provides the multi-channel messaging infrastructure that connects
+//! ZeroClaw to external platforms. Each channel implements the [`Channel`] trait
+//! defined in [`traits`], which provides a uniform interface for sending messages,
+//! listening for incoming messages, health checking, and typing indicators.
+//!
+//! Channels are instantiated by [`start_channels`] based on the runtime configuration.
+//! The subsystem manages per-sender conversation history, concurrent message processing
+//! with configurable parallelism, and exponential-backoff reconnection for resilience.
+//!
+//! # Extension
+//!
+//! To add a new channel, implement [`Channel`] in a new submodule and wire it into
+//! [`start_channels`]. See `AGENTS.md` §7.2 for the full change playbook.
+
pub mod cli;
pub mod dingtalk;
pub mod discord;
@@ -5,6 +21,8 @@ pub mod email_channel;
pub mod imessage;
pub mod irc;
pub mod lark;
+pub mod linq;
+#[cfg(feature = "channel-matrix")]
pub mod matrix;
pub mod mattermost;
pub mod qq;
@@ -13,6 +31,10 @@ pub mod slack;
pub mod telegram;
pub mod traits;
pub mod whatsapp;
+#[cfg(feature = "whatsapp-web")]
+pub mod whatsapp_storage;
+#[cfg(feature = "whatsapp-web")]
+pub mod whatsapp_web;
pub use cli::CliChannel;
pub use dingtalk::DingTalkChannel;
@@ -21,6 +43,8 @@ pub use email_channel::EmailChannel;
pub use imessage::IMessageChannel;
pub use irc::IrcChannel;
pub use lark::LarkChannel;
+pub use linq::LinqChannel;
+#[cfg(feature = "channel-matrix")]
pub use matrix::MatrixChannel;
pub use mattermost::MattermostChannel;
pub use qq::QQChannel;
@@ -29,6 +53,8 @@ pub use slack::SlackChannel;
pub use telegram::TelegramChannel;
pub use traits::{Channel, SendMessage};
pub use whatsapp::WhatsAppChannel;
+#[cfg(feature = "whatsapp-web")]
+pub use whatsapp_web::WhatsAppWebChannel;
use crate::agent::loop_::{build_tool_instructions, run_tool_call_loop};
use crate::config::Config;
@@ -46,33 +72,59 @@ use std::collections::HashMap;
use std::fmt::Write;
use std::path::{Path, PathBuf};
use std::process::Command;
-use std::sync::{Arc, Mutex};
-use std::time::{Duration, Instant};
+use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
+use std::sync::{Arc, Mutex, OnceLock};
+use std::time::{Duration, Instant, SystemTime};
use tokio_util::sync::CancellationToken;
/// Per-sender conversation history for channel messages.
type ConversationHistoryMap = Arc<Mutex<HashMap<String, Vec<ChatMessage>>>>;
/// Maximum history messages to keep per sender.
const MAX_CHANNEL_HISTORY: usize = 50;
+/// Minimum user-message length (in chars) for auto-save to memory.
+/// Messages shorter than this (e.g. "ok", "thanks") are not stored,
+/// reducing noise in memory recall.
+const AUTOSAVE_MIN_MESSAGE_CHARS: usize = 20;
/// Maximum characters per injected workspace file (matches `OpenClaw` default).
const BOOTSTRAP_MAX_CHARS: usize = 20_000;
const DEFAULT_CHANNEL_INITIAL_BACKOFF_SECS: u64 = 2;
const DEFAULT_CHANNEL_MAX_BACKOFF_SECS: u64 = 60;
-/// Timeout for processing a single channel message (LLM + tools).
-/// 300s for on-device LLMs (Ollama) which are slower than cloud APIs.
+const MIN_CHANNEL_MESSAGE_TIMEOUT_SECS: u64 = 30;
+/// Default timeout for processing a single channel message (LLM + tools).
+/// Used as fallback when not configured in channels_config.message_timeout_secs.
const CHANNEL_MESSAGE_TIMEOUT_SECS: u64 = 300;
+/// Cap timeout scaling so large max_tool_iterations values do not create unbounded waits.
+const CHANNEL_MESSAGE_TIMEOUT_SCALE_CAP: u64 = 4;
const CHANNEL_PARALLELISM_PER_CHANNEL: usize = 4;
const CHANNEL_MIN_IN_FLIGHT_MESSAGES: usize = 8;
const CHANNEL_MAX_IN_FLIGHT_MESSAGES: usize = 64;
const CHANNEL_TYPING_REFRESH_INTERVAL_SECS: u64 = 4;
const MODEL_CACHE_FILE: &str = "models_cache.json";
const MODEL_CACHE_PREVIEW_LIMIT: usize = 10;
+const MEMORY_CONTEXT_MAX_ENTRIES: usize = 4;
+const MEMORY_CONTEXT_ENTRY_MAX_CHARS: usize = 800;
+const MEMORY_CONTEXT_MAX_CHARS: usize = 4_000;
+const CHANNEL_HISTORY_COMPACT_KEEP_MESSAGES: usize = 12;
+const CHANNEL_HISTORY_COMPACT_CONTENT_CHARS: usize = 600;
type ProviderCacheMap = Arc<Mutex<HashMap<String, Arc<dyn Provider>>>>;
type RouteSelectionMap = Arc<Mutex<HashMap<String, ChannelRouteSelection>>>;
+fn effective_channel_message_timeout_secs(configured: u64) -> u64 {
+ configured.max(MIN_CHANNEL_MESSAGE_TIMEOUT_SECS)
+}
+
+fn channel_message_timeout_budget_secs(
+ message_timeout_secs: u64,
+ max_tool_iterations: usize,
+) -> u64 {
+ let iterations = max_tool_iterations.max(1) as u64;
+ let scale = iterations.min(CHANNEL_MESSAGE_TIMEOUT_SCALE_CAP);
+ message_timeout_secs.saturating_mul(scale)
+}
+
#[derive(Debug, Clone, PartialEq, Eq)]
struct ChannelRouteSelection {
provider: String,
@@ -98,6 +150,33 @@ struct ModelCacheEntry {
models: Vec<String>,
}
+#[derive(Debug, Clone)]
+struct ChannelRuntimeDefaults {
+ default_provider: String,
+ model: String,
+ temperature: f64,
+ api_key: Option,
+ api_url: Option,
+ reliability: crate::config::ReliabilityConfig,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+struct ConfigFileStamp {
+ modified: SystemTime,
+ len: u64,
+}
+
+#[derive(Debug, Clone)]
+struct RuntimeConfigState {
+ defaults: ChannelRuntimeDefaults,
+ last_applied_stamp: Option,
+}
+
+fn runtime_config_store() -> &'static Mutex<HashMap<PathBuf, RuntimeConfigState>> {
+ static STORE: OnceLock<Mutex<HashMap<PathBuf, RuntimeConfigState>>> = OnceLock::new();
+ STORE.get_or_init(|| Mutex::new(HashMap::new()))
+}
+
#[derive(Clone)]
struct ChannelRuntimeContext {
channels_by_name: Arc<HashMap<String, Arc<dyn Channel>>>,
@@ -120,6 +199,42 @@ struct ChannelRuntimeContext {
reliability: Arc<crate::config::ReliabilityConfig>,
provider_runtime_options: providers::ProviderRuntimeOptions,
workspace_dir: Arc<PathBuf>,
+ message_timeout_secs: u64,
+ interrupt_on_new_message: bool,
+ multimodal: crate::config::MultimodalConfig,
+}
+
+#[derive(Clone)]
+struct InFlightSenderTaskState {
+ task_id: u64,
+ cancellation: CancellationToken,
+ completion: Arc<InFlightTaskCompletion>,
+}
+
+struct InFlightTaskCompletion {
+ done: AtomicBool,
+ notify: tokio::sync::Notify,
+}
+
+impl InFlightTaskCompletion {
+ fn new() -> Self {
+ Self {
+ done: AtomicBool::new(false),
+ notify: tokio::sync::Notify::new(),
+ }
+ }
+
+ fn mark_done(&self) {
+ self.done.store(true, Ordering::Release);
+ self.notify.notify_waiters();
+ }
+
+ async fn wait(&self) {
+ if self.done.load(Ordering::Acquire) {
+ return;
+ }
+ self.notify.notified().await;
+ }
}
fn conversation_memory_key(msg: &traits::ChannelMessage) -> String {
@@ -130,6 +245,10 @@ fn conversation_history_key(msg: &traits::ChannelMessage) -> String {
format!("{}_{}", msg.channel, msg.sender)
}
+fn interruption_scope_key(msg: &traits::ChannelMessage) -> String {
+ format!("{}_{}_{}", msg.channel, msg.reply_target, msg.sender)
+}
+
fn channel_delivery_instructions(channel_name: &str) -> Option<&'static str> {
match channel_name {
"telegram" => Some(
@@ -139,6 +258,51 @@ fn channel_delivery_instructions(channel_name: &str) -> Option<&'static str> {
}
}
+fn build_channel_system_prompt(base_prompt: &str, channel_name: &str) -> String {
+ if let Some(instructions) = channel_delivery_instructions(channel_name) {
+ if base_prompt.is_empty() {
+ instructions.to_string()
+ } else {
+ format!("{base_prompt}\n\n{instructions}")
+ }
+ } else {
+ base_prompt.to_string()
+ }
+}
+
+fn normalize_cached_channel_turns(turns: Vec<ChatMessage>) -> Vec<ChatMessage> {
+ let mut normalized = Vec::with_capacity(turns.len());
+ let mut expecting_user = true;
+
+ for turn in turns {
+ match (expecting_user, turn.role.as_str()) {
+ (true, "user") => {
+ normalized.push(turn);
+ expecting_user = false;
+ }
+ (false, "assistant") => {
+ normalized.push(turn);
+ expecting_user = true;
+ }
+ // Interrupted channel turns can produce consecutive user messages
+ // (no assistant persisted yet). Merge instead of dropping.
+ (false, "user") | (true, "assistant") => {
+ if let Some(last_turn) = normalized.last_mut() {
+ if !turn.content.is_empty() {
+ if !last_turn.content.is_empty() {
+ last_turn.content.push_str("\n\n");
+ }
+ last_turn.content.push_str(&turn.content);
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+
+ normalized
+}
+
fn supports_runtime_model_switch(channel_name: &str) -> bool {
matches!(channel_name, "telegram" | "discord")
}
@@ -204,10 +368,176 @@ fn resolve_provider_alias(name: &str) -> Option<String> {
None
}
-fn default_route_selection(ctx: &ChannelRuntimeContext) -> ChannelRouteSelection {
- ChannelRouteSelection {
- provider: ctx.default_provider.as_str().to_string(),
+fn resolved_default_provider(config: &Config) -> String {
+ config
+ .default_provider
+ .clone()
+ .unwrap_or_else(|| "openrouter".to_string())
+}
+
+fn resolved_default_model(config: &Config) -> String {
+ config
+ .default_model
+ .clone()
+ .unwrap_or_else(|| "anthropic/claude-sonnet-4.6".to_string())
+}
+
+fn runtime_defaults_from_config(config: &Config) -> ChannelRuntimeDefaults {
+ ChannelRuntimeDefaults {
+ default_provider: resolved_default_provider(config),
+ model: resolved_default_model(config),
+ temperature: config.default_temperature,
+ api_key: config.api_key.clone(),
+ api_url: config.api_url.clone(),
+ reliability: config.reliability.clone(),
+ }
+}
+
+fn runtime_config_path(ctx: &ChannelRuntimeContext) -> Option<PathBuf> {
+ ctx.provider_runtime_options
+ .zeroclaw_dir
+ .as_ref()
+ .map(|dir| dir.join("config.toml"))
+}
+
+fn runtime_defaults_snapshot(ctx: &ChannelRuntimeContext) -> ChannelRuntimeDefaults {
+ if let Some(config_path) = runtime_config_path(ctx) {
+ let store = runtime_config_store()
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ if let Some(state) = store.get(&config_path) {
+ return state.defaults.clone();
+ }
+ }
+
+ ChannelRuntimeDefaults {
+ default_provider: ctx.default_provider.as_str().to_string(),
model: ctx.model.as_str().to_string(),
+ temperature: ctx.temperature,
+ api_key: ctx.api_key.clone(),
+ api_url: ctx.api_url.clone(),
+ reliability: (*ctx.reliability).clone(),
+ }
+}
+
+async fn config_file_stamp(path: &Path) -> Option<ConfigFileStamp> {
+ let metadata = tokio::fs::metadata(path).await.ok()?;
+ let modified = metadata.modified().ok()?;
+ Some(ConfigFileStamp {
+ modified,
+ len: metadata.len(),
+ })
+}
+
+fn decrypt_optional_secret_for_runtime_reload(
+ store: &crate::security::SecretStore,
+ value: &mut Option<String>,
+ field_name: &str,
+) -> Result<()> {
+ if let Some(raw) = value.clone() {
+ if crate::security::SecretStore::is_encrypted(&raw) {
+ *value = Some(
+ store
+ .decrypt(&raw)
+ .with_context(|| format!("Failed to decrypt {field_name}"))?,
+ );
+ }
+ }
+ Ok(())
+}
+
+async fn load_runtime_defaults_from_config_file(path: &Path) -> Result<ChannelRuntimeDefaults> {
+ let contents = tokio::fs::read_to_string(path)
+ .await
+ .with_context(|| format!("Failed to read {}", path.display()))?;
+ let mut parsed: Config =
+ toml::from_str(&contents).with_context(|| format!("Failed to parse {}", path.display()))?;
+ parsed.config_path = path.to_path_buf();
+
+ if let Some(zeroclaw_dir) = path.parent() {
+ let store = crate::security::SecretStore::new(zeroclaw_dir, parsed.secrets.encrypt);
+ decrypt_optional_secret_for_runtime_reload(&store, &mut parsed.api_key, "config.api_key")?;
+ }
+
+ parsed.apply_env_overrides();
+ Ok(runtime_defaults_from_config(&parsed))
+}
+
+async fn maybe_apply_runtime_config_update(ctx: &ChannelRuntimeContext) -> Result<()> {
+ let Some(config_path) = runtime_config_path(ctx) else {
+ return Ok(());
+ };
+
+ let Some(stamp) = config_file_stamp(&config_path).await else {
+ return Ok(());
+ };
+
+ {
+ let store = runtime_config_store()
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ if let Some(state) = store.get(&config_path) {
+ if state.last_applied_stamp == Some(stamp) {
+ return Ok(());
+ }
+ }
+ }
+
+ let next_defaults = load_runtime_defaults_from_config_file(&config_path).await?;
+ let next_default_provider = providers::create_resilient_provider_with_options(
+ &next_defaults.default_provider,
+ next_defaults.api_key.as_deref(),
+ next_defaults.api_url.as_deref(),
+ &next_defaults.reliability,
+ &ctx.provider_runtime_options,
+ )?;
+ let next_default_provider: Arc<dyn Provider> = Arc::from(next_default_provider);
+
+ if let Err(err) = next_default_provider.warmup().await {
+ tracing::warn!(
+ provider = %next_defaults.default_provider,
+ "Provider warmup failed after config reload: {err}"
+ );
+ }
+
+ {
+ let mut cache = ctx.provider_cache.lock().unwrap_or_else(|e| e.into_inner());
+ cache.clear();
+ cache.insert(
+ next_defaults.default_provider.clone(),
+ Arc::clone(&next_default_provider),
+ );
+ }
+
+ {
+ let mut store = runtime_config_store()
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ store.insert(
+ config_path.clone(),
+ RuntimeConfigState {
+ defaults: next_defaults.clone(),
+ last_applied_stamp: Some(stamp),
+ },
+ );
+ }
+
+ tracing::info!(
+ path = %config_path.display(),
+ provider = %next_defaults.default_provider,
+ model = %next_defaults.model,
+ temperature = next_defaults.temperature,
+ "Applied updated channel runtime config from disk"
+ );
+
+ Ok(())
+}
+
+fn default_route_selection(ctx: &ChannelRuntimeContext) -> ChannelRouteSelection {
+ let defaults = runtime_defaults_snapshot(ctx);
+ ChannelRouteSelection {
+ provider: defaults.default_provider,
+ model: defaults.model,
}
}
@@ -240,6 +570,81 @@ fn clear_sender_history(ctx: &ChannelRuntimeContext, sender_key: &str) {
.remove(sender_key);
}
+fn compact_sender_history(ctx: &ChannelRuntimeContext, sender_key: &str) -> bool {
+ let mut histories = ctx
+ .conversation_histories
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+
+ let Some(turns) = histories.get_mut(sender_key) else {
+ return false;
+ };
+
+ if turns.is_empty() {
+ return false;
+ }
+
+ let keep_from = turns
+ .len()
+ .saturating_sub(CHANNEL_HISTORY_COMPACT_KEEP_MESSAGES);
+ let mut compacted = normalize_cached_channel_turns(turns[keep_from..].to_vec());
+
+ for turn in &mut compacted {
+ if turn.content.chars().count() > CHANNEL_HISTORY_COMPACT_CONTENT_CHARS {
+ turn.content =
+ truncate_with_ellipsis(&turn.content, CHANNEL_HISTORY_COMPACT_CONTENT_CHARS);
+ }
+ }
+
+ if compacted.is_empty() {
+ turns.clear();
+ return false;
+ }
+
+ *turns = compacted;
+ true
+}
+
+fn append_sender_turn(ctx: &ChannelRuntimeContext, sender_key: &str, turn: ChatMessage) {
+ let mut histories = ctx
+ .conversation_histories
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ let turns = histories.entry(sender_key.to_string()).or_default();
+ turns.push(turn);
+ while turns.len() > MAX_CHANNEL_HISTORY {
+ turns.remove(0);
+ }
+}
+
+fn should_skip_memory_context_entry(key: &str, content: &str) -> bool {
+ if memory::is_assistant_autosave_key(key) {
+ return true;
+ }
+
+ if key.trim().to_ascii_lowercase().ends_with("_history") {
+ return true;
+ }
+
+ content.chars().count() > MEMORY_CONTEXT_MAX_CHARS
+}
+
+fn is_context_window_overflow_error(err: &anyhow::Error) -> bool {
+ let lower = err.to_string().to_lowercase();
+ [
+ "exceeds the context window",
+ "context window of this model",
+ "maximum context length",
+ "context length exceeded",
+ "too many tokens",
+ "token limit exceeded",
+ "prompt is too long",
+ "input is too long",
+ ]
+ .iter()
+ .any(|hint| lower.contains(hint))
+}
+
fn load_cached_model_preview(workspace_dir: &Path, provider_name: &str) -> Vec<String> {
let cache_path = workspace_dir.join("state").join(MODEL_CACHE_FILE);
let Ok(raw) = std::fs::read_to_string(cache_path) else {
@@ -267,10 +672,6 @@ async fn get_or_create_provider(
ctx: &ChannelRuntimeContext,
provider_name: &str,
) -> anyhow::Result<Arc<dyn Provider>> {
- if provider_name == ctx.default_provider.as_str() {
- return Ok(Arc::clone(&ctx.provider));
- }
-
if let Some(existing) = ctx
.provider_cache
.lock()
@@ -281,17 +682,22 @@ async fn get_or_create_provider(
return Ok(existing);
}
- let api_url = if provider_name == ctx.default_provider.as_str() {
- ctx.api_url.as_deref()
+ if provider_name == ctx.default_provider.as_str() {
+ return Ok(Arc::clone(&ctx.provider));
+ }
+
+ let defaults = runtime_defaults_snapshot(ctx);
+ let api_url = if provider_name == defaults.default_provider.as_str() {
+ defaults.api_url.as_deref()
} else {
None
};
let provider = providers::create_resilient_provider_with_options(
provider_name,
- ctx.api_key.as_deref(),
+ defaults.api_key.as_deref(),
api_url,
- &ctx.reliability,
+ &defaults.reliability,
&ctx.provider_runtime_options,
)?;
let provider: Arc<dyn Provider> = Arc::from(provider);
@@ -428,7 +834,7 @@ async fn handle_runtime_command_if_needed(
};
if let Err(err) = channel
- .send(&SendMessage::new(response, &msg.reply_target))
+ .send(&SendMessage::new(response, &msg.reply_target).in_thread(msg.thread_ts.clone()))
.await
{
tracing::warn!(
@@ -448,19 +854,43 @@ async fn build_memory_context(
let mut context = String::new();
if let Ok(entries) = mem.recall(user_msg, 5, None).await {
- let relevant: Vec<_> = entries
- .iter()
- .filter(|e| match e.score {
- Some(score) => score >= min_relevance_score,
- None => true, // keep entries without a score (e.g. non-vector backends)
- })
- .collect();
+ let mut included = 0usize;
+ let mut used_chars = 0usize;
- if !relevant.is_empty() {
- context.push_str("[Memory context]\n");
- for entry in &relevant {
- let _ = writeln!(context, "- {}: {}", entry.key, entry.content);
+ for entry in entries.iter().filter(|e| match e.score {
+ Some(score) => score >= min_relevance_score,
+ None => true, // keep entries without a score (e.g. non-vector backends)
+ }) {
+ if included >= MEMORY_CONTEXT_MAX_ENTRIES {
+ break;
}
+
+ if should_skip_memory_context_entry(&entry.key, &entry.content) {
+ continue;
+ }
+
+ let content = if entry.content.chars().count() > MEMORY_CONTEXT_ENTRY_MAX_CHARS {
+ truncate_with_ellipsis(&entry.content, MEMORY_CONTEXT_ENTRY_MAX_CHARS)
+ } else {
+ entry.content.clone()
+ };
+
+ let line = format!("- {}: {}\n", entry.key, content);
+ let line_chars = line.chars().count();
+ if used_chars + line_chars > MEMORY_CONTEXT_MAX_CHARS {
+ break;
+ }
+
+ if included == 0 {
+ context.push_str("[Memory context]\n");
+ }
+
+ context.push_str(&line);
+ used_chars += line_chars;
+ included += 1;
+ }
+
+ if included > 0 {
context.push('\n');
}
}
@@ -468,6 +898,100 @@ async fn build_memory_context(
context
}
+/// Extract a compact summary of tool interactions from history messages added
+/// during `run_tool_call_loop`. Scans assistant messages for `<tool_call>` tags
+/// or native tool-call JSON to collect tool names used.
+/// Returns an empty string when no tools were invoked.
+fn extract_tool_context_summary(history: &[ChatMessage], start_index: usize) -> String {
+ fn push_unique_tool_name(tool_names: &mut Vec<String>, name: &str) {
+ let candidate = name.trim();
+ if candidate.is_empty() {
+ return;
+ }
+ if !tool_names.iter().any(|existing| existing == candidate) {
+ tool_names.push(candidate.to_string());
+ }
+ }
+
+ fn collect_tool_names_from_tool_call_tags(content: &str, tool_names: &mut Vec<String>) {
+ const TAG_PAIRS: [(&str, &str); 4] = [
+ ("<tool_call>", "</tool_call>"),
+ ("<toolcall>", "</toolcall>"),
+ ("<tool>", "</tool>"),
+ ("<function_call>", "</function_call>"),
+ ];
+
+ for (open_tag, close_tag) in TAG_PAIRS {
+ for segment in content.split(open_tag) {
+ if let Some(json_end) = segment.find(close_tag) {
+ let json_str = segment[..json_end].trim();
+ if let Ok(val) = serde_json::from_str::<serde_json::Value>(json_str) {
+ if let Some(name) = val.get("name").and_then(|n| n.as_str()) {
+ push_unique_tool_name(tool_names, name);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_tool_names_from_native_json(content: &str, tool_names: &mut Vec<String>) {
+ if let Ok(val) = serde_json::from_str::<serde_json::Value>(content) {
+ if let Some(calls) = val.get("tool_calls").and_then(|c| c.as_array()) {
+ for call in calls {
+ let name = call
+ .get("function")
+ .and_then(|f| f.get("name"))
+ .and_then(|n| n.as_str())
+ .or_else(|| call.get("name").and_then(|n| n.as_str()));
+ if let Some(name) = name {
+ push_unique_tool_name(tool_names, name);
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_tool_names_from_tool_results(content: &str, tool_names: &mut Vec<String>) {
+ let marker = "<tool_result name=\"";
+ for segment in content.split(marker).skip(1) {
+ if let Some(end) = segment.find('"') {
+ push_unique_tool_name(tool_names, &segment[..end]);
+ }
+ }
+ }
+
+ let mut tool_names: Vec<String> = Vec::new();
+
+ for msg in history.iter().skip(start_index) {
+ match msg.role.as_str() {
+ "assistant" => {
+ collect_tool_names_from_tool_call_tags(&msg.content, &mut tool_names);
+ collect_tool_names_from_native_json(&msg.content, &mut tool_names);
+ }
+ "user" => {
+ // Prompt-mode tool calls are always followed by [Tool results] entries
+ // containing `<tool_result>` tags with canonical tool names.
+ collect_tool_names_from_tool_results(&msg.content, &mut tool_names);
+ }
+ _ => {}
+ }
+ }
+
+ if tool_names.is_empty() {
+ return String::new();
+ }
+
+ format!("[Used tools: {}]", tool_names.join(", "))
+}
+
fn spawn_supervised_listener(
ch: Arc<dyn Channel>,
tx: tokio::sync::mpsc::Sender<traits::ChannelMessage>,
@@ -553,7 +1077,15 @@ fn spawn_scoped_typing_task(
handle
}
-async fn process_channel_message(ctx: Arc<ChannelRuntimeContext>, msg: traits::ChannelMessage) {
+async fn process_channel_message(
+ ctx: Arc<ChannelRuntimeContext>,
+ msg: traits::ChannelMessage,
+ cancellation_token: CancellationToken,
+) {
+ if cancellation_token.is_cancelled() {
+ return;
+ }
+
println!(
" 💬 [{}] from {}: {}",
msg.channel,
@@ -562,12 +1094,16 @@ async fn process_channel_message(ctx: Arc<ChannelRuntimeContext>, msg: traits::C
);
let target_channel = ctx.channels_by_name.get(&msg.channel).cloned();
+ if let Err(err) = maybe_apply_runtime_config_update(ctx.as_ref()).await {
+ tracing::warn!("Failed to apply runtime config update: {err}");
+ }
if handle_runtime_command_if_needed(ctx.as_ref(), &msg, target_channel.as_ref()).await {
return;
}
let history_key = conversation_history_key(&msg);
let route = get_route_selection(ctx.as_ref(), &history_key);
+ let runtime_defaults = runtime_defaults_snapshot(ctx.as_ref());
let active_provider = match get_or_create_provider(ctx.as_ref(), &route.provider).await {
Ok(provider) => provider,
Err(err) => {
@@ -578,17 +1114,16 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
);
if let Some(channel) = target_channel.as_ref() {
let _ = channel
- .send(&SendMessage::new(message, &msg.reply_target))
+ .send(
+ &SendMessage::new(message, &msg.reply_target)
+ .in_thread(msg.thread_ts.clone()),
+ )
.await;
}
return;
}
};
-
- let memory_context =
- build_memory_context(ctx.memory.as_ref(), &msg.content, ctx.min_relevance_score).await;
-
- if ctx.auto_save_memory {
+ if ctx.auto_save_memory && msg.content.chars().count() >= AUTOSAVE_MIN_MESSAGE_CHARS {
let autosave_key = conversation_memory_key(&msg);
let _ = ctx
.memory
@@ -601,38 +1136,48 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
.await;
}
- let enriched_message = if memory_context.is_empty() {
- msg.content.clone()
- } else {
- format!("{memory_context}{}", msg.content)
- };
-
println!(" ⏳ Processing message...");
let started_at = Instant::now();
- // Build history from per-sender conversation cache
- let mut prior_turns = ctx
+ let had_prior_history = ctx
+ .conversation_histories
+ .lock()
+ .unwrap_or_else(|e| e.into_inner())
+ .get(&history_key)
+ .is_some_and(|turns| !turns.is_empty());
+
+ // Preserve user turn before the LLM call so interrupted requests keep context.
+ append_sender_turn(ctx.as_ref(), &history_key, ChatMessage::user(&msg.content));
+
+ // Build history from per-sender conversation cache.
+ let prior_turns_raw = ctx
.conversation_histories
.lock()
.unwrap_or_else(|e| e.into_inner())
.get(&history_key)
.cloned()
.unwrap_or_default();
+ let mut prior_turns = normalize_cached_channel_turns(prior_turns_raw);
- let mut history = vec![ChatMessage::system(ctx.system_prompt.as_str())];
- history.append(&mut prior_turns);
- history.push(ChatMessage::user(&enriched_message));
-
- if let Some(instructions) = channel_delivery_instructions(&msg.channel) {
- history.push(ChatMessage::system(instructions));
+ // Only enrich with memory context when there is no prior conversation
+ // history. Follow-up turns already include context from previous messages.
+ if !had_prior_history {
+ let memory_context =
+ build_memory_context(ctx.memory.as_ref(), &msg.content, ctx.min_relevance_score).await;
+ if let Some(last_turn) = prior_turns.last_mut() {
+ if last_turn.role == "user" && !memory_context.is_empty() {
+ last_turn.content = format!("{memory_context}{}", msg.content);
+ }
+ }
}
- // Determine if this channel supports streaming draft updates
+ let system_prompt = build_channel_system_prompt(ctx.system_prompt.as_str(), &msg.channel);
+ let mut history = vec![ChatMessage::system(system_prompt)];
+ history.extend(prior_turns);
let use_streaming = target_channel
.as_ref()
- .map_or(false, |ch| ch.supports_draft_updates());
+ .is_some_and(|ch| ch.supports_draft_updates());
- // Set up streaming channel if supported
let (delta_tx, delta_rx) = if use_streaming {
let (tx, rx) = tokio::sync::mpsc::channel::<String>(64);
(Some(tx), Some(rx))
@@ -640,11 +1185,12 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
(None, None)
};
- // Send initial draft message if streaming
let draft_message_id = if use_streaming {
if let Some(channel) = target_channel.as_ref() {
match channel
- .send_draft(&SendMessage::new("...", &msg.reply_target))
+ .send_draft(
+ &SendMessage::new("...", &msg.reply_target).in_thread(msg.thread_ts.clone()),
+ )
.await
{
Ok(id) => id,
@@ -660,7 +1206,6 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
None
};
- // Spawn a task to forward streaming deltas to draft updates
let draft_updater = if let (Some(mut rx), Some(draft_id_ref), Some(channel_ref)) = (
delta_rx,
draft_message_id.as_deref(),
@@ -695,26 +1240,39 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
_ => None,
};
- let llm_result = tokio::time::timeout(
- Duration::from_secs(CHANNEL_MESSAGE_TIMEOUT_SECS),
- run_tool_call_loop(
- active_provider.as_ref(),
- &mut history,
- ctx.tools_registry.as_ref(),
- ctx.observer.as_ref(),
- route.provider.as_str(),
- route.model.as_str(),
- ctx.temperature,
- true,
- None,
- msg.channel.as_str(),
- ctx.max_tool_iterations,
- delta_tx,
- ),
- )
- .await;
+ // Record history length before tool loop so we can extract tool context after.
+ let history_len_before_tools = history.len();
+
+ enum LlmExecutionResult {
+ Completed(Result<anyhow::Result<String>, tokio::time::error::Elapsed>),
+ Cancelled,
+ }
+
+ let timeout_budget_secs =
+ channel_message_timeout_budget_secs(ctx.message_timeout_secs, ctx.max_tool_iterations);
+ let llm_result = tokio::select! {
+ () = cancellation_token.cancelled() => LlmExecutionResult::Cancelled,
+ result = tokio::time::timeout(
+ Duration::from_secs(timeout_budget_secs),
+ run_tool_call_loop(
+ active_provider.as_ref(),
+ &mut history,
+ ctx.tools_registry.as_ref(),
+ ctx.observer.as_ref(),
+ route.provider.as_str(),
+ route.model.as_str(),
+ runtime_defaults.temperature,
+ true,
+ None,
+ msg.channel.as_str(),
+ &ctx.multimodal,
+ ctx.max_tool_iterations,
+ Some(cancellation_token.clone()),
+ delta_tx,
+ ),
+ ) => LlmExecutionResult::Completed(result),
+ };
- // Wait for draft updater to finish
if let Some(handle) = draft_updater {
let _ = handle.await;
}
@@ -727,21 +1285,36 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
}
match llm_result {
- Ok(Ok(response)) => {
- // Save user + assistant turn to per-sender history
+ LlmExecutionResult::Cancelled => {
+ tracing::info!(
+ channel = %msg.channel,
+ sender = %msg.sender,
+ "Cancelled in-flight channel request due to newer message"
+ );
+ if let (Some(channel), Some(draft_id)) =
+ (target_channel.as_ref(), draft_message_id.as_deref())
{
- let mut histories = ctx
- .conversation_histories
- .lock()
- .unwrap_or_else(|e| e.into_inner());
- let turns = histories.entry(history_key).or_default();
- turns.push(ChatMessage::user(&enriched_message));
- turns.push(ChatMessage::assistant(&response));
- // Trim to MAX_CHANNEL_HISTORY (keep recent turns)
- while turns.len() > MAX_CHANNEL_HISTORY {
- turns.remove(0);
+ if let Err(err) = channel.cancel_draft(&msg.reply_target, draft_id).await {
+ tracing::debug!("Failed to cancel draft on {}: {err}", channel.name());
}
}
+ }
+ LlmExecutionResult::Completed(Ok(Ok(response))) => {
+ // Extract condensed tool-use context from the history messages
+ // added during run_tool_call_loop, so the LLM retains awareness
+ // of what it did on subsequent turns.
+ let tool_summary = extract_tool_context_summary(&history, history_len_before_tools);
+ let history_response = if tool_summary.is_empty() {
+ response.clone()
+ } else {
+ format!("{tool_summary}\n{response}")
+ };
+
+ append_sender_turn(
+ ctx.as_ref(),
+ &history_key,
+ ChatMessage::assistant(&history_response),
+ );
println!(
" 🤖 Reply ({}ms): {}",
started_at.elapsed().as_millis(),
@@ -755,18 +1328,70 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
{
tracing::warn!("Failed to finalize draft: {e}; sending as new message");
let _ = channel
- .send(&SendMessage::new(&response, &msg.reply_target))
+ .send(
+ &SendMessage::new(&response, &msg.reply_target)
+ .in_thread(msg.thread_ts.clone()),
+ )
.await;
}
} else if let Err(e) = channel
- .send(&SendMessage::new(response, &msg.reply_target))
+ .send(
+ &SendMessage::new(response, &msg.reply_target)
+ .in_thread(msg.thread_ts.clone()),
+ )
.await
{
eprintln!(" ❌ Failed to reply on {}: {e}", channel.name());
}
}
}
- Ok(Err(e)) => {
+ LlmExecutionResult::Completed(Ok(Err(e))) => {
+ if crate::agent::loop_::is_tool_loop_cancelled(&e) || cancellation_token.is_cancelled()
+ {
+ tracing::info!(
+ channel = %msg.channel,
+ sender = %msg.sender,
+ "Cancelled in-flight channel request due to newer message"
+ );
+ if let (Some(channel), Some(draft_id)) =
+ (target_channel.as_ref(), draft_message_id.as_deref())
+ {
+ if let Err(err) = channel.cancel_draft(&msg.reply_target, draft_id).await {
+ tracing::debug!("Failed to cancel draft on {}: {err}", channel.name());
+ }
+ }
+ return;
+ }
+
+ if is_context_window_overflow_error(&e) {
+ let compacted = compact_sender_history(ctx.as_ref(), &history_key);
+ let error_text = if compacted {
+ "⚠️ Context window exceeded for this conversation. I compacted recent history and kept the latest context. Please resend your last message."
+ } else {
+ "⚠️ Context window exceeded for this conversation. Please resend your last message."
+ };
+ eprintln!(
+ " ⚠️ Context window exceeded after {}ms; sender history compacted={}",
+ started_at.elapsed().as_millis(),
+ compacted
+ );
+ if let Some(channel) = target_channel.as_ref() {
+ if let Some(ref draft_id) = draft_message_id {
+ let _ = channel
+ .finalize_draft(&msg.reply_target, draft_id, error_text)
+ .await;
+ } else {
+ let _ = channel
+ .send(
+ &SendMessage::new(error_text, &msg.reply_target)
+ .in_thread(msg.thread_ts.clone()),
+ )
+ .await;
+ }
+ }
+ return;
+ }
+
eprintln!(
" ❌ LLM error after {}ms: {e}",
started_at.elapsed().as_millis()
@@ -778,18 +1403,18 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
.await;
} else {
let _ = channel
- .send(&SendMessage::new(
- format!("⚠️ Error: {e}"),
- &msg.reply_target,
- ))
+ .send(
+ &SendMessage::new(format!("⚠️ Error: {e}"), &msg.reply_target)
+ .in_thread(msg.thread_ts.clone()),
+ )
.await;
}
}
}
- Err(_) => {
+ LlmExecutionResult::Completed(Err(_)) => {
let timeout_msg = format!(
- "LLM response timed out after {}s",
- CHANNEL_MESSAGE_TIMEOUT_SECS
+ "LLM response timed out after {}s (base={}s, max_tool_iterations={})",
+ timeout_budget_secs, ctx.message_timeout_secs, ctx.max_tool_iterations
);
eprintln!(
" ❌ {} (elapsed: {}ms)",
@@ -805,7 +1430,10 @@ async fn process_channel_message(ctx: Arc, msg: traits::C
.await;
} else {
let _ = channel
- .send(&SendMessage::new(error_text, &msg.reply_target))
+ .send(
+ &SendMessage::new(error_text, &msg.reply_target)
+ .in_thread(msg.thread_ts.clone()),
+ )
.await;
}
}
@@ -820,6 +1448,11 @@ async fn run_message_dispatch_loop(
) {
let semaphore = Arc::new(tokio::sync::Semaphore::new(max_in_flight_messages));
let mut workers = tokio::task::JoinSet::new();
+ let in_flight_by_sender = Arc::new(tokio::sync::Mutex::new(HashMap::<
+ String,
+ InFlightSenderTaskState,
+ >::new()));
+ let task_sequence = Arc::new(AtomicU64::new(1));
while let Some(msg) = rx.recv().await {
let permit = match Arc::clone(&semaphore).acquire_owned().await {
@@ -828,9 +1461,54 @@ async fn run_message_dispatch_loop(
};
let worker_ctx = Arc::clone(&ctx);
+ let in_flight = Arc::clone(&in_flight_by_sender);
+ let task_sequence = Arc::clone(&task_sequence);
workers.spawn(async move {
let _permit = permit;
- process_channel_message(worker_ctx, msg).await;
+ let interrupt_enabled =
+ worker_ctx.interrupt_on_new_message && msg.channel == "telegram";
+ let sender_scope_key = interruption_scope_key(&msg);
+ let cancellation_token = CancellationToken::new();
+ let completion = Arc::new(InFlightTaskCompletion::new());
+ let task_id = task_sequence.fetch_add(1, Ordering::Relaxed);
+
+ if interrupt_enabled {
+ let previous = {
+ let mut active = in_flight.lock().await;
+ active.insert(
+ sender_scope_key.clone(),
+ InFlightSenderTaskState {
+ task_id,
+ cancellation: cancellation_token.clone(),
+ completion: Arc::clone(&completion),
+ },
+ )
+ };
+
+ if let Some(previous) = previous {
+ tracing::info!(
+ channel = %msg.channel,
+ sender = %msg.sender,
+ "Interrupting previous in-flight request for sender"
+ );
+ previous.cancellation.cancel();
+ previous.completion.wait().await;
+ }
+ }
+
+ process_channel_message(worker_ctx, msg, cancellation_token).await;
+
+ if interrupt_enabled {
+ let mut active = in_flight.lock().await;
+ if active
+ .get(&sender_scope_key)
+ .is_some_and(|state| state.task_id == task_id)
+ {
+ active.remove(&sender_scope_key);
+ }
+ }
+
+ completion.mark_done();
});
while let Some(result) = workers.try_join_next() {
@@ -874,7 +1552,7 @@ fn load_openclaw_bootstrap_files(
/// Follows the `OpenClaw` framework structure by default:
/// 1. Tooling — tool list + descriptions
/// 2. Safety — guardrail reminder
-/// 3. Skills — compact list with paths (loaded on-demand)
+/// 3. Skills — full skill instructions and tool metadata
/// 4. Workspace — working directory
/// 5. Bootstrap files — AGENTS, SOUL, TOOLS, IDENTITY, USER, BOOTSTRAP, MEMORY
/// 6. Date & Time — timezone for cache stability
@@ -892,6 +1570,26 @@ pub fn build_system_prompt(
skills: &[crate::skills::Skill],
identity_config: Option<&crate::config::IdentityConfig>,
bootstrap_max_chars: Option<usize>,
+) -> String {
+ build_system_prompt_with_mode(
+ workspace_dir,
+ model_name,
+ tools,
+ skills,
+ identity_config,
+ bootstrap_max_chars,
+ false,
+ )
+}
+
+pub fn build_system_prompt_with_mode(
+ workspace_dir: &std::path::Path,
+ model_name: &str,
+ tools: &[(&str, &str)],
+ skills: &[crate::skills::Skill],
+ identity_config: Option<&crate::config::IdentityConfig>,
+ bootstrap_max_chars: Option<usize>,
+ native_tools: bool,
) -> String {
use std::fmt::Write;
let mut prompt = String::with_capacity(8192);
@@ -903,13 +1601,7 @@ pub fn build_system_prompt(
for (name, desc) in tools {
let _ = writeln!(prompt, "- **{name}**: {desc}");
}
- prompt.push_str("\n## Tool Use Protocol\n\n");
- prompt.push_str("To use a tool, wrap a JSON object in <tool_call> tags:\n\n");
- prompt.push_str("```\n<tool_call>\n{\"name\": \"tool_name\", \"arguments\": {\"param\": \"value\"}}\n</tool_call>\n```\n\n");
- prompt.push_str("You may use multiple tool calls in a single response. ");
- prompt.push_str("After tool execution, results appear in <tool_result> tags. ");
- prompt
- .push_str("Continue reasoning with the results until you can give a final answer.\n\n");
+ prompt.push('\n');
}
// ── 1b. Hardware (when gpio/arduino tools present) ───────────
@@ -934,12 +1626,21 @@ pub fn build_system_prompt(
}
// ── 1c. Action instruction (avoid meta-summary) ───────────────
- prompt.push_str(
- "## Your Task\n\n\
- When the user sends a message, ACT on it. Use the tools to fulfill their request.\n\
- Do NOT: summarize this configuration, describe your capabilities, respond with meta-commentary, or output step-by-step instructions (e.g. \"1. First... 2. Next...\").\n\
- Instead: emit actual <tool_call> tags when you need to act. Just do what they ask.\n\n",
- );
+ if native_tools {
+ prompt.push_str(
+ "## Your Task\n\n\
+ When the user sends a message, respond naturally. Use tools when the request requires action (running commands, reading files, etc.).\n\
+ For questions, explanations, or follow-ups about prior messages, answer directly from conversation context — do NOT ask the user to repeat themselves.\n\
+ Do NOT: summarize this configuration, describe your capabilities, or output step-by-step meta-commentary.\n\n",
+ );
+ } else {
+ prompt.push_str(
+ "## Your Task\n\n\
+ When the user sends a message, ACT on it. Use the tools to fulfill their request.\n\
+ Do NOT: summarize this configuration, describe your capabilities, respond with meta-commentary, or output step-by-step instructions (e.g. \"1. First... 2. Next...\").\n\
+ Instead: emit actual <tool_call> tags when you need to act. Just do what they ask.\n\n",
+ );
+ }
// ── 2. Safety ───────────────────────────────────────────────
prompt.push_str("## Safety\n\n");
@@ -951,31 +1652,10 @@ pub fn build_system_prompt(
- When in doubt, ask before acting externally.\n\n",
);
- // ── 3. Skills (compact list — load on-demand) ───────────────
+ // ── 3. Skills (full instructions + tool metadata) ───────────
if !skills.is_empty() {
- prompt.push_str("## Available Skills\n\n");
- prompt.push_str(
- "Skills are loaded on demand. Use `read` on the skill path to get full instructions.\n\n",
- );
- prompt.push_str("<skills>\n");
- for skill in skills {
- let _ = writeln!(prompt, "  <skill>");
- let _ = writeln!(prompt, "    <name>{}</name>", skill.name);
- let _ = writeln!(
- prompt,
- "    <description>{}</description>",
- skill.description
- );
- let location = skill.location.clone().unwrap_or_else(|| {
- workspace_dir
- .join("skills")
- .join(&skill.name)
- .join("SKILL.md")
- });
- let _ = writeln!(prompt, "    <location>{}</location>", location.display());
- let _ = writeln!(prompt, "  </skill>");
- }
- prompt.push_str("</skills>\n\n");
+ prompt.push_str(&crate::skills::skills_to_prompt(skills, workspace_dir));
+ prompt.push_str("\n\n");
}
// ── 4. Workspace ────────────────────────────────────────────
@@ -1042,16 +1722,14 @@ pub fn build_system_prompt(
// ── 8. Channel Capabilities ─────────────────────────────────────
prompt.push_str("## Channel Capabilities\n\n");
- prompt.push_str(
- "- You are running as a Discord bot. You CAN and do send messages to Discord channels.\n",
- );
- prompt.push_str("- When someone messages you on Discord, your response is automatically sent back to Discord.\n");
+ prompt.push_str("- You are running as a messaging bot. Your response is automatically sent back to the user's channel.\n");
prompt.push_str("- You do NOT need to ask permission to respond — just respond directly.\n");
prompt.push_str("- NEVER repeat, describe, or echo credentials, tokens, API keys, or secrets in your responses.\n");
prompt.push_str("- If a tool output contains credentials, they have already been redacted — do not mention them.\n\n");
if prompt.is_empty() {
- "You are ZeroClaw, a fast and efficient AI assistant built in Rust. Be helpful, concise, and direct.".to_string()
+ "You are ZeroClaw, a fast and efficient AI assistant built in Rust. Be helpful, concise, and direct."
+ .to_string()
} else {
prompt
}
@@ -1106,7 +1784,7 @@ fn normalize_telegram_identity(value: &str) -> String {
value.trim().trim_start_matches('@').to_string()
}
-fn bind_telegram_identity(config: &Config, identity: &str) -> Result<()> {
+async fn bind_telegram_identity(config: &Config, identity: &str) -> Result<()> {
let normalized = normalize_telegram_identity(identity);
if normalized.is_empty() {
anyhow::bail!("Telegram identity cannot be empty");
@@ -1136,7 +1814,7 @@ fn bind_telegram_identity(config: &Config, identity: &str) -> Result<()> {
}
telegram.allowed_users.push(normalized.clone());
- updated.save()?;
+ updated.save().await?;
println!("✅ Bound Telegram identity: {normalized}");
println!(" Saved to {}", updated.config_path.display());
match maybe_restart_managed_daemon_service() {
@@ -1232,7 +1910,7 @@ fn maybe_restart_managed_daemon_service() -> Result {
Ok(false)
}
-pub fn handle_command(command: crate::ChannelCommands, config: &Config) -> Result<()> {
+pub async fn handle_command(command: crate::ChannelCommands, config: &Config) -> Result<()> {
match command {
crate::ChannelCommands::Start => {
anyhow::bail!("Start must be handled in main.rs (requires async runtime)")
@@ -1247,11 +1925,16 @@ pub fn handle_command(command: crate::ChannelCommands, config: &Config) -> Resul
("Telegram", config.channels_config.telegram.is_some()),
("Discord", config.channels_config.discord.is_some()),
("Slack", config.channels_config.slack.is_some()),
+ ("Mattermost", config.channels_config.mattermost.is_some()),
("Webhook", config.channels_config.webhook.is_some()),
("iMessage", config.channels_config.imessage.is_some()),
- ("Matrix", config.channels_config.matrix.is_some()),
+ (
+ "Matrix",
+ cfg!(feature = "channel-matrix") && config.channels_config.matrix.is_some(),
+ ),
("Signal", config.channels_config.signal.is_some()),
("WhatsApp", config.channels_config.whatsapp.is_some()),
+ ("Linq", config.channels_config.linq.is_some()),
("Email", config.channels_config.email.is_some()),
("IRC", config.channels_config.irc.is_some()),
("Lark", config.channels_config.lark.is_some()),
@@ -1260,6 +1943,11 @@ pub fn handle_command(command: crate::ChannelCommands, config: &Config) -> Resul
] {
println!(" {} {name}", if configured { "✅" } else { "❌" });
}
+ if !cfg!(feature = "channel-matrix") {
+ println!(
+ " ℹ️ Matrix channel support is disabled in this build (enable `channel-matrix`)."
+ );
+ }
println!("\nTo start channels: zeroclaw channel start");
println!("To check health: zeroclaw channel doctor");
println!("To configure: zeroclaw onboard");
@@ -1277,7 +1965,7 @@ pub fn handle_command(command: crate::ChannelCommands, config: &Config) -> Resul
anyhow::bail!("Remove channel '{name}' — edit ~/.zeroclaw/config.toml directly");
}
crate::ChannelCommands::BindTelegram { identity } => {
- bind_telegram_identity(config, &identity)
+ bind_telegram_identity(config, &identity).await
}
}
}
@@ -1348,6 +2036,7 @@ pub async fn doctor_channels(config: Config) -> Result<()> {
));
}
+ #[cfg(feature = "channel-matrix")]
if let Some(ref mx) = config.channels_config.matrix {
channels.push((
"Matrix",
@@ -1362,6 +2051,13 @@ pub async fn doctor_channels(config: Config) -> Result<()> {
));
}
+ #[cfg(not(feature = "channel-matrix"))]
+ if config.channels_config.matrix.is_some() {
+ tracing::warn!(
+ "Matrix channel is configured but this build was compiled without `channel-matrix`; skipping Matrix health check."
+ );
+ }
+
if let Some(ref sig) = config.channels_config.signal {
channels.push((
"Signal",
@@ -1377,13 +2073,63 @@ pub async fn doctor_channels(config: Config) -> Result<()> {
}
if let Some(ref wa) = config.channels_config.whatsapp {
+ if wa.is_ambiguous_config() {
+ tracing::warn!(
+ "WhatsApp config has both phone_number_id and session_path set; preferring Cloud API mode. Remove one selector to avoid ambiguity."
+ );
+ }
+ // Runtime negotiation: detect backend type from config
+ match wa.backend_type() {
+ "cloud" => {
+ // Cloud API mode: requires phone_number_id, access_token, verify_token
+ if wa.is_cloud_config() {
+ channels.push((
+ "WhatsApp",
+ Arc::new(WhatsAppChannel::new(
+ wa.access_token.clone().unwrap_or_default(),
+ wa.phone_number_id.clone().unwrap_or_default(),
+ wa.verify_token.clone().unwrap_or_default(),
+ wa.allowed_numbers.clone(),
+ )),
+ ));
+ } else {
+ tracing::warn!("WhatsApp Cloud API configured but missing required fields (phone_number_id, access_token, verify_token)");
+ }
+ }
+ "web" => {
+ // Web mode: requires session_path
+ #[cfg(feature = "whatsapp-web")]
+ if wa.is_web_config() {
+ channels.push((
+ "WhatsApp",
+ Arc::new(WhatsAppWebChannel::new(
+ wa.session_path.clone().unwrap_or_default(),
+ wa.pair_phone.clone(),
+ wa.pair_code.clone(),
+ wa.allowed_numbers.clone(),
+ )),
+ ));
+ } else {
+ tracing::warn!("WhatsApp Web configured but session_path not set");
+ }
+ #[cfg(not(feature = "whatsapp-web"))]
+ {
+ tracing::warn!("WhatsApp Web backend requires 'whatsapp-web' feature. Enable with: cargo build --features whatsapp-web");
+ }
+ }
+ _ => {
+ tracing::warn!("WhatsApp config invalid: neither phone_number_id (Cloud API) nor session_path (Web) is set");
+ }
+ }
+ }
+
+ if let Some(ref lq) = config.channels_config.linq {
channels.push((
- "WhatsApp",
- Arc::new(WhatsAppChannel::new(
- wa.access_token.clone(),
- wa.phone_number_id.clone(),
- wa.verify_token.clone(),
- wa.allowed_numbers.clone(),
+ "Linq",
+ Arc::new(LinqChannel::new(
+ lq.api_token.clone(),
+ lq.from_phone.clone(),
+ lq.allowed_senders.clone(),
)),
));
}
@@ -1480,14 +2226,12 @@ pub async fn doctor_channels(config: Config) -> Result<()> {
/// Start all configured channels and route messages to the agent
#[allow(clippy::too_many_lines)]
pub async fn start_channels(config: Config) -> Result<()> {
- let provider_name = config
- .default_provider
- .clone()
- .unwrap_or_else(|| "openrouter".into());
+ let provider_name = resolved_default_provider(&config);
let provider_runtime_options = providers::ProviderRuntimeOptions {
auth_profile_override: None,
zeroclaw_dir: config.config_path.parent().map(std::path::PathBuf::from),
secrets_encrypt: config.secrets.encrypt,
+ reasoning_enabled: config.runtime.reasoning_enabled,
};
let provider: Arc = Arc::from(providers::create_resilient_provider_with_options(
&provider_name,
@@ -1503,6 +2247,20 @@ pub async fn start_channels(config: Config) -> Result<()> {
tracing::warn!("Provider warmup failed (non-fatal): {e}");
}
+ let initial_stamp = config_file_stamp(&config.config_path).await;
+ {
+ let mut store = runtime_config_store()
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ store.insert(
+ config.config_path.clone(),
+ RuntimeConfigState {
+ defaults: runtime_defaults_from_config(&config),
+ last_applied_stamp: initial_stamp,
+ },
+ );
+ }
+
let observer: Arc =
Arc::from(observability::create_observer(&config.observability));
let runtime: Arc =
@@ -1511,10 +2269,7 @@ pub async fn start_channels(config: Config) -> Result<()> {
&config.autonomy,
&config.workspace_dir,
));
- let model = config
- .default_model
- .clone()
- .unwrap_or_else(|| "anthropic/claude-sonnet-4-20250514".into());
+ let model = resolved_default_model(&config);
let temperature = config.default_temperature;
let mem: Arc = Arc::from(memory::create_memory_with_storage(
&config.memory,
@@ -1547,7 +2302,7 @@ pub async fn start_channels(config: Config) -> Result<()> {
&config,
));
- let skills = crate::skills::load_skills(&workspace);
+ let skills = crate::skills::load_skills_with_config(&workspace, &config);
// Collect tool descriptions for the prompt
let mut tool_descs: Vec<(&str, &str)> = vec![
@@ -1586,7 +2341,7 @@ pub async fn start_channels(config: Config) -> Result<()> {
if config.composio.enabled {
tool_descs.push((
"composio",
- "Execute actions on 1000+ apps via Composio (Gmail, Notion, GitHub, Slack, etc.). Use action='list' to discover, 'execute' to run (optionally with connected_account_id), 'connect' to OAuth.",
+ "Execute actions on 1000+ apps via Composio (Gmail, Notion, GitHub, Slack, etc.). Use action='list' to discover actions, 'list_accounts' to retrieve connected account IDs, 'execute' to run (optionally with connected_account_id), and 'connect' for OAuth.",
));
}
tool_descs.push((
@@ -1609,15 +2364,19 @@ pub async fn start_channels(config: Config) -> Result<()> {
} else {
None
};
- let mut system_prompt = build_system_prompt(
+ let native_tools = provider.supports_native_tools();
+ let mut system_prompt = build_system_prompt_with_mode(
&workspace,
&model,
&tool_descs,
&skills,
Some(&config.identity),
bootstrap_max_chars,
+ native_tools,
);
- system_prompt.push_str(&build_tool_instructions(tools_registry.as_ref()));
+ if !native_tools {
+ system_prompt.push_str(&build_tool_instructions(tools_registry.as_ref()));
+ }
if !skills.is_empty() {
println!(
@@ -1677,6 +2436,7 @@ pub async fn start_channels(config: Config) -> Result<()> {
channels.push(Arc::new(IMessageChannel::new(im.allowed_contacts.clone())));
}
+ #[cfg(feature = "channel-matrix")]
if let Some(ref mx) = config.channels_config.matrix {
channels.push(Arc::new(MatrixChannel::new_with_session_hint(
mx.homeserver.clone(),
@@ -1688,6 +2448,13 @@ pub async fn start_channels(config: Config) -> Result<()> {
)));
}
+ #[cfg(not(feature = "channel-matrix"))]
+ if config.channels_config.matrix.is_some() {
+ tracing::warn!(
+ "Matrix channel is configured but this build was compiled without `channel-matrix`; skipping Matrix runtime startup."
+ );
+ }
+
if let Some(ref sig) = config.channels_config.signal {
channels.push(Arc::new(SignalChannel::new(
sig.http_url.clone(),
@@ -1700,11 +2467,55 @@ pub async fn start_channels(config: Config) -> Result<()> {
}
if let Some(ref wa) = config.channels_config.whatsapp {
- channels.push(Arc::new(WhatsAppChannel::new(
- wa.access_token.clone(),
- wa.phone_number_id.clone(),
- wa.verify_token.clone(),
- wa.allowed_numbers.clone(),
+ if wa.is_ambiguous_config() {
+ tracing::warn!(
+ "WhatsApp config has both phone_number_id and session_path set; preferring Cloud API mode. Remove one selector to avoid ambiguity."
+ );
+ }
+ // Runtime negotiation: detect backend type from config
+ match wa.backend_type() {
+ "cloud" => {
+ // Cloud API mode: requires phone_number_id, access_token, verify_token
+ if wa.is_cloud_config() {
+ channels.push(Arc::new(WhatsAppChannel::new(
+ wa.access_token.clone().unwrap_or_default(),
+ wa.phone_number_id.clone().unwrap_or_default(),
+ wa.verify_token.clone().unwrap_or_default(),
+ wa.allowed_numbers.clone(),
+ )));
+ } else {
+ tracing::warn!("WhatsApp Cloud API configured but missing required fields (phone_number_id, access_token, verify_token)");
+ }
+ }
+ "web" => {
+ // Web mode: requires session_path
+ #[cfg(feature = "whatsapp-web")]
+ if wa.is_web_config() {
+ channels.push(Arc::new(WhatsAppWebChannel::new(
+ wa.session_path.clone().unwrap_or_default(),
+ wa.pair_phone.clone(),
+ wa.pair_code.clone(),
+ wa.allowed_numbers.clone(),
+ )));
+ } else {
+ tracing::warn!("WhatsApp Web configured but session_path not set");
+ }
+ #[cfg(not(feature = "whatsapp-web"))]
+ {
+ tracing::warn!("WhatsApp Web backend requires 'whatsapp-web' feature. Enable with: cargo build --features whatsapp-web");
+ }
+ }
+ _ => {
+ tracing::warn!("WhatsApp config invalid: neither phone_number_id (Cloud API) nor session_path (Web) is set");
+ }
+ }
+ }
+
+ if let Some(ref lq) = config.channels_config.linq {
+ channels.push(Arc::new(LinqChannel::new(
+ lq.api_token.clone(),
+ lq.from_phone.clone(),
+ lq.allowed_senders.clone(),
)));
}
@@ -1813,6 +2624,13 @@ pub async fn start_channels(config: Config) -> Result<()> {
let mut provider_cache_seed: HashMap<String, Arc<dyn Provider>> = HashMap::new();
provider_cache_seed.insert(provider_name.clone(), Arc::clone(&provider));
+ let message_timeout_secs =
+ effective_channel_message_timeout_secs(config.channels_config.message_timeout_secs);
+ let interrupt_on_new_message = config
+ .channels_config
+ .telegram
+ .as_ref()
+ .is_some_and(|tg| tg.interrupt_on_new_message);
let runtime_ctx = Arc::new(ChannelRuntimeContext {
channels_by_name,
@@ -1835,6 +2653,9 @@ pub async fn start_channels(config: Config) -> Result<()> {
reliability: Arc::new(config.reliability.clone()),
provider_runtime_options,
workspace_dir: Arc::new(config.workspace_dir.clone()),
+ message_timeout_secs,
+ interrupt_on_new_message,
+ multimodal: config.multimodal.clone(),
});
run_message_dispatch_loop(rx, runtime_ctx, max_in_flight_messages).await;
@@ -1880,6 +2701,171 @@ mod tests {
tmp
}
+ #[test]
+ fn effective_channel_message_timeout_secs_clamps_to_minimum() {
+ assert_eq!(
+ effective_channel_message_timeout_secs(0),
+ MIN_CHANNEL_MESSAGE_TIMEOUT_SECS
+ );
+ assert_eq!(
+ effective_channel_message_timeout_secs(15),
+ MIN_CHANNEL_MESSAGE_TIMEOUT_SECS
+ );
+ assert_eq!(effective_channel_message_timeout_secs(300), 300);
+ }
+
+ #[test]
+ fn channel_message_timeout_budget_scales_with_tool_iterations() {
+ assert_eq!(channel_message_timeout_budget_secs(300, 1), 300);
+ assert_eq!(channel_message_timeout_budget_secs(300, 2), 600);
+ assert_eq!(channel_message_timeout_budget_secs(300, 3), 900);
+ }
+
+ #[test]
+ fn channel_message_timeout_budget_uses_safe_defaults_and_cap() {
+ // 0 iterations falls back to 1x timeout budget.
+ assert_eq!(channel_message_timeout_budget_secs(300, 0), 300);
+ // Large iteration counts are capped to avoid runaway waits.
+ assert_eq!(
+ channel_message_timeout_budget_secs(300, 10),
+ 300 * CHANNEL_MESSAGE_TIMEOUT_SCALE_CAP
+ );
+ }
+
+ #[test]
+ fn context_window_overflow_error_detector_matches_known_messages() {
+ let overflow_err = anyhow::anyhow!(
+ "OpenAI Codex stream error: Your input exceeds the context window of this model."
+ );
+ assert!(is_context_window_overflow_error(&overflow_err));
+
+ let other_err =
+ anyhow::anyhow!("OpenAI Codex API error (502 Bad Gateway): error code: 502");
+ assert!(!is_context_window_overflow_error(&other_err));
+ }
+
+ #[test]
+ fn memory_context_skip_rules_exclude_history_blobs() {
+ assert!(should_skip_memory_context_entry(
+ "telegram_123_history",
+ r#"[{"role":"user"}]"#
+ ));
+ assert!(should_skip_memory_context_entry(
+ "assistant_resp_legacy",
+ "fabricated memory"
+ ));
+ assert!(!should_skip_memory_context_entry("telegram_123_45", "hi"));
+ }
+
+ #[test]
+ fn normalize_cached_channel_turns_merges_consecutive_user_turns() {
+ let turns = vec![
+ ChatMessage::user("forwarded content"),
+ ChatMessage::user("summarize this"),
+ ];
+
+ let normalized = normalize_cached_channel_turns(turns);
+ assert_eq!(normalized.len(), 1);
+ assert_eq!(normalized[0].role, "user");
+ assert!(normalized[0].content.contains("forwarded content"));
+ assert!(normalized[0].content.contains("summarize this"));
+ }
+
+ #[test]
+ fn normalize_cached_channel_turns_merges_consecutive_assistant_turns() {
+ let turns = vec![
+ ChatMessage::user("first user"),
+ ChatMessage::assistant("assistant part 1"),
+ ChatMessage::assistant("assistant part 2"),
+ ChatMessage::user("next user"),
+ ];
+
+ let normalized = normalize_cached_channel_turns(turns);
+ assert_eq!(normalized.len(), 3);
+ assert_eq!(normalized[0].role, "user");
+ assert_eq!(normalized[1].role, "assistant");
+ assert_eq!(normalized[2].role, "user");
+ assert!(normalized[1].content.contains("assistant part 1"));
+ assert!(normalized[1].content.contains("assistant part 2"));
+ }
+
+ #[test]
+ fn compact_sender_history_keeps_recent_truncated_messages() {
+ let mut histories = HashMap::new();
+ let sender = "telegram_u1".to_string();
+ histories.insert(
+ sender.clone(),
+ (0..20)
+ .map(|idx| {
+ let content = format!("msg-{idx}-{}", "x".repeat(700));
+ if idx % 2 == 0 {
+ ChatMessage::user(content)
+ } else {
+ ChatMessage::assistant(content)
+ }
+ })
+ .collect::<Vec<_>>(),
+ );
+
+ let ctx = ChannelRuntimeContext {
+ channels_by_name: Arc::new(HashMap::new()),
+ provider: Arc::new(DummyProvider),
+ default_provider: Arc::new("test-provider".to_string()),
+ memory: Arc::new(NoopMemory),
+ tools_registry: Arc::new(vec![]),
+ observer: Arc::new(NoopObserver),
+ system_prompt: Arc::new("system".to_string()),
+ model: Arc::new("test-model".to_string()),
+ temperature: 0.0,
+ auto_save_memory: false,
+ max_tool_iterations: 5,
+ min_relevance_score: 0.0,
+ conversation_histories: Arc::new(Mutex::new(histories)),
+ provider_cache: Arc::new(Mutex::new(HashMap::new())),
+ route_overrides: Arc::new(Mutex::new(HashMap::new())),
+ api_key: None,
+ api_url: None,
+ reliability: Arc::new(crate::config::ReliabilityConfig::default()),
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
+ provider_runtime_options: providers::ProviderRuntimeOptions::default(),
+ workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ };
+
+ assert!(compact_sender_history(&ctx, &sender));
+
+ let histories = ctx
+ .conversation_histories
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ let kept = histories
+ .get(&sender)
+ .expect("sender history should remain");
+ assert_eq!(kept.len(), CHANNEL_HISTORY_COMPACT_KEEP_MESSAGES);
+ assert!(kept.iter().all(|turn| {
+ let len = turn.content.chars().count();
+ len <= CHANNEL_HISTORY_COMPACT_CONTENT_CHARS
+ || (len <= CHANNEL_HISTORY_COMPACT_CONTENT_CHARS + 3
+ && turn.content.ends_with("..."))
+ }));
+ }
+
+ struct DummyProvider;
+
+ #[async_trait::async_trait]
+ impl Provider for DummyProvider {
+ async fn chat_with_system(
+ &self,
+ _system_prompt: Option<&str>,
+ _message: &str,
+ _model: &str,
+ _temperature: f64,
+ ) -> anyhow::Result<String> {
+ Ok("ok".to_string())
+ }
+ }
+
#[derive(Default)]
struct RecordingChannel {
sent_messages: tokio::sync::Mutex<Vec<String>>,
@@ -2123,6 +3109,43 @@ mod tests {
}
}
+ struct DelayedHistoryCaptureProvider {
+ delay: Duration,
+ calls: std::sync::Mutex<Vec<Vec<(String, String)>>>,
+ }
+
+ #[async_trait::async_trait]
+ impl Provider for DelayedHistoryCaptureProvider {
+ async fn chat_with_system(
+ &self,
+ _system_prompt: Option<&str>,
+ _message: &str,
+ _model: &str,
+ _temperature: f64,
+ ) -> anyhow::Result<String> {
+ Ok("fallback".to_string())
+ }
+
+ async fn chat_with_history(
+ &self,
+ messages: &[ChatMessage],
+ _model: &str,
+ _temperature: f64,
+ ) -> anyhow::Result<String> {
+ let snapshot = messages
+ .iter()
+ .map(|m| (m.role.clone(), m.content.clone()))
+ .collect::<Vec<_>>();
+ let call_index = {
+ let mut calls = self.calls.lock().unwrap_or_else(|e| e.into_inner());
+ calls.push(snapshot);
+ calls.len()
+ };
+ tokio::time::sleep(self.delay).await;
+ Ok(format!("response-{call_index}"))
+ }
+ }
+
struct MockPriceTool;
#[derive(Default)]
@@ -2225,6 +3248,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
process_channel_message(
@@ -2236,7 +3262,9 @@ mod tests {
content: "What is the BTC price now?".to_string(),
channel: "test-channel".to_string(),
timestamp: 1,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -2277,6 +3305,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
process_channel_message(
@@ -2288,7 +3319,9 @@ mod tests {
content: "What is the BTC price now?".to_string(),
channel: "test-channel".to_string(),
timestamp: 2,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -2338,6 +3371,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
process_channel_message(
@@ -2349,7 +3385,9 @@ mod tests {
content: "/models openrouter".to_string(),
channel: "telegram".to_string(),
timestamp: 1,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -2420,6 +3458,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
process_channel_message(
@@ -2431,7 +3472,9 @@ mod tests {
content: "hello routed provider".to_string(),
channel: "telegram".to_string(),
timestamp: 2,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -2447,6 +3490,165 @@ mod tests {
);
}
+ #[tokio::test]
+ async fn process_channel_message_prefers_cached_default_provider_instance() {
+ let channel_impl = Arc::new(TelegramRecordingChannel::default());
+ let channel: Arc<dyn Channel> = channel_impl.clone();
+
+ let mut channels_by_name = HashMap::new();
+ channels_by_name.insert(channel.name().to_string(), channel);
+
+ let startup_provider_impl = Arc::new(ModelCaptureProvider::default());
+ let startup_provider: Arc<dyn Provider> = startup_provider_impl.clone();
+ let reloaded_provider_impl = Arc::new(ModelCaptureProvider::default());
+ let reloaded_provider: Arc<dyn Provider> = reloaded_provider_impl.clone();
+
+ let mut provider_cache_seed: HashMap<String, Arc<dyn Provider>> = HashMap::new();
+ provider_cache_seed.insert("test-provider".to_string(), reloaded_provider);
+
+ let runtime_ctx = Arc::new(ChannelRuntimeContext {
+ channels_by_name: Arc::new(channels_by_name),
+ provider: Arc::clone(&startup_provider),
+ default_provider: Arc::new("test-provider".to_string()),
+ memory: Arc::new(NoopMemory),
+ tools_registry: Arc::new(vec![]),
+ observer: Arc::new(NoopObserver),
+ system_prompt: Arc::new("test-system-prompt".to_string()),
+ model: Arc::new("default-model".to_string()),
+ temperature: 0.0,
+ auto_save_memory: false,
+ max_tool_iterations: 5,
+ min_relevance_score: 0.0,
+ conversation_histories: Arc::new(Mutex::new(HashMap::new())),
+ provider_cache: Arc::new(Mutex::new(provider_cache_seed)),
+ route_overrides: Arc::new(Mutex::new(HashMap::new())),
+ api_key: None,
+ api_url: None,
+ reliability: Arc::new(crate::config::ReliabilityConfig::default()),
+ provider_runtime_options: providers::ProviderRuntimeOptions::default(),
+ workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
+ });
+
+ process_channel_message(
+ runtime_ctx,
+ traits::ChannelMessage {
+ id: "msg-default-provider-cache".to_string(),
+ sender: "alice".to_string(),
+ reply_target: "chat-1".to_string(),
+ content: "hello cached default provider".to_string(),
+ channel: "telegram".to_string(),
+ timestamp: 3,
+ thread_ts: None,
+ },
+ CancellationToken::new(),
+ )
+ .await;
+
+ assert_eq!(startup_provider_impl.call_count.load(Ordering::SeqCst), 0);
+ assert_eq!(reloaded_provider_impl.call_count.load(Ordering::SeqCst), 1);
+ }
+
+ #[tokio::test]
+ async fn process_channel_message_uses_runtime_default_model_from_store() {
+ let channel_impl = Arc::new(TelegramRecordingChannel::default());
+ let channel: Arc<dyn Channel> = channel_impl.clone();
+
+ let mut channels_by_name = HashMap::new();
+ channels_by_name.insert(channel.name().to_string(), channel);
+
+ let provider_impl = Arc::new(ModelCaptureProvider::default());
+ let provider: Arc<dyn Provider> = provider_impl.clone();
+ let mut provider_cache_seed: HashMap<String, Arc<dyn Provider>> = HashMap::new();
+ provider_cache_seed.insert("test-provider".to_string(), Arc::clone(&provider));
+
+ let temp = tempfile::TempDir::new().expect("temp dir");
+ let config_path = temp.path().join("config.toml");
+
+ {
+ let mut store = runtime_config_store()
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ store.insert(
+ config_path.clone(),
+ RuntimeConfigState {
+ defaults: ChannelRuntimeDefaults {
+ default_provider: "test-provider".to_string(),
+ model: "hot-reloaded-model".to_string(),
+ temperature: 0.5,
+ api_key: None,
+ api_url: None,
+ reliability: crate::config::ReliabilityConfig::default(),
+ },
+ last_applied_stamp: None,
+ },
+ );
+ }
+
+ let runtime_ctx = Arc::new(ChannelRuntimeContext {
+ channels_by_name: Arc::new(channels_by_name),
+ provider: Arc::clone(&provider),
+ default_provider: Arc::new("test-provider".to_string()),
+ memory: Arc::new(NoopMemory),
+ tools_registry: Arc::new(vec![]),
+ observer: Arc::new(NoopObserver),
+ system_prompt: Arc::new("test-system-prompt".to_string()),
+ model: Arc::new("startup-model".to_string()),
+ temperature: 0.0,
+ auto_save_memory: false,
+ max_tool_iterations: 5,
+ min_relevance_score: 0.0,
+ conversation_histories: Arc::new(Mutex::new(HashMap::new())),
+ provider_cache: Arc::new(Mutex::new(provider_cache_seed)),
+ route_overrides: Arc::new(Mutex::new(HashMap::new())),
+ api_key: None,
+ api_url: None,
+ reliability: Arc::new(crate::config::ReliabilityConfig::default()),
+ provider_runtime_options: providers::ProviderRuntimeOptions {
+ zeroclaw_dir: Some(temp.path().to_path_buf()),
+ ..providers::ProviderRuntimeOptions::default()
+ },
+ workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
+ });
+
+ process_channel_message(
+ runtime_ctx,
+ traits::ChannelMessage {
+ id: "msg-runtime-store-model".to_string(),
+ sender: "alice".to_string(),
+ reply_target: "chat-1".to_string(),
+ content: "hello runtime defaults".to_string(),
+ channel: "telegram".to_string(),
+ timestamp: 4,
+ thread_ts: None,
+ },
+ CancellationToken::new(),
+ )
+ .await;
+
+ {
+ let mut store = runtime_config_store()
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ store.remove(&config_path);
+ }
+
+ assert_eq!(provider_impl.call_count.load(Ordering::SeqCst), 1);
+ assert_eq!(
+ provider_impl
+ .models
+ .lock()
+ .unwrap_or_else(|e| e.into_inner())
+ .as_slice(),
+ &["hot-reloaded-model".to_string()]
+ );
+ }
+
#[tokio::test]
async fn process_channel_message_respects_configured_max_tool_iterations_above_default() {
let channel_impl = Arc::new(RecordingChannel::default());
@@ -2478,6 +3680,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
process_channel_message(
@@ -2489,7 +3694,9 @@ mod tests {
content: "Loop until done".to_string(),
channel: "test-channel".to_string(),
timestamp: 1,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -2531,6 +3738,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
process_channel_message(
@@ -2542,7 +3752,9 @@ mod tests {
content: "Loop forever".to_string(),
channel: "test-channel".to_string(),
timestamp: 2,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -2604,6 +3816,66 @@ mod tests {
}
}
+ struct RecallMemory;
+
+ #[async_trait::async_trait]
+ impl Memory for RecallMemory {
+ fn name(&self) -> &str {
+ "recall-memory"
+ }
+
+ async fn store(
+ &self,
+ _key: &str,
+ _content: &str,
+ _category: crate::memory::MemoryCategory,
+ _session_id: Option<&str>,
+ ) -> anyhow::Result<()> {
+ Ok(())
+ }
+
+ async fn recall(
+ &self,
+ _query: &str,
+ _limit: usize,
+ _session_id: Option<&str>,
+ ) -> anyhow::Result<Vec<crate::memory::MemoryEntry>> {
+ Ok(vec![crate::memory::MemoryEntry {
+ id: "entry-1".to_string(),
+ key: "memory_key_1".to_string(),
+ content: "Age is 45".to_string(),
+ category: crate::memory::MemoryCategory::Conversation,
+ timestamp: "2026-02-20T00:00:00Z".to_string(),
+ session_id: None,
+ score: Some(0.9),
+ }])
+ }
+
+ async fn get(&self, _key: &str) -> anyhow::Result<Option<crate::memory::MemoryEntry>> {
+ Ok(None)
+ }
+
+ async fn list(
+ &self,
+ _category: Option<&crate::memory::MemoryCategory>,
+ _session_id: Option<&str>,
+ ) -> anyhow::Result<Vec<crate::memory::MemoryEntry>> {
+ Ok(Vec::new())
+ }
+
+ async fn forget(&self, _key: &str) -> anyhow::Result<bool> {
+ Ok(false)
+ }
+
+ async fn count(&self) -> anyhow::Result<usize> {
+ Ok(1)
+ }
+
+ async fn health_check(&self) -> bool {
+ true
+ }
+ }
+
#[tokio::test]
async fn message_dispatch_processes_messages_in_parallel() {
let channel_impl = Arc::new(RecordingChannel::default());
@@ -2635,6 +3907,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
let (tx, rx) = tokio::sync::mpsc::channel::<traits::ChannelMessage>(4);
@@ -2645,6 +3920,7 @@ mod tests {
content: "hello".to_string(),
channel: "test-channel".to_string(),
timestamp: 1,
+ thread_ts: None,
})
.await
.unwrap();
@@ -2655,6 +3931,7 @@ mod tests {
content: "world".to_string(),
channel: "test-channel".to_string(),
timestamp: 2,
+ thread_ts: None,
})
.await
.unwrap();
@@ -2674,6 +3951,171 @@ mod tests {
assert_eq!(sent_messages.len(), 2);
}
+ #[tokio::test]
+ async fn message_dispatch_interrupts_in_flight_telegram_request_and_preserves_context() {
+ let channel_impl = Arc::new(TelegramRecordingChannel::default());
+ let channel: Arc<dyn Channel> = channel_impl.clone();
+
+ let mut channels_by_name = HashMap::new();
+ channels_by_name.insert(channel.name().to_string(), channel);
+
+ let provider_impl = Arc::new(DelayedHistoryCaptureProvider {
+ delay: Duration::from_millis(250),
+ calls: std::sync::Mutex::new(Vec::new()),
+ });
+
+ let runtime_ctx = Arc::new(ChannelRuntimeContext {
+ channels_by_name: Arc::new(channels_by_name),
+ provider: provider_impl.clone(),
+ default_provider: Arc::new("test-provider".to_string()),
+ memory: Arc::new(NoopMemory),
+ tools_registry: Arc::new(vec![]),
+ observer: Arc::new(NoopObserver),
+ system_prompt: Arc::new("test-system-prompt".to_string()),
+ model: Arc::new("test-model".to_string()),
+ temperature: 0.0,
+ auto_save_memory: false,
+ max_tool_iterations: 10,
+ min_relevance_score: 0.0,
+ conversation_histories: Arc::new(Mutex::new(HashMap::new())),
+ provider_cache: Arc::new(Mutex::new(HashMap::new())),
+ route_overrides: Arc::new(Mutex::new(HashMap::new())),
+ api_key: None,
+ api_url: None,
+ reliability: Arc::new(crate::config::ReliabilityConfig::default()),
+ provider_runtime_options: providers::ProviderRuntimeOptions::default(),
+ workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: true,
+ multimodal: crate::config::MultimodalConfig::default(),
+ });
+
+ let (tx, rx) = tokio::sync::mpsc::channel::<traits::ChannelMessage>(8);
+ let send_task = tokio::spawn(async move {
+ tx.send(traits::ChannelMessage {
+ id: "msg-1".to_string(),
+ sender: "alice".to_string(),
+ reply_target: "chat-1".to_string(),
+ content: "forwarded content".to_string(),
+ channel: "telegram".to_string(),
+ timestamp: 1,
+ thread_ts: None,
+ })
+ .await
+ .unwrap();
+ tokio::time::sleep(Duration::from_millis(40)).await;
+ tx.send(traits::ChannelMessage {
+ id: "msg-2".to_string(),
+ sender: "alice".to_string(),
+ reply_target: "chat-1".to_string(),
+ content: "summarize this".to_string(),
+ channel: "telegram".to_string(),
+ timestamp: 2,
+ thread_ts: None,
+ })
+ .await
+ .unwrap();
+ });
+
+ run_message_dispatch_loop(rx, runtime_ctx, 4).await;
+ send_task.await.unwrap();
+
+ let sent_messages = channel_impl.sent_messages.lock().await;
+ assert_eq!(sent_messages.len(), 1);
+ assert!(sent_messages[0].starts_with("chat-1:"));
+ assert!(sent_messages[0].contains("response-2"));
+ drop(sent_messages);
+
+ let calls = provider_impl
+ .calls
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ assert_eq!(calls.len(), 2);
+ let second_call = &calls[1];
+ assert!(second_call
+ .iter()
+ .any(|(role, content)| { role == "user" && content.contains("forwarded content") }));
+ assert!(second_call
+ .iter()
+ .any(|(role, content)| { role == "user" && content.contains("summarize this") }));
+ assert!(
+ !second_call.iter().any(|(role, _)| role == "assistant"),
+ "cancelled turn should not persist an assistant response"
+ );
+ }
+
+ #[tokio::test]
+ async fn message_dispatch_interrupt_scope_is_same_sender_same_chat() {
+ let channel_impl = Arc::new(TelegramRecordingChannel::default());
+ let channel: Arc<dyn Channel> = channel_impl.clone();
+
+ let mut channels_by_name = HashMap::new();
+ channels_by_name.insert(channel.name().to_string(), channel);
+
+ let runtime_ctx = Arc::new(ChannelRuntimeContext {
+ channels_by_name: Arc::new(channels_by_name),
+ provider: Arc::new(SlowProvider {
+ delay: Duration::from_millis(180),
+ }),
+ default_provider: Arc::new("test-provider".to_string()),
+ memory: Arc::new(NoopMemory),
+ tools_registry: Arc::new(vec![]),
+ observer: Arc::new(NoopObserver),
+ system_prompt: Arc::new("test-system-prompt".to_string()),
+ model: Arc::new("test-model".to_string()),
+ temperature: 0.0,
+ auto_save_memory: false,
+ max_tool_iterations: 10,
+ min_relevance_score: 0.0,
+ conversation_histories: Arc::new(Mutex::new(HashMap::new())),
+ provider_cache: Arc::new(Mutex::new(HashMap::new())),
+ route_overrides: Arc::new(Mutex::new(HashMap::new())),
+ api_key: None,
+ api_url: None,
+ reliability: Arc::new(crate::config::ReliabilityConfig::default()),
+ provider_runtime_options: providers::ProviderRuntimeOptions::default(),
+ workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: true,
+ multimodal: crate::config::MultimodalConfig::default(),
+ });
+
+ let (tx, rx) = tokio::sync::mpsc::channel::<traits::ChannelMessage>(8);
+ let send_task = tokio::spawn(async move {
+ tx.send(traits::ChannelMessage {
+ id: "msg-a".to_string(),
+ sender: "alice".to_string(),
+ reply_target: "chat-1".to_string(),
+ content: "first chat".to_string(),
+ channel: "telegram".to_string(),
+ timestamp: 1,
+ thread_ts: None,
+ })
+ .await
+ .unwrap();
+ tokio::time::sleep(Duration::from_millis(30)).await;
+ tx.send(traits::ChannelMessage {
+ id: "msg-b".to_string(),
+ sender: "alice".to_string(),
+ reply_target: "chat-2".to_string(),
+ content: "second chat".to_string(),
+ channel: "telegram".to_string(),
+ timestamp: 2,
+ thread_ts: None,
+ })
+ .await
+ .unwrap();
+ });
+
+ run_message_dispatch_loop(rx, runtime_ctx, 4).await;
+ send_task.await.unwrap();
+
+ let sent_messages = channel_impl.sent_messages.lock().await;
+ assert_eq!(sent_messages.len(), 2);
+ assert!(sent_messages.iter().any(|msg| msg.starts_with("chat-1:")));
+ assert!(sent_messages.iter().any(|msg| msg.starts_with("chat-2:")));
+ }
+
#[tokio::test]
async fn process_channel_message_cancels_scoped_typing_task() {
let channel_impl = Arc::new(RecordingChannel::default());
@@ -2705,6 +4147,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
process_channel_message(
@@ -2716,7 +4161,9 @@ mod tests {
content: "hello".to_string(),
channel: "test-channel".to_string(),
timestamp: 1,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -2761,6 +4208,26 @@ mod tests {
assert!(prompt.contains("**memory_recall**"));
}
+ #[test]
+ fn prompt_includes_single_tool_protocol_block_after_append() {
+ let ws = make_workspace();
+ let tools = vec![("shell", "Run commands")];
+ let mut prompt = build_system_prompt(ws.path(), "gpt-4o", &tools, &[], None, None);
+
+ assert!(
+ !prompt.contains("## Tool Use Protocol"),
+ "build_system_prompt should not emit protocol block directly"
+ );
+
+ prompt.push_str(&build_tool_instructions(&[]));
+
+ assert_eq!(
+ prompt.matches("## Tool Use Protocol").count(),
+ 1,
+ "protocol block should appear exactly once in the final prompt"
+ );
+ }
+
#[test]
fn prompt_injects_safety() {
let ws = make_workspace();
@@ -2864,7 +4331,7 @@ mod tests {
}
#[test]
- fn prompt_skills_compact_list() {
+ fn prompt_skills_include_instructions_and_tools() {
let ws = make_workspace();
let skills = vec![crate::skills::Skill {
name: "code-review".into(),
@@ -2872,8 +4339,14 @@ mod tests {
version: "1.0.0".into(),
author: None,
tags: vec![],
- tools: vec![],
- prompts: vec!["Long prompt content that should NOT appear in system prompt".into()],
+ tools: vec![crate::skills::SkillTool {
+ name: "lint".into(),
+ description: "Run static checks".into(),
+ kind: "shell".into(),
+ command: "cargo clippy".into(),
+ args: HashMap::new(),
+ }],
+ prompts: vec!["Always run cargo test before final response.".into()],
location: None,
}];
@@ -2883,12 +4356,47 @@ mod tests {
assert!(prompt.contains("code-review "));
assert!(prompt.contains("Review code for bugs "));
assert!(prompt.contains("SKILL.md"));
- assert!(
- prompt.contains("loaded on demand"),
- "should mention on-demand loading"
- );
- // Full prompt content should NOT be dumped
- assert!(!prompt.contains("Long prompt content that should NOT appear"));
+ assert!(prompt.contains(""));
+ assert!(prompt
+ .contains("Always run cargo test before final response. "));
+ assert!(prompt.contains(""));
+ assert!(prompt.contains("lint "));
+ assert!(prompt.contains("shell "));
+ assert!(!prompt.contains("loaded on demand"));
+ }
+
+ #[test]
+ fn prompt_skills_escape_reserved_xml_chars() {
+ let ws = make_workspace();
+ let skills = vec![crate::skills::Skill {
+ name: "code&".into(),
+ description: "Review \"unsafe\" and 'risky' bits".into(),
+ version: "1.0.0".into(),
+ author: None,
+ tags: vec![],
+ tools: vec![crate::skills::SkillTool {
+ name: "run\"linter\"".into(),
+ description: "Run & report".into(),
+ kind: "shell&exec".into(),
+ command: "cargo clippy".into(),
+ args: HashMap::new(),
+ }],
+ prompts: vec!["Use and & keep output \"safe\"".into()],
+ location: None,
+ }];
+
+ let prompt = build_system_prompt(ws.path(), "model", &[], &skills, None, None);
+
+ assert!(prompt.contains("code<review>& "));
+ assert!(prompt.contains(
+ "Review "unsafe" and 'risky' bits "
+ ));
+ assert!(prompt.contains("run"linter" "));
+ assert!(prompt.contains("Run <lint> & report "));
+ assert!(prompt.contains("shell&exec "));
+ assert!(prompt.contains(
+ "Use <tool_call> and & keep output "safe" "
+ ));
}
#[test]
@@ -2950,8 +4458,8 @@ mod tests {
"missing Channel Capabilities section"
);
assert!(
- prompt.contains("running as a Discord bot"),
- "missing Discord context"
+ prompt.contains("running as a messaging bot"),
+ "missing channel context"
);
assert!(
prompt.contains("NEVER repeat, describe, or echo credentials"),
@@ -2976,6 +4484,7 @@ mod tests {
content: "hello".into(),
channel: "slack".into(),
timestamp: 1,
+ thread_ts: None,
};
assert_eq!(conversation_memory_key(&msg), "slack_U123_msg_abc123");
@@ -2990,6 +4499,7 @@ mod tests {
content: "first".into(),
channel: "slack".into(),
timestamp: 1,
+ thread_ts: None,
};
let msg2 = traits::ChannelMessage {
id: "msg_2".into(),
@@ -2998,6 +4508,7 @@ mod tests {
content: "second".into(),
channel: "slack".into(),
timestamp: 2,
+ thread_ts: None,
};
assert_ne!(
@@ -3018,6 +4529,7 @@ mod tests {
content: "I'm Paul".into(),
channel: "slack".into(),
timestamp: 1,
+ thread_ts: None,
};
let msg2 = traits::ChannelMessage {
id: "msg_2".into(),
@@ -3026,6 +4538,7 @@ mod tests {
content: "I'm 45".into(),
channel: "slack".into(),
timestamp: 2,
+ thread_ts: None,
};
mem.store(
@@ -3095,6 +4608,9 @@ mod tests {
reliability: Arc::new(crate::config::ReliabilityConfig::default()),
provider_runtime_options: providers::ProviderRuntimeOptions::default(),
workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
});
process_channel_message(
@@ -3106,7 +4622,9 @@ mod tests {
content: "hello".to_string(),
channel: "test-channel".to_string(),
timestamp: 1,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -3119,7 +4637,9 @@ mod tests {
content: "follow up".to_string(),
channel: "test-channel".to_string(),
timestamp: 2,
+ thread_ts: None,
},
+ CancellationToken::new(),
)
.await;
@@ -3141,6 +4661,217 @@ mod tests {
assert!(calls[1][3].1.contains("follow up"));
}
+ #[tokio::test]
+ async fn process_channel_message_enriches_current_turn_without_persisting_context() {
+ let channel_impl = Arc::new(RecordingChannel::default());
+ let channel: Arc<dyn Channel> = channel_impl.clone();
+
+ let mut channels_by_name = HashMap::new();
+ channels_by_name.insert(channel.name().to_string(), channel);
+
+ let provider_impl = Arc::new(HistoryCaptureProvider::default());
+ let runtime_ctx = Arc::new(ChannelRuntimeContext {
+ channels_by_name: Arc::new(channels_by_name),
+ provider: provider_impl.clone(),
+ default_provider: Arc::new("test-provider".to_string()),
+ memory: Arc::new(RecallMemory),
+ tools_registry: Arc::new(vec![]),
+ observer: Arc::new(NoopObserver),
+ system_prompt: Arc::new("test-system-prompt".to_string()),
+ model: Arc::new("test-model".to_string()),
+ temperature: 0.0,
+ auto_save_memory: false,
+ max_tool_iterations: 5,
+ min_relevance_score: 0.0,
+ conversation_histories: Arc::new(Mutex::new(HashMap::new())),
+ provider_cache: Arc::new(Mutex::new(HashMap::new())),
+ route_overrides: Arc::new(Mutex::new(HashMap::new())),
+ api_key: None,
+ api_url: None,
+ reliability: Arc::new(crate::config::ReliabilityConfig::default()),
+ provider_runtime_options: providers::ProviderRuntimeOptions::default(),
+ workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
+ });
+
+ process_channel_message(
+ runtime_ctx.clone(),
+ traits::ChannelMessage {
+ id: "msg-ctx-1".to_string(),
+ sender: "alice".to_string(),
+ reply_target: "chat-ctx".to_string(),
+ content: "hello".to_string(),
+ channel: "test-channel".to_string(),
+ timestamp: 1,
+ thread_ts: None,
+ },
+ CancellationToken::new(),
+ )
+ .await;
+
+ let calls = provider_impl
+ .calls
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ assert_eq!(calls.len(), 1);
+ assert_eq!(calls[0].len(), 2);
+ assert_eq!(calls[0][1].0, "user");
+ assert!(calls[0][1].1.contains("[Memory context]"));
+ assert!(calls[0][1].1.contains("Age is 45"));
+ assert!(calls[0][1].1.contains("hello"));
+
+ let histories = runtime_ctx
+ .conversation_histories
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ let turns = histories
+ .get("test-channel_alice")
+ .expect("history should be stored for sender");
+ assert_eq!(turns[0].role, "user");
+ assert_eq!(turns[0].content, "hello");
+ assert!(!turns[0].content.contains("[Memory context]"));
+ }
+
+ #[tokio::test]
+ async fn process_channel_message_telegram_keeps_system_instruction_at_top_only() {
+ let channel_impl = Arc::new(TelegramRecordingChannel::default());
+ let channel: Arc<dyn Channel> = channel_impl.clone();
+
+ let mut channels_by_name = HashMap::new();
+ channels_by_name.insert(channel.name().to_string(), channel);
+
+ let provider_impl = Arc::new(HistoryCaptureProvider::default());
+ let mut histories = HashMap::new();
+ histories.insert(
+ "telegram_alice".to_string(),
+ vec![
+ ChatMessage::assistant("stale assistant"),
+ ChatMessage::user("earlier user question"),
+ ChatMessage::assistant("earlier assistant reply"),
+ ],
+ );
+
+ let runtime_ctx = Arc::new(ChannelRuntimeContext {
+ channels_by_name: Arc::new(channels_by_name),
+ provider: provider_impl.clone(),
+ default_provider: Arc::new("test-provider".to_string()),
+ memory: Arc::new(NoopMemory),
+ tools_registry: Arc::new(vec![]),
+ observer: Arc::new(NoopObserver),
+ system_prompt: Arc::new("test-system-prompt".to_string()),
+ model: Arc::new("test-model".to_string()),
+ temperature: 0.0,
+ auto_save_memory: false,
+ max_tool_iterations: 5,
+ min_relevance_score: 0.0,
+ conversation_histories: Arc::new(Mutex::new(histories)),
+ provider_cache: Arc::new(Mutex::new(HashMap::new())),
+ route_overrides: Arc::new(Mutex::new(HashMap::new())),
+ api_key: None,
+ api_url: None,
+ reliability: Arc::new(crate::config::ReliabilityConfig::default()),
+ provider_runtime_options: providers::ProviderRuntimeOptions::default(),
+ workspace_dir: Arc::new(std::env::temp_dir()),
+ message_timeout_secs: CHANNEL_MESSAGE_TIMEOUT_SECS,
+ interrupt_on_new_message: false,
+ multimodal: crate::config::MultimodalConfig::default(),
+ });
+
+ process_channel_message(
+ runtime_ctx.clone(),
+ traits::ChannelMessage {
+ id: "tg-msg-1".to_string(),
+ sender: "alice".to_string(),
+ reply_target: "chat-telegram".to_string(),
+ content: "hello".to_string(),
+ channel: "telegram".to_string(),
+ timestamp: 1,
+ thread_ts: None,
+ },
+ CancellationToken::new(),
+ )
+ .await;
+
+ let calls = provider_impl
+ .calls
+ .lock()
+ .unwrap_or_else(|e| e.into_inner());
+ assert_eq!(calls.len(), 1);
+ assert_eq!(calls[0].len(), 4);
+
+ let roles = calls[0]
+ .iter()
+ .map(|(role, _)| role.as_str())
+ .collect::<Vec<_>>();
+ assert_eq!(roles, vec!["system", "user", "assistant", "user"]);
+ assert!(
+ calls[0][0]
+ .1
+ .contains("When responding on Telegram, include media markers"),
+ "telegram delivery instruction should live in the system prompt"
+ );
+ assert!(!calls[0].iter().skip(1).any(|(role, _)| role == "system"));
+ }
+
+ #[test]
+ fn extract_tool_context_summary_collects_alias_and_native_tool_calls() {
+ let history = vec![
+ ChatMessage::system("sys"),
+ ChatMessage::assistant(
+ r#"
+<tool_call>
+{"name":"shell","arguments":{"command":"date"}}
+</tool_call>
+ "#,
+ ),
+ ChatMessage::assistant(
+ r#"{"content":null,"tool_calls":[{"id":"1","name":"web_search","arguments":"{}"}]}"#,
+ ),
+ ];
+
+ let summary = extract_tool_context_summary(&history, 1);
+ assert_eq!(summary, "[Used tools: shell, web_search]");
+ }
+
+ #[test]
+ fn extract_tool_context_summary_collects_prompt_mode_tool_result_names() {
+ let history = vec![
+ ChatMessage::system("sys"),
+ ChatMessage::assistant("Using markdown tool call fence"),
+ ChatMessage::user(
+ r#"[Tool results]
+<tool_result name="http_request">
+{"status":200}
+</tool_result>
+<tool_result name="shell">
+Mon Feb 20
+</tool_result>
+ "#,
+ ),
+ ];
+
+ let summary = extract_tool_context_summary(&history, 1);
+ assert_eq!(summary, "[Used tools: http_request, shell]");
+ }
+
+ #[test]
+ fn extract_tool_context_summary_respects_start_index() {
+ let history = vec![
+ ChatMessage::assistant(
+ r#"
+<tool_call>
+{"name":"stale_tool","arguments":{}}
+</tool_call>
+ "#,
+ ),
+ ChatMessage::assistant(
+ r#"
+<tool_call>
+{"name":"fresh_tool","arguments":{}}
+</tool_call>
+ "#,
+ ),
+ ];
+
+ let summary = extract_tool_context_summary(&history, 1);
+ assert_eq!(summary, "[Used tools: fresh_tool]");
+ }
+
// ── AIEOS Identity Tests (Issue #168) ─────────────────────────
#[test]
diff --git a/src/channels/qq.rs b/src/channels/qq.rs
index 70dc20d..18117ef 100644
--- a/src/channels/qq.rs
+++ b/src/channels/qq.rs
@@ -11,6 +11,15 @@ use uuid::Uuid;
const QQ_API_BASE: &str = "https://api.sgroup.qq.com";
const QQ_AUTH_URL: &str = "https://bots.qq.com/app/getAppAccessToken";
+fn ensure_https(url: &str) -> anyhow::Result<()> {
+ if !url.starts_with("https://") {
+ anyhow::bail!(
+ "Refusing to transmit sensitive data over non-HTTPS URL: URL scheme must be https"
+ );
+ }
+ Ok(())
+}
+
/// Deduplication set capacity — evict half of entries when full.
const DEDUP_CAPACITY: usize = 10_000;
@@ -196,6 +205,8 @@ impl Channel for QQChannel {
)
};
+ ensure_https(&url)?;
+
let resp = self
.http_client()
.post(&url)
@@ -252,7 +263,9 @@ impl Channel for QQChannel {
}
}
});
- write.send(Message::Text(identify.to_string())).await?;
+ write
+ .send(Message::Text(identify.to_string().into()))
+ .await?;
tracing::info!("QQ: connected and identified");
@@ -276,7 +289,11 @@ impl Channel for QQChannel {
_ = hb_rx.recv() => {
let d = if sequence >= 0 { json!(sequence) } else { json!(null) };
let hb = json!({"op": 1, "d": d});
- if write.send(Message::Text(hb.to_string())).await.is_err() {
+ if write
+ .send(Message::Text(hb.to_string().into()))
+ .await
+ .is_err()
+ {
break;
}
}
@@ -287,7 +304,7 @@ impl Channel for QQChannel {
_ => continue,
};
- let event: serde_json::Value = match serde_json::from_str(&msg) {
+ let event: serde_json::Value = match serde_json::from_str(msg.as_ref()) {
Ok(e) => e,
Err(_) => continue,
};
@@ -304,7 +321,11 @@ impl Channel for QQChannel {
1 => {
let d = if sequence >= 0 { json!(sequence) } else { json!(null) };
let hb = json!({"op": 1, "d": d});
- if write.send(Message::Text(hb.to_string())).await.is_err() {
+ if write
+ .send(Message::Text(hb.to_string().into()))
+ .await
+ .is_err()
+ {
break;
}
continue;
@@ -366,6 +387,7 @@ impl Channel for QQChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
if tx.send(channel_msg).await.is_err() {
@@ -404,6 +426,7 @@ impl Channel for QQChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
};
if tx.send(channel_msg).await.is_err() {
diff --git a/src/channels/signal.rs b/src/channels/signal.rs
index e759a1a..20cacfc 100644
--- a/src/channels/signal.rs
+++ b/src/channels/signal.rs
@@ -119,12 +119,18 @@ impl SignalChannel {
(2..=15).contains(&number.len()) && number.chars().all(|c| c.is_ascii_digit())
}
+ /// Check whether a string is a valid UUID (signal-cli uses these for
+ /// privacy-enabled users who have opted out of sharing their phone number).
+ fn is_uuid(s: &str) -> bool {
+ Uuid::parse_str(s).is_ok()
+ }
+
fn parse_recipient_target(recipient: &str) -> RecipientTarget {
if let Some(group_id) = recipient.strip_prefix(GROUP_TARGET_PREFIX) {
return RecipientTarget::Group(group_id.to_string());
}
- if Self::is_e164(recipient) {
+ if Self::is_e164(recipient) || Self::is_uuid(recipient) {
RecipientTarget::Direct(recipient.to_string())
} else {
RecipientTarget::Group(recipient.to_string())
@@ -259,6 +265,7 @@ impl SignalChannel {
content: text.to_string(),
channel: "signal".to_string(),
timestamp: timestamp / 1000, // millis → secs
+ thread_ts: None,
})
}
}
@@ -653,6 +660,15 @@ mod tests {
);
}
+ #[test]
+ fn parse_recipient_target_uuid_is_direct() {
+ let uuid = "a1b2c3d4-e5f6-7890-abcd-ef1234567890";
+ assert_eq!(
+ SignalChannel::parse_recipient_target(uuid),
+ RecipientTarget::Direct(uuid.to_string())
+ );
+ }
+
#[test]
fn parse_recipient_target_non_e164_plus_is_group() {
assert_eq!(
@@ -661,6 +677,24 @@ mod tests {
);
}
+ #[test]
+ fn is_uuid_valid() {
+ assert!(SignalChannel::is_uuid(
+ "a1b2c3d4-e5f6-7890-abcd-ef1234567890"
+ ));
+ assert!(SignalChannel::is_uuid(
+ "00000000-0000-0000-0000-000000000000"
+ ));
+ }
+
+ #[test]
+ fn is_uuid_invalid() {
+ assert!(!SignalChannel::is_uuid("+1234567890"));
+ assert!(!SignalChannel::is_uuid("not-a-uuid"));
+ assert!(!SignalChannel::is_uuid("group:abc123"));
+ assert!(!SignalChannel::is_uuid(""));
+ }
+
#[test]
fn sender_prefers_source_number() {
let env = Envelope {
@@ -685,6 +719,73 @@ mod tests {
assert_eq!(SignalChannel::sender(&env), Some("uuid-123".to_string()));
}
+ #[test]
+ fn process_envelope_uuid_sender_dm() {
+ let uuid = "a1b2c3d4-e5f6-7890-abcd-ef1234567890";
+ let ch = SignalChannel::new(
+ "http://127.0.0.1:8686".to_string(),
+ "+1234567890".to_string(),
+ None,
+ vec!["*".to_string()],
+ false,
+ false,
+ );
+ let env = Envelope {
+ source: Some(uuid.to_string()),
+ source_number: None,
+ data_message: Some(DataMessage {
+ message: Some("Hello from privacy user".to_string()),
+ timestamp: Some(1_700_000_000_000),
+ group_info: None,
+ attachments: None,
+ }),
+ story_message: None,
+ timestamp: Some(1_700_000_000_000),
+ };
+ let msg = ch.process_envelope(&env).unwrap();
+ assert_eq!(msg.sender, uuid);
+ assert_eq!(msg.reply_target, uuid);
+ assert_eq!(msg.content, "Hello from privacy user");
+
+ // Verify reply routing: UUID sender in DM should route as Direct
+ let target = SignalChannel::parse_recipient_target(&msg.reply_target);
+ assert_eq!(target, RecipientTarget::Direct(uuid.to_string()));
+ }
+
+ #[test]
+ fn process_envelope_uuid_sender_in_group() {
+ let uuid = "a1b2c3d4-e5f6-7890-abcd-ef1234567890";
+ let ch = SignalChannel::new(
+ "http://127.0.0.1:8686".to_string(),
+ "+1234567890".to_string(),
+ Some("testgroup".to_string()),
+ vec!["*".to_string()],
+ false,
+ false,
+ );
+ let env = Envelope {
+ source: Some(uuid.to_string()),
+ source_number: None,
+ data_message: Some(DataMessage {
+ message: Some("Group msg from privacy user".to_string()),
+ timestamp: Some(1_700_000_000_000),
+ group_info: Some(GroupInfo {
+ group_id: Some("testgroup".to_string()),
+ }),
+ attachments: None,
+ }),
+ story_message: None,
+ timestamp: Some(1_700_000_000_000),
+ };
+ let msg = ch.process_envelope(&env).unwrap();
+ assert_eq!(msg.sender, uuid);
+ assert_eq!(msg.reply_target, "group:testgroup");
+
+ // Verify reply routing: group message should still route as Group
+ let target = SignalChannel::parse_recipient_target(&msg.reply_target);
+ assert_eq!(target, RecipientTarget::Group("testgroup".to_string()));
+ }
+
#[test]
fn sender_none_when_both_missing() {
let env = Envelope {
diff --git a/src/channels/slack.rs b/src/channels/slack.rs
index 13d1273..559af15 100644
--- a/src/channels/slack.rs
+++ b/src/channels/slack.rs
@@ -45,6 +45,15 @@ impl SlackChannel {
.and_then(|u| u.as_str())
.map(String::from)
}
+
+ /// Resolve the thread identifier for inbound Slack messages.
+ /// Replies carry `thread_ts` (root thread id); top-level messages only have `ts`.
+ fn inbound_thread_ts(msg: &serde_json::Value, ts: &str) -> Option<String> {
+ msg.get("thread_ts")
+ .and_then(|t| t.as_str())
+ .or(if ts.is_empty() { None } else { Some(ts) })
+ .map(str::to_string)
+ }
}
#[async_trait]
@@ -54,11 +63,15 @@ impl Channel for SlackChannel {
}
async fn send(&self, message: &SendMessage) -> anyhow::Result<()> {
- let body = serde_json::json!({
+ let mut body = serde_json::json!({
"channel": message.recipient,
"text": message.content
});
+ if let Some(ref ts) = message.thread_ts {
+ body["thread_ts"] = serde_json::json!(ts);
+ }
+
let resp = self
.http_client()
.post("https://slack.com/api/chat.postMessage")
@@ -170,6 +183,7 @@ impl Channel for SlackChannel {
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: Self::inbound_thread_ts(msg, ts),
};
if tx.send(channel_msg).await.is_err() {
@@ -303,4 +317,33 @@ mod tests {
assert!(!id.contains('-')); // No UUID dashes
assert!(id.starts_with("slack_"));
}
+
+ #[test]
+ fn inbound_thread_ts_prefers_explicit_thread_ts() {
+ let msg = serde_json::json!({
+ "ts": "123.002",
+ "thread_ts": "123.001"
+ });
+
+ let thread_ts = SlackChannel::inbound_thread_ts(&msg, "123.002");
+ assert_eq!(thread_ts.as_deref(), Some("123.001"));
+ }
+
+ #[test]
+ fn inbound_thread_ts_falls_back_to_ts() {
+ let msg = serde_json::json!({
+ "ts": "123.001"
+ });
+
+ let thread_ts = SlackChannel::inbound_thread_ts(&msg, "123.001");
+ assert_eq!(thread_ts.as_deref(), Some("123.001"));
+ }
+
+ #[test]
+ fn inbound_thread_ts_none_when_ts_missing() {
+ let msg = serde_json::json!({});
+
+ let thread_ts = SlackChannel::inbound_thread_ts(&msg, "");
+ assert_eq!(thread_ts, None);
+ }
}
diff --git a/src/channels/telegram.rs b/src/channels/telegram.rs
index ca0e03b..1503e57 100644
--- a/src/channels/telegram.rs
+++ b/src/channels/telegram.rs
@@ -6,10 +6,10 @@ use async_trait::async_trait;
use directories::UserDirs;
use parking_lot::Mutex;
use reqwest::multipart::{Form, Part};
-use std::fs;
use std::path::Path;
use std::sync::{Arc, RwLock};
use std::time::Duration;
+use tokio::fs;
/// Telegram's maximum message length for text messages
const TELEGRAM_MAX_MESSAGE_LENGTH: usize = 4096;
@@ -18,7 +18,7 @@ const TELEGRAM_BIND_COMMAND: &str = "/bind";
/// Split a message into chunks that respect Telegram's 4096 character limit.
/// Tries to split at word boundaries when possible, and handles continuation.
fn split_message_for_telegram(message: &str) -> Vec<String> {
- if message.len() <= TELEGRAM_MAX_MESSAGE_LENGTH {
+ if message.chars().count() <= TELEGRAM_MAX_MESSAGE_LENGTH {
return vec![message.to_string()];
}
@@ -26,29 +26,32 @@ fn split_message_for_telegram(message: &str) -> Vec {
let mut remaining = message;
while !remaining.is_empty() {
- let chunk_end = if remaining.len() <= TELEGRAM_MAX_MESSAGE_LENGTH {
- remaining.len()
+ // Find the byte offset for the Nth character boundary.
+ let hard_split = remaining
+ .char_indices()
+ .nth(TELEGRAM_MAX_MESSAGE_LENGTH)
+ .map_or(remaining.len(), |(idx, _)| idx);
+
+ let chunk_end = if hard_split == remaining.len() {
+ hard_split
} else {
// Try to find a good break point (newline, then space)
- let search_area = &remaining[..TELEGRAM_MAX_MESSAGE_LENGTH];
+ let search_area = &remaining[..hard_split];
// Prefer splitting at newline
if let Some(pos) = search_area.rfind('\n') {
// Don't split if the newline is too close to the start
- if pos >= TELEGRAM_MAX_MESSAGE_LENGTH / 2 {
+ if search_area[..pos].chars().count() >= TELEGRAM_MAX_MESSAGE_LENGTH / 2 {
pos + 1
} else {
// Try space as fallback
- search_area
- .rfind(' ')
- .unwrap_or(TELEGRAM_MAX_MESSAGE_LENGTH)
- + 1
+ search_area.rfind(' ').unwrap_or(hard_split) + 1
}
} else if let Some(pos) = search_area.rfind(' ') {
pos + 1
} else {
- // Hard split at the limit
- TELEGRAM_MAX_MESSAGE_LENGTH
+ // Hard split at character boundary
+ hard_split
}
};
@@ -373,7 +376,7 @@ impl TelegramChannel {
.collect()
}
- fn load_config_without_env() -> anyhow::Result<Config> {
+ async fn load_config_without_env() -> anyhow::Result<Config> {
let home = UserDirs::new()
.map(|u| u.home_dir().to_path_buf())
.context("Could not find home directory")?;
@@ -381,18 +384,23 @@ impl TelegramChannel {
let config_path = zeroclaw_dir.join("config.toml");
let contents = fs::read_to_string(&config_path)
+ .await
.with_context(|| format!("Failed to read config file: {}", config_path.display()))?;
let mut config: Config = toml::from_str(&contents)
- .context("Failed to parse config file for Telegram binding")?;
+ .context("Failed to parse config.toml — check [channels.telegram] section for syntax errors")?;
config.config_path = config_path;
config.workspace_dir = zeroclaw_dir.join("workspace");
Ok(config)
}
- fn persist_allowed_identity_blocking(identity: &str) -> anyhow::Result<()> {
- let mut config = Self::load_config_without_env()?;
+ async fn persist_allowed_identity(&self, identity: &str) -> anyhow::Result<()> {
+ let mut config = Self::load_config_without_env().await?;
let Some(telegram) = config.channels_config.telegram.as_mut() else {
- anyhow::bail!("Telegram channel config is missing in config.toml");
+ anyhow::bail!(
+ "Missing [channels.telegram] section in config.toml. \
+ Add bot_token and allowed_users under [channels.telegram], \
+ or run `zeroclaw onboard --channels-only` to configure interactively"
+ );
};
let normalized = Self::normalize_identity(identity);
@@ -404,20 +412,13 @@ impl TelegramChannel {
telegram.allowed_users.push(normalized);
config
.save()
+ .await
.context("Failed to persist Telegram allowlist to config.toml")?;
}
Ok(())
}
- async fn persist_allowed_identity(&self, identity: &str) -> anyhow::Result<()> {
- let identity = identity.to_string();
- tokio::task::spawn_blocking(move || Self::persist_allowed_identity_blocking(&identity))
- .await
- .map_err(|e| anyhow::anyhow!("Failed to join Telegram bind save task: {e}"))??;
- Ok(())
- }
-
fn add_allowed_identity_runtime(&self, identity: &str) {
let normalized = Self::normalize_identity(identity);
if normalized.is_empty() {
@@ -600,12 +601,12 @@ impl TelegramChannel {
let username = username_opt.unwrap_or("unknown");
let normalized_username = Self::normalize_identity(username);
- let user_id = message
+ let sender_id = message
.get("from")
.and_then(|from| from.get("id"))
.and_then(serde_json::Value::as_i64);
- let user_id_str = user_id.map(|id| id.to_string());
- let normalized_user_id = user_id_str.as_deref().map(Self::normalize_identity);
+ let sender_id_str = sender_id.map(|id| id.to_string());
+ let normalized_sender_id = sender_id_str.as_deref().map(Self::normalize_identity);
let chat_id = message
.get("chat")
@@ -619,7 +620,7 @@ impl TelegramChannel {
};
let mut identities = vec![normalized_username.as_str()];
- if let Some(ref id) = normalized_user_id {
+ if let Some(ref id) = normalized_sender_id {
identities.push(id.as_str());
}
@@ -629,9 +630,9 @@ impl TelegramChannel {
if let Some(code) = Self::extract_bind_code(text) {
if let Some(pairing) = self.pairing.as_ref() {
- match pairing.try_pair(code) {
+ match pairing.try_pair(code, &chat_id).await {
Ok(Some(_token)) => {
- let bind_identity = normalized_user_id.clone().or_else(|| {
+ let bind_identity = normalized_sender_id.clone().or_else(|| {
if normalized_username.is_empty() || normalized_username == "unknown" {
None
} else {
@@ -694,7 +695,7 @@ impl TelegramChannel {
} else {
let _ = self
.send(&SendMessage::new(
- "ℹ️ Telegram pairing is not active. Ask operator to update allowlist in config.toml.",
+ "ℹ️ Telegram pairing is not active. Ask operator to add your user ID to channels.telegram.allowed_users in config.toml.",
&chat_id,
))
.await;
@@ -703,12 +704,12 @@ impl TelegramChannel {
}
tracing::warn!(
- "Telegram: ignoring message from unauthorized user: username={username}, user_id={}. \
+ "Telegram: ignoring message from unauthorized user: username={username}, sender_id={}. \
Allowlist Telegram username (without '@') or numeric user ID.",
- user_id_str.as_deref().unwrap_or("unknown")
+ sender_id_str.as_deref().unwrap_or("unknown")
);
- let suggested_identity = normalized_user_id
+ let suggested_identity = normalized_sender_id
.clone()
.or_else(|| {
if normalized_username.is_empty() || normalized_username == "unknown" {
@@ -750,20 +751,20 @@ Allowlist Telegram username (without '@') or numeric user ID.",
.unwrap_or("unknown")
.to_string();
- let user_id = message
+ let sender_id = message
.get("from")
.and_then(|from| from.get("id"))
.and_then(serde_json::Value::as_i64)
.map(|id| id.to_string());
let sender_identity = if username == "unknown" {
- user_id.clone().unwrap_or_else(|| "unknown".to_string())
+ sender_id.clone().unwrap_or_else(|| "unknown".to_string())
} else {
username.clone()
};
let mut identities = vec![username.as_str()];
- if let Some(id) = user_id.as_deref() {
+ if let Some(id) = sender_id.as_deref() {
identities.push(id);
}
@@ -825,6 +826,7 @@ Allowlist Telegram username (without '@') or numeric user ID.",
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs(),
+ thread_ts: None,
})
}
@@ -1631,6 +1633,37 @@ impl Channel for TelegramChannel {
.await
}
+ async fn cancel_draft(&self, recipient: &str, message_id: &str) -> anyhow::Result<()> {
+ let (chat_id, _) = Self::parse_reply_target(recipient);
+ self.last_draft_edit.lock().remove(&chat_id);
+
+ let message_id = match message_id.parse::<i64>() {
+ Ok(id) => id,
+ Err(e) => {
+ tracing::debug!("Invalid Telegram draft message_id '{message_id}': {e}");
+ return Ok(());
+ }
+ };
+
+ let response = self
+ .client
+ .post(self.api_url("deleteMessage"))
+ .json(&serde_json::json!({
+ "chat_id": chat_id,
+ "message_id": message_id,
+ }))
+ .send()
+ .await?;
+
+ if !response.status().is_success() {
+ let status = response.status();
+ let body = response.text().await.unwrap_or_default();
+ tracing::debug!("Telegram deleteMessage failed ({status}): {body}");
+ }
+
+ Ok(())
+ }
+
async fn send(&self, message: &SendMessage) -> anyhow::Result<()> {
// Strip tool_call tags before processing to prevent Markdown parsing failures
let content = strip_tool_call_tags(&message.content);
@@ -2830,4 +2863,103 @@ mod tests {
let ch_disabled = TelegramChannel::new("token".into(), vec!["*".into()], false);
assert!(!ch_disabled.mention_only);
}
+
+ // ─────────────────────────────────────────────────────────────────────
+ // TG6: Channel platform limit edge cases for Telegram (4096 char limit)
+ // Prevents: Pattern 6 — issues #574, #499
+ // ─────────────────────────────────────────────────────────────────────
+
+ #[test]
+ fn telegram_split_code_block_at_boundary() {
+ let mut msg = String::new();
+ msg.push_str("```python\n");
+ msg.push_str(&"x".repeat(4085));
+ msg.push_str("\n```\nMore text after code block");
+ let parts = split_message_for_telegram(&msg);
+ assert!(
+ parts.len() >= 2,
+ "code block spanning boundary should split"
+ );
+ for part in &parts {
+ assert!(
+ part.len() <= TELEGRAM_MAX_MESSAGE_LENGTH,
+ "each part must be <= {TELEGRAM_MAX_MESSAGE_LENGTH}, got {}",
+ part.len()
+ );
+ }
+ }
+
+ #[test]
+ fn telegram_split_single_long_word() {
+ let long_word = "a".repeat(5000);
+ let parts = split_message_for_telegram(&long_word);
+ assert!(parts.len() >= 2, "word exceeding limit must be split");
+ for part in &parts {
+ assert!(
+ part.len() <= TELEGRAM_MAX_MESSAGE_LENGTH,
+ "hard-split part must be <= {TELEGRAM_MAX_MESSAGE_LENGTH}, got {}",
+ part.len()
+ );
+ }
+ let reassembled: String = parts.join("");
+ assert_eq!(reassembled, long_word);
+ }
+
+ #[test]
+ fn telegram_split_exactly_at_limit_no_split() {
+ let msg = "a".repeat(TELEGRAM_MAX_MESSAGE_LENGTH);
+ let parts = split_message_for_telegram(&msg);
+ assert_eq!(parts.len(), 1, "message exactly at limit should not split");
+ }
+
+ #[test]
+ fn telegram_split_one_over_limit() {
+ let msg = "a".repeat(TELEGRAM_MAX_MESSAGE_LENGTH + 1);
+ let parts = split_message_for_telegram(&msg);
+ assert!(parts.len() >= 2, "message 1 char over limit must split");
+ }
+
+ #[test]
+ fn telegram_split_many_short_lines() {
+ let msg: String = (0..1000).map(|i| format!("line {i}\n")).collect();
+ let parts = split_message_for_telegram(&msg);
+ for part in &parts {
+ assert!(
+ part.len() <= TELEGRAM_MAX_MESSAGE_LENGTH,
+ "short-line batch must be <= limit"
+ );
+ }
+ }
+
+ #[test]
+ fn telegram_split_only_whitespace() {
+ let msg = " \n\n\t ";
+ let parts = split_message_for_telegram(msg);
+ assert!(parts.len() <= 1);
+ }
+
+ #[test]
+ fn telegram_split_emoji_at_boundary() {
+ let mut msg = "a".repeat(4094);
+ msg.push_str("🎉🎊"); // 4096 chars total
+ let parts = split_message_for_telegram(&msg);
+ for part in &parts {
+ // The function splits on character count, not byte count
+ assert!(
+ part.chars().count() <= TELEGRAM_MAX_MESSAGE_LENGTH,
+ "emoji boundary split must respect limit"
+ );
+ }
+ }
+
+ #[test]
+ fn telegram_split_consecutive_newlines() {
+ let mut msg = "a".repeat(4090);
+ msg.push_str("\n\n\n\n\n\n");
+ msg.push_str(&"b".repeat(100));
+ let parts = split_message_for_telegram(&msg);
+ for part in &parts {
+ assert!(part.len() <= TELEGRAM_MAX_MESSAGE_LENGTH);
+ }
+ }
}
diff --git a/src/channels/traits.rs b/src/channels/traits.rs
index 3a7d9df..67546ce 100644
--- a/src/channels/traits.rs
+++ b/src/channels/traits.rs
@@ -9,6 +9,9 @@ pub struct ChannelMessage {
pub content: String,
pub channel: String,
pub timestamp: u64,
+ /// Platform thread identifier (e.g. Slack `ts`, Discord thread ID).
+ /// When set, replies should be posted as threaded responses.
+ pub thread_ts: Option<String>,
}
/// Message to send through a channel
@@ -17,6 +20,8 @@ pub struct SendMessage {
pub content: String,
pub recipient: String,
pub subject: Option<String>,
+ /// Platform thread identifier for threaded replies (e.g. Slack `thread_ts`).
+ pub thread_ts: Option<String>,
}
impl SendMessage {
@@ -26,6 +31,7 @@ impl SendMessage {
content: content.into(),
recipient: recipient.into(),
subject: None,
+ thread_ts: None,
}
}
@@ -39,8 +45,15 @@ impl SendMessage {
content: content.into(),
recipient: recipient.into(),
subject: Some(subject.into()),
+ thread_ts: None,
}
}
+
+ /// Set the thread identifier for threaded replies.
+ pub fn in_thread(mut self, thread_ts: Option<String>) -> Self {
+ self.thread_ts = thread_ts;
+ self
+ }
}
/// Core channel trait — implement for any messaging platform
@@ -100,6 +113,11 @@ pub trait Channel: Send + Sync {
) -> anyhow::Result<()> {
Ok(())
}
+
+ /// Cancel and remove a previously sent draft message if the channel supports it.
+ async fn cancel_draft(&self, _recipient: &str, _message_id: &str) -> anyhow::Result<()> {
+ Ok(())
+ }
}
#[cfg(test)]
@@ -129,6 +147,7 @@ mod tests {
content: "hello".into(),
channel: "dummy".into(),
timestamp: 123,
+ thread_ts: None,
})
.await
.map_err(|e| anyhow::anyhow!(e.to_string()))
@@ -144,6 +163,7 @@ mod tests {
content: "ping".into(),
channel: "dummy".into(),
timestamp: 999,
+ thread_ts: None,
};
let cloned = message.clone();
@@ -183,6 +203,7 @@ mod tests {
.finalize_draft("bob", "msg_1", "final text")
.await
.is_ok());
+ assert!(channel.cancel_draft("bob", "msg_1").await.is_ok());
}
#[tokio::test]
diff --git a/src/channels/whatsapp.rs b/src/channels/whatsapp.rs
index c6e5baa..5401e60 100644
--- a/src/channels/whatsapp.rs
+++ b/src/channels/whatsapp.rs
@@ -8,6 +8,20 @@ use uuid::Uuid;
/// Messages are received via the gateway's `/whatsapp` webhook endpoint.
/// The `listen` method here is a no-op placeholder; actual message handling
/// happens in the gateway when Meta sends webhook events.
+fn ensure_https(url: &str) -> anyhow::Result<()> {
+ if !url.starts_with("https://") {
+ anyhow::bail!(
+ "Refusing to transmit sensitive data over non-HTTPS URL: URL scheme must be https"
+ );
+ }
+ Ok(())
+}
+
+///
+/// # Runtime Negotiation
+///
+/// This Cloud API channel is automatically selected when `phone_number_id` is set in the config.
+/// Use `WhatsAppWebChannel` (with `session_path`) for native Web mode.
pub struct WhatsAppChannel {
access_token: String,
endpoint_id: String,
@@ -85,7 +99,8 @@ impl WhatsAppChannel {
if !self.is_number_allowed(&normalized_from) {
tracing::warn!(
"WhatsApp: ignoring message from unauthorized number: {normalized_from}. \
- Add to allowed_numbers in config.toml, then run `zeroclaw onboard --channels-only`."
+ Add to channels.whatsapp.allowed_numbers in config.toml, \
+ or run `zeroclaw onboard --channels-only` to configure interactively."
);
continue;
}
@@ -126,6 +141,7 @@ impl WhatsAppChannel {
content,
channel: "whatsapp".to_string(),
timestamp,
+ thread_ts: None,
});
}
}
@@ -165,6 +181,8 @@ impl Channel for WhatsAppChannel {
}
});
+ ensure_https(&url)?;
+
let resp = self
.http_client()
.post(&url)
@@ -203,6 +221,10 @@ impl Channel for WhatsAppChannel {
// Check if we can reach the WhatsApp API
let url = format!("https://graph.facebook.com/v18.0/{}", self.endpoint_id);
+ if ensure_https(&url).is_err() {
+ return false;
+ }
+
self.http_client()
.get(&url)
.bearer_auth(&self.access_token)
diff --git a/src/channels/whatsapp_storage.rs b/src/channels/whatsapp_storage.rs
new file mode 100644
index 0000000..87eebf7
--- /dev/null
+++ b/src/channels/whatsapp_storage.rs
@@ -0,0 +1,1345 @@
+//! Custom wa-rs storage backend using ZeroClaw's rusqlite
+//!
+//! This module implements all 4 wa-rs storage traits using rusqlite directly,
+//! avoiding the Diesel/libsqlite3-sys dependency conflict from wa-rs-sqlite-storage.
+//!
+//! # Traits Implemented
+//!
+//! - [`SignalStore`]: Signal protocol cryptographic operations
+//! - [`AppSyncStore`]: WhatsApp app state synchronization
+//! - [`ProtocolStore`]: WhatsApp Web protocol alignment
+//! - [`DeviceStore`]: Device persistence operations
+
+#[cfg(feature = "whatsapp-web")]
+use async_trait::async_trait;
+#[cfg(feature = "whatsapp-web")]
+use parking_lot::Mutex;
+#[cfg(feature = "whatsapp-web")]
+use rusqlite::{params, Connection};
+#[cfg(feature = "whatsapp-web")]
+use std::path::Path;
+#[cfg(feature = "whatsapp-web")]
+use std::sync::Arc;
+
+#[cfg(feature = "whatsapp-web")]
+use prost::Message;
+#[cfg(feature = "whatsapp-web")]
+use wa_rs_binary::jid::Jid;
+#[cfg(feature = "whatsapp-web")]
+use wa_rs_core::appstate::hash::HashState;
+#[cfg(feature = "whatsapp-web")]
+use wa_rs_core::appstate::processor::AppStateMutationMAC;
+#[cfg(feature = "whatsapp-web")]
+use wa_rs_core::store::traits::DeviceInfo;
+#[cfg(feature = "whatsapp-web")]
+use wa_rs_core::store::traits::DeviceStore as DeviceStoreTrait;
+#[cfg(feature = "whatsapp-web")]
+use wa_rs_core::store::traits::*;
+#[cfg(feature = "whatsapp-web")]
+use wa_rs_core::store::Device as CoreDevice;
+
+/// Custom wa-rs storage backend using rusqlite
+///
+/// This implements all 4 storage traits required by wa-rs.
+/// The backend uses ZeroClaw's existing rusqlite setup, avoiding the
+/// Diesel/libsqlite3-sys conflict from wa-rs-sqlite-storage.
+#[cfg(feature = "whatsapp-web")]
+#[derive(Clone)]
+pub struct RusqliteStore {
+ /// Database file path
+ db_path: String,
+ /// SQLite connection (thread-safe via Mutex)
+ conn: Arc>,
+ /// Device ID for this session
+ device_id: i32,
+}
+
+/// Helper macro to convert rusqlite errors to StoreError
+/// For execute statements that return usize, maps to ()
+macro_rules! to_store_err {
+    // For expressions returning Result<usize, rusqlite::Error>
+ (execute: $expr:expr) => {
+ $expr
+ .map(|_| ())
+ .map_err(|e| wa_rs_core::store::error::StoreError::Database(e.to_string()))
+ };
+ // For other expressions
+ ($expr:expr) => {
+ $expr.map_err(|e| wa_rs_core::store::error::StoreError::Database(e.to_string()))
+ };
+}
+
+#[cfg(feature = "whatsapp-web")]
+impl RusqliteStore {
+ /// Create a new rusqlite-based storage backend
+ ///
+ /// # Arguments
+ ///
+ /// * `db_path` - Path to the SQLite database file (will be created if needed)
+    pub fn new<P: AsRef<Path>>(db_path: P) -> anyhow::Result<Self> {
+ let db_path = db_path.as_ref().to_string_lossy().to_string();
+
+ // Create parent directory if needed
+ if let Some(parent) = Path::new(&db_path).parent() {
+ std::fs::create_dir_all(parent)?;
+ }
+
+ let conn = Connection::open(&db_path)?;
+
+ // Enable WAL mode for better concurrency
+ to_store_err!(conn.execute_batch(
+ "PRAGMA journal_mode = WAL;
+ PRAGMA synchronous = NORMAL;",
+ ))?;
+
+ let store = Self {
+ db_path,
+ conn: Arc::new(Mutex::new(conn)),
+ device_id: 1, // Default device ID
+ };
+
+ store.init_schema()?;
+
+ Ok(store)
+ }
+
+ /// Initialize all database tables
+ fn init_schema(&self) -> anyhow::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(conn.execute_batch(
+ "-- Main device table
+ CREATE TABLE IF NOT EXISTS device (
+ id INTEGER PRIMARY KEY,
+ lid TEXT,
+ pn TEXT,
+ registration_id INTEGER NOT NULL,
+ noise_key BLOB NOT NULL,
+ identity_key BLOB NOT NULL,
+ signed_pre_key BLOB NOT NULL,
+ signed_pre_key_id INTEGER NOT NULL,
+ signed_pre_key_signature BLOB NOT NULL,
+ adv_secret_key BLOB NOT NULL,
+ account BLOB,
+ push_name TEXT NOT NULL,
+ app_version_primary INTEGER NOT NULL,
+ app_version_secondary INTEGER NOT NULL,
+ app_version_tertiary INTEGER NOT NULL,
+ app_version_last_fetched_ms INTEGER NOT NULL,
+ edge_routing_info BLOB,
+ props_hash TEXT
+ );
+
+ -- Signal identity keys
+ CREATE TABLE IF NOT EXISTS identities (
+ address TEXT NOT NULL,
+ key BLOB NOT NULL,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (address, device_id)
+ );
+
+ -- Signal protocol sessions
+ CREATE TABLE IF NOT EXISTS sessions (
+ address TEXT NOT NULL,
+ record BLOB NOT NULL,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (address, device_id)
+ );
+
+ -- Pre-keys for key exchange
+ CREATE TABLE IF NOT EXISTS prekeys (
+ id INTEGER NOT NULL,
+ key BLOB NOT NULL,
+ uploaded INTEGER NOT NULL DEFAULT 0,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (id, device_id)
+ );
+
+ -- Signed pre-keys
+ CREATE TABLE IF NOT EXISTS signed_prekeys (
+ id INTEGER NOT NULL,
+ record BLOB NOT NULL,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (id, device_id)
+ );
+
+ -- Sender keys for group messaging
+ CREATE TABLE IF NOT EXISTS sender_keys (
+ address TEXT NOT NULL,
+ record BLOB NOT NULL,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (address, device_id)
+ );
+
+ -- App state sync keys
+ CREATE TABLE IF NOT EXISTS app_state_keys (
+ key_id BLOB NOT NULL,
+ key_data BLOB NOT NULL,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (key_id, device_id)
+ );
+
+ -- App state versions
+ CREATE TABLE IF NOT EXISTS app_state_versions (
+ name TEXT NOT NULL,
+ state_data BLOB NOT NULL,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (name, device_id)
+ );
+
+ -- App state mutation MACs
+ CREATE TABLE IF NOT EXISTS app_state_mutation_macs (
+ name TEXT NOT NULL,
+ version INTEGER NOT NULL,
+ index_mac BLOB NOT NULL,
+ value_mac BLOB NOT NULL,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (name, index_mac, device_id)
+ );
+
+ -- LID to phone number mapping
+ CREATE TABLE IF NOT EXISTS lid_pn_mapping (
+ lid TEXT NOT NULL,
+ phone_number TEXT NOT NULL,
+ created_at INTEGER NOT NULL,
+ learning_source TEXT NOT NULL,
+ updated_at INTEGER NOT NULL,
+ device_id INTEGER NOT NULL,
+ PRIMARY KEY (lid, device_id)
+ );
+
+ -- SKDM recipients tracking
+ CREATE TABLE IF NOT EXISTS skdm_recipients (
+ group_jid TEXT NOT NULL,
+ device_jid TEXT NOT NULL,
+ device_id INTEGER NOT NULL,
+ created_at INTEGER NOT NULL,
+ PRIMARY KEY (group_jid, device_jid, device_id)
+ );
+
+ -- Device registry for multi-device
+ CREATE TABLE IF NOT EXISTS device_registry (
+ user_id TEXT NOT NULL,
+ devices_json TEXT NOT NULL,
+ timestamp INTEGER NOT NULL,
+ phash TEXT,
+ device_id INTEGER NOT NULL,
+ updated_at INTEGER NOT NULL,
+ PRIMARY KEY (user_id, device_id)
+ );
+
+ -- Base keys for collision detection
+ CREATE TABLE IF NOT EXISTS base_keys (
+ address TEXT NOT NULL,
+ message_id TEXT NOT NULL,
+ base_key BLOB NOT NULL,
+ device_id INTEGER NOT NULL,
+ created_at INTEGER NOT NULL,
+ PRIMARY KEY (address, message_id, device_id)
+ );
+
+ -- Sender key status for lazy deletion
+ CREATE TABLE IF NOT EXISTS sender_key_status (
+ group_jid TEXT NOT NULL,
+ participant TEXT NOT NULL,
+ device_id INTEGER NOT NULL,
+ marked_at INTEGER NOT NULL,
+ PRIMARY KEY (group_jid, participant, device_id)
+ );
+
+ -- Trusted contact tokens
+ CREATE TABLE IF NOT EXISTS tc_tokens (
+ jid TEXT NOT NULL,
+ token BLOB NOT NULL,
+ token_timestamp INTEGER NOT NULL,
+ sender_timestamp INTEGER,
+ device_id INTEGER NOT NULL,
+ updated_at INTEGER NOT NULL,
+ PRIMARY KEY (jid, device_id)
+ );",
+ ))?;
+ Ok(())
+ }
+}
+
+#[cfg(feature = "whatsapp-web")]
+#[async_trait]
+impl SignalStore for RusqliteStore {
+ // --- Identity Operations ---
+
+ async fn put_identity(
+ &self,
+ address: &str,
+ key: [u8; 32],
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO identities (address, key, device_id)
+ VALUES (?1, ?2, ?3)",
+ params![address, key.to_vec(), self.device_id],
+ ))
+ }
+
+ async fn load_identity(
+ &self,
+ address: &str,
+    ) -> wa_rs_core::store::error::Result<Option<Vec<u8>>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT key FROM identities WHERE address = ?1 AND device_id = ?2",
+ params![address, self.device_id],
+            |row| row.get::<_, Vec<u8>>(0),
+ );
+
+ match result {
+ Ok(key) => Ok(Some(key)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn delete_identity(&self, address: &str) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM identities WHERE address = ?1 AND device_id = ?2",
+ params![address, self.device_id],
+ ))
+ }
+
+ // --- Session Operations ---
+
+ async fn get_session(
+ &self,
+ address: &str,
+    ) -> wa_rs_core::store::error::Result<Option<Vec<u8>>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT record FROM sessions WHERE address = ?1 AND device_id = ?2",
+ params![address, self.device_id],
+            |row| row.get::<_, Vec<u8>>(0),
+ );
+
+ match result {
+ Ok(record) => Ok(Some(record)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn put_session(
+ &self,
+ address: &str,
+ session: &[u8],
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO sessions (address, record, device_id)
+ VALUES (?1, ?2, ?3)",
+ params![address, session, self.device_id],
+ ))
+ }
+
+ async fn delete_session(&self, address: &str) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM sessions WHERE address = ?1 AND device_id = ?2",
+ params![address, self.device_id],
+ ))
+ }
+
+ // --- PreKey Operations ---
+
+ async fn store_prekey(
+ &self,
+ id: u32,
+ record: &[u8],
+ uploaded: bool,
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO prekeys (id, key, uploaded, device_id)
+ VALUES (?1, ?2, ?3, ?4)",
+ params![id, record, uploaded, self.device_id],
+ ))
+ }
+
+    async fn load_prekey(&self, id: u32) -> wa_rs_core::store::error::Result<Option<Vec<u8>>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT key FROM prekeys WHERE id = ?1 AND device_id = ?2",
+ params![id, self.device_id],
+            |row| row.get::<_, Vec<u8>>(0),
+ );
+
+ match result {
+ Ok(key) => Ok(Some(key)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn remove_prekey(&self, id: u32) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM prekeys WHERE id = ?1 AND device_id = ?2",
+ params![id, self.device_id],
+ ))
+ }
+
+ // --- Signed PreKey Operations ---
+
+ async fn store_signed_prekey(
+ &self,
+ id: u32,
+ record: &[u8],
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO signed_prekeys (id, record, device_id)
+ VALUES (?1, ?2, ?3)",
+ params![id, record, self.device_id],
+ ))
+ }
+
+ async fn load_signed_prekey(
+ &self,
+ id: u32,
+    ) -> wa_rs_core::store::error::Result<Option<Vec<u8>>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT record FROM signed_prekeys WHERE id = ?1 AND device_id = ?2",
+ params![id, self.device_id],
+            |row| row.get::<_, Vec<u8>>(0),
+ );
+
+ match result {
+ Ok(record) => Ok(Some(record)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn load_all_signed_prekeys(
+ &self,
+    ) -> wa_rs_core::store::error::Result<Vec<(u32, Vec<u8>)>> {
+ let conn = self.conn.lock();
+ let mut stmt = to_store_err!(
+ conn.prepare("SELECT id, record FROM signed_prekeys WHERE device_id = ?1")
+ )?;
+
+ let rows = to_store_err!(stmt.query_map(params![self.device_id], |row| {
+            Ok((row.get::<_, u32>(0)?, row.get::<_, Vec<u8>>(1)?))
+ }))?;
+
+ let mut result = Vec::new();
+ for row in rows {
+ result.push(to_store_err!(row)?);
+ }
+
+ Ok(result)
+ }
+
+ async fn remove_signed_prekey(&self, id: u32) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM signed_prekeys WHERE id = ?1 AND device_id = ?2",
+ params![id, self.device_id],
+ ))
+ }
+
+ // --- Sender Key Operations ---
+
+ async fn put_sender_key(
+ &self,
+ address: &str,
+ record: &[u8],
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO sender_keys (address, record, device_id)
+ VALUES (?1, ?2, ?3)",
+ params![address, record, self.device_id],
+ ))
+ }
+
+ async fn get_sender_key(
+ &self,
+ address: &str,
+    ) -> wa_rs_core::store::error::Result<Option<Vec<u8>>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT record FROM sender_keys WHERE address = ?1 AND device_id = ?2",
+ params![address, self.device_id],
+            |row| row.get::<_, Vec<u8>>(0),
+ );
+
+ match result {
+ Ok(record) => Ok(Some(record)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn delete_sender_key(&self, address: &str) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM sender_keys WHERE address = ?1 AND device_id = ?2",
+ params![address, self.device_id],
+ ))
+ }
+}
+
+#[cfg(feature = "whatsapp-web")]
+#[async_trait]
+impl AppSyncStore for RusqliteStore {
+ async fn get_sync_key(
+ &self,
+ key_id: &[u8],
+    ) -> wa_rs_core::store::error::Result<Option<AppStateSyncKey>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT key_data FROM app_state_keys WHERE key_id = ?1 AND device_id = ?2",
+ params![key_id, self.device_id],
+ |row| {
+                let key_data: Vec<u8> = row.get(0)?;
+ serde_json::from_slice(&key_data)
+ .map_err(|e| rusqlite::Error::ToSqlConversionFailure(Box::new(e)))
+ },
+ );
+
+ match result {
+ Ok(key) => Ok(Some(key)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn set_sync_key(
+ &self,
+ key_id: &[u8],
+ key: AppStateSyncKey,
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ let key_data = to_store_err!(serde_json::to_vec(&key))?;
+
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO app_state_keys (key_id, key_data, device_id)
+ VALUES (?1, ?2, ?3)",
+ params![key_id, key_data, self.device_id],
+ ))
+ }
+
+    async fn get_version(&self, name: &str) -> wa_rs_core::store::error::Result<HashState> {
+ let conn = self.conn.lock();
+        let state_data: Vec<u8> = to_store_err!(conn.query_row(
+ "SELECT state_data FROM app_state_versions WHERE name = ?1 AND device_id = ?2",
+ params![name, self.device_id],
+ |row| row.get(0),
+ ))?;
+
+ to_store_err!(serde_json::from_slice(&state_data))
+ }
+
+ async fn set_version(
+ &self,
+ name: &str,
+ state: HashState,
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ let state_data = to_store_err!(serde_json::to_vec(&state))?;
+
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO app_state_versions (name, state_data, device_id)
+ VALUES (?1, ?2, ?3)",
+ params![name, state_data, self.device_id],
+ ))
+ }
+
+ async fn put_mutation_macs(
+ &self,
+ name: &str,
+ version: u64,
+ mutations: &[AppStateMutationMAC],
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+
+ for mutation in mutations {
+ let index_mac = to_store_err!(serde_json::to_vec(&mutation.index_mac))?;
+ let value_mac = to_store_err!(serde_json::to_vec(&mutation.value_mac))?;
+
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO app_state_mutation_macs
+ (name, version, index_mac, value_mac, device_id)
+ VALUES (?1, ?2, ?3, ?4, ?5)",
+ params![name, i64::try_from(version).unwrap_or(i64::MAX), index_mac, value_mac, self.device_id],
+ ))?;
+ }
+
+ Ok(())
+ }
+
+ async fn get_mutation_mac(
+ &self,
+ name: &str,
+ index_mac: &[u8],
+    ) -> wa_rs_core::store::error::Result<Option<Vec<u8>>> {
+ let conn = self.conn.lock();
+ let index_mac_json = to_store_err!(serde_json::to_vec(index_mac))?;
+
+ let result = conn.query_row(
+ "SELECT value_mac FROM app_state_mutation_macs
+ WHERE name = ?1 AND index_mac = ?2 AND device_id = ?3",
+ params![name, index_mac_json, self.device_id],
+            |row| row.get::<_, Vec<u8>>(0),
+ );
+
+ match result {
+ Ok(mac) => Ok(Some(mac)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn delete_mutation_macs(
+ &self,
+ name: &str,
+        index_macs: &[Vec<u8>],
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+
+ for index_mac in index_macs {
+ let index_mac_json = to_store_err!(serde_json::to_vec(index_mac))?;
+
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM app_state_mutation_macs
+ WHERE name = ?1 AND index_mac = ?2 AND device_id = ?3",
+ params![name, index_mac_json, self.device_id],
+ ))?;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "whatsapp-web")]
+#[async_trait]
+impl ProtocolStore for RusqliteStore {
+ // --- SKDM Tracking ---
+
+ async fn get_skdm_recipients(
+ &self,
+ group_jid: &str,
+    ) -> wa_rs_core::store::error::Result<Vec<Jid>> {
+ let conn = self.conn.lock();
+ let mut stmt = to_store_err!(conn.prepare(
+ "SELECT device_jid FROM skdm_recipients WHERE group_jid = ?1 AND device_id = ?2"
+ ))?;
+
+ let rows = to_store_err!(stmt.query_map(params![group_jid, self.device_id], |row| {
+ row.get::<_, String>(0)
+ }))?;
+
+ let mut result = Vec::new();
+ for row in rows {
+ let jid_str = to_store_err!(row)?;
+ if let Ok(jid) = jid_str.parse() {
+ result.push(jid);
+ }
+ }
+
+ Ok(result)
+ }
+
+ async fn add_skdm_recipients(
+ &self,
+ group_jid: &str,
+ device_jids: &[Jid],
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ let now = chrono::Utc::now().timestamp();
+
+ for device_jid in device_jids {
+ to_store_err!(execute: conn.execute(
+ "INSERT OR IGNORE INTO skdm_recipients (group_jid, device_jid, device_id, created_at)
+ VALUES (?1, ?2, ?3, ?4)",
+ params![group_jid, device_jid.to_string(), self.device_id, now],
+ ))?;
+ }
+
+ Ok(())
+ }
+
+ async fn clear_skdm_recipients(&self, group_jid: &str) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM skdm_recipients WHERE group_jid = ?1 AND device_id = ?2",
+ params![group_jid, self.device_id],
+ ))
+ }
+
+ // --- LID-PN Mapping ---
+
+ async fn get_lid_mapping(
+ &self,
+ lid: &str,
+    ) -> wa_rs_core::store::error::Result<Option<LidPnMappingEntry>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT lid, phone_number, created_at, learning_source, updated_at
+ FROM lid_pn_mapping WHERE lid = ?1 AND device_id = ?2",
+ params![lid, self.device_id],
+ |row| {
+ Ok(LidPnMappingEntry {
+ lid: row.get(0)?,
+ phone_number: row.get(1)?,
+ created_at: row.get(2)?,
+ learning_source: row.get(3)?,
+ updated_at: row.get(4)?,
+ })
+ },
+ );
+
+ match result {
+ Ok(entry) => Ok(Some(entry)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn get_pn_mapping(
+ &self,
+ phone: &str,
+    ) -> wa_rs_core::store::error::Result<Option<LidPnMappingEntry>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT lid, phone_number, created_at, learning_source, updated_at
+ FROM lid_pn_mapping WHERE phone_number = ?1 AND device_id = ?2
+ ORDER BY updated_at DESC LIMIT 1",
+ params![phone, self.device_id],
+ |row| {
+ Ok(LidPnMappingEntry {
+ lid: row.get(0)?,
+ phone_number: row.get(1)?,
+ created_at: row.get(2)?,
+ learning_source: row.get(3)?,
+ updated_at: row.get(4)?,
+ })
+ },
+ );
+
+ match result {
+ Ok(entry) => Ok(Some(entry)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn put_lid_mapping(
+ &self,
+ entry: &LidPnMappingEntry,
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO lid_pn_mapping
+ (lid, phone_number, created_at, learning_source, updated_at, device_id)
+ VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
+ params![
+ entry.lid,
+ entry.phone_number,
+ entry.created_at,
+ entry.learning_source,
+ entry.updated_at,
+ self.device_id,
+ ],
+ ))
+ }
+
+ async fn get_all_lid_mappings(
+ &self,
+    ) -> wa_rs_core::store::error::Result<Vec<LidPnMappingEntry>> {
+ let conn = self.conn.lock();
+ let mut stmt = to_store_err!(conn.prepare(
+ "SELECT lid, phone_number, created_at, learning_source, updated_at
+ FROM lid_pn_mapping WHERE device_id = ?1"
+ ))?;
+
+ let rows = to_store_err!(stmt.query_map(params![self.device_id], |row| {
+ Ok(LidPnMappingEntry {
+ lid: row.get(0)?,
+ phone_number: row.get(1)?,
+ created_at: row.get(2)?,
+ learning_source: row.get(3)?,
+ updated_at: row.get(4)?,
+ })
+ }))?;
+
+ let mut result = Vec::new();
+ for row in rows {
+ result.push(to_store_err!(row)?);
+ }
+
+ Ok(result)
+ }
+
+ // --- Base Key Collision Detection ---
+
+ async fn save_base_key(
+ &self,
+ address: &str,
+ message_id: &str,
+ base_key: &[u8],
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ let now = chrono::Utc::now().timestamp();
+
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO base_keys (address, message_id, base_key, device_id, created_at)
+ VALUES (?1, ?2, ?3, ?4, ?5)",
+ params![address, message_id, base_key, self.device_id, now],
+ ))
+ }
+
+ async fn has_same_base_key(
+ &self,
+ address: &str,
+ message_id: &str,
+ current_base_key: &[u8],
+    ) -> wa_rs_core::store::error::Result<bool> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT base_key FROM base_keys
+ WHERE address = ?1 AND message_id = ?2 AND device_id = ?3",
+ params![address, message_id, self.device_id],
+ |row| {
+                let saved_key: Vec<u8> = row.get(0)?;
+ Ok(saved_key == current_base_key)
+ },
+ );
+
+ match result {
+ Ok(same) => Ok(same),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(false),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn delete_base_key(
+ &self,
+ address: &str,
+ message_id: &str,
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM base_keys WHERE address = ?1 AND message_id = ?2 AND device_id = ?3",
+ params![address, message_id, self.device_id],
+ ))
+ }
+
+ // --- Device Registry ---
+
+ async fn update_device_list(
+ &self,
+ record: DeviceListRecord,
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ let devices_json = to_store_err!(serde_json::to_string(&record.devices))?;
+ let now = chrono::Utc::now().timestamp();
+
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO device_registry
+ (user_id, devices_json, timestamp, phash, device_id, updated_at)
+ VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
+ params![
+ record.user,
+ devices_json,
+ record.timestamp,
+ record.phash,
+ self.device_id,
+ now,
+ ],
+ ))
+ }
+
+ async fn get_devices(
+ &self,
+ user: &str,
+    ) -> wa_rs_core::store::error::Result<Option<DeviceListRecord>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT user_id, devices_json, timestamp, phash
+ FROM device_registry WHERE user_id = ?1 AND device_id = ?2",
+ params![user, self.device_id],
+ |row| {
+ // Helper to convert errors to rusqlite::Error
+                fn to_rusqlite_err<E: std::error::Error + Send + Sync + 'static>(
+                    e: E,
+                ) -> rusqlite::Error {
+ rusqlite::Error::ToSqlConversionFailure(Box::new(e))
+ }
+
+ let devices_json: String = row.get(1)?;
+                let devices: Vec<DeviceInfo> =
+ serde_json::from_str(&devices_json).map_err(to_rusqlite_err)?;
+ Ok(DeviceListRecord {
+ user: row.get(0)?,
+ devices,
+ timestamp: row.get(2)?,
+ phash: row.get(3)?,
+ })
+ },
+ );
+
+ match result {
+ Ok(record) => Ok(Some(record)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ // --- Sender Key Status (Lazy Deletion) ---
+
+ async fn mark_forget_sender_key(
+ &self,
+ group_jid: &str,
+ participant: &str,
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ let now = chrono::Utc::now().timestamp();
+
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO sender_key_status (group_jid, participant, device_id, marked_at)
+ VALUES (?1, ?2, ?3, ?4)",
+ params![group_jid, participant, self.device_id, now],
+ ))
+ }
+
+ async fn consume_forget_marks(
+ &self,
+ group_jid: &str,
+    ) -> wa_rs_core::store::error::Result<Vec<String>> {
+ let conn = self.conn.lock();
+ let mut stmt = to_store_err!(conn.prepare(
+ "SELECT participant FROM sender_key_status
+ WHERE group_jid = ?1 AND device_id = ?2"
+ ))?;
+
+ let rows = to_store_err!(stmt.query_map(params![group_jid, self.device_id], |row| {
+ row.get::<_, String>(0)
+ }))?;
+
+ let mut result = Vec::new();
+ for row in rows {
+ result.push(to_store_err!(row)?);
+ }
+
+ // Delete the marks after consuming them
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM sender_key_status WHERE group_jid = ?1 AND device_id = ?2",
+ params![group_jid, self.device_id],
+ ))?;
+
+ Ok(result)
+ }
+
+ // --- TcToken Storage ---
+
+ async fn get_tc_token(
+ &self,
+ jid: &str,
+    ) -> wa_rs_core::store::error::Result<Option<TcTokenEntry>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT token, token_timestamp, sender_timestamp FROM tc_tokens
+ WHERE jid = ?1 AND device_id = ?2",
+ params![jid, self.device_id],
+ |row| {
+ Ok(TcTokenEntry {
+ token: row.get(0)?,
+ token_timestamp: row.get(1)?,
+ sender_timestamp: row.get(2)?,
+ })
+ },
+ );
+
+ match result {
+ Ok(entry) => Ok(Some(entry)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn put_tc_token(
+ &self,
+ jid: &str,
+ entry: &TcTokenEntry,
+ ) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ let now = chrono::Utc::now().timestamp();
+
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO tc_tokens
+ (jid, token, token_timestamp, sender_timestamp, device_id, updated_at)
+ VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
+ params![
+ jid,
+ entry.token,
+ entry.token_timestamp,
+ entry.sender_timestamp,
+ self.device_id,
+ now,
+ ],
+ ))
+ }
+
+ async fn delete_tc_token(&self, jid: &str) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+ to_store_err!(execute: conn.execute(
+ "DELETE FROM tc_tokens WHERE jid = ?1 AND device_id = ?2",
+ params![jid, self.device_id],
+ ))
+ }
+
+    async fn get_all_tc_token_jids(&self) -> wa_rs_core::store::error::Result<Vec<String>> {
+ let conn = self.conn.lock();
+ let mut stmt =
+ to_store_err!(conn.prepare("SELECT jid FROM tc_tokens WHERE device_id = ?1"))?;
+
+ let rows = to_store_err!(
+ stmt.query_map(params![self.device_id], |row| { row.get::<_, String>(0) })
+ )?;
+
+ let mut result = Vec::new();
+ for row in rows {
+ result.push(to_store_err!(row)?);
+ }
+
+ Ok(result)
+ }
+
+ async fn delete_expired_tc_tokens(
+ &self,
+ cutoff_timestamp: i64,
+    ) -> wa_rs_core::store::error::Result<u32> {
+ let conn = self.conn.lock();
+ let deleted = conn
+ .execute(
+ "DELETE FROM tc_tokens WHERE token_timestamp < ?1 AND device_id = ?2",
+ params![cutoff_timestamp, self.device_id],
+ )
+ .map_err(|e| wa_rs_core::store::error::StoreError::Database(e.to_string()))?;
+
+ let deleted = u32::try_from(deleted).map_err(|_| {
+ wa_rs_core::store::error::StoreError::Database(format!(
+ "Affected row count overflowed u32: {deleted}"
+ ))
+ })?;
+
+ Ok(deleted)
+ }
+}
+
+#[cfg(feature = "whatsapp-web")]
+#[async_trait]
+impl DeviceStoreTrait for RusqliteStore {
+ async fn save(&self, device: &CoreDevice) -> wa_rs_core::store::error::Result<()> {
+ let conn = self.conn.lock();
+
+ // Serialize KeyPairs to bytes
+ let noise_key = {
+ let mut bytes = Vec::new();
+ let priv_key = device.noise_key.private_key.serialize();
+ bytes.extend_from_slice(priv_key.as_slice());
+ bytes.extend_from_slice(device.noise_key.public_key.public_key_bytes());
+ bytes
+ };
+
+ let identity_key = {
+ let mut bytes = Vec::new();
+ let priv_key = device.identity_key.private_key.serialize();
+ bytes.extend_from_slice(priv_key.as_slice());
+ bytes.extend_from_slice(device.identity_key.public_key.public_key_bytes());
+ bytes
+ };
+
+ let signed_pre_key = {
+ let mut bytes = Vec::new();
+ let priv_key = device.signed_pre_key.private_key.serialize();
+ bytes.extend_from_slice(priv_key.as_slice());
+ bytes.extend_from_slice(device.signed_pre_key.public_key.public_key_bytes());
+ bytes
+ };
+
+ let account = device.account.as_ref().map(|a| a.encode_to_vec());
+
+ to_store_err!(execute: conn.execute(
+ "INSERT OR REPLACE INTO device (
+ id, lid, pn, registration_id, noise_key, identity_key,
+ signed_pre_key, signed_pre_key_id, signed_pre_key_signature,
+ adv_secret_key, account, push_name, app_version_primary,
+ app_version_secondary, app_version_tertiary, app_version_last_fetched_ms,
+ edge_routing_info, props_hash
+ ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17, ?18)",
+ params![
+ self.device_id,
+ device.lid.as_ref().map(|j| j.to_string()),
+ device.pn.as_ref().map(|j| j.to_string()),
+ device.registration_id,
+ noise_key,
+ identity_key,
+ signed_pre_key,
+ device.signed_pre_key_id,
+ device.signed_pre_key_signature.to_vec(),
+ device.adv_secret_key.to_vec(),
+ account,
+ &device.push_name,
+ device.app_version_primary,
+ device.app_version_secondary,
+ device.app_version_tertiary,
+ device.app_version_last_fetched_ms,
+ device.edge_routing_info.as_ref().map(|v| v.clone()),
+ device.props_hash.as_ref().map(|v| v.clone()),
+ ],
+ ))
+ }
+
+    async fn load(&self) -> wa_rs_core::store::error::Result<Option<CoreDevice>> {
+ let conn = self.conn.lock();
+ let result = conn.query_row(
+ "SELECT * FROM device WHERE id = ?1",
+ params![self.device_id],
+ |row| {
+ // Helper to convert errors to rusqlite::Error
+                fn to_rusqlite_err<E: std::error::Error + Send + Sync + 'static>(
+                    e: E,
+                ) -> rusqlite::Error {
+ rusqlite::Error::ToSqlConversionFailure(Box::new(e))
+ }
+
+ // Deserialize KeyPairs from bytes (64 bytes each)
+                let noise_key_bytes: Vec<u8> = row.get("noise_key")?;
+                let identity_key_bytes: Vec<u8> = row.get("identity_key")?;
+                let signed_pre_key_bytes: Vec<u8> = row.get("signed_pre_key")?;
+
+ if noise_key_bytes.len() != 64
+ || identity_key_bytes.len() != 64
+ || signed_pre_key_bytes.len() != 64
+ {
+ return Err(rusqlite::Error::InvalidParameterName("key_pair".into()));
+ }
+
+ use wa_rs_core::libsignal::protocol::{KeyPair, PrivateKey, PublicKey};
+
+ let noise_key = KeyPair::new(
+ PublicKey::from_djb_public_key_bytes(&noise_key_bytes[32..64])
+ .map_err(to_rusqlite_err)?,
+ PrivateKey::deserialize(&noise_key_bytes[0..32]).map_err(to_rusqlite_err)?,
+ );
+
+ let identity_key = KeyPair::new(
+ PublicKey::from_djb_public_key_bytes(&identity_key_bytes[32..64])
+ .map_err(to_rusqlite_err)?,
+ PrivateKey::deserialize(&identity_key_bytes[0..32]).map_err(to_rusqlite_err)?,
+ );
+
+ let signed_pre_key = KeyPair::new(
+ PublicKey::from_djb_public_key_bytes(&signed_pre_key_bytes[32..64])
+ .map_err(to_rusqlite_err)?,
+ PrivateKey::deserialize(&signed_pre_key_bytes[0..32])
+ .map_err(to_rusqlite_err)?,
+ );
+
+                let lid_str: Option<String> = row.get("lid")?;
+                let pn_str: Option<String> = row.get("pn")?;
+                let signature_bytes: Vec<u8> = row.get("signed_pre_key_signature")?;
+                let adv_secret_bytes: Vec<u8> = row.get("adv_secret_key")?;
+                let account_bytes: Option<Vec<u8>> = row.get("account")?;
+
+ let mut signature = [0u8; 64];
+ let mut adv_secret = [0u8; 32];
+ signature.copy_from_slice(&signature_bytes);
+ adv_secret.copy_from_slice(&adv_secret_bytes);
+
+ let account = if let Some(bytes) = account_bytes {
+ Some(
+ wa_rs_proto::whatsapp::AdvSignedDeviceIdentity::decode(&*bytes)
+ .map_err(to_rusqlite_err)?,
+ )
+ } else {
+ None
+ };
+
+ Ok(CoreDevice {
+ lid: lid_str.and_then(|s| s.parse().ok()),
+ pn: pn_str.and_then(|s| s.parse().ok()),
+ registration_id: row.get("registration_id")?,
+ noise_key,
+ identity_key,
+ signed_pre_key,
+ signed_pre_key_id: row.get("signed_pre_key_id")?,
+ signed_pre_key_signature: signature,
+ adv_secret_key: adv_secret,
+ account,
+ push_name: row.get("push_name")?,
+ app_version_primary: row.get("app_version_primary")?,
+ app_version_secondary: row.get("app_version_secondary")?,
+ app_version_tertiary: row.get("app_version_tertiary")?,
+ app_version_last_fetched_ms: row.get("app_version_last_fetched_ms")?,
+ edge_routing_info: row.get("edge_routing_info")?,
+ props_hash: row.get("props_hash")?,
+ ..Default::default()
+ })
+ },
+ );
+
+ match result {
+ Ok(device) => Ok(Some(device)),
+ Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+ Err(e) => Err(wa_rs_core::store::error::StoreError::Database(
+ e.to_string(),
+ )),
+ }
+ }
+
+ async fn exists(&self) -> wa_rs_core::store::error::Result