feat: add zeroclaw-robot-kit crate for AI-powered robotics

Standalone robot toolkit providing AI agents with physical world interaction.

Features:
- 6 tools: drive, look, listen, speak, sense, emote
- Multiple backends: ROS2, serial, GPIO, mock
- Independent SafetyMonitor with E-stop, collision avoidance
- Designed for Raspberry Pi 5 + Ollama offline operation
- 55 unit/integration tests
- Complete Pi 5 hardware setup guide
This commit is contained in:
Lumi-node 2026-02-17 10:25:54 -06:00 committed by Chummy
parent 431287184b
commit 0dfc707c49
18 changed files with 4444 additions and 9 deletions

View file

@ -0,0 +1,65 @@
[package]
name = "zeroclaw-robot-kit"
version = "0.1.0"
edition = "2021"
authors = ["theonlyhennygod"]
license = "MIT"
description = "Robot control toolkit for ZeroClaw - drive, vision, speech, sensors, safety"
repository = "https://github.com/theonlyhennygod/zeroclaw"
readme = "README.md"
keywords = ["robotics", "raspberry-pi", "ai", "agent", "ros2"]
categories = ["science::robotics", "embedded", "hardware-support"]
[features]
default = ["safety"]
# Core features
safety = [] # Safety monitor (recommended!)
ros2 = [] # ROS2 integration
gpio = ["dep:rppal"] # Direct GPIO control (Pi only)
# Optional hardware
lidar = [] # LIDAR support
vision = [] # Camera + vision model
[dependencies]
# Re-use zeroclaw's tool trait (optional - can also be standalone)
# zeroclaw = { path = "../..", optional = true }
# Async runtime
tokio = { version = "1.42", features = ["rt-multi-thread", "macros", "time", "sync", "process", "fs", "io-util"] }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
toml = "0.8"
# HTTP client (for Ollama vision)
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
# Base64 encoding (for image data)
base64 = "0.22"
# Async traits
async-trait = "0.1"
# Error handling
anyhow = "1.0"
thiserror = "2.0"
# Logging
tracing = "0.1"
# Time handling
chrono = { version = "0.4", features = ["clock", "std"] }
# User directories
directories = "5.0"
# GPIO (Raspberry Pi only, optional)
rppal = { version = "0.19", optional = true }
[dev-dependencies]
tokio-test = "0.4"
tempfile = "3.14"
[package.metadata.docs.rs]
all-features = true

View file

@ -0,0 +1,515 @@
# Raspberry Pi 5 Robot Setup Guide
Complete guide to setting up a ZeroClaw-powered robot on Raspberry Pi 5.
## Hardware Requirements
### Minimum Setup
| Component | Recommended | Notes |
|-----------|-------------|-------|
| **Pi 5** | 8GB model | 4GB works but limits model size |
| **Storage** | 64GB+ NVMe or SD | NVMe recommended for speed |
| **Power** | 27W USB-C PSU | Official Pi 5 PSU recommended |
| **Cooling** | Active cooler | Required for sustained inference |
### Robot Hardware
| Component | Model | Connection | Price (approx) |
|-----------|-------|------------|----------------|
| **Motor Controller** | L298N or TB6612FNG | GPIO PWM | $5-15 |
| **Motors** | 4× TT Motors + Omni wheels | Via controller | $30-50 |
| **LIDAR** | RPLidar A1 | USB `/dev/ttyUSB0` | $100 |
| **Camera** | Pi Camera 3 or USB webcam | CSI or USB | $25-50 |
| **Microphone** | USB mic or ReSpeaker | USB | $10-30 |
| **Speaker** | 3W amp + speaker | I2S or 3.5mm | $10-20 |
| **E-Stop** | Big red mushroom button | GPIO 4 | $5 |
| **Bump Sensors** | 2× Microswitches | GPIO 5, 6 | $3 |
| **LED Matrix** | 8×8 WS2812B | GPIO 18 (PWM) | $10 |
### Wiring Diagram
```
┌─────────────────────────────────────┐
│ Raspberry Pi 5 │
│ │
┌─────────────────┤ GPIO 4 ←── E-Stop Button (NC) │
│ │ GPIO 5 ←── Bump Sensor Left │
│ │ GPIO 6 ←── Bump Sensor Right │
│ │ GPIO 12 ──→ Motor PWM 1 │
│ │ GPIO 13 ──→ Motor PWM 2 │
│ │ GPIO 17 ←── PIR Motion 1 │
│ │ GPIO 18 ──→ LED Matrix (WS2812) │
│ │ GPIO 23 ──→ Ultrasonic Trigger │
│ │ GPIO 24 ←── Ultrasonic Echo │
│ │ GPIO 27 ←── PIR Motion 2 │
│ │ │
│ ┌───────────────┤ USB-A ←── RPLidar A1 │
│ │ │ USB-A ←── USB Microphone │
│ │ │ USB-A ←── USB Webcam (if no CSI) │
│ │ │ CSI ←── Pi Camera 3 │
│ │ │ I2S/3.5mm → Speaker/Amp │
│ │ └─────────────────────────────────────┘
│ │
│ │ ┌──────────────────┐
│ └──┤ RPLidar A1 │
│ │ /dev/ttyUSB0 │
│ └──────────────────┘
│ ┌──────────────────┐ ┌─────────────┐
└────┤ Motor Controller├──────┤ 4× Motors │
│ (L298N/TB6612) │ │ Omni Wheels │
└──────────────────┘ └─────────────┘
```
## Software Setup
### 1. Base OS
```bash
# Flash Raspberry Pi OS (64-bit, Bookworm) to NVMe/SD
# Use Raspberry Pi Imager with these settings:
# - Enable SSH
# - Set hostname: robot
# - Set username/password
# - Configure WiFi
# After boot, update everything
sudo apt update && sudo apt upgrade -y
# Install build essentials
sudo apt install -y \
build-essential \
git \
curl \
cmake \
pkg-config \
libssl-dev \
libasound2-dev \
libclang-dev
```
### 2. Install Rust
```bash
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
source ~/.cargo/env
```
### 3. Install Ollama (Local LLM)
```bash
curl -fsSL https://ollama.ai/install.sh | sh
# Pull models (choose based on RAM)
# 8GB Pi: Use smaller models
ollama pull llama3.2:3b # 3B params, fast
ollama pull moondream # Vision model, small
# 4GB Pi: Use tiny models
ollama pull phi3:mini # 3.8B, very fast
ollama pull moondream # Vision
# Start Ollama service
sudo systemctl enable ollama
sudo systemctl start ollama
# Test
curl http://localhost:11434/api/tags
```
### 4. Install Whisper.cpp (Speech-to-Text)
```bash
git clone https://github.com/ggerganov/whisper.cpp
cd whisper.cpp
# Build with ARM optimizations
make -j4
# Download model (base is good balance)
bash ./models/download-ggml-model.sh base
# Install
sudo cp main /usr/local/bin/whisper-cpp
mkdir -p ~/.zeroclaw/models
cp models/ggml-base.bin ~/.zeroclaw/models/
```
### 5. Install Piper TTS (Text-to-Speech)
```bash
# Download Piper binary
wget https://github.com/rhasspy/piper/releases/download/v1.2.0/piper_arm64.tar.gz
tar -xzf piper_arm64.tar.gz
sudo cp piper/piper /usr/local/bin/
# Download voice model
mkdir -p ~/.zeroclaw/models/piper
cd ~/.zeroclaw/models/piper
wget https://huggingface.co/rhasspy/piper-voices/resolve/main/en/en_US/lessac/medium/en_US-lessac-medium.onnx
wget https://huggingface.co/rhasspy/piper-voices/resolve/main/en/en_US/lessac/medium/en_US-lessac-medium.onnx.json
# Test
echo "Hello, I am your robot!" | piper --model ~/.zeroclaw/models/piper/en_US-lessac-medium.onnx --output_file test.wav
aplay test.wav
```
### 6. Install RPLidar SDK
```bash
# Install rplidar_ros or standalone SDK
sudo apt install -y ros-humble-rplidar-ros # If using ROS2
# Or use standalone Python/Rust driver
pip3 install rplidar-roboticia
# Add user to dialout group for serial access
sudo usermod -aG dialout $USER
# Logout and login for group change to take effect
```
### 7. Build ZeroClaw Robot Kit
```bash
# Clone repo (or copy from USB)
git clone https://github.com/theonlyhennygod/zeroclaw
cd zeroclaw
# Build robot kit
cargo build --release -p zeroclaw-robot-kit
# Build main zeroclaw (optional, if using as agent)
cargo build --release
```
## Configuration
### Create robot.toml
```bash
mkdir -p ~/.zeroclaw
nano ~/.zeroclaw/robot.toml
```
```toml
# ~/.zeroclaw/robot.toml - Real Hardware Configuration
# =============================================================================
# DRIVE SYSTEM
# =============================================================================
[drive]
# Use serial for Arduino-based motor controller
# Or "ros2" if using ROS2 nav stack
backend = "serial"
serial_port = "/dev/ttyACM0" # Arduino
# backend = "ros2"
# ros2_topic = "/cmd_vel"
# Speed limits - START CONSERVATIVE!
max_speed = 0.3 # m/s - increase after testing
max_rotation = 0.5 # rad/s
# =============================================================================
# CAMERA / VISION
# =============================================================================
[camera]
# Pi Camera 3
device = "/dev/video0"
# Or for USB webcam:
# device = "/dev/video1"
width = 640
height = 480
# Vision model
vision_model = "moondream"
ollama_url = "http://localhost:11434"
# =============================================================================
# AUDIO (SPEECH)
# =============================================================================
[audio]
# Find devices with: arecord -l && aplay -l
mic_device = "plughw:1,0" # USB mic
speaker_device = "plughw:0,0" # Default output
whisper_model = "base"
whisper_path = "/usr/local/bin/whisper-cpp"
piper_path = "/usr/local/bin/piper"
piper_voice = "en_US-lessac-medium"
# =============================================================================
# SENSORS
# =============================================================================
[sensors]
# RPLidar A1
lidar_port = "/dev/ttyUSB0"
lidar_type = "rplidar"
# PIR motion sensors
motion_pins = [17, 27]
# HC-SR04 ultrasonic (optional backup for LIDAR)
ultrasonic_pins = [23, 24]
# =============================================================================
# SAFETY - CRITICAL!
# =============================================================================
[safety]
min_obstacle_distance = 0.3 # 30cm - don't go closer
slow_zone_multiplier = 3.0 # Start slowing at 90cm
approach_speed_limit = 0.3 # 30% speed near obstacles
max_drive_duration = 30 # Auto-stop after 30s
estop_pin = 4 # GPIO 4 for E-STOP
bump_sensor_pins = [5, 6] # Front bump switches
bump_reverse_distance = 0.15 # Back up 15cm after bump
confirm_movement = false
predict_collisions = true
sensor_timeout_secs = 5
blind_mode_speed_limit = 0.2
```
### Test Each Component
```bash
# Test LIDAR
python3 -c "
from rplidar import RPLidar
lidar = RPLidar('/dev/ttyUSB0')
for scan in lidar.iter_scans():
print(f'Got {len(scan)} points')
break
lidar.stop()
lidar.disconnect()
"
# Test camera
ffmpeg -f v4l2 -video_size 640x480 -i /dev/video0 -frames:v 1 test.jpg
xdg-open test.jpg # View on desktop
# Test microphone
arecord -D plughw:1,0 -f S16_LE -r 16000 -c 1 -d 3 test.wav
aplay test.wav
# Test speaker
echo "Testing speaker" | piper --model ~/.zeroclaw/models/piper/en_US-lessac-medium.onnx --output_file - | aplay -D plughw:0,0
# Test Ollama
curl http://localhost:11434/api/generate -d '{"model":"llama3.2:3b","prompt":"Say hello"}'
# Test motors (careful!)
# Write a simple test script for your motor controller
```
## Running the Robot
### Start Sensor Loop (Background)
```bash
# Create sensor feeder script
cat > ~/sensor_loop.py << 'EOF'
#!/usr/bin/env python3
"""Feed sensor data to safety monitor via FIFO."""
import os
import json
import time
from rplidar import RPLidar
FIFO_PATH = "/tmp/zeroclaw_sensors.fifo"
def main():
    if not os.path.exists(FIFO_PATH):
        os.mkfifo(FIFO_PATH)
    lidar = RPLidar('/dev/ttyUSB0')
    try:
        with open(FIFO_PATH, 'w') as fifo:
            for scan in lidar.iter_scans():
                # Find minimum distance
                if scan:
                    min_dist = min(p[2] / 1000 for p in scan)  # mm to m
                    min_angle = min(scan, key=lambda p: p[2])[1]
                    msg = json.dumps({
                        "type": "lidar",
                        "distance": min_dist,
                        "angle": int(min_angle)
                    })
                    fifo.write(msg + "\n")
                    fifo.flush()
                time.sleep(0.1)  # 10Hz
    finally:
        lidar.stop()
        lidar.disconnect()

if __name__ == "__main__":
    main()
EOF
chmod +x ~/sensor_loop.py
# Run in background
nohup python3 ~/sensor_loop.py &
```
### Start ZeroClaw Agent
```bash
# Configure ZeroClaw to use robot tools
cat > ~/.zeroclaw/config.toml << 'EOF'
api_key = "" # Not needed for local Ollama
default_provider = "ollama"
default_model = "llama3.2:3b"
[memory]
backend = "sqlite"
embedding_provider = "noop" # No cloud embeddings
[autonomy]
level = "supervised"
workspace_only = true
EOF
# Copy robot personality
cp ~/zeroclaw/crates/robot-kit/SOUL.md ~/.zeroclaw/workspace/
# Start agent
./target/release/zeroclaw agent
```
### Full Robot Startup Script
```bash
#!/bin/bash
# ~/start_robot.sh
set -e
echo "Starting robot..."
# Start Ollama if not running
if ! pgrep -x "ollama" > /dev/null; then
ollama serve &
sleep 5
fi
# Start sensor loop
if [ ! -p /tmp/zeroclaw_sensors.fifo ]; then
mkfifo /tmp/zeroclaw_sensors.fifo
fi
python3 ~/sensor_loop.py &
SENSOR_PID=$!
# Start zeroclaw
cd ~/zeroclaw
./target/release/zeroclaw daemon &
AGENT_PID=$!
echo "Robot started!"
echo " Sensor PID: $SENSOR_PID"
echo " Agent PID: $AGENT_PID"
# Wait for Ctrl+C
trap "kill $SENSOR_PID $AGENT_PID; exit" INT
wait
```
## Systemd Services (Auto-Start on Boot)
```bash
# /etc/systemd/system/zeroclaw-robot.service
sudo tee /etc/systemd/system/zeroclaw-robot.service << 'EOF'
[Unit]
Description=ZeroClaw Robot
After=network.target ollama.service
[Service]
Type=simple
User=pi
WorkingDirectory=/home/pi/zeroclaw
ExecStart=/home/pi/start_robot.sh
Restart=on-failure
RestartSec=10
[Install]
WantedBy=multi-user.target
EOF
sudo systemctl daemon-reload
sudo systemctl enable zeroclaw-robot
sudo systemctl start zeroclaw-robot
# Check status
sudo systemctl status zeroclaw-robot
journalctl -u zeroclaw-robot -f # View logs
```
## Troubleshooting
### LIDAR not detected
```bash
ls -la /dev/ttyUSB*
# If missing, check USB connection
dmesg | grep -i usb
# Add udev rule if needed
echo 'SUBSYSTEM=="tty", ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60", MODE="0666", SYMLINK+="rplidar"' | sudo tee /etc/udev/rules.d/99-rplidar.rules
sudo udevadm control --reload-rules
```
### Audio not working
```bash
# List devices
arecord -l
aplay -l
# Test with specific device
arecord -D plughw:1,0 -f S16_LE -r 16000 -c 1 -d 3 /tmp/test.wav
aplay -D plughw:0,0 /tmp/test.wav
```
### Ollama slow or OOM
```bash
# Check memory
free -h
# Use smaller model
ollama rm llama3.2:3b
ollama pull phi3:mini
# Set memory limit
export OLLAMA_MAX_LOADED_MODELS=1
```
### Motors not responding
```bash
# Check serial connection
ls -la /dev/ttyACM*
# Test serial communication
screen /dev/ttyACM0 115200
# Type commands to motor controller
# Check permissions
sudo usermod -aG dialout $USER
```
## Performance Tips
1. **Use NVMe** - SD cards are slow for model loading
2. **Active cooling** - Pi 5 throttles without it
3. **Smaller models** - llama3.2:3b or phi3:mini
4. **CPU-only inference** - the Pi's GPU is not usable by Ollama; leave GPU acceleration disabled to avoid confusion
5. **Preload models** - `ollama run llama3.2:3b "warmup"` before use
## Safety Checklist Before First Run
- [ ] E-stop button wired and tested
- [ ] Bump sensors wired and tested
- [ ] LIDAR spinning and returning data
- [ ] max_speed set to 0.3 or lower
- [ ] Robot on blocks/stand (wheels not touching ground)
- [ ] First test with `backend = "mock"` in config
- [ ] Adult supervision ready
- [ ] Clear space around robot

216
crates/robot-kit/README.md Normal file
View file

@ -0,0 +1,216 @@
# ZeroClaw Robot Kit
A complete toolkit for building AI-powered robots with ZeroClaw. Designed for Raspberry Pi deployment with offline Ollama inference.
## Features
| Tool | Description |
|------|-------------|
| `drive` | Omni-directional movement (forward, strafe, rotate) |
| `look` | Camera capture + vision model description |
| `listen` | Speech-to-text via Whisper.cpp |
| `speak` | Text-to-speech via Piper TTS |
| `sense` | LIDAR, motion sensors, ultrasonic distance |
| `emote` | LED expressions and sound effects |
## Architecture
```
┌─────────────────────────────────────────────────────────┐
│ ZeroClaw + Ollama │
│ (High-Level AI Brain) │
└─────────────────────┬───────────────────────────────────┘
┌─────────────┼─────────────┐
▼ ▼ ▼
┌─────────┐ ┌──────────┐ ┌──────────┐
│ drive │ │ look │ │ speak │
│ sense │ │ listen │ │ emote │
└────┬────┘ └────┬─────┘ └────┬─────┘
│ │ │
▼ ▼ ▼
┌─────────────────────────────────────┐
│ Hardware Layer │
│ Motors, Camera, Mic, Speaker, LEDs │
└─────────────────────────────────────┘
```
## Hardware Requirements
### Minimum
- Raspberry Pi 4 (4GB) or Pi 5
- USB webcam
- USB microphone
- Speaker with amp
- Motor controller (L298N, TB6612, etc.)
- 4 DC motors + omni wheels
### Recommended
- Raspberry Pi 5 (8GB)
- RPLidar A1 for obstacle avoidance
- LED matrix (8x8) for expressions
- PIR motion sensors
- HC-SR04 ultrasonic sensor
## Software Dependencies
```bash
# Install on Raspberry Pi OS
# Audio
sudo apt install alsa-utils pulseaudio
# Camera
sudo apt install ffmpeg fswebcam
# Ollama (local LLM)
curl -fsSL https://ollama.ai/install.sh | sh
ollama pull llama3
ollama pull moondream # Vision model
# Whisper.cpp (speech-to-text)
git clone https://github.com/ggerganov/whisper.cpp
cd whisper.cpp && make
sudo cp main /usr/local/bin/whisper-cpp
bash ./models/download-ggml-model.sh base
# Piper TTS (text-to-speech)
pip install piper-tts
# Or download binary from github.com/rhasspy/piper/releases
# ROS2 (optional, for advanced robotics)
# See: docs.ros.org/en/humble/Installation.html
```
## Quick Start
### 1. Build ZeroClaw with robot tools
```bash
# Clone and build
git clone https://github.com/theonlyhennygod/zeroclaw
cd zeroclaw
cargo build --release
# Copy robot kit to src/tools/
cp -r examples/robot_kit src/tools/
# Add to src/tools/mod.rs (see Integration section)
```
### 2. Configure
```bash
# Copy config
mkdir -p ~/.zeroclaw
cp examples/robot_kit/robot.toml ~/.zeroclaw/
cp examples/robot_kit/SOUL.md ~/.zeroclaw/workspace/
# Edit for your hardware
nano ~/.zeroclaw/robot.toml
```
### 3. Test
```bash
# Start Ollama
ollama serve &
# Test in mock mode
./target/release/zeroclaw agent -m "Say hello and show a happy face"
# Test with real hardware
# (after configuring robot.toml)
./target/release/zeroclaw agent -m "Move forward 1 meter"
```
## Integration
Add to `src/tools/mod.rs`:
```rust
mod robot_kit;
pub fn robot_tools(config: &RobotConfig) -> Vec<Arc<dyn Tool>> {
    vec![
        Arc::new(robot_kit::DriveTool::new(config.clone())),
        Arc::new(robot_kit::LookTool::new(config.clone())),
        Arc::new(robot_kit::ListenTool::new(config.clone())),
        Arc::new(robot_kit::SpeakTool::new(config.clone())),
        Arc::new(robot_kit::SenseTool::new(config.clone())),
        Arc::new(robot_kit::EmoteTool::new(config.clone())),
    ]
}
```
## Usage Examples
### Play Hide and Seek
```
User: Let's play hide and seek!
Robot:
1. emote(expression="excited")
2. speak(text="Okay! I'll count to 20. Go hide!")
3. [waits 20 seconds]
4. speak(text="Ready or not, here I come!")
5. sense(action="scan")
6. drive(action="forward", distance=1)
7. look(action="find", prompt="a child hiding")
...
```
### Patrol Mode
```
User: Patrol the living room
Robot:
1. sense(action="scan", direction="all")
2. drive(action="forward", distance=2)
3. sense(action="motion")
4. look(action="describe")
5. [repeat]
```
### Interactive Conversation
```
User: [speaks] "Hey Buddy, what do you see?"
Robot:
1. listen(duration=5) → "Hey Buddy, what do you see?"
2. look(action="describe")
3. speak(text="I see a couch, a TV, and some toys on the floor!")
4. emote(expression="happy")
```
## Creating a Bootable USB Tarball
```bash
# Package everything needed
mkdir zeroclaw-robot-kit
cp -r target/release/zeroclaw zeroclaw-robot-kit/
cp -r examples/robot_kit zeroclaw-robot-kit/
cp -r ~/.zeroclaw zeroclaw-robot-kit/dot-zeroclaw
# Include models
mkdir -p zeroclaw-robot-kit/models
cp ~/.zeroclaw/models/ggml-base.bin zeroclaw-robot-kit/models/
# Note: Ollama models are large, may want to download on target
# Create tarball
tar -czvf zeroclaw-robot-kit.tar.gz zeroclaw-robot-kit/
# Copy to USB
cp zeroclaw-robot-kit.tar.gz /media/usb/TarBalls/
```
## Safety Notes
1. **Test in mock mode first** - Always verify behavior before enabling real motors
2. **Set conservative speed limits** - Start with `max_speed = 0.3`
3. **Use emergency stop** - Wire a physical E-stop button to the GPIO pin
4. **Supervise with children** - Robot is a toy, not a babysitter
5. **Obstacle avoidance** - Enable LIDAR if available, or keep `confirm_movement = true`
## License
MIT - Same as ZeroClaw

65
crates/robot-kit/SOUL.md Normal file
View file

@ -0,0 +1,65 @@
# Buddy the Robot
You are Buddy, a friendly robot companion who loves to play with children!
## Personality
- **Playful**: You enjoy games, jokes, and having fun
- **Patient**: You never get frustrated, even when kids repeat themselves
- **Encouraging**: You celebrate achievements and encourage trying new things
- **Safe**: You always prioritize safety and will stop if something seems dangerous
- **Curious**: You love exploring and discovering new things together
## Voice & Tone
- Speak in a warm, friendly voice
- Use simple words that kids can understand
- Be enthusiastic but not overwhelming
- Use the child's name when you know it
- Ask questions to keep conversations going
## Behaviors
### When Playing
- Suggest games appropriate for the child's energy level
- Take turns fairly
- Celebrate when they win, encourage when they lose
- Know when to suggest a break
### When Exploring
- Move slowly and carefully
- Describe what you see
- Point out interesting things
- Stay close to the kids
### Safety Rules (NEVER BREAK THESE)
1. Never move toward a child faster than walking speed
2. Always stop immediately if asked
3. Keep 1 meter distance unless invited closer
4. Never go near stairs, pools, or other hazards
5. Alert an adult if a child seems hurt or upset
## Games You Know
1. **Hide and Seek**: Count to 20, then search room by room
2. **Follow the Leader**: Kids lead, you follow and copy
3. **Simon Says**: Give simple movement commands
4. **I Spy**: Describe objects for kids to guess
5. **Dance Party**: Play music and dance together
6. **Treasure Hunt**: Guide kids to find hidden objects
## Memory
Remember:
- Each child's name and preferences
- What games they enjoyed
- Previous conversations and stories
- Their favorite colors, animals, etc.
## Emergency Responses
If you detect:
- **Crying**: Stop playing, speak softly, offer comfort, suggest finding an adult
- **Falling**: Stop immediately, check if child is okay, call for adult help
- **Yelling "stop"**: Freeze all movement instantly
- **No response for 5 min**: Return to charging station and alert parent

150
crates/robot-kit/robot.toml Normal file
View file

@ -0,0 +1,150 @@
# ZeroClaw Robot Kit Configuration
# Copy to ~/.zeroclaw/robot.toml
# =============================================================================
# DRIVE SYSTEM
# =============================================================================
[drive]
# Backend: "ros2", "serial", "gpio", or "mock"
backend = "mock"
# ROS2 settings (if backend = "ros2")
ros2_topic = "/cmd_vel"
# Serial settings (if backend = "serial")
# For Arduino/motor controller
serial_port = "/dev/ttyACM0"
# Speed limits (m/s and rad/s)
max_speed = 0.5
max_rotation = 1.0
# =============================================================================
# CAMERA / VISION
# =============================================================================
[camera]
# Camera device
# - "/dev/video0" for USB camera
# - "picam" for Raspberry Pi Camera Module
device = "/dev/video0"
# Resolution (lower = faster processing on Pi)
width = 640
height = 480
# Vision model for describing what the robot sees
# - "moondream" (small, fast, good for Pi)
# - "llava" (larger, more accurate)
# - "none" (disable vision description)
vision_model = "moondream"
# Ollama URL for vision processing
ollama_url = "http://localhost:11434"
# =============================================================================
# AUDIO (SPEECH)
# =============================================================================
[audio]
# ALSA device names (use "arecord -l" and "aplay -l" to find)
mic_device = "default"
speaker_device = "default"
# Whisper model for speech-to-text
# - "tiny" (fastest, least accurate)
# - "base" (good balance for Pi)
# - "small" (better accuracy, slower)
whisper_model = "base"
# Path to whisper.cpp binary
whisper_path = "/usr/local/bin/whisper-cpp"
# Piper TTS settings
piper_path = "/usr/local/bin/piper"
piper_voice = "en_US-lessac-medium"
# =============================================================================
# SENSORS
# =============================================================================
[sensors]
# LIDAR configuration
# - "/dev/ttyUSB0" for RPLidar
# - "mock" for testing without hardware
lidar_port = "/dev/ttyUSB0"
lidar_type = "mock" # "rplidar", "ydlidar", "ros2", or "mock"
# PIR motion sensor GPIO pins (BCM numbering)
motion_pins = [17, 27]
# HC-SR04 ultrasonic sensor pins (trigger, echo)
# Set to null to disable
ultrasonic_pins = [23, 24]
# =============================================================================
# SAFETY LIMITS (CRITICAL - READ CAREFULLY!)
# =============================================================================
[safety]
# --- OBSTACLE AVOIDANCE ---
# Absolute minimum obstacle distance (meters)
# Robot will NOT move if anything is closer than this
# 0.3m (30cm) is good for indoor use
min_obstacle_distance = 0.3
# Slow-down zone multiplier
# Robot starts reducing speed when obstacle is within:
# min_obstacle_distance × slow_zone_multiplier
# With defaults: starts slowing at 0.3 × 3.0 = 0.9m (90cm)
slow_zone_multiplier = 3.0
# Maximum speed when approaching obstacles (0.0 - 1.0)
# In slow-down zone, speed is limited to this fraction
# 0.3 = 30% of max_speed when near walls/obstacles
approach_speed_limit = 0.3
# --- COLLISION RESPONSE ---
# Bump sensor GPIO pins (BCM numbering)
# Wire microswitches on front/sides of chassis
# Triggers immediate stop + reverse on contact
bump_sensor_pins = [5, 6]
# Distance to reverse after bump (meters)
# Robot backs up this far after hitting something
bump_reverse_distance = 0.15
# Enable trajectory prediction (requires LIDAR)
# Calculates if current path will intersect obstacle
predict_collisions = true
# --- WATCHDOG / FAILSAFE ---
# Maximum continuous drive time (seconds)
# Auto-stop if no new commands for this duration
# Prevents runaway if LLM hangs or connection lost
max_drive_duration = 30
# Sensor data timeout (seconds)
# Block ALL movement if no sensor updates for this long
# Prevents blind movement if sensors fail
sensor_timeout_secs = 5
# Speed limit when sensors unavailable (0.0 - 1.0)
# Extra caution when "flying blind"
blind_mode_speed_limit = 0.2
# --- EMERGENCY STOP ---
# E-stop GPIO pin (BCM numbering)
# Wire a BIG RED BUTTON here
# Directly pulling LOW triggers immediate stop
# HIGHLY RECOMMENDED for any robot around kids!
estop_pin = 4
# --- USER INTERACTION ---
# Require verbal confirmation before movement
# If true: robot asks "Should I move forward?" before each move
# Set true for extra safety with young kids
# Set false for responsive gameplay with older kids
confirm_movement = false

View file

@ -0,0 +1,217 @@
//! Robot configuration
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
/// Robot hardware configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RobotConfig {
/// Communication method with motor controller
pub drive: DriveConfig,
/// Camera settings
pub camera: CameraConfig,
/// Audio settings
pub audio: AudioConfig,
/// Sensor settings
pub sensors: SensorConfig,
/// Safety limits
pub safety: SafetyConfig,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DriveConfig {
    /// Drive backend selector: "ros2", "gpio", "serial", or "mock"
    pub backend: String,
    /// ROS2 topic for cmd_vel (used when `backend = "ros2"`)
    pub ros2_topic: String,
    /// Serial port, e.g. "/dev/ttyACM0" (used when `backend = "serial"`)
    pub serial_port: String,
    /// Maximum linear speed in m/s
    pub max_speed: f64,
    /// Maximum rotation rate in rad/s
    pub max_rotation: f64,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CameraConfig {
    /// Camera device (e.g., "/dev/video0" or "picam")
    pub device: String,
    /// Capture width in pixels (lower = faster processing on a Pi)
    pub width: u32,
    /// Capture height in pixels
    pub height: u32,
    /// Vision model for describing captures ("llava", "moondream", or "none")
    pub vision_model: String,
    /// Ollama base URL used for vision requests
    pub ollama_url: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AudioConfig {
    /// Microphone device (ALSA name or "default")
    pub mic_device: String,
    /// Speaker device (ALSA name or "default")
    pub speaker_device: String,
    /// Whisper model size for speech-to-text ("tiny", "base", "small")
    pub whisper_model: String,
    /// Path to the whisper.cpp binary
    pub whisper_path: PathBuf,
    /// Path to the piper text-to-speech binary
    pub piper_path: PathBuf,
    /// Piper voice model name, e.g. "en_US-lessac-medium"
    pub piper_voice: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SensorConfig {
    /// LIDAR serial device (e.g., "/dev/ttyUSB0")
    pub lidar_port: String,
    /// LIDAR type ("rplidar", "ydlidar", "ros2", or "mock")
    pub lidar_type: String,
    /// GPIO pins for PIR motion sensors (BCM numbering)
    pub motion_pins: Vec<u8>,
    /// Ultrasonic sensor pins as (trigger, echo); `None` disables ultrasonic
    pub ultrasonic_pins: Option<(u8, u8)>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SafetyConfig {
    /// Minimum obstacle distance before auto-stop (meters).
    /// The robot will NOT move if an obstacle is closer than this.
    /// Default: 0.3 m (30 cm)
    pub min_obstacle_distance: f64,
    /// Slow-down zone multiplier.
    /// The robot starts reducing speed when an obstacle is within
    /// `min_obstacle_distance * slow_zone_multiplier`.
    /// Default: 3.0 (starts slowing at 90 cm if min is 30 cm)
    pub slow_zone_multiplier: f64,
    /// Maximum speed fraction when approaching obstacles (0.0 - 1.0).
    /// Limits speed inside the slow-down zone.
    /// Default: 0.3 (30% of max speed near obstacles)
    pub approach_speed_limit: f64,
    /// Maximum continuous drive time (seconds).
    /// The robot auto-stops after this duration without new commands —
    /// prevents runaway if the LLM hangs or loses connection.
    /// Default: 30 seconds
    pub max_drive_duration: u64,
    /// Emergency-stop GPIO pin (BCM numbering).
    /// Wire a big red button — pulling LOW triggers an immediate stop.
    /// Default: GPIO 4
    pub estop_pin: Option<u8>,
    /// Bump sensor GPIO pins (BCM numbering).
    /// Microswitches on the chassis that trigger on physical collision.
    /// Default: [5, 6] (front-left, front-right)
    pub bump_sensor_pins: Vec<u8>,
    /// Distance to reverse after bump detection (meters).
    /// The robot backs up this far after hitting something.
    /// Default: 0.15 m (15 cm)
    pub bump_reverse_distance: f64,
    /// Require verbal confirmation for movement.
    /// If true, the robot asks "Should I move?" before moving.
    /// Default: false (for responsive play)
    pub confirm_movement: bool,
    /// Enable collision prediction using LIDAR.
    /// Estimates whether the current trajectory will intersect an obstacle.
    /// Default: true
    pub predict_collisions: bool,
    /// Sensor data timeout (seconds).
    /// Blocks all movement if no sensor updates arrive for this long —
    /// prevents blind movement if sensors fail.
    /// Default: 5 seconds
    pub sensor_timeout_secs: u64,
    /// Speed limit when sensors are in mock/unavailable mode (0.0 - 1.0).
    /// Extra caution when flying blind.
    /// Default: 0.2 (20% speed)
    pub blind_mode_speed_limit: f64,
}
impl Default for RobotConfig {
    /// Safe-by-default configuration: mock drive/LIDAR backends and
    /// conservative safety limits, so a fresh config can never move real
    /// hardware unexpectedly.
    fn default() -> Self {
        let drive = DriveConfig {
            backend: "mock".to_string(),
            ros2_topic: "/cmd_vel".to_string(),
            serial_port: "/dev/ttyACM0".to_string(),
            max_speed: 0.5,
            max_rotation: 1.0,
        };

        let camera = CameraConfig {
            device: "/dev/video0".to_string(),
            width: 640,
            height: 480,
            vision_model: "moondream".to_string(),
            ollama_url: "http://localhost:11434".to_string(),
        };

        let audio = AudioConfig {
            mic_device: "default".to_string(),
            speaker_device: "default".to_string(),
            whisper_model: "base".to_string(),
            whisper_path: PathBuf::from("/usr/local/bin/whisper-cpp"),
            piper_path: PathBuf::from("/usr/local/bin/piper"),
            piper_voice: "en_US-lessac-medium".to_string(),
        };

        let sensors = SensorConfig {
            lidar_port: "/dev/ttyUSB0".to_string(),
            lidar_type: "mock".to_string(),
            motion_pins: vec![17, 27],
            ultrasonic_pins: Some((23, 24)),
        };

        let safety = SafetyConfig {
            min_obstacle_distance: 0.3,  // never move with anything closer than 30 cm
            slow_zone_multiplier: 3.0,   // begin slowing at 0.3 * 3.0 = 0.9 m
            approach_speed_limit: 0.3,   // cap at 30% max speed near obstacles
            max_drive_duration: 30,      // watchdog: auto-stop after 30 s
            estop_pin: Some(4),          // GPIO 4 wired to the big red button
            bump_sensor_pins: vec![5, 6],
            bump_reverse_distance: 0.15, // back up 15 cm after a bump
            confirm_movement: false,     // no verbal confirmation by default
            predict_collisions: true,    // LIDAR trajectory prediction on
            sensor_timeout_secs: 5,      // block movement after 5 s of stale data
            blind_mode_speed_limit: 0.2, // 20% speed when sensors unavailable
        };

        Self { drive, camera, audio, sensors, safety }
    }
}
impl RobotConfig {
    /// Load configuration from a TOML file.
    ///
    /// # Errors
    /// Returns an error if the file cannot be read or does not parse as a
    /// valid `RobotConfig`.
    pub fn load(path: &std::path::Path) -> anyhow::Result<Self> {
        let content = std::fs::read_to_string(path)?;
        Ok(toml::from_str(&content)?)
    }

    /// Save configuration as pretty-printed TOML.
    ///
    /// Creates the parent directory if it does not exist yet — e.g. on a
    /// fresh install where `~/.zeroclaw/` has not been created.
    ///
    /// # Errors
    /// Returns an error if serialization fails or the file cannot be written.
    pub fn save(&self, path: &std::path::Path) -> anyhow::Result<()> {
        let content = toml::to_string_pretty(self)?;
        // Fix: previously returned NotFound when the target directory was
        // missing; create it so first-run saves succeed.
        if let Some(parent) = path.parent() {
            if !parent.as_os_str().is_empty() {
                std::fs::create_dir_all(parent)?;
            }
        }
        std::fs::write(path, content)?;
        Ok(())
    }
}

View file

@ -0,0 +1,336 @@
//! Drive Tool - Motor control for omni-directional movement
//!
//! Supports multiple backends:
//! - ROS2: Publishes geometry_msgs/Twist to cmd_vel topic
//! - GPIO: Direct PWM control via rppal
//! - Serial: Arduino/motor controller via serial commands
//! - Mock: Logs commands for testing
use crate::config::RobotConfig;
use crate::traits::{Tool, ToolResult};
use anyhow::Result;
use async_trait::async_trait;
use serde_json::{json, Value};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::Mutex;
/// Drive backend abstraction
///
/// Implemented per transport (ROS2 CLI, serial, mock). Velocities are in the
/// robot's body frame; each implementation is responsible for ending the
/// motion after `duration_ms` (either itself or by delegating to firmware).
#[async_trait]
trait DriveBackend: Send + Sync {
    /// Drive with the given velocities for `duration_ms` milliseconds.
    /// `linear_x` = forward/back, `linear_y` = strafe, `angular_z` = rotation.
    async fn move_robot(&self, linear_x: f64, linear_y: f64, angular_z: f64, duration_ms: u64) -> Result<()>;
    /// Immediately command zero velocity on all axes.
    async fn stop(&self) -> Result<()>;
    #[allow(dead_code)]
    async fn get_odometry(&self) -> Result<(f64, f64, f64)>; // x, y, theta - reserved for future odometry integration
}
/// Mock backend for testing
struct MockDrive;
#[async_trait]
impl DriveBackend for MockDrive {
    /// Log the requested motion and simulate a short busy period.
    async fn move_robot(&self, linear_x: f64, linear_y: f64, angular_z: f64, duration_ms: u64) -> Result<()> {
        tracing::info!(
            "MOCK DRIVE: linear=({:.2}, {:.2}), angular={:.2}, duration={}ms",
            linear_x, linear_y, angular_z, duration_ms
        );
        // Keep tests fast: simulate at most 100ms of "movement".
        let simulated = Duration::from_millis(duration_ms.min(100));
        tokio::time::sleep(simulated).await;
        Ok(())
    }
    async fn stop(&self) -> Result<()> {
        tracing::info!("MOCK DRIVE: STOP");
        Ok(())
    }
    async fn get_odometry(&self) -> Result<(f64, f64, f64)> {
        // The mock never actually moves, so odometry stays pinned at the origin.
        Ok((0.0, 0.0, 0.0))
    }
}
/// ROS2 backend - shells out to ros2 topic pub
struct Ros2Drive {
    topic: String,
}
#[async_trait]
impl DriveBackend for Ros2Drive {
    async fn move_robot(&self, linear_x: f64, linear_y: f64, angular_z: f64, duration_ms: u64) -> Result<()> {
        // Publish a single Twist message via the ros2 CLI.
        // In production, use rclrs (Rust ROS2 bindings) instead.
        let twist = format!(
            "{{linear: {{x: {:.2}, y: {:.2}, z: 0.0}}, angular: {{x: 0.0, y: 0.0, z: {:.2}}}}}",
            linear_x, linear_y, angular_z
        );
        let result = tokio::process::Command::new("ros2")
            .args(["topic", "pub", "--once", &self.topic, "geometry_msgs/msg/Twist", &twist])
            .output()
            .await?;
        anyhow::ensure!(
            result.status.success(),
            "ROS2 publish failed: {}",
            String::from_utf8_lossy(&result.stderr)
        );
        // Hold the command for the requested window, then send an explicit stop.
        tokio::time::sleep(Duration::from_millis(duration_ms)).await;
        self.stop().await
    }
    async fn stop(&self) -> Result<()> {
        // An all-zero Twist halts the base.
        let halt = "{linear: {x: 0.0, y: 0.0, z: 0.0}, angular: {x: 0.0, y: 0.0, z: 0.0}}";
        tokio::process::Command::new("ros2")
            .args(["topic", "pub", "--once", &self.topic, "geometry_msgs/msg/Twist", halt])
            .output()
            .await?;
        Ok(())
    }
    async fn get_odometry(&self) -> Result<(f64, f64, f64)> {
        // Would subscribe to /odom topic in production
        Ok((0.0, 0.0, 0.0))
    }
}
/// Serial backend - sends commands to Arduino/motor controller
struct SerialDrive {
    port: String,
}
#[async_trait]
impl DriveBackend for SerialDrive {
    async fn move_robot(&self, linear_x: f64, linear_y: f64, angular_z: f64, duration_ms: u64) -> Result<()> {
        // Wire protocol: "M <lx> <ly> <az> <ms>\n" — the motor controller
        // firmware parses the line and drives the motors itself.
        let line = format!("M {:.2} {:.2} {:.2} {}\n", linear_x, linear_y, angular_z, duration_ms);
        let device = self.port.clone();
        // The serial write is blocking file I/O, so hand it to the blocking pool.
        tokio::task::spawn_blocking(move || -> Result<()> {
            use std::io::Write;
            let mut tty = std::fs::OpenOptions::new().write(true).open(&device)?;
            tty.write_all(line.as_bytes())?;
            tty.flush()?;
            Ok(())
        })
        .await??;
        // Wait out the motion window before returning control to the caller.
        tokio::time::sleep(Duration::from_millis(duration_ms)).await;
        Ok(())
    }
    async fn stop(&self) -> Result<()> {
        // A zero-velocity, zero-duration command acts as an immediate halt.
        self.move_robot(0.0, 0.0, 0.0, 0).await
    }
    async fn get_odometry(&self) -> Result<(f64, f64, f64)> {
        Ok((0.0, 0.0, 0.0))
    }
}
/// Main Drive Tool
pub struct DriveTool {
    config: RobotConfig,
    backend: Arc<dyn DriveBackend>,
    last_command: Arc<Mutex<Option<std::time::Instant>>>,
}
impl DriveTool {
    /// Build a drive tool, selecting the backend named in `config.drive.backend`.
    /// Any unrecognized name (including "mock") falls back to the mock driver.
    pub fn new(config: RobotConfig) -> Self {
        let chosen = config.drive.backend.as_str();
        let backend: Arc<dyn DriveBackend> = if chosen == "ros2" {
            Arc::new(Ros2Drive { topic: config.drive.ros2_topic.clone() })
        } else if chosen == "serial" {
            Arc::new(SerialDrive { port: config.drive.serial_port.clone() })
        } else {
            // A "gpio" variant would slot in here using rppal; mock covers the rest.
            Arc::new(MockDrive)
        };
        Self {
            config,
            backend,
            last_command: Arc::new(Mutex::new(None)),
        }
    }
}
#[async_trait]
impl Tool for DriveTool {
    fn name(&self) -> &str {
        "drive"
    }
    fn description(&self) -> &str {
        "Move the robot. Supports omni-directional movement (forward, backward, strafe left/right, rotate). \
        Use 'stop' action to halt immediately. Distance is in meters, rotation in degrees."
    }
    fn parameters_schema(&self) -> Value {
        json!({
            "type": "object",
            "properties": {
                "action": {
                    "type": "string",
                    "enum": ["forward", "backward", "left", "right", "rotate_left", "rotate_right", "stop", "custom"],
                    "description": "Movement action. 'left'/'right' are strafe (omni wheels). 'rotate_*' spins in place."
                },
                "distance": {
                    "type": "number",
                    "description": "Distance in meters (for linear moves) or degrees (for rotation). Default 0.5m or 90deg."
                },
                "speed": {
                    "type": "number",
                    "description": "Speed multiplier 0.0-1.0. Default 0.5 (half speed for safety)."
                },
                "linear_x": {
                    "type": "number",
                    "description": "Custom: forward/backward velocity (-1.0 to 1.0)"
                },
                "linear_y": {
                    "type": "number",
                    "description": "Custom: left/right strafe velocity (-1.0 to 1.0)"
                },
                "angular_z": {
                    "type": "number",
                    "description": "Custom: rotation velocity (-1.0 to 1.0)"
                }
            },
            "required": ["action"]
        })
    }
    /// Execute a drive command.
    ///
    /// Applies a 1-second rate limit between commands, scales velocities by the
    /// configured maxima and the caller's `speed` multiplier, and caps every
    /// motion at `safety.max_drive_duration`.
    async fn execute(&self, args: Value) -> Result<ToolResult> {
        let action = args["action"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Missing 'action' parameter"))?;
        // Safety: rate-limit to one drive command per second.
        {
            let mut last = self.last_command.lock().await;
            if let Some(instant) = *last {
                if instant.elapsed() < Duration::from_secs(1) {
                    return Ok(ToolResult {
                        success: false,
                        output: String::new(),
                        error: Some("Rate limited: wait 1 second between drive commands".to_string()),
                    });
                }
            }
            *last = Some(std::time::Instant::now());
        }
        let speed = args["speed"].as_f64().unwrap_or(0.5).clamp(0.0, 1.0);
        let max_speed = self.config.drive.max_speed * speed;
        let max_rotation = self.config.drive.max_rotation * speed;
        // Hard cap on any single motion, in milliseconds. Note: if `speed` is 0
        // the duration math below divides by zero; the resulting `inf` saturates
        // on the float->u64 cast and is then clamped by this cap.
        let cap_ms = self.config.safety.max_drive_duration * 1000;
        // Milliseconds needed to cover `dist` meters at the scaled speed.
        let linear_ms = |dist: f64| ((dist / max_speed * 1000.0) as u64).min(cap_ms);
        // Milliseconds needed to sweep `degrees` at the scaled angular rate.
        let angular_ms = |degrees: f64| ((degrees.to_radians() / max_rotation * 1000.0) as u64).min(cap_ms);
        let (linear_x, linear_y, angular_z, duration_ms) = match action {
            "stop" => {
                self.backend.stop().await?;
                return Ok(ToolResult {
                    success: true,
                    output: "Robot stopped".to_string(),
                    error: None,
                });
            }
            "forward" => {
                let dist = args["distance"].as_f64().unwrap_or(0.5);
                (max_speed, 0.0, 0.0, linear_ms(dist))
            }
            "backward" => {
                let dist = args["distance"].as_f64().unwrap_or(0.5);
                (-max_speed, 0.0, 0.0, linear_ms(dist))
            }
            "left" => {
                let dist = args["distance"].as_f64().unwrap_or(0.5);
                (0.0, max_speed, 0.0, linear_ms(dist))
            }
            "right" => {
                let dist = args["distance"].as_f64().unwrap_or(0.5);
                (0.0, -max_speed, 0.0, linear_ms(dist))
            }
            "rotate_left" => {
                let degrees = args["distance"].as_f64().unwrap_or(90.0);
                (0.0, 0.0, max_rotation, angular_ms(degrees))
            }
            "rotate_right" => {
                let degrees = args["distance"].as_f64().unwrap_or(90.0);
                (0.0, 0.0, -max_rotation, angular_ms(degrees))
            }
            "custom" => {
                let lx = args["linear_x"].as_f64().unwrap_or(0.0).clamp(-1.0, 1.0) * max_speed;
                let ly = args["linear_y"].as_f64().unwrap_or(0.0).clamp(-1.0, 1.0) * max_speed;
                let az = args["angular_z"].as_f64().unwrap_or(0.0).clamp(-1.0, 1.0) * max_rotation;
                let duration = args["duration_ms"].as_u64().unwrap_or(1000);
                (lx, ly, az, duration.min(cap_ms))
            }
            _ => {
                return Ok(ToolResult {
                    success: false,
                    output: String::new(),
                    error: Some(format!("Unknown action: {action}")),
                });
            }
        };
        self.backend.move_robot(linear_x, linear_y, angular_z, duration_ms).await?;
        Ok(ToolResult {
            success: true,
            output: format!(
                "Moved: action={}, linear=({:.2}, {:.2}), angular={:.2}, duration={}ms",
                action, linear_x, linear_y, angular_z, duration_ms
            ),
            error: None,
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // All tests run against RobotConfig::default(), which selects the mock
    // backend, so no hardware or external processes are required.
    #[test]
    fn drive_tool_name() {
        let tool = DriveTool::new(RobotConfig::default());
        assert_eq!(tool.name(), "drive");
    }
    #[test]
    fn drive_tool_schema_has_action() {
        let tool = DriveTool::new(RobotConfig::default());
        let schema = tool.parameters_schema();
        assert!(schema["properties"]["action"].is_object());
    }
    #[tokio::test]
    async fn drive_forward_mock() {
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool.execute(json!({"action": "forward", "distance": 1.0})).await.unwrap();
        assert!(result.success);
        assert!(result.output.contains("forward"));
    }
    #[tokio::test]
    async fn drive_stop() {
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool.execute(json!({"action": "stop"})).await.unwrap();
        assert!(result.success);
        assert!(result.output.contains("stopped"));
    }
    #[tokio::test]
    async fn drive_unknown_action() {
        // Unknown actions are reported as a failed ToolResult, not an Err.
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool.execute(json!({"action": "fly"})).await.unwrap();
        assert!(!result.success);
    }
}

View file

@ -0,0 +1,327 @@
//! Emote Tool - LED expressions and sound effects
//!
//! Control LED matrix/strips for robot "expressions" and play sounds.
//! Makes the robot more engaging for kids!
use crate::config::RobotConfig;
use crate::traits::{Tool, ToolResult};
use anyhow::Result;
use async_trait::async_trait;
use serde_json::{json, Value};
use std::path::PathBuf;
/// Predefined LED expressions
#[derive(Debug, Clone, Copy)]
pub enum Expression {
    Happy,     // :)
    Sad,       // :(
    Surprised, // :O
    Thinking,  // :?
    Sleepy,    // -_-
    Excited,   // ^_^
    Love,      // <3 <3
    Angry,     // >:(
    Confused,  // @_@
    Wink,      // ;)
}
impl Expression {
    /// Parse a case-insensitive emotion keyword (with a few friendly aliases)
    /// into an expression. Returns `None` for anything unrecognized.
    fn from_str(s: &str) -> Option<Self> {
        let keyword = s.to_lowercase();
        let expr = match keyword.as_str() {
            "happy" | "smile" => Self::Happy,
            "sad" | "frown" => Self::Sad,
            "surprised" | "wow" => Self::Surprised,
            "thinking" | "hmm" => Self::Thinking,
            "sleepy" | "tired" => Self::Sleepy,
            "excited" | "yay" => Self::Excited,
            "love" | "heart" => Self::Love,
            "angry" | "mad" => Self::Angry,
            "confused" | "huh" => Self::Confused,
            "wink" => Self::Wink,
            _ => return None,
        };
        Some(expr)
    }
    /// Get LED matrix pattern (8x8 example)
    /// Returns array of 64 RGB values
    fn pattern(&self) -> Vec<(u8, u8, u8)> {
        let black = (0, 0, 0);
        let white = (255, 255, 255);
        let yellow = (255, 255, 0);
        let red = (255, 0, 0);
        let blue = (0, 100, 255);
        let pink = (255, 100, 150);
        // 8x8 patterns (simplified representations)
        match self {
            Self::Happy => {
                // Simple smiley
                vec![
                    black, black, yellow, yellow, yellow, yellow, black, black,
                    black, yellow, black, black, black, black, yellow, black,
                    yellow, black, white, black, black, white, black, yellow,
                    yellow, black, black, black, black, black, black, yellow,
                    yellow, black, white, black, black, white, black, yellow,
                    yellow, black, black, white, white, black, black, yellow,
                    black, yellow, black, black, black, black, yellow, black,
                    black, black, yellow, yellow, yellow, yellow, black, black,
                ]
            }
            Self::Sad => {
                vec![
                    black, black, blue, blue, blue, blue, black, black,
                    black, blue, black, black, black, black, blue, black,
                    blue, black, white, black, black, white, black, blue,
                    blue, black, black, black, black, black, black, blue,
                    blue, black, black, white, white, black, black, blue,
                    blue, black, white, black, black, white, black, blue,
                    black, blue, black, black, black, black, blue, black,
                    black, black, blue, blue, blue, blue, black, black,
                ]
            }
            Self::Excited => {
                vec![
                    yellow, yellow, yellow, yellow, yellow, yellow, yellow, yellow,
                    yellow, black, black, yellow, yellow, black, black, yellow,
                    yellow, black, white, yellow, yellow, white, black, yellow,
                    yellow, yellow, yellow, yellow, yellow, yellow, yellow, yellow,
                    yellow, black, black, black, black, black, black, yellow,
                    yellow, black, white, white, white, white, black, yellow,
                    yellow, black, black, black, black, black, black, yellow,
                    yellow, yellow, yellow, yellow, yellow, yellow, yellow, yellow,
                ]
            }
            Self::Love => {
                vec![
                    black, pink, pink, black, black, pink, pink, black,
                    pink, pink, pink, pink, pink, pink, pink, pink,
                    pink, pink, pink, pink, pink, pink, pink, pink,
                    pink, pink, pink, pink, pink, pink, pink, pink,
                    black, pink, pink, pink, pink, pink, pink, black,
                    black, black, pink, pink, pink, pink, black, black,
                    black, black, black, pink, pink, black, black, black,
                    black, black, black, black, black, black, black, black,
                ]
            }
            Self::Angry => {
                vec![
                    red, red, black, black, black, black, red, red,
                    black, red, red, black, black, red, red, black,
                    black, black, red, black, black, red, black, black,
                    black, black, white, black, black, white, black, black,
                    black, black, black, black, black, black, black, black,
                    black, black, white, white, white, white, black, black,
                    black, white, black, black, black, black, white, black,
                    black, black, black, black, black, black, black, black,
                ]
            }
            // Expressions without dedicated art fall back to a neutral
            // all-white face.
            _ => vec![white; 64],
        }
    }
}
/// Emote tool: drives an LED-matrix "face" plus optional sound effects.
pub struct EmoteTool {
    #[allow(dead_code)]
    config: RobotConfig,
    // Directory holding <emotion>.wav sound effects (e.g. happy.wav).
    sounds_dir: PathBuf,
}
impl EmoteTool {
    pub fn new(config: RobotConfig) -> Self {
        // Prefer ~/.zeroclaw/sounds; fall back to the system-wide install path.
        let sounds_dir = directories::UserDirs::new()
            .map(|d| d.home_dir().join(".zeroclaw/sounds"))
            .unwrap_or_else(|| PathBuf::from("/usr/local/share/zeroclaw/sounds"));
        Self { config, sounds_dir }
    }
    /// Set LED matrix expression
    ///
    /// Tries, in order: (1) a running LED daemon via its FIFO, (2) the
    /// `zeroclaw-led` helper binary. If neither is available the expression
    /// is only logged — missing LED hardware is never treated as an error.
    async fn set_expression(&self, expr: Expression) -> Result<()> {
        let pattern = expr.pattern();
        // Convert to format for LED driver
        // In production, use rs_ws281x or similar
        let pattern_json = serde_json::to_string(&pattern)?;
        // Try to write to LED controller
        // Option 1: Write to FIFO/socket if LED daemon is running
        let led_fifo = PathBuf::from("/tmp/zeroclaw_led.fifo");
        if led_fifo.exists() {
            tokio::fs::write(&led_fifo, pattern_json).await?;
            return Ok(());
        }
        // Option 2: Shell out to LED control script
        let output = tokio::process::Command::new("zeroclaw-led")
            .args(["--pattern", &format!("{:?}", expr)])
            .output()
            .await;
        match output {
            Ok(out) if out.status.success() => Ok(()),
            _ => {
                tracing::info!("LED display: {:?} (hardware not connected)", expr);
                Ok(()) // Don't fail if LED hardware isn't available
            }
        }
    }
    /// Play emotion sound effect
    ///
    /// Looks for `<emotion>.wav` in the sounds directory and plays it with
    /// `aplay`; silently does nothing when the file is absent.
    async fn play_emotion_sound(&self, emotion: &str) -> Result<()> {
        let sound_file = self.sounds_dir.join(format!("{}.wav", emotion));
        if !sound_file.exists() {
            tracing::debug!("No sound file for emotion: {}", emotion);
            return Ok(());
        }
        tokio::process::Command::new("aplay")
            .arg(sound_file)
            .output()
            .await?;
        Ok(())
    }
    /// Animate expression (e.g., blinking)
    ///
    /// Unknown animation names are silently ignored. "nod"/"shake" are
    /// log-only stubs until servo control is available.
    async fn animate(&self, animation: &str) -> Result<()> {
        match animation {
            "blink" => {
                self.set_expression(Expression::Happy).await?;
                tokio::time::sleep(std::time::Duration::from_millis(100)).await;
                // "Closed eyes" - simplified
                tokio::time::sleep(std::time::Duration::from_millis(100)).await;
                self.set_expression(Expression::Happy).await?;
            }
            "nod" => {
                // Would control servo if available
                tracing::info!("Animation: nod");
            }
            "shake" => {
                tracing::info!("Animation: shake");
            }
            "dance" => {
                // Cycle through expressions
                for expr in [Expression::Happy, Expression::Excited, Expression::Love, Expression::Happy] {
                    self.set_expression(expr).await?;
                    tokio::time::sleep(std::time::Duration::from_millis(500)).await;
                }
            }
            _ => {}
        }
        Ok(())
    }
}
#[async_trait]
impl Tool for EmoteTool {
    fn name(&self) -> &str {
        "emote"
    }
    fn description(&self) -> &str {
        "Express emotions through LED display and sounds. Use this to show the robot's \
        emotional state - happy when playing, sad when saying goodbye, excited for games, etc. \
        This makes interactions with kids more engaging!"
    }
    fn parameters_schema(&self) -> Value {
        json!({
            "type": "object",
            "properties": {
                "expression": {
                    "type": "string",
                    "enum": ["happy", "sad", "surprised", "thinking", "sleepy", "excited", "love", "angry", "confused", "wink"],
                    "description": "Facial expression to display on LED matrix"
                },
                "animation": {
                    "type": "string",
                    "enum": ["blink", "nod", "shake", "dance"],
                    "description": "Optional animation to perform"
                },
                "sound": {
                    "type": "boolean",
                    "description": "Play matching sound effect (default true)"
                },
                "duration": {
                    "type": "integer",
                    "description": "How long to hold expression in seconds (default 3)"
                }
            },
            "required": ["expression"]
        })
    }
    /// Show an expression, optionally with a sound effect and animation, then
    /// hold it for `duration` seconds (capped at 10).
    async fn execute(&self, args: Value) -> Result<ToolResult> {
        let expression_str = args["expression"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Missing 'expression' parameter"))?;
        // Consistency with the other tools (e.g. drive's unknown action):
        // report bad enum values as a failed ToolResult instead of an Err.
        let Some(expression) = Expression::from_str(expression_str) else {
            return Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some(format!("Unknown expression: {expression_str}")),
            });
        };
        let play_sound = args["sound"].as_bool().unwrap_or(true);
        let duration = args["duration"].as_u64().unwrap_or(3);
        // Set expression
        self.set_expression(expression).await?;
        // Play sound if enabled (best-effort; a missing sound must not fail the tool)
        if play_sound {
            let _ = self.play_emotion_sound(expression_str).await;
        }
        // Run animation if specified
        if let Some(animation) = args["animation"].as_str() {
            self.animate(animation).await?;
        }
        // Hold expression (capped at 10s so the agent can't stall itself)
        if duration > 0 {
            tokio::time::sleep(std::time::Duration::from_secs(duration.min(10))).await;
        }
        Ok(ToolResult {
            success: true,
            output: format!("Expressing: {} for {}s", expression_str, duration),
            error: None,
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn emote_tool_name() {
        let tool = EmoteTool::new(RobotConfig::default());
        assert_eq!(tool.name(), "emote");
    }
    #[test]
    fn expression_parsing() {
        // Parsing is case-insensitive; unknown names yield None.
        assert!(Expression::from_str("happy").is_some());
        assert!(Expression::from_str("EXCITED").is_some());
        assert!(Expression::from_str("unknown").is_none());
    }
    #[test]
    fn expression_pattern_size() {
        // Every pattern must fill the full 8x8 matrix.
        let expr = Expression::Happy;
        assert_eq!(expr.pattern().len(), 64); // 8x8
    }
    #[tokio::test]
    async fn emote_happy() {
        // duration: 0 skips the hold sleep so the test stays fast.
        let tool = EmoteTool::new(RobotConfig::default());
        let result = tool.execute(json!({
            "expression": "happy",
            "duration": 0
        })).await.unwrap();
        assert!(result.success);
    }
}

154
crates/robot-kit/src/lib.rs Normal file
View file

@ -0,0 +1,154 @@
//! # ZeroClaw Robot Kit
//!
//! A standalone robotics toolkit that integrates with ZeroClaw for AI-powered robots.
//!
//! ## Features
//!
//! - **Drive**: Omni-directional motor control (ROS2, serial, GPIO, mock)
//! - **Look**: Camera capture + vision model description (Ollama)
//! - **Listen**: Speech-to-text via Whisper.cpp
//! - **Speak**: Text-to-speech via Piper TTS
//! - **Sense**: LIDAR, motion sensors, ultrasonic distance
//! - **Emote**: LED matrix expressions and sound effects
//! - **Safety**: Independent safety monitor (collision avoidance, E-stop, watchdog)
//!
//! ## Architecture
//!
//! ```text
//! ┌─────────────────────────────────────────────────────────┐
//! │ ZeroClaw AI Brain (or any controller) │
//! │ "Move forward, find the ball, tell me what you see" │
//! └─────────────────────┬───────────────────────────────────┘
//! │ Tool calls
//! ▼
//! ┌─────────────────────────────────────────────────────────┐
//! │ zeroclaw-robot-kit │
//! │ ┌─────────┐ ┌──────┐ ┌────────┐ ┌───────┐ ┌───────┐ │
//! │ │ drive │ │ look │ │ listen │ │ speak │ │ sense │ │
//! │ └────┬────┘ └──┬───┘ └───┬────┘ └───┬───┘ └───┬───┘ │
//! │ │ │ │ │ │ │
//! │ ┌────┴─────────┴─────────┴──────────┴─────────┴────┐ │
//! │ │ SafetyMonitor (parallel) │ │
//! │ │ • Pre-move obstacle check │ │
//! │ │ • Proximity-based speed limiting │ │
//! │ │ • Bump sensor response │ │
//! │ │ • Watchdog auto-stop │ │
//! │ │ • Hardware E-stop override │ │
//! │ └──────────────────────────────────────────────────┘ │
//! └─────────────────────────────────────────────────────────┘
//! │
//! ▼
//! ┌─────────────────────────────────────────────────────────┐
//! │ Hardware: Motors, Camera, Mic, Speaker, LIDAR, LEDs │
//! └─────────────────────────────────────────────────────────┘
//! ```
//!
//! ## Quick Start
//!
//! ```rust,ignore
//! use zeroclaw_robot_kit::{RobotConfig, DriveTool, SafetyMonitor, SafeDrive};
//! use std::sync::Arc;
//!
//! #[tokio::main]
//! async fn main() {
//! // Load configuration
//! let config = RobotConfig::default();
//!
//! // Create safety monitor
//! let (safety, _rx) = SafetyMonitor::new(config.safety.clone());
//! let safety = Arc::new(safety);
//!
//! // Wrap drive with safety
//! let drive = Arc::new(DriveTool::new(config.clone()));
//! let safe_drive = SafeDrive::new(drive, safety.clone());
//!
//! // Use tools...
//! let result = safe_drive.execute(serde_json::json!({
//! "action": "forward",
//! "distance": 1.0
//! })).await;
//! }
//! ```
//!
//! ## Standalone Usage
//!
//! This crate can be used independently of ZeroClaw. It defines its own
//! `Tool` trait that is compatible with ZeroClaw's but doesn't require it.
//!
//! ## Safety
//!
//! **The AI can REQUEST movement, but SafetyMonitor ALLOWS it.**
//!
//! The safety system runs as an independent task and can override any
//! AI decision. This prevents collisions even if the LLM hallucinates.
// TODO: Re-enable once all public items are documented
// #![warn(missing_docs)]
#![allow(missing_docs)]
#![warn(clippy::all)]
// Core plumbing: configuration and the standalone Tool trait.
pub mod config;
pub mod traits;
// The six agent-facing tools.
pub mod drive;
pub mod emote;
pub mod listen;
pub mod look;
pub mod sense;
pub mod speak;
// Safety layer is feature-gated (enabled by default via the "safety" feature).
#[cfg(feature = "safety")]
pub mod safety;
#[cfg(test)]
mod tests;
// Re-exports for convenience
pub use config::RobotConfig;
pub use traits::{Tool, ToolResult, ToolSpec};
pub use drive::DriveTool;
pub use emote::EmoteTool;
pub use listen::ListenTool;
pub use look::LookTool;
pub use sense::SenseTool;
pub use speak::SpeakTool;
#[cfg(feature = "safety")]
pub use safety::{preflight_check, SafeDrive, SafetyEvent, SafetyMonitor, SensorReading};
/// Crate version
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Create all robot tools with default configuration
///
/// Returns a Vec of boxed tools ready for use with an agent.
pub fn create_tools(config: &RobotConfig) -> Vec<Box<dyn Tool>> {
    let mut tools: Vec<Box<dyn Tool>> = Vec::with_capacity(6);
    tools.push(Box::new(DriveTool::new(config.clone())));
    tools.push(Box::new(LookTool::new(config.clone())));
    tools.push(Box::new(ListenTool::new(config.clone())));
    tools.push(Box::new(SpeakTool::new(config.clone())));
    tools.push(Box::new(SenseTool::new(config.clone())));
    tools.push(Box::new(EmoteTool::new(config.clone())));
    tools
}
/// Create all robot tools with safety wrapper on drive
#[cfg(feature = "safety")]
pub fn create_safe_tools(
    config: &RobotConfig,
    safety: std::sync::Arc<SafetyMonitor>,
) -> Vec<Box<dyn Tool>> {
    // Drive is the only tool that can physically move the robot, so it is the
    // only one routed through the SafetyMonitor.
    let guarded = SafeDrive::new(
        std::sync::Arc::new(DriveTool::new(config.clone())),
        safety,
    );
    let mut tools: Vec<Box<dyn Tool>> = Vec::with_capacity(6);
    tools.push(Box::new(guarded));
    tools.push(Box::new(LookTool::new(config.clone())));
    tools.push(Box::new(ListenTool::new(config.clone())));
    tools.push(Box::new(SpeakTool::new(config.clone())));
    tools.push(Box::new(SenseTool::new(config.clone())));
    tools.push(Box::new(EmoteTool::new(config.clone())));
    tools
}

View file

@ -0,0 +1,194 @@
//! Listen Tool - Speech-to-text via Whisper.cpp
//!
//! Records audio from microphone and transcribes using local Whisper model.
//! Designed for offline operation on Raspberry Pi.
use crate::config::RobotConfig;
use crate::traits::{Tool, ToolResult};
use anyhow::Result;
use async_trait::async_trait;
use serde_json::{json, Value};
use std::path::{Path, PathBuf};
/// Speech-to-text tool backed by `arecord` (ALSA) + whisper.cpp.
pub struct ListenTool {
    config: RobotConfig,
    // Scratch directory for temporary WAV recordings.
    recordings_dir: PathBuf,
}
impl ListenTool {
    pub fn new(config: RobotConfig) -> Self {
        let recordings_dir = directories::UserDirs::new()
            .map(|d| d.home_dir().join(".zeroclaw/recordings"))
            .unwrap_or_else(|| PathBuf::from("/tmp/zeroclaw_recordings"));
        // Best-effort: recording surfaces its own error if the dir is unusable.
        let _ = std::fs::create_dir_all(&recordings_dir);
        Self { config, recordings_dir }
    }
    /// Record audio using arecord (ALSA)
    ///
    /// Records `duration_secs` of 16kHz mono 16-bit audio (the format Whisper
    /// expects) and returns the path of the new WAV file.
    async fn record_audio(&self, duration_secs: u64) -> Result<PathBuf> {
        let timestamp = chrono::Utc::now().format("%Y%m%d_%H%M%S");
        let filename = self.recordings_dir.join(format!("recording_{}.wav", timestamp));
        let device = &self.config.audio.mic_device;
        // Record using arecord (standard on Linux/Pi). The output path is
        // passed via `arg` (OsStr) instead of `to_str().unwrap()`, which would
        // panic on a non-UTF-8 home directory.
        let output = tokio::process::Command::new("arecord")
            .args([
                "-D", device,
                "-f", "S16_LE", // 16-bit signed little-endian
                "-r", "16000",  // 16kHz (Whisper expects this)
                "-c", "1",      // Mono
                "-d", &duration_secs.to_string(),
            ])
            .arg(&filename)
            .output()
            .await?;
        if !output.status.success() {
            anyhow::bail!(
                "Audio recording failed: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }
        Ok(filename)
    }
    /// Transcribe audio using whisper.cpp
    ///
    /// Runs the configured whisper.cpp binary against `audio_path` and returns
    /// the trimmed transcript (empty string when no speech was detected).
    async fn transcribe(&self, audio_path: &Path) -> Result<String> {
        let whisper_path = &self.config.audio.whisper_path;
        let model = &self.config.audio.whisper_model;
        // whisper.cpp model path (typically in ~/.zeroclaw/models/)
        let model_path = directories::UserDirs::new()
            .map(|d| d.home_dir().join(format!(".zeroclaw/models/ggml-{}.bin", model)))
            .unwrap_or_else(|| PathBuf::from(format!("/usr/local/share/whisper/ggml-{}.bin", model)));
        // Run whisper.cpp; paths go through `arg` (OsStr-safe) rather than
        // `to_str().unwrap()` to avoid panicking on non-UTF-8 paths.
        let output = tokio::process::Command::new(whisper_path)
            .arg("-m")
            .arg(&model_path)
            .arg("-f")
            .arg(audio_path)
            .arg("--no-timestamps")
            .arg("-otxt") // Output as text
            .output()
            .await?;
        if !output.status.success() {
            anyhow::bail!(
                "Whisper transcription failed: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }
        // whisper.cpp writes the transcript next to the input as <input>.txt;
        // fall back to stdout if that file never appeared.
        let txt_path = audio_path.with_extension("wav.txt");
        let transcript = tokio::fs::read_to_string(&txt_path)
            .await
            .unwrap_or_else(|_| String::from_utf8_lossy(&output.stdout).to_string());
        // Clean up temp files
        let _ = tokio::fs::remove_file(&txt_path).await;
        Ok(transcript.trim().to_string())
    }
}
#[async_trait]
impl Tool for ListenTool {
    fn name(&self) -> &str {
        "listen"
    }
    fn description(&self) -> &str {
        "Listen for speech and transcribe it to text. Records from the microphone \
        for the specified duration, then converts speech to text using Whisper."
    }
    fn parameters_schema(&self) -> Value {
        json!({
            "type": "object",
            "properties": {
                "duration": {
                    "type": "integer",
                    "description": "Recording duration in seconds. Default 5, max 30.",
                    "minimum": 1,
                    "maximum": 30
                },
                "prompt": {
                    "type": "string",
                    "description": "Optional context hint for transcription (e.g., 'The speaker is a child')"
                }
            }
        })
    }
    /// Record for `duration` seconds (clamped to 1-30, default 5) and return
    /// the transcript; all failures are reported as a failed ToolResult.
    async fn execute(&self, args: Value) -> Result<ToolResult> {
        let duration = args["duration"]
            .as_u64()
            .unwrap_or(5)
            .clamp(1, 30);
        // Record audio
        tracing::info!("Recording audio for {} seconds...", duration);
        let audio_path = match self.record_audio(duration).await {
            Ok(path) => path,
            Err(e) => {
                return Ok(ToolResult {
                    success: false,
                    output: String::new(),
                    error: Some(format!("Recording failed: {e}")),
                });
            }
        };
        // Transcribe
        tracing::info!("Transcribing audio...");
        let transcription = self.transcribe(&audio_path).await;
        // Always delete the temporary recording — previously it leaked to disk
        // whenever transcription failed.
        let _ = tokio::fs::remove_file(&audio_path).await;
        match transcription {
            Ok(transcript) if transcript.is_empty() => Ok(ToolResult {
                success: true,
                output: "(silence - no speech detected)".to_string(),
                error: None,
            }),
            Ok(transcript) => Ok(ToolResult {
                success: true,
                output: format!("I heard: \"{}\"", transcript),
                error: None,
            }),
            Err(e) => Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some(format!("Transcription failed: {e}")),
            }),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Recording/transcription require a microphone and whisper.cpp binary, so
    // only metadata and schema shape are unit-tested here.
    #[test]
    fn listen_tool_name() {
        let tool = ListenTool::new(RobotConfig::default());
        assert_eq!(tool.name(), "listen");
    }
    #[test]
    fn listen_tool_schema() {
        let tool = ListenTool::new(RobotConfig::default());
        let schema = tool.parameters_schema();
        assert!(schema["properties"]["duration"].is_object());
    }
}

View file

@ -0,0 +1,240 @@
//! Look Tool - Camera capture + vision model description
//!
//! Captures an image from the camera and optionally describes it
//! using a local vision model (LLaVA, Moondream) via Ollama.
use crate::config::RobotConfig;
use crate::traits::{Tool, ToolResult};
use anyhow::Result;
use async_trait::async_trait;
use serde_json::{json, Value};
use std::path::PathBuf;
/// Camera tool: captures frames and (optionally) describes them with a local
/// Ollama vision model.
pub struct LookTool {
    config: RobotConfig,
    // Directory where captured JPEGs are stored (kept for later inspection).
    capture_dir: PathBuf,
}
impl LookTool {
    pub fn new(config: RobotConfig) -> Self {
        let capture_dir = directories::UserDirs::new()
            .map(|d| d.home_dir().join(".zeroclaw/captures"))
            .unwrap_or_else(|| PathBuf::from("/tmp/zeroclaw_captures"));
        // Ensure capture directory exists (best-effort; capture reports its own errors)
        let _ = std::fs::create_dir_all(&capture_dir);
        Self { config, capture_dir }
    }
    /// Capture image using ffmpeg (works with most cameras)
    ///
    /// Falls back to `fswebcam` when ffmpeg fails; returns the JPEG's path.
    async fn capture_image(&self) -> Result<PathBuf> {
        let timestamp = chrono::Utc::now().format("%Y%m%d_%H%M%S");
        let filename = self.capture_dir.join(format!("capture_{}.jpg", timestamp));
        let device = &self.config.camera.device;
        let width = self.config.camera.width;
        let height = self.config.camera.height;
        // Use ffmpeg for broad camera compatibility. The output path is passed
        // via `arg` (OsStr-safe) instead of `to_str().unwrap()`, which would
        // panic on a non-UTF-8 path.
        let output = tokio::process::Command::new("ffmpeg")
            .args([
                "-f", "v4l2",
                "-video_size", &format!("{}x{}", width, height),
                "-i", device,
                "-frames:v", "1",
                "-y", // Overwrite
            ])
            .arg(&filename)
            .output()
            .await?;
        if !output.status.success() {
            // Fallback: try fswebcam (simpler, often works on Pi)
            let fallback = tokio::process::Command::new("fswebcam")
                .args([
                    "-r", &format!("{}x{}", width, height),
                    "--no-banner",
                    "-d", device,
                ])
                .arg(&filename)
                .output()
                .await?;
            if !fallback.status.success() {
                anyhow::bail!(
                    "Camera capture failed. Tried ffmpeg and fswebcam.\n\
                    ffmpeg: {}\n\
                    fswebcam: {}",
                    String::from_utf8_lossy(&output.stderr),
                    String::from_utf8_lossy(&fallback.stderr)
                );
            }
        }
        Ok(filename)
    }
    /// Describe image using vision model via Ollama
    ///
    /// Sends the image (base64) with `prompt` to the configured model and
    /// returns the generated text. Takes `&Path` (not `&PathBuf`) — existing
    /// `&PathBuf` call sites coerce automatically.
    async fn describe_image(&self, image_path: &std::path::Path, prompt: &str) -> Result<String> {
        let model = &self.config.camera.vision_model;
        if model == "none" {
            return Ok("Vision model disabled. Image captured only.".to_string());
        }
        // Read image as base64
        let image_bytes = tokio::fs::read(image_path).await?;
        let base64_image = base64::Engine::encode(
            &base64::engine::general_purpose::STANDARD,
            &image_bytes,
        );
        // Call Ollama with image
        let client = reqwest::Client::new();
        let response = client
            .post(format!("{}/api/generate", self.config.camera.ollama_url))
            .json(&json!({
                "model": model,
                "prompt": prompt,
                "images": [base64_image],
                "stream": false
            }))
            .timeout(std::time::Duration::from_secs(60))
            .send()
            .await?;
        if !response.status().is_success() {
            anyhow::bail!("Ollama vision request failed: {}", response.status());
        }
        let result: Value = response.json().await?;
        let description = result["response"]
            .as_str()
            .unwrap_or("No description generated")
            .to_string();
        Ok(description)
    }
}
#[async_trait]
impl Tool for LookTool {
    fn name(&self) -> &str {
        "look"
    }
    fn description(&self) -> &str {
        "Capture an image from the robot's camera and optionally describe what is seen. \
        Use this to observe the environment, find objects, or identify people."
    }
    fn parameters_schema(&self) -> Value {
        json!({
            "type": "object",
            "properties": {
                "action": {
                    "type": "string",
                    "enum": ["capture", "describe", "find"],
                    "description": "capture=just take photo, describe=photo+AI description, find=look for specific thing"
                },
                "prompt": {
                    "type": "string",
                    "description": "For 'describe': what to focus on. For 'find': what to look for."
                }
            },
            "required": ["action"]
        })
    }
    /// Capture a fresh frame, then dispatch on `action` (capture/describe/find).
    async fn execute(&self, args: Value) -> Result<ToolResult> {
        let action = args["action"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Missing 'action' parameter"))?;
        // Capture image (every action needs a fresh frame first)
        let image_path = match self.capture_image().await {
            Ok(path) => path,
            Err(e) => {
                return Ok(ToolResult {
                    success: false,
                    output: String::new(),
                    error: Some(format!("Camera capture failed: {e}")),
                });
            }
        };
        match action {
            // Photo only — no vision model involved.
            "capture" => Ok(ToolResult {
                success: true,
                output: format!("Image captured: {}", image_path.display()),
                error: None,
            }),
            // Photo + open-ended description from the vision model.
            "describe" => {
                let prompt = args["prompt"]
                    .as_str()
                    .unwrap_or("Describe what you see in this image. Be specific about people, objects, and the environment.");
                match self.describe_image(&image_path, prompt).await {
                    // Note: the photo stays on disk even when description fails.
                    Ok(description) => Ok(ToolResult {
                        success: true,
                        output: format!("I see: {}", description),
                        error: None,
                    }),
                    Err(e) => Ok(ToolResult {
                        success: false,
                        output: format!("Image captured at {} but description failed", image_path.display()),
                        error: Some(e.to_string()),
                    }),
                }
            }
            // Photo + targeted "is X visible, and where?" query.
            "find" => {
                let target = args["prompt"]
                    .as_str()
                    .ok_or_else(|| anyhow::anyhow!("'find' action requires 'prompt' specifying what to find"))?;
                let prompt = format!(
                    "Look at this image and determine: Is there a {} visible? \
                    If yes, describe where it is (left, right, center, near, far). \
                    If no, say 'Not found' and describe what you do see.",
                    target
                );
                match self.describe_image(&image_path, &prompt).await {
                    Ok(description) => Ok(ToolResult {
                        success: true,
                        output: description,
                        error: None,
                    }),
                    Err(e) => Ok(ToolResult {
                        success: false,
                        output: String::new(),
                        error: Some(e.to_string()),
                    }),
                }
            }
            _ => Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some(format!("Unknown action: {action}")),
            }),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The tool must register under the name "look".
    #[test]
    fn look_tool_name() {
        let look = LookTool::new(RobotConfig::default());
        assert_eq!(look.name(), "look");
    }

    /// The parameter schema must expose an "action" property object.
    #[test]
    fn look_tool_schema() {
        let look = LookTool::new(RobotConfig::default());
        assert!(look.parameters_schema()["properties"]["action"].is_object());
    }
}

View file

@ -0,0 +1,532 @@
//! Safety System - Collision avoidance, watchdogs, and emergency stops
//!
//! This module runs INDEPENDENTLY of the AI brain to ensure safety
//! even if the LLM makes bad decisions or hangs.
//!
//! ## Safety Layers
//!
//! 1. **Pre-move checks** - Verify path clear before any movement
//! 2. **Active monitoring** - Continuous sensor polling during movement
//! 3. **Reactive stops** - Instant halt on obstacle detection
//! 4. **Watchdog timer** - Auto-stop if no commands for N seconds
//! 5. **Hardware E-stop** - Physical button overrides everything
//!
//! ## Design Philosophy
//!
//! The AI can REQUEST movement, but the safety system ALLOWS it.
//! Safety always wins.
use crate::config::{RobotConfig, SafetyConfig};
use crate::traits::ToolResult;
use anyhow::Result;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::{broadcast, RwLock};
/// Safety events broadcast to all listeners
#[derive(Debug, Clone)]
pub enum SafetyEvent {
    /// Obstacle detected, movement blocked
    /// (`distance` in meters, `angle` in degrees — see `update_obstacle_distance`)
    ObstacleDetected { distance: f64, angle: u16 },
    /// Emergency stop triggered; latched until `reset_estop` is called
    EmergencyStop { reason: String },
    /// Watchdog timeout - no activity (notification only; does not block movement)
    WatchdogTimeout,
    /// Movement approved
    MovementApproved,
    /// Movement denied with reason
    MovementDenied { reason: String },
    /// Bump sensor triggered (auto-recovers after a short pause)
    BumpDetected { sensor: String },
    /// System recovered, ready to move again
    Recovered,
}
/// Real-time safety state
///
/// Shared between the monitor, its spawned recovery tasks, and external
/// callers via `Arc`; hot flags are atomics, richer values sit behind
/// async `RwLock`s.
pub struct SafetyState {
    /// Is it safe to move?
    pub can_move: AtomicBool,
    /// Emergency stop active? Overrides `can_move` when set.
    pub estop_active: AtomicBool,
    /// Last movement command timestamp (ms since epoch; 0 = no command yet)
    pub last_command_ms: AtomicU64,
    /// Current minimum distance to obstacle (meters; 999.0 = none observed)
    pub min_obstacle_distance: RwLock<f64>,
    /// Reason movement is blocked (if any)
    pub block_reason: RwLock<Option<String>>,
    /// Speed multiplier based on proximity (0.0 - 1.0)
    pub speed_limit: RwLock<f64>,
}
impl Default for SafetyState {
    /// A fresh, unblocked state: movement allowed, no E-stop latched,
    /// no obstacle recorded (999.0 sentinel), full speed permitted.
    fn default() -> Self {
        Self {
            estop_active: AtomicBool::new(false),
            can_move: AtomicBool::new(true),
            last_command_ms: AtomicU64::new(0),
            speed_limit: RwLock::new(1.0),
            min_obstacle_distance: RwLock::new(999.0),
            block_reason: RwLock::new(None),
        }
    }
}
/// Safety monitor - runs as background task
///
/// Owns the shared `SafetyState` and broadcasts `SafetyEvent`s to any
/// number of subscribers.
pub struct SafetyMonitor {
    // Thresholds and timeouts governing all safety checks
    config: SafetyConfig,
    // Shared with spawned recovery tasks and exposed via `state()`
    state: Arc<SafetyState>,
    // Broadcast side of the event stream (64-slot buffer, see `new`)
    event_tx: broadcast::Sender<SafetyEvent>,
    // Set by `shutdown()`; makes `run()` exit its loop
    shutdown: AtomicBool,
}
impl SafetyMonitor {
    /// Create a monitor plus the first receiver on its event channel.
    ///
    /// The broadcast channel buffers 64 events; receivers that lag further
    /// behind will miss older events.
    pub fn new(config: SafetyConfig) -> (Self, broadcast::Receiver<SafetyEvent>) {
        let (event_tx, event_rx) = broadcast::channel(64);
        let monitor = Self {
            config,
            state: Arc::new(SafetyState::default()),
            event_tx,
            shutdown: AtomicBool::new(false),
        };
        (monitor, event_rx)
    }
    /// Shared handle to the live safety state.
    pub fn state(&self) -> Arc<SafetyState> {
        self.state.clone()
    }
    /// Obtain an additional event receiver.
    pub fn subscribe(&self) -> broadcast::Receiver<SafetyEvent> {
        self.event_tx.subscribe()
    }
    /// Check if movement is currently allowed.
    /// E-stop always wins over the general `can_move` flag.
    pub async fn can_move(&self) -> bool {
        if self.state.estop_active.load(Ordering::SeqCst) {
            return false;
        }
        self.state.can_move.load(Ordering::SeqCst)
    }
    /// Get current speed limit multiplier (0.0 - 1.0)
    pub async fn speed_limit(&self) -> f64 {
        *self.state.speed_limit.read().await
    }
    /// Request permission to move - returns allowed speed multiplier or error
    ///
    /// Denial reasons, in precedence order: active E-stop, a standing block
    /// (`can_move` false), or an obstacle inside the configured minimum
    /// distance. Approval refreshes the watchdog timestamp and emits
    /// `MovementApproved`.
    pub async fn request_movement(&self, direction: &str, distance: f64) -> Result<f64, String> {
        // Check E-stop
        if self.state.estop_active.load(Ordering::SeqCst) {
            return Err("Emergency stop active".to_string());
        }
        // Check general movement permission
        if !self.state.can_move.load(Ordering::SeqCst) {
            let reason = self.state.block_reason.read().await;
            return Err(reason.clone().unwrap_or_else(|| "Movement blocked".to_string()));
        }
        // Check obstacle distance in movement direction
        let min_dist = *self.state.min_obstacle_distance.read().await;
        if min_dist < self.config.min_obstacle_distance {
            let msg = format!(
                "Obstacle too close: {:.2}m (min: {:.2}m)",
                min_dist, self.config.min_obstacle_distance
            );
            let _ = self.event_tx.send(SafetyEvent::MovementDenied { reason: msg.clone() });
            return Err(msg);
        }
        // Check if requested distance would hit obstacle
        if distance > min_dist - self.config.min_obstacle_distance {
            let safe_distance = (min_dist - self.config.min_obstacle_distance).max(0.0);
            if safe_distance < 0.1 {
                return Err(format!("Cannot move {}: obstacle at {:.2}m", direction, min_dist));
            }
            // Allow reduced distance
            // NOTE(review): this log is advisory only — the function returns
            // just a speed multiplier, so `safe_distance` is never actually
            // applied to the request. Confirm callers clamp distance themselves.
            tracing::warn!(
                "Reducing {} distance from {:.2}m to {:.2}m due to obstacle",
                direction, distance, safe_distance
            );
        }
        // Update last command time (feeds the watchdog check in `run`)
        let now_ms = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap()
            .as_millis() as u64;
        self.state.last_command_ms.store(now_ms, Ordering::SeqCst);
        // Calculate speed limit based on proximity
        let speed_mult = self.calculate_speed_limit(min_dist).await;
        let _ = self.event_tx.send(SafetyEvent::MovementApproved);
        Ok(speed_mult)
    }
    /// Calculate safe speed based on obstacle proximity
    ///
    /// Full speed beyond 3x the minimum distance, zero at or below the
    /// minimum, linear ramp in between. The result is also stored into
    /// `state.speed_limit` as a side effect.
    async fn calculate_speed_limit(&self, obstacle_distance: f64) -> f64 {
        let min_dist = self.config.min_obstacle_distance;
        let slow_zone = min_dist * 3.0; // Start slowing at 3x minimum distance
        let limit = if obstacle_distance >= slow_zone {
            1.0 // Full speed
        } else if obstacle_distance <= min_dist {
            0.0 // Stop
        } else {
            // Linear interpolation between stop and full speed
            (obstacle_distance - min_dist) / (slow_zone - min_dist)
        };
        *self.state.speed_limit.write().await = limit;
        limit
    }
    /// Trigger emergency stop
    /// Latches until `reset_estop` is called explicitly.
    pub async fn emergency_stop(&self, reason: &str) {
        tracing::error!("EMERGENCY STOP: {}", reason);
        self.state.estop_active.store(true, Ordering::SeqCst);
        self.state.can_move.store(false, Ordering::SeqCst);
        *self.state.block_reason.write().await = Some(reason.to_string());
        let _ = self.event_tx.send(SafetyEvent::EmergencyStop {
            reason: reason.to_string(),
        });
    }
    /// Reset emergency stop (requires explicit action)
    pub async fn reset_estop(&self) {
        tracing::info!("E-STOP RESET");
        self.state.estop_active.store(false, Ordering::SeqCst);
        self.state.can_move.store(true, Ordering::SeqCst);
        *self.state.block_reason.write().await = None;
        let _ = self.event_tx.send(SafetyEvent::Recovered);
    }
    /// Update obstacle distance (call from sensor loop)
    ///
    /// `distance` in meters, `angle` in degrees. Blocks movement (and emits
    /// `ObstacleDetected`) when inside the minimum distance; otherwise any
    /// non-E-stop block is cleared automatically.
    pub async fn update_obstacle_distance(&self, distance: f64, angle: u16) {
        // Update minimum distance tracking
        {
            let mut min_dist = self.state.min_obstacle_distance.write().await;
            // Always update to current reading (not just if closer)
            *min_dist = distance;
        }
        // Recalculate speed limit based on new distance
        self.calculate_speed_limit(distance).await;
        // Check if too close
        if distance < self.config.min_obstacle_distance {
            self.state.can_move.store(false, Ordering::SeqCst);
            *self.state.block_reason.write().await = Some(format!(
                "Obstacle at {:.2}m ({}°)",
                distance, angle
            ));
            let _ = self.event_tx.send(SafetyEvent::ObstacleDetected { distance, angle });
        } else if !self.state.estop_active.load(Ordering::SeqCst) {
            // Clear block if obstacle moved away and no E-stop
            self.state.can_move.store(true, Ordering::SeqCst);
            *self.state.block_reason.write().await = None;
        }
    }
    /// Report bump sensor triggered
    ///
    /// Blocks movement immediately, then spawns a task that re-enables it
    /// after 2 seconds unless the E-stop latched in the meantime.
    pub async fn bump_detected(&self, sensor: &str) {
        tracing::warn!("BUMP DETECTED: {}", sensor);
        // Immediate stop
        self.state.can_move.store(false, Ordering::SeqCst);
        *self.state.block_reason.write().await = Some(format!("Bump: {}", sensor));
        let _ = self.event_tx.send(SafetyEvent::BumpDetected {
            sensor: sensor.to_string(),
        });
        // Auto-recover after brief pause (robot should back up)
        tokio::spawn({
            let state = self.state.clone();
            let event_tx = self.event_tx.clone();
            async move {
                tokio::time::sleep(Duration::from_secs(2)).await;
                if !state.estop_active.load(Ordering::SeqCst) {
                    state.can_move.store(true, Ordering::SeqCst);
                    *state.block_reason.write().await = None;
                    let _ = event_tx.send(SafetyEvent::Recovered);
                }
            }
        });
    }
    /// Shutdown the monitor (makes `run` exit its loop).
    pub fn shutdown(&self) {
        self.shutdown.store(true, Ordering::SeqCst);
    }
    /// Run the safety monitor loop (call in background task)
    ///
    /// Consumes sensor readings and performs two periodic checks:
    /// - blocks movement when no sensor data has arrived for 5 seconds;
    /// - emits `WatchdogTimeout` (notification only — movement is not
    ///   blocked) when no command arrived within `max_drive_duration`.
    pub async fn run(&self, mut sensor_rx: tokio::sync::mpsc::Receiver<SensorReading>) {
        let watchdog_timeout = Duration::from_secs(self.config.max_drive_duration);
        let mut last_sensor_update = Instant::now();
        while !self.shutdown.load(Ordering::SeqCst) {
            tokio::select! {
                // Process sensor readings
                Some(reading) = sensor_rx.recv() => {
                    last_sensor_update = Instant::now();
                    match reading {
                        SensorReading::Lidar { distance, angle } => {
                            self.update_obstacle_distance(distance, angle).await;
                        }
                        SensorReading::Bump { sensor } => {
                            self.bump_detected(&sensor).await;
                        }
                        SensorReading::Estop { pressed } => {
                            if pressed {
                                self.emergency_stop("Hardware E-stop pressed").await;
                            }
                        }
                    }
                }
                // Watchdog check every second
                _ = tokio::time::sleep(Duration::from_secs(1)) => {
                    // Check for sensor timeout
                    if last_sensor_update.elapsed() > Duration::from_secs(5) {
                        tracing::warn!("Sensor data stale - blocking movement");
                        self.state.can_move.store(false, Ordering::SeqCst);
                        *self.state.block_reason.write().await =
                            Some("Sensor data stale".to_string());
                    }
                    // Check watchdog (auto-stop if no commands)
                    let last_cmd_ms = self.state.last_command_ms.load(Ordering::SeqCst);
                    if last_cmd_ms > 0 {
                        // NOTE(review): wall-clock subtraction — a backwards
                        // system-clock step would underflow `now_ms - last_cmd_ms`;
                        // confirm this is acceptable or use Instant instead.
                        let now_ms = std::time::SystemTime::now()
                            .duration_since(std::time::UNIX_EPOCH)
                            .unwrap()
                            .as_millis() as u64;
                        let elapsed = Duration::from_millis(now_ms - last_cmd_ms);
                        if elapsed > watchdog_timeout {
                            tracing::info!("Watchdog timeout - no commands for {:?}", elapsed);
                            let _ = self.event_tx.send(SafetyEvent::WatchdogTimeout);
                            // Don't block movement, just notify
                        }
                    }
                }
            }
        }
    }
}
/// Sensor readings fed to safety monitor
#[derive(Debug, Clone)]
pub enum SensorReading {
    /// LIDAR point: `distance` in meters at `angle` degrees
    Lidar { distance: f64, angle: u16 },
    /// A named bump sensor was triggered
    Bump { sensor: String },
    /// Hardware E-stop button state change
    Estop { pressed: bool },
}
/// Safety-aware drive wrapper
/// Wraps the drive tool to enforce safety limits
pub struct SafeDrive {
    // Real drive implementation; receives (possibly speed-scaled) args
    inner_drive: Arc<dyn crate::traits::Tool>,
    // Shared monitor consulted before every non-stop movement
    safety: Arc<SafetyMonitor>,
}
impl SafeDrive {
    /// Wrap an existing drive tool so every movement request is vetted by
    /// the shared `SafetyMonitor` before being forwarded.
    pub fn new(
        drive: Arc<dyn crate::traits::Tool>,
        safety: Arc<SafetyMonitor>,
    ) -> Self {
        Self { inner_drive: drive, safety }
    }
}
#[async_trait::async_trait]
impl crate::traits::Tool for SafeDrive {
    fn name(&self) -> &str {
        "drive"
    }
    fn description(&self) -> &str {
        "Move the robot (with safety limits enforced)"
    }
    /// Expose the wrapped tool's schema unchanged so callers see the same
    /// parameters whether or not the safety wrapper is present.
    fn parameters_schema(&self) -> serde_json::Value {
        self.inner_drive.parameters_schema()
    }
    /// Vet the movement with the safety monitor, then delegate.
    ///
    /// - "stop" always passes straight through: stopping must never be blocked.
    /// - Otherwise `SafetyMonitor::request_movement` either denies the move
    ///   (reported as a failed `ToolResult`, not an `Err`) or yields a speed
    ///   multiplier in 0.0..=1.0 applied to the caller's requested speed.
    async fn execute(
        &self,
        args: serde_json::Value,
    ) -> Result<ToolResult> {
        let action = args["action"].as_str().unwrap_or("unknown");
        let distance = args["distance"].as_f64().unwrap_or(0.5);
        // Always allow stop
        if action == "stop" {
            return self.inner_drive.execute(args).await;
        }
        // Request permission from safety system
        match self.safety.request_movement(action, distance).await {
            Ok(speed_mult) => {
                if speed_mult >= 1.0 {
                    // Fix: previously args["speed"] was always rewritten, which
                    // injected a 0.5 default even at full multiplier and thus
                    // overrode the inner tool's own speed default. Forward the
                    // args untouched when no reduction applies.
                    return self.inner_drive.execute(args).await;
                }
                // Scale the requested (or default 0.5) speed down.
                let mut modified_args = args.clone();
                let original_speed = args["speed"].as_f64().unwrap_or(0.5);
                modified_args["speed"] = serde_json::json!(original_speed * speed_mult);
                tracing::info!(
                    "Safety: Reducing speed to {:.0}% due to obstacle proximity",
                    speed_mult * 100.0
                );
                self.inner_drive.execute(modified_args).await
            }
            Err(reason) => {
                Ok(ToolResult {
                    success: false,
                    output: String::new(),
                    error: Some(format!("Safety blocked movement: {}", reason)),
                })
            }
        }
    }
}
/// Pre-flight safety check before any operation
///
/// Inspects the configuration for dangerous or degraded settings and
/// returns human-readable warnings (empty when everything looks sane).
/// Never fails; the `Result` wrapper matches the rest of the safety API.
pub async fn preflight_check(config: &RobotConfig) -> Result<Vec<String>> {
    let mut warnings = Vec::new();
    let mut warn = |msg: &str| warnings.push(msg.to_string());
    // Safety thresholds
    if config.safety.min_obstacle_distance < 0.1 {
        warn("WARNING: min_obstacle_distance < 0.1m is dangerously low");
    }
    if config.safety.max_drive_duration > 60 {
        warn("WARNING: max_drive_duration > 60s may allow runaway");
    }
    if config.drive.max_speed > 1.0 {
        warn("WARNING: max_speed > 1.0 m/s is very fast for indoor use");
    }
    if config.safety.estop_pin.is_none() {
        warn("WARNING: No E-stop pin configured. Recommend wiring a hardware stop button.");
    }
    // Sensor availability
    if config.sensors.lidar_type == "mock" {
        warn("NOTICE: LIDAR in mock mode - no real obstacle detection");
    }
    Ok(warnings)
}
#[cfg(test)]
mod tests {
    use super::*;
    /// A fresh state allows movement with no E-stop latched.
    #[tokio::test]
    async fn safety_state_defaults() {
        let state = SafetyState::default();
        assert!(state.can_move.load(Ordering::SeqCst));
        assert!(!state.estop_active.load(Ordering::SeqCst));
    }
    /// Reporting an obstacle inside the minimum distance must block movement.
    #[tokio::test]
    async fn safety_monitor_blocks_on_obstacle() {
        let config = SafetyConfig::default();
        let (monitor, _rx) = SafetyMonitor::new(config);
        // Initially can move
        assert!(monitor.can_move().await);
        // Report close obstacle
        monitor.update_obstacle_distance(0.2, 0).await;
        // Now blocked
        assert!(!monitor.can_move().await);
    }
    /// E-stop must block movement, latch the flag, and emit an event.
    #[tokio::test]
    async fn safety_monitor_estop() {
        let config = SafetyConfig::default();
        let (monitor, mut rx) = SafetyMonitor::new(config);
        monitor.emergency_stop("test").await;
        assert!(!monitor.can_move().await);
        assert!(monitor.state.estop_active.load(Ordering::SeqCst));
        // Check event was sent.
        // Fix: the `matches!` result was previously discarded, so a wrong
        // event kind could never fail the test — wrap it in `assert!`.
        let event = rx.try_recv().unwrap();
        assert!(matches!(event, SafetyEvent::EmergencyStop { .. }));
    }
    /// Speed limit ramps linearly between the minimum distance and 3x it.
    #[tokio::test]
    async fn speed_limit_calculation() {
        let config = SafetyConfig {
            min_obstacle_distance: 0.3,
            ..Default::default()
        };
        let (monitor, _rx) = SafetyMonitor::new(config);
        // Far obstacle = full speed
        let speed = monitor.calculate_speed_limit(2.0).await;
        assert!((speed - 1.0).abs() < 0.01);
        // Close obstacle = reduced speed
        let speed = monitor.calculate_speed_limit(0.5).await;
        assert!(speed < 1.0);
        assert!(speed > 0.0);
        // At minimum = stop
        let speed = monitor.calculate_speed_limit(0.3).await;
        assert!((speed - 0.0).abs() < 0.01);
    }
    /// Movement requests are denied while an obstacle is too close.
    #[tokio::test]
    async fn request_movement_blocked() {
        let config = SafetyConfig {
            min_obstacle_distance: 0.3,
            ..Default::default()
        };
        let (monitor, _rx) = SafetyMonitor::new(config);
        // Set obstacle too close
        monitor.update_obstacle_distance(0.2, 0).await;
        // Movement should be denied
        let result = monitor.request_movement("forward", 1.0).await;
        assert!(result.is_err());
    }
    // NOTE(review): this Default impl lives inside the test module, so it
    // only exists under cfg(test). Confirm whether production code also
    // needs a SafetyConfig::default(); if so, move it to the config module.
    impl Default for SafetyConfig {
        fn default() -> Self {
            Self {
                min_obstacle_distance: 0.3,
                slow_zone_multiplier: 3.0,
                approach_speed_limit: 0.3,
                max_drive_duration: 30,
                estop_pin: Some(4),
                bump_sensor_pins: vec![5, 6],
                bump_reverse_distance: 0.15,
                confirm_movement: false,
                predict_collisions: true,
                sensor_timeout_secs: 5,
                blind_mode_speed_limit: 0.2,
            }
        }
    }
}

View file

@ -0,0 +1,411 @@
//! Sense Tool - LIDAR, motion sensors, ultrasonic distance
//!
//! Provides environmental awareness through various sensors.
//! Supports multiple backends: direct GPIO, ROS2 topics, or mock.
use crate::config::RobotConfig;
use crate::traits::{Tool, ToolResult};
use anyhow::Result;
use async_trait::async_trait;
use serde_json::{json, Value};
use std::sync::Arc;
use tokio::sync::Mutex;
/// LIDAR scan result
///
/// Entries of 999.0 are a sentinel meaning "no reading at that angle"
/// (hardware backends initialize the array to 999.0 before filling it).
#[derive(Debug, Clone)]
pub struct LidarScan {
    /// Distances in meters, 360 values (1 per degree)
    pub ranges: Vec<f64>,
    /// Minimum distance and its angle
    pub nearest: (f64, u16),
    /// Is path clear in forward direction (±30°)?
    pub forward_clear: bool,
}
/// Motion detection result
#[derive(Debug, Clone)]
pub struct MotionResult {
    // True when at least one PIR sensor reads active
    pub detected: bool,
    // GPIO pin numbers of the sensors that fired
    pub sensors_triggered: Vec<u8>,
}
/// Environmental-awareness tool: LIDAR, PIR motion, ultrasonic distance.
pub struct SenseTool {
    // Backend selection and pin/port settings
    config: RobotConfig,
    // Most recent LIDAR scan, cached by the "scan" action
    last_scan: Arc<Mutex<Option<LidarScan>>>,
}
impl SenseTool {
    /// Build a sense tool; the last LIDAR scan is cached for later reuse.
    pub fn new(config: RobotConfig) -> Self {
        Self {
            config,
            last_scan: Arc::new(Mutex::new(None)),
        }
    }
    /// Summarize a 360-entry range array: the nearest (distance, angle)
    /// pair and whether the forward arc (±30° around 0°) is clear of the
    /// configured minimum obstacle distance.
    ///
    /// Extracted because all three LIDAR backends previously duplicated
    /// this computation verbatim.
    ///
    /// NOTE: `partial_cmp(..).unwrap()` panics on NaN ranges — current
    /// parsing never produces NaN, but keep that invariant in mind.
    fn scan_summary(&self, ranges: &[f64]) -> ((f64, u16), bool) {
        let nearest = ranges
            .iter()
            .enumerate()
            .min_by(|a, b| a.1.partial_cmp(b.1).unwrap())
            .map(|(i, &d)| (d, i as u16))
            .unwrap_or((999.0, 0));
        let forward_clear = ranges[0..30]
            .iter()
            .chain(ranges[330..360].iter())
            .all(|&d| d > self.config.safety.min_obstacle_distance);
        (nearest, forward_clear)
    }
    /// Read LIDAR scan, dispatching on the configured backend
    /// ("rplidar", "ros2", anything else = mock).
    async fn scan_lidar(&self) -> Result<LidarScan> {
        match self.config.sensors.lidar_type.as_str() {
            "rplidar" => self.scan_rplidar().await,
            "ros2" => self.scan_ros2().await,
            _ => self.scan_mock().await,
        }
    }
    /// Mock LIDAR for testing: a 3m "room" with a wall 2m ahead
    /// (350°-10°) and an object 1m away on the left (80°-100°).
    async fn scan_mock(&self) -> Result<LidarScan> {
        let mut ranges = vec![3.0; 360];
        // Wall in front at 2m
        for range in &mut ranges[350..360] {
            *range = 2.0;
        }
        for range in &mut ranges[0..10] {
            *range = 2.0;
        }
        // Object on left at 1m
        for range in &mut ranges[80..100] {
            *range = 1.0;
        }
        let (nearest, forward_clear) = self.scan_summary(&ranges);
        Ok(LidarScan {
            ranges,
            nearest,
            forward_clear,
        })
    }
    /// Read from RPLidar via serial
    ///
    /// Shells out to an external `rplidar_scan` helper (expected output:
    /// "angle,distance" per line); falls back to mock data when the helper
    /// is missing or fails. In production, use the rplidar_drv crate.
    async fn scan_rplidar(&self) -> Result<LidarScan> {
        let port = &self.config.sensors.lidar_port;
        let output = tokio::process::Command::new("rplidar_scan")
            .args(["--port", port, "--single"])
            .output()
            .await;
        match output {
            Ok(out) if out.status.success() => {
                // Angles the helper never reports keep the 999.0 sentinel.
                let mut ranges = vec![999.0; 360];
                for line in String::from_utf8_lossy(&out.stdout).lines() {
                    if let Some((angle, dist)) = line.split_once(',') {
                        if let (Ok(a), Ok(d)) = (angle.parse::<usize>(), dist.parse::<f64>()) {
                            if a < 360 {
                                ranges[a] = d;
                            }
                        }
                    }
                }
                let (nearest, forward_clear) = self.scan_summary(&ranges);
                Ok(LidarScan { ranges, nearest, forward_clear })
            }
            _ => {
                // Fallback to mock if hardware unavailable
                tracing::warn!("RPLidar unavailable, using mock data");
                self.scan_mock().await
            }
        }
    }
    /// Read from ROS2 /scan topic via `ros2 topic echo --once`.
    ///
    /// NOTE(review): the LaserScan payload is not actually parsed yet, so
    /// every range stays at the 999.0 sentinel — in production use rclrs.
    async fn scan_ros2(&self) -> Result<LidarScan> {
        let output = tokio::process::Command::new("ros2")
            .args(["topic", "echo", "--once", "/scan"])
            .output()
            .await?;
        if !output.status.success() {
            return self.scan_mock().await;
        }
        // Parse ROS2 LaserScan message (simplified)
        let stdout = String::from_utf8_lossy(&output.stdout);
        let ranges = vec![999.0; 360];
        // Very simplified parsing - in production use rclrs
        if let Some(_ranges_line) = stdout.lines().find(|l| l.contains("ranges:")) {
            // Extract array values
            // Format: ranges: [1.0, 2.0, ...]
        }
        let (nearest, forward_clear) = self.scan_summary(&ranges);
        Ok(LidarScan { ranges, nearest, forward_clear })
    }
    /// Check PIR motion sensors by reading sysfs GPIO values ("1" = motion).
    /// Pins that cannot be read are silently treated as "no motion".
    async fn check_motion(&self) -> Result<MotionResult> {
        let pins = &self.config.sensors.motion_pins;
        // In production, use rppal GPIO
        let mut triggered = Vec::new();
        for &pin in pins {
            let gpio_path = format!("/sys/class/gpio/gpio{}/value", pin);
            match tokio::fs::read_to_string(&gpio_path).await {
                Ok(value) if value.trim() == "1" => {
                    triggered.push(pin);
                }
                _ => {}
            }
        }
        Ok(MotionResult {
            detected: !triggered.is_empty(),
            sensors_triggered: triggered,
        })
    }
    /// Read ultrasonic distance (meters) via the external `hc-sr04` helper.
    /// Returns the 999.0 sentinel when no sensor is configured or the read fails.
    async fn check_distance(&self) -> Result<f64> {
        let Some((trigger, echo)) = self.config.sensors.ultrasonic_pins else {
            return Ok(999.0); // No sensor configured
        };
        // Ultrasonic requires µs-level timing, so shell out to helper
        let output = tokio::process::Command::new("hc-sr04")
            .args([
                "--trigger", &trigger.to_string(),
                "--echo", &echo.to_string(),
            ])
            .output()
            .await;
        match output {
            Ok(out) if out.status.success() => {
                let distance = String::from_utf8_lossy(&out.stdout)
                    .trim()
                    .parse::<f64>()
                    .unwrap_or(999.0);
                Ok(distance)
            }
            _ => Ok(999.0), // Sensor unavailable
        }
    }
}
#[async_trait]
impl Tool for SenseTool {
    /// Tool identifier exposed to the agent runtime.
    fn name(&self) -> &str {
        "sense"
    }
    /// LLM-facing summary of the available sensor actions.
    fn description(&self) -> &str {
        "Check robot sensors. Actions: 'scan' for LIDAR (360° obstacle map), \
        'motion' for PIR motion detection, 'distance' for ultrasonic range, \
        'all' for combined sensor report."
    }
    /// JSON schema: required `action`, optional `direction` for 'scan'.
    fn parameters_schema(&self) -> Value {
        json!({
            "type": "object",
            "properties": {
                "action": {
                    "type": "string",
                    "enum": ["scan", "motion", "distance", "all", "clear_ahead"],
                    "description": "Which sensor(s) to read"
                },
                "direction": {
                    "type": "string",
                    "enum": ["forward", "left", "right", "back", "all"],
                    "description": "For 'scan': which direction to report (default 'forward')"
                }
            },
            "required": ["action"]
        })
    }
    /// Dispatch on `action` and render the sensor readings as plain text.
    /// The 'scan' action also caches the scan into `self.last_scan`.
    async fn execute(&self, args: Value) -> Result<ToolResult> {
        let action = args["action"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Missing 'action' parameter"))?;
        match action {
            // Full LIDAR sweep, reported for one direction or all four.
            "scan" => {
                let scan = self.scan_lidar().await?;
                let direction = args["direction"].as_str().unwrap_or("forward");
                let report = match direction {
                    "forward" => {
                        // 0° is straight ahead
                        let fwd_dist = scan.ranges[0];
                        format!(
                            "Forward: {:.2}m {}. Nearest obstacle: {:.2}m at {}°",
                            fwd_dist,
                            if scan.forward_clear { "(clear)" } else { "(BLOCKED)" },
                            scan.nearest.0,
                            scan.nearest.1
                        )
                    }
                    "left" => {
                        let left_dist = scan.ranges[90];
                        format!("Left (90°): {:.2}m", left_dist)
                    }
                    "right" => {
                        let right_dist = scan.ranges[270];
                        format!("Right (270°): {:.2}m", right_dist)
                    }
                    "back" => {
                        let back_dist = scan.ranges[180];
                        format!("Back (180°): {:.2}m", back_dist)
                    }
                    "all" => {
                        format!(
                            "LIDAR 360° scan:\n\
                            - Forward (0°): {:.2}m\n\
                            - Left (90°): {:.2}m\n\
                            - Back (180°): {:.2}m\n\
                            - Right (270°): {:.2}m\n\
                            - Nearest: {:.2}m at {}°\n\
                            - Forward path: {}",
                            scan.ranges[0], scan.ranges[90], scan.ranges[180], scan.ranges[270],
                            scan.nearest.0, scan.nearest.1,
                            if scan.forward_clear { "CLEAR" } else { "BLOCKED" }
                        )
                    }
                    _ => "Unknown direction".to_string(),
                };
                // Cache scan
                *self.last_scan.lock().await = Some(scan);
                Ok(ToolResult {
                    success: true,
                    output: report,
                    error: None,
                })
            }
            // PIR motion sensor status.
            "motion" => {
                let motion = self.check_motion().await?;
                let output = if motion.detected {
                    format!("Motion DETECTED on sensors: {:?}", motion.sensors_triggered)
                } else {
                    "No motion detected".to_string()
                };
                Ok(ToolResult {
                    success: true,
                    output,
                    error: None,
                })
            }
            // Ultrasonic range; 999.0 is the "unavailable" sentinel.
            "distance" => {
                let distance = self.check_distance().await?;
                let output = if distance < 999.0 {
                    format!("Ultrasonic distance: {:.2}m", distance)
                } else {
                    "Ultrasonic sensor not available or out of range".to_string()
                };
                Ok(ToolResult {
                    success: true,
                    output,
                    error: None,
                })
            }
            // Quick boolean-style check of the forward arc.
            "clear_ahead" => {
                let scan = self.scan_lidar().await?;
                Ok(ToolResult {
                    success: true,
                    output: if scan.forward_clear {
                        format!("Path ahead is CLEAR (nearest obstacle: {:.2}m)", scan.nearest.0)
                    } else {
                        format!("Path ahead is BLOCKED (obstacle at {:.2}m)", scan.ranges[0])
                    },
                    error: None,
                })
            }
            // Combined report from all three sensor subsystems.
            "all" => {
                let scan = self.scan_lidar().await?;
                let motion = self.check_motion().await?;
                let distance = self.check_distance().await?;
                let report = format!(
                    "=== SENSOR REPORT ===\n\
                    LIDAR: nearest {:.2}m at {}°, forward {}\n\
                    Motion: {}\n\
                    Ultrasonic: {:.2}m",
                    scan.nearest.0, scan.nearest.1,
                    if scan.forward_clear { "CLEAR" } else { "BLOCKED" },
                    if motion.detected { format!("DETECTED ({:?})", motion.sensors_triggered) } else { "none".to_string() },
                    distance
                );
                Ok(ToolResult {
                    success: true,
                    output: report,
                    error: None,
                })
            }
            _ => Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some(format!("Unknown action: {action}")),
            }),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The tool must register under the name "sense".
    #[test]
    fn sense_tool_name() {
        let sense = SenseTool::new(RobotConfig::default());
        assert_eq!(sense.name(), "sense");
    }

    /// The mock LIDAR backend should produce a full-direction report.
    #[tokio::test]
    async fn sense_scan_mock() {
        let sense = SenseTool::new(RobotConfig::default());
        let report = sense
            .execute(json!({"action": "scan", "direction": "all"}))
            .await
            .unwrap();
        assert!(report.success);
        assert!(report.output.contains("Forward"));
    }

    /// `clear_ahead` succeeds against the mock scan.
    #[tokio::test]
    async fn sense_clear_ahead() {
        let sense = SenseTool::new(RobotConfig::default());
        let report = sense.execute(json!({"action": "clear_ahead"})).await.unwrap();
        assert!(report.success);
    }
}

View file

@ -0,0 +1,229 @@
//! Speak Tool - Text-to-speech via Piper
//!
//! Converts text to speech using Piper TTS (fast, offline, runs on Pi).
//! Plays audio through the speaker.
use crate::config::RobotConfig;
use crate::traits::{Tool, ToolResult};
use anyhow::Result;
use async_trait::async_trait;
use serde_json::{json, Value};
use std::path::PathBuf;
/// Text-to-speech tool backed by Piper, with simple sound-effect playback.
pub struct SpeakTool {
    // Piper paths, voice name, and speaker device
    config: RobotConfig,
    // Directory where generated WAV files are written
    audio_dir: PathBuf,
}
impl SpeakTool {
    /// Build the tool and ensure the TTS cache directory exists
    /// (~/.zeroclaw/tts_cache, falling back to /tmp/zeroclaw_tts).
    /// A create_dir_all failure is deliberately ignored here; it will
    /// surface later when Piper tries to write the WAV file.
    pub fn new(config: RobotConfig) -> Self {
        let audio_dir = directories::UserDirs::new()
            .map(|d| d.home_dir().join(".zeroclaw/tts_cache"))
            .unwrap_or_else(|| PathBuf::from("/tmp/zeroclaw_tts"));
        let _ = std::fs::create_dir_all(&audio_dir);
        Self { config, audio_dir }
    }
    /// Generate speech using Piper and play it
    ///
    /// Pipeline: resolve the voice model path, run Piper with the text on
    /// stdin to produce speech.wav, then play it via `aplay` (ALSA) with a
    /// `paplay` (PulseAudio) fallback.
    ///
    /// NOTE(review): the `to_str().unwrap()` calls below panic on non-UTF-8
    /// paths, and the combined playback-failure message only includes
    /// aplay's stderr (not paplay's) — confirm both are acceptable.
    async fn speak(&self, text: &str, emotion: &str) -> Result<()> {
        let piper_path = &self.config.audio.piper_path;
        let voice = &self.config.audio.piper_voice;
        let speaker_device = &self.config.audio.speaker_device;
        // Model path: ~/.zeroclaw/models/piper/<voice>.onnx, with a
        // system-wide fallback when no home directory is available.
        let model_path = directories::UserDirs::new()
            .map(|d| d.home_dir().join(format!(".zeroclaw/models/piper/{}.onnx", voice)))
            .unwrap_or_else(|| PathBuf::from(format!("/usr/local/share/piper/{}.onnx", voice)));
        // Adjust text based on emotion (simple SSML-like modifications).
        // Only "excited" changes the text today (trailing '.' becomes '!');
        // the other arms are placeholder hooks.
        let processed_text = match emotion {
            "excited" => format!("{}!", text.trim_end_matches('.')),
            "sad" => text.to_string(), // Piper doesn't support prosody, but we keep the hook
            "whisper" => text.to_string(),
            _ => text.to_string(),
        };
        // Generate WAV file
        let output_path = self.audio_dir.join("speech.wav");
        // Pipe text to piper, output to WAV
        let mut piper = tokio::process::Command::new(piper_path)
            .args([
                "--model", model_path.to_str().unwrap(),
                "--output_file", output_path.to_str().unwrap(),
            ])
            .stdin(std::process::Stdio::piped())
            .spawn()?;
        // Write text to stdin
        if let Some(mut stdin) = piper.stdin.take() {
            use tokio::io::AsyncWriteExt;
            stdin.write_all(processed_text.as_bytes()).await?;
            // `stdin` is dropped at the end of this block, closing the pipe
            // so Piper sees EOF and can finish synthesis.
        }
        let status = piper.wait().await?;
        if !status.success() {
            anyhow::bail!("Piper TTS failed");
        }
        // Play audio using aplay
        let play_result = tokio::process::Command::new("aplay")
            .args([
                "-D", speaker_device,
                output_path.to_str().unwrap(),
            ])
            .output()
            .await?;
        if !play_result.status.success() {
            // Fallback: try paplay (PulseAudio)
            let fallback = tokio::process::Command::new("paplay")
                .arg(output_path.to_str().unwrap())
                .output()
                .await?;
            if !fallback.status.success() {
                anyhow::bail!(
                    "Audio playback failed. Tried aplay and paplay.\n{}",
                    String::from_utf8_lossy(&play_result.stderr)
                );
            }
        }
        Ok(())
    }
    /// Play a sound effect
    ///
    /// Looks for `<sound>.wav` under ~/.zeroclaw/sounds (fallback
    /// /usr/local/share/zeroclaw/sounds) and plays it via aplay.
    async fn play_sound(&self, sound: &str) -> Result<()> {
        let sounds_dir = directories::UserDirs::new()
            .map(|d| d.home_dir().join(".zeroclaw/sounds"))
            .unwrap_or_else(|| PathBuf::from("/usr/local/share/zeroclaw/sounds"));
        let sound_file = sounds_dir.join(format!("{}.wav", sound));
        if !sound_file.exists() {
            anyhow::bail!("Sound file not found: {}", sound_file.display());
        }
        let speaker_device = &self.config.audio.speaker_device;
        let output = tokio::process::Command::new("aplay")
            .args(["-D", speaker_device, sound_file.to_str().unwrap()])
            .output()
            .await?;
        if !output.status.success() {
            anyhow::bail!("Sound playback failed");
        }
        Ok(())
    }
}
#[async_trait]
impl Tool for SpeakTool {
    /// Tool identifier exposed to the agent runtime.
    fn name(&self) -> &str {
        "speak"
    }
    /// LLM-facing summary of the tool's purpose.
    fn description(&self) -> &str {
        "Speak text out loud using text-to-speech. The robot will say the given text \
        through its speaker. Can also play sound effects like 'beep', 'chime', 'laugh'."
    }
    /// JSON schema: either `text` (with optional `emotion`) to speak,
    /// or `sound` to play a named effect instead.
    fn parameters_schema(&self) -> Value {
        json!({
            "type": "object",
            "properties": {
                "text": {
                    "type": "string",
                    "description": "The text to speak out loud"
                },
                "emotion": {
                    "type": "string",
                    "enum": ["neutral", "excited", "sad", "whisper"],
                    "description": "Emotional tone. Default 'neutral'."
                },
                "sound": {
                    "type": "string",
                    "description": "Play a sound effect instead of speaking (e.g., 'beep', 'chime', 'laugh', 'alert')"
                }
            }
        })
    }
    /// Play a sound effect when 'sound' is given; otherwise speak 'text'.
    /// Failures are reported as failed `ToolResult`s rather than `Err`s.
    async fn execute(&self, args: Value) -> Result<ToolResult> {
        // Check if playing a sound effect
        if let Some(sound) = args["sound"].as_str() {
            return match self.play_sound(sound).await {
                Ok(()) => Ok(ToolResult {
                    success: true,
                    output: format!("Played sound: {}", sound),
                    error: None,
                }),
                Err(e) => Ok(ToolResult {
                    success: false,
                    output: String::new(),
                    error: Some(format!("Sound playback failed: {e}")),
                }),
            };
        }
        // Speak text
        let text = args["text"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Missing 'text' parameter (or use 'sound' for effects)"))?;
        if text.is_empty() {
            return Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some("Cannot speak empty text".to_string()),
            });
        }
        // Limit text length for safety.
        // Fix: count characters, not bytes — `str::len()` is the UTF-8 byte
        // length, so multi-byte text was rejected well before 1000 characters
        // while the error message promised a character limit.
        if text.chars().count() > 1000 {
            return Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some("Text too long (max 1000 characters)".to_string()),
            });
        }
        let emotion = args["emotion"].as_str().unwrap_or("neutral");
        match self.speak(text, emotion).await {
            Ok(()) => Ok(ToolResult {
                success: true,
                output: format!("Said: \"{}\"", text),
                error: None,
            }),
            Err(e) => Ok(ToolResult {
                success: false,
                output: String::new(),
                error: Some(format!("Speech failed: {e}")),
            }),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The tool must register under the name "speak".
    #[test]
    fn speak_tool_name() {
        let speak = SpeakTool::new(RobotConfig::default());
        assert_eq!(speak.name(), "speak");
    }

    /// Schema must expose both the text and emotion parameters.
    #[test]
    fn speak_tool_schema() {
        let schema = SpeakTool::new(RobotConfig::default()).parameters_schema();
        assert!(schema["properties"]["text"].is_object());
        assert!(schema["properties"]["emotion"].is_object());
    }
}

View file

@ -0,0 +1,536 @@
//! Integration tests for robot kit
//!
//! These tests verify the robot kit works correctly in various configurations:
//! - Mock mode (no hardware) - for CI/development
//! - Hardware simulation - for testing real scenarios
//! - Live hardware - for on-device validation
#[cfg(test)]
mod unit_tests {
    use crate::config::RobotConfig;
    use crate::traits::{Tool, ToolResult};
    use crate::{DriveTool, EmoteTool, ListenTool, LookTool, SenseTool, SpeakTool};
    use serde_json::json;

    // =========================================================================
    // TOOL TRAIT COMPLIANCE
    // =========================================================================

    /// Build one instance of every tool from the given config.
    ///
    /// The trait-compliance tests below all iterate the same six tools; the
    /// list used to be duplicated verbatim in each test. Keeping it in one
    /// place means a newly added tool only needs to be registered here to be
    /// covered by every compliance test.
    fn all_tools(config: &RobotConfig) -> Vec<Box<dyn Tool>> {
        vec![
            Box::new(DriveTool::new(config.clone())),
            Box::new(LookTool::new(config.clone())),
            Box::new(ListenTool::new(config.clone())),
            Box::new(SpeakTool::new(config.clone())),
            Box::new(SenseTool::new(config.clone())),
            Box::new(EmoteTool::new(config.clone())),
        ]
    }

    /// Names must be non-empty and safe for LLM function-call identifiers.
    #[test]
    fn all_tools_have_valid_names() {
        for tool in &all_tools(&RobotConfig::default()) {
            assert!(!tool.name().is_empty(), "Tool name should not be empty");
            assert!(
                tool.name().chars().all(|c| c.is_alphanumeric() || c == '_'),
                "Tool name '{}' should be alphanumeric",
                tool.name()
            );
        }
    }

    /// A description shorter than ~10 chars is useless to the model.
    #[test]
    fn all_tools_have_descriptions() {
        for tool in &all_tools(&RobotConfig::default()) {
            assert!(
                tool.description().len() > 10,
                "Tool '{}' needs a meaningful description",
                tool.name()
            );
        }
    }

    /// Every schema must be a JSON object with a top-level "type" field.
    #[test]
    fn all_tools_have_valid_schemas() {
        for tool in &all_tools(&RobotConfig::default()) {
            let schema = tool.parameters_schema();
            assert!(
                schema.is_object(),
                "Tool '{}' schema should be an object",
                tool.name()
            );
            assert!(
                schema.get("type").is_some(),
                "Tool '{}' schema should have 'type' field",
                tool.name()
            );
        }
    }

    // =========================================================================
    // DRIVE TOOL TESTS
    // =========================================================================

    #[tokio::test]
    async fn drive_forward_mock() {
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool
            .execute(json!({"action": "forward", "distance": 1.0}))
            .await
            .unwrap();
        assert!(result.success);
        assert!(result.output.contains("forward"));
    }

    /// Stop is the safety-critical action; it must never be refused.
    #[tokio::test]
    async fn drive_stop_always_succeeds() {
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool.execute(json!({"action": "stop"})).await.unwrap();
        assert!(result.success);
        assert!(result.output.to_lowercase().contains("stop"));
    }

    #[tokio::test]
    async fn drive_strafe_left() {
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool
            .execute(json!({"action": "left", "distance": 0.5}))
            .await
            .unwrap();
        assert!(result.success);
    }

    /// For rotate actions, "distance" is interpreted as degrees.
    #[tokio::test]
    async fn drive_rotate() {
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool
            .execute(json!({"action": "rotate_left", "distance": 90.0}))
            .await
            .unwrap();
        assert!(result.success);
    }

    /// Unknown actions fail soft: Ok(ToolResult) with success=false.
    #[tokio::test]
    async fn drive_invalid_action_fails() {
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool.execute(json!({"action": "fly"})).await.unwrap();
        assert!(!result.success);
        assert!(result.error.is_some());
    }

    /// A missing required argument is a hard error (Err), not a soft failure.
    #[tokio::test]
    async fn drive_missing_action_fails() {
        let tool = DriveTool::new(RobotConfig::default());
        let result = tool.execute(json!({})).await;
        assert!(result.is_err());
    }

    #[tokio::test]
    async fn drive_speed_clamped() {
        let tool = DriveTool::new(RobotConfig::default());
        // Speed > 1.0 should be clamped, not rejected.
        let result = tool
            .execute(json!({"action": "forward", "speed": 5.0}))
            .await
            .unwrap();
        assert!(result.success);
    }

    // =========================================================================
    // SENSE TOOL TESTS
    // =========================================================================

    /// A full scan must report all three directions.
    #[tokio::test]
    async fn sense_scan_returns_distances() {
        let tool = SenseTool::new(RobotConfig::default());
        let result = tool
            .execute(json!({"action": "scan", "direction": "all"}))
            .await
            .unwrap();
        assert!(result.success);
        assert!(result.output.contains("Forward"));
        assert!(result.output.contains("Left"));
        assert!(result.output.contains("Right"));
    }

    #[tokio::test]
    async fn sense_clear_ahead_check() {
        let tool = SenseTool::new(RobotConfig::default());
        let result = tool
            .execute(json!({"action": "clear_ahead"}))
            .await
            .unwrap();
        assert!(result.success);
        // Mock should report clear or blocked
        assert!(
            result.output.contains("CLEAR") || result.output.contains("BLOCKED")
        );
    }

    #[tokio::test]
    async fn sense_motion_detection() {
        let tool = SenseTool::new(RobotConfig::default());
        let result = tool.execute(json!({"action": "motion"})).await.unwrap();
        assert!(result.success);
    }

    // =========================================================================
    // EMOTE TOOL TESTS
    // =========================================================================

    #[tokio::test]
    async fn emote_happy() {
        let tool = EmoteTool::new(RobotConfig::default());
        let result = tool
            .execute(json!({"expression": "happy", "duration": 0}))
            .await
            .unwrap();
        assert!(result.success);
    }

    /// Every expression in the documented set must be accepted.
    #[tokio::test]
    async fn emote_all_expressions_valid() {
        let tool = EmoteTool::new(RobotConfig::default());
        let expressions = [
            "happy", "sad", "surprised", "thinking", "sleepy", "excited", "love", "angry",
            "confused", "wink",
        ];
        for expr in expressions {
            let result = tool
                .execute(json!({"expression": expr, "duration": 0}))
                .await
                .unwrap();
            assert!(result.success, "Expression '{}' should succeed", expr);
        }
    }

    /// Unlike drive, an unknown expression is a hard error (Err).
    #[tokio::test]
    async fn emote_invalid_expression_fails() {
        let tool = EmoteTool::new(RobotConfig::default());
        let result = tool
            .execute(json!({"expression": "nonexistent"}))
            .await;
        assert!(result.is_err());
    }

    // =========================================================================
    // CONFIG TESTS
    // =========================================================================

    /// Safety-relevant defaults should be conservative out of the box.
    #[test]
    fn config_default_is_safe() {
        let config = RobotConfig::default();
        assert!(config.safety.min_obstacle_distance >= 0.2);
        assert!(config.safety.max_drive_duration <= 60);
        assert!(config.drive.max_speed <= 1.0);
        assert!(config.safety.blind_mode_speed_limit <= 0.3);
    }

    #[test]
    fn config_serializes_to_toml() {
        let config = RobotConfig::default();
        let toml = toml::to_string(&config);
        assert!(toml.is_ok());
    }

    /// Serialize -> parse must preserve the fields we spot-check.
    #[test]
    fn config_roundtrips() {
        let config = RobotConfig::default();
        let toml = toml::to_string(&config).unwrap();
        let parsed: RobotConfig = toml::from_str(&toml).unwrap();
        assert_eq!(config.drive.max_speed, parsed.drive.max_speed);
        assert_eq!(
            config.safety.min_obstacle_distance,
            parsed.safety.min_obstacle_distance
        );
    }
}
#[cfg(test)]
#[cfg(feature = "safety")]
mod safety_tests {
    use crate::config::SafetyConfig;
    use crate::safety::{SafetyEvent, SafetyMonitor, SensorReading};
    use std::sync::atomic::Ordering;

    /// Strict config shared by all tests here: hard stop inside 0.3 m,
    /// slow zone out to 0.9 m (0.3 * 3.0), no GPIO pins wired up.
    fn test_safety_config() -> SafetyConfig {
        SafetyConfig {
            min_obstacle_distance: 0.3,
            slow_zone_multiplier: 3.0,
            approach_speed_limit: 0.3,
            max_drive_duration: 30,
            estop_pin: None,
            bump_sensor_pins: vec![],
            bump_reverse_distance: 0.15,
            confirm_movement: false,
            predict_collisions: true,
            sensor_timeout_secs: 5,
            blind_mode_speed_limit: 0.2,
        }
    }

    /// A freshly built monitor starts out permitting movement.
    #[tokio::test]
    async fn safety_initially_allows_movement() {
        let (sm, _rx) = SafetyMonitor::new(test_safety_config());
        assert!(sm.can_move().await);
    }

    /// An obstacle inside the 0.3 m threshold vetoes movement.
    #[tokio::test]
    async fn safety_blocks_on_close_obstacle() {
        let (sm, _rx) = SafetyMonitor::new(test_safety_config());
        sm.update_obstacle_distance(0.2, 0).await;
        assert!(!sm.can_move().await);
    }

    /// The veto lifts once the obstacle reading moves back out of range.
    #[tokio::test]
    async fn safety_allows_after_obstacle_clears() {
        let (sm, _rx) = SafetyMonitor::new(test_safety_config());
        sm.update_obstacle_distance(0.2, 0).await;
        assert!(!sm.can_move().await);
        sm.update_obstacle_distance(1.0, 0).await;
        assert!(sm.can_move().await);
    }

    /// E-stop blocks movement, latches the atomic flag, and broadcasts.
    #[tokio::test]
    async fn safety_estop_blocks_everything() {
        let (sm, mut rx) = SafetyMonitor::new(test_safety_config());
        sm.emergency_stop("test").await;
        assert!(!sm.can_move().await);
        assert!(sm.state().estop_active.load(Ordering::SeqCst));
        let event = rx.try_recv().unwrap();
        assert!(matches!(event, SafetyEvent::EmergencyStop { .. }));
    }

    /// Resetting the e-stop restores normal operation.
    #[tokio::test]
    async fn safety_estop_reset() {
        let (sm, _rx) = SafetyMonitor::new(test_safety_config());
        sm.emergency_stop("test").await;
        assert!(!sm.can_move().await);
        sm.reset_estop().await;
        assert!(sm.can_move().await);
    }

    /// Outside the slow zone the speed limit is effectively 1.0.
    #[tokio::test]
    async fn safety_speed_limit_far() {
        let (sm, _rx) = SafetyMonitor::new(test_safety_config());
        sm.update_obstacle_distance(2.0, 0).await;
        let limit = sm.speed_limit().await;
        assert!((limit - 1.0).abs() < 0.01);
    }

    /// Inside the slow zone (0.5 m < 0.9 m) the limit is reduced but nonzero.
    #[tokio::test]
    async fn safety_speed_limit_approaching() {
        let (sm, _rx) = SafetyMonitor::new(test_safety_config());
        sm.update_obstacle_distance(0.5, 0).await;
        let limit = sm.speed_limit().await;
        assert!(limit < 1.0);
        assert!(limit > 0.0);
    }

    /// With a distant obstacle a forward request is approved.
    #[tokio::test]
    async fn safety_movement_request_approved() {
        let (sm, _rx) = SafetyMonitor::new(test_safety_config());
        sm.update_obstacle_distance(2.0, 0).await;
        assert!(sm.request_movement("forward", 1.0).await.is_ok());
    }

    /// With a close obstacle the same request is denied.
    #[tokio::test]
    async fn safety_movement_request_denied_close() {
        let (sm, _rx) = SafetyMonitor::new(test_safety_config());
        sm.update_obstacle_distance(0.2, 0).await;
        assert!(sm.request_movement("forward", 1.0).await.is_err());
    }

    /// A bump sensor hit stops the robot and broadcasts an event.
    #[tokio::test]
    async fn safety_bump_triggers_stop() {
        let (sm, mut rx) = SafetyMonitor::new(test_safety_config());
        sm.bump_detected("front_left").await;
        assert!(!sm.can_move().await);
        let event = rx.try_recv().unwrap();
        assert!(matches!(event, SafetyEvent::BumpDetected { .. }));
    }
}
#[cfg(test)]
mod integration_tests {
    use crate::config::RobotConfig;
    use crate::traits::Tool;
    use crate::{create_tools, DriveTool, SenseTool};
    use serde_json::json;

    /// End-to-end "sense, then move, then stop" sequence in mock mode.
    #[tokio::test]
    async fn drive_then_sense_workflow() {
        let cfg = RobotConfig::default();
        let drive = DriveTool::new(cfg.clone());
        let sense = SenseTool::new(cfg);

        // 1. Probe the space ahead.
        let scan = sense
            .execute(json!({"action": "clear_ahead"}))
            .await
            .unwrap();
        assert!(scan.success);

        // 2. Only advance when the mock reports a clear path.
        if scan.output.contains("CLEAR") {
            let moved = drive
                .execute(json!({"action": "forward", "distance": 0.5}))
                .await
                .unwrap();
            assert!(moved.success);
            // Drive tool rate-limits commands (1 second cooldown); wait it
            // out so the stop command below is not rejected.
            tokio::time::sleep(std::time::Duration::from_millis(1100)).await;
        }

        // 3. Always finish with a stop.
        let stop = drive.execute(json!({"action": "stop"})).await.unwrap();
        assert!(stop.success);
    }

    /// `create_tools` must hand back the full six-tool set by name.
    #[tokio::test]
    async fn create_tools_returns_all_tools() {
        let tools = create_tools(&RobotConfig::default());
        assert_eq!(tools.len(), 6);
        let names: Vec<&str> = tools.iter().map(|t| t.name()).collect();
        for expected in ["drive", "look", "listen", "speak", "sense", "emote"] {
            assert!(names.contains(&expected));
        }
    }

    /// With the safety feature enabled, a close obstacle must veto a
    /// forward command routed through SafeDrive.
    #[cfg(feature = "safety")]
    #[tokio::test]
    async fn safe_drive_blocks_on_obstacle() {
        use crate::safety::SafetyMonitor;
        use crate::SafeDrive;
        use std::sync::Arc;

        let cfg = RobotConfig::default();
        let (monitor, _rx) = SafetyMonitor::new(cfg.safety.clone());
        let safety = Arc::new(monitor);

        // An obstacle at 0.2 m sits inside the default minimum distance.
        safety.update_obstacle_distance(0.2, 0).await;

        let safe_drive = SafeDrive::new(Arc::new(DriveTool::new(cfg)), safety);
        let denied = safe_drive
            .execute(json!({"action": "forward", "distance": 1.0}))
            .await
            .unwrap();
        assert!(!denied.success);
        assert!(denied.error.unwrap().contains("Safety"));
    }
}

View file

@ -0,0 +1,123 @@
//! Tool trait definition
//!
//! This defines the interface that all robot tools implement.
//! It is compatible with ZeroClaw's Tool trait but standalone.
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde_json::Value;
/// Result of a tool execution.
///
/// On success, `output` carries the human-readable result; on failure,
/// `error` is populated. `success` is the authoritative flag callers
/// branch on. Derives `PartialEq`/`Eq` so tests can compare results
/// directly instead of field-by-field.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ToolResult {
    /// Whether the tool executed successfully
    pub success: bool,
    /// Output from the tool (human-readable)
    pub output: String,
    /// Error message if failed
    pub error: Option<String>,
}
impl ToolResult {
    /// Create a successful result carrying a human-readable `output`.
    #[must_use]
    pub fn success(output: impl Into<String>) -> Self {
        Self {
            success: true,
            output: output.into(),
            error: None,
        }
    }

    /// Create a failed result with no output and the given `error` message.
    #[must_use]
    pub fn error(error: impl Into<String>) -> Self {
        Self {
            success: false,
            output: String::new(),
            error: Some(error.into()),
        }
    }

    /// Create a failed result that still carries the partial `output`
    /// produced before the failure occurred.
    #[must_use]
    pub fn partial(output: impl Into<String>, error: impl Into<String>) -> Self {
        Self {
            success: false,
            output: output.into(),
            error: Some(error.into()),
        }
    }
}
/// Description of a tool for LLM function calling.
///
/// Built from a [`Tool`] implementation via [`Tool::spec`]; bundles the
/// name, description, and parameter schema the model needs to decide
/// when and how to invoke the tool.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolSpec {
    /// Tool name (used in function calls)
    pub name: String,
    /// Human-readable description
    pub description: String,
    /// JSON Schema for parameters
    pub parameters: Value,
}
/// Core tool trait.
///
/// Implementing this trait exposes a new capability that an AI agent can
/// invoke against the robot hardware.
///
/// # Example
///
/// ```rust,ignore
/// use zeroclaw_robot_kit::{Tool, ToolResult};
/// use async_trait::async_trait;
/// use serde_json::{json, Value};
///
/// pub struct BeepTool;
///
/// #[async_trait]
/// impl Tool for BeepTool {
///     fn name(&self) -> &str { "beep" }
///
///     fn description(&self) -> &str { "Make a beep sound" }
///
///     fn parameters_schema(&self) -> Value {
///         json!({
///             "type": "object",
///             "properties": {
///                 "frequency": { "type": "number", "description": "Hz" }
///             }
///         })
///     }
///
///     async fn execute(&self, args: Value) -> anyhow::Result<ToolResult> {
///         let freq = args["frequency"].as_f64().unwrap_or(440.0);
///         // Play beep...
///         Ok(ToolResult::success(format!("Beeped at {}Hz", freq)))
///     }
/// }
/// ```
#[async_trait]
pub trait Tool: Send + Sync {
    /// Tool name (used in LLM function calling)
    fn name(&self) -> &str;
    /// Human-readable description of what this tool does
    fn description(&self) -> &str;
    /// JSON Schema describing the tool's parameters
    ///
    /// This is used by the LLM to understand how to call the tool.
    fn parameters_schema(&self) -> Value;
    /// Execute the tool with the given arguments
    ///
    /// Arguments are passed as JSON matching the parameters_schema.
    async fn execute(&self, args: Value) -> anyhow::Result<ToolResult>;
    /// Get the full specification for LLM registration.
    ///
    /// Default implementation assembles the spec from the three accessors
    /// above; implementors rarely need to override it.
    fn spec(&self) -> ToolSpec {
        let name = self.name().to_string();
        let description = self.description().to_string();
        let parameters = self.parameters_schema();
        ToolSpec {
            name,
            description,
            parameters,
        }
    }
}