diff --git a/README.md b/README.md index bb46349..2fc9ffb 100644 --- a/README.md +++ b/README.md @@ -186,6 +186,9 @@ zeroclaw channel bind-telegram 123456789 # Get integration setup details zeroclaw integrations info Telegram +# Note: Channels (Telegram, Discord, Slack) require the daemon to be running +# zeroclaw daemon + # Manage background service zeroclaw service install zeroclaw service status @@ -431,6 +434,12 @@ default_provider = "openrouter" default_model = "anthropic/claude-sonnet-4-20250514" default_temperature = 0.7 +# Custom OpenAI-compatible endpoint +# default_provider = "custom:https://your-api.com" + +# Custom Anthropic-compatible endpoint +# default_provider = "anthropic-custom:https://your-api.com" + [memory] backend = "sqlite" # "sqlite", "lucid", "markdown", "none" auto_save = true @@ -531,6 +540,10 @@ api_url = "https://ollama.com" api_key = "ollama_api_key_here" ``` +### Custom Provider Endpoints + +For detailed configuration of custom OpenAI-compatible and Anthropic-compatible endpoints, see [docs/custom-providers.md](docs/custom-providers.md). + ## Python Companion Package (`zeroclaw-tools`) For LLM providers with inconsistent native tool calling (e.g., GLM-5/Zhipu), ZeroClaw ships a Python companion package with **LangGraph-based tool calling** for guaranteed consistency: diff --git a/docs/custom-providers.md b/docs/custom-providers.md new file mode 100644 index 0000000..c0a8809 --- /dev/null +++ b/docs/custom-providers.md @@ -0,0 +1,99 @@ +# Custom Provider Configuration + +ZeroClaw supports custom API endpoints for both OpenAI-compatible and Anthropic-compatible providers. 
+ +## Provider Types + +### OpenAI-Compatible Endpoints (`custom:`) + +For services that implement the OpenAI API format: + +```toml +default_provider = "custom:https://your-api.com" +api_key = "your-api-key" +default_model = "your-model-name" +``` + +### Anthropic-Compatible Endpoints (`anthropic-custom:`) + +For services that implement the Anthropic API format: + +```toml +default_provider = "anthropic-custom:https://your-api.com" +api_key = "your-api-key" +default_model = "your-model-name" +``` + +## Configuration Methods + +### Config File + +Edit `~/.zeroclaw/config.toml`: + +```toml +api_key = "your-api-key" +default_provider = "anthropic-custom:https://api.example.com" +default_model = "claude-sonnet-4" +``` + +### Environment Variables + +```bash +export ANTHROPIC_API_KEY="your-api-key" +zeroclaw agent +``` + +## Testing Configuration + +Verify your custom endpoint: + +```bash +# Interactive mode +zeroclaw agent + +# Single message test +zeroclaw agent -m "test message" +``` + +## Troubleshooting + +### Authentication Errors + +- Verify API key is correct +- Check endpoint URL format (must include `https://`) +- Ensure endpoint is accessible from your network + +### Model Not Found + +- Confirm model name matches provider's available models +- Check provider documentation for exact model identifiers + +### Connection Issues + +- Test endpoint accessibility: `curl -I https://your-api.com` +- Verify firewall/proxy settings +- Check provider status page + +## Examples + +### Local LLM Server + +```toml +default_provider = "custom:http://localhost:8080" +default_model = "local-model" +``` + +### Corporate Proxy + +```toml +default_provider = "anthropic-custom:https://llm-proxy.corp.example.com" +api_key = "internal-token" +``` + +### Cloud Provider Gateway + +```toml +default_provider = "custom:https://gateway.cloud-provider.com/v1" +api_key = "gateway-api-key" +default_model = "gpt-4" +```