Initial Slug Code Rust implementation

Core features:
- OpenAI-compatible streaming provider (vLLM, Ollama, OpenAI, etc.)
- Agent loop with tool use (bash, read, write, edit, glob, grep)
- Permission system: ask/yolo/sandbox/allowEdits + glob patterns
- SLUG.md hierarchy loaded every turn (CLAUDE.md equivalent)
- Session persistence with --continue/--resume/--fork-session
- Hook system: 5 lifecycle events, command + prompt types
- Compaction: ToolResultTrim/Truncate strategies, /compact command
- Config via TOML, CLI args, env vars

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Bryan Ramos 2026-03-31 14:23:04 -04:00
parent f2e1d53e37
commit b8bf9029fe
21 changed files with 6280 additions and 0 deletions

169
src/provider/mod.rs Normal file
View file

@@ -0,0 +1,169 @@
mod types;
use anyhow::Result;
use futures::Stream;
use std::pin::Pin;
pub use types::*;
use crate::config::Config;
use crate::tools::ToolDefinition;
/// Trait for LLM providers. Anything that speaks OpenAI chat completions works.
///
/// Implementors are object-safe and `Send + Sync` so a provider can be shared
/// across threads behind a trait object.
pub trait Provider: Send + Sync {
    /// Start a streaming chat completion.
    ///
    /// * `messages` — the full conversation to send to the model.
    /// * `tools` — tool definitions advertised to the model (may be empty).
    ///
    /// Returns a pinned, boxed stream of [`StreamEvent`]s. The `'_` lifetime
    /// ties the stream to `&self`, so the provider must outlive the stream.
    /// Errors (network, HTTP, parse) are surfaced as `Err` items.
    fn stream_chat(
        &self,
        messages: &[ChatMessage],
        tools: &[ToolDefinition],
    ) -> Pin<Box<dyn Stream<Item = Result<StreamEvent>> + Send + '_>>;
}
/// OpenAI-compatible provider (works with vLLM, Ollama, llama.cpp, OpenAI, etc.)
pub struct OpenAIProvider {
    // Reused HTTP client so connections can be pooled across requests.
    client: reqwest::Client,
    // Base URL of the API; stored without a trailing '/' (stripped in `new`).
    endpoint: String,
    // Optional bearer token; omitted entirely when `None` (local servers).
    api_key: Option<String>,
    // Model name passed verbatim in the request body.
    model: String,
    // Upper bound on generated tokens, always sent with every request.
    max_tokens: u32,
    // Sampling temperature; only included in the request when set.
    temperature: Option<f32>,
}
impl OpenAIProvider {
    /// Construct a provider from the loaded configuration.
    ///
    /// Strips any trailing '/' from the configured endpoint so URL joining
    /// in `stream_chat` never produces a double slash.
    pub fn new(config: &Config) -> Self {
        let endpoint = config.endpoint.trim_end_matches('/').to_string();
        Self {
            client: reqwest::Client::new(),
            endpoint,
            api_key: config.api_key.clone(),
            model: config.model.clone(),
            max_tokens: config.max_tokens,
            temperature: config.temperature,
        }
    }

    /// Assemble the JSON body for a streaming chat-completions request.
    ///
    /// `temperature` and `tools` are optional wire fields: they are only
    /// inserted when configured / non-empty, so servers that reject unknown
    /// or empty fields still accept the request.
    fn build_request_body(
        &self,
        messages: &[ChatMessage],
        tools: &[ToolDefinition],
    ) -> serde_json::Value {
        let mut body = serde_json::json!({
            "model": self.model,
            "messages": messages,
            "max_tokens": self.max_tokens,
            "stream": true,
        });

        if let Some(temp) = self.temperature {
            body["temperature"] = serde_json::json!(temp);
        }

        if !tools.is_empty() {
            // Wrap each tool in the OpenAI "function" envelope.
            let tool_defs = tools
                .iter()
                .map(|tool| {
                    serde_json::json!({
                        "type": "function",
                        "function": {
                            "name": tool.name,
                            "description": tool.description,
                            "parameters": tool.parameters,
                        }
                    })
                })
                .collect::<Vec<_>>();
            body["tools"] = serde_json::Value::Array(tool_defs);
        }

        body
    }
}
impl Provider for OpenAIProvider {
    /// Stream a chat completion from the OpenAI-compatible endpoint.
    ///
    /// The request body and URL are built eagerly; the HTTP call itself runs
    /// lazily inside the returned stream. Network/HTTP errors are yielded as
    /// a single `Err` item and terminate the stream. The SSE body is parsed
    /// line-by-line into [`StreamEvent`]s; malformed chunks are logged and
    /// skipped rather than aborting the stream.
    fn stream_chat(
        &self,
        messages: &[ChatMessage],
        tools: &[ToolDefinition],
    ) -> Pin<Box<dyn Stream<Item = Result<StreamEvent>> + Send + '_>> {
        let body = self.build_request_body(messages, tools);
        let url = format!("{}/chat/completions", self.endpoint);
        let mut req = self.client.post(&url).json(&body);
        if let Some(ref key) = self.api_key {
            req = req.bearer_auth(key);
        }
        Box::pin(async_stream::stream! {
            let response = match req.send().await {
                Ok(r) => r,
                Err(e) => {
                    yield Err(anyhow::anyhow!(e));
                    return;
                }
            };
            if !response.status().is_success() {
                let status = response.status();
                let text = response.text().await.unwrap_or_default();
                yield Err(anyhow::anyhow!("API error {status}: {text}"));
                return;
            }
            use futures::StreamExt;
            let mut stream = response.bytes_stream();
            let mut buffer = String::new();
            while let Some(chunk) = stream.next().await {
                let chunk = match chunk {
                    Ok(c) => c,
                    Err(e) => {
                        yield Err(anyhow::anyhow!(e));
                        return;
                    }
                };
                buffer.push_str(&String::from_utf8_lossy(&chunk));
                // Process complete SSE lines. `drain` removes the consumed
                // prefix in place instead of reallocating the buffer twice
                // per line as slicing + to_string() would.
                while let Some(line_end) = buffer.find('\n') {
                    let raw: String = buffer.drain(..=line_end).collect();
                    let line = raw.trim();
                    if line.is_empty() || line.starts_with(':') {
                        // Blank separator or SSE keep-alive comment.
                        continue;
                    }
                    // Per the SSE spec the colon after the field name may or
                    // may not be followed by a single space; accept both
                    // "data: {...}" and "data:{...}".
                    let Some(data) = line.strip_prefix("data:") else {
                        continue;
                    };
                    let data = data.strip_prefix(' ').unwrap_or(data);
                    if data == "[DONE]" {
                        yield Ok(StreamEvent::Done);
                        return;
                    }
                    match serde_json::from_str::<StreamChunk>(data) {
                        Ok(chunk) => {
                            for choice in &chunk.choices {
                                if let Some(ref content) = choice.delta.content {
                                    yield Ok(StreamEvent::Text(content.clone()));
                                }
                                if let Some(ref tool_calls) = choice.delta.tool_calls {
                                    for tc in tool_calls {
                                        yield Ok(StreamEvent::ToolCallDelta(ToolCallDelta {
                                            index: tc.index,
                                            id: tc.id.clone(),
                                            name: tc.function.as_ref().and_then(|f| f.name.clone()),
                                            arguments_delta: tc.function.as_ref().and_then(|f| f.arguments.clone()),
                                        }));
                                    }
                                }
                                if choice.finish_reason.is_some() {
                                    yield Ok(StreamEvent::Finish);
                                }
                            }
                        }
                        Err(e) => {
                            // Best-effort: some servers interleave vendor
                            // extensions; warn and keep streaming.
                            tracing::warn!("Failed to parse SSE chunk: {e}: {data}");
                        }
                    }
                }
            }
        })
    }
}

121
src/provider/types.rs Normal file
View file

@@ -0,0 +1,121 @@
use serde::{Deserialize, Serialize};
/// A message in the chat conversation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatMessage {
    // Who produced this message (system/user/assistant/tool).
    pub role: Role,
    // Text content. May be `None` — e.g. the `assistant` constructor allows
    // a message that carries only tool calls.
    pub content: Option<String>,
    // Tool calls requested by the assistant; omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<ToolCall>>,
    // For `Role::Tool` messages: id of the call this message answers.
    // Omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}
impl ChatMessage {
pub fn system(content: &str) -> Self {
Self {
role: Role::System,
content: Some(content.to_string()),
tool_calls: None,
tool_call_id: None,
}
}
pub fn user(content: &str) -> Self {
Self {
role: Role::User,
content: Some(content.to_string()),
tool_calls: None,
tool_call_id: None,
}
}
pub fn assistant(content: Option<String>, tool_calls: Option<Vec<ToolCall>>) -> Self {
Self {
role: Role::Assistant,
content,
tool_calls,
tool_call_id: None,
}
}
pub fn tool_result(tool_call_id: &str, content: &str) -> Self {
Self {
role: Role::Tool,
content: Some(content.to_string()),
tool_calls: None,
tool_call_id: Some(tool_call_id.to_string()),
}
}
}
/// The originator of a [`ChatMessage`], serialized lowercase on the wire
/// ("system", "user", "assistant", "tool").
///
/// A fieldless enum: equality is total, so `Eq`/`Hash` are derived alongside
/// `PartialEq`, and `Copy` makes passing by value free.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    System,
    User,
    Assistant,
    Tool,
}
/// A completed tool call as stored in an assistant message.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
    // Server-assigned call id; echoed back via `ChatMessage::tool_result`.
    pub id: String,
    // Wire field is named "type" (a Rust keyword), hence the rename.
    #[serde(rename = "type")]
    pub call_type: String,
    // The function name and arguments being invoked.
    pub function: FunctionCall,
}
/// The function half of a [`ToolCall`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FunctionCall {
    // Name of the tool/function to invoke.
    pub name: String,
    // Arguments as a raw string (JSON-encoded on the wire, per the
    // OpenAI API), left unparsed here.
    pub arguments: String,
}
/// Streaming SSE chunk from the OpenAI-compatible API.
#[derive(Debug, Deserialize)]
pub struct StreamChunk {
pub choices: Vec<StreamChoice>,
}
/// One choice inside a streamed chunk.
#[derive(Debug, Deserialize)]
pub struct StreamChoice {
    // Incremental content/tool-call payload for this choice.
    pub delta: StreamDelta,
    // Set on the final chunk of a choice; its presence triggers
    // `StreamEvent::Finish` in the provider.
    pub finish_reason: Option<String>,
}
/// Incremental message payload inside a [`StreamChoice`].
#[derive(Debug, Deserialize)]
pub struct StreamDelta {
    // New text tokens, when this delta carries content.
    pub content: Option<String>,
    // New tool-call fragments, when this delta carries tool calls.
    pub tool_calls: Option<Vec<StreamToolCall>>,
}
/// A tool-call fragment inside a streamed delta.
#[derive(Debug, Deserialize)]
pub struct StreamToolCall {
    // Position of this tool call within the assistant message; used to
    // correlate fragments of the same call across chunks.
    pub index: usize,
    // Call id; optional in the wire format (presumably only present on the
    // first fragment of a call — verify against the server's output).
    pub id: Option<String>,
    // Function name/arguments fragment, when present.
    pub function: Option<StreamFunctionCall>,
}
/// Partial function data inside a [`StreamToolCall`] fragment.
#[derive(Debug, Deserialize)]
pub struct StreamFunctionCall {
    // Function name, when this fragment carries it.
    pub name: Option<String>,
    // A piece of the JSON-encoded arguments string, when present.
    pub arguments: Option<String>,
}
/// High-level stream events emitted by the provider.
#[derive(Debug, Clone)]
pub enum StreamEvent {
    // A chunk of assistant text content.
    Text(String),
    // An incremental piece of a tool call.
    ToolCallDelta(ToolCallDelta),
    // A choice reported a `finish_reason`.
    Finish,
    // The server sent the `[DONE]` sentinel; the stream is over.
    Done,
}
/// Incremental fragment of a streamed tool call, flattened from
/// [`StreamToolCall`] by the provider.
#[derive(Debug, Clone)]
pub struct ToolCallDelta {
    // Which tool call in the message this fragment belongs to.
    pub index: usize,
    // Call id; may be absent on fragments that don't repeat it.
    pub id: Option<String>,
    // Function name; may be absent on fragments that don't repeat it.
    pub name: Option<String>,
    // Next piece of the arguments string; callers accumulate these.
    pub arguments_delta: Option<String>,
}