package main

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"

	"github.com/sashabaranov/go-openai"
)

// LLM is an OpenAI LLM wrapper with tool-calling support.
type LLM struct {
	client      *openai.Client
	model       string
	temperature float32
	maxTokens   int
}

// NewLLM creates a new LLM instance. baseURL may be empty, in which case the
// default OpenAI endpoint is used.
func NewLLM(apiKey, model string, temperature float32, baseURL string, maxTokens int) *LLM {
	config := openai.DefaultConfig(apiKey)
	if baseURL != "" {
		config.BaseURL = baseURL
	}
	return &LLM{
		client:      openai.NewClientWithConfig(config),
		model:       model,
		temperature: temperature,
		maxTokens:   maxTokens,
	}
}

// ChatCompletionRequest is a request for chat completion.
type ChatCompletionRequest struct {
	Messages []openai.ChatCompletionMessage
	Tools    []openai.Tool
}

// ChatCompletionResponse is a response from chat completion.
type ChatCompletionResponse struct {
	Message openai.ChatCompletionMessage
}

// Chat sends a chat completion request with the given messages and optional
// tools, and returns the first choice's message. It returns an error when the
// API call fails, when no choices are returned, or when the response was
// truncated because the model hit its token limit.
func (l *LLM) Chat(ctx context.Context, messages []openai.ChatCompletionMessage, tools []openai.Tool) (*openai.ChatCompletionMessage, error) {
	req := openai.ChatCompletionRequest{
		Model:       l.model,
		Messages:    messages,
		Temperature: l.temperature,
		MaxTokens:   l.maxTokens,
	}
	if len(tools) > 0 {
		req.Tools = tools
	}

	resp, err := l.client.CreateChatCompletion(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("failed to create chat completion: %w", err)
	}
	if len(resp.Choices) == 0 {
		return nil, errors.New("no response choices returned")
	}

	choice := resp.Choices[0]
	if choice.FinishReason == "length" {
		// Model hit the token limit, so the response may be incomplete.
		// This is common with reasoning models that need extra tokens for
		// their thinking process. (Format string joined onto one line —
		// a raw line break inside an interpreted string literal does not
		// compile in Go.)
		return nil, fmt.Errorf("response truncated: model hit token limit (finish_reason: length). Consider increasing OPENAI_MAX_TOKENS (current: %d). Usage: prompt=%d, completion=%d, total=%d",
			l.maxTokens, resp.Usage.PromptTokens, resp.Usage.CompletionTokens, resp.Usage.TotalTokens)
	}

	return &choice.Message, nil
}

// ConvertMCPToolsToOpenAI converts MCP tools to the OpenAI tool format.
func ConvertMCPToolsToOpenAI(mcpTools []Tool) []openai.Tool {
	tools := make([]openai.Tool, len(mcpTools))
	for i, t := range mcpTools {
		// Convert InputSchema to JSON-schema form using plain maps.
		props := make(map[string]interface{}, len(t.InputSchema.Properties))
		for name, prop := range t.InputSchema.Properties {
			propMap := map[string]interface{}{
				"type":        prop.Type,
				"description": prop.Description,
			}
			// For object types without explicit nested properties, allow
			// additionalProperties so the LLM can pass any key-value pairs.
			// Important for tools like 'query' and 'mutate' that accept
			// arbitrary variables objects.
			if prop.Type == "object" {
				propMap["additionalProperties"] = true
			}
			props[name] = propMap
		}

		params := map[string]interface{}{
			"type":       t.InputSchema.Type,
			"properties": props,
		}
		// Only include "required" when it has elements — an empty slice
		// would marshal as null.
		if len(t.InputSchema.Required) > 0 {
			params["required"] = t.InputSchema.Required
		}

		tools[i] = openai.Tool{
			Type: openai.ToolTypeFunction,
			Function: &openai.FunctionDefinition{
				Name:        t.Name,
				Description: t.Description,
				Parameters:  params,
			},
		}
	}
	return tools
}

// ParseToolCall parses a tool call from the LLM response, returning the tool
// name and its decoded JSON arguments. The name is returned even when the
// arguments fail to parse, so callers can report which tool was malformed.
func ParseToolCall(toolCall openai.ToolCall) (string, map[string]interface{}, error) {
	name := toolCall.Function.Name
	var args map[string]interface{}
	if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
		return name, nil, fmt.Errorf("failed to parse tool arguments: %w", err)
	}
	return name, args, nil
}

// TestConnection tests the connection to the OpenAI API with a minimal
// request. It uses enough tokens for reasoning models, which need extra
// tokens for their thinking process.
func (l *LLM) TestConnection(ctx context.Context) error {
	req := openai.ChatCompletionRequest{
		Model: l.model,
		Messages: []openai.ChatCompletionMessage{
			{
				Role:    openai.ChatMessageRoleUser,
				Content: "Hello",
			},
		},
		MaxTokens: 100,
	}
	if _, err := l.client.CreateChatCompletion(ctx, req); err != nil {
		return fmt.Errorf("failed to connect to OpenAI API: %w", err)
	}
	return nil
}