Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion .env.example
Original file line number Diff line number Diff line change
@@ -1,9 +1,19 @@
OPENAI_API_KEY=

# LLM provider — auto-detected from API keys if unset
# Options: openai, minimax
# LLM_PROVIDER=openai

# LLM model — strong models are required for reliable UI generation
# Recommended: gpt-5.4, gpt-5.4-pro, claude-opus-4-6, gemini-3.1-pro
# Recommended: gpt-5.4, gpt-5.4-pro, claude-opus-4-6, gemini-3.1-pro, MiniMax-M2.7
LLM_MODEL=gpt-5.4-2026-03-05

# Custom base URL for OpenAI-compatible providers (overrides provider preset)
# LLM_BASE_URL=

# MiniMax (https://www.minimaxi.com) — set key to auto-select MiniMax provider
# MINIMAX_API_KEY=

# Rate limiting (per IP) — disabled by default
RATE_LIMIT_ENABLED=false
RATE_LIMIT_WINDOW_MS=60000
Expand Down
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,11 @@ make dev # Start all services
> | `gpt-5.4` / `gpt-5.4-pro` | OpenAI |
> | `claude-opus-4-6` | Anthropic |
> | `gemini-3.1-pro` | Google |
> | `MiniMax-M2.7` / `MiniMax-M2.7-highspeed` | [MiniMax](https://www.minimaxi.com) |
>
> Smaller or weaker models will produce broken layouts, missing interactivity, or incomplete visualizations.
>
> **Using MiniMax:** Set `MINIMAX_API_KEY` in your `.env` — the provider is auto-detected. Defaults to `MiniMax-M2.7` (1M context window). See [MiniMax docs](https://www.minimaxi.com/document/introduction) for API keys.

- **App**: http://localhost:3000
- **Agent**: http://localhost:8123
Expand Down
6 changes: 2 additions & 4 deletions apps/agent/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,10 @@
It defines the workflow graph, state, tools, nodes and edges.
"""

import os

from copilotkit import CopilotKitMiddleware
from langchain.agents import create_agent
from langchain_openai import ChatOpenAI

from src.llm_provider import create_llm
from src.query import query_data
from src.todos import AgentState, todo_tools
from src.form import generate_form
Expand All @@ -19,7 +17,7 @@
_skills_text = load_all_skills()

agent = create_agent(
model=ChatOpenAI(model=os.environ.get("LLM_MODEL", "gpt-5.4-2026-03-05")),
model=create_llm(),
tools=[query_data, *todo_tools, generate_form, *template_tools],
middleware=[CopilotKitMiddleware()],
state_schema=AgentState,
Expand Down
89 changes: 89 additions & 0 deletions apps/agent/src/llm_provider.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
"""
LLM provider factory for multi-provider support.

Supports OpenAI (default), MiniMax, and any OpenAI-compatible provider
via the LLM_BASE_URL environment variable.

Provider auto-detection priority:
1. Explicit LLM_PROVIDER env var
2. MINIMAX_API_KEY present → minimax
3. OPENAI_API_KEY present → openai (default)

MiniMax models use the OpenAI-compatible API at https://api.minimax.io/v1
with temperature clamped to (0.0, 1.0].
"""

import os
from langchain_openai import ChatOpenAI


# Provider presets: base_url and api_key env var name
# Each entry may also carry "default_model", used when LLM_MODEL is unset.
PROVIDER_PRESETS = {
    "openai": {
        "base_url": None,  # uses default
        "api_key_env": "OPENAI_API_KEY",
    },
    "minimax": {
        # OpenAI-compatible endpoint; selected when MINIMAX_API_KEY is set.
        "base_url": "https://api.minimax.io/v1",
        "api_key_env": "MINIMAX_API_KEY",
        "default_model": "MiniMax-M2.7",
    },
}

# Models that require temperature clamping to (0.0, 1.0]
_CLAMP_TEMPERATURE_PROVIDERS = {"minimax"}


def _detect_provider() -> str:
"""Auto-detect provider from environment variables."""
explicit = os.environ.get("LLM_PROVIDER", "").strip().lower()
if explicit:
return explicit

if os.environ.get("MINIMAX_API_KEY"):
return "minimax"

return "openai"


def create_llm() -> ChatOpenAI:
    """
    Create a ChatOpenAI-compatible LLM instance based on environment config.

    Environment variables:
        LLM_PROVIDER    – Provider name: "openai" | "minimax" (auto-detected if unset)
        LLM_MODEL       – Model name (provider-specific default if unset)
        LLM_BASE_URL    – Custom base URL (overrides provider preset)
        LLM_TEMPERATURE – Temperature value (default: 0.7; a malformed value
                          falls back to the default instead of raising)
        OPENAI_API_KEY  – OpenAI API key
        MINIMAX_API_KEY – MiniMax API key

    Returns:
        A configured ChatOpenAI client. An unknown provider name falls back
        to OpenAI defaults (no preset base_url, OPENAI_API_KEY).
    """
    provider = _detect_provider()
    preset = PROVIDER_PRESETS.get(provider, {})

    # Model: explicit env var > provider default > global OpenAI default.
    model = os.environ.get("LLM_MODEL") or preset.get("default_model") or "gpt-5.4-2026-03-05"
    # A user-supplied base URL always wins over the provider preset.
    base_url = os.environ.get("LLM_BASE_URL") or preset.get("base_url")

    # Resolve API key: provider-specific key first, then OPENAI_API_KEY as a
    # fallback so OpenAI-compatible setups keyed only by OPENAI_API_KEY work.
    api_key_env = preset.get("api_key_env", "OPENAI_API_KEY")
    api_key = os.environ.get(api_key_env) or os.environ.get("OPENAI_API_KEY", "")

    # Parse temperature defensively: a malformed LLM_TEMPERATURE (e.g. "warm")
    # previously raised ValueError and crashed agent startup — fall back to the
    # documented default instead.
    try:
        temperature = float(os.environ.get("LLM_TEMPERATURE", "0.7"))
    except ValueError:
        temperature = 0.7

    # Clamp temperature for providers that require it (MiniMax: (0.0, 1.0]).
    if provider in _CLAMP_TEMPERATURE_PROVIDERS:
        temperature = max(0.01, min(temperature, 1.0))

    kwargs = {
        "model": model,
        "temperature": temperature,
    }

    if base_url:
        kwargs["base_url"] = base_url

    if api_key:
        kwargs["api_key"] = api_key

    return ChatOpenAI(**kwargs)
Empty file added apps/agent/tests/__init__.py
Empty file.
26 changes: 26 additions & 0 deletions apps/agent/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
"""Configure imports so tests can import from the agent src directory."""

import importlib
import sys
import types
from pathlib import Path

# The system has a `src` package installed globally that shadows our local
# `src/` directory. Remove it so that the agent's own `src` package is found.
agent_root = Path(__file__).resolve().parents[1]
src_dir = agent_root / "src"

# Remove any pre-existing `src` module from the cache
for key in list(sys.modules):
if key == "src" or key.startswith("src."):
del sys.modules[key]

# Ensure agent root is first on the path
if str(agent_root) not in sys.path:
sys.path.insert(0, str(agent_root))

# Register our local src as a namespace package so submodule imports work
src_mod = types.ModuleType("src")
src_mod.__path__ = [str(src_dir)]
src_mod.__package__ = "src"
sys.modules["src"] = src_mod
Loading