Merge pull request #13 from Octane0411/feat/multi-provider-support

feat: Add multi-provider support (GPT, Gemini, etc.)
Xinlu Lai 2026-01-20 20:50:23 +08:00 committed by GitHub
commit 1521580cba
10 changed files with 291 additions and 44 deletions

View File

@@ -1,4 +1,21 @@
-# API Configuration
+# Provider Selection (defaults to anthropic for backward compatibility)
+AI_PROVIDER=anthropic # Options: anthropic, openai, gemini, or any OpenAI-compatible service
+
+# Model Name (auto-defaults based on provider, but can be overridden)
+MODEL_NAME=kimi-k2-turbo-preview
+
+# Anthropic Configuration
 ANTHROPIC_API_KEY=sk-xxx
 ANTHROPIC_BASE_URL=https://api.moonshot.cn/anthropic
-MODEL_NAME=kimi-k2-turbo-preview
+
+# OpenAI Configuration
+OPENAI_API_KEY=sk-xxx
+OPENAI_BASE_URL=https://api.openai.com/v1
+
+# Google Gemini Configuration (via OpenAI-compatible endpoint)
+GEMINI_API_KEY=xxx
+GEMINI_BASE_URL=https://generativelanguage.googleapis.com/v1beta/openai/
+
+# Example: Custom OpenAI-compatible service
+# CUSTOM_API_KEY=xxx
+# CUSTOM_BASE_URL=https://api.custom-service.com/v1
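
Any OpenAI-compatible service can be selected the same way: for an unrecognized provider name, `get_client()` in `provider_utils.py` looks up `<PROVIDER>_API_KEY` and `<PROVIDER>_BASE_URL`. A minimal sketch, assuming a hypothetical `custom` provider (the key, URL, and model name below are placeholders, not part of this PR):

```python
# Sketch: selecting a custom OpenAI-compatible provider via environment variables.
# "custom", "my-model", and the URL are illustrative placeholders.
import os

os.environ["AI_PROVIDER"] = "custom"
os.environ["CUSTOM_API_KEY"] = "xxx"
os.environ["CUSTOM_BASE_URL"] = "https://api.custom-service.com/v1"
os.environ["MODEL_NAME"] = "my-model"

from provider_utils import get_client, get_model

client = get_client()  # resolves CUSTOM_API_KEY / CUSTOM_BASE_URL, returns an OpenAIAdapter
model = get_model()    # "my-model"
```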

View File

@@ -37,11 +37,12 @@ A progressive tutorial that demystifies AI coding agents like Kode, Claude Code,
 ## Quick Start
 
 ```bash
-pip install anthropic python-dotenv
+# Install dependencies
+pip install -r requirements.txt
 
 # Configure your API
 cp .env.example .env
-# Edit .env with your API key
+# Edit .env with your API key (supports Anthropic, OpenAI, Gemini, etc.)
 
 # Run any version
 python v0_bash_agent.py   # Minimal
@@ -171,4 +172,4 @@ MIT
 
 **Model as Agent. That's the whole secret.**
 
 [@baicai003](https://x.com/baicai003)

provider_utils.py (new file, 242 lines)
View File

@@ -0,0 +1,242 @@
"""
Provider utilities for multi-provider AI agent support.
This module provides a unified interface for multiple AI providers (Anthropic, OpenAI, Gemini),
allowing the existing agent code (v0-v4) to run unchanged.
It uses the Adapter Pattern to make OpenAI-compatible clients look exactly like
Anthropic clients to the consuming code.
"""
import os
import json
from typing import Any, Dict, List, Union, Optional
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
# =============================================================================
# Data Structures (Mimic Anthropic SDK)
# =============================================================================
class ResponseWrapper:
    """Wrapper to make OpenAI responses look like Anthropic responses."""

    def __init__(self, content, stop_reason):
        self.content = content
        self.stop_reason = stop_reason


class ContentBlock:
    """Wrapper to make content blocks look like Anthropic content blocks."""

    def __init__(self, block_type, **kwargs):
        self.type = block_type
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __repr__(self):
        attrs = ", ".join(f"{k}={v!r}" for k, v in self.__dict__.items())
        return f"ContentBlock({attrs})"
# =============================================================================
# Adapters
# =============================================================================

class OpenAIAdapter:
    """
    Adapts the OpenAI client to look like an Anthropic client.

    Key Magic:
        self.messages = self
    This allows the agent code to call:
        client.messages.create(...)
    which resolves to:
        adapter.create(...)
    """

    def __init__(self, openai_client):
        self.client = openai_client
        self.messages = self  # Duck typing: act as the 'messages' resource
    def create(self, model: str, system: str, messages: List[Dict], tools: List[Dict], max_tokens: int = 8000):
        """
        The core translation layer.
        Converts Anthropic inputs -> OpenAI inputs -> OpenAI API -> Anthropic outputs.
        """
        # 1. Convert Messages (Anthropic -> OpenAI)
        openai_messages = [{"role": "system", "content": system}]
        for msg in messages:
            role = msg["role"]
            content = msg["content"]
            if role == "user":
                if isinstance(content, str):
                    # Simple text message
                    openai_messages.append({"role": "user", "content": content})
                elif isinstance(content, list):
                    # Tool results (User role in Anthropic, Tool role in OpenAI)
                    for part in content:
                        if part.get("type") == "tool_result":
                            openai_messages.append({
                                "role": "tool",
                                "tool_call_id": part["tool_use_id"],
                                "content": part["content"] or "(no output)"
                            })
                    # Note: Anthropic user messages can also contain text+image,
                    # but v0-v4 agents don't use that yet.
            elif role == "assistant":
                if isinstance(content, str):
                    # Simple text message
                    openai_messages.append({"role": "assistant", "content": content})
                elif isinstance(content, list):
                    # Tool calls (Assistant role)
                    # Anthropic splits thought (text) and tool_use into blocks
                    # OpenAI puts thought in 'content' and tools in 'tool_calls'
                    text_parts = []
                    tool_calls = []
                    for part in content:
                        # Handle both dicts and objects (ContentBlock)
                        if isinstance(part, dict):
                            part_type = part.get("type")
                            part_text = part.get("text")
                            part_id = part.get("id")
                            part_name = part.get("name")
                            part_input = part.get("input")
                        else:
                            part_type = getattr(part, "type", None)
                            part_text = getattr(part, "text", None)
                            part_id = getattr(part, "id", None)
                            part_name = getattr(part, "name", None)
                            part_input = getattr(part, "input", None)
                        if part_type == "text":
                            text_parts.append(part_text)
                        elif part_type == "tool_use":
                            tool_calls.append({
                                "id": part_id,
                                "type": "function",
                                "function": {
                                    "name": part_name,
                                    "arguments": json.dumps(part_input)
                                }
                            })
                    assistant_msg = {"role": "assistant"}
                    if text_parts:
                        assistant_msg["content"] = "\n".join(text_parts)
                    if tool_calls:
                        assistant_msg["tool_calls"] = tool_calls
                    openai_messages.append(assistant_msg)
        # 2. Convert Tools (Anthropic -> OpenAI)
        openai_tools = []
        for tool in tools:
            openai_tools.append({
                "type": "function",
                "function": {
                    "name": tool["name"],
                    "description": tool["description"],
                    "parameters": tool["input_schema"]
                }
            })

        # 3. Call OpenAI API
        # Note: Gemini/OpenAI handle max_tokens differently, but usually support the param
        response = self.client.chat.completions.create(
            model=model,
            messages=openai_messages,
            tools=openai_tools if openai_tools else None,
            max_tokens=max_tokens
        )

        # 4. Convert Response (OpenAI -> Anthropic)
        message = response.choices[0].message
        content_blocks = []

        # Extract text content
        if message.content:
            content_blocks.append(ContentBlock("text", text=message.content))

        # Extract tool calls
        if message.tool_calls:
            for tool_call in message.tool_calls:
                content_blocks.append(ContentBlock(
                    "tool_use",
                    id=tool_call.id,
                    name=tool_call.function.name,
                    input=json.loads(tool_call.function.arguments)
                ))

        # Map stop reasons: OpenAI "stop"/"tool_calls" -> Anthropic "end_turn"/"tool_use"
        # OpenAI: stop, length, content_filter, tool_calls
        finish_reason = response.choices[0].finish_reason
        if finish_reason == "tool_calls":
            stop_reason = "tool_use"
        elif finish_reason == "stop":
            stop_reason = "end_turn"
        else:
            stop_reason = finish_reason  # Fallback

        return ResponseWrapper(content_blocks, stop_reason)
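
# Worked illustration of the translation above (the ids and values are made up):
#   an Anthropic-style tool-result turn
#       {"role": "user", "content": [{"type": "tool_result",
#                                     "tool_use_id": "call_1", "content": "ok"}]}
#   is forwarded to the OpenAI API as
#       {"role": "tool", "tool_call_id": "call_1", "content": "ok"},
#   and on the way back a finish_reason of "tool_calls" is reported to the agent
#   as stop_reason "tool_use", with each function call wrapped in a
#   ContentBlock("tool_use", id=..., name=..., input=...).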
# =============================================================================
# Factory Functions
# =============================================================================

def get_provider():
    """Get the current AI provider from environment variable."""
    return os.getenv("AI_PROVIDER", "anthropic").lower()


def get_client():
    """
    Return a client that conforms to the Anthropic interface.

    If AI_PROVIDER is 'anthropic', returns the native Anthropic client.
    Otherwise, returns an OpenAIAdapter wrapping an OpenAI-compatible client.
    """
    provider = get_provider()

    if provider == "anthropic":
        from anthropic import Anthropic
        base_url = os.getenv("ANTHROPIC_BASE_URL")
        # Return native client - guarantees 100% behavior compatibility
        return Anthropic(
            api_key=os.getenv("ANTHROPIC_API_KEY"),
            base_url=base_url
        )
    else:
        # For OpenAI/Gemini, we wrap the client to mimic Anthropic
        try:
            from openai import OpenAI
        except ImportError:
            raise ImportError("Please install openai: pip install openai")

        if provider == "openai":
            api_key = os.getenv("OPENAI_API_KEY")
            base_url = os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1")
        elif provider == "gemini":
            api_key = os.getenv("GEMINI_API_KEY")
            # Gemini OpenAI-compatible endpoint
            base_url = os.getenv("GEMINI_BASE_URL", "https://generativelanguage.googleapis.com/v1beta/openai/")
        else:
            # Generic OpenAI-compatible provider
            api_key = os.getenv(f"{provider.upper()}_API_KEY")
            base_url = os.getenv(f"{provider.upper()}_BASE_URL")

        if not api_key:
            raise ValueError(f"API Key for {provider} is missing. Please check your .env file.")

        raw_client = OpenAI(api_key=api_key, base_url=base_url)
        return OpenAIAdapter(raw_client)


def get_model():
    """Return model name from environment variable."""
    model = os.getenv("MODEL_NAME")
    if not model:
        raise ValueError("MODEL_NAME environment variable is missing. Please set it in your .env file.")
    return model
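
With the factory functions in place, the agents only ever see the Anthropic-style interface. A minimal usage sketch: the `bash` tool schema mirrors the one in `v0_bash_agent_mini.py`, while the description and prompt text here are illustrative, not taken from this PR.

```python
# Sketch of the call pattern the agents use against get_client()/get_model().
from provider_utils import get_client, get_model

client = get_client()  # native Anthropic client or OpenAIAdapter, depending on AI_PROVIDER
MODEL = get_model()

tools = [{"name": "bash",
          "description": "Run a shell command.",  # illustrative description
          "input_schema": {"type": "object",
                           "properties": {"command": {"type": "string"}},
                           "required": ["command"]}}]

response = client.messages.create(
    model=MODEL,
    system="You are a CLI agent.",  # illustrative prompt
    messages=[{"role": "user", "content": "list the files here"}],
    tools=tools,
    max_tokens=8000,
)

for block in response.content:          # ContentBlock objects (or native Anthropic blocks)
    if block.type == "tool_use":
        print(block.name, block.input)  # e.g. bash {'command': 'ls'}
    elif block.type == "text":
        print(block.text)
```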

View File

@@ -1,2 +1,5 @@
+anthropic>=0.25.0
+openai>=1.0.0
+python-dotenv>=1.0.0
 pygame==2.5.2
 numpy==1.24.3

View File

@@ -47,21 +47,14 @@ Usage:
     python v0_bash_agent.py "explore src/ and summarize"
 """
-from anthropic import Anthropic
-from dotenv import load_dotenv
+from provider_utils import get_client, get_model
 import subprocess
 import sys
 import os
 
-# Load environment variables from .env file
-load_dotenv()
-
-# Initialize API client with credentials from environment
-client = Anthropic(
-    api_key=os.getenv("ANTHROPIC_API_KEY"),
-    base_url=os.getenv("ANTHROPIC_BASE_URL")
-)
-MODEL = os.getenv("MODEL_NAME", "claude-sonnet-4-20250514")
+# Initialize API client and model using provider utilities
+client = get_client()
+MODEL = get_model()
 
 # The ONE tool that does everything
 # Notice how the description teaches the model common patterns AND how to spawn subagents

View File

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 """v0_bash_agent_mini.py - Mini Claude Code (Compact)"""
-from anthropic import Anthropic; from dotenv import load_dotenv; import subprocess as sp, sys, os
-load_dotenv(); C = Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"), base_url=os.getenv("ANTHROPIC_BASE_URL")); M = os.getenv("MODEL_NAME", "claude-sonnet-4-20250514")
+from provider_utils import get_client, get_model; import subprocess as sp, sys, os
+C = get_client(); M = get_model()
 T = [{"name":"bash","description":"Shell cmd. Read:cat/grep/find/rg/ls. Write:echo>/sed. Subagent(for complex subtask): python v0_bash_agent_mini.py 'task'","input_schema":{"type":"object","properties":{"command":{"type":"string"}},"required":["command"]}}]
 S = f"CLI agent at {os.getcwd()}. Use bash to solve problems. Spawn subagent for complex subtasks: python v0_bash_agent_mini.py 'task'. Subagent isolates context and returns summary. Be concise."

View File

@@ -56,23 +56,20 @@ from dotenv import load_dotenv
 # Load configuration from .env file
 load_dotenv()
 
+# Import unified client provider
 try:
-    from anthropic import Anthropic
+    from provider_utils import get_client, get_model
 except ImportError:
-    sys.exit("Please install: pip install anthropic python-dotenv")
+    sys.exit("Error: provider_utils.py not found. Please ensure you are in the project root.")
 
 # =============================================================================
 # Configuration
 # =============================================================================
-API_KEY = os.getenv("ANTHROPIC_API_KEY")
-BASE_URL = os.getenv("ANTHROPIC_BASE_URL")
-MODEL = os.getenv("MODEL_NAME", "claude-sonnet-4-20250514")
 WORKDIR = Path.cwd()
-
-# Initialize client - handles both direct Anthropic and compatible APIs
-client = Anthropic(api_key=API_KEY, base_url=BASE_URL) if BASE_URL else Anthropic(api_key=API_KEY)
+MODEL = get_model()
+client = get_client()
 
 # =============================================================================

View File

@@ -66,21 +66,19 @@ from dotenv import load_dotenv
 load_dotenv()
 
 try:
-    from anthropic import Anthropic
+    from provider_utils import get_client, get_model
 except ImportError:
-    sys.exit("Please install: pip install anthropic python-dotenv")
+    sys.exit("Error: provider_utils.py not found. Please ensure you are in the project root.")
 
 # =============================================================================
 # Configuration
 # =============================================================================
-API_KEY = os.getenv("ANTHROPIC_API_KEY")
-BASE_URL = os.getenv("ANTHROPIC_BASE_URL")
-MODEL = os.getenv("MODEL_NAME", "claude-sonnet-4-20250514")
 WORKDIR = Path.cwd()
-client = Anthropic(api_key=API_KEY, base_url=BASE_URL) if BASE_URL else Anthropic(api_key=API_KEY)
+client = get_client()
+MODEL = get_model()
 
 # =============================================================================

View File

@@ -84,21 +84,19 @@ from dotenv import load_dotenv
 load_dotenv()
 
 try:
-    from anthropic import Anthropic
+    from provider_utils import get_client, get_model
 except ImportError:
-    sys.exit("Please install: pip install anthropic python-dotenv")
+    sys.exit("Error: provider_utils.py not found. Please ensure you are in the project root.")
 
 # =============================================================================
 # Configuration
 # =============================================================================
-API_KEY = os.getenv("ANTHROPIC_API_KEY")
-BASE_URL = os.getenv("ANTHROPIC_BASE_URL")
-MODEL = os.getenv("MODEL_NAME", "claude-sonnet-4-20250514")
 WORKDIR = Path.cwd()
-client = Anthropic(api_key=API_KEY, base_url=BASE_URL) if BASE_URL else Anthropic(api_key=API_KEY)
+client = get_client()
+MODEL = get_model()
 
 # =============================================================================

View File

@@ -89,22 +89,20 @@ from dotenv import load_dotenv
 load_dotenv()
 
 try:
-    from anthropic import Anthropic
+    from provider_utils import get_client, get_model
 except ImportError:
-    sys.exit("Please install: pip install anthropic python-dotenv")
+    sys.exit("Error: provider_utils.py not found. Please ensure you are in the project root.")
 
 # =============================================================================
# Configuration
 # =============================================================================
-API_KEY = os.getenv("ANTHROPIC_API_KEY")
-BASE_URL = os.getenv("ANTHROPIC_BASE_URL")
-MODEL = os.getenv("MODEL_NAME", "claude-sonnet-4-20250514")
 WORKDIR = Path.cwd()
 SKILLS_DIR = WORKDIR / "skills"
-client = Anthropic(api_key=API_KEY, base_url=BASE_URL) if BASE_URL else Anthropic(api_key=API_KEY)
+client = get_client()
+MODEL = get_model()
 
 # =============================================================================