Add custom API URL and model to AI settings
- Settings → AI: optional base URL and model name fields
- Defaults to Anthropic/OpenAI public APIs when left blank
- Custom URL enables Open WebUI, LM Studio, Ollama, and any OpenAI-compatible endpoint
- Parse endpoint uses custom base URL and model if configured
- Migration 0004: ai_base_url + ai_model columns on users
- OpenAI provider label updated to "OpenAI-compatible"

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
b1c160f607
commit
d6118bac54
6 changed files with 124 additions and 29 deletions
|
|
@ -331,8 +331,13 @@ async def parse_attachment(
|
|||
"Return ONLY the JSON object. No markdown, no explanation, no code fences."
|
||||
)
|
||||
|
||||
custom_base_url = (user_row.ai_base_url or "").rstrip("/")
|
||||
custom_model = (user_row.ai_model or "").strip()
|
||||
|
||||
try:
|
||||
if user_row.ai_provider == "anthropic":
|
||||
base_url = custom_base_url or "https://api.anthropic.com"
|
||||
model = custom_model or "claude-haiku-4-5-20251001"
|
||||
if mime_type == "application/pdf":
|
||||
content_block = {
|
||||
"type": "document",
|
||||
|
|
@ -343,16 +348,16 @@ async def parse_attachment(
|
|||
"type": "image",
|
||||
"source": {"type": "base64", "media_type": mime_type, "data": b64},
|
||||
}
|
||||
async with httpx.AsyncClient(timeout=30) as client:
|
||||
async with httpx.AsyncClient(timeout=60) as client:
|
||||
resp = await client.post(
|
||||
"https://api.anthropic.com/v1/messages",
|
||||
f"{base_url}/v1/messages",
|
||||
headers={
|
||||
"x-api-key": api_key,
|
||||
"anthropic-version": "2023-06-01",
|
||||
"content-type": "application/json",
|
||||
},
|
||||
json={
|
||||
"model": "claude-haiku-4-5-20251001",
|
||||
"model": model,
|
||||
"max_tokens": 512,
|
||||
"messages": [{"role": "user", "content": [content_block, {"type": "text", "text": prompt}]}],
|
||||
},
|
||||
|
|
@ -361,14 +366,16 @@ async def parse_attachment(
|
|||
text = resp.json()["content"][0]["text"].strip()
|
||||
|
||||
elif user_row.ai_provider == "openai":
|
||||
if mime_type == "application/pdf":
|
||||
base_url = custom_base_url or "https://api.openai.com"
|
||||
model = custom_model or "gpt-4o-mini"
|
||||
if mime_type == "application/pdf" and not custom_base_url:
|
||||
raise HTTPException(status_code=400, detail="PDF parsing is not supported with the OpenAI provider. Use an image format or switch to Anthropic.")
|
||||
async with httpx.AsyncClient(timeout=30) as client:
|
||||
async with httpx.AsyncClient(timeout=60) as client:
|
||||
resp = await client.post(
|
||||
"https://api.openai.com/v1/chat/completions",
|
||||
f"{base_url}/v1/chat/completions",
|
||||
headers={"Authorization": f"Bearer {api_key}", "content-type": "application/json"},
|
||||
json={
|
||||
"model": "gpt-4o-mini",
|
||||
"model": model,
|
||||
"max_tokens": 512,
|
||||
"messages": [{
|
||||
"role": "user",
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue