Add custom API URL and model to AI settings
- Settings → AI: optional base URL and model name fields
- Defaults to Anthropic/OpenAI public APIs when left blank
- Custom URL enables Open WebUI, LM Studio, Ollama, and any OpenAI-compatible endpoint
- Parse endpoint uses custom base URL and model if configured
- Migration 0004: ai_base_url + ai_model columns on users
- OpenAI provider label updated to "OpenAI-compatible"

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
b1c160f607
commit
d6118bac54
6 changed files with 124 additions and 29 deletions
23
backend/alembic/versions/0004_ai_base_url.py
Normal file
23
backend/alembic/versions/0004_ai_base_url.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
"""add ai_base_url and ai_model to users

Revision ID: 0004
Revises: 0003
Create Date: 2026-04-22
"""
from alembic import op
import sqlalchemy as sa

# Alembic revision identifiers: this migration and its parent in the chain.
revision = "0004"
down_revision = "0003"
branch_labels = None
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add the optional per-user AI base-URL and model columns."""
    # Both columns are nullable text: blank/unset means "use the provider default".
    for column_name in ("ai_base_url", "ai_model"):
        op.add_column("users", sa.Column(column_name, sa.Text, nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the AI base-URL and model columns (reverse of upgrade)."""
    # Dropped in reverse order of creation, mirroring the original migration.
    for column_name in ("ai_model", "ai_base_url"):
        op.drop_column("users", column_name)
|
||||
|
|
@ -20,11 +20,15 @@ SUPPORTED_PROVIDERS = {"anthropic", "openai"}
|
|||
class AiSettingsResponse(BaseModel):
    """Response shape for the AI settings endpoints.

    The stored API key is never echoed back to the client; only
    ``has_api_key`` reveals whether an encrypted key is saved.
    """

    provider: str | None  # one of SUPPORTED_PROVIDERS, or None when unset
    has_api_key: bool  # True when an encrypted key (ai_api_key_enc) is stored
    base_url: str | None  # custom endpoint base URL; None = provider's public API
    model: str | None  # custom model name; None = provider default model
|
||||
|
||||
|
||||
class AiSettingsSave(BaseModel):
    """Request body for saving AI settings.

    ``api_key`` may be left blank to keep the previously saved key; the
    save endpoint rejects a blank key only when no key is stored yet.
    ``base_url`` and ``model`` are optional — blank values fall back to
    the provider's public API endpoint and its default model.
    """

    provider: str  # must be one of SUPPORTED_PROVIDERS ("anthropic" | "openai")
    # Fixed: the span carried both the old required `api_key: str` and the
    # new defaulted declaration (diff residue); a single defaulted field is kept.
    api_key: str = ""
    base_url: str = ""
    model: str = ""
|
||||
|
||||
|
||||
@router.get("/ai", response_model=AiSettingsResponse)
|
||||
|
|
@ -32,6 +36,8 @@ async def get_ai_settings(user: User = Depends(get_current_user)):
|
|||
return AiSettingsResponse(
|
||||
provider=user.ai_provider,
|
||||
has_api_key=bool(user.ai_api_key_enc),
|
||||
base_url=user.ai_base_url,
|
||||
model=user.ai_model,
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -43,17 +49,26 @@ async def save_ai_settings(
|
|||
):
|
||||
if body.provider not in SUPPORTED_PROVIDERS:
|
||||
raise HTTPException(status_code=400, detail=f"Unsupported provider. Choose: {', '.join(SUPPORTED_PROVIDERS)}")
|
||||
if not body.api_key.strip():
|
||||
raise HTTPException(status_code=400, detail="api_key must not be empty")
|
||||
|
||||
encrypted = encrypt_field(body.api_key.strip())
|
||||
await db.execute(
|
||||
update(User)
|
||||
.where(User.id == user.id)
|
||||
.values(ai_provider=body.provider, ai_api_key_enc=encrypted)
|
||||
)
|
||||
values: dict = {
|
||||
"ai_provider": body.provider,
|
||||
"ai_base_url": body.base_url.rstrip("/") or None,
|
||||
"ai_model": body.model.strip() or None,
|
||||
}
|
||||
|
||||
if body.api_key.strip():
|
||||
values["ai_api_key_enc"] = encrypt_field(body.api_key.strip())
|
||||
elif not user.ai_api_key_enc:
|
||||
raise HTTPException(status_code=400, detail="api_key is required when no key is saved yet")
|
||||
|
||||
await db.execute(update(User).where(User.id == user.id).values(**values))
|
||||
await db.commit()
|
||||
return AiSettingsResponse(provider=body.provider, has_api_key=True)
|
||||
return AiSettingsResponse(
|
||||
provider=body.provider,
|
||||
has_api_key=True,
|
||||
base_url=values["ai_base_url"],
|
||||
model=values["ai_model"],
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/ai", status_code=204)
|
||||
|
|
@ -64,6 +79,6 @@ async def clear_ai_settings(
|
|||
await db.execute(
|
||||
update(User)
|
||||
.where(User.id == user.id)
|
||||
.values(ai_provider=None, ai_api_key_enc=None)
|
||||
.values(ai_provider=None, ai_api_key_enc=None, ai_base_url=None, ai_model=None)
|
||||
)
|
||||
await db.commit()
|
||||
|
|
|
|||
|
|
@ -331,8 +331,13 @@ async def parse_attachment(
|
|||
"Return ONLY the JSON object. No markdown, no explanation, no code fences."
|
||||
)
|
||||
|
||||
custom_base_url = (user_row.ai_base_url or "").rstrip("/")
|
||||
custom_model = (user_row.ai_model or "").strip()
|
||||
|
||||
try:
|
||||
if user_row.ai_provider == "anthropic":
|
||||
base_url = custom_base_url or "https://api.anthropic.com"
|
||||
model = custom_model or "claude-haiku-4-5-20251001"
|
||||
if mime_type == "application/pdf":
|
||||
content_block = {
|
||||
"type": "document",
|
||||
|
|
@ -343,16 +348,16 @@ async def parse_attachment(
|
|||
"type": "image",
|
||||
"source": {"type": "base64", "media_type": mime_type, "data": b64},
|
||||
}
|
||||
async with httpx.AsyncClient(timeout=30) as client:
|
||||
async with httpx.AsyncClient(timeout=60) as client:
|
||||
resp = await client.post(
|
||||
"https://api.anthropic.com/v1/messages",
|
||||
f"{base_url}/v1/messages",
|
||||
headers={
|
||||
"x-api-key": api_key,
|
||||
"anthropic-version": "2023-06-01",
|
||||
"content-type": "application/json",
|
||||
},
|
||||
json={
|
||||
"model": "claude-haiku-4-5-20251001",
|
||||
"model": model,
|
||||
"max_tokens": 512,
|
||||
"messages": [{"role": "user", "content": [content_block, {"type": "text", "text": prompt}]}],
|
||||
},
|
||||
|
|
@ -361,14 +366,16 @@ async def parse_attachment(
|
|||
text = resp.json()["content"][0]["text"].strip()
|
||||
|
||||
elif user_row.ai_provider == "openai":
|
||||
if mime_type == "application/pdf":
|
||||
base_url = custom_base_url or "https://api.openai.com"
|
||||
model = custom_model or "gpt-4o-mini"
|
||||
if mime_type == "application/pdf" and not custom_base_url:
|
||||
raise HTTPException(status_code=400, detail="PDF parsing is not supported with the OpenAI provider. Use an image format or switch to Anthropic.")
|
||||
async with httpx.AsyncClient(timeout=30) as client:
|
||||
async with httpx.AsyncClient(timeout=60) as client:
|
||||
resp = await client.post(
|
||||
"https://api.openai.com/v1/chat/completions",
|
||||
f"{base_url}/v1/chat/completions",
|
||||
headers={"Authorization": f"Bearer {api_key}", "content-type": "application/json"},
|
||||
json={
|
||||
"model": "gpt-4o-mini",
|
||||
"model": model,
|
||||
"max_tokens": 512,
|
||||
"messages": [{
|
||||
"role": "user",
|
||||
|
|
|
|||
|
|
@ -31,6 +31,8 @@ class User(Base):
|
|||
|
||||
ai_provider: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
ai_api_key_enc: Mapped[bytes | None] = mapped_column(LargeBinary, nullable=True)
|
||||
ai_base_url: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
ai_model: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
|
||||
accounts: Mapped[list["Account"]] = relationship(back_populates="user", lazy="noload") # type: ignore[name-defined]
|
||||
sessions: Mapped[list["Session"]] = relationship(back_populates="user", lazy="noload") # type: ignore[name-defined]
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue