Fix xterm package versions and make models endpoint resilient
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
@@ -399,9 +399,10 @@ async def list_models():
         async with httpx.AsyncClient(timeout=10) as client:
             resp = await client.get(f"{OLLAMA_URL}/api/tags")
             resp.raise_for_status()
-            return {"models": resp.json().get("models", [])}
+            return {"models": resp.json().get("models", []), "error": None}
     except Exception as exc:
-        raise HTTPException(status_code=500, detail=str(exc))
+        # Return empty list instead of crashing — Ollama may not be reachable yet
+        return {"models": [], "error": str(exc)}
 
 
 @app.websocket("/api/models/pull")
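A minimal sketch of how the patched endpoint might read in context. Everything outside the hunk lines is an assumption: the FastAPI `app` object, the `/api/models` route path, and the `OLLAMA_URL` default are guessed from the surrounding handlers, not taken from this commit.

import os

import httpx
from fastapi import FastAPI

app = FastAPI()
# Assumed default; the real project may configure this differently.
OLLAMA_URL = os.environ.get("OLLAMA_URL", "http://localhost:11434")


@app.get("/api/models")  # route path assumed, not shown in the hunk
async def list_models():
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(f"{OLLAMA_URL}/api/tags")
            resp.raise_for_status()
            return {"models": resp.json().get("models", []), "error": None}
    except Exception as exc:
        # Return an empty list instead of raising; Ollama may not be reachable yet.
        return {"models": [], "error": str(exc)}

With this shape the client always gets a 200 with a "models" array and can surface the "error" string when it is non-null, instead of having to handle a 500.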
@@ -447,4 +448,4 @@ async def delete_model(model_name: str):
 
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=8080, reload=True)
+    uvicorn.run("main:app", host="0.0.0.0", port=8080, reload=True)
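The hunk above switches uvicorn.run from the app object to an import string. Uvicorn's reloader works by restarting a worker process that re-imports the application, so reload=True only takes effect when it is given a string it can import; with a bare app object uvicorn warns that the application must be passed as an import string and reload does not work. A minimal sketch, assuming the module is main.py as the "main:app" string implies:

if __name__ == "__main__":
    import uvicorn

    # An app object cannot be re-imported after a file change, so the
    # reloader is effectively disabled:
    #   uvicorn.run(app, host="0.0.0.0", port=8080, reload=True)

    # An import string lets the reload supervisor spawn a fresh process
    # that re-imports main:app on every change:
    uvicorn.run("main:app", host="0.0.0.0", port=8080, reload=True)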