- ab_ai_bot: raise requests.post timeout from 120s to 600s so long OCR+LLM runs don't silently drop the reply in Discuss
- upload: run parse_upload in a ThreadPoolExecutor so Tesseract OCR doesn't block the FastAPI event loop
- expenses_agent: parse all receipts concurrently with asyncio.gather (an Ollama semaphore caps parallelism at 2); reduces 13-receipt LLM time from ~39s sequential to ~20s parallel

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
87 lines
3.0 KiB
Python
87 lines
3.0 KiB
Python
from __future__ import annotations
|
|
import asyncio
|
|
import logging
|
|
import uuid
|
|
from typing import List, Optional
|
|
|
|
from fastapi import APIRouter, File, Form, HTTPException, Request, UploadFile, status
|
|
|
|
from ..config import get_settings
|
|
from .dispatch import DispatchResponse, _check_rate_limit, _verify_webhook_secret
|
|
from ..tools.receipt_parser import parse_upload
|
|
|
|
# Module-level logger, named after this module per project convention.
logger = logging.getLogger(__name__)

# All routes below are mounted under /upload and tagged 'upload' in the OpenAPI docs.
router = APIRouter(prefix='/upload', tags=['upload'])
|
|
|
|
|
|
@router.post('', response_model=DispatchResponse)
async def upload(
    request: Request,
    user_id: str = Form(...),
    message: str = Form(default='Create an employee expense report from these receipts.'),
    session_id: Optional[str] = Form(default=None),
    files: List[UploadFile] = File(default=[]),
):
    """Accept uploaded receipt files, parse them (OCR off the event loop), and
    dispatch an expense-report directive to the master agent.

    Form fields:
        user_id: requesting user; also used for rate limiting.
        message: instruction forwarded to the agent (defaults to a generic
            expense-report prompt).
        session_id: optional; reused as the directive id when provided,
            otherwise a fresh uuid4 hex is generated.
        files: zero or more receipt files (images/PDFs).

    Raises:
        HTTPException 503 if the master agent is not ready, 504 on directive
        timeout, 500 on any other dispatch failure.
    """
    _verify_webhook_secret(request)
    _check_rate_limit(user_id)

    # Imported lazily to avoid a circular import at module load time.
    from ..app_state import get_master_agent

    master = get_master_agent()
    if master is None:
        raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                            detail='Agent service not ready')

    receipts: list[dict] = []
    for f in files:
        data = await f.read()
        filename = f.filename or 'receipt'
        try:
            # parse_upload may run OCR (CPU-bound) — run it in a worker thread
            # so the event loop stays responsive. asyncio.to_thread uses the
            # loop's shared default executor; the previous code built a new
            # ThreadPoolExecutor per request and never shut it down, leaking
            # threads on every call.
            parsed = await asyncio.to_thread(parse_upload, filename, data)
        except Exception as exc:
            # Best-effort: one unparseable file must not fail the whole batch.
            logger.warning('upload: parse failed for %s: %s', filename, exc)
        else:
            receipts.extend(parsed)
            logger.info('upload: parsed %s → %d receipt(s)', filename, len(parsed))

    if not receipts:
        # Still dispatch: the agent can respond with guidance even when
        # nothing was parseable.
        logger.warning('upload: no parseable receipts found in upload from user_id=%s', user_id)

    directive_id = session_id or uuid.uuid4().hex
    extra_context = {'receipts': receipts, 'user_id': user_id}

    settings = get_settings()
    # directive_timeout_minutes is configured in minutes; wait_for takes seconds.
    timeout = settings.directive_timeout_minutes * 60

    try:
        response = await asyncio.wait_for(
            master.handle_message(
                user_id=user_id,
                channel_id=None,
                message=message,
                directive_id=directive_id,
                extra_context=extra_context,
            ),
            timeout=timeout,
        )
    except asyncio.TimeoutError:
        raise HTTPException(
            status_code=status.HTTP_504_GATEWAY_TIMEOUT,
            detail=f'Directive timed out after {settings.directive_timeout_minutes}m',
        ) from None
    except Exception as exc:
        logger.exception('upload error user=%s: %s', user_id, exc)
        # Chain the cause so the original traceback is preserved in logs/debug.
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                            detail=str(exc)) from exc

    return DispatchResponse(
        directive_id=response.directive_id,
        reply=response.response,
        escalations=response.escalations,
        actions_taken=response.actions_taken,
        session_id=session_id,
    )
|