chore: add alembic migrations for memory and directive schema

This commit is contained in:
ActiveBlue Build
2026-04-12 14:18:07 -04:00
parent bea58371c1
commit 34a65a485e
3 changed files with 265 additions and 0 deletions

View File

@@ -0,0 +1,39 @@
# Alembic configuration for the ActiveBlue migration environment.
[alembic]
# Migration environment (env.py + versions/) lives in ./migrations.
script_location = migrations
# Put the project root on sys.path so env.py can import application code.
prepend_sys_path = .
# Use the OS path separator when version_locations has multiple entries.
version_path_separator = os
# NOTE(review): the %(POSTGRES_*)s keys are ConfigParser interpolation
# placeholders that are not defined in this file; env.py overrides
# sqlalchemy.url from environment variables before it is used. Confirm
# nothing reads this raw value directly.
sqlalchemy.url = postgresql+asyncpg://%(POSTGRES_USER)s:%(POSTGRES_PASSWORD)s@%(POSTGRES_HOST)s:%(POSTGRES_PORT)s/%(POSTGRES_DB)s

# --- Python logging configuration consumed by logging.config.fileConfig ---
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
# Root logger: warnings and above to the console.
[logger_root]
level = WARN
handlers = console
qualname =
# SQLAlchemy engine logging: WARN keeps per-statement echo off.
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
# Alembic's own progress messages (running upgrade, etc.).
[logger_alembic]
level = INFO
handlers =
qualname = alembic
# Single stderr handler shared by all loggers above.
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -0,0 +1,64 @@
import os
import asyncio
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Alembic Config object: proxies values from alembic.ini.
config = context.config
# Wire up Python logging from the ini's [loggers]/[handlers]/[formatters]
# sections when a config file was supplied.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# Override sqlalchemy.url from environment variables
def get_url():
    """Build the asyncpg SQLAlchemy URL from POSTGRES_* environment variables.

    POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_HOST and POSTGRES_DB are
    required (a missing one raises KeyError); POSTGRES_PORT defaults to 5432.
    """
    env = os.environ
    user = env['POSTGRES_USER']
    password = env['POSTGRES_PASSWORD']
    host = env['POSTGRES_HOST']
    port = env.get('POSTGRES_PORT', '5432')
    database = env['POSTGRES_DB']
    return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{database}"
# Replace the ini's placeholder URL (it uses %(POSTGRES_*)s interpolation
# keys that the ini does not define) with the env-derived one.
config.set_main_option("sqlalchemy.url", get_url())
# No model metadata: migrations are hand-written, so autogenerate is unused.
target_metadata = None
def run_migrations_offline() -> None:
    """Render migration SQL as a script, without connecting to a database.

    Offline ("--sql") mode inlines bind parameters and forces the named
    paramstyle so the emitted script is self-contained.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Run migrations on an already-open connection (sync bridge for asyncio)."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Create an async engine from the alembic config and run migrations.

    NullPool is used because the engine only lives for this single
    migration run. The engine is disposed even when migrations fail,
    so no connections leak on error.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    try:
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)
    finally:
        # Fix: the original only disposed on success, leaking the engine's
        # connections if run_migrations raised mid-flight.
        await connectable.dispose()
def run_migrations_online() -> None:
    """Online-mode entry point: drive the async runner to completion."""
    migration_coro = run_async_migrations()
    asyncio.run(migration_coro)
# Alembic executes this module directly; dispatch on the requested mode
# (--sql flag selects offline mode).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,162 @@
"""Initial schema — memory, directive, session, LLM config, rate limit tables
Revision ID: 001
Revises:
Create Date: 2026-04-12
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# Alembic revision identifiers.
revision = '001'        # this migration's id
down_revision = None    # None: first migration in the chain
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the ActiveBlue schema: three memory tiers, the directive
    execution log, per-agent session state, runtime LLM routing config,
    and a rate-limit counter table, plus supporting indexes and one
    seed row for the default privacy mode.

    Every statement uses IF NOT EXISTS / ON CONFLICT DO NOTHING, so the
    migration is idempotent against a partially created schema.

    NOTE(review): all timestamp columns are naive TIMESTAMP (no time
    zone); confirm the application writes a consistent zone (UTC?).
    """
    # -----------------------------------------------------------------------
    # Tier 1: Conversation memory per user
    # Hard cap: 200 rows per user — enforced in application layer before insert
    # -----------------------------------------------------------------------
    op.execute("""
        CREATE TABLE IF NOT EXISTS ab_conversation_memory (
            id SERIAL PRIMARY KEY,
            user_id INTEGER NOT NULL,
            role VARCHAR(16) NOT NULL,
            content TEXT NOT NULL,
            directive_id VARCHAR(64),
            is_summary BOOLEAN DEFAULT FALSE,
            created_at TIMESTAMP DEFAULT NOW()
        )
    """)
    # Composite index: presumably serves "most recent messages for a user".
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_conv_user
            ON ab_conversation_memory(user_id, created_at DESC)
    """)
    # -----------------------------------------------------------------------
    # Tier 2: Operational memory — 90-day TTL
    # NOTE(review): expires_at is unindexed; if TTL cleanup scans by
    # expiry, an index on expires_at may be wanted — confirm with callers.
    # -----------------------------------------------------------------------
    op.execute("""
        CREATE TABLE IF NOT EXISTS ab_operational_memory (
            id SERIAL PRIMARY KEY,
            scope VARCHAR(64) NOT NULL,
            summary TEXT NOT NULL,
            raw_data JSONB,
            source_directive_id VARCHAR(64),
            expires_at TIMESTAMP,
            created_at TIMESTAMP DEFAULT NOW()
        )
    """)
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_op_scope
            ON ab_operational_memory(scope, created_at DESC)
    """)
    # -----------------------------------------------------------------------
    # Tier 3: Long-term knowledge — permanent
    # One row per (entity_type, entity_key); facts accumulate as JSONB.
    # -----------------------------------------------------------------------
    op.execute("""
        CREATE TABLE IF NOT EXISTS ab_knowledge_store (
            id SERIAL PRIMARY KEY,
            entity_type VARCHAR(32) NOT NULL,
            entity_key VARCHAR(128) NOT NULL,
            facts JSONB NOT NULL,
            updated_at TIMESTAMP DEFAULT NOW(),
            UNIQUE(entity_type, entity_key)
        )
    """)
    # -----------------------------------------------------------------------
    # Directive execution graph
    # One row per directive; JSONB arrays record agents, peer calls,
    # actions and escalations as the execution progresses.
    # -----------------------------------------------------------------------
    op.execute("""
        CREATE TABLE IF NOT EXISTS ab_directive_log (
            id SERIAL PRIMARY KEY,
            directive_id VARCHAR(64) UNIQUE NOT NULL,
            user_id INTEGER NOT NULL,
            channel_id INTEGER NOT NULL,
            raw_message TEXT NOT NULL,
            intent_summary TEXT,
            agents_involved JSONB DEFAULT '[]',
            peer_calls JSONB DEFAULT '[]',
            actions_taken JSONB DEFAULT '[]',
            escalations JSONB DEFAULT '[]',
            final_response TEXT,
            status VARCHAR(32) DEFAULT 'pending',
            started_at TIMESTAMP DEFAULT NOW(),
            completed_at TIMESTAMP,
            error TEXT
        )
    """)
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_dir_user
            ON ab_directive_log(user_id, started_at DESC)
    """)
    # Index for polling directives by lifecycle state (e.g. 'pending').
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_dir_status
            ON ab_directive_log(status)
    """)
    # -----------------------------------------------------------------------
    # Per-agent session state during execution
    # NOTE(review): directive_id is not a foreign key to ab_directive_log
    # and is unindexed — presumably intentional for insert speed; confirm.
    # -----------------------------------------------------------------------
    op.execute("""
        CREATE TABLE IF NOT EXISTS ab_agent_session (
            id SERIAL PRIMARY KEY,
            directive_id VARCHAR(64) NOT NULL,
            agent_name VARCHAR(64) NOT NULL,
            messages JSONB NOT NULL DEFAULT '[]',
            status VARCHAR(32) DEFAULT 'running',
            started_at TIMESTAMP DEFAULT NOW(),
            completed_at TIMESTAMP
        )
    """)
    # -----------------------------------------------------------------------
    # LLM backend config — runtime per-agent model routing
    # caller='__system__' stores the global privacy mode
    # -----------------------------------------------------------------------
    op.execute("""
        CREATE TABLE IF NOT EXISTS ab_llm_config (
            caller VARCHAR(64) PRIMARY KEY,
            backend VARCHAR(16) NOT NULL,
            set_by INTEGER,
            set_at TIMESTAMP DEFAULT NOW(),
            note TEXT
        )
    """)
    # Seed default privacy mode = 'local'
    # ON CONFLICT keeps an operator-changed value across re-runs.
    op.execute("""
        INSERT INTO ab_llm_config (caller, backend, note)
        VALUES ('__system__', 'local', 'Default privacy mode — local Ollama only')
        ON CONFLICT (caller) DO NOTHING
    """)
    # -----------------------------------------------------------------------
    # Rate limiting
    # Composite PK (user_id, window_start): one counter per user per window.
    # -----------------------------------------------------------------------
    op.execute("""
        CREATE TABLE IF NOT EXISTS ab_rate_limit (
            user_id INTEGER NOT NULL,
            window_start TIMESTAMP NOT NULL,
            request_count INTEGER DEFAULT 0,
            PRIMARY KEY (user_id, window_start)
        )
    """)
def downgrade() -> None:
    """Drop everything upgrade() created, in reverse creation order.

    Indexes are dropped explicitly before their tables for symmetry with
    upgrade(), even though dropping a table removes its indexes too.
    """
    drop_statements = (
        "DROP TABLE IF EXISTS ab_rate_limit",
        "DROP TABLE IF EXISTS ab_llm_config",
        "DROP TABLE IF EXISTS ab_agent_session",
        "DROP INDEX IF EXISTS idx_dir_status",
        "DROP INDEX IF EXISTS idx_dir_user",
        "DROP TABLE IF EXISTS ab_directive_log",
        "DROP TABLE IF EXISTS ab_knowledge_store",
        "DROP INDEX IF EXISTS idx_op_scope",
        "DROP TABLE IF EXISTS ab_operational_memory",
        "DROP INDEX IF EXISTS idx_conv_user",
        "DROP TABLE IF EXISTS ab_conversation_memory",
    )
    for statement in drop_statements:
        op.execute(statement)