Initial implementation of IRC LLM bot
Full implementation from spec: ZNC/IRC client with TLS, Ollama LLM backend, per-user SQLite conversation memory, and Flask web admin portal with 7 pages. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
99
bot/message_handler.py
Normal file
99
bot/message_handler.py
Normal file
@@ -0,0 +1,99 @@
|
||||
import logging
|
||||
import re
|
||||
from collections import deque
|
||||
|
||||
from bot import memory as mem
|
||||
from bot import llm_client
|
||||
|
||||
# Module-level logger named after this module's import path (stdlib convention).
logger = logging.getLogger(__name__)
|
||||
# Per-channel rolling context buffer: {channel: deque}
|
||||
_context_buffers: dict[str, deque] = {}
|
||||
|
||||
|
||||
def _get_context(channel: str, window: int) -> deque:
|
||||
if channel not in _context_buffers:
|
||||
_context_buffers[channel] = deque(maxlen=window)
|
||||
else:
|
||||
_context_buffers[channel] = deque(_context_buffers[channel], maxlen=window)
|
||||
return _context_buffers[channel]
|
||||
|
||||
|
||||
def _extract_query(text: str, bot_nick: str, trigger_prefix: str | None,
                   trigger_on_nick: bool) -> str | None:
    """Return the query text if *text* addresses the bot, else None.

    A message triggers the bot either by a leading nick address
    ("botnick: ..." / "botnick, ...", case-insensitive) or, failing that,
    by a configured prefix string. The trigger marker is stripped from
    the returned text.
    """
    if trigger_on_nick:
        m = re.match(rf"^{re.escape(bot_nick)}\s*[:,]\s*", text, re.IGNORECASE)
        if m:
            return text[m.end():].strip()
    if trigger_prefix and text.startswith(trigger_prefix):
        return text[len(trigger_prefix):].strip()
    return None


def handle_privmsg(nick: str, channel: str, text: str, config: dict) -> str | None:
    """Handle an incoming channel message.

    Returns a reply string if the bot should respond, else None.

    Side effects: appends every non-ignored message to the channel's
    rolling context buffer, and (when memory is enabled) persists the
    user's exchange via the memory module.

    Args:
        nick: Sender's IRC nick.
        channel: Channel the message arrived on.
        text: Raw message text.
        config: Bot configuration dict (trigger, memory, LLM settings).
    """
    window = config.get("context_window", 5)
    ctx = _get_context(channel, window)

    # Drop messages from ignored nicks entirely (not even added to context).
    ignored = [n.lower() for n in config.get("ignored_nicks", [])]
    if nick.lower() in ignored:
        return None

    bot_nick = config.get("bot_nick", "avcbot").lower()
    trigger_prefix = config.get("trigger_prefix")
    trigger_on_nick = config.get("trigger_on_nick", True)

    # "botnick: forget me" wipes the user's persisted history; checked
    # before the normal trigger logic so it never reaches the LLM.
    forget_pattern = re.compile(
        rf"^{re.escape(bot_nick)}\s*[:,]\s*forget\s+me\s*$", re.IGNORECASE
    )
    if forget_pattern.match(text.strip()):
        mem.delete_user_history(channel, nick)
        # Lazy %-style args: no string formatting unless the record is emitted.
        logger.info("[MEMORY] Forgot history for %s in %s", nick, channel)
        return f"{nick}: Done, I've cleared your history."

    # None means "not addressed to the bot"; an empty string still triggers.
    stripped = _extract_query(text, bot_nick, trigger_prefix, trigger_on_nick)

    # Every non-ignored message feeds the rolling context, triggered or not.
    ctx.append(f"<{nick}> {text}")

    if stripped is None:
        return None

    # Load persisted per-user history only when memory is enabled.
    history: list = []
    if config.get("memory_enabled", True):
        limit = config.get("memory_history_limit", 8)
        history = mem.load_history(channel, nick, limit)

    prompt = llm_client.build_prompt(
        user_message=stripped,
        nick=nick,
        persistent_history=history,
        context_buffer=list(ctx)[:-1],  # exclude the current message already in buffer
    )

    system = config.get(
        "system_prompt",
        "You are a helpful IRC assistant for Active Blue. Keep responses concise and under 3 sentences when possible.",
    )

    logger.info("[LLM] Request from %s in %s: %s", nick, channel, stripped[:80])

    try:
        reply = llm_client.generate(prompt, system, config)
    except TimeoutError:
        return f"{nick}: [LLM timeout — try again]"
    except Exception as e:
        logger.error("[LLM] Generation error: %s", e)
        return f"{nick}: [LLM error — check logs]"

    if config.get("memory_enabled", True):
        mem.save_exchange(channel, nick, stripped, reply)

    return f"{nick}: {reply}"
|
||||
Reference in New Issue
Block a user