Files
telegram-scam-baiter/app/ai.py
Cameron Grant 6ce7117cc4
All checks were successful
continuous-integration/drone/push Build is passing
Correctly handles multiple messages.
2025-10-06 16:12:38 -07:00

51 lines
1.9 KiB
Python

import asyncio
import logging
import time

from openai import OpenAI

from . import config, prompt
# Module-level OpenAI client shared by all calls; the API key comes from config.
_client = OpenAI(api_key=config.OPENAI_API_KEY)
async def _build_messages_from_telegram(client, entity, system_prompt: str, short: bool):
    """Build an OpenAI chat-completion message list from recent Telegram history.

    Args:
        client: Connected Telegram client used to fetch message history.
        entity: Telegram chat/user whose history is pulled.
        system_prompt: Text for the leading ``system`` message.
        short: When True, append the tighter early-small-talk nudge instead of
            the standard user nudge.

    Returns:
        A list of ``{"role": ..., "content": ...}`` dicts: system prompt,
        then history oldest -> newest, then a trailing nudge message.

    Token accounting is a rough heuristic (~4 characters per token) to keep
    the context within ``config.MAX_TOKENS_HISTORY``.
    """
    MAX_FETCH = max(20, getattr(config, "MAX_MESSAGES_HISTORY", 40))
    # NOTE(review): assumes get_messages returns newest-first (Telethon's
    # default ordering) — confirm against the client in use.
    msgs = await client.get_messages(entity, limit=MAX_FETCH)

    budget = getattr(config, "MAX_TOKENS_HISTORY", 2000)
    total_tokens_est = max(1, len(system_prompt) // 4)

    # Always keep at least this many history messages before enforcing the
    # budget (mirrors the original "len(chat_msgs) > 10" allowance:
    # system message + 10 history entries).
    MIN_KEEP = 10

    # Walk newest -> oldest so that when the token budget runs out we drop
    # the OLDEST messages, never the most recent ones the reply must address.
    # (Previously the walk was oldest -> newest, which silently discarded the
    # latest messages once the budget was exhausted.)
    history = []  # collected newest-first; reversed below for the API
    for m in msgs:
        text = (getattr(m, "message", None) or "").strip()
        if not text:
            continue  # skip media-only / empty messages
        add = max(1, len(text) // 4)
        if total_tokens_est + add > budget and len(history) >= MIN_KEEP:
            break  # budget exhausted; everything older is dropped
        total_tokens_est += add
        # Outgoing messages are ours -> "assistant"; incoming -> "user".
        role = "assistant" if getattr(m, "out", False) else "user"
        history.append({"role": role, "content": text})

    chat_msgs = [{"role": "system", "content": system_prompt}]
    chat_msgs.extend(reversed(history))  # oldest -> newest

    # Use a tighter nudge for early small-talk
    nudge = prompt.EARLY_NUDGE if short else prompt.USER_NUDGE
    chat_msgs.append({"role": "user", "content": nudge})
    return chat_msgs
async def generate_reply(client, entity, short: bool = False) -> str:
    """Generate an AI reply for *entity* from its recent Telegram history.

    Args:
        client: Connected Telegram client used to fetch history.
        entity: Telegram chat/user to reply to.
        short: When True, request a shorter, small-talk style reply
            (tighter nudge and a smaller max_tokens cap).

    Returns:
        The model's reply text, stripped of surrounding whitespace, or ""
        if the model returned no content.
    """
    msgs = await _build_messages_from_telegram(client, entity, prompt.SYSTEM_PROMPT, short)
    # The OpenAI SDK client here is synchronous; run the request in a worker
    # thread so the network round-trip doesn't block the event loop.
    resp = await asyncio.to_thread(
        _client.chat.completions.create,
        model=config.OPENAI_MODEL,
        messages=msgs,
        temperature=0.8,
        max_tokens=80 if short else 180,
        presence_penalty=0.3,
        frequency_penalty=0.2,
    )
    content = resp.choices[0].message.content
    return content.strip() if content else ""