author    Ben Sima <ben@bensima.com>  2025-12-13 00:57:06 -0500
committer Ben Sima <ben@bensima.com>  2025-12-13 00:57:06 -0500
commit    7d516a14552e1c531935cfee27fb5edbf81e3b82 (patch)
tree      9cf623827c8f59006626924256ffbe0ded2c5d9b /Omni
parent    42dec1ddd4e83957ad4c6747067eb6e8351d3a4d (diff)
telegram: add cheap pre-filter for group messages
Use Gemini Flash to classify group messages before running the full Sonnet agent. Skips casual banter to save tokens/cost.

- shouldEngageInGroup: yes/no classifier using gemini-2.0-flash
- Only runs for group chats, private chats skip the filter
- On classifier failure, defaults to engaging (fail-open)
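For orientation, a minimal self-contained sketch of the gating shape described above. `ChatKind`, `preFilteredHandler`, `classify`, and `runFullAgent` are illustrative stand-ins, not identifiers from this patch; the real code uses Types.isGroupChat, shouldEngageInGroup, and processEngagedMessage, shown in the diff below.

import Data.Text (Text)

-- Illustrative stand-in for the group/private distinction (Types.isGroupChat in the patch).
data ChatKind = GroupChat | PrivateChat

-- Gate the expensive agent pipeline behind a cheap classifier.
-- `classify` stands in for shouldEngageInGroup (Gemini Flash, fail-open);
-- `runFullAgent` stands in for the existing save/context/Engine steps.
preFilteredHandler ::
  (Text -> IO Bool) -> -- cheap pre-filter
  (Text -> IO ()) -> -- full agent pipeline
  ChatKind ->
  Text ->
  IO ()
preFilteredHandler classify runFullAgent kind userMessage = do
  shouldEngage <- case kind of
    PrivateChat -> pure True -- private chats skip the filter entirely
    GroupChat -> classify userMessage -- group chats ask the classifier first
  if shouldEngage
    then runFullAgent userMessage
    else putStrLn "Skipping group message (pre-filter said no)"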
Diffstat (limited to 'Omni')
-rw-r--r--  Omni/Agent/Telegram.hs | 68
1 file changed, 65 insertions, 3 deletions
diff --git a/Omni/Agent/Telegram.hs b/Omni/Agent/Telegram.hs
index f8afcb7..27d7413 100644
--- a/Omni/Agent/Telegram.hs
+++ b/Omni/Agent/Telegram.hs
@@ -433,11 +433,35 @@ handleAuthorizedMessage tgConfig provider engineCfg msg uid userName chatId = do
  let userMessage = replyContext <> baseMessage
-  _ <- Memory.saveMessage uid chatId Memory.UserRole (Just userName) userMessage
+  shouldEngage <-
+    if Types.isGroupChat msg
+      then do
+        putText "Checking if should engage (group chat)..."
+        shouldEngageInGroup (Types.tgOpenRouterApiKey tgConfig) userMessage
+      else pure True
-  (conversationContext, contextTokens) <- Memory.getConversationContext uid chatId maxConversationTokens
-  putText <| "Conversation context: " <> tshow contextTokens <> " tokens"
+  if not shouldEngage
+    then putText "Skipping group message (pre-filter said no)"
+    else do
+      _ <- Memory.saveMessage uid chatId Memory.UserRole (Just userName) userMessage
+      (conversationContext, contextTokens) <- Memory.getConversationContext uid chatId maxConversationTokens
+      putText <| "Conversation context: " <> tshow contextTokens <> " tokens"
+
+      processEngagedMessage tgConfig provider engineCfg msg uid userName chatId userMessage conversationContext
+
+processEngagedMessage ::
+  Types.TelegramConfig ->
+  Provider.Provider ->
+  Engine.EngineConfig ->
+  Types.TelegramMessage ->
+  Text ->
+  Text ->
+  Int ->
+  Text ->
+  Text ->
+  IO ()
+processEngagedMessage tgConfig provider engineCfg msg uid userName chatId userMessage conversationContext = do
  memories <- Memory.recallMemories uid userMessage 5
  let memoryContext = Memory.formatMemoriesForPrompt memories
@@ -563,6 +587,44 @@ checkAndSummarize openRouterKey uid chatId = do
_ <- Memory.summarizeAndArchive uid chatId summary
putText "Conversation summarized and archived (gemini)"
+shouldEngageInGroup :: Text -> Text -> IO Bool
+shouldEngageInGroup openRouterKey messageText = do
+  let gemini = Provider.defaultOpenRouter openRouterKey "google/gemini-2.0-flash-001"
+  result <-
+    Provider.chat
+      gemini
+      []
+      [ Provider.Message
+          Provider.System
+          ( Text.unlines
+              [ "You are a classifier that decides if an AI assistant should respond to a message in a group chat.",
+                "Respond with ONLY 'yes' or 'no' (lowercase, nothing else).",
+                "",
+                "Say 'yes' if:",
+                "- The message is a direct question the assistant could answer",
+                "- The message contains a factual error worth correcting",
+                "- The message mentions the bot or asks for help",
+                "- The message shares a link or document to analyze",
+                "",
+                "Say 'no' if:",
+                "- It's casual banter or chit-chat between people",
+                "- It's a greeting or farewell",
+                "- It's an inside joke or personal conversation",
+                "- It doesn't require or benefit from bot input"
+              ]
+          )
+          Nothing
+          Nothing,
+        Provider.Message Provider.User messageText Nothing Nothing
+      ]
+  case result of
+    Left err -> do
+      putText <| "Engagement check failed: " <> err
+      pure True
+    Right msg -> do
+      let response = Text.toLower (Text.strip (Provider.msgContent msg))
+      pure (response == "yes" || response == "y")
+
checkOllama :: IO (Either Text ())
checkOllama = do
ollamaUrl <- fromMaybe "http://localhost:11434" </ lookupEnv "OLLAMA_URL"
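For reference, a hedged sketch of the reply handling that shouldEngageInGroup implements. `interpretReply` is a hypothetical helper written only to illustrate the fail-open behaviour, not a function added by this patch.

{-# LANGUAGE OverloadedStrings #-}

import Data.Text (Text)
import qualified Data.Text as Text

-- Hypothetical helper mirroring the tail of shouldEngageInGroup above:
-- a classifier error (Left) engages anyway (fail-open); on success only an
-- explicit "yes"/"y" reply engages, anything else skips the message.
interpretReply :: Either Text Text -> Bool
interpretReply (Left _err) = True
interpretReply (Right reply) =
  let response = Text.toLower (Text.strip reply)
   in response == "yes" || response == "y"

-- interpretReply (Right "Yes")     == True   -- case-insensitive match
-- interpretReply (Right "no")      == False  -- classifier declined
-- interpretReply (Right "maybe?")  == False  -- unexpected output skips
-- interpretReply (Left "HTTP 429") == True   -- fail-open on classifier failure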