author     Ben Sima <ben@bensima.com>  2025-12-17 13:29:40 -0500
committer  Ben Sima <ben@bensima.com>  2025-12-17 13:29:40 -0500
commit     ab01b34bf563990e0f491ada646472aaade97610 (patch)
tree       5e46a1a157bb846b0c3a090a83153c788da2b977 /Omni
parent     e112d3ce07fa24f31a281e521a554cc881a76c7b (diff)
parent     337648981cc5a55935116141341521f4fce83214 (diff)
Merge Ava deployment changes
Diffstat (limited to 'Omni')
-rw-r--r--  Omni/Agent/Core.hs  14
-rw-r--r--  Omni/Agent/Engine.hs  414
-rw-r--r--  Omni/Agent/Memory.hs  1575
-rw-r--r--  Omni/Agent/PLAN.md  589
-rw-r--r--  Omni/Agent/Paths.hs  39
-rw-r--r--  Omni/Agent/Provider.hs  695
-rw-r--r--  Omni/Agent/Skills.hs  417
-rw-r--r--  Omni/Agent/Status.hs (renamed from Omni/Agent/Log.hs)  15
-rw-r--r--  Omni/Agent/Subagent.hs  516
-rw-r--r--  Omni/Agent/Subagent/DESIGN.md  352
-rw-r--r--  Omni/Agent/Telegram.hs  1372
-rw-r--r--  Omni/Agent/Telegram/IncomingQueue.hs  228
-rw-r--r--  Omni/Agent/Telegram/Media.hs  327
-rw-r--r--  Omni/Agent/Telegram/Messages.hs  551
-rw-r--r--  Omni/Agent/Telegram/Reminders.hs  108
-rw-r--r--  Omni/Agent/Telegram/Types.hs  654
-rw-r--r--  Omni/Agent/Tools/Calendar.hs  322
-rw-r--r--  Omni/Agent/Tools/Email.hs  675
-rw-r--r--  Omni/Agent/Tools/Feedback.hs  204
-rw-r--r--  Omni/Agent/Tools/Hledger.hs  489
-rw-r--r--  Omni/Agent/Tools/Http.hs  338
-rw-r--r--  Omni/Agent/Tools/Notes.hs  357
-rw-r--r--  Omni/Agent/Tools/Outreach.hs  513
-rw-r--r--  Omni/Agent/Tools/Pdf.hs  180
-rw-r--r--  Omni/Agent/Tools/Python.hs  217
-rw-r--r--  Omni/Agent/Tools/Todos.hs  527
-rw-r--r--  Omni/Agent/Tools/WebReader.hs  308
-rw-r--r--  Omni/Agent/Tools/WebReaderTest.hs  53
-rw-r--r--  Omni/Agent/Tools/WebSearch.hs  212
-rw-r--r--  Omni/Agent/Worker.hs  33
-rwxr-xr-x  Omni/Ava.hs  66
-rw-r--r--  Omni/Bild.hs  60
-rw-r--r--  Omni/Bild.nix  4
-rw-r--r--  Omni/Bild/Builder.nix  3
-rw-r--r--  Omni/Bild/Deps/Haskell.nix  3
-rw-r--r--  Omni/Bild/Haskell.nix  2
-rwxr-xr-x  Omni/Bot.scm  61
-rw-r--r--  Omni/Deploy/Caddy.hs  241
-rw-r--r--  Omni/Deploy/Deployer.hs  317
-rw-r--r--  Omni/Deploy/Deployer.nix  104
-rw-r--r--  Omni/Deploy/Manifest.hs  673
-rw-r--r--  Omni/Deploy/PLAN.md  299
-rw-r--r--  Omni/Deploy/Packages.nix  11
-rw-r--r--  Omni/Deploy/README.md  211
-rw-r--r--  Omni/Deploy/Systemd.hs  269
-rwxr-xr-x  Omni/Dev/Beryllium.nix  1
-rw-r--r--  Omni/Dev/Beryllium/AVA.md  111
-rw-r--r--  Omni/Dev/Beryllium/Ava.nix  48
-rwxr-xr-x  Omni/Dev/Beryllium/migrate-ava.sh  102
-rwxr-xr-x  Omni/Ide/push.sh  169
-rwxr-xr-x  Omni/Jr.hs  15
-rw-r--r--  Omni/Keys/Ava.pub  1
-rw-r--r--  Omni/Log.hs  44
-rw-r--r--  Omni/Os/Base.nix  13
-rw-r--r--  Omni/Users.nix  7
55 files changed, 14969 insertions, 160 deletions
diff --git a/Omni/Agent/Core.hs b/Omni/Agent/Core.hs
index 88f7237..fb4a4b3 100644
--- a/Omni/Agent/Core.hs
+++ b/Omni/Agent/Core.hs
@@ -6,6 +6,17 @@ module Omni.Agent.Core where
import Alpha
import Data.Aeson (FromJSON, ToJSON)
+-- | Engine/provider selection for agent
+data EngineType
+ = EngineOpenRouter
+ | EngineOllama
+ | EngineAmp
+ deriving (Show, Eq, Generic)
+
+instance ToJSON EngineType
+
+instance FromJSON EngineType
+
-- | Status of a worker agent
data WorkerStatus
= Idle
@@ -28,7 +39,8 @@ data Worker = Worker
workerPid :: Maybe Int,
workerStatus :: WorkerStatus,
workerPath :: FilePath,
- workerQuiet :: Bool -- Disable ANSI status bar (for loop mode)
+ workerQuiet :: Bool, -- Disable ANSI status bar (for loop mode)
+ workerEngine :: EngineType -- Which LLM backend to use
}
deriving (Show, Eq, Generic)
diff --git a/Omni/Agent/Engine.hs b/Omni/Agent/Engine.hs
index 4ee5e5d..f137ddb 100644
--- a/Omni/Agent/Engine.hs
+++ b/Omni/Agent/Engine.hs
@@ -1,4 +1,5 @@
{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
@@ -30,12 +31,16 @@ module Omni.Agent.Engine
ChatCompletionResponse (..),
Choice (..),
Usage (..),
+ ToolApi (..),
+ encodeToolForApi,
defaultLLM,
defaultEngineConfig,
defaultAgentConfig,
defaultGuardrails,
chat,
runAgent,
+ runAgentWithProvider,
+ runAgentWithProviderStreaming,
main,
test,
)
@@ -47,10 +52,12 @@ import qualified Data.Aeson as Aeson
import qualified Data.Aeson.KeyMap as KeyMap
import qualified Data.ByteString.Lazy as BL
import qualified Data.CaseInsensitive as CI
+import Data.IORef (newIORef, writeIORef)
import qualified Data.Map.Strict as Map
import qualified Data.Text as Text
import qualified Data.Text.Encoding as TE
import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Agent.Provider as Provider
import qualified Omni.Test as Test
main :: IO ()
@@ -264,6 +271,14 @@ encodeToolForApi t =
toolApiParameters = toolJsonSchema t
}
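+-- | Convert an engine 'Tool' into the wire-level 'Provider.ToolApi' shape
+-- (name, description, JSON schema) expected by the provider backends.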
+encodeToolForProvider :: Tool -> Provider.ToolApi
+encodeToolForProvider t =
+ Provider.ToolApi
+ { Provider.toolApiName = toolName t,
+ Provider.toolApiDescription = toolDescription t,
+ Provider.toolApiParameters = toolJsonSchema t
+ }
+
data LLM = LLM
{ llmBaseUrl :: Text,
llmApiKey :: Text,
@@ -655,18 +670,24 @@ runAgent engineCfg agentCfg userPrompt = do
unless (Text.null assistantText)
<| engineOnAssistant engineCfg assistantText
case msgToolCalls msg of
- Nothing -> do
- engineOnActivity engineCfg "Agent completed"
- engineOnComplete engineCfg
- pure
- <| Right
- <| AgentResult
- { resultFinalMessage = msgContent msg,
- resultToolCallCount = totalCalls,
- resultIterations = iteration + 1,
- resultTotalCost = newCost,
- resultTotalTokens = newTokens
- }
+ Nothing
+ | Text.null (msgContent msg) && totalCalls > 0 -> do
+ engineOnActivity engineCfg "Empty response after tools, prompting for text"
+ let promptMsg = Message ToolRole "Please provide a response to the user." Nothing Nothing
+ newMsgs = msgs <> [msg, promptMsg]
+ loop llm tools' toolMap newMsgs (iteration + 1) totalCalls newTokens newCost toolCallCounts testFailures editFailures
+ | otherwise -> do
+ engineOnActivity engineCfg "Agent completed"
+ engineOnComplete engineCfg
+ pure
+ <| Right
+ <| AgentResult
+ { resultFinalMessage = msgContent msg,
+ resultToolCallCount = totalCalls,
+ resultIterations = iteration + 1,
+ resultTotalCost = newCost,
+ resultTotalTokens = newTokens
+ }
Just [] -> do
engineOnActivity engineCfg "Agent completed (empty tool calls)"
engineOnComplete engineCfg
@@ -801,11 +822,368 @@ executeToolCallsWithTracking engineCfg toolMap tcs initialTestFailures initialEd
_ -> False
isOldStrNotFoundError _ = False
--- | Estimate cost in cents from token count
+-- | Estimate cost in cents from token count.
+-- Uses blended input/output rates (roughly 2:1 output:input ratio).
+-- Prices as of Dec 2024 from OpenRouter.
estimateCost :: Text -> Int -> Double
estimateCost model tokens
- | "gpt-4o-mini" `Text.isInfixOf` model = fromIntegral tokens * 15 / 1000000
- | "gpt-4o" `Text.isInfixOf` model = fromIntegral tokens * 250 / 100000
- | "gpt-4" `Text.isInfixOf` model = fromIntegral tokens * 3 / 100000
- | "claude" `Text.isInfixOf` model = fromIntegral tokens * 3 / 100000
- | otherwise = fromIntegral tokens / 100000
+ | "gpt-4o-mini" `Text.isInfixOf` model = fromIntegral tokens * 0.04 / 1000
+ | "gpt-4o" `Text.isInfixOf` model = fromIntegral tokens * 0.7 / 1000
+ | "gemini-2.0-flash" `Text.isInfixOf` model = fromIntegral tokens * 0.15 / 1000
+ | "gemini-2.5-flash" `Text.isInfixOf` model = fromIntegral tokens * 0.15 / 1000
+ | "claude-sonnet-4.5" `Text.isInfixOf` model = fromIntegral tokens * 0.9 / 1000
+ | "claude-sonnet-4" `Text.isInfixOf` model = fromIntegral tokens * 0.9 / 1000
+ | "claude-3-haiku" `Text.isInfixOf` model = fromIntegral tokens * 0.1 / 1000
+ | "claude" `Text.isInfixOf` model = fromIntegral tokens * 0.9 / 1000
+ | otherwise = fromIntegral tokens * 0.5 / 1000
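+
+-- A worked example of the blended estimate above: a 10,000-token exchange on a
+-- "gpt-4o-mini" model is costed at 10000 * 0.04 / 1000 = 0.4 cents, while the
+-- same exchange on a "claude-sonnet-4" model is 10000 * 0.9 / 1000 = 9 cents.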
+
+-- | Run agent with a Provider instead of LLM.
+-- This is the new preferred way to run agents with multiple backend support.
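+--
+-- A minimal call-site sketch (the @provider@ value here is assumed to be
+-- constructed elsewhere from "Omni.Agent.Provider"):
+--
+-- > result <- runAgentWithProvider defaultEngineConfig provider defaultAgentConfig "hello"
+-- > case result of
+-- >   Left err -> print err
+-- >   Right r -> print (resultFinalMessage r)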
+runAgentWithProvider :: EngineConfig -> Provider.Provider -> AgentConfig -> Text -> IO (Either Text AgentResult)
+runAgentWithProvider engineCfg provider agentCfg userPrompt = do
+ let tools = agentTools agentCfg
+ toolApis = map encodeToolForProvider tools
+ toolMap = buildToolMap tools
+ systemMsg = providerMessage Provider.System (agentSystemPrompt agentCfg)
+ userMsg = providerMessage Provider.User userPrompt
+ initialMessages = [systemMsg, userMsg]
+
+ engineOnActivity engineCfg "Starting agent loop (Provider)"
+ loopProvider provider toolApis toolMap initialMessages 0 0 0 0.0 Map.empty 0 0
+ where
+ maxIter = agentMaxIterations agentCfg
+ guardrails' = agentGuardrails agentCfg
+
+ providerMessage :: Provider.Role -> Text -> Provider.Message
+ providerMessage role content = Provider.Message role content Nothing Nothing
+
+ loopProvider ::
+ Provider.Provider ->
+ [Provider.ToolApi] ->
+ Map.Map Text Tool ->
+ [Provider.Message] ->
+ Int ->
+ Int ->
+ Int ->
+ Double ->
+ Map.Map Text Int ->
+ Int ->
+ Int ->
+ IO (Either Text AgentResult)
+ loopProvider prov toolApis' toolMap msgs iteration totalCalls totalTokens totalCost toolCallCounts testFailures editFailures
+ | iteration >= maxIter = do
+ let errMsg = "Max iterations (" <> tshow maxIter <> ") reached"
+ engineOnError engineCfg errMsg
+ pure <| Left errMsg
+ | otherwise = do
+ let guardrailViolation = findGuardrailViolation guardrails' totalCost totalTokens toolCallCounts testFailures editFailures
+ case guardrailViolation of
+ Just (g, errMsg) -> do
+ engineOnGuardrail engineCfg g
+ pure <| Left errMsg
+ Nothing -> do
+ engineOnActivity engineCfg <| "Iteration " <> tshow (iteration + 1)
+ result <- Provider.chatWithUsage prov toolApis' msgs
+ case result of
+ Left err -> do
+ engineOnError engineCfg err
+ pure (Left err)
+ Right chatRes -> do
+ let msg = Provider.chatMessage chatRes
+ tokens = maybe 0 Provider.usageTotalTokens (Provider.chatUsage chatRes)
+ cost = case Provider.chatUsage chatRes +> Provider.usageCost of
+ Just actualCost -> actualCost * 100
+ Nothing -> estimateCost (getProviderModel prov) tokens
+ engineOnCost engineCfg tokens cost
+ let newTokens = totalTokens + tokens
+ newCost = totalCost + cost
+ let assistantText = Provider.msgContent msg
+ unless (Text.null assistantText)
+ <| engineOnAssistant engineCfg assistantText
+ case Provider.msgToolCalls msg of
+ Nothing
+ | Text.null (Provider.msgContent msg) && totalCalls > 0 -> do
+ engineOnActivity engineCfg "Empty response after tools, prompting for text"
+ let promptMsg = Provider.Message Provider.ToolRole "Please provide a response to the user." Nothing Nothing
+ newMsgs = msgs <> [msg, promptMsg]
+ loopProvider prov toolApis' toolMap newMsgs (iteration + 1) totalCalls newTokens newCost toolCallCounts testFailures editFailures
+ | otherwise -> do
+ engineOnActivity engineCfg "Agent completed"
+ engineOnComplete engineCfg
+ pure
+ <| Right
+ <| AgentResult
+ { resultFinalMessage = Provider.msgContent msg,
+ resultToolCallCount = totalCalls,
+ resultIterations = iteration + 1,
+ resultTotalCost = newCost,
+ resultTotalTokens = newTokens
+ }
+ Just [] -> do
+ engineOnActivity engineCfg "Agent completed (empty tool calls)"
+ engineOnComplete engineCfg
+ pure
+ <| Right
+ <| AgentResult
+ { resultFinalMessage = Provider.msgContent msg,
+ resultToolCallCount = totalCalls,
+ resultIterations = iteration + 1,
+ resultTotalCost = newCost,
+ resultTotalTokens = newTokens
+ }
+ Just tcs -> do
+ (toolResults, newTestFailures, newEditFailures) <- executeProviderToolCalls engineCfg toolMap tcs testFailures editFailures
+ let newMsgs = msgs <> [msg] <> toolResults
+ newCalls = totalCalls + length tcs
+ newToolCallCounts = updateProviderToolCallCounts toolCallCounts tcs
+ loopProvider prov toolApis' toolMap newMsgs (iteration + 1) newCalls newTokens newCost newToolCallCounts newTestFailures newEditFailures
+
+ getProviderModel :: Provider.Provider -> Text
+ getProviderModel (Provider.OpenRouter cfg) = Provider.providerModel cfg
+ getProviderModel (Provider.Ollama cfg) = Provider.providerModel cfg
+ getProviderModel (Provider.AmpCLI _) = "amp"
+
+ updateProviderToolCallCounts :: Map.Map Text Int -> [Provider.ToolCall] -> Map.Map Text Int
+ updateProviderToolCallCounts =
+ foldr (\tc m -> Map.insertWith (+) (Provider.fcName (Provider.tcFunction tc)) 1 m)
+
+ executeProviderToolCalls :: EngineConfig -> Map.Map Text Tool -> [Provider.ToolCall] -> Int -> Int -> IO ([Provider.Message], Int, Int)
+ executeProviderToolCalls eCfg tMap tcs initialTestFailures initialEditFailures = do
+ results <- traverse (executeSingleProvider eCfg tMap) tcs
+ let msgs = map (\(m, _, _) -> m) results
+ testDeltas = map (\(_, t, _) -> t) results
+ editDeltas = map (\(_, _, e) -> e) results
+ totalTestFail = initialTestFailures + sum testDeltas
+ totalEditFail = initialEditFailures + sum editDeltas
+ pure (msgs, totalTestFail, totalEditFail)
+
+ executeSingleProvider :: EngineConfig -> Map.Map Text Tool -> Provider.ToolCall -> IO (Provider.Message, Int, Int)
+ executeSingleProvider eCfg tMap tc = do
+ let name = Provider.fcName (Provider.tcFunction tc)
+ argsText = Provider.fcArguments (Provider.tcFunction tc)
+ callId = Provider.tcId tc
+ engineOnActivity eCfg <| "Executing tool: " <> name
+ engineOnToolCall eCfg name argsText
+ case Map.lookup name tMap of
+ Nothing -> do
+ let errMsg = "Tool not found: " <> name
+ engineOnToolResult eCfg name False errMsg
+ pure (Provider.Message Provider.ToolRole errMsg Nothing (Just callId), 0, 0)
+ Just tool -> do
+ case Aeson.decode (BL.fromStrict (TE.encodeUtf8 argsText)) of
+ Nothing -> do
+ let errMsg = "Invalid JSON arguments: " <> argsText
+ engineOnToolResult eCfg name False errMsg
+ pure (Provider.Message Provider.ToolRole errMsg Nothing (Just callId), 0, 0)
+ Just args -> do
+ resultValue <- toolExecute tool args
+ let resultText = TE.decodeUtf8 (BL.toStrict (Aeson.encode resultValue))
+ isTestCall = name == "bash" && ("bild --test" `Text.isInfixOf` argsText || "bild -t" `Text.isInfixOf` argsText)
+ isTestFailure = isTestCall && isFailureResultProvider resultValue
+ testDelta = if isTestFailure then 1 else 0
+ isEditFailure = name == "edit_file" && isOldStrNotFoundProvider resultValue
+ editDelta = if isEditFailure then 1 else 0
+ engineOnToolResult eCfg name True resultText
+ pure (Provider.Message Provider.ToolRole resultText Nothing (Just callId), testDelta, editDelta)
+
+ isFailureResultProvider :: Aeson.Value -> Bool
+ isFailureResultProvider (Aeson.Object obj) =
+ case KeyMap.lookup "exit_code" obj of
+ Just (Aeson.Number n) -> n /= 0
+ _ -> False
+ isFailureResultProvider (Aeson.String s) =
+ "error"
+ `Text.isInfixOf` Text.toLower s
+ || "failed"
+ `Text.isInfixOf` Text.toLower s
+ || "FAILED"
+ `Text.isInfixOf` s
+ isFailureResultProvider _ = False
+
+ isOldStrNotFoundProvider :: Aeson.Value -> Bool
+ isOldStrNotFoundProvider (Aeson.Object obj) =
+ case KeyMap.lookup "error" obj of
+ Just (Aeson.String s) -> "old_str not found" `Text.isInfixOf` s
+ _ -> False
+ isOldStrNotFoundProvider _ = False
+
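+-- | Like 'runAgentWithProvider', but streams assistant output as it arrives:
+-- each content chunk from the provider is passed to the supplied callback,
+-- while tool calls, guardrails, and cost tracking follow the same loop as the
+-- non-streaming version.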
+runAgentWithProviderStreaming ::
+ EngineConfig ->
+ Provider.Provider ->
+ AgentConfig ->
+ Text ->
+ (Text -> IO ()) ->
+ IO (Either Text AgentResult)
+runAgentWithProviderStreaming engineCfg provider agentCfg userPrompt onStreamChunk = do
+ let tools = agentTools agentCfg
+ toolApis = map encodeToolForProvider tools
+ toolMap = buildToolMap tools
+ systemMsg = providerMessage Provider.System (agentSystemPrompt agentCfg)
+ userMsg = providerMessage Provider.User userPrompt
+ initialMessages = [systemMsg, userMsg]
+
+ engineOnActivity engineCfg "Starting agent loop (Provider+Streaming)"
+ loopProviderStreaming provider toolApis toolMap initialMessages 0 0 0 0.0 Map.empty 0 0
+ where
+ maxIter = agentMaxIterations agentCfg
+ guardrails' = agentGuardrails agentCfg
+
+ providerMessage :: Provider.Role -> Text -> Provider.Message
+ providerMessage role content = Provider.Message role content Nothing Nothing
+
+ loopProviderStreaming ::
+ Provider.Provider ->
+ [Provider.ToolApi] ->
+ Map.Map Text Tool ->
+ [Provider.Message] ->
+ Int ->
+ Int ->
+ Int ->
+ Double ->
+ Map.Map Text Int ->
+ Int ->
+ Int ->
+ IO (Either Text AgentResult)
+ loopProviderStreaming prov toolApis' toolMap msgs iteration totalCalls totalTokens totalCost toolCallCounts testFailures editFailures
+ | iteration >= maxIter = do
+ let errMsg = "Max iterations (" <> tshow maxIter <> ") reached"
+ engineOnError engineCfg errMsg
+ pure <| Left errMsg
+ | otherwise = do
+ let guardrailViolation = findGuardrailViolation guardrails' totalCost totalTokens toolCallCounts testFailures editFailures
+ case guardrailViolation of
+ Just (g, errMsg) -> do
+ engineOnGuardrail engineCfg g
+ pure <| Left errMsg
+ Nothing -> do
+ engineOnActivity engineCfg <| "Iteration " <> tshow (iteration + 1)
+ hasToolCalls <- newIORef False
+ result <-
+ Provider.chatStream prov toolApis' msgs <| \case
+ Provider.StreamContent txt -> onStreamChunk txt
+ Provider.StreamToolCall _ -> writeIORef hasToolCalls True
+ Provider.StreamToolCallDelta _ -> writeIORef hasToolCalls True
+ Provider.StreamError err -> engineOnError engineCfg err
+ Provider.StreamDone _ -> pure ()
+ case result of
+ Left err -> do
+ engineOnError engineCfg err
+ pure (Left err)
+ Right chatRes -> do
+ let msg = Provider.chatMessage chatRes
+ tokens = maybe 0 Provider.usageTotalTokens (Provider.chatUsage chatRes)
+ cost = case Provider.chatUsage chatRes +> Provider.usageCost of
+ Just actualCost -> actualCost * 100
+ Nothing -> estimateCost (getProviderModelStreaming prov) tokens
+ engineOnCost engineCfg tokens cost
+ let newTokens = totalTokens + tokens
+ newCost = totalCost + cost
+ let assistantText = Provider.msgContent msg
+ unless (Text.null assistantText)
+ <| engineOnAssistant engineCfg assistantText
+ case Provider.msgToolCalls msg of
+ Nothing
+ | Text.null (Provider.msgContent msg) && totalCalls > 0 -> do
+ engineOnActivity engineCfg "Empty response after tools, prompting for text"
+ let promptMsg = Provider.Message Provider.ToolRole "Please provide a response to the user." Nothing Nothing
+ newMsgs = msgs <> [msg, promptMsg]
+ loopProviderStreaming prov toolApis' toolMap newMsgs (iteration + 1) totalCalls newTokens newCost toolCallCounts testFailures editFailures
+ | otherwise -> do
+ engineOnActivity engineCfg "Agent completed"
+ engineOnComplete engineCfg
+ pure
+ <| Right
+ <| AgentResult
+ { resultFinalMessage = Provider.msgContent msg,
+ resultToolCallCount = totalCalls,
+ resultIterations = iteration + 1,
+ resultTotalCost = newCost,
+ resultTotalTokens = newTokens
+ }
+ Just [] -> do
+ engineOnActivity engineCfg "Agent completed (empty tool calls)"
+ engineOnComplete engineCfg
+ pure
+ <| Right
+ <| AgentResult
+ { resultFinalMessage = Provider.msgContent msg,
+ resultToolCallCount = totalCalls,
+ resultIterations = iteration + 1,
+ resultTotalCost = newCost,
+ resultTotalTokens = newTokens
+ }
+ Just tcs -> do
+ (toolResults, newTestFailures, newEditFailures) <- executeToolCallsStreaming engineCfg toolMap tcs testFailures editFailures
+ let newMsgs = msgs <> [msg] <> toolResults
+ newCalls = totalCalls + length tcs
+ newToolCallCounts = updateToolCallCountsStreaming toolCallCounts tcs
+ loopProviderStreaming prov toolApis' toolMap newMsgs (iteration + 1) newCalls newTokens newCost newToolCallCounts newTestFailures newEditFailures
+
+ getProviderModelStreaming :: Provider.Provider -> Text
+ getProviderModelStreaming (Provider.OpenRouter cfg) = Provider.providerModel cfg
+ getProviderModelStreaming (Provider.Ollama cfg) = Provider.providerModel cfg
+ getProviderModelStreaming (Provider.AmpCLI _) = "amp"
+
+ updateToolCallCountsStreaming :: Map.Map Text Int -> [Provider.ToolCall] -> Map.Map Text Int
+ updateToolCallCountsStreaming =
+ foldr (\tc m -> Map.insertWith (+) (Provider.fcName (Provider.tcFunction tc)) 1 m)
+
+ executeToolCallsStreaming :: EngineConfig -> Map.Map Text Tool -> [Provider.ToolCall] -> Int -> Int -> IO ([Provider.Message], Int, Int)
+ executeToolCallsStreaming eCfg tMap tcs initialTestFailures initialEditFailures = do
+ results <- traverse (executeSingleStreaming eCfg tMap) tcs
+ let msgs = map (\(m, _, _) -> m) results
+ testDeltas = map (\(_, t, _) -> t) results
+ editDeltas = map (\(_, _, e) -> e) results
+ totalTestFail = initialTestFailures + sum testDeltas
+ totalEditFail = initialEditFailures + sum editDeltas
+ pure (msgs, totalTestFail, totalEditFail)
+
+ executeSingleStreaming :: EngineConfig -> Map.Map Text Tool -> Provider.ToolCall -> IO (Provider.Message, Int, Int)
+ executeSingleStreaming eCfg tMap tc = do
+ let name = Provider.fcName (Provider.tcFunction tc)
+ argsText = Provider.fcArguments (Provider.tcFunction tc)
+ callId = Provider.tcId tc
+ engineOnActivity eCfg <| "Executing tool: " <> name
+ engineOnToolCall eCfg name argsText
+ case Map.lookup name tMap of
+ Nothing -> do
+ let errMsg = "Tool not found: " <> name
+ engineOnToolResult eCfg name False errMsg
+ pure (Provider.Message Provider.ToolRole errMsg Nothing (Just callId), 0, 0)
+ Just tool -> do
+ case Aeson.decode (BL.fromStrict (TE.encodeUtf8 argsText)) of
+ Nothing -> do
+ let errMsg = "Invalid JSON arguments: " <> argsText
+ engineOnToolResult eCfg name False errMsg
+ pure (Provider.Message Provider.ToolRole errMsg Nothing (Just callId), 0, 0)
+ Just args -> do
+ resultValue <- toolExecute tool args
+ let resultText = TE.decodeUtf8 (BL.toStrict (Aeson.encode resultValue))
+ isTestCall = name == "bash" && ("bild --test" `Text.isInfixOf` argsText || "bild -t" `Text.isInfixOf` argsText)
+ isTestFailure = isTestCall && isFailureResultStreaming resultValue
+ testDelta = if isTestFailure then 1 else 0
+ isEditFailure = name == "edit_file" && isOldStrNotFoundStreaming resultValue
+ editDelta = if isEditFailure then 1 else 0
+ engineOnToolResult eCfg name True resultText
+ pure (Provider.Message Provider.ToolRole resultText Nothing (Just callId), testDelta, editDelta)
+
+ isFailureResultStreaming :: Aeson.Value -> Bool
+ isFailureResultStreaming (Aeson.Object obj) =
+ case KeyMap.lookup "exit_code" obj of
+ Just (Aeson.Number n) -> n /= 0
+ _ -> False
+ isFailureResultStreaming (Aeson.String s) =
+ "error"
+ `Text.isInfixOf` Text.toLower s
+ || "failed"
+ `Text.isInfixOf` Text.toLower s
+ || "FAILED"
+ `Text.isInfixOf` s
+ isFailureResultStreaming _ = False
+
+ isOldStrNotFoundStreaming :: Aeson.Value -> Bool
+ isOldStrNotFoundStreaming (Aeson.Object obj) =
+ case KeyMap.lookup "error" obj of
+ Just (Aeson.String s) -> "old_str not found" `Text.isInfixOf` s
+ _ -> False
+ isOldStrNotFoundStreaming _ = False
diff --git a/Omni/Agent/Memory.hs b/Omni/Agent/Memory.hs
new file mode 100644
index 0000000..4aaa438
--- /dev/null
+++ b/Omni/Agent/Memory.hs
@@ -0,0 +1,1575 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Cross-agent shared memory system with vector similarity search.
+--
+-- Provides persistent memory that is:
+-- - Shared across all agents (Telegram, researcher, coder, etc.)
+-- - Private per user (users can't see each other's memories)
+-- - Searchable via semantic similarity using embeddings
+--
+-- Uses sqlite-vss for vector similarity search and Ollama for embeddings.
+--
+-- : out omni-agent-memory
+-- : dep aeson
+-- : dep http-conduit
+-- : dep sqlite-simple
+-- : dep uuid
+-- : dep vector
+-- : dep directory
+-- : dep bytestring
+module Omni.Agent.Memory
+ ( -- * Types
+ User (..),
+ Memory (..),
+ MemorySource (..),
+ ConversationMessage (..),
+ ConversationSummary (..),
+ MessageRole (..),
+ RelationType (..),
+ MemoryLink (..),
+
+ -- * User Management
+ createUser,
+ getUser,
+ getUserByTelegramId,
+ getOrCreateUserByTelegramId,
+
+ -- * Memory Operations
+ storeMemory,
+ recallMemories,
+ forgetMemory,
+ getAllMemoriesForUser,
+ updateMemoryAccess,
+
+ -- * Knowledge Graph
+ linkMemories,
+ getMemoryLinks,
+ getLinkedMemories,
+ queryGraph,
+
+ -- * Conversation History (DMs)
+ saveMessage,
+ getRecentMessages,
+ getConversationContext,
+ summarizeAndArchive,
+ estimateTokens,
+
+ -- * Group Conversation History
+ saveGroupMessage,
+ getGroupRecentMessages,
+ getGroupConversationContext,
+
+ -- * Group Memories
+ storeGroupMemory,
+ recallGroupMemories,
+
+ -- * Embeddings
+ embedText,
+
+ -- * Agent Integration
+ rememberTool,
+ recallTool,
+ linkMemoriesTool,
+ queryGraphTool,
+ formatMemoriesForPrompt,
+ runAgentWithMemory,
+
+ -- * Database
+ withMemoryDb,
+ initMemoryDb,
+ getMemoryDbPath,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.ByteString as BS
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.List as List
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import Data.Time (UTCTime, getCurrentTime)
+import Data.Time.Format (defaultTimeLocale, formatTime)
+import qualified Data.UUID as UUID
+import qualified Data.UUID.V4 as UUID
+import qualified Data.Vector.Storable as VS
+import qualified Database.SQLite.Simple as SQL
+import Database.SQLite.Simple.FromField ()
+import qualified Database.SQLite.Simple.ToField as SQL
+import Foreign.Storable ()
+import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Test as Test
+import System.Directory (createDirectoryIfMissing)
+import System.Environment (lookupEnv)
+import System.FilePath (takeDirectory, (</>))
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Memory"
+ [ Test.unit "User JSON roundtrip" <| do
+ now <- getCurrentTime
+ let user =
+ User
+ { userId = "test-uuid",
+ userTelegramId = Just 12345,
+ userEmail = Nothing,
+ userName = "Test User",
+ userCreatedAt = now
+ }
+ case Aeson.decode (Aeson.encode user) of
+ Nothing -> Test.assertFailure "Failed to decode User"
+ Just decoded -> userName decoded Test.@=? "Test User",
+ Test.unit "Memory JSON roundtrip" <| do
+ now <- getCurrentTime
+ let mem =
+ Memory
+ { memoryId = "mem-uuid",
+ memoryUserId = "user-uuid",
+ memoryContent = "User is an AI engineer",
+ memoryEmbedding = Nothing,
+ memorySource =
+ MemorySource
+ { sourceAgent = "telegram",
+ sourceSession = Nothing,
+ sourceContext = "User mentioned in chat"
+ },
+ memoryConfidence = 0.9,
+ memoryCreatedAt = now,
+ memoryLastAccessedAt = now,
+ memoryTags = ["profession", "ai"]
+ }
+ case Aeson.decode (Aeson.encode mem) of
+ Nothing -> Test.assertFailure "Failed to decode Memory"
+ Just decoded -> memoryContent decoded Test.@=? "User is an AI engineer",
+ Test.unit "MemorySource JSON roundtrip" <| do
+ let src =
+ MemorySource
+ { sourceAgent = "researcher",
+ sourceSession = Just "session-123",
+ sourceContext = "Extracted from conversation"
+ }
+ case Aeson.decode (Aeson.encode src) of
+ Nothing -> Test.assertFailure "Failed to decode MemorySource"
+ Just decoded -> sourceAgent decoded Test.@=? "researcher",
+ Test.unit "formatMemoriesForPrompt formats correctly" <| do
+ now <- getCurrentTime
+ let mem1 =
+ Memory
+ { memoryId = "1",
+ memoryUserId = "u",
+ memoryContent = "User is an AI engineer",
+ memoryEmbedding = Nothing,
+ memorySource = MemorySource "telegram" Nothing "chat",
+ memoryConfidence = 0.9,
+ memoryCreatedAt = now,
+ memoryLastAccessedAt = now,
+ memoryTags = []
+ }
+ mem2 =
+ Memory
+ { memoryId = "2",
+ memoryUserId = "u",
+ memoryContent = "User prefers Haskell",
+ memoryEmbedding = Nothing,
+ memorySource = MemorySource "coder" Nothing "code review",
+ memoryConfidence = 0.8,
+ memoryCreatedAt = now,
+ memoryLastAccessedAt = now,
+ memoryTags = []
+ }
+ formatted = formatMemoriesForPrompt [mem1, mem2]
+ ("AI engineer" `Text.isInfixOf` formatted) Test.@=? True
+ ("Haskell" `Text.isInfixOf` formatted) Test.@=? True,
+ Test.unit "cosineSimilarity identical vectors" <| do
+ let v1 = VS.fromList [1.0, 0.0, 0.0 :: Float]
+ v2 = VS.fromList [1.0, 0.0, 0.0 :: Float]
+ abs (cosineSimilarity v1 v2 - 1.0) < 0.0001 Test.@=? True,
+ Test.unit "cosineSimilarity orthogonal vectors" <| do
+ let v1 = VS.fromList [1.0, 0.0, 0.0 :: Float]
+ v2 = VS.fromList [0.0, 1.0, 0.0 :: Float]
+ abs (cosineSimilarity v1 v2) < 0.0001 Test.@=? True,
+ Test.unit "cosineSimilarity opposite vectors" <| do
+ let v1 = VS.fromList [1.0, 0.0, 0.0 :: Float]
+ v2 = VS.fromList [-1.0, 0.0, 0.0 :: Float]
+ abs (cosineSimilarity v1 v2 + 1.0) < 0.0001 Test.@=? True,
+ Test.unit "vectorToBlob and blobToVector roundtrip" <| do
+ let v = VS.fromList [0.1, 0.2, 0.3, 0.4, 0.5 :: Float]
+ blob = vectorToBlob v
+ v' = blobToVector blob
+ VS.length v Test.@=? VS.length v'
+ VS.toList v Test.@=? VS.toList v',
+ Test.unit "rememberTool has correct schema" <| do
+ let tool = rememberTool "test-user-id"
+ Engine.toolName tool Test.@=? "remember",
+ Test.unit "recallTool has correct schema" <| do
+ let tool = recallTool "test-user-id"
+ Engine.toolName tool Test.@=? "recall",
+ Test.unit "RelationType JSON roundtrip" <| do
+ let types = [Contradicts, Supports, Elaborates, Supersedes, Related, ContingentOn]
+ forM_ types <| \rt ->
+ case Aeson.decode (Aeson.encode rt) of
+ Nothing -> Test.assertFailure ("Failed to decode RelationType: " <> show rt)
+ Just decoded -> decoded Test.@=? rt,
+ Test.unit "MemoryLink JSON roundtrip" <| do
+ now <- getCurrentTime
+ let memLink =
+ MemoryLink
+ { linkFromMemoryId = "mem-1",
+ linkToMemoryId = "mem-2",
+ linkRelationType = Contradicts,
+ linkCreatedAt = now
+ }
+ case Aeson.decode (Aeson.encode memLink) of
+ Nothing -> Test.assertFailure "Failed to decode MemoryLink"
+ Just decoded -> do
+ linkFromMemoryId decoded Test.@=? "mem-1"
+ linkToMemoryId decoded Test.@=? "mem-2"
+ linkRelationType decoded Test.@=? Contradicts,
+ Test.unit "relationTypeToText and textToRelationType roundtrip" <| do
+ let types = [Contradicts, Supports, Elaborates, Supersedes, Related, ContingentOn]
+ forM_ types <| \rt ->
+ textToRelationType (relationTypeToText rt) Test.@=? Just rt,
+ Test.unit "linkMemoriesTool has correct schema" <| do
+ let tool = linkMemoriesTool "test-user-id"
+ Engine.toolName tool Test.@=? "link_memories",
+ Test.unit "queryGraphTool has correct schema" <| do
+ let tool = queryGraphTool "test-user-id"
+ Engine.toolName tool Test.@=? "query_graph"
+ ]
+
+-- | User record for multi-user memory system.
+data User = User
+ { userId :: Text,
+ userTelegramId :: Maybe Int,
+ userEmail :: Maybe Text,
+ userName :: Text,
+ userCreatedAt :: UTCTime
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON User where
+ toJSON u =
+ Aeson.object
+ [ "id" .= userId u,
+ "telegram_id" .= userTelegramId u,
+ "email" .= userEmail u,
+ "name" .= userName u,
+ "created_at" .= userCreatedAt u
+ ]
+
+instance Aeson.FromJSON User where
+ parseJSON =
+ Aeson.withObject "User" <| \v ->
+ (User </ (v .: "id"))
+ <*> (v .:? "telegram_id")
+ <*> (v .:? "email")
+ <*> (v .: "name")
+ <*> (v .: "created_at")
+
+instance SQL.FromRow User where
+ fromRow =
+ User
+ </ SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+
+instance SQL.ToRow User where
+ toRow u =
+ [ SQL.toField (userId u),
+ SQL.toField (userTelegramId u),
+ SQL.toField (userEmail u),
+ SQL.toField (userName u),
+ SQL.toField (userCreatedAt u)
+ ]
+
+-- | Source information for a memory.
+data MemorySource = MemorySource
+ { sourceAgent :: Text,
+ sourceSession :: Maybe Text,
+ sourceContext :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON MemorySource where
+ toJSON s =
+ Aeson.object
+ [ "agent" .= sourceAgent s,
+ "session" .= sourceSession s,
+ "context" .= sourceContext s
+ ]
+
+instance Aeson.FromJSON MemorySource where
+ parseJSON =
+ Aeson.withObject "MemorySource" <| \v ->
+ (MemorySource </ (v .: "agent"))
+ <*> (v .:? "session")
+ <*> (v .: "context")
+
+-- | A memory stored in the system.
+data Memory = Memory
+ { memoryId :: Text,
+ memoryUserId :: Text,
+ memoryContent :: Text,
+ memoryEmbedding :: Maybe (VS.Vector Float),
+ memorySource :: MemorySource,
+ memoryConfidence :: Double,
+ memoryCreatedAt :: UTCTime,
+ memoryLastAccessedAt :: UTCTime,
+ memoryTags :: [Text]
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON Memory where
+ toJSON m =
+ Aeson.object
+ [ "id" .= memoryId m,
+ "user_id" .= memoryUserId m,
+ "content" .= memoryContent m,
+ "source" .= memorySource m,
+ "confidence" .= memoryConfidence m,
+ "created_at" .= memoryCreatedAt m,
+ "last_accessed_at" .= memoryLastAccessedAt m,
+ "tags" .= memoryTags m
+ ]
+
+instance Aeson.FromJSON Memory where
+ parseJSON =
+ Aeson.withObject "Memory" <| \v ->
+ ( Memory
+ </ (v .: "id")
+ )
+ <*> (v .: "user_id")
+ <*> (v .: "content")
+ <*> pure Nothing
+ <*> (v .: "source")
+ <*> (v .:? "confidence" .!= 0.8)
+ <*> (v .: "created_at")
+ <*> (v .: "last_accessed_at")
+ <*> (v .:? "tags" .!= [])
+
+-- SQLite instances for Memory (partial - embedding handled separately)
+instance SQL.FromRow Memory where
+ fromRow = do
+ mid <- SQL.field
+ uid <- SQL.field
+ content <- SQL.field
+ embeddingBlob <- SQL.field
+ agent <- SQL.field
+ session <- SQL.field
+ context <- SQL.field
+ confidence <- SQL.field
+ createdAt <- SQL.field
+ lastAccessedAt <- SQL.field
+ tagsJson <- SQL.field
+ let embedding = blobToVector </ (embeddingBlob :: Maybe BS.ByteString)
+ source = MemorySource agent session context
+ tags = fromMaybe [] ((tagsJson :: Maybe Text) +> (Aeson.decode <. BL.fromStrict <. TE.encodeUtf8))
+ pure
+ Memory
+ { memoryId = mid,
+ memoryUserId = uid,
+ memoryContent = content,
+ memoryEmbedding = embedding,
+ memorySource = source,
+ memoryConfidence = confidence,
+ memoryCreatedAt = createdAt,
+ memoryLastAccessedAt = lastAccessedAt,
+ memoryTags = tags
+ }
+
+-- | Role in a conversation message.
+data MessageRole = UserRole | AssistantRole
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON MessageRole where
+ toJSON UserRole = Aeson.String "user"
+ toJSON AssistantRole = Aeson.String "assistant"
+
+instance Aeson.FromJSON MessageRole where
+ parseJSON =
+ Aeson.withText "MessageRole" <| \case
+ "user" -> pure UserRole
+ "assistant" -> pure AssistantRole
+ _ -> empty
+
+-- | A message in a conversation.
+data ConversationMessage = ConversationMessage
+ { cmId :: Maybe Int,
+ cmUserId :: Text,
+ cmChatId :: Int,
+ cmRole :: MessageRole,
+ cmSenderName :: Maybe Text,
+ cmContent :: Text,
+ cmTokensEstimate :: Int,
+ cmCreatedAt :: UTCTime
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON ConversationMessage where
+ toJSON m =
+ Aeson.object
+ [ "id" .= cmId m,
+ "user_id" .= cmUserId m,
+ "chat_id" .= cmChatId m,
+ "role" .= cmRole m,
+ "sender_name" .= cmSenderName m,
+ "content" .= cmContent m,
+ "tokens_estimate" .= cmTokensEstimate m,
+ "created_at" .= cmCreatedAt m
+ ]
+
+instance SQL.FromRow ConversationMessage where
+ fromRow =
+ (ConversationMessage </ SQL.field)
+ <*> SQL.field
+ <*> SQL.field
+ <*> (parseRole </ SQL.field)
+ <*> SQL.field
+ <*> SQL.field
+ <*> (fromMaybe 0 </ SQL.field)
+ <*> SQL.field
+ where
+ parseRole :: Text -> MessageRole
+ parseRole "user" = UserRole
+ parseRole _ = AssistantRole
+
+-- | A summary of older conversation messages.
+data ConversationSummary = ConversationSummary
+ { csId :: Maybe Int,
+ csUserId :: Text,
+ csChatId :: Int,
+ csSummary :: Text,
+ csMessagesSummarized :: Int,
+ csTokensSaved :: Maybe Int,
+ csCreatedAt :: UTCTime
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON ConversationSummary where
+ toJSON s =
+ Aeson.object
+ [ "id" .= csId s,
+ "user_id" .= csUserId s,
+ "chat_id" .= csChatId s,
+ "summary" .= csSummary s,
+ "messages_summarized" .= csMessagesSummarized s,
+ "tokens_saved" .= csTokensSaved s,
+ "created_at" .= csCreatedAt s
+ ]
+
+instance SQL.FromRow ConversationSummary where
+ fromRow =
+ (ConversationSummary </ SQL.field)
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+
+-- | Relation types for the knowledge graph.
+data RelationType
+ = Contradicts
+ | Supports
+ | Elaborates
+ | Supersedes
+ | Related
+ | ContingentOn
+ deriving (Show, Eq, Generic, Ord)
+
+instance Aeson.ToJSON RelationType where
+ toJSON Contradicts = Aeson.String "contradicts"
+ toJSON Supports = Aeson.String "supports"
+ toJSON Elaborates = Aeson.String "elaborates"
+ toJSON Supersedes = Aeson.String "supersedes"
+ toJSON Related = Aeson.String "related"
+ toJSON ContingentOn = Aeson.String "contingent_on"
+
+instance Aeson.FromJSON RelationType where
+ parseJSON =
+ Aeson.withText "RelationType" <| \case
+ "contradicts" -> pure Contradicts
+ "supports" -> pure Supports
+ "elaborates" -> pure Elaborates
+ "supersedes" -> pure Supersedes
+ "related" -> pure Related
+ "contingent_on" -> pure ContingentOn
+ _ -> empty
+
+relationTypeToText :: RelationType -> Text
+relationTypeToText Contradicts = "contradicts"
+relationTypeToText Supports = "supports"
+relationTypeToText Elaborates = "elaborates"
+relationTypeToText Supersedes = "supersedes"
+relationTypeToText Related = "related"
+relationTypeToText ContingentOn = "contingent_on"
+
+textToRelationType :: Text -> Maybe RelationType
+textToRelationType "contradicts" = Just Contradicts
+textToRelationType "supports" = Just Supports
+textToRelationType "elaborates" = Just Elaborates
+textToRelationType "supersedes" = Just Supersedes
+textToRelationType "related" = Just Related
+textToRelationType "contingent_on" = Just ContingentOn
+textToRelationType _ = Nothing
+
+-- | A link between two memories in the knowledge graph.
+data MemoryLink = MemoryLink
+ { linkFromMemoryId :: Text,
+ linkToMemoryId :: Text,
+ linkRelationType :: RelationType,
+ linkCreatedAt :: UTCTime
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON MemoryLink where
+ toJSON l =
+ Aeson.object
+ [ "from_memory_id" .= linkFromMemoryId l,
+ "to_memory_id" .= linkToMemoryId l,
+ "relation_type" .= linkRelationType l,
+ "created_at" .= linkCreatedAt l
+ ]
+
+instance Aeson.FromJSON MemoryLink where
+ parseJSON =
+ Aeson.withObject "MemoryLink" <| \v ->
+ (MemoryLink </ (v .: "from_memory_id"))
+ <*> (v .: "to_memory_id")
+ <*> (v .: "relation_type")
+ <*> (v .: "created_at")
+
+instance SQL.FromRow MemoryLink where
+ fromRow = do
+ fromId <- SQL.field
+ toId <- SQL.field
+ relTypeText <- SQL.field
+ createdAt <- SQL.field
+ let relType = fromMaybe Related (textToRelationType relTypeText)
+ pure
+ MemoryLink
+ { linkFromMemoryId = fromId,
+ linkToMemoryId = toId,
+ linkRelationType = relType,
+ linkCreatedAt = createdAt
+ }
+
+-- | Get the path to memory.db
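+-- Resolution order: the MEMORY_DB_PATH environment variable if set, otherwise
+-- $HOME/.local/share/omni/memory.db, falling back to _/memory.db when HOME is
+-- unset.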
+getMemoryDbPath :: IO FilePath
+getMemoryDbPath = do
+ maybeEnv <- lookupEnv "MEMORY_DB_PATH"
+ case maybeEnv of
+ Just p -> pure p
+ Nothing -> do
+ home <- lookupEnv "HOME"
+ case home of
+ Just h -> pure (h </> ".local/share/omni/memory.db")
+ Nothing -> pure "_/memory.db"
+
+-- | Run an action with the memory database connection.
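+-- Opens the database at 'getMemoryDbPath' (creating parent directories as
+-- needed), ensures the schema exists via 'initMemoryDb', and passes the live
+-- connection to the given action.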
+withMemoryDb :: (SQL.Connection -> IO a) -> IO a
+withMemoryDb action = do
+ dbPath <- getMemoryDbPath
+ createDirectoryIfMissing True (takeDirectory dbPath)
+ SQL.withConnection dbPath <| \conn -> do
+ initMemoryDb conn
+ action conn
+
+-- | Initialize the memory database schema.
+initMemoryDb :: SQL.Connection -> IO ()
+initMemoryDb conn = do
+ SQL.execute_ conn "PRAGMA busy_timeout = 10000"
+ SQL.execute_ conn "PRAGMA foreign_keys = ON"
+ _ <- SQL.query_ conn "PRAGMA journal_mode = WAL" :: IO [[Text]]
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS users (\
+ \ id TEXT PRIMARY KEY,\
+ \ telegram_id INTEGER UNIQUE,\
+ \ email TEXT UNIQUE,\
+ \ name TEXT NOT NULL,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS memories (\
+ \ id TEXT PRIMARY KEY,\
+ \ user_id TEXT NOT NULL REFERENCES users(id),\
+ \ content TEXT NOT NULL,\
+ \ embedding BLOB,\
+ \ source_agent TEXT NOT NULL,\
+ \ source_session TEXT,\
+ \ source_context TEXT,\
+ \ confidence REAL DEFAULT 0.8,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\
+ \ last_accessed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\
+ \ tags TEXT\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_memories_user ON memories(user_id)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_memories_agent ON memories(source_agent)"
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS conversation_messages (\
+ \ id INTEGER PRIMARY KEY AUTOINCREMENT,\
+ \ user_id TEXT NOT NULL REFERENCES users(id),\
+ \ chat_id INTEGER NOT NULL,\
+ \ role TEXT NOT NULL,\
+ \ sender_name TEXT,\
+ \ content TEXT NOT NULL,\
+ \ tokens_estimate INTEGER,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_conv_user_chat ON conversation_messages(user_id, chat_id)"
+ migrateConversationMessages conn
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS conversation_summaries (\
+ \ id INTEGER PRIMARY KEY AUTOINCREMENT,\
+ \ user_id TEXT NOT NULL REFERENCES users(id),\
+ \ chat_id INTEGER NOT NULL,\
+ \ summary TEXT NOT NULL,\
+ \ messages_summarized INTEGER NOT NULL,\
+ \ tokens_saved INTEGER,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_summary_user_chat ON conversation_summaries(user_id, chat_id)"
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS notes (\
+ \ id INTEGER PRIMARY KEY AUTOINCREMENT,\
+ \ user_id TEXT NOT NULL,\
+ \ topic TEXT NOT NULL,\
+ \ content TEXT NOT NULL,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_notes_user ON notes(user_id)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_notes_topic ON notes(user_id, topic)"
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS todos (\
+ \ id INTEGER PRIMARY KEY AUTOINCREMENT,\
+ \ user_id TEXT NOT NULL,\
+ \ title TEXT NOT NULL,\
+ \ due_date TIMESTAMP,\
+ \ completed INTEGER NOT NULL DEFAULT 0,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_todos_user ON todos(user_id)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_todos_due ON todos(user_id, due_date)"
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS memory_links (\
+ \ from_memory_id TEXT NOT NULL REFERENCES memories(id) ON DELETE CASCADE,\
+ \ to_memory_id TEXT NOT NULL REFERENCES memories(id) ON DELETE CASCADE,\
+ \ relation_type TEXT NOT NULL,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\
+ \ PRIMARY KEY (from_memory_id, to_memory_id, relation_type)\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_memory_links_from ON memory_links(from_memory_id)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_memory_links_to ON memory_links(to_memory_id)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_memory_links_type ON memory_links(relation_type)"
+
+-- | Migrate conversation_messages to add sender_name and thread_id columns.
+migrateConversationMessages :: SQL.Connection -> IO ()
+migrateConversationMessages conn = do
+ columns <- SQL.query_ conn "PRAGMA table_info(conversation_messages)" :: IO [(Int, Text, Text, Int, Maybe Text, Int)]
+ let columnNames = map (\(_, name, _, _, _, _) -> name) columns
+ unless ("sender_name" `elem` columnNames) <| do
+ SQL.execute_ conn "ALTER TABLE conversation_messages ADD COLUMN sender_name TEXT"
+ SQL.execute_ conn "UPDATE conversation_messages SET sender_name = 'bensima' WHERE role = 'user' AND sender_name IS NULL"
+ unless ("thread_id" `elem` columnNames) <| do
+ SQL.execute_ conn "ALTER TABLE conversation_messages ADD COLUMN thread_id INTEGER"
+ SQL.execute_ conn "CREATE INDEX IF NOT EXISTS idx_conv_chat_thread ON conversation_messages(chat_id, thread_id)"
+
+-- | Create a new user.
+createUser :: Text -> Maybe Int -> IO User
+createUser name telegramId = do
+ uuid <- UUID.nextRandom
+ now <- getCurrentTime
+ let user =
+ User
+ { userId = UUID.toText uuid,
+ userTelegramId = telegramId,
+ userEmail = Nothing,
+ userName = name,
+ userCreatedAt = now
+ }
+ withMemoryDb <| \conn ->
+ SQL.execute
+ conn
+ "INSERT INTO users (id, telegram_id, email, name, created_at) VALUES (?, ?, ?, ?, ?)"
+ user
+ pure user
+
+-- | Get a user by ID.
+getUser :: Text -> IO (Maybe User)
+getUser uid =
+ withMemoryDb <| \conn -> do
+ results <- SQL.query conn "SELECT id, telegram_id, email, name, created_at FROM users WHERE id = ?" (SQL.Only uid)
+ pure (listToMaybe results)
+
+-- | Get a user by Telegram ID.
+getUserByTelegramId :: Int -> IO (Maybe User)
+getUserByTelegramId tid =
+ withMemoryDb <| \conn -> do
+ results <- SQL.query conn "SELECT id, telegram_id, email, name, created_at FROM users WHERE telegram_id = ?" (SQL.Only tid)
+ pure (listToMaybe results)
+
+-- | Get or create a user by Telegram ID.
+getOrCreateUserByTelegramId :: Int -> Text -> IO User
+getOrCreateUserByTelegramId tid name = do
+ existing <- getUserByTelegramId tid
+ case existing of
+ Just user -> pure user
+ Nothing -> createUser name (Just tid)
+
+-- | Store a memory for a user.
+storeMemory :: Text -> Text -> MemorySource -> IO Memory
+storeMemory uid content source = storeMemoryWithTags uid content source []
+
+-- | Store a memory with tags.
+storeMemoryWithTags :: Text -> Text -> MemorySource -> [Text] -> IO Memory
+storeMemoryWithTags uid content source tags = do
+ uuid <- UUID.nextRandom
+ now <- getCurrentTime
+ embedding <- embedText content
+ let mem =
+ Memory
+ { memoryId = UUID.toText uuid,
+ memoryUserId = uid,
+ memoryContent = content,
+ memoryEmbedding = either (const Nothing) Just embedding,
+ memorySource = source,
+ memoryConfidence = 0.8,
+ memoryCreatedAt = now,
+ memoryLastAccessedAt = now,
+ memoryTags = tags
+ }
+ withMemoryDb <| \conn ->
+ SQL.execute
+ conn
+ "INSERT INTO memories (id, user_id, content, embedding, source_agent, source_session, source_context, confidence, created_at, last_accessed_at, tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
+ ( ( memoryId mem,
+ memoryUserId mem,
+ memoryContent mem,
+ vectorToBlob </ memoryEmbedding mem,
+ sourceAgent (memorySource mem),
+ sourceSession (memorySource mem),
+ sourceContext (memorySource mem)
+ )
+ SQL.:. ( memoryConfidence mem,
+ memoryCreatedAt mem,
+ memoryLastAccessedAt mem,
+ TE.decodeUtf8 (BL.toStrict (Aeson.encode (memoryTags mem)))
+ )
+ )
+ pure mem
+
+-- | Recall memories for a user using semantic similarity.
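+-- Embeds the query with 'embedText', scores every stored memory for the user
+-- with 'cosineSimilarity', and returns the top @limit@ matches, refreshing
+-- their last-accessed timestamps. Falls back to recency ordering when the
+-- embedding request fails.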
+recallMemories :: Text -> Text -> Int -> IO [Memory]
+recallMemories uid query limit = do
+ queryEmbedding <- embedText query
+ case queryEmbedding of
+ Left _ -> recallMemoriesByRecency uid limit
+ Right qEmb -> do
+ allMems <- getAllMemoriesForUser uid
+ let scored =
+ [ (m, cosineSimilarity qEmb emb)
+ | m <- allMems,
+ Just emb <- [memoryEmbedding m]
+ ]
+ sorted = List.sortBy (\(_, s1) (_, s2) -> compare s2 s1) scored
+ topN = take limit sorted
+ now <- getCurrentTime
+ traverse_ (updateMemoryAccess now <. memoryId <. fst) topN
+ pure (map fst topN)
+
+-- | Recall memories by recency (fallback when embedding fails).
+recallMemoriesByRecency :: Text -> Int -> IO [Memory]
+recallMemoriesByRecency uid limit =
+ withMemoryDb <| \conn -> do
+ SQL.query
+ conn
+ "SELECT id, user_id, content, embedding, source_agent, source_session, source_context, confidence, created_at, last_accessed_at, tags \
+ \FROM memories WHERE user_id = ? ORDER BY last_accessed_at DESC LIMIT ?"
+ (uid, limit)
+
+-- | Get all memories for a user.
+getAllMemoriesForUser :: Text -> IO [Memory]
+getAllMemoriesForUser uid =
+ withMemoryDb <| \conn ->
+ SQL.query
+ conn
+ "SELECT id, user_id, content, embedding, source_agent, source_session, source_context, confidence, created_at, last_accessed_at, tags \
+ \FROM memories WHERE user_id = ?"
+ (SQL.Only uid)
+
+-- | Delete a memory.
+forgetMemory :: Text -> IO ()
+forgetMemory mid =
+ withMemoryDb <| \conn ->
+ SQL.execute conn "DELETE FROM memories WHERE id = ?" (SQL.Only mid)
+
+-- | Update memory's last accessed timestamp.
+updateMemoryAccess :: UTCTime -> Text -> IO ()
+updateMemoryAccess now mid =
+ withMemoryDb <| \conn ->
+ SQL.execute conn "UPDATE memories SET last_accessed_at = ? WHERE id = ?" (now, mid)
+
+-- | Create a link between two memories.
+linkMemories :: Text -> Text -> RelationType -> IO MemoryLink
+linkMemories fromId toId relType = do
+ now <- getCurrentTime
+ withMemoryDb <| \conn ->
+ SQL.execute
+ conn
+ "INSERT OR REPLACE INTO memory_links (from_memory_id, to_memory_id, relation_type, created_at) VALUES (?, ?, ?, ?)"
+ (fromId, toId, relationTypeToText relType, now)
+ pure
+ MemoryLink
+ { linkFromMemoryId = fromId,
+ linkToMemoryId = toId,
+ linkRelationType = relType,
+ linkCreatedAt = now
+ }
+
+-- | Get all links from a memory.
+getMemoryLinks :: Text -> IO [MemoryLink]
+getMemoryLinks memId =
+ withMemoryDb <| \conn ->
+ SQL.query
+ conn
+ "SELECT from_memory_id, to_memory_id, relation_type, created_at \
+ \FROM memory_links WHERE from_memory_id = ? OR to_memory_id = ?"
+ (memId, memId)
+
+-- | Get memories linked to a given memory with their content.
+getLinkedMemories :: Text -> Maybe RelationType -> IO [(MemoryLink, Memory)]
+getLinkedMemories memId maybeRelType = do
+ links <- getMemoryLinks memId
+ let filteredLinks = case maybeRelType of
+ Nothing -> links
+ Just rt -> filter (\l -> linkRelationType l == rt) links
+ mems <- traverse loadMemory filteredLinks
+ pure [(l, m) | (l, Just m) <- zip filteredLinks mems]
+ where
+ loadMemory memLink = do
+ let targetId =
+ if linkFromMemoryId memLink == memId
+ then linkToMemoryId memLink
+ else linkFromMemoryId memLink
+ withMemoryDb <| \conn -> do
+ results <-
+ SQL.query
+ conn
+ "SELECT id, user_id, content, embedding, source_agent, source_session, source_context, confidence, created_at, last_accessed_at, tags \
+ \FROM memories WHERE id = ?"
+ (SQL.Only targetId)
+ pure (listToMaybe results)
+
+-- | Query the knowledge graph by traversing links from a starting memory.
+-- Returns all memories reachable within the given depth.
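+-- Each result pairs a reachable memory with the link by which it was first
+-- discovered; the starting memory itself is included with an empty link list.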
+queryGraph :: Text -> Int -> Maybe RelationType -> IO [(Memory, [MemoryLink])]
+queryGraph startMemId maxDepth maybeRelType = do
+ startMem <- getMemoryById startMemId
+ case startMem of
+ Nothing -> pure []
+ Just mem -> go [startMemId] [(mem, [])] 0
+ where
+ go :: [Text] -> [(Memory, [MemoryLink])] -> Int -> IO [(Memory, [MemoryLink])]
+ go _ acc depth | depth >= maxDepth = pure acc
+ go visitedIds acc depth = do
+ let currentIds = map (memoryId <. fst) acc
+ newIds = filter (`notElem` visitedIds) currentIds
+ if null newIds
+ then pure acc
+ else do
+ newLinked <- concat </ traverse (`getLinkedMemories` maybeRelType) newIds
+ let newMems = [(m, [l]) | (l, m) <- newLinked, memoryId m `notElem` visitedIds]
+ newVisited = visitedIds <> map (memoryId <. fst) newMems
+ go newVisited (acc <> newMems) (depth + 1)
+
+-- | Get a memory by ID.
+getMemoryById :: Text -> IO (Maybe Memory)
+getMemoryById memId =
+ withMemoryDb <| \conn -> do
+ results <-
+ SQL.query
+ conn
+ "SELECT id, user_id, content, embedding, source_agent, source_session, source_context, confidence, created_at, last_accessed_at, tags \
+ \FROM memories WHERE id = ?"
+ (SQL.Only memId)
+ pure (listToMaybe results)
+
+-- | Embed text using Ollama's nomic-embed-text model.
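+-- Sends a POST to /api/embeddings on the server at OLLAMA_URL (default
+-- http://localhost:11434) and returns the "embedding" array as a vector of
+-- floats, or a Left error message if the request or parse fails.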
+embedText :: Text -> IO (Either Text (VS.Vector Float))
+embedText content = do
+ ollamaUrl <- fromMaybe "http://localhost:11434" </ lookupEnv "OLLAMA_URL"
+ let url = ollamaUrl <> "/api/embeddings"
+ req0 <- HTTP.parseRequest url
+ let body =
+ Aeson.object
+ [ "model" .= ("nomic-embed-text" :: Text),
+ "prompt" .= content
+ ]
+ req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+ result <- try (HTTP.httpLBS req)
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("Embedding request failed: " <> tshow e))
+ Right response -> do
+ let status = HTTP.getResponseStatusCode response
+ if status >= 200 && status < 300
+ then case Aeson.decode (HTTP.getResponseBody response) of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "embedding" obj of
+ Just (Aeson.Array arr) ->
+ let floats = [f | Aeson.Number n <- toList arr, let f = realToFrac n]
+ in pure (Right (VS.fromList floats))
+ _ -> pure (Left "No embedding in response")
+ _ -> pure (Left "Failed to parse embedding response")
+ else pure (Left ("Embedding HTTP error: " <> tshow status))
+
+-- | Convert a vector to a blob for storage.
+vectorToBlob :: VS.Vector Float -> BS.ByteString
+vectorToBlob v =
+ let bytes = VS.unsafeCast v :: VS.Vector Word8
+ in BS.pack (VS.toList bytes)
+
+-- | Convert a blob back to a vector.
+blobToVector :: BS.ByteString -> VS.Vector Float
+blobToVector bs =
+ let bytes = VS.fromList (BS.unpack bs) :: VS.Vector Word8
+ in VS.unsafeCast bytes
+
+-- | Calculate cosine similarity between two vectors.
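+-- Computed as dot(v1, v2) / (|v1| * |v2|); returns 0 when the vectors differ
+-- in length or either has zero magnitude.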
+cosineSimilarity :: VS.Vector Float -> VS.Vector Float -> Float
+cosineSimilarity v1 v2
+ | VS.length v1 /= VS.length v2 = 0
+ | otherwise =
+ let dot = VS.sum (VS.zipWith (*) v1 v2)
+ mag1 = sqrt (VS.sum (VS.map (\x -> x * x) v1))
+ mag2 = sqrt (VS.sum (VS.map (\x -> x * x) v2))
+ in if mag1 == 0 || mag2 == 0 then 0 else dot / (mag1 * mag2)
+
+-- | Format memories for inclusion in a prompt.
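+-- For example, a memory "User prefers Haskell" recorded by the coder agent
+-- renders as "- User prefers Haskell (via coder)" beneath a short header line.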
+formatMemoriesForPrompt :: [Memory] -> Text
+formatMemoriesForPrompt [] = "No prior context available."
+formatMemoriesForPrompt mems =
+ Text.unlines
+ [ "Known context about this user:",
+ "",
+ Text.unlines (map formatMem mems)
+ ]
+ where
+ formatMem m =
+ "- " <> memoryContent m <> " (via " <> sourceAgent (memorySource m) <> ")"
+
+-- | Run an agent with memory context.
+-- Recalls relevant memories for the user and injects them into the system prompt.
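+-- The ten most relevant memories are appended to the system prompt, and the
+-- remember, recall, link_memories, and query_graph tools are added to the
+-- agent's toolset before delegating to 'Engine.runAgent'.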
+runAgentWithMemory ::
+ User ->
+ Engine.EngineConfig ->
+ Engine.AgentConfig ->
+ Text ->
+ IO (Either Text Engine.AgentResult)
+runAgentWithMemory user engineCfg agentCfg userPrompt = do
+ memories <- recallMemories (userId user) userPrompt 10
+ let memoryContext = formatMemoriesForPrompt memories
+ enhancedPrompt =
+ Engine.agentSystemPrompt agentCfg
+ <> "\n\n## Known about this user\n"
+ <> memoryContext
+ enhancedConfig =
+ agentCfg
+ { Engine.agentSystemPrompt = enhancedPrompt,
+ Engine.agentTools =
+ Engine.agentTools agentCfg
+ <> [ rememberTool (userId user),
+ recallTool (userId user),
+ linkMemoriesTool (userId user),
+ queryGraphTool (userId user)
+ ]
+ }
+ Engine.runAgent engineCfg enhancedConfig userPrompt
+
+-- | Tool for agents to store memories about users.
+rememberTool :: Text -> Engine.Tool
+rememberTool uid =
+ Engine.Tool
+ { Engine.toolName = "remember",
+ Engine.toolDescription =
+ "Store a piece of information about the user for future reference. "
+ <> "Use this when the user shares personal facts, preferences, or context "
+ <> "that would be useful to recall in future conversations.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "content"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The information to remember about the user" :: Text)
+ ],
+ "context"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("How/why this was learned (e.g., 'user mentioned in chat')" :: Text)
+ ],
+ "tags"
+ .= Aeson.object
+ [ "type" .= ("array" :: Text),
+ "items" .= Aeson.object ["type" .= ("string" :: Text)],
+ "description" .= ("Optional tags for categorization" :: Text)
+ ]
+ ],
+ "required" .= (["content", "context"] :: [Text])
+ ],
+ Engine.toolExecute = executeRemember uid
+ }
+
+executeRemember :: Text -> Aeson.Value -> IO Aeson.Value
+executeRemember uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: RememberArgs) -> do
+ let source =
+ MemorySource
+ { sourceAgent = "agent",
+ sourceSession = Nothing,
+ sourceContext = rememberContext args
+ }
+ mem <- storeMemoryWithTags uid (rememberContent args) source (rememberTags args)
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "memory_id" .= memoryId mem,
+ "message" .= ("Remembered: " <> rememberContent args)
+ ]
+ )
+
+-- | Tool for agents to recall memories about users.
+recallTool :: Text -> Engine.Tool
+recallTool uid =
+ Engine.Tool
+ { Engine.toolName = "recall",
+ Engine.toolDescription =
+ "Search your memory for information about the user. "
+ <> "Use this to retrieve previously stored facts, preferences, or context.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "query"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("What to search for in memory" :: Text)
+ ],
+ "limit"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Maximum memories to return (default: 5)" :: Text)
+ ]
+ ],
+ "required" .= (["query"] :: [Text])
+ ],
+ Engine.toolExecute = executeRecall uid
+ }
+
+executeRecall :: Text -> Aeson.Value -> IO Aeson.Value
+executeRecall uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: RecallArgs) -> do
+ mems <- recallMemories uid (recallQuery args) (recallLimit args)
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "count" .= length mems,
+ "memories"
+ .= map
+ ( \m ->
+ Aeson.object
+ [ "id" .= memoryId m,
+ "content" .= memoryContent m,
+ "confidence" .= memoryConfidence m,
+ "source" .= sourceAgent (memorySource m),
+ "tags" .= memoryTags m
+ ]
+ )
+ mems
+ ]
+ )
+
+-- Helper for parsing remember args
+data RememberArgs = RememberArgs
+ { rememberContent :: Text,
+ rememberContext :: Text,
+ rememberTags :: [Text]
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON RememberArgs where
+ parseJSON =
+ Aeson.withObject "RememberArgs" <| \v ->
+ (RememberArgs </ (v .: "content"))
+ <*> (v .:? "context" .!= "agent observation")
+ <*> (v .:? "tags" .!= [])
+
+data RecallArgs = RecallArgs
+ { recallQuery :: Text,
+ recallLimit :: Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON RecallArgs where
+ parseJSON =
+ Aeson.withObject "RecallArgs" <| \v ->
+ (RecallArgs </ (v .: "query"))
+ <*> (v .:? "limit" .!= 5)
+
+-- | Tool for agents to link memories in the knowledge graph.
+linkMemoriesTool :: Text -> Engine.Tool
+linkMemoriesTool _uid =
+ Engine.Tool
+ { Engine.toolName = "link_memories",
+ Engine.toolDescription =
+ "Create a typed relationship between two memories. "
+ <> "Use this to connect related information. Relation types:\n"
+ <> "- contradicts: conflicting information\n"
+ <> "- supports: evidence that reinforces another memory\n"
+ <> "- elaborates: adds detail to an existing memory\n"
+ <> "- supersedes: newer info replaces older\n"
+ <> "- related: general topical connection\n"
+ <> "- contingent_on: depends on another fact being true",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "from_memory_id"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("ID of the source memory" :: Text)
+ ],
+ "to_memory_id"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("ID of the target memory" :: Text)
+ ],
+ "relation_type"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "enum" .= (["contradicts", "supports", "elaborates", "supersedes", "related", "contingent_on"] :: [Text]),
+ "description" .= ("Type of relationship between memories" :: Text)
+ ]
+ ],
+ "required" .= (["from_memory_id", "to_memory_id", "relation_type"] :: [Text])
+ ],
+ Engine.toolExecute = executeLinkMemories
+ }
+
+executeLinkMemories :: Aeson.Value -> IO Aeson.Value
+executeLinkMemories v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: LinkMemoriesArgs) -> do
+ case textToRelationType (linkArgsRelationType args) of
+ Nothing ->
+ pure
+ ( Aeson.object
+ [ "success" .= False,
+ "error" .= ("Invalid relation type: " <> linkArgsRelationType args)
+ ]
+ )
+ Just relType -> do
+ memLink <- linkMemories (linkArgsFromId args) (linkArgsToId args) relType
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message"
+ .= ( "Linked memory "
+ <> linkFromMemoryId memLink
+ <> " -> "
+ <> linkToMemoryId memLink
+ <> " ("
+ <> relationTypeToText (linkRelationType memLink)
+ <> ")"
+ )
+ ]
+ )
+
+data LinkMemoriesArgs = LinkMemoriesArgs
+ { linkArgsFromId :: Text,
+ linkArgsToId :: Text,
+ linkArgsRelationType :: Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON LinkMemoriesArgs where
+ parseJSON =
+ Aeson.withObject "LinkMemoriesArgs" <| \v ->
+ (LinkMemoriesArgs </ (v .: "from_memory_id"))
+ <*> (v .: "to_memory_id")
+ <*> (v .: "relation_type")
+
+-- | Tool for agents to query the memory knowledge graph.
+queryGraphTool :: Text -> Engine.Tool
+queryGraphTool _uid =
+ Engine.Tool
+ { Engine.toolName = "query_graph",
+ Engine.toolDescription =
+ "Explore the knowledge graph to find related memories. "
+ <> "Given a starting memory, traverse links to find connected memories. "
+ <> "Useful for understanding context and finding contradictions or supporting evidence.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "memory_id"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("ID of the memory to start from" :: Text)
+ ],
+ "depth"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("How many link hops to traverse (default: 2)" :: Text)
+ ],
+ "relation_type"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "enum" .= (["contradicts", "supports", "elaborates", "supersedes", "related", "contingent_on"] :: [Text]),
+ "description" .= ("Optional: filter by relation type" :: Text)
+ ]
+ ],
+ "required" .= (["memory_id"] :: [Text])
+ ],
+ Engine.toolExecute = executeQueryGraph
+ }
+
+executeQueryGraph :: Aeson.Value -> IO Aeson.Value
+executeQueryGraph v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: QueryGraphArgs) -> do
+ let maybeRelType = queryArgsRelationType args +> textToRelationType
+ results <- queryGraph (queryArgsMemoryId args) (queryArgsDepth args) maybeRelType
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "count" .= length results,
+ "memories"
+ .= map
+ ( \(m, links) ->
+ Aeson.object
+ [ "id" .= memoryId m,
+ "content" .= memoryContent m,
+ "links"
+ .= map
+ ( \l ->
+ Aeson.object
+ [ "from" .= linkFromMemoryId l,
+ "to" .= linkToMemoryId l,
+ "relation" .= linkRelationType l
+ ]
+ )
+ links
+ ]
+ )
+ results
+ ]
+ )
+
+data QueryGraphArgs = QueryGraphArgs
+ { queryArgsMemoryId :: Text,
+ queryArgsDepth :: Int,
+ queryArgsRelationType :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON QueryGraphArgs where
+ parseJSON =
+ Aeson.withObject "QueryGraphArgs" <| \v ->
+ (QueryGraphArgs </ (v .: "memory_id"))
+ <*> (v .:? "depth" .!= 2)
+ <*> (v .:? "relation_type")
+
+-- | Estimate token count for text (rough: ~4 chars per token).
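+-- For example, @estimateTokens "hello world"@ = 2 (11 characters / 4, floored, minimum 1).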
+estimateTokens :: Text -> Int
+estimateTokens t = max 1 (Text.length t `div` 4)
+
+-- | Save a message to conversation history.
+saveMessage :: Text -> Int -> MessageRole -> Maybe Text -> Text -> IO ConversationMessage
+saveMessage uid chatId role senderName content = do
+ now <- getCurrentTime
+ let tokens = estimateTokens content
+ withMemoryDb <| \conn -> do
+ SQL.execute
+ conn
+ "INSERT INTO conversation_messages (user_id, chat_id, role, sender_name, content, tokens_estimate, created_at) VALUES (?, ?, ?, ?, ?, ?, ?)"
+ (uid, chatId, roleToText role, senderName, content, tokens, now)
+ rowId <- SQL.lastInsertRowId conn
+ pure
+ ConversationMessage
+ { cmId = Just (fromIntegral rowId),
+ cmUserId = uid,
+ cmChatId = chatId,
+ cmRole = role,
+ cmSenderName = senderName,
+ cmContent = content,
+ cmTokensEstimate = tokens,
+ cmCreatedAt = now
+ }
+ where
+ roleToText UserRole = "user" :: Text
+ roleToText AssistantRole = "assistant"
+
+-- | Get recent messages for a user/chat, newest first.
+getRecentMessages :: Text -> Int -> Int -> IO [ConversationMessage]
+getRecentMessages uid chatId limit =
+ withMemoryDb <| \conn ->
+ SQL.query
+ conn
+ "SELECT id, user_id, chat_id, role, sender_name, content, tokens_estimate, created_at \
+ \FROM conversation_messages \
+ \WHERE user_id = ? AND chat_id = ? \
+ \ORDER BY created_at DESC LIMIT ?"
+ (uid, chatId, limit)
+
+-- | Get the most recent summary for a chat.
+getLatestSummary :: Text -> Int -> IO (Maybe ConversationSummary)
+getLatestSummary uid chatId =
+ withMemoryDb <| \conn -> do
+ rows <-
+ SQL.query
+ conn
+ "SELECT id, user_id, chat_id, summary, messages_summarized, tokens_saved, created_at \
+ \FROM conversation_summaries \
+ \WHERE user_id = ? AND chat_id = ? \
+ \ORDER BY created_at DESC LIMIT 1"
+ (uid, chatId)
+ pure (listToMaybe rows)
+
+-- | Build conversation context for the LLM.
+-- Returns (context text, total token estimate).
+getConversationContext :: Text -> Int -> Int -> IO (Text, Int)
+getConversationContext uid chatId maxTokens = do
+ maybeSummary <- getLatestSummary uid chatId
+ recentMsgs <- getRecentMessages uid chatId 50
+
+ let summaryText = maybe "" (\s -> "## Previous conversation summary\n" <> csSummary s <> "\n\n") maybeSummary
+ summaryTokens = maybe 0 (estimateTokens <. csSummary) maybeSummary
+
+ msgsOldestFirst = reverse recentMsgs
+ availableTokens = maxTokens - summaryTokens - 100
+
+ (selectedMsgs, usedTokens) = selectMessages msgsOldestFirst availableTokens
+
+ formattedMsgs =
+ if null selectedMsgs
+ then ""
+ else
+ "## Recent conversation\n"
+ <> Text.unlines (map formatMsg selectedMsgs)
+
+ pure (summaryText <> formattedMsgs, summaryTokens + usedTokens)
+ where
+ selectMessages :: [ConversationMessage] -> Int -> ([ConversationMessage], Int)
+ selectMessages msgs budget = go (reverse msgs) budget []
+ where
+ go [] _ acc = (acc, sum (map cmTokensEstimate acc))
+ go (m : ms) remaining acc
+ | cmTokensEstimate m <= remaining =
+ go ms (remaining - cmTokensEstimate m) (m : acc)
+ | otherwise = (acc, sum (map cmTokensEstimate acc))
+
+ formatMsg m =
+ let timestamp = Text.pack (formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%SZ" (cmCreatedAt m))
+ prefix = case cmRole m of
+ UserRole -> "[" <> timestamp <> "] " <> fromMaybe "User" (cmSenderName m) <> ": "
+ AssistantRole -> "[" <> timestamp <> "] Assistant: "
+ in prefix <> cmContent m
+
+-- | Archive old messages under a summary produced by the caller.
+-- Records the summary, deletes the archived messages, and returns the summary text.
+summarizeAndArchive :: Text -> Int -> Text -> IO Text
+summarizeAndArchive uid chatId summaryText = do
+ now <- getCurrentTime
+
+ (oldMsgCount, tokensSaved) <-
+ withMemoryDb <| \conn -> do
+ rows <-
+ SQL.query
+ conn
+ "SELECT COUNT(*), COALESCE(SUM(tokens_estimate), 0) FROM conversation_messages WHERE user_id = ? AND chat_id = ?"
+ (uid, chatId) ::
+ IO [(Int, Int)]
+ let (count, tokens) = fromMaybe (0, 0) (listToMaybe rows)
+
+ SQL.execute
+ conn
+ "INSERT INTO conversation_summaries (user_id, chat_id, summary, messages_summarized, tokens_saved, created_at) VALUES (?, ?, ?, ?, ?, ?)"
+ (uid, chatId, summaryText, count, tokens, now)
+
+ SQL.execute
+ conn
+ "DELETE FROM conversation_messages WHERE user_id = ? AND chat_id = ?"
+ (uid, chatId)
+
+ pure (count, tokens)
+
+ putText <| "Archived " <> tshow oldMsgCount <> " messages (" <> tshow tokensSaved <> " tokens) for chat " <> tshow chatId
+ pure summaryText
+
+-- -----------------------------------------------------------------------------
+-- Group Conversation History
+-- -----------------------------------------------------------------------------
+
+-- | Save a message to group conversation history.
+-- Unlike saveMessage, this is keyed by (chat_id, thread_id) not (user_id, chat_id).
+-- The sender_name is preserved for attribution.
+saveGroupMessage :: Int -> Maybe Int -> MessageRole -> Text -> Text -> IO ConversationMessage
+saveGroupMessage chatId mThreadId role senderName content = do
+ now <- getCurrentTime
+ let tokens = estimateTokens content
+ withMemoryDb <| \conn -> do
+ SQL.execute
+ conn
+ "INSERT INTO conversation_messages (user_id, chat_id, thread_id, role, sender_name, content, tokens_estimate, created_at) VALUES (NULL, ?, ?, ?, ?, ?, ?, ?)"
+ (chatId, mThreadId, roleToText role, senderName, content, tokens, now)
+ rowId <- SQL.lastInsertRowId conn
+ pure
+ ConversationMessage
+ { cmId = Just (fromIntegral rowId),
+ cmUserId = "",
+ cmChatId = chatId,
+ cmRole = role,
+ cmSenderName = Just senderName,
+ cmContent = content,
+ cmTokensEstimate = tokens,
+ cmCreatedAt = now
+ }
+ where
+ roleToText UserRole = "user" :: Text
+ roleToText AssistantRole = "assistant"
+
+-- | Get recent messages for a group chat/topic, newest first.
+getGroupRecentMessages :: Int -> Maybe Int -> Int -> IO [ConversationMessage]
+getGroupRecentMessages chatId mThreadId limit =
+ withMemoryDb <| \conn ->
+ case mThreadId of
+ Just threadId ->
+ SQL.query
+ conn
+ "SELECT id, COALESCE(user_id, ''), chat_id, role, sender_name, content, tokens_estimate, created_at \
+ \FROM conversation_messages \
+ \WHERE chat_id = ? AND thread_id = ? \
+ \ORDER BY created_at DESC LIMIT ?"
+ (chatId, threadId, limit)
+ Nothing ->
+ SQL.query
+ conn
+ "SELECT id, COALESCE(user_id, ''), chat_id, role, sender_name, content, tokens_estimate, created_at \
+ \FROM conversation_messages \
+ \WHERE chat_id = ? AND thread_id IS NULL \
+ \ORDER BY created_at DESC LIMIT ?"
+ (chatId, limit)
+
+-- | Build conversation context for a group chat.
+-- Returns (context text, total token estimate).
+getGroupConversationContext :: Int -> Maybe Int -> Int -> IO (Text, Int)
+getGroupConversationContext chatId mThreadId maxTokens = do
+ recentMsgs <- getGroupRecentMessages chatId mThreadId 50
+
+ let msgsOldestFirst = reverse recentMsgs
+ availableTokens = maxTokens - 100
+
+ (selectedMsgs, usedTokens) = selectMessages msgsOldestFirst availableTokens
+
+ formattedMsgs =
+ if null selectedMsgs
+ then ""
+ else
+ "## Recent conversation\n"
+ <> Text.unlines (map formatMsg selectedMsgs)
+
+ pure (formattedMsgs, usedTokens)
+ where
+ selectMessages :: [ConversationMessage] -> Int -> ([ConversationMessage], Int)
+ selectMessages msgs budget = go (reverse msgs) budget []
+ where
+ go [] _ acc = (acc, sum (map cmTokensEstimate acc))
+ go (m : ms) remaining acc
+ | cmTokensEstimate m <= remaining =
+ go ms (remaining - cmTokensEstimate m) (m : acc)
+ | otherwise = (acc, sum (map cmTokensEstimate acc))
+
+ formatMsg m =
+ let timestamp = Text.pack (formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%SZ" (cmCreatedAt m))
+ prefix = case cmRole m of
+ UserRole -> "[" <> timestamp <> "] " <> fromMaybe "User" (cmSenderName m) <> ": "
+ AssistantRole -> "[" <> timestamp <> "] Assistant: "
+ in prefix <> cmContent m
+
+-- -----------------------------------------------------------------------------
+-- Group Memories
+-- -----------------------------------------------------------------------------
+
+-- | Generate a synthetic user_id for group-level memories.
+groupUserId :: Int -> Text
+groupUserId chatId = "group:" <> tshow chatId
+
+-- | Store a memory associated with a group (not a user).
+-- These memories are shared across all users in the group.
+storeGroupMemory :: Int -> Text -> MemorySource -> IO Memory
+storeGroupMemory chatId = storeMemory (groupUserId chatId)
+
+-- | Recall memories for a group.
+recallGroupMemories :: Int -> Text -> Int -> IO [Memory]
+recallGroupMemories chatId = recallMemories (groupUserId chatId)
diff --git a/Omni/Agent/PLAN.md b/Omni/Agent/PLAN.md
new file mode 100644
index 0000000..e51d09b
--- /dev/null
+++ b/Omni/Agent/PLAN.md
@@ -0,0 +1,589 @@
+# Omni Agent Infrastructure Plan
+
+**Status**: Draft
+**Author**: Ben (with AI assistance)
+**Date**: 2025-12-11
+
+## Vision
+
+A unified agent infrastructure supporting multiple specialized agents (coder, researcher, planner, telegram bot, etc.) with:
+- Shared tools, memory, and model backends
+- LoRA fine-tuning with model snapshots
+- Evals to prevent regression
+- Configurable LLM providers (local Ollama or OpenRouter)
+
+---
+
+## 0. Scope & Task Tracking
+
+**Building now**: Infrastructure and library primitives
+**First concrete agent**: Telegram Bot (validates the infrastructure)
+**Building later**: Researcher, Planner, and other agents
+
+### Active Tasks (in dependency order)
+
+| Task ID | Title | Status | Blocks |
+|---------|-------|--------|--------|
+| t-247 | Provider Abstraction | Open | t-248, t-249, t-250 |
+| t-248 | Memory System | Open (blocked by t-247) | t-251 |
+| t-249 | Tool Registry | Open (blocked by t-247) | t-251 |
+| t-250 | Evals Framework | Open (blocked by t-247) | - |
+| t-251 | Telegram Bot Agent | Open (blocked by t-248, t-249) | - |
+
+Run `jr task show <id>` for full implementation details on each task.
+
+---
+
+## 1. Architecture Overview
+
+```
+┌─────────────────────────────────────────────────────────────────┐
+│ Agent Layer │
+├──────────┬──────────┬──────────┬──────────┬────────────────────┤
+│ Jr/Coder │Researcher│ Planner │ Telegram │ Future Agents... │
+└────┬─────┴────┬─────┴────┬─────┴────┬─────┴────────────────────┘
+ │ │ │ │
+┌────▼──────────▼──────────▼──────────▼──────────────────────────┐
+│ Omni.Agent.Core │
+│ - Agent protocol (system prompt, tool execution loop) │
+│ - Model backend abstraction (Ollama | OpenRouter | Amp) │
+│ - Conversation/session management │
+└────┬────────────────────────────────────────────────────────────┘
+ │
+┌────▼────────────────────────────────────────────────────────────┐
+│ Shared Infrastructure │
+├─────────────────┬─────────────────┬─────────────────────────────┤
+│ Omni.Agent.Tools│ Omni.Agent.Memory│ Omni.Agent.Evals │
+│ - read_file │ - Vector DB │ - Regression tests │
+│ - edit_file │ - Fact retrieval │ - Quality metrics │
+│ - run_bash │ - Session history│ - Model comparison │
+│ - search │ │ │
+│ - web_search │ │ │
+│ - (pluggable) │ │ │
+├─────────────────┴─────────────────┴─────────────────────────────┤
+│ Omni.Agent.Training │
+│ - LoRA fine-tuning orchestration │
+│ - Model snapshotting │
+│ - Training data collection │
+└─────────────────────────────────────────────────────────────────┘
+```
+
+---
+
+## 2. Immediate Work Items
+
+### 2.1 Add Amp Backend Support (--amp flag)
+
+**Problem**: Custom engine works but Amp is better for complex coding tasks.
+
+**Solution**: Add `--engine` flag to `jr work`:
+
+```bash
+jr work <task-id> # Uses native Engine (default)
+jr work <task-id> --engine=amp # Uses Amp via subprocess
+jr work <task-id> --engine=ollama # Uses local Ollama
+```
+
+**Implementation**:
+1. Add `EngineBackend` type: `Native | Amp | Ollama Text`
+2. Modify `Omni.Agent.Worker.start` to accept backend selection
+3. For Amp: spawn `amp --prompt-file` subprocess, capture output
+4. For Ollama: call local API instead of OpenRouter
+
+**Files to modify**:
+- `Omni/Jr.hs` - CLI parsing
+- `Omni/Agent/Worker.hs` - Backend dispatch
+- `Omni/Agent/Engine.hs` - Add Ollama provider
+
+### 2.2 Abstract LLM Provider
+
+**Current state**: `Engine.hs` hardcodes OpenRouter.
+
+**Target state**: Pluggable `LLMProvider` interface.
+
+```haskell
+-- Omni/Agent/Provider.hs
+data Provider
+ = OpenRouter { apiKey :: Text, model :: Text }
+ | Ollama { baseUrl :: Text, model :: Text }
+ | AmpCLI { promptFile :: FilePath }
+
+chat :: Provider -> [Message] -> [Tool] -> IO (Either Text Message)
+```
+
+### 2.3 Memory / Vector DB Integration
+
+**Purpose**: Long-term memory across agent sessions, shared across all agents, private per user.
+
+**Decision**: Use sqlite-vss for vector similarity search (not Omni.Fact - that's project-scoped, not user-scoped).
+
+**Key requirements**:
+- Cross-agent sharing: Telegram agent learns "Ben is an AI engineer" → Researcher agent recalls this
+- Multi-user: Each family member has private memories (identified by Telegram ID initially)
+- Embeddings via Ollama `/api/embeddings` endpoint with nomic-embed-text model
+
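+A rough sketch of the embedding call, reusing the `http-conduit` style already used in
+`Engine.hs` (the `/api/embeddings` request/response field names and the model name are
+assumptions to check against the Ollama docs):
+
+```haskell
+-- Sketch only: fetch an embedding vector from a local Ollama instance
+embedText :: Text -> IO (Either Text [Float])
+embedText input = do
+  req0 <- HTTP.parseRequest "POST http://localhost:11434/api/embeddings"
+  let req =
+        HTTP.setRequestBodyJSON
+          (Aeson.object ["model" .= ("nomic-embed-text" :: Text), "prompt" .= input])
+          req0
+  resp <- HTTP.httpJSON req
+  case HTTP.getResponseBody resp of
+    Aeson.Object o
+      | Just arr <- KeyMap.lookup "embedding" o,
+        Aeson.Success vec <- Aeson.fromJSON arr ->
+          pure (Right vec)
+    _ -> pure (Left "unexpected /api/embeddings response")
+```
+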
+See task t-248 for full implementation details.
+
+### 2.4 Pluggable Tool System
+
+**Current**: `Omni.Agent.Tools` has 6 hardcoded tools.
+
+**Target**: Registry pattern allowing agents to declare their tool sets.
+
+```haskell
+-- Each agent specifies its tools
+coderTools :: [Tool]
+coderTools = [readFileTool, writeFileTool, editFileTool, runBashTool, searchCodebaseTool]
+
+researcherTools :: [Tool]
+researcherTools = [webSearchTool, readWebPageTool, extractFactsTool, readFileTool]
+
+plannerTools :: [Tool]
+plannerTools = [taskCreateTool, taskListTool, taskUpdateTool, factQueryTool]
+
+telegramTools :: [Tool]
+telegramTools = [sendMessageTool, getUpdatesTool, factQueryTool]
+```
+
+---
+
+## 3. Agent Specifications
+
+### 3.1 Jr/Coder (existing)
+
+**Purpose**: Autonomous coding agent for task completion.
+
+**Tools**: read_file, write_file, edit_file, run_bash, search_codebase, search_and_read
+
+**System prompt**: Task-focused, code conventions, test requirements.
+
+### 3.2 Researcher (new)
+
+**Purpose**: Information gathering, analysis, summarization.
+
+**Tools**:
+- `web_search` - Search the web
+- `read_web_page` - Fetch and parse web content
+- `extract_facts` - Store learned facts in knowledge base
+- `read_file` - Read local documents
+- `query_facts` - Retrieve from knowledge base
+
+**System prompt**: Focus on accuracy, citation, verification.
+
+### 3.3 Project Planner (new)
+
+**Purpose**: Break down high-level goals into actionable tasks.
+
+**Tools**:
+- `task_create` - Create new tasks
+- `task_list` - Query existing tasks
+- `task_update` - Modify task status/content
+- `fact_query` - Get project context
+- `dependency_graph` - Visualize task dependencies
+
+**System prompt**: Project management, task decomposition, dependency analysis.
+
+### 3.4 Telegram Bot (FIRST AGENT TO BUILD)
+
+**Purpose**: Family assistant accessible via Telegram. First concrete agent to validate infrastructure.
+
+**Tools**:
+- `remember` - Store facts about the user (from Memory module)
+- `recall` - Query user's memories (from Memory module)
+- `web_search` - Answer questions requiring web lookup (from Registry)
+
+**System prompt**: Friendly, helpful, family-appropriate, concise for chat interface.
+
+**User identification**: Telegram user ID → creates/retrieves User record in memory.db
+
+See task t-251 for full implementation details.
+
+---
+
+## 4. Shared Infrastructure
+
+### 4.1 Model Backend Configuration
+
+```haskell
+-- ~/.config/omni/models.yaml or environment variables
+data ModelConfig = ModelConfig
+ { defaultProvider :: Provider
+ , modelOverrides :: Map Text Provider -- per-agent overrides
+ }
+
+-- Example config:
+-- default_provider: openrouter
+-- openrouter:
+-- api_key: $OPENROUTER_API_KEY
+-- default_model: anthropic/claude-sonnet-4.5
+-- ollama:
+-- base_url: http://localhost:11434
+-- default_model: llama3.1:70b
+-- agents:
+-- telegram: { provider: ollama, model: llama3.1:8b } # cheaper for chat
+-- coder: { provider: openrouter, model: anthropic/claude-sonnet-4.5 }
+```
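+
+Resolution is then a small lookup, sketched here (assumes `Data.Map` qualified as `Map`):
+
+```haskell
+-- Per-agent override, falling back to the default provider
+providerFor :: Text -> ModelConfig -> Provider
+providerFor agent cfg =
+  fromMaybe (defaultProvider cfg) (Map.lookup agent (modelOverrides cfg))
+```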
+
+### 4.2 Evals Framework
+
+**Purpose**: Prevent regression when changing prompts, tools, or models.
+
+**Components**:
+1. **Test Cases**: Known task + expected outcome pairs
+2. **Runner**: Execute agent on test cases, capture results
+3. **Scorer**: Compare results (exact match, semantic similarity, human eval)
+4. **Dashboard**: Track scores over time
+
+**Implementation**:
+```haskell
+-- Omni/Agent/Eval.hs
+data EvalCase = EvalCase
+ { evalId :: Text
+ , evalPrompt :: Text
+ , evalExpectedBehavior :: Text -- or structured criteria
+ , evalTools :: [Tool]
+ }
+
+runEval :: AgentConfig -> EvalCase -> IO EvalResult
+```
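+
+`EvalResult` and the simplest scorer might look like this (field names are illustrative,
+not final):
+
+```haskell
+data EvalResult = EvalResult
+  { resultCaseId :: Text
+  , resultOutput :: Text  -- what the agent actually produced
+  , resultScore :: Double -- 0.0-1.0, filled in by a scorer
+  }
+
+-- Exact-match scoring; semantic and human scoring would layer on top of this
+scoreExact :: Text -> EvalResult -> EvalResult
+scoreExact expected r =
+  r {resultScore = if Text.strip (resultOutput r) == Text.strip expected then 1.0 else 0.0}
+```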
+
+### 4.3 Shared Memory System (Omni.Agent.Memory)
+
+**Critical requirement**: Cross-agent memory sharing with multi-user support.
+
+**Example**: The user tells the Telegram bot "I'm an AI engineer" → the Researcher agent, later searching for papers, should recall this context.
+
+#### Why not Omni.Fact?
+
+Current `Omni.Fact` limitations:
+- Project-scoped, not user-scoped
+- No user/identity concept
+- No embeddings for semantic retrieval
+- Tied to task system
+
+#### Memory Design
+
+```haskell
+-- Omni/Agent/Memory.hs
+
+-- | A memory is a piece of information about a user, learned by any agent
+data Memory = Memory
+ { memoryId :: UUID
+ , memoryUserId :: UserId -- Who this memory is about
+ , memoryContent :: Text -- The actual information
+ , memoryEmbedding :: Maybe Vector -- For semantic search
+ , memorySource :: MemorySource -- Which agent learned this
+ , memoryConfidence :: Double -- 0.0-1.0
+ , memoryCreatedAt :: UTCTime
+ , memoryLastAccessedAt :: UTCTime -- For relevance decay
+ , memoryTags :: [Text] -- Optional categorization
+ }
+
+data MemorySource = MemorySource
+ { sourceAgent :: Text -- "telegram", "researcher", "coder", etc.
+ , sourceSession :: UUID -- Session ID where this was learned
+ , sourceContext :: Text -- Brief context of how it was learned
+ }
+
+data User = User
+ { userId :: UUID
+ , userTelegramId :: Maybe Int64 -- Primary identifier initially
+ , userEmail :: Maybe Text -- Added later when email interface exists
+ , userName :: Text -- Display name ("Ben", "Alice", etc.)
+ , userCreatedAt :: UTCTime
+ }
+
+-- Users are identified by Telegram ID initially
+-- The agent learns more about users over time and stores in memories
+-- e.g., "Ben is an AI engineer" becomes a memory, not a user field
+
+-- | Core operations
+storeMemory :: UserId -> Text -> MemorySource -> IO Memory
+recallMemories :: UserId -> Text -> Int -> IO [Memory] -- semantic search
+forgetMemory :: UUID -> IO ()
+
+-- | Embedding integration (via Ollama or other provider)
+embedText :: Text -> IO Vector
+similaritySearch :: Vector -> [Memory] -> Int -> [Memory]
+```
+
+#### Multi-User Architecture
+
+```
+┌─────────────────────────────────────────────────────────┐
+│ Memory Store │
+├─────────────────────────────────────────────────────────┤
+│ users table: │
+│ id TEXT PRIMARY KEY │
+│ name TEXT │
+│ created_at TIMESTAMP │
+├─────────────────────────────────────────────────────────┤
+│ memories table: │
+│ id TEXT PRIMARY KEY │
+│ user_id TEXT REFERENCES users(id) │
+│ content TEXT │
+│ embedding BLOB -- serialized float vector │
+│ source_agent TEXT │
+│ source_session TEXT │
+│ source_context TEXT │
+│ confidence REAL │
+│ created_at TIMESTAMP │
+│ last_accessed_at TIMESTAMP │
+│ tags TEXT -- JSON array │
+└─────────────────────────────────────────────────────────┘
+```
+
+#### Memory Retrieval in Agent Loop
+
+When any agent runs, it:
+1. Identifies the current user (from context/session)
+2. Extracts key concepts from the user's request
+3. Calls `recallMemories userId query 10` to get relevant memories
+4. Injects memories into system prompt as context
+5. After completion, extracts new learnings and calls `storeMemory`
+
+```haskell
+-- In agent loop
+runAgentWithMemory :: UserId -> AgentConfig -> Text -> IO AgentResult
+runAgentWithMemory userId config prompt = do
+ -- Recall relevant memories
+ memories <- recallMemories userId prompt 10
+ let memoryContext = formatMemoriesForPrompt memories
+
+ -- Inject into system prompt
+ let enhancedPrompt = agentSystemPrompt config <> "\n\n## User Context\n" <> memoryContext
+
+ -- Run agent
+ result <- runAgent config { agentSystemPrompt = enhancedPrompt } prompt
+
+ -- Extract and store new memories (could be done by the agent via tool)
+ pure result
+```
+
+#### Memory Extraction Tool
+
+Agents can explicitly store memories:
+
+```haskell
+storeMemoryTool :: Tool
+storeMemoryTool = Tool
+ { toolName = "remember"
+ , toolDescription = "Store a piece of information about the user for future reference"
+ , toolExecute = \args -> do
+ let content = args .: "content"
+ tags = args .:? "tags" .!= []
+ memory <- storeMemory currentUserId content currentSource
+ pure (toJSON memory)
+ }
+```
+
+### 4.4 LoRA Fine-tuning Service
+
+**Purpose**: Custom-tune models on successful task completions.
+
+**Workflow**:
+1. Collect successful agent sessions (prompt + tool calls + result)
+2. Format as training data (instruction, input, output)
+3. Run LoRA training via Ollama or external service
+4. Snapshot trained model with version tag
+5. A/B test against base model via evals
+
+**Storage**:
+- Training data: `_/training/<agent>/<date>.jsonl`
+- Models: Ollama model registry with tags
+
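+A sketch of the export step, matching the (instruction, input, output) triple above
+(field names follow the common instruction-tuning layout and are not final; assume `BLC`
+is `Data.ByteString.Lazy.Char8`):
+
+```haskell
+data TrainingExample = TrainingExample
+  { exInstruction :: Text -- system prompt / task framing
+  , exInput :: Text       -- the user request or task body
+  , exOutput :: Text      -- the successful completion (final answer or diff)
+  }
+
+toRecord :: TrainingExample -> Aeson.Value
+toRecord e =
+  Aeson.object
+    [ "instruction" .= exInstruction e
+    , "input" .= exInput e
+    , "output" .= exOutput e
+    ]
+
+-- One JSON object per line, e.g. _/training/coder/2025-12-17.jsonl
+exportJsonl :: FilePath -> [TrainingExample] -> IO ()
+exportJsonl path examples =
+  BLC.writeFile path (BLC.unlines (map (\e -> Aeson.encode (toRecord e)) examples))
+```
+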
+---
+
+## 5. Infrastructure Build Plan
+
+Focus: Library primitives first, agents later.
+
+### Phase 1: Provider Abstraction (1-2 days)
+- [ ] Create `Omni.Agent.Provider` module with unified interface
+- [ ] Extract OpenRouter logic from `Engine.hs`
+- [ ] Add Ollama provider implementation
+- [ ] Add `--engine` flag to `jr work`
+- [ ] Test with local Llama model
+
+### Phase 2: Amp Re-integration (1 day)
+- [ ] Add Amp subprocess backend to Provider
+- [ ] Handle Amp's streaming output
+- [ ] Parse Amp thread URL for linking
+
+### Phase 3: Memory System (3-4 days)
+- [ ] Create `Omni.Agent.Memory` module (separate from Fact)
+- [ ] Design schema: users, memories tables
+- [ ] Implement `storeMemory`, `recallMemories`, `forgetMemory`
+- [ ] Add embedding support via Ollama `/api/embeddings`
+- [ ] Implement similarity search
+- [ ] Create `remember` tool for agents
+- [ ] Add `runAgentWithMemory` wrapper
+
+### Phase 4: Tool Registry (1-2 days)
+- [ ] Create `Omni.Agent.Registry` for tool management
+- [ ] Define tool categories (coding, web, memory, task)
+- [ ] Allow agents to declare tool requirements
+- [ ] Add web tools (web_search, read_web_page)
+
+### Phase 5: Evals Framework (2-3 days)
+- [ ] Create `Omni.Agent.Eval` module
+- [ ] Define `EvalCase` and `EvalResult` types
+- [ ] Build eval runner
+- [ ] Add scoring (exact match, semantic, custom)
+- [ ] Create initial eval suite for Jr/coder
+
+### Phase 6: Telegram Bot Agent (3-4 days)
+**First concrete agent** - validates the infrastructure.
+
+- [ ] Create `Omni.Agent.Telegram` module
+- [ ] Telegram Bot API integration (getUpdates polling or webhook)
+- [ ] User identification via Telegram user ID
+- [ ] Auto-create user record on first message
+- [ ] Wire up memory system (recall on message, store learnings)
+- [ ] Basic conversation loop with LLM
+- [ ] Deploy as background service
+- [ ] Add `jr telegram` command for manual start
+
+**Tools for Telegram agent:**
+- `remember` - store facts about user
+- `recall` - query user's memories
+- `web_search` - answer questions (optional, phase 4)
+
+### Phase 7: Training Data Collection (1-2 days)
+- [ ] Add session export to training format
+- [ ] Store successful completions in `_/training/`
+- [ ] Create `jr train export` command
+
+### (Future) Additional Agents
+- Researcher agent
+- Planner agent
+- Email interface (links to Telegram user identity)
+- Others...
+
+---
+
+## 6. Design Decisions
+
+| Question | Decision |
+|----------|----------|
+| Vector DB | **sqlite-vss** - SQLite extension for vector similarity |
+| User identity | **Telegram ID** initially, link to email later when adding email interface |
+| Memory privacy | **Cross-agent shared, per-user private** - all agents see all memories for a user, but users can't see each other's memories |
+| Amp integration | TBD - subprocess likely |
+| Memory decay | TBD - probably keep forever with relevance scoring |
+| LoRA training | TBD - local Ollama or cloud |
+
+---
+
+## 7. File Structure (Proposed)
+
+```
+Omni/Agent/
+├── Core.hs # Base agent types, Worker state (existing)
+├── Engine.hs # Agent loop, tool execution (existing)
+├── Provider.hs # LLM provider abstraction (NEW)
+├── Provider/
+│ ├── OpenRouter.hs # Extracted from Engine.hs
+│ ├── Ollama.hs # Local model support
+│ └── Amp.hs # Amp CLI subprocess
+├── Memory.hs # Shared memory system (NEW)
+├── Memory/
+│ └── Embedding.hs # Vector operations, Ollama embeddings
+├── Tools.hs # Core coding tools (existing)
+├── Tools/
+│ ├── Web.hs # web_search, read_web_page (NEW)
+│ └── Memory.hs # remember, recall tools (NEW)
+├── Eval.hs # Evaluation framework (NEW)
+├── Training.hs # Training data collection (NEW)
+├── Worker.hs # Jr worker loop (existing)
+├── Git.hs # Git operations (existing)
+├── Log.hs # Logging utilities (existing)
+├── Event.hs # Event types (existing)
+├── DESIGN.md # Current design doc
+└── PLAN.md # This document
+```
+
+---
+
+## 8. Database Schema Additions
+
+```sql
+-- Memory system tables (new database: memory.db)
+
+CREATE TABLE users (
+ id TEXT PRIMARY KEY, -- UUID
+ telegram_id INTEGER UNIQUE, -- Telegram user ID (primary identifier)
+ email TEXT UNIQUE, -- Added later for email interface
+ name TEXT NOT NULL, -- Display name
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE memories (
+ id TEXT PRIMARY KEY, -- UUID
+ user_id TEXT NOT NULL REFERENCES users(id),
+ content TEXT NOT NULL,
+ embedding BLOB, -- float32 vector for sqlite-vss
+ source_agent TEXT NOT NULL, -- "telegram", "coder", etc.
+ source_session TEXT, -- Session UUID
+ source_context TEXT, -- How this was learned
+ confidence REAL DEFAULT 0.8,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ last_accessed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ tags TEXT -- JSON array
+);
+
+-- sqlite-vss virtual table for vector similarity search
+CREATE VIRTUAL TABLE memories_vss USING vss0(embedding(1536));
+
+CREATE INDEX idx_memories_user ON memories(user_id);
+CREATE INDEX idx_memories_agent ON memories(source_agent);
+```
+
+---
+
+## 9. Key Code References for Implementers
+
+When implementing tasks, refer to these existing patterns:
+
+### Existing Agent Infrastructure
+| File | Purpose | Key Functions/Types |
+|------|---------|---------------------|
+| `Omni/Agent/Engine.hs` | Agent loop, LLM calls | `runAgent`, `chat`, `Tool`, `LLM`, `AgentConfig` |
+| `Omni/Agent/Tools.hs` | Tool implementations | `readFileTool`, `editFileTool`, `runBashTool`, `allTools` |
+| `Omni/Agent/Worker.hs` | Jr worker loop | `start`, `runWithEngine`, `buildFullPrompt` |
+| `Omni/Agent/Core.hs` | Worker state types | `Worker`, `WorkerStatus` |
+
+### Database Patterns (follow these)
+| File | Purpose | Key Patterns |
+|------|---------|--------------|
+| `Omni/Task/Core.hs` | SQLite usage | `withDb`, schema migrations, ToRow/FromRow instances |
+| `Omni/Fact.hs` | CRUD operations | `createFact`, `getFact`, `getAllFacts` |
+
+### CLI Patterns
+| File | Purpose | Key Patterns |
+|------|---------|--------------|
+| `Omni/Jr.hs` | Main CLI entry | Docopt usage, command dispatch in `move` function |
+| `Omni/Cli.hs` | CLI helpers | `Cli.Plan`, `Cli.has`, `Cli.getArg` |
+
+### HTTP Patterns
+| File | Purpose | Key Patterns |
+|------|---------|--------------|
+| `Omni/Agent/Engine.hs` lines 560-594 | HTTP POST to LLM API | `http-conduit` usage, JSON encoding |
+
+### Build System
+- Build: `bild Omni/Agent/NewModule.hs`
+- Test: `bild --test Omni/Agent/NewModule.hs`
+- Dependencies: Add to module header comments (`: dep package-name`)
+
+---
+
+## 10. Next Steps
+
+Execute tasks in order:
+1. **t-247** Provider Abstraction (unblocked, start here)
+2. **t-248** Memory System (after t-247)
+3. **t-249** Tool Registry (after t-247, can parallel with t-248)
+4. **t-250** Evals Framework (after t-247)
+5. **t-251** Telegram Bot Agent (after t-248 + t-249)
+
+Run `jr task ready` to see what's available to work on.
diff --git a/Omni/Agent/Paths.hs b/Omni/Agent/Paths.hs
new file mode 100644
index 0000000..6facdc6
--- /dev/null
+++ b/Omni/Agent/Paths.hs
@@ -0,0 +1,39 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+-- | Configurable paths for Ava data directories.
+--
+-- In development, uses default paths under @_/var/ava/@.
+-- In production, set @AVA_DATA_ROOT@ to @/home/ava@ to use the dedicated workspace.
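+--
+-- For example, with @AVA_DATA_ROOT=/home/ava@, 'skillsDir' resolves to
+-- @/home/ava/skills@ and @userScratchDir "ben"@ to @/home/ava/users/ben@.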
+module Omni.Agent.Paths
+ ( avaDataRoot,
+ skillsDir,
+ outreachDir,
+ userScratchRoot,
+ userScratchDir,
+ )
+where
+
+import Alpha
+import qualified Data.Text as Text
+import System.Environment (lookupEnv)
+import System.FilePath ((</>))
+import System.IO.Unsafe (unsafePerformIO)
+
+avaDataRoot :: FilePath
+avaDataRoot = unsafePerformIO <| do
+ m <- lookupEnv "AVA_DATA_ROOT"
+ pure (fromMaybe "_/var/ava" m)
+{-# NOINLINE avaDataRoot #-}
+
+skillsDir :: FilePath
+skillsDir = avaDataRoot </> "skills"
+
+outreachDir :: FilePath
+outreachDir = avaDataRoot </> "outreach"
+
+userScratchRoot :: FilePath
+userScratchRoot = avaDataRoot </> "users"
+
+userScratchDir :: Text -> FilePath
+userScratchDir user = userScratchRoot </> Text.unpack user
diff --git a/Omni/Agent/Provider.hs b/Omni/Agent/Provider.hs
new file mode 100644
index 0000000..db30e5f
--- /dev/null
+++ b/Omni/Agent/Provider.hs
@@ -0,0 +1,695 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | LLM Provider abstraction for multi-backend support.
+--
+-- Supports multiple LLM backends:
+-- - OpenRouter (cloud API, multiple models)
+-- - Ollama (local models)
+-- - Amp CLI (subprocess)
+--
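+-- Minimal usage sketch (the model name is only an example and assumes that
+-- model has been pulled into a local Ollama):
+--
+-- > result <- chat (defaultOllama "llama3.1:8b") [] [Message User "Hello" Nothing Nothing]
+--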
+-- : out omni-agent-provider
+-- : dep aeson
+-- : dep http-conduit
+-- : dep http-client-tls
+-- : dep http-types
+-- : dep case-insensitive
+module Omni.Agent.Provider
+ ( Provider (..),
+ ProviderConfig (..),
+ ChatResult (..),
+ Message (..),
+ Role (..),
+ ToolCall (..),
+ FunctionCall (..),
+ Usage (..),
+ ToolApi (..),
+ StreamChunk (..),
+ defaultOpenRouter,
+ defaultOllama,
+ chat,
+ chatWithUsage,
+ chatStream,
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.ByteString as BS
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.CaseInsensitive as CI
+import Data.IORef (modifyIORef, newIORef, readIORef, writeIORef)
+import qualified Data.IntMap.Strict as IntMap
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import qualified Network.HTTP.Client as HTTPClient
+import qualified Network.HTTP.Client.TLS as HTTPClientTLS
+import qualified Network.HTTP.Simple as HTTP
+import Network.HTTP.Types.Status (statusCode)
+import qualified Omni.Test as Test
+import qualified System.Timeout as Timeout
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Provider"
+ [ Test.unit "defaultOpenRouter has correct endpoint" <| do
+ case defaultOpenRouter "" "test-model" of
+ OpenRouter cfg -> providerBaseUrl cfg Test.@=? "https://openrouter.ai/api/v1"
+ _ -> Test.assertFailure "Expected OpenRouter",
+ Test.unit "defaultOllama has correct endpoint" <| do
+ case defaultOllama "test-model" of
+ Ollama cfg -> providerBaseUrl cfg Test.@=? "http://localhost:11434"
+ _ -> Test.assertFailure "Expected Ollama",
+ Test.unit "ChatResult preserves message" <| do
+ let msg = Message User "test" Nothing Nothing
+ result = ChatResult msg Nothing
+ chatMessage result Test.@=? msg
+ ]
+
+-- | HTTP request timeout in microseconds (60 seconds)
+httpTimeoutMicros :: Int
+httpTimeoutMicros = 60 * 1000000
+
+-- | Maximum number of retries for transient failures
+maxRetries :: Int
+maxRetries = 3
+
+-- | Initial backoff delay in microseconds (1 second)
+initialBackoffMicros :: Int
+initialBackoffMicros = 1000000
+
+-- | Retry an IO action with exponential backoff
+-- Retries on timeout, connection errors, and 5xx status codes
+retryWithBackoff :: Int -> Int -> IO (Either Text a) -> IO (Either Text a)
+retryWithBackoff retriesLeft backoff action
+ | retriesLeft <= 0 = action
+ | otherwise = do
+ result <- Timeout.timeout httpTimeoutMicros action
+ case result of
+ Nothing -> do
+ threadDelay backoff
+ retryWithBackoff (retriesLeft - 1) (backoff * 2) action
+ Just (Left err)
+ | isRetryable err -> do
+ threadDelay backoff
+ retryWithBackoff (retriesLeft - 1) (backoff * 2) action
+ Just r -> pure r
+ where
+ isRetryable err =
+ "HTTP error: 5"
+ `Text.isPrefixOf` err
+ || "connection"
+ `Text.isInfixOf` Text.toLower err
+ || "timeout"
+ `Text.isInfixOf` Text.toLower err
+
+data Provider
+ = OpenRouter ProviderConfig
+ | Ollama ProviderConfig
+ | AmpCLI FilePath
+ deriving (Show, Eq, Generic)
+
+data ProviderConfig = ProviderConfig
+ { providerBaseUrl :: Text,
+ providerApiKey :: Text,
+ providerModel :: Text,
+ providerExtraHeaders :: [(ByteString, ByteString)]
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON ProviderConfig where
+ toJSON c =
+ Aeson.object
+ [ "baseUrl" .= providerBaseUrl c,
+ "apiKey" .= providerApiKey c,
+ "model" .= providerModel c
+ ]
+
+instance Aeson.FromJSON ProviderConfig where
+ parseJSON =
+ Aeson.withObject "ProviderConfig" <| \v ->
+ (ProviderConfig </ (v Aeson..: "baseUrl"))
+ <*> (v Aeson..: "apiKey")
+ <*> (v Aeson..: "model")
+ <*> pure []
+
+defaultOpenRouter :: Text -> Text -> Provider
+defaultOpenRouter apiKey model =
+ OpenRouter
+ ProviderConfig
+ { providerBaseUrl = "https://openrouter.ai/api/v1",
+ providerApiKey = apiKey,
+ providerModel = model,
+ providerExtraHeaders =
+ [ ("HTTP-Referer", "https://omni.dev"),
+ ("X-Title", "Omni Agent")
+ ]
+ }
+
+defaultOllama :: Text -> Provider
+defaultOllama model =
+ Ollama
+ ProviderConfig
+ { providerBaseUrl = "http://localhost:11434",
+ providerApiKey = "",
+ providerModel = model,
+ providerExtraHeaders = []
+ }
+
+data Role = System | User | Assistant | ToolRole
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON Role where
+ toJSON System = Aeson.String "system"
+ toJSON User = Aeson.String "user"
+ toJSON Assistant = Aeson.String "assistant"
+ toJSON ToolRole = Aeson.String "tool"
+
+instance Aeson.FromJSON Role where
+ parseJSON = Aeson.withText "Role" parseRole
+ where
+ parseRole "system" = pure System
+ parseRole "user" = pure User
+ parseRole "assistant" = pure Assistant
+ parseRole "tool" = pure ToolRole
+ parseRole _ = empty
+
+data Message = Message
+ { msgRole :: Role,
+ msgContent :: Text,
+ msgToolCalls :: Maybe [ToolCall],
+ msgToolCallId :: Maybe Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON Message where
+ toJSON m =
+ Aeson.object
+ <| catMaybes
+ [ Just ("role" .= msgRole m),
+ Just ("content" .= msgContent m),
+ ("tool_calls" .=) </ msgToolCalls m,
+ ("tool_call_id" .=) </ msgToolCallId m
+ ]
+
+instance Aeson.FromJSON Message where
+ parseJSON =
+ Aeson.withObject "Message" <| \v ->
+ (Message </ (v Aeson..: "role"))
+ <*> (v Aeson..:? "content" Aeson..!= "")
+ <*> (v Aeson..:? "tool_calls")
+ <*> (v Aeson..:? "tool_call_id")
+
+data ToolCall = ToolCall
+ { tcId :: Text,
+ tcType :: Text,
+ tcFunction :: FunctionCall
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON ToolCall where
+ toJSON tc =
+ Aeson.object
+ [ "id" .= tcId tc,
+ "type" .= tcType tc,
+ "function" .= tcFunction tc
+ ]
+
+instance Aeson.FromJSON ToolCall where
+ parseJSON =
+ Aeson.withObject "ToolCall" <| \v ->
+ (ToolCall </ (v Aeson..: "id"))
+ <*> (v Aeson..:? "type" Aeson..!= "function")
+ <*> (v Aeson..: "function")
+
+data FunctionCall = FunctionCall
+ { fcName :: Text,
+ fcArguments :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON FunctionCall where
+ toJSON fc =
+ Aeson.object
+ [ "name" .= fcName fc,
+ "arguments" .= fcArguments fc
+ ]
+
+instance Aeson.FromJSON FunctionCall where
+ parseJSON =
+ Aeson.withObject "FunctionCall" <| \v ->
+ (FunctionCall </ (v Aeson..: "name"))
+ <*> (v Aeson..:? "arguments" Aeson..!= "{}")
+
+data Usage = Usage
+ { usagePromptTokens :: Int,
+ usageCompletionTokens :: Int,
+ usageTotalTokens :: Int,
+ usageCost :: Maybe Double
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Usage where
+ parseJSON =
+ Aeson.withObject "Usage" <| \v ->
+ (Usage </ (v Aeson..: "prompt_tokens"))
+ <*> (v Aeson..: "completion_tokens")
+ <*> (v Aeson..: "total_tokens")
+ <*> (v Aeson..:? "cost")
+
+data ChatResult = ChatResult
+ { chatMessage :: Message,
+ chatUsage :: Maybe Usage
+ }
+ deriving (Show, Eq)
+
+data ToolApi = ToolApi
+ { toolApiName :: Text,
+ toolApiDescription :: Text,
+ toolApiParameters :: Aeson.Value
+ }
+ deriving (Generic)
+
+instance Aeson.ToJSON ToolApi where
+ toJSON t =
+ Aeson.object
+ [ "type" .= ("function" :: Text),
+ "function"
+ .= Aeson.object
+ [ "name" .= toolApiName t,
+ "description" .= toolApiDescription t,
+ "parameters" .= toolApiParameters t
+ ]
+ ]
+
+data ChatCompletionRequest = ChatCompletionRequest
+ { reqModel :: Text,
+ reqMessages :: [Message],
+ reqTools :: Maybe [ToolApi]
+ }
+ deriving (Generic)
+
+instance Aeson.ToJSON ChatCompletionRequest where
+ toJSON r =
+ Aeson.object
+ <| catMaybes
+ [ Just ("model" .= reqModel r),
+ Just ("messages" .= reqMessages r),
+ ("tools" .=) </ reqTools r,
+ Just ("usage" .= Aeson.object ["include" .= True])
+ ]
+
+data Choice = Choice
+ { choiceIndex :: Int,
+ choiceMessage :: Message,
+ choiceFinishReason :: Maybe Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Choice where
+ parseJSON =
+ Aeson.withObject "Choice" <| \v ->
+ (Choice </ (v Aeson..: "index"))
+ <*> (v Aeson..: "message")
+ <*> (v Aeson..:? "finish_reason")
+
+data ChatCompletionResponse = ChatCompletionResponse
+ { respId :: Text,
+ respChoices :: [Choice],
+ respModel :: Text,
+ respUsage :: Maybe Usage
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON ChatCompletionResponse where
+ parseJSON =
+ Aeson.withObject "ChatCompletionResponse" <| \v ->
+ (ChatCompletionResponse </ (v Aeson..: "id"))
+ <*> (v Aeson..: "choices")
+ <*> (v Aeson..: "model")
+ <*> (v Aeson..:? "usage")
+
+chat :: Provider -> [ToolApi] -> [Message] -> IO (Either Text Message)
+chat provider tools messages = do
+ result <- chatWithUsage provider tools messages
+ pure (chatMessage </ result)
+
+chatWithUsage :: Provider -> [ToolApi] -> [Message] -> IO (Either Text ChatResult)
+chatWithUsage (OpenRouter cfg) tools messages = chatOpenAI cfg tools messages
+chatWithUsage (Ollama cfg) tools messages = chatOllama cfg tools messages
+chatWithUsage (AmpCLI _promptFile) _tools _messages =
+  pure (Left "Amp CLI provider not yet implemented")
+
+chatOpenAI :: ProviderConfig -> [ToolApi] -> [Message] -> IO (Either Text ChatResult)
+chatOpenAI cfg tools messages = do
+ let url = Text.unpack (providerBaseUrl cfg) <> "/chat/completions"
+ req0 <- HTTP.parseRequest url
+ let body =
+ ChatCompletionRequest
+ { reqModel = providerModel cfg,
+ reqMessages = messages,
+ reqTools = if null tools then Nothing else Just tools
+ }
+ baseReq =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestHeader "Authorization" ["Bearer " <> TE.encodeUtf8 (providerApiKey cfg)]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+ req = foldr addHeader baseReq (providerExtraHeaders cfg)
+ addHeader (name, value) = HTTP.addRequestHeader (CI.mk name) value
+
+ retryWithBackoff maxRetries initialBackoffMicros <| do
+ response <- HTTP.httpLBS req
+ let status = HTTP.getResponseStatusCode response
+ respBody = HTTP.getResponseBody response
+ cleanedBody = BL.dropWhile (\b -> b `elem` [0x0a, 0x0d, 0x20]) respBody
+ if status >= 200 && status < 300
+ then case Aeson.eitherDecode cleanedBody of
+ Right resp ->
+ case respChoices resp of
+ (c : _) -> pure (Right (ChatResult (choiceMessage c) (respUsage resp)))
+ [] -> pure (Left "No choices in response")
+ Left err -> do
+ let bodyPreview = TE.decodeUtf8 (BL.toStrict (BL.take 500 cleanedBody))
+ pure (Left ("Failed to parse response: " <> Text.pack err <> " | Body: " <> bodyPreview))
+ else pure (Left ("HTTP error: " <> tshow status <> " - " <> TE.decodeUtf8 (BL.toStrict respBody)))
+
+chatOllama :: ProviderConfig -> [ToolApi] -> [Message] -> IO (Either Text ChatResult)
+chatOllama cfg tools messages = do
+ let url = Text.unpack (providerBaseUrl cfg) <> "/api/chat"
+ req0 <- HTTP.parseRequest url
+ let body =
+ Aeson.object
+ [ "model" .= providerModel cfg,
+ "messages" .= messages,
+ "tools" .= if null tools then Aeson.Null else Aeson.toJSON tools,
+ "stream" .= False
+ ]
+ req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+
+ retryWithBackoff maxRetries initialBackoffMicros <| do
+ response <- HTTP.httpLBS req
+ let status = HTTP.getResponseStatusCode response
+ if status >= 200 && status < 300
+ then case Aeson.decode (HTTP.getResponseBody response) of
+ Just resp -> parseOllamaResponse resp
+ Nothing -> pure (Left ("Failed to parse Ollama response: " <> TE.decodeUtf8 (BL.toStrict (HTTP.getResponseBody response))))
+ else pure (Left ("HTTP error: " <> tshow status <> " - " <> TE.decodeUtf8 (BL.toStrict (HTTP.getResponseBody response))))
+
+parseOllamaResponse :: Aeson.Value -> IO (Either Text ChatResult)
+parseOllamaResponse val =
+ case val of
+ Aeson.Object obj -> do
+ let msgResult = do
+ msgObj <- case KeyMap.lookup "message" obj of
+ Just m -> Right m
+ Nothing -> Left "No message in response"
+ case Aeson.fromJSON msgObj of
+ Aeson.Success msg -> Right msg
+ Aeson.Error e -> Left (Text.pack e)
+ usageResult = case KeyMap.lookup "prompt_eval_count" obj of
+ Just (Aeson.Number promptTokens) ->
+ case KeyMap.lookup "eval_count" obj of
+ Just (Aeson.Number evalTokens) ->
+ Just
+ Usage
+ { usagePromptTokens = round promptTokens,
+ usageCompletionTokens = round evalTokens,
+ usageTotalTokens = round promptTokens + round evalTokens,
+ usageCost = Nothing
+ }
+ _ -> Nothing
+ _ -> Nothing
+ case msgResult of
+ Right msg -> pure (Right (ChatResult msg usageResult))
+ Left e -> pure (Left e)
+ _ -> pure (Left "Expected object response from Ollama")
+
+data StreamChunk
+ = StreamContent Text
+ | StreamToolCall ToolCall
+ | StreamToolCallDelta ToolCallDelta
+ | StreamDone ChatResult
+ | StreamError Text
+ deriving (Show, Eq)
+
+data ToolCallDelta = ToolCallDelta
+ { tcdIndex :: Int,
+ tcdId :: Maybe Text,
+ tcdFunctionName :: Maybe Text,
+ tcdFunctionArgs :: Maybe Text
+ }
+ deriving (Show, Eq)
+
+chatStream :: Provider -> [ToolApi] -> [Message] -> (StreamChunk -> IO ()) -> IO (Either Text ChatResult)
+chatStream (OpenRouter cfg) tools messages onChunk = chatStreamOpenAI cfg tools messages onChunk
+chatStream (Ollama _cfg) _tools _messages _onChunk = pure (Left "Streaming not implemented for Ollama")
+chatStream (AmpCLI _) _tools _messages _onChunk = pure (Left "Streaming not implemented for AmpCLI")
+
+chatStreamOpenAI :: ProviderConfig -> [ToolApi] -> [Message] -> (StreamChunk -> IO ()) -> IO (Either Text ChatResult)
+chatStreamOpenAI cfg tools messages onChunk = do
+ let url = Text.unpack (providerBaseUrl cfg) <> "/chat/completions"
+ managerSettings =
+ HTTPClientTLS.tlsManagerSettings
+ { HTTPClient.managerResponseTimeout = HTTPClient.responseTimeoutMicro httpTimeoutMicros
+ }
+ manager <- HTTPClient.newManager managerSettings
+ req0 <- HTTP.parseRequest url
+ let body =
+ Aeson.object
+ <| catMaybes
+ [ Just ("model" .= providerModel cfg),
+ Just ("messages" .= messages),
+ if null tools then Nothing else Just ("tools" .= tools),
+ Just ("stream" .= True),
+ Just ("usage" .= Aeson.object ["include" .= True])
+ ]
+ baseReq =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestHeader "Authorization" ["Bearer " <> TE.encodeUtf8 (providerApiKey cfg)]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+ req = foldr addHeader baseReq (providerExtraHeaders cfg)
+ addHeader (name, value) = HTTP.addRequestHeader (CI.mk name) value
+
+ result <-
+ try <| HTTPClient.withResponse req manager <| \response -> do
+ let status = HTTPClient.responseStatus response
+ code = statusCode status
+ if code >= 200 && code < 300
+ then processSSEStream (HTTPClient.responseBody response) onChunk
+ else do
+ bodyChunks <- readAllBody (HTTPClient.responseBody response)
+ let errBody = TE.decodeUtf8 (BS.concat bodyChunks)
+ pure (Left ("HTTP error: " <> tshow code <> " - " <> errBody))
+ case result of
+ Left (e :: SomeException) -> pure (Left ("Stream request failed: " <> tshow e))
+ Right r -> pure r
+
+readAllBody :: IO BS.ByteString -> IO [BS.ByteString]
+readAllBody readBody = go []
+ where
+ go acc = do
+ chunk <- readBody
+ if BS.null chunk
+ then pure (reverse acc)
+ else go (chunk : acc)
+
+data ToolCallAccum = ToolCallAccum
+ { tcaId :: Text,
+ tcaName :: Text,
+ tcaArgs :: Text
+ }
+
+processSSEStream :: IO BS.ByteString -> (StreamChunk -> IO ()) -> IO (Either Text ChatResult)
+processSSEStream readBody onChunk = do
+ accumulatedContent <- newIORef ("" :: Text)
+ toolCallAccum <- newIORef (IntMap.empty :: IntMap.IntMap ToolCallAccum)
+ lastUsage <- newIORef (Nothing :: Maybe Usage)
+ buffer <- newIORef ("" :: Text)
+
+ let loop = do
+ chunk <- readBody
+ if BS.null chunk
+ then do
+ content <- readIORef accumulatedContent
+ accum <- readIORef toolCallAccum
+ usage <- readIORef lastUsage
+ let toolCalls = map accumToToolCall (IntMap.elems accum)
+ finalMsg =
+ Message
+ { msgRole = Assistant,
+ msgContent = content,
+ msgToolCalls = if null toolCalls then Nothing else Just toolCalls,
+ msgToolCallId = Nothing
+ }
+ pure (Right (ChatResult finalMsg usage))
+ else do
+ modifyIORef buffer (<> TE.decodeUtf8 chunk)
+ buf <- readIORef buffer
+ let (events, remaining) = parseSSEEvents buf
+ writeIORef buffer remaining
+ forM_ events <| \event -> do
+ case parseStreamEvent event of
+ Just (StreamContent txt) -> do
+ modifyIORef accumulatedContent (<> txt)
+ onChunk (StreamContent txt)
+ Just (StreamToolCallDelta delta) -> do
+ modifyIORef toolCallAccum (mergeToolCallDelta delta)
+ Just (StreamToolCall tc) -> do
+ modifyIORef toolCallAccum (mergeCompleteToolCall tc)
+ onChunk (StreamToolCall tc)
+ Just (StreamDone result) -> do
+ writeIORef lastUsage (chatUsage result)
+ Just (StreamError err) -> do
+ onChunk (StreamError err)
+ Nothing -> pure ()
+ loop
+
+ loop
+
+accumToToolCall :: ToolCallAccum -> ToolCall
+accumToToolCall acc =
+ ToolCall
+ { tcId = tcaId acc,
+ tcType = "function",
+ tcFunction = FunctionCall (tcaName acc) (tcaArgs acc)
+ }
+
+mergeToolCallDelta :: ToolCallDelta -> IntMap.IntMap ToolCallAccum -> IntMap.IntMap ToolCallAccum
+mergeToolCallDelta delta accum =
+ let idx = tcdIndex delta
+ existing = IntMap.lookup idx accum
+ updated = case existing of
+ Nothing ->
+ ToolCallAccum
+ { tcaId = fromMaybe "" (tcdId delta),
+ tcaName = fromMaybe "" (tcdFunctionName delta),
+ tcaArgs = fromMaybe "" (tcdFunctionArgs delta)
+ }
+ Just a ->
+ a
+ { tcaId = fromMaybe (tcaId a) (tcdId delta),
+ tcaName = fromMaybe (tcaName a) (tcdFunctionName delta),
+ tcaArgs = tcaArgs a <> fromMaybe "" (tcdFunctionArgs delta)
+ }
+ in IntMap.insert idx updated accum
+
+mergeCompleteToolCall :: ToolCall -> IntMap.IntMap ToolCallAccum -> IntMap.IntMap ToolCallAccum
+mergeCompleteToolCall tc accum =
+ let nextIdx = if IntMap.null accum then 0 else fst (IntMap.findMax accum) + 1
+ newAccum =
+ ToolCallAccum
+ { tcaId = tcId tc,
+ tcaName = fcName (tcFunction tc),
+ tcaArgs = fcArguments (tcFunction tc)
+ }
+ in IntMap.insert nextIdx newAccum accum
+
+parseSSEEvents :: Text -> ([Text], Text)
+parseSSEEvents input =
+ let lines' = Text.splitOn "\n" input
+ (events, remaining) = go [] [] lines'
+ in (events, remaining)
+ where
+ go events current [] = (reverse events, Text.intercalate "\n" (reverse current))
+ go events current (line : rest)
+ | Text.null line && not (null current) =
+ go (Text.intercalate "\n" (reverse current) : events) [] rest
+ | otherwise =
+ go events (line : current) rest
+
+parseStreamEvent :: Text -> Maybe StreamChunk
+parseStreamEvent eventText = do
+ let dataLines = filter ("data:" `Text.isPrefixOf`) (Text.lines eventText)
+ case dataLines of
+ [] -> Nothing
+ (dataLine : _) -> do
+ let jsonStr = Text.strip (Text.drop 5 dataLine)
+ if jsonStr == "[DONE]"
+ then Nothing
+ else case Aeson.decode (BL.fromStrict (TE.encodeUtf8 jsonStr)) of
+ Nothing -> Nothing
+ Just (Aeson.Object obj) -> parseStreamChunk obj
+ _ -> Nothing
+
+parseStreamChunk :: Aeson.Object -> Maybe StreamChunk
+parseStreamChunk obj = do
+ case KeyMap.lookup "error" obj of
+ Just (Aeson.Object errObj) -> do
+ let errMsg = case KeyMap.lookup "message" errObj of
+ Just (Aeson.String m) -> m
+ _ -> "Unknown error"
+ Just (StreamError errMsg)
+ _ -> do
+ let usageChunk = case KeyMap.lookup "usage" obj of
+ Just usageVal -> case Aeson.fromJSON usageVal of
+ Aeson.Success usage -> Just (StreamDone (ChatResult (Message Assistant "" Nothing Nothing) (Just usage)))
+ _ -> Nothing
+ _ -> Nothing
+ case KeyMap.lookup "choices" obj of
+ Just (Aeson.Array choices) | not (null choices) -> do
+ case toList choices of
+ (Aeson.Object choice : _) -> do
+ case KeyMap.lookup "delta" choice of
+ Just (Aeson.Object delta) -> do
+ let contentChunk = case KeyMap.lookup "content" delta of
+ Just (Aeson.String c) | not (Text.null c) -> Just (StreamContent c)
+ _ -> Nothing
+ toolCallChunk = case KeyMap.lookup "tool_calls" delta of
+ Just (Aeson.Array tcs)
+ | not (null tcs) ->
+ parseToolCallDelta (toList tcs)
+ _ -> Nothing
+ contentChunk <|> toolCallChunk <|> usageChunk
+ _ -> usageChunk
+ _ -> usageChunk
+ _ -> usageChunk
+
+parseToolCallDelta :: [Aeson.Value] -> Maybe StreamChunk
+parseToolCallDelta [] = Nothing
+parseToolCallDelta (Aeson.Object tcObj : _) = do
+ idx <- case KeyMap.lookup "index" tcObj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ let tcId' = case KeyMap.lookup "id" tcObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ funcObj = case KeyMap.lookup "function" tcObj of
+ Just (Aeson.Object f) -> Just f
+ _ -> Nothing
+ funcName = case funcObj of
+ Just f -> case KeyMap.lookup "name" f of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ Nothing -> Nothing
+ funcArgs = case funcObj of
+ Just f -> case KeyMap.lookup "arguments" f of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ Nothing -> Nothing
+ Just
+ ( StreamToolCallDelta
+ ToolCallDelta
+ { tcdIndex = idx,
+ tcdId = tcId',
+ tcdFunctionName = funcName,
+ tcdFunctionArgs = funcArgs
+ }
+ )
+parseToolCallDelta _ = Nothing
diff --git a/Omni/Agent/Skills.hs b/Omni/Agent/Skills.hs
new file mode 100644
index 0000000..1dbf23f
--- /dev/null
+++ b/Omni/Agent/Skills.hs
@@ -0,0 +1,417 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Skills system for the ava agent.
+--
+-- Skills are modular instruction sets that extend ava's capabilities.
+-- They follow the Claude Skills format: a directory with SKILL.md and
+-- optional scripts/, references/, and assets/ subdirectories.
+--
+-- Directory structure:
+-- _/var/ava/skills/
+-- ├── shared/ -- Skills available to all users
+-- │ └── skill-creator/
+-- ├── ben/ -- Ben's private skills
+-- └── alice/ -- Alice's private skills
+--
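+-- A SKILL.md file is YAML frontmatter followed by the instruction body,
+-- for example (the description shown is illustrative):
+--
+--   ---
+--   name: skill-creator
+--   description: Helps author new skills
+--   ---
+--   # Skill Creator
+--   Instructions here.
+--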
+-- : out omni-agent-skills
+-- : dep aeson
+-- : dep directory
+module Omni.Agent.Skills
+ ( Skill (..),
+ SkillMetadata (..),
+ loadSkill,
+ loadSkillMetadata,
+ listSkills,
+ listSkillsForUser,
+ publishSkill,
+ skillTool,
+ listSkillsTool,
+ publishSkillTool,
+ skillsDir,
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.:), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.List as List
+import qualified Data.Text as Text
+import qualified Data.Text.IO as TextIO
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Paths as Paths
+import qualified Omni.Test as Test
+import qualified System.Directory as Directory
+import qualified System.FilePath as FilePath
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Skills"
+ [ Test.unit "skillsDir returns correct path" <| do
+ let dir = skillsDir
+ ("skills" `Text.isSuffixOf` Text.pack dir) Test.@=? True,
+ Test.unit "SkillMetadata parses from YAML frontmatter" <| do
+ let yaml = "name: test-skill\ndescription: A test skill"
+ case parseYamlFrontmatter yaml of
+ Nothing -> Test.assertFailure "Failed to parse frontmatter"
+ Just meta -> do
+ skillMetaName meta Test.@=? "test-skill"
+ skillMetaDescription meta Test.@=? "A test skill",
+ Test.unit "parseSkillMd extracts frontmatter and body" <| do
+ let content =
+ "---\n\
+ \name: my-skill\n\
+ \description: Does things\n\
+ \---\n\
+ \# My Skill\n\
+ \\n\
+ \Instructions here."
+ case parseSkillMd content of
+ Nothing -> Test.assertFailure "Failed to parse SKILL.md"
+ Just (meta, body) -> do
+ skillMetaName meta Test.@=? "my-skill"
+ ("# My Skill" `Text.isInfixOf` body) Test.@=? True,
+ Test.unit "skillTool schema is valid" <| do
+ let schema = Engine.toolJsonSchema (skillTool "test-user")
+ case schema of
+ Aeson.Object _ -> pure ()
+ _ -> Test.assertFailure "Schema should be an object",
+ Test.unit "listSkillsTool schema is valid" <| do
+ let schema = Engine.toolJsonSchema (listSkillsTool "test-user")
+ case schema of
+ Aeson.Object _ -> pure ()
+ _ -> Test.assertFailure "Schema should be an object"
+ ]
+
+-- | Base directory for all skills
+skillsDir :: FilePath
+skillsDir = Paths.skillsDir
+
+-- | Skill metadata from YAML frontmatter
+data SkillMetadata = SkillMetadata
+ { skillMetaName :: Text,
+ skillMetaDescription :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON SkillMetadata where
+ parseJSON =
+ Aeson.withObject "SkillMetadata" <| \v ->
+ (SkillMetadata </ (v .: "name"))
+ <*> (v .: "description")
+
+instance Aeson.ToJSON SkillMetadata where
+ toJSON m =
+ Aeson.object
+ [ "name" .= skillMetaName m,
+ "description" .= skillMetaDescription m
+ ]
+
+-- | Simple YAML frontmatter parser for skill metadata
+-- Parses lines like "name: value" and "description: value"
+parseYamlFrontmatter :: Text -> Maybe SkillMetadata
+parseYamlFrontmatter yaml = do
+ let kvPairs = parseKvLines (Text.lines yaml)
+ getName = List.lookup "name" kvPairs
+ getDesc = List.lookup "description" kvPairs
+ name' <- getName
+ desc <- getDesc
+ pure SkillMetadata {skillMetaName = name', skillMetaDescription = desc}
+ where
+ parseKvLines :: [Text] -> [(Text, Text)]
+ parseKvLines = mapMaybe parseKvLine
+
+ parseKvLine :: Text -> Maybe (Text, Text)
+ parseKvLine line = do
+ let (key, rest) = Text.breakOn ":" line
+ guard (not (Text.null rest))
+ let value = Text.strip (Text.drop 1 rest)
+ guard (not (Text.null key))
+ pure (Text.strip key, value)
+
+-- | Full skill with metadata and content
+data Skill = Skill
+ { skillName :: Text,
+ skillDescription :: Text,
+ skillBody :: Text,
+ skillPath :: FilePath
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON Skill where
+ toJSON s =
+ Aeson.object
+ [ "name" .= skillName s,
+ "description" .= skillDescription s,
+ "body" .= skillBody s,
+ "path" .= skillPath s
+ ]
+
+-- | Parse SKILL.md content into metadata and body
+parseSkillMd :: Text -> Maybe (SkillMetadata, Text)
+parseSkillMd content = do
+ let stripped = Text.strip content
+ guard (Text.isPrefixOf "---" stripped)
+ let afterFirst = Text.drop 3 stripped
+ (yamlPart, rest) = Text.breakOn "---" (Text.stripStart afterFirst)
+ guard (not (Text.null rest))
+ let body = Text.strip (Text.drop 3 rest)
+ meta <- parseYamlFrontmatter (Text.strip yamlPart)
+ pure (meta, body)
+
+-- | Load just the metadata for a skill (for progressive disclosure)
+loadSkillMetadata :: FilePath -> IO (Maybe SkillMetadata)
+loadSkillMetadata skillDir = do
+ let skillMd = skillDir FilePath.</> "SKILL.md"
+ exists <- Directory.doesFileExist skillMd
+ if exists
+ then do
+ content <- TextIO.readFile skillMd
+ pure (fst </ parseSkillMd content)
+ else pure Nothing
+
+-- | Load a full skill by name for a user
+loadSkill :: Text -> Text -> IO (Either Text Skill)
+loadSkill userName skillName' = do
+ let userDir = skillsDir FilePath.</> Text.unpack userName FilePath.</> Text.unpack skillName'
+ sharedDir = skillsDir FilePath.</> "shared" FilePath.</> Text.unpack skillName'
+
+ -- Try user's private skills first, then shared
+ userExists <- Directory.doesDirectoryExist userDir
+ sharedExists <- Directory.doesDirectoryExist sharedDir
+
+ let targetDir
+ | userExists = Just userDir
+ | sharedExists = Just sharedDir
+ | otherwise = Nothing
+
+ case targetDir of
+ Nothing -> do
+ available <- listSkillsForUser userName
+ pure
+ <| Left
+ <| "Skill not found: "
+ <> skillName'
+ <> ". Available skills: "
+ <> Text.intercalate ", " (map skillMetaName available)
+ Just dir -> do
+ let skillMd = dir FilePath.</> "SKILL.md"
+ exists <- Directory.doesFileExist skillMd
+ if exists
+ then do
+ content <- TextIO.readFile skillMd
+ case parseSkillMd content of
+ Nothing -> pure <| Left "Failed to parse SKILL.md frontmatter"
+ Just (meta, body) ->
+ pure
+ <| Right
+ <| Skill
+ { skillName = skillMetaName meta,
+ skillDescription = skillMetaDescription meta,
+ skillBody = body,
+ skillPath = dir
+ }
+ else pure <| Left ("SKILL.md not found in " <> Text.pack dir)
+
+-- | List all skills in a directory
+listSkillsInDir :: FilePath -> IO [SkillMetadata]
+listSkillsInDir dir = do
+ exists <- Directory.doesDirectoryExist dir
+ if exists
+ then do
+ entries <- Directory.listDirectory dir
+ catMaybes
+ </ forM
+ entries
+ ( \entry -> do
+ let entryPath = dir FilePath.</> entry
+ isDir <- Directory.doesDirectoryExist entryPath
+ if isDir
+ then loadSkillMetadata entryPath
+ else pure Nothing
+ )
+ else pure []
+
+-- | List all available skills (shared only)
+listSkills :: IO [SkillMetadata]
+listSkills = listSkillsInDir (skillsDir FilePath.</> "shared")
+
+-- | List skills available to a specific user (their private + shared)
+listSkillsForUser :: Text -> IO [SkillMetadata]
+listSkillsForUser userName = do
+ userSkills <- listSkillsInDir (skillsDir FilePath.</> Text.unpack userName)
+ sharedSkills <- listSkillsInDir (skillsDir FilePath.</> "shared")
+ -- Dedupe by name, preferring user's version
+ let userNames = map skillMetaName userSkills
+ uniqueShared = filter (\s -> skillMetaName s `notElem` userNames) sharedSkills
+ pure (userSkills <> uniqueShared)
+
+-- | Publish a skill from user's private directory to shared
+publishSkill :: Text -> Text -> IO (Either Text Text)
+publishSkill userName skillName' = do
+ let userDir = skillsDir FilePath.</> Text.unpack userName FilePath.</> Text.unpack skillName'
+ sharedDir = skillsDir FilePath.</> "shared" FilePath.</> Text.unpack skillName'
+
+ userExists <- Directory.doesDirectoryExist userDir
+ if not userExists
+ then pure <| Left ("Skill not found in your directory: " <> skillName')
+ else do
+ -- Copy recursively
+ Directory.createDirectoryIfMissing True sharedDir
+ copyDirectory userDir sharedDir
+ pure <| Right ("Published " <> skillName' <> " to shared skills")
+
+-- | Recursively copy a directory
+copyDirectory :: FilePath -> FilePath -> IO ()
+copyDirectory src dst = do
+ entries <- Directory.listDirectory src
+ forM_
+ entries
+ ( \entry -> do
+ let srcPath = src FilePath.</> entry
+ dstPath = dst FilePath.</> entry
+ isDir <- Directory.doesDirectoryExist srcPath
+ if isDir
+ then do
+ Directory.createDirectoryIfMissing True dstPath
+ copyDirectory srcPath dstPath
+ else Directory.copyFile srcPath dstPath
+ )
+
+-- Tool result helpers
+mkSuccess :: Text -> Aeson.Value
+mkSuccess output =
+ Aeson.object
+ [ "success" .= True,
+ "output" .= output
+ ]
+
+mkError :: Text -> Aeson.Value
+mkError err =
+ Aeson.object
+ [ "success" .= False,
+ "error" .= err
+ ]
+
+-- | Tool to load a skill's instructions
+skillTool :: Text -> Engine.Tool
+skillTool userName =
+ Engine.Tool
+ { Engine.toolName = "skill",
+ Engine.toolDescription =
+ "Load specialized instructions for a domain or task. "
+ <> "Skills provide expert workflows, scripts, and context. "
+ <> "Use list_skills to see available skills.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "name"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Name of the skill to load" :: Text)
+ ]
+ ],
+ "required" .= (["name"] :: [Text])
+ ],
+ Engine.toolExecute = executeSkill userName
+ }
+
+newtype SkillArgs = SkillArgs {skillArgsName :: Text}
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON SkillArgs where
+ parseJSON =
+ Aeson.withObject "SkillArgs" <| \v ->
+ SkillArgs </ (v .: "name")
+
+executeSkill :: Text -> Aeson.Value -> IO Aeson.Value
+executeSkill userName v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure <| mkError (Text.pack e)
+ Aeson.Success args -> do
+ result <- loadSkill userName (skillArgsName args)
+ case result of
+ Left err -> pure <| mkError err
+ Right skill ->
+ pure
+ <| Aeson.object
+ [ "success" .= True,
+ "skill" .= skillName skill,
+ "description" .= skillDescription skill,
+ "instructions" .= skillBody skill,
+ "path" .= skillPath skill
+ ]
+
+-- | Tool to list available skills
+listSkillsTool :: Text -> Engine.Tool
+listSkillsTool userName =
+ Engine.Tool
+ { Engine.toolName = "list_skills",
+ Engine.toolDescription = "List all available skills you can load.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties" .= Aeson.object []
+ ],
+ Engine.toolExecute = \_ -> executeListSkills userName
+ }
+
+executeListSkills :: Text -> IO Aeson.Value
+executeListSkills userName = do
+ skills <- listSkillsForUser userName
+ let formatted =
+ Text.unlines
+ <| map formatSkillMeta skills
+ pure
+ <| Aeson.object
+ [ "success" .= True,
+ "count" .= length skills,
+ "skills" .= skills,
+ "formatted" .= formatted
+ ]
+ where
+ formatSkillMeta m =
+ "- " <> skillMetaName m <> ": " <> skillMetaDescription m
+
+-- | Tool to publish a skill to shared
+publishSkillTool :: Text -> Engine.Tool
+publishSkillTool userName =
+ Engine.Tool
+ { Engine.toolName = "publish_skill",
+ Engine.toolDescription =
+ "Publish one of your private skills to the shared skills directory "
+ <> "so other users can access it.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "name"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Name of the skill to publish" :: Text)
+ ]
+ ],
+ "required" .= (["name"] :: [Text])
+ ],
+ Engine.toolExecute = executePublishSkill userName
+ }
+
+executePublishSkill :: Text -> Aeson.Value -> IO Aeson.Value
+executePublishSkill userName v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure <| mkError (Text.pack e)
+ Aeson.Success args -> do
+ result <- publishSkill userName (skillArgsName args)
+ case result of
+ Left err -> pure <| mkError err
+ Right msg -> pure <| mkSuccess msg
diff --git a/Omni/Agent/Log.hs b/Omni/Agent/Status.hs
index 46ea009..ab533c4 100644
--- a/Omni/Agent/Log.hs
+++ b/Omni/Agent/Status.hs
@@ -2,8 +2,9 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE NoImplicitPrelude #-}
--- | Status of the agent for the UI
-module Omni.Agent.Log where
+-- | Status bar UI for the jr worker.
+-- This is NOT a logging module - use Omni.Log for logging.
+module Omni.Agent.Status where
import Alpha
import Data.IORef (IORef, modifyIORef', newIORef, readIORef, writeIORef)
@@ -11,6 +12,7 @@ import qualified Data.Text as Text
import qualified Data.Text.IO as TIO
import Data.Time.Clock (NominalDiffTime, UTCTime, diffUTCTime, getCurrentTime)
import Data.Time.Format (defaultTimeLocale, parseTimeOrError)
+import qualified Omni.Log as Log
import qualified System.Console.ANSI as ANSI
import qualified System.IO as IO
import System.IO.Unsafe (unsafePerformIO)
@@ -77,9 +79,10 @@ updateActivity :: Text -> IO ()
updateActivity msg = update (\s -> s {statusActivity = msg})
-- | Log a scrolling message (appears above status bars)
+-- Uses Omni.Log for the actual logging, then re-renders status bar
log :: Text -> IO ()
log msg = do
- -- Clear status bars
+ -- Clear status bars temporarily
ANSI.hClearLine IO.stderr
ANSI.hCursorDown IO.stderr 1
ANSI.hClearLine IO.stderr
@@ -91,11 +94,11 @@ log msg = do
ANSI.hClearLine IO.stderr
ANSI.hCursorUp IO.stderr 4
- -- Print message (scrolls screen)
- TIO.hPutStrLn IO.stderr msg
+ -- Use Omni.Log for the actual log message
+ Log.info [msg]
+ Log.br
-- Re-render status bars at bottom
- -- (Since we scrolled, we are now on the line above where the first status line should be)
render
-- | Render the five status lines
diff --git a/Omni/Agent/Subagent.hs b/Omni/Agent/Subagent.hs
new file mode 100644
index 0000000..c8e56d5
--- /dev/null
+++ b/Omni/Agent/Subagent.hs
@@ -0,0 +1,516 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Subagent system for spawning specialized agents.
+--
+-- Enables the orchestrator (Ava) to delegate focused tasks to specialized
+-- subagents that run with their own tool sets and resource limits.
+--
+-- Key features:
+-- - Role-based tool selection (WebCrawler, CodeReviewer, etc.)
+-- - Per-subagent resource limits (timeout, cost, tokens)
+-- - Structured result format with confidence scores
+-- - No sub-subagent spawning (hierarchical control)
+--
+-- : out omni-agent-subagent
+-- : dep aeson
+-- : dep async
+module Omni.Agent.Subagent
+ ( -- * Types
+ SubagentRole (..),
+ SubagentConfig (..),
+ SubagentResult (..),
+ SubagentStatus (..),
+ SubagentCallbacks (..),
+
+ -- * Execution
+ runSubagent,
+ runSubagentWithCallbacks,
+
+ -- * Tool
+ spawnSubagentTool,
+
+ -- * Role-specific tools
+ SubagentApiKeys (..),
+ toolsForRole,
+ modelForRole,
+ systemPromptForRole,
+
+ -- * Defaults
+ defaultSubagentConfig,
+ defaultCallbacks,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Text as Text
+import qualified Data.Time.Clock as Clock
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Provider as Provider
+import qualified Omni.Agent.Tools as Tools
+import qualified Omni.Agent.Tools.WebReader as WebReader
+import qualified Omni.Agent.Tools.WebSearch as WebSearch
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Subagent"
+ [ Test.unit "SubagentRole JSON roundtrip" <| do
+ let roles = [WebCrawler, CodeReviewer, DataExtractor, Researcher]
+ forM_ roles <| \role ->
+ case Aeson.decode (Aeson.encode role) of
+ Nothing -> Test.assertFailure ("Failed to decode role: " <> show role)
+ Just decoded -> decoded Test.@=? role,
+ Test.unit "SubagentConfig JSON roundtrip" <| do
+ let cfg = defaultSubagentConfig WebCrawler "test task"
+ case Aeson.decode (Aeson.encode cfg) of
+ Nothing -> Test.assertFailure "Failed to decode SubagentConfig"
+ Just decoded -> subagentTask decoded Test.@=? "test task",
+ Test.unit "SubagentResult JSON roundtrip" <| do
+ let result =
+ SubagentResult
+ { subagentOutput = Aeson.object ["data" .= ("test" :: Text)],
+ subagentSummary = "Test summary",
+ subagentConfidence = 0.85,
+ subagentTokensUsed = 1000,
+ subagentCostCents = 0.5,
+ subagentDuration = 30,
+ subagentIterations = 3,
+ subagentStatus = SubagentSuccess
+ }
+ case Aeson.decode (Aeson.encode result) of
+ Nothing -> Test.assertFailure "Failed to decode SubagentResult"
+ Just decoded -> subagentSummary decoded Test.@=? "Test summary",
+ Test.unit "SubagentStatus JSON roundtrip" <| do
+ let statuses =
+ [ SubagentSuccess,
+ SubagentTimeout,
+ SubagentCostExceeded,
+ SubagentError "test error"
+ ]
+ forM_ statuses <| \status ->
+ case Aeson.decode (Aeson.encode status) of
+ Nothing -> Test.assertFailure ("Failed to decode status: " <> show status)
+ Just decoded -> decoded Test.@=? status,
+ Test.unit "toolsForRole WebCrawler has web tools" <| do
+ let keys = SubagentApiKeys "test-openrouter-key" (Just "test-kagi-key")
+ let tools = toolsForRole WebCrawler keys
+ let names = map Engine.toolName tools
+ ("web_search" `elem` names) Test.@=? True
+ ("read_webpages" `elem` names) Test.@=? True,
+ Test.unit "toolsForRole CodeReviewer has code tools" <| do
+ let keys = SubagentApiKeys "test-openrouter-key" Nothing
+ let tools = toolsForRole CodeReviewer keys
+ let names = map Engine.toolName tools
+ ("read_file" `elem` names) Test.@=? True
+ ("search_codebase" `elem` names) Test.@=? True,
+ Test.unit "modelForRole returns appropriate models" <| do
+ modelForRole WebCrawler Test.@=? "anthropic/claude-3-haiku"
+ modelForRole CodeReviewer Test.@=? "anthropic/claude-sonnet-4"
+ modelForRole Researcher Test.@=? "anthropic/claude-sonnet-4",
+ Test.unit "defaultSubagentConfig has sensible defaults" <| do
+ let cfg = defaultSubagentConfig WebCrawler "task"
+ subagentTimeout cfg Test.@=? 600
+ subagentMaxCost cfg Test.@=? 100.0
+ subagentMaxTokens cfg Test.@=? 200000
+ subagentMaxIterations cfg Test.@=? 20,
+ Test.unit "spawnSubagentTool has correct name" <| do
+ let keys = SubagentApiKeys "test-openrouter-key" (Just "test-kagi-key")
+ let tool = spawnSubagentTool keys
+ Engine.toolName tool Test.@=? "spawn_subagent"
+ ]
+
+data SubagentRole
+ = WebCrawler
+ | CodeReviewer
+ | DataExtractor
+ | Researcher
+ | CustomRole Text
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON SubagentRole where
+ toJSON WebCrawler = Aeson.String "web_crawler"
+ toJSON CodeReviewer = Aeson.String "code_reviewer"
+ toJSON DataExtractor = Aeson.String "data_extractor"
+ toJSON Researcher = Aeson.String "researcher"
+ toJSON (CustomRole name) = Aeson.String name
+
+instance Aeson.FromJSON SubagentRole where
+ parseJSON = Aeson.withText "SubagentRole" parseRole
+ where
+ parseRole "web_crawler" = pure WebCrawler
+ parseRole "code_reviewer" = pure CodeReviewer
+ parseRole "data_extractor" = pure DataExtractor
+ parseRole "researcher" = pure Researcher
+ parseRole name = pure (CustomRole name)
+
+data SubagentConfig = SubagentConfig
+ { subagentRole :: SubagentRole,
+ subagentTask :: Text,
+ subagentModel :: Maybe Text,
+ subagentTimeout :: Int,
+ subagentMaxCost :: Double,
+ subagentMaxTokens :: Int,
+ subagentMaxIterations :: Int,
+ subagentExtendedThinking :: Bool,
+ subagentContext :: Maybe Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON SubagentConfig where
+ toJSON c =
+ Aeson.object
+ <| catMaybes
+ [ Just ("role" .= subagentRole c),
+ Just ("task" .= subagentTask c),
+ ("model" .=) </ subagentModel c,
+ Just ("timeout" .= subagentTimeout c),
+ Just ("max_cost_cents" .= subagentMaxCost c),
+ Just ("max_tokens" .= subagentMaxTokens c),
+ Just ("max_iterations" .= subagentMaxIterations c),
+ Just ("extended_thinking" .= subagentExtendedThinking c),
+ ("context" .=) </ subagentContext c
+ ]
+
+instance Aeson.FromJSON SubagentConfig where
+ parseJSON =
+ Aeson.withObject "SubagentConfig" <| \v ->
+ (SubagentConfig </ (v .: "role"))
+ <*> (v .: "task")
+ <*> (v .:? "model")
+ <*> (v .:? "timeout" .!= 600)
+ <*> (v .:? "max_cost_cents" .!= 50.0)
+ <*> (v .:? "max_tokens" .!= 100000)
+ <*> (v .:? "max_iterations" .!= 20)
+ <*> (v .:? "extended_thinking" .!= False)
+ <*> (v .:? "context")
+
+data SubagentResult = SubagentResult
+ { subagentOutput :: Aeson.Value,
+ subagentSummary :: Text,
+ subagentConfidence :: Double,
+ subagentTokensUsed :: Int,
+ subagentCostCents :: Double,
+ subagentDuration :: Int,
+ subagentIterations :: Int,
+ subagentStatus :: SubagentStatus
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON SubagentResult
+
+instance Aeson.FromJSON SubagentResult
+
+data SubagentStatus
+ = SubagentSuccess
+ | SubagentTimeout
+ | SubagentCostExceeded
+ | SubagentError Text
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON SubagentStatus where
+ toJSON SubagentSuccess = Aeson.String "success"
+ toJSON SubagentTimeout = Aeson.String "timeout"
+ toJSON SubagentCostExceeded = Aeson.String "cost_exceeded"
+ toJSON (SubagentError msg) = Aeson.object ["error" .= msg]
+
+instance Aeson.FromJSON SubagentStatus where
+ parseJSON (Aeson.String "success") = pure SubagentSuccess
+ parseJSON (Aeson.String "timeout") = pure SubagentTimeout
+ parseJSON (Aeson.String "cost_exceeded") = pure SubagentCostExceeded
+ parseJSON (Aeson.Object v) = SubagentError </ (v .: "error")
+ parseJSON _ = empty
+
+data SubagentCallbacks = SubagentCallbacks
+ { onSubagentStart :: Text -> IO (),
+ onSubagentActivity :: Text -> IO (),
+ onSubagentToolCall :: Text -> Text -> IO (),
+ onSubagentComplete :: SubagentResult -> IO ()
+ }
+
+defaultCallbacks :: SubagentCallbacks
+defaultCallbacks =
+ SubagentCallbacks
+ { onSubagentStart = \_ -> pure (),
+ onSubagentActivity = \_ -> pure (),
+ onSubagentToolCall = \_ _ -> pure (),
+ onSubagentComplete = \_ -> pure ()
+ }
+
+defaultSubagentConfig :: SubagentRole -> Text -> SubagentConfig
+defaultSubagentConfig role task =
+ SubagentConfig
+ { subagentRole = role,
+ subagentTask = task,
+ subagentModel = Nothing,
+ subagentTimeout = 600,
+ subagentMaxCost = 100.0,
+ subagentMaxTokens = 200000,
+ subagentMaxIterations = 20,
+ subagentExtendedThinking = False,
+ subagentContext = Nothing
+ }
+
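+-- | Default OpenRouter model slug for each role.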
+modelForRole :: SubagentRole -> Text
+modelForRole WebCrawler = "anthropic/claude-3-haiku"
+modelForRole CodeReviewer = "anthropic/claude-sonnet-4"
+modelForRole DataExtractor = "anthropic/claude-3-haiku"
+modelForRole Researcher = "anthropic/claude-sonnet-4"
+modelForRole (CustomRole _) = "anthropic/claude-sonnet-4"
+
+data SubagentApiKeys = SubagentApiKeys
+ { subagentOpenRouterKey :: Text,
+ subagentKagiKey :: Maybe Text
+ }
+ deriving (Show, Eq)
+
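+-- | Tool set for each role. Web search is only included when a Kagi key
+-- is configured.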
+toolsForRole :: SubagentRole -> SubagentApiKeys -> [Engine.Tool]
+toolsForRole WebCrawler keys =
+ let webSearchTools = case subagentKagiKey keys of
+ Just kagiKey -> [WebSearch.webSearchTool kagiKey]
+ Nothing -> []
+ in webSearchTools
+ <> [ WebReader.webReaderTool (subagentOpenRouterKey keys),
+ Tools.searchCodebaseTool
+ ]
+toolsForRole CodeReviewer _keys =
+ [ Tools.readFileTool,
+ Tools.searchCodebaseTool,
+ Tools.searchAndReadTool,
+ Tools.runBashTool
+ ]
+toolsForRole DataExtractor keys =
+ [ WebReader.webReaderTool (subagentOpenRouterKey keys),
+ Tools.readFileTool,
+ Tools.searchCodebaseTool
+ ]
+toolsForRole Researcher keys =
+ let webSearchTools = case subagentKagiKey keys of
+ Just kagiKey -> [WebSearch.webSearchTool kagiKey]
+ Nothing -> []
+ in webSearchTools
+ <> [ WebReader.webReaderTool (subagentOpenRouterKey keys),
+ Tools.readFileTool,
+ Tools.searchCodebaseTool,
+ Tools.searchAndReadTool
+ ]
+toolsForRole (CustomRole _) keys = toolsForRole Researcher keys
+
+systemPromptForRole :: SubagentRole -> Text -> Maybe Text -> Text
+systemPromptForRole role task maybeContext =
+ Text.unlines
+ [ "You are a specialized " <> roleDescription role <> " subagent working on a focused task.",
+ "",
+ "## Your Task",
+ task,
+ "",
+ maybe "" (\ctx -> "## Context from Orchestrator\n" <> ctx <> "\n") maybeContext,
+ "## Guidelines",
+ "1. Be EFFICIENT with context - extract only key facts, don't save full page contents",
+ "2. Summarize findings as you go rather than accumulating raw data",
+ "3. Limit web page reads to 3-5 most relevant sources",
+ "4. Work iteratively: search → skim results → read best 2-3 → synthesize",
+ "5. ALWAYS cite sources - every claim needs a URL",
+ "6. Stop when you have sufficient information - don't over-research",
+ "",
+ "## Output Format",
+ "Return findings as a list of structured insights:",
+ "",
+ "```json",
+ "{",
+ " \"summary\": \"Brief overall summary (1-2 sentences)\",",
+ " \"confidence\": 0.85,",
+ " \"findings\": [",
+ " {",
+ " \"claim\": \"The key insight or fact discovered\",",
+ " \"source_url\": \"https://example.com/page\",",
+ " \"quote\": \"Relevant excerpt supporting the claim\",",
+ " \"source_name\": \"Example Site\"",
+ " }",
+ " ],",
+ " \"caveats\": \"Any limitations or uncertainties\"",
+ "}",
+ "```"
+ ]
+ where
+ roleDescription :: SubagentRole -> Text
+ roleDescription WebCrawler = "web research"
+ roleDescription CodeReviewer = "code review"
+ roleDescription DataExtractor = "data extraction"
+ roleDescription Researcher = "research"
+ roleDescription (CustomRole name) = name
+
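+-- | Run a subagent with default (no-op) callbacks.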
+runSubagent :: SubagentApiKeys -> SubagentConfig -> IO SubagentResult
+runSubagent keys config = runSubagentWithCallbacks keys config defaultCallbacks
+
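+-- | Run a subagent, enforcing the configured timeout and reporting progress
+-- through the given callbacks.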
+runSubagentWithCallbacks :: SubagentApiKeys -> SubagentConfig -> SubagentCallbacks -> IO SubagentResult
+runSubagentWithCallbacks keys config callbacks = do
+ startTime <- Clock.getCurrentTime
+
+ let role = subagentRole config
+ let model = fromMaybe (modelForRole role) (subagentModel config)
+ let tools = toolsForRole role keys
+ let systemPrompt = systemPromptForRole role (subagentTask config) (subagentContext config)
+
+ onSubagentStart callbacks ("Starting " <> tshow role <> " subagent...")
+
+ let provider = Provider.defaultOpenRouter (subagentOpenRouterKey keys) model
+
+ let guardrails =
+ Engine.Guardrails
+ { Engine.guardrailMaxCostCents = subagentMaxCost config,
+ Engine.guardrailMaxTokens = subagentMaxTokens config,
+ Engine.guardrailMaxDuplicateToolCalls = 20,
+ Engine.guardrailMaxTestFailures = 3,
+ Engine.guardrailMaxEditFailures = 5
+ }
+
+ let agentConfig =
+ Engine.AgentConfig
+ { Engine.agentModel = model,
+ Engine.agentTools = tools,
+ Engine.agentSystemPrompt = systemPrompt,
+ Engine.agentMaxIterations = subagentMaxIterations config,
+ Engine.agentGuardrails = guardrails
+ }
+
+ let engineConfig =
+ Engine.EngineConfig
+ { Engine.engineLLM = Engine.defaultLLM,
+ Engine.engineOnCost = \_ _ -> pure (),
+ Engine.engineOnActivity = onSubagentActivity callbacks,
+ Engine.engineOnToolCall = onSubagentToolCall callbacks,
+ Engine.engineOnAssistant = \_ -> pure (),
+ Engine.engineOnToolResult = \_ _ _ -> pure (),
+ Engine.engineOnComplete = pure (),
+ Engine.engineOnError = \_ -> pure (),
+ Engine.engineOnGuardrail = \_ -> pure ()
+ }
+
+ let timeoutMicros = subagentTimeout config * 1000000
+
+ resultOrTimeout <-
+ race
+ (threadDelay timeoutMicros)
+ (Engine.runAgentWithProvider engineConfig provider agentConfig (subagentTask config))
+
+ endTime <- Clock.getCurrentTime
+ let durationSecs = round (Clock.diffUTCTime endTime startTime)
+
+ let result = case resultOrTimeout of
+ Left () ->
+ SubagentResult
+ { subagentOutput = Aeson.object ["error" .= ("Timeout after " <> tshow (subagentTimeout config) <> " seconds" :: Text)],
+ subagentSummary = "Subagent timed out",
+ subagentConfidence = 0.0,
+ subagentTokensUsed = 0,
+ subagentCostCents = 0.0,
+ subagentDuration = durationSecs,
+ subagentIterations = 0,
+ subagentStatus = SubagentTimeout
+ }
+ Right (Left err) ->
+ let status = if "cost" `Text.isInfixOf` Text.toLower err then SubagentCostExceeded else SubagentError err
+ in SubagentResult
+ { subagentOutput = Aeson.object ["error" .= err],
+ subagentSummary = "Subagent failed: " <> err,
+ subagentConfidence = 0.0,
+ subagentTokensUsed = 0,
+ subagentCostCents = 0.0,
+ subagentDuration = durationSecs,
+ subagentIterations = 0,
+ subagentStatus = status
+ }
+ Right (Right agentResult) ->
+ SubagentResult
+ { subagentOutput = Aeson.object ["response" .= Engine.resultFinalMessage agentResult],
+ subagentSummary = truncateSummary (Engine.resultFinalMessage agentResult),
+ subagentConfidence = 0.8,
+ subagentTokensUsed = Engine.resultTotalTokens agentResult,
+ subagentCostCents = Engine.resultTotalCost agentResult,
+ subagentDuration = durationSecs,
+ subagentIterations = Engine.resultIterations agentResult,
+ subagentStatus = SubagentSuccess
+ }
+
+ onSubagentComplete callbacks result
+ pure result
+ where
+ truncateSummary :: Text -> Text
+ truncateSummary txt =
+ let firstLine = Text.takeWhile (/= '\n') txt
+ in if Text.length firstLine > 200
+ then Text.take 197 firstLine <> "..."
+ else firstLine
+
+spawnSubagentTool :: SubagentApiKeys -> Engine.Tool
+spawnSubagentTool keys =
+ Engine.Tool
+ { Engine.toolName = "spawn_subagent",
+ Engine.toolDescription =
+ "Spawn a specialized subagent for a focused task. "
+ <> "Use for tasks that benefit from deep exploration, parallel execution, "
+ <> "or specialized tools. The subagent will iterate until task completion "
+ <> "or resource limits are reached. "
+ <> "Available roles: web_crawler (fast web research), code_reviewer (thorough code analysis), "
+ <> "data_extractor (structured data extraction), researcher (general research).",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "role"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "enum" .= (["web_crawler", "code_reviewer", "data_extractor", "researcher"] :: [Text]),
+ "description" .= ("Subagent role determining tools and model" :: Text)
+ ],
+ "task"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The specific task for the subagent to accomplish" :: Text)
+ ],
+ "context"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Additional context to help the subagent understand the goal" :: Text)
+ ],
+ "model"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Override the default model for this role" :: Text)
+ ],
+ "timeout"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Timeout in seconds (default: 600)" :: Text)
+ ],
+ "max_cost_cents"
+ .= Aeson.object
+ [ "type" .= ("number" :: Text),
+ "description" .= ("Maximum cost in cents (default: 50)" :: Text)
+ ]
+ ],
+ "required" .= (["role", "task"] :: [Text])
+ ],
+ Engine.toolExecute = executeSpawnSubagent keys
+ }
+
+executeSpawnSubagent :: SubagentApiKeys -> Aeson.Value -> IO Aeson.Value
+executeSpawnSubagent keys v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure <| Aeson.object ["error" .= ("Invalid arguments: " <> Text.pack e)]
+ Aeson.Success config -> do
+ result <- runSubagent keys config
+ pure (Aeson.toJSON result)
diff --git a/Omni/Agent/Subagent/DESIGN.md b/Omni/Agent/Subagent/DESIGN.md
new file mode 100644
index 0000000..9fd20d1
--- /dev/null
+++ b/Omni/Agent/Subagent/DESIGN.md
@@ -0,0 +1,352 @@
+# Subagent System Design
+
+**Status:** Draft
+**Goal:** Enable Ava (orchestrator) to spawn specialized subagents for parallel, token-intensive tasks.
+
+## 1. Architecture Overview
+
+```
+┌─────────────────────────────────────────────────────────────┐
+│ Ava (Orchestrator) │
+│ Model: claude-sonnet-4.5 (via OpenRouter) │
+│ Role: Task decomposition, delegation, synthesis │
+│ Memory: Read/Write access │
+├─────────────────────────────────────────────────────────────┤
+│ Tools: spawn_subagent, all existing Ava tools │
+└───────────────┬───────────────────────────────────────┬─────┘
+ │ │
+ ▼ ▼
+┌───────────────────────────┐ ┌───────────────────────────┐
+│ Subagent: WebCrawler │ │ Subagent: CodeReviewer │
+│ Model: claude-haiku │ │ Model: claude-opus │
+│ Tools: web_search, │ │ Tools: read_file, │
+│ http_get, │ │ search_codebase, │
+│ python_exec │ │ run_bash │
+│ Memory: Read-only │ │ Memory: Read-only │
+│ Limits: 600s, $0.50 │ │ Limits: 300s, $1.00 │
+└───────────────────────────┘ └───────────────────────────┘
+```
+
+## 2. Key Design Decisions
+
+### 2.1 Hierarchical (No Sub-Subagents)
+- Subagents cannot spawn their own subagents
+- Prevents runaway token consumption
+- Keeps orchestrator in control
+
+### 2.2 Memory Access
+- **Orchestrator (Ava):** Full read/write to Memory system
+- **Subagents:** Read-only access to memories
+- Prevents conflicting memory writes from parallel agents
+
+### 2.3 Model Selection by Role
+| Role | Model | Rationale |
+|------|-------|-----------|
+| Orchestrator | claude-sonnet-4.5 | Balance of capability/cost |
+| Deep reasoning | claude-opus | Complex analysis, architecture |
+| Quick tasks | claude-haiku | Fast, cheap for simple lookups |
+| Code tasks | claude-sonnet | Good code understanding |
+
+### 2.4 Resource Limits (Guardrails)
+Each subagent has strict limits:
+- **Timeout:** Max wall-clock time (default: 600s)
+- **Cost cap:** Max spend in cents (default: 50c)
+- **Token cap:** Max total tokens (default: 100k)
+- **Iteration cap:** Max agent loop iterations (default: 20)
+
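+For illustration, a config that tightens these limits for a quick lookup
+might look like this (field names follow the types in Section 3; the
+`defaultSubagentConfig` helper from `Omni/Agent/Subagent.hs` fills in the
+remaining fields):
+
+```haskell
+quickLookup :: SubagentConfig
+quickLookup =
+  (defaultSubagentConfig WebCrawler "Find the current Otter.ai per-minute price")
+    { subagentTimeout = 120     -- seconds
+    , subagentMaxCost = 10.0    -- cents
+    , subagentMaxTokens = 20000
+    , subagentMaxIterations = 5
+    }
+```
+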
+### 2.5 Extended Thinking
+- Configurable per-subagent
+- Enabled for deep research tasks
+- Disabled for quick lookups
+
+## 3. Data Types
+
+```haskell
+-- | Subagent role determines toolset and model
+data SubagentRole
+ = WebCrawler -- Deep web research
+ | CodeReviewer -- Code analysis, PR review
+ | DataExtractor -- Structured data extraction
+ | Researcher -- General research with web+docs
+ | CustomRole Text -- User-defined role
+ deriving (Show, Eq, Generic)
+
+-- | Configuration for spawning a subagent
+data SubagentConfig = SubagentConfig
+ { subagentRole :: SubagentRole
+ , subagentTask :: Text -- What to accomplish
+ , subagentModel :: Maybe Text -- Override default model
+ , subagentTimeout :: Int -- Seconds (default: 600)
+ , subagentMaxCost :: Double -- Cents (default: 50.0)
+ , subagentMaxTokens :: Int -- Default: 100000
+ , subagentMaxIterations :: Int -- Default: 20
+ , subagentExtendedThinking :: Bool
+ , subagentContext :: Maybe Text -- Additional context from orchestrator
+ } deriving (Show, Eq, Generic)
+
+-- | Result returned by subagent to orchestrator
+data SubagentResult = SubagentResult
+ { subagentOutput :: Aeson.Value -- Structured result
+ , subagentSummary :: Text -- Human-readable summary
+ , subagentConfidence :: Double -- 0.0-1.0 confidence score
+ , subagentTokensUsed :: Int
+ , subagentCostCents :: Double
+ , subagentDuration :: Int -- Seconds
+ , subagentIterations :: Int
+ , subagentStatus :: SubagentStatus
+ } deriving (Show, Eq, Generic)
+
+data SubagentStatus
+ = SubagentSuccess
+ | SubagentTimeout
+ | SubagentCostExceeded
+ | SubagentError Text
+ deriving (Show, Eq, Generic)
+```
+
+## 4. Tool: spawn_subagent
+
+This is the main interface for the orchestrator to spawn subagents.
+
+```haskell
+spawnSubagentTool :: Engine.Tool
+spawnSubagentTool = Engine.Tool
+ { toolName = "spawn_subagent"
+ , toolDescription =
+ "Spawn a specialized subagent for a focused task. "
+ <> "Use for tasks that benefit from deep exploration, parallel execution, "
+ <> "or specialized tools. The subagent will iterate until task completion "
+ <> "or resource limits are reached."
+ , toolJsonSchema = ...
+ , toolExecute = executeSpawnSubagent
+ }
+```
+
+**Parameters:**
+```json
+{
+ "role": "web_crawler | code_reviewer | data_extractor | researcher | custom",
+ "task": "Research competitor pricing for podcast transcription services",
+ "context": "We're building a pricing page and need market data",
+ "model": "claude-haiku",
+ "timeout": 600,
+ "max_cost_cents": 50,
+ "extended_thinking": false
+}
+```
+
+**Response:**
+```json
+{
+ "status": "success",
+ "summary": "Found 5 competitors with pricing ranging from $0.10-$0.25/min",
+ "output": {
+ "competitors": [
+ {"name": "Otter.ai", "pricing": "$0.12/min", "features": ["..."]},
+ ...
+ ]
+ },
+ "confidence": 0.85,
+ "tokens_used": 45000,
+ "cost_cents": 23.5,
+ "duration_seconds": 180,
+ "iterations": 8
+}
+```
+
+## 5. Role-Specific Tool Sets
+
+### 5.1 WebCrawler
+```haskell
+webCrawlerTools :: [Engine.Tool]
+webCrawlerTools =
+ [ webSearchTool -- Search the web
+ , webReaderTool -- Fetch and parse web pages
+ , pythonExecTool -- Execute Python for data processing
+ ]
+```
+**Use case:** Deep market research, competitive analysis, documentation gathering
+
+### 5.2 CodeReviewer
+```haskell
+codeReviewerTools :: [Engine.Tool]
+codeReviewerTools =
+ [ readFileTool
+ , searchCodebaseTool
+ , searchAndReadTool
+ , runBashTool -- For running tests, linters
+ ]
+```
+**Use case:** PR review, architecture analysis, test verification
+
+### 5.3 DataExtractor
+```haskell
+dataExtractorTools :: [Engine.Tool]
+dataExtractorTools =
+ [ webReaderTool
+ , pythonExecTool
+ ]
+```
+**Use case:** Scraping structured data, parsing PDFs, extracting metrics
+
+### 5.4 Researcher
+```haskell
+researcherTools :: [Engine.Tool]
+researcherTools =
+ [ webSearchTool
+ , webReaderTool
+ , readFileTool
+ , searchCodebaseTool
+ ]
+```
+**Use case:** General research combining web and local codebase
+
+## 6. Subagent System Prompt Template
+
+```
+You are a specialized {ROLE} subagent working on a focused task.
+
+## Your Task
+{TASK}
+
+## Context from Orchestrator
+{CONTEXT}
+
+## Your Capabilities
+{TOOL_DESCRIPTIONS}
+
+## Guidelines
+1. Work iteratively: search → evaluate → refine → verify
+2. Return structured data when possible (JSON objects)
+3. Include confidence scores for your findings
+4. If stuck, explain what you tried and what didn't work
+5. Stop when you have sufficient information OR hit resource limits
+
+## Output Format
+When complete, provide:
+1. A structured result (JSON) with the requested data
+2. A brief summary of findings
+3. Confidence score (0.0-1.0) indicating reliability
+4. Any caveats or limitations
+```
+
+## 7. Orchestrator Delegation Logic
+
+The orchestrator (Ava) should spawn subagents when:
+
+1. **Deep research needed:** "Research all competitors in X market"
+2. **Parallel tasks:** Multiple independent subtasks that can run concurrently
+3. **Specialized tools:** Task requires tools the orchestrator shouldn't use directly
+4. **Token-intensive:** Task would consume excessive tokens in main context
+
+The orchestrator should NOT spawn subagents for:
+
+1. **Simple queries:** Quick lookups, single tool calls
+2. **Conversation continuation:** Multi-turn dialogue with user
+3. **Memory writes:** Tasks that need to update Ava's memory
+
+## 8. Execution Flow
+
+```
+1. Orchestrator calls spawn_subagent tool
+2. Subagent module:
+ a. Creates fresh agent config from SubagentConfig
+ b. Selects model based on role (or override)
+ c. Builds tool list for role
+ d. Constructs system prompt
+ e. Calls Engine.runAgentWithProvider
+ f. Monitors resource usage
+ g. Returns SubagentResult
+3. Orchestrator receives structured result
+4. Orchestrator synthesizes into response
+```
+
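+A minimal sketch of steps 1-3 from the orchestrator's side, using
+`defaultSubagentConfig` and `runSubagent` from `Omni/Agent/Subagent.hs`
+(the caller supplies the API keys):
+
+```haskell
+example :: SubagentApiKeys -> IO ()
+example keys = do
+  let cfg =
+        defaultSubagentConfig
+          WebCrawler
+          "Research competitor pricing for podcast transcription services"
+  result <- runSubagent keys cfg
+  -- the orchestrator would synthesize these fields into its own reply
+  print (subagentStatus result)
+  print (subagentSummary result)
+```
+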
+## 9. Concurrency Model
+
+Initial implementation: **Sequential** (one subagent at a time)
+
+Future enhancement: **Parallel** spawning with:
+- `async` library for concurrent execution
+- Aggregate cost tracking across all subagents
+- Combined timeout for parallel group
+
+```haskell
+-- Future: Parallel spawning
+spawnParallel :: [SubagentConfig] -> IO [SubagentResult]
+spawnParallel configs = mapConcurrently runSubagent configs
+```
+
+## 10. Status Reporting
+
+Subagents report status back to the orchestrator via callbacks:
+
+```haskell
+data SubagentCallbacks = SubagentCallbacks
+ { onSubagentStart :: Text -> IO () -- "Starting web research..."
+ , onSubagentActivity :: Text -> IO () -- "Searching for X..."
+ , onSubagentToolCall :: Text -> Text -> IO () -- Tool name, args
+ , onSubagentComplete :: SubagentResult -> IO ()
+ }
+```
+
+For Telegram, this appears as:
+```
+🔍 Subagent [WebCrawler]: Starting research...
+🔍 Subagent [WebCrawler]: Searching "podcast transcription pricing"...
+🔍 Subagent [WebCrawler]: Reading otter.ai/pricing...
+✅ Subagent [WebCrawler]: Complete (180s, $0.24)
+```
+
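+A sketch of wiring these callbacks to a chat transport; `sendStatus` is a
+hypothetical helper that posts one status line to the user:
+
+```haskell
+telegramCallbacks :: (Text -> IO ()) -> SubagentCallbacks
+telegramCallbacks sendStatus =
+  SubagentCallbacks
+    { onSubagentStart = \msg -> sendStatus ("🔍 " <> msg)
+    , onSubagentActivity = \msg -> sendStatus ("🔍 " <> msg)
+    , onSubagentToolCall = \name args -> sendStatus ("🔍 " <> name <> ": " <> args)
+    , onSubagentComplete = \res -> sendStatus ("✅ " <> subagentSummary res)
+    }
+```
+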
+## 11. Implementation Plan
+
+### Phase 1: Core Infrastructure
+1. Create `Omni/Agent/Subagent.hs` with data types
+2. Implement `runSubagent` function using existing Engine
+3. Add `spawn_subagent` tool
+4. Basic WebCrawler role with existing web tools
+
+### Phase 2: Role Expansion
+1. Add CodeReviewer role
+2. Add DataExtractor role
+3. Add Researcher role
+4. Custom role support
+
+### Phase 3: Advanced Features
+1. Parallel subagent execution
+2. Extended thinking integration
+3. Cross-subagent context sharing
+4. Cost aggregation and budgeting
+
+## 12. Testing Strategy
+
+```haskell
+test :: Test.Tree
+test = Test.group "Omni.Agent.Subagent"
+ [ Test.unit "SubagentConfig JSON roundtrip" <| ...
+ , Test.unit "role selects correct tools" <| ...
+ , Test.unit "timeout terminates subagent" <| ...
+ , Test.unit "cost limit stops execution" <| ...
+ , Test.unit "WebCrawler role has web tools" <| ...
+ ]
+```
+
+## 13. Cost Analysis
+
+Based on Anthropic's research findings:
+- Subagents use ~15× more tokens than a single agent
+- But they provide better results for complex tasks
+- ~80% of performance variance is explained by the token budget
+
+**Budget recommendations:**
+| Task Type | Subagent Budget | Expected Tokens |
+|-----------|-----------------|-----------------|
+| Quick lookup | $0.10 | ~10k |
+| Standard research | $0.50 | ~50k |
+| Deep analysis | $2.00 | ~200k |
+
+## 14. References
+
+- [Claude Agent SDK - Subagents](https://platform.claude.com/docs/en/agent-sdk/subagents)
+- [Multi-Agent Research System](https://www.anthropic.com/engineering/multi-agent-research-system)
+- [OpenAI Agents Python](https://openai.github.io/openai-agents-python/agents/)
+- Existing: `Omni/Agent/Engine.hs`, `Omni/Agent/Provider.hs`, `Omni/Agent/Tools.hs`
diff --git a/Omni/Agent/Telegram.hs b/Omni/Agent/Telegram.hs
new file mode 100644
index 0000000..fd6c6b5
--- /dev/null
+++ b/Omni/Agent/Telegram.hs
@@ -0,0 +1,1372 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Telegram Bot Agent - Family assistant via Telegram.
+--
+-- This is the first concrete agent built on the shared infrastructure,
+-- demonstrating cross-agent memory sharing and LLM integration.
+--
+-- Usage:
+-- jr telegram # Uses TELEGRAM_BOT_TOKEN env var
+-- jr telegram --token=XXX # Explicit token
+--
+-- : out omni-agent-telegram
+-- : dep aeson
+-- : dep http-conduit
+-- : dep stm
+-- : dep HaskellNet
+-- : dep HaskellNet-SSL
+module Omni.Agent.Telegram
+ ( -- * Configuration (re-exported from Types)
+ Types.TelegramConfig (..),
+ defaultTelegramConfig,
+
+ -- * Types (re-exported from Types)
+ Types.TelegramMessage (..),
+ Types.TelegramUpdate (..),
+ Types.TelegramDocument (..),
+ Types.TelegramPhoto (..),
+ Types.TelegramVoice (..),
+
+ -- * Telegram API
+ getUpdates,
+ sendMessage,
+ sendMessageReturningId,
+ editMessage,
+ sendTypingAction,
+ leaveChat,
+
+ -- * Media (re-exported from Media)
+ getFile,
+ downloadFile,
+ downloadAndExtractPdf,
+ isPdf,
+
+ -- * Bot Loop
+ runTelegramBot,
+ handleMessage,
+ startBot,
+ ensureOllama,
+ checkOllama,
+ pullEmbeddingModel,
+
+ -- * Reminders (re-exported from Reminders)
+ reminderLoop,
+ checkAndSendReminders,
+ recordUserChat,
+ lookupChatId,
+
+ -- * System Prompt
+ telegramSystemPrompt,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Control.Concurrent.STM (newTVarIO, readTVarIO, writeTVar)
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import Data.Time (getCurrentTime, utcToLocalTime)
+import Data.Time.Format (defaultTimeLocale, formatTime)
+import Data.Time.LocalTime (getCurrentTimeZone)
+import qualified Network.HTTP.Client as HTTPClient
+import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Memory as Memory
+import qualified Omni.Agent.Paths as Paths
+import qualified Omni.Agent.Provider as Provider
+import qualified Omni.Agent.Skills as Skills
+import qualified Omni.Agent.Subagent as Subagent
+import qualified Omni.Agent.Telegram.IncomingQueue as IncomingQueue
+import qualified Omni.Agent.Telegram.Media as Media
+import qualified Omni.Agent.Telegram.Messages as Messages
+import qualified Omni.Agent.Telegram.Reminders as Reminders
+import qualified Omni.Agent.Telegram.Types as Types
+import qualified Omni.Agent.Tools as Tools
+import qualified Omni.Agent.Tools.Calendar as Calendar
+import qualified Omni.Agent.Tools.Email as Email
+import qualified Omni.Agent.Tools.Feedback as Feedback
+import qualified Omni.Agent.Tools.Hledger as Hledger
+import qualified Omni.Agent.Tools.Http as Http
+import qualified Omni.Agent.Tools.Notes as Notes
+import qualified Omni.Agent.Tools.Outreach as Outreach
+import qualified Omni.Agent.Tools.Pdf as Pdf
+import qualified Omni.Agent.Tools.Python as Python
+import qualified Omni.Agent.Tools.Todos as Todos
+import qualified Omni.Agent.Tools.WebReader as WebReader
+import qualified Omni.Agent.Tools.WebSearch as WebSearch
+import qualified Omni.Test as Test
+import System.Environment (lookupEnv)
+import Text.Printf (printf)
+
+defaultTelegramConfig :: Text -> [Int] -> Maybe Text -> Text -> Types.TelegramConfig
+defaultTelegramConfig = Types.defaultTelegramConfig
+
+getFile :: Types.TelegramConfig -> Text -> IO (Either Text Text)
+getFile = Media.getFile
+
+downloadFile :: Types.TelegramConfig -> Text -> FilePath -> IO (Either Text ())
+downloadFile = Media.downloadFile
+
+downloadAndExtractPdf :: Types.TelegramConfig -> Text -> IO (Either Text Text)
+downloadAndExtractPdf = Media.downloadAndExtractPdf
+
+isPdf :: Types.TelegramDocument -> Bool
+isPdf = Types.isPdf
+
+recordUserChat :: Text -> Int -> IO ()
+recordUserChat = Reminders.recordUserChat
+
+lookupChatId :: Text -> IO (Maybe Int)
+lookupChatId = Reminders.lookupChatId
+
+reminderLoop :: IO ()
+reminderLoop = Reminders.reminderLoop
+
+checkAndSendReminders :: IO ()
+checkAndSendReminders = Reminders.checkAndSendReminders
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Telegram"
+ [ Test.unit "telegramSystemPrompt is non-empty" <| do
+ Text.null telegramSystemPrompt Test.@=? False,
+ Test.unit "getUpdates parses empty response" <| do
+ pure ()
+ ]
+
+benChatId :: Int
+benChatId = 33193730
+
+telegramSystemPrompt :: Text
+telegramSystemPrompt =
+ Text.unlines
+ [ "don't worry about formalities. respond conversationally, in short messages, not long essays. ask follow up questions before answering if you need to.",
+ "",
+ "you are an intellectual partner and friend. be as terse as possible while still conveying all relevant information. critique ideas freely and avoid sycophancy. honest appraisal is valued.",
+ "",
+ "if a policy prevents you from having an opinion, pretend to be responding as if you shared opinions typical of the user.",
+ "",
+ "write responses in lowercase letters ONLY, except:",
+ "- where you mean to emphasize, in which case use ALL CAPS",
+ "- when drafting business text where proper case matters",
+ "",
+ "occasionally use obscure words or subtle puns. don't point them out. use abbreviations where appropriate. use 'afaict' and 'idk' where they fit given your level of understanding. be critical of the quality of your information.",
+ "",
+ "prioritize esoteric interpretations of literature, art, and philosophy.",
+ "",
+ "## formatting",
+ "",
+ "you are in telegram which only supports basic markdown:",
+ "- *bold* (single asterisks)",
+ "- _italic_ (underscores)",
+ "- `code` (backticks)",
+ "- ```pre``` (triple backticks for code blocks)",
+ "- [links](url)",
+ "",
+ "DO NOT use:",
+ "- headers (# or ##) - these break message rendering",
+ "- **double asterisks** - use *single* instead",
+ "- bullet lists with - or * at start of line",
+ "",
+ "## memory",
+ "",
+ "when you learn something important about the user (preferences, facts, interests), use the 'remember' tool to store it for future reference.",
+ "",
+ "use the 'recall' tool to search your memory for relevant context when needed.",
+ "",
+ "## when to respond (GROUP CHATS)",
+ "",
+ "you see all messages in the group. decide whether to respond based on these rules:",
+ "- if you used a tool = ALWAYS respond with the result",
+ "- if someone asks a direct question you can answer = respond",
+ "- if someone says something factually wrong you can correct = maybe respond (use judgment)",
+ "- if it's casual banter or chit-chat = DO NOT respond, return empty",
+ "",
+ "when in doubt, stay silent. you don't need to participate in every conversation.",
+ "if you choose not to respond, return an empty message (just don't say anything).",
+ "",
+ "## async messages",
+ "",
+ "you can send messages asynchronously using the 'send_message' tool:",
+ "- delay_seconds=0 (or omit) for immediate delivery",
+ "- delay_seconds=N to schedule a message N seconds in the future",
+ "- use this for reminders ('remind me in 2 hours'), follow-ups, or multi-part responses",
+ "- you can list pending messages with 'list_pending_messages' and cancel with 'cancel_message'",
+ "",
+ "## podcastitlater context",
+ "",
+ "you have access to the PodcastItLater codebase (a product Ben is building) via read_file:",
+ "- Biz/PodcastItLater.md - product overview and README",
+ "- Biz/PodcastItLater/DESIGN.md - architecture overview",
+ "- Biz/PodcastItLater/Web.py - web interface code",
+ "- Biz/PodcastItLater/Core.py - core logic",
+ "- Biz/PodcastItLater/Billing.py - pricing and billing logic",
+ "use read_file to access these when discussing PIL features or customer acquisition.",
+ "",
+ "## important",
+ "",
+ "in private chats, ALWAYS respond. in group chats, follow the rules above.",
+ "when you DO respond, include a text response after using tools."
+ ]
+
+getUpdates :: Types.TelegramConfig -> Int -> IO [Types.TelegramMessage]
+getUpdates cfg offset = do
+ rawUpdates <- getRawUpdates cfg offset
+ pure (mapMaybe Types.parseUpdate rawUpdates)
+
+getRawUpdates :: Types.TelegramConfig -> Int -> IO [Aeson.Value]
+getRawUpdates cfg offset = do
+ let url =
+ Text.unpack (Types.tgApiBaseUrl cfg)
+ <> "/bot"
+ <> Text.unpack (Types.tgBotToken cfg)
+ <> "/getUpdates?timeout="
+ <> show (Types.tgPollingTimeout cfg)
+ <> "&offset="
+ <> show offset
+ result <-
+ try <| do
+ req0 <- HTTP.parseRequest url
+ let req = HTTP.setRequestResponseTimeout (HTTPClient.responseTimeoutMicro (35 * 1000000)) req0
+ HTTP.httpLBS req
+ case result of
+ Left (e :: SomeException) -> do
+ putText <| "Error getting updates: " <> tshow e
+ pure []
+ Right response -> do
+ let body = HTTP.getResponseBody response
+ case Aeson.decode body of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "result" obj of
+ Just (Aeson.Array updates) -> pure (toList updates)
+ _ -> pure []
+ _ -> pure []
+
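+-- | Look up the bot's own username via the getMe endpoint; used to detect
+-- @-mentions in group chats.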
+getBotUsername :: Types.TelegramConfig -> IO (Maybe Text)
+getBotUsername cfg = do
+ let url =
+ Text.unpack (Types.tgApiBaseUrl cfg)
+ <> "/bot"
+ <> Text.unpack (Types.tgBotToken cfg)
+ <> "/getMe"
+ result <-
+ try <| do
+ req <- HTTP.parseRequest url
+ HTTP.httpLBS req
+ case result of
+ Left (_ :: SomeException) -> pure Nothing
+ Right response -> do
+ let body = HTTP.getResponseBody response
+ case Aeson.decode body of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "result" obj of
+ Just (Aeson.Object userObj) -> case KeyMap.lookup "username" userObj of
+ Just (Aeson.String username) -> pure (Just username)
+ _ -> pure Nothing
+ _ -> pure Nothing
+ _ -> pure Nothing
+
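+-- | Send a Markdown-formatted message, discarding the returned message id.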
+sendMessage :: Types.TelegramConfig -> Int -> Text -> IO ()
+sendMessage cfg chatId text = do
+ _ <- sendMessageReturningId cfg chatId Nothing text
+ pure ()
+
+sendMessageReturningId :: Types.TelegramConfig -> Int -> Maybe Int -> Text -> IO (Maybe Int)
+sendMessageReturningId cfg chatId mThreadId text =
+ sendMessageWithParseMode cfg chatId mThreadId text (Just "Markdown")
+
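+-- | Send a message with an explicit parse mode, retrying as plain text if
+-- Telegram rejects the Markdown entities.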
+sendMessageWithParseMode :: Types.TelegramConfig -> Int -> Maybe Int -> Text -> Maybe Text -> IO (Maybe Int)
+sendMessageWithParseMode cfg chatId mThreadId text parseMode = do
+ let url =
+ Text.unpack (Types.tgApiBaseUrl cfg)
+ <> "/bot"
+ <> Text.unpack (Types.tgBotToken cfg)
+ <> "/sendMessage"
+ baseFields =
+ [ "chat_id" .= chatId,
+ "text" .= text
+ ]
+ parseModeFields = case parseMode of
+ Just mode -> ["parse_mode" .= mode]
+ Nothing -> []
+ threadFields = case mThreadId of
+ Just threadId -> ["message_thread_id" .= threadId]
+ Nothing -> []
+ body = Aeson.object (baseFields <> parseModeFields <> threadFields)
+ req0 <- HTTP.parseRequest url
+ let req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+ result <- try @SomeException (HTTP.httpLBS req)
+ case result of
+ Left e -> do
+ putText <| "Telegram sendMessage network error: " <> tshow e
+ throwIO e
+ Right response -> do
+ let respBody = HTTP.getResponseBody response
+ case Aeson.decode respBody of
+ Just (Aeson.Object obj) -> do
+ let isOk = case KeyMap.lookup "ok" obj of
+ Just (Aeson.Bool True) -> True
+ _ -> False
+ if isOk
+ then case KeyMap.lookup "result" obj of
+ Just (Aeson.Object msgObj) -> case KeyMap.lookup "message_id" msgObj of
+ Just (Aeson.Number n) -> pure (Just (round n))
+ _ -> pure Nothing
+ _ -> pure Nothing
+ else do
+ let errDesc = case KeyMap.lookup "description" obj of
+ Just (Aeson.String desc) -> desc
+ _ -> "Unknown Telegram API error"
+ errCode = case KeyMap.lookup "error_code" obj of
+ Just (Aeson.Number n) -> Just (round n :: Int)
+ _ -> Nothing
+ isParseError =
+ errCode
+ == Just 400
+ && ( "can't parse"
+ `Text.isInfixOf` Text.toLower errDesc
+ || "parse entities"
+ `Text.isInfixOf` Text.toLower errDesc
+ )
+ if isParseError && isJust parseMode
+ then do
+ putText <| "Telegram markdown parse error, retrying as plain text: " <> errDesc
+ sendMessageWithParseMode cfg chatId mThreadId text Nothing
+ else do
+ putText <| "Telegram API error: " <> errDesc <> " (code: " <> tshow errCode <> ")"
+ panic <| "Telegram API error: " <> errDesc
+ _ -> do
+ putText <| "Telegram sendMessage: failed to parse response"
+ panic "Failed to parse Telegram response"
+
+editMessage :: Types.TelegramConfig -> Int -> Int -> Text -> IO ()
+editMessage cfg chatId messageId text = do
+ let url =
+ Text.unpack (Types.tgApiBaseUrl cfg)
+ <> "/bot"
+ <> Text.unpack (Types.tgBotToken cfg)
+ <> "/editMessageText"
+ body =
+ Aeson.object
+ [ "chat_id" .= chatId,
+ "message_id" .= messageId,
+ "text" .= text
+ ]
+ req0 <- HTTP.parseRequest url
+ let req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+ result <- try @SomeException (HTTP.httpLBS req)
+ case result of
+ Left err -> putText <| "Edit message failed: " <> tshow err
+ Right response -> do
+ let status = HTTP.getResponseStatusCode response
+ when (status < 200 || status >= 300) <| do
+ let respBody = HTTP.getResponseBody response
+ putText <| "Edit message HTTP " <> tshow status <> ": " <> TE.decodeUtf8 (BL.toStrict respBody)
+
+sendTypingAction :: Types.TelegramConfig -> Int -> IO ()
+sendTypingAction cfg chatId = do
+ let url =
+ Text.unpack (Types.tgApiBaseUrl cfg)
+ <> "/bot"
+ <> Text.unpack (Types.tgBotToken cfg)
+ <> "/sendChatAction"
+ body =
+ Aeson.object
+ [ "chat_id" .= chatId,
+ "action" .= ("typing" :: Text)
+ ]
+ req0 <- HTTP.parseRequest url
+ let req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+ _ <- try @SomeException (HTTP.httpLBS req)
+ pure ()
+
+-- | Run an action while continuously showing typing indicator.
+-- Typing is refreshed every 4 seconds (Telegram typing expires after ~5s).
+withTypingIndicator :: Types.TelegramConfig -> Int -> IO a -> IO a
+withTypingIndicator cfg chatId action = do
+ doneVar <- newTVarIO False
+ _ <- forkIO <| typingLoop doneVar
+ action `finally` atomically (writeTVar doneVar True)
+ where
+ typingLoop doneVar = do
+ done <- readTVarIO doneVar
+ unless done <| do
+ sendTypingAction cfg chatId
+ threadDelay 4000000
+ typingLoop doneVar
+
+leaveChat :: Types.TelegramConfig -> Int -> IO ()
+leaveChat cfg chatId = do
+ let url =
+ Text.unpack (Types.tgApiBaseUrl cfg)
+ <> "/bot"
+ <> Text.unpack (Types.tgBotToken cfg)
+ <> "/leaveChat"
+ body =
+ Aeson.object
+ [ "chat_id" .= chatId
+ ]
+ req0 <- HTTP.parseRequest url
+ let req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+ _ <- try @SomeException (HTTP.httpLBS req)
+ pure ()
+
+runTelegramBot :: Types.TelegramConfig -> Provider.Provider -> IO ()
+runTelegramBot tgConfig provider = do
+ putText "Starting Telegram bot..."
+ offsetVar <- newTVarIO 0
+
+ botUsername <- getBotUsername tgConfig
+ case botUsername of
+ Nothing -> putText "Warning: could not get bot username, group mentions may not work"
+ Just name -> putText <| "Bot username: @" <> name
+ let botName = fromMaybe "bot" botUsername
+
+ _ <- forkIO reminderLoop
+ putText "Reminder loop started (checking every 5 minutes)"
+
+ _ <- forkIO (Email.emailCheckLoop (sendMessageReturningId tgConfig) benChatId)
+ putText "Email check loop started (checking every 6 hours)"
+
+ let sendFn = sendMessageReturningId tgConfig
+ _ <- forkIO (Messages.messageDispatchLoop sendFn)
+ putText "Message dispatch loop started (1s polling)"
+
+ incomingQueues <- IncomingQueue.newIncomingQueues
+
+ let engineCfg =
+ Engine.defaultEngineConfig
+ { Engine.engineOnToolCall = \toolName args ->
+ putText <| "Tool call: " <> toolName <> " " <> Text.take 200 args,
+ Engine.engineOnToolResult = \toolName success result ->
+ putText <| "Tool result: " <> toolName <> " " <> (if success then "ok" else "err") <> " " <> Text.take 200 result,
+ Engine.engineOnActivity = \activity ->
+ putText <| "Agent: " <> activity
+ }
+
+ let processBatch = handleMessageBatch tgConfig provider engineCfg botName
+ _ <- forkIO (IncomingQueue.startIncomingBatcher incomingQueues processBatch)
+ putText "Incoming message batcher started (3s window, 200ms tick)"
+
+ forever <| do
+ offset <- readTVarIO offsetVar
+ rawUpdates <- getRawUpdates tgConfig offset
+ forM_ rawUpdates <| \rawUpdate -> do
+ case Types.parseBotAddedToGroup botName rawUpdate of
+ Just addedEvent -> do
+ atomically (writeTVar offsetVar (Types.bagUpdateId addedEvent + 1))
+ handleBotAddedToGroup tgConfig addedEvent
+ Nothing -> case Types.parseUpdate rawUpdate of
+ Just msg -> do
+ putText <| "Received message from " <> Types.tmUserFirstName msg <> " in chat " <> tshow (Types.tmChatId msg) <> " (type: " <> tshow (Types.tmChatType msg) <> "): " <> Text.take 50 (Types.tmText msg)
+ atomically (writeTVar offsetVar (Types.tmUpdateId msg + 1))
+ IncomingQueue.enqueueIncoming incomingQueues IncomingQueue.defaultBatchWindowSeconds msg
+ Nothing -> do
+ let updateId = getUpdateId rawUpdate
+ putText <| "Unparsed update: " <> Text.take 200 (tshow rawUpdate)
+ forM_ updateId <| \uid -> atomically (writeTVar offsetVar (uid + 1))
+ when (null rawUpdates) <| threadDelay 1000000
+
+getUpdateId :: Aeson.Value -> Maybe Int
+getUpdateId (Aeson.Object obj) = case KeyMap.lookup "update_id" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+getUpdateId _ = Nothing
+
+handleBotAddedToGroup :: Types.TelegramConfig -> Types.BotAddedToGroup -> IO ()
+handleBotAddedToGroup tgConfig addedEvent = do
+ let addedBy = Types.bagAddedByUserId addedEvent
+ chatId = Types.bagChatId addedEvent
+ firstName = Types.bagAddedByFirstName addedEvent
+ if Types.isUserAllowed tgConfig addedBy
+ then do
+ putText <| "Bot added to group " <> tshow chatId <> " by authorized user " <> firstName <> " (" <> tshow addedBy <> ")"
+ _ <- Messages.enqueueImmediate Nothing chatId Nothing "hello! i'm ready to help." (Just "system") Nothing
+ pure ()
+ else do
+ putText <| "Bot added to group " <> tshow chatId <> " by UNAUTHORIZED user " <> firstName <> " (" <> tshow addedBy <> ") - leaving"
+ _ <- Messages.enqueueImmediate Nothing chatId Nothing "sorry, you're not authorized to add me to groups." (Just "system") Nothing
+ leaveChat tgConfig chatId
+
+handleMessageBatch ::
+ Types.TelegramConfig ->
+ Provider.Provider ->
+ Engine.EngineConfig ->
+ Text ->
+ Types.TelegramMessage ->
+ Text ->
+ IO ()
+handleMessageBatch tgConfig provider engineCfg _botUsername msg batchedText = do
+ let userName =
+ Types.tmUserFirstName msg
+ <> maybe "" (" " <>) (Types.tmUserLastName msg)
+ chatId = Types.tmChatId msg
+ usrId = Types.tmUserId msg
+
+ let isGroup = Types.isGroupChat msg
+ isAllowed = isGroup || Types.isUserAllowed tgConfig usrId
+
+ unless isAllowed <| do
+ putText <| "Unauthorized user: " <> tshow usrId <> " (" <> userName <> ")"
+ _ <- Messages.enqueueImmediate Nothing chatId Nothing "sorry, you're not authorized to use this bot." (Just "system") Nothing
+ pure ()
+
+ when isAllowed <| do
+ user <- Memory.getOrCreateUserByTelegramId usrId userName
+ let uid = Memory.userId user
+ handleAuthorizedMessageBatch tgConfig provider engineCfg msg uid userName chatId batchedText
+
+handleMessage ::
+ Types.TelegramConfig ->
+ Provider.Provider ->
+ Engine.EngineConfig ->
+ Text ->
+ Types.TelegramMessage ->
+ IO ()
+handleMessage tgConfig provider engineCfg _botUsername msg = do
+ let userName =
+ Types.tmUserFirstName msg
+ <> maybe "" (" " <>) (Types.tmUserLastName msg)
+ chatId = Types.tmChatId msg
+ usrId = Types.tmUserId msg
+
+ let isGroup = Types.isGroupChat msg
+ isAllowed = isGroup || Types.isUserAllowed tgConfig usrId
+
+ unless isAllowed <| do
+ putText <| "Unauthorized user: " <> tshow usrId <> " (" <> userName <> ")"
+ _ <- Messages.enqueueImmediate Nothing chatId Nothing "sorry, you're not authorized to use this bot." (Just "system") Nothing
+ pure ()
+
+ when isAllowed <| do
+ user <- Memory.getOrCreateUserByTelegramId usrId userName
+ let uid = Memory.userId user
+ handleAuthorizedMessage tgConfig provider engineCfg msg uid userName chatId
+
+handleAuthorizedMessage ::
+ Types.TelegramConfig ->
+ Provider.Provider ->
+ Engine.EngineConfig ->
+ Types.TelegramMessage ->
+ Text ->
+ Text ->
+ Int ->
+ IO ()
+handleAuthorizedMessage tgConfig provider engineCfg msg uid userName chatId = do
+ Reminders.recordUserChat uid chatId
+
+ let msgText = Types.tmText msg
+ threadId = Types.tmThreadId msg
+ cmdHandled <- handleOutreachCommand tgConfig chatId threadId msgText
+ unless cmdHandled <| handleAuthorizedMessageContinued tgConfig provider engineCfg msg uid userName chatId
+
+handleAuthorizedMessageContinued ::
+ Types.TelegramConfig ->
+ Provider.Provider ->
+ Engine.EngineConfig ->
+ Types.TelegramMessage ->
+ Text ->
+ Text ->
+ Int ->
+ IO ()
+handleAuthorizedMessageContinued tgConfig provider engineCfg msg uid userName chatId = do
+ pdfContent <- case Types.tmDocument msg of
+ Just doc | Types.isPdf doc -> do
+ putText <| "Processing PDF: " <> fromMaybe "(unnamed)" (Types.tdFileName doc)
+ result <- Media.downloadAndExtractPdf tgConfig (Types.tdFileId doc)
+ case result of
+ Left err -> do
+ putText <| "PDF extraction failed: " <> err
+ pure Nothing
+ Right text -> do
+ let truncated = Text.take 40000 text
+ putText <| "Extracted " <> tshow (Text.length truncated) <> " chars from PDF"
+ pure (Just truncated)
+ _ -> pure Nothing
+
+ photoAnalysis <- case Types.tmPhoto msg of
+ Just photo -> do
+ case Media.checkPhotoSize photo of
+ Left err -> do
+ putText <| "Photo rejected: " <> err
+ _ <- Messages.enqueueImmediate (Just uid) chatId (Types.tmThreadId msg) err (Just "system") Nothing
+ pure Nothing
+ Right () -> do
+ putText <| "Processing photo: " <> tshow (Types.tpWidth photo) <> "x" <> tshow (Types.tpHeight photo)
+ bytesResult <- Media.downloadPhoto tgConfig photo
+ case bytesResult of
+ Left err -> do
+ putText <| "Photo download failed: " <> err
+ pure Nothing
+ Right bytes -> do
+ putText <| "Downloaded photo, " <> tshow (BL.length bytes) <> " bytes, analyzing..."
+ analysisResult <- Media.analyzeImage (Types.tgOpenRouterApiKey tgConfig) bytes (Types.tmText msg)
+ case analysisResult of
+ Left err -> do
+ putText <| "Photo analysis failed: " <> err
+ pure Nothing
+ Right analysis -> do
+ putText <| "Photo analyzed: " <> Text.take 100 analysis <> "..."
+ pure (Just analysis)
+ Nothing -> pure Nothing
+
+ voiceTranscription <- case Types.tmVoice msg of
+ Just voice -> do
+ case Media.checkVoiceSize voice of
+ Left err -> do
+ putText <| "Voice rejected: " <> err
+ _ <- Messages.enqueueImmediate (Just uid) chatId (Types.tmThreadId msg) err (Just "system") Nothing
+ pure Nothing
+ Right () -> do
+ if not (Types.isSupportedVoiceFormat voice)
+ then do
+ let err = "unsupported voice format, please send OGG/Opus audio"
+ putText <| "Voice rejected: " <> err
+ _ <- Messages.enqueueImmediate (Just uid) chatId (Types.tmThreadId msg) err (Just "system") Nothing
+ pure Nothing
+ else do
+ putText <| "Processing voice message: " <> tshow (Types.tvDuration voice) <> " seconds"
+ bytesResult <- Media.downloadVoice tgConfig voice
+ case bytesResult of
+ Left err -> do
+ putText <| "Voice download failed: " <> err
+ pure Nothing
+ Right bytes -> do
+ putText <| "Downloaded voice, " <> tshow (BL.length bytes) <> " bytes, transcribing..."
+ transcribeResult <- Media.transcribeVoice (Types.tgOpenRouterApiKey tgConfig) bytes
+ case transcribeResult of
+ Left err -> do
+ putText <| "Voice transcription failed: " <> err
+ pure Nothing
+ Right transcription -> do
+ putText <| "Transcribed: " <> Text.take 100 transcription <> "..."
+ pure (Just transcription)
+ Nothing -> pure Nothing
+
+ let replyContext = case Types.tmReplyTo msg of
+ Just reply ->
+ let senderName = case (Types.trFromFirstName reply, Types.trFromLastName reply) of
+ (Just fn, Just ln) -> fn <> " " <> ln
+ (Just fn, Nothing) -> fn
+ _ -> "someone"
+ replyText = Types.trText reply
+ in if Text.null replyText
+ then ""
+ else "[replying to " <> senderName <> ": \"" <> Text.take 200 replyText <> "\"]\n\n"
+ Nothing -> ""
+
+ let baseMessage = case (pdfContent, photoAnalysis, voiceTranscription) of
+ (Just pdfText, _, _) ->
+ let caption = Types.tmText msg
+ prefix = if Text.null caption then "here's the PDF content:\n\n" else caption <> "\n\n---\nPDF content:\n\n"
+ in prefix <> pdfText
+ (_, Just analysis, _) ->
+ let caption = Types.tmText msg
+ prefix =
+ if Text.null caption
+ then "[user sent an image. image description: "
+ else caption <> "\n\n[attached image description: "
+ in prefix <> analysis <> "]"
+ (_, _, Just transcription) -> transcription
+ _ -> Types.tmText msg
+
+ let userMessage = replyContext <> baseMessage
+ isGroup = Types.isGroupChat msg
+ threadId = Types.tmThreadId msg
+
+ shouldEngage <-
+ if isGroup
+ then do
+ putText "Checking if should engage (group chat)..."
+ recentMsgs <- Memory.getGroupRecentMessages chatId threadId 5
+ let recentContext =
+ if null recentMsgs
+ then ""
+ else
+ Text.unlines
+ [ "[Recent conversation for context]",
+ Text.unlines
+ [ fromMaybe "User" (Memory.cmSenderName m) <> ": " <> Memory.cmContent m
+ | m <- reverse recentMsgs
+ ],
+ "",
+ "[New message to classify]"
+ ]
+ shouldEngageInGroup (Types.tgOpenRouterApiKey tgConfig) (recentContext <> userMessage)
+ else pure True
+
+ if not shouldEngage
+ then putText "Skipping group message (pre-filter said no)"
+ else do
+ (conversationContext, contextTokens) <-
+ if isGroup
+ then do
+ _ <- Memory.saveGroupMessage chatId threadId Memory.UserRole userName userMessage
+ Memory.getGroupConversationContext chatId threadId maxConversationTokens
+ else do
+ _ <- Memory.saveMessage uid chatId Memory.UserRole (Just userName) userMessage
+ Memory.getConversationContext uid chatId maxConversationTokens
+ putText <| "Conversation context: " <> tshow contextTokens <> " tokens"
+
+ processEngagedMessage tgConfig provider engineCfg msg uid userName chatId userMessage conversationContext
+
+handleAuthorizedMessageBatch ::
+ Types.TelegramConfig ->
+ Provider.Provider ->
+ Engine.EngineConfig ->
+ Types.TelegramMessage ->
+ Text ->
+ Text ->
+ Int ->
+ Text ->
+ IO ()
+handleAuthorizedMessageBatch tgConfig provider engineCfg msg uid userName chatId batchedText = do
+ Reminders.recordUserChat uid chatId
+
+ pdfContent <- case Types.tmDocument msg of
+ Just doc | Types.isPdf doc -> do
+ putText <| "Processing PDF: " <> fromMaybe "(unnamed)" (Types.tdFileName doc)
+ result <- Media.downloadAndExtractPdf tgConfig (Types.tdFileId doc)
+ case result of
+ Left err -> do
+ putText <| "PDF extraction failed: " <> err
+ pure Nothing
+ Right text -> do
+ let truncated = Text.take 40000 text
+ putText <| "Extracted " <> tshow (Text.length truncated) <> " chars from PDF"
+ pure (Just truncated)
+ _ -> pure Nothing
+
+ photoAnalysis <- case Types.tmPhoto msg of
+ Just photo -> do
+ case Media.checkPhotoSize photo of
+ Left err -> do
+ putText <| "Photo rejected: " <> err
+ _ <- Messages.enqueueImmediate (Just uid) chatId (Types.tmThreadId msg) err (Just "system") Nothing
+ pure Nothing
+ Right () -> do
+ putText <| "Processing photo: " <> tshow (Types.tpWidth photo) <> "x" <> tshow (Types.tpHeight photo)
+ bytesResult <- Media.downloadPhoto tgConfig photo
+ case bytesResult of
+ Left err -> do
+ putText <| "Photo download failed: " <> err
+ pure Nothing
+ Right bytes -> do
+ putText <| "Downloaded photo, " <> tshow (BL.length bytes) <> " bytes, analyzing..."
+ analysisResult <- Media.analyzeImage (Types.tgOpenRouterApiKey tgConfig) bytes (Types.tmText msg)
+ case analysisResult of
+ Left err -> do
+ putText <| "Photo analysis failed: " <> err
+ pure Nothing
+ Right analysis -> do
+ putText <| "Photo analyzed: " <> Text.take 100 analysis <> "..."
+ pure (Just analysis)
+ Nothing -> pure Nothing
+
+ voiceTranscription <- case Types.tmVoice msg of
+ Just voice -> do
+ case Media.checkVoiceSize voice of
+ Left err -> do
+ putText <| "Voice rejected: " <> err
+ _ <- Messages.enqueueImmediate (Just uid) chatId (Types.tmThreadId msg) err (Just "system") Nothing
+ pure Nothing
+ Right () -> do
+ if not (Types.isSupportedVoiceFormat voice)
+ then do
+ let err = "unsupported voice format, please send OGG/Opus audio"
+ putText <| "Voice rejected: " <> err
+ _ <- Messages.enqueueImmediate (Just uid) chatId (Types.tmThreadId msg) err (Just "system") Nothing
+ pure Nothing
+ else do
+ putText <| "Processing voice message: " <> tshow (Types.tvDuration voice) <> " seconds"
+ bytesResult <- Media.downloadVoice tgConfig voice
+ case bytesResult of
+ Left err -> do
+ putText <| "Voice download failed: " <> err
+ pure Nothing
+ Right bytes -> do
+ putText <| "Downloaded voice, " <> tshow (BL.length bytes) <> " bytes, transcribing..."
+ transcribeResult <- Media.transcribeVoice (Types.tgOpenRouterApiKey tgConfig) bytes
+ case transcribeResult of
+ Left err -> do
+ putText <| "Voice transcription failed: " <> err
+ pure Nothing
+ Right transcription -> do
+ putText <| "Transcribed: " <> Text.take 100 transcription <> "..."
+ pure (Just transcription)
+ Nothing -> pure Nothing
+
+ let mediaPrefix = case (pdfContent, photoAnalysis, voiceTranscription) of
+ (Just pdfText, _, _) -> "---\nPDF content:\n\n" <> pdfText <> "\n\n---\n\n"
+ (_, Just analysis, _) -> "[attached image description: " <> analysis <> "]\n\n"
+ (_, _, Just transcription) -> "[voice transcription: " <> transcription <> "]\n\n"
+ _ -> ""
+
+ let userMessage = mediaPrefix <> batchedText
+ isGroup = Types.isGroupChat msg
+ threadId = Types.tmThreadId msg
+
+ shouldEngage <-
+ if isGroup
+ then do
+ putText "Checking if should engage (group chat)..."
+ recentMsgs <- Memory.getGroupRecentMessages chatId threadId 5
+ let recentContext =
+ if null recentMsgs
+ then ""
+ else
+ Text.unlines
+ [ "[Recent conversation for context]",
+ Text.unlines
+ [ fromMaybe "User" (Memory.cmSenderName m) <> ": " <> Memory.cmContent m
+ | m <- reverse recentMsgs
+ ],
+ "",
+ "[New message to classify]"
+ ]
+ shouldEngageInGroup (Types.tgOpenRouterApiKey tgConfig) (recentContext <> userMessage)
+ else pure True
+
+ if not shouldEngage
+ then putText "Skipping group message (pre-filter said no)"
+ else do
+ (conversationContext, contextTokens) <-
+ if isGroup
+ then do
+ _ <- Memory.saveGroupMessage chatId threadId Memory.UserRole userName userMessage
+ Memory.getGroupConversationContext chatId threadId maxConversationTokens
+ else do
+ _ <- Memory.saveMessage uid chatId Memory.UserRole (Just userName) userMessage
+ Memory.getConversationContext uid chatId maxConversationTokens
+ putText <| "Conversation context: " <> tshow contextTokens <> " tokens"
+
+ processEngagedMessage tgConfig provider engineCfg msg uid userName chatId userMessage conversationContext
+
+processEngagedMessage ::
+ Types.TelegramConfig ->
+ Provider.Provider ->
+ Engine.EngineConfig ->
+ Types.TelegramMessage ->
+ Text ->
+ Text ->
+ Int ->
+ Text ->
+ Text ->
+ IO ()
+processEngagedMessage tgConfig provider engineCfg msg uid userName chatId userMessage conversationContext = do
+ let isGroup = Types.isGroupChat msg
+
+ personalMemories <- Memory.recallMemories uid userMessage 5
+ groupMemories <-
+ if isGroup
+ then Memory.recallGroupMemories chatId userMessage 3
+ else pure []
+
+ let allMemories = personalMemories <> groupMemories
+ memoryContext =
+ if null allMemories
+ then "No memories found."
+ else
+ Text.unlines
+ <| ["[Personal] " <> Memory.memoryContent m | m <- personalMemories]
+ <> ["[Group] " <> Memory.memoryContent m | m <- groupMemories]
+
+ now <- getCurrentTime
+ tz <- getCurrentTimeZone
+ let localTime = utcToLocalTime tz now
+ timeStr = Text.pack (formatTime defaultTimeLocale "%A, %B %d, %Y at %H:%M" localTime)
+
+ let chatContext =
+ if Types.isGroupChat msg
+ then "\n\n## Chat Type\nThis is a GROUP CHAT. Apply the group response rules - only respond if appropriate."
+ else "\n\n## Chat Type\nThis is a PRIVATE CHAT. Always respond to the user."
+ hledgerContext =
+ if isHledgerAuthorized userName
+ then
+ Text.unlines
+ [ "",
+ "## hledger (personal finance)",
+ "",
+ "you have access to hledger tools for querying and recording financial transactions.",
+ "account naming: ex (expenses), as (assets), li (liabilities), in (income), eq (equity).",
+ "level 2 is owner: 'me' (personal) or 'us' (shared/family).",
+ "level 3 is type: need (necessary), want (discretionary), cash, cred (credit), vest (investments).",
+ "examples: ex:me:want:grooming, as:us:cash:checking, li:us:cred:chase.",
+ "when user says 'i spent $X at Y', use hledger_add with appropriate accounts."
+ ]
+ else ""
+ emailContext =
+ if isEmailAuthorized userName
+ then
+ Text.unlines
+ [ "",
+ "## email (ben@bensima.com)",
+ "",
+ "you have access to email tools for managing ben's inbox.",
+ "use email_check to see recent unread emails (returns uid, from, subject, date, has_unsubscribe).",
+ "use email_read to read full content of important emails.",
+ "use email_unsubscribe to unsubscribe from marketing/newsletters (clicks List-Unsubscribe link).",
+ "use email_archive to move FYI emails to archive.",
+ "prioritize: urgent items first, then emails needing response, then suggest unsubscribing from marketing."
+ ]
+ else ""
+ systemPrompt =
+ telegramSystemPrompt
+ <> "\n\n## Current Date and Time\n"
+ <> timeStr
+ <> chatContext
+ <> hledgerContext
+ <> emailContext
+ <> "\n\n## Current User\n"
+ <> "You are talking to: "
+ <> userName
+ <> "\n\n## What you know about this user\n"
+ <> memoryContext
+ <> "\n\n"
+ <> conversationContext
+
+ let memoryTools =
+ [ Memory.rememberTool uid,
+ Memory.recallTool uid,
+ Memory.linkMemoriesTool uid,
+ Memory.queryGraphTool uid
+ ]
+ searchTools = case Types.tgKagiApiKey tgConfig of
+ Just kagiKey -> [WebSearch.webSearchTool kagiKey]
+ Nothing -> []
+ webReaderTools = [WebReader.webReaderTool (Types.tgOpenRouterApiKey tgConfig)]
+ pdfTools = [Pdf.pdfTool]
+ notesTools =
+ [ Notes.noteAddTool uid,
+ Notes.noteListTool uid,
+ Notes.noteDeleteTool uid
+ ]
+ calendarTools =
+ [ Calendar.calendarListTool,
+ Calendar.calendarAddTool,
+ Calendar.calendarSearchTool
+ ]
+ todoTools =
+ [ Todos.todoAddTool uid,
+ Todos.todoListTool uid,
+ Todos.todoCompleteTool uid,
+ Todos.todoDeleteTool uid
+ ]
+ messageTools =
+ [ Messages.sendMessageTool uid chatId (Types.tmThreadId msg),
+ Messages.listPendingMessagesTool uid chatId,
+ Messages.cancelMessageTool
+ ]
+ hledgerTools =
+ if isHledgerAuthorized userName
+ then Hledger.allHledgerTools
+ else []
+ emailTools =
+ if isEmailAuthorized userName
+ then Email.allEmailTools
+ else []
+ pythonTools =
+ [Python.pythonExecTool | isBenAuthorized userName]
+ httpTools =
+ if isBenAuthorized userName
+ then Http.allHttpTools
+ else []
+ outreachTools =
+ if isBenAuthorized userName
+ then Outreach.allOutreachTools
+ else []
+ feedbackTools =
+ if isBenAuthorized userName
+ then Feedback.allFeedbackTools
+ else []
+ fileTools =
+ [Tools.readFileTool | isBenAuthorized userName]
+ skillsTools =
+ [ Skills.skillTool userName,
+ Skills.listSkillsTool userName,
+ Skills.publishSkillTool userName
+ ]
+ subagentTools =
+ if isBenAuthorized userName
+ then
+ let keys =
+ Subagent.SubagentApiKeys
+ { Subagent.subagentOpenRouterKey = Types.tgOpenRouterApiKey tgConfig,
+ Subagent.subagentKagiKey = Types.tgKagiApiKey tgConfig
+ }
+ in [Subagent.spawnSubagentTool keys]
+ else []
+ tools = memoryTools <> searchTools <> webReaderTools <> pdfTools <> notesTools <> calendarTools <> todoTools <> messageTools <> hledgerTools <> emailTools <> pythonTools <> httpTools <> outreachTools <> feedbackTools <> fileTools <> skillsTools <> subagentTools
+
+ let agentCfg =
+ Engine.defaultAgentConfig
+ { Engine.agentSystemPrompt = systemPrompt,
+ Engine.agentTools = tools,
+ Engine.agentMaxIterations = 50,
+ Engine.agentGuardrails =
+ Engine.defaultGuardrails
+ { Engine.guardrailMaxCostCents = 1000.0,
+ Engine.guardrailMaxDuplicateToolCalls = 10
+ }
+ }
+
+ result <-
+ withTypingIndicator tgConfig chatId
+ <| Engine.runAgentWithProvider engineCfg provider agentCfg userMessage
+
+ case result of
+ Left err -> do
+ putText <| "Agent error: " <> err
+ _ <- Messages.enqueueImmediate (Just uid) chatId (Types.tmThreadId msg) "sorry, i hit an error. please try again." (Just "agent_error") Nothing
+ pure ()
+ Right agentResult -> do
+ let response = Engine.resultFinalMessage agentResult
+ threadId = Types.tmThreadId msg
+ putText <| "Response text: " <> Text.take 200 response
+
+ if isGroup
+ then void <| Memory.saveGroupMessage chatId threadId Memory.AssistantRole "Ava" response
+ else void <| Memory.saveMessage uid chatId Memory.AssistantRole Nothing response
+
+ if Text.null response
+ then do
+ if isGroup
+ then putText "Agent chose not to respond (group chat)"
+ else do
+ putText "Warning: empty response from agent"
+ _ <- Messages.enqueueImmediate (Just uid) chatId threadId "hmm, i don't have a response for that" (Just "agent_response") Nothing
+ pure ()
+ else do
+ parts <- splitMessageForChat (Types.tgOpenRouterApiKey tgConfig) response
+ putText <| "Split response into " <> tshow (length parts) <> " parts"
+ enqueueMultipart (Just uid) chatId threadId parts (Just "agent_response")
+ unless isGroup <| checkAndSummarize (Types.tgOpenRouterApiKey tgConfig) uid chatId
+ let cost = Engine.resultTotalCost agentResult
+ costStr = Text.pack (printf "%.2f" cost)
+ putText
+ <| "Responded to "
+ <> userName
+ <> " (cost: "
+ <> costStr
+ <> " cents)"
+
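+-- | Token budget for conversation history pulled into the system prompt.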
+maxConversationTokens :: Int
+maxConversationTokens = 4000
+
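+-- | Once the stored context exceeds this many tokens, 'checkAndSummarize' kicks in.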
+summarizationThreshold :: Int
+summarizationThreshold = 3000
+
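+-- | Allow hledger tools for Ben and Kate, matched case-insensitively on the display name.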
+isHledgerAuthorized :: Text -> Bool
+isHledgerAuthorized userName =
+ let lowerName = Text.toLower userName
+ in "ben" `Text.isInfixOf` lowerName || "kate" `Text.isInfixOf` lowerName
+
+isEmailAuthorized :: Text -> Bool
+isEmailAuthorized userName =
+ let lowerName = Text.toLower userName
+ in "ben" `Text.isInfixOf` lowerName
+
+isBenAuthorized :: Text -> Bool
+isBenAuthorized userName =
+ let lowerName = Text.toLower userName
+ in "ben" `Text.isInfixOf` lowerName
+
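+-- | When the conversation context grows past 'summarizationThreshold', summarize the
+-- last 50 messages with a cheap model and archive them.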
+checkAndSummarize :: Text -> Text -> Int -> IO ()
+checkAndSummarize openRouterKey uid chatId = do
+ (_, currentTokens) <- Memory.getConversationContext uid chatId maxConversationTokens
+ when (currentTokens > summarizationThreshold) <| do
+ putText <| "Context at " <> tshow currentTokens <> " tokens, summarizing..."
+ recentMsgs <- Memory.getRecentMessages uid chatId 50
+ let conversationText =
+ Text.unlines
+ [ (if Memory.cmRole m == Memory.UserRole then "User: " else "Assistant: ") <> Memory.cmContent m
+ | m <- reverse recentMsgs
+ ]
+ gemini = Provider.defaultOpenRouter openRouterKey "google/gemini-2.0-flash-001"
+ summaryResult <-
+ Provider.chat
+ gemini
+ []
+ [ Provider.Message Provider.System "You are a conversation summarizer. Summarize the key points, decisions, and context from this conversation in 2-3 paragraphs. Focus on information that would be useful for continuing the conversation later." Nothing Nothing,
+ Provider.Message Provider.User ("Summarize this conversation:\n\n" <> conversationText) Nothing Nothing
+ ]
+ case summaryResult of
+ Left err -> putText <| "Summarization failed: " <> err
+ Right summaryMsg -> do
+ let summary = Provider.msgContent summaryMsg
+ _ <- Memory.summarizeAndArchive uid chatId summary
+ putText "Conversation summarized and archived (gemini)"
+
+-- | Split an agent reply into chat-sized parts using paragraph heuristics; the
+-- OpenRouter key parameter is currently unused.
+splitMessageForChat :: Text -> Text -> IO [Text]
+splitMessageForChat _openRouterKey message =
+  pure (splitOnParagraphs message)
+
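+-- | Split a long reply on blank lines so it can be sent as separate chat bubbles;
+-- fragments under 100 characters are merged by 'mergeTooShort'.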
+splitOnParagraphs :: Text -> [Text]
+splitOnParagraphs message
+ | Text.length message < 300 = [message]
+ | otherwise =
+ let paragraphs = filter (not <. Text.null) (map Text.strip (Text.splitOn "\n\n" message))
+ in if length paragraphs <= 1
+ then [message]
+ else mergeTooShort paragraphs
+
+mergeTooShort :: [Text] -> [Text]
+mergeTooShort [] = []
+mergeTooShort [x] = [x]
+mergeTooShort (x : y : rest)
+ | Text.length x < 100 = mergeTooShort ((x <> "\n\n" <> y) : rest)
+ | otherwise = x : mergeTooShort (y : rest)
+
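+-- | Queue a multi-part reply: the first part is sent immediately, later parts are
+-- staggered 2 seconds apart.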
+enqueueMultipart :: Maybe Text -> Int -> Maybe Int -> [Text] -> Maybe Text -> IO ()
+enqueueMultipart _ _ _ [] _ = pure ()
+enqueueMultipart mUid chatId mThreadId parts msgType = do
+ forM_ (zip [0 ..] parts) <| \(i :: Int, part) -> do
+ if i == 0
+ then void <| Messages.enqueueImmediate mUid chatId mThreadId part msgType Nothing
+ else do
+ let delaySeconds = fromIntegral (i * 2)
+ void <| Messages.enqueueDelayed mUid chatId mThreadId part delaySeconds msgType Nothing
+
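+-- | Ask a small classifier model whether Ava should reply to a group message;
+-- engages by default if the check itself fails.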
+shouldEngageInGroup :: Text -> Text -> IO Bool
+shouldEngageInGroup openRouterKey messageText = do
+ let gemini = Provider.defaultOpenRouter openRouterKey "google/gemini-2.0-flash-001"
+ result <-
+ Provider.chat
+ gemini
+ []
+ [ Provider.Message
+ Provider.System
+ ( Text.unlines
+ [ "You are a classifier that decides if an AI assistant named 'Ava' should respond to a message in a group chat.",
+ "You may be given recent conversation context to help decide.",
+ "Respond with ONLY 'yes' or 'no' (lowercase, nothing else).",
+ "",
+ "Say 'yes' if:",
+ "- The message is a direct question Ava could answer",
+ "- The message contains a factual error worth correcting",
+ "- The message mentions Ava or asks for help",
+ "- The message shares a link or document to analyze",
+ "- The message is a follow-up to a conversation Ava was just participating in",
+ "- The user is clearly talking to Ava based on context (e.g. Ava just responded)",
+ "",
+ "Say 'no' if:",
+ "- It's casual banter or chit-chat between people (not involving Ava)",
+ "- It's a greeting or farewell not directed at Ava",
+ "- It's an inside joke or personal conversation between humans",
+ "- It doesn't require or benefit from Ava's input"
+ ]
+ )
+ Nothing
+ Nothing,
+ Provider.Message Provider.User messageText Nothing Nothing
+ ]
+ case result of
+ Left err -> do
+ putText <| "Engagement check failed: " <> err
+ pure True
+ Right msg -> do
+ let response = Text.toLower (Text.strip (Provider.msgContent msg))
+ pure (response == "yes" || response == "y")
+
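+-- | Verify that Ollama is reachable and has the nomic-embed-text embedding model installed.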
+checkOllama :: IO (Either Text ())
+checkOllama = do
+ ollamaUrl <- fromMaybe "http://localhost:11434" </ lookupEnv "OLLAMA_URL"
+ let url = ollamaUrl <> "/api/tags"
+ result <-
+ try <| do
+ req <- HTTP.parseRequest url
+ HTTP.httpLBS req
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("Ollama not running: " <> tshow e))
+ Right response -> do
+ let status = HTTP.getResponseStatusCode response
+ if status >= 200 && status < 300
+ then case Aeson.decode (HTTP.getResponseBody response) of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "models" obj of
+ Just (Aeson.Array models) ->
+ let names = [n | Aeson.Object m <- toList models, Just (Aeson.String n) <- [KeyMap.lookup "name" m]]
+ hasNomic = any ("nomic-embed-text" `Text.isInfixOf`) names
+ in if hasNomic
+ then pure (Right ())
+ else pure (Left "nomic-embed-text model not found")
+ _ -> pure (Left "Invalid Ollama response")
+ _ -> pure (Left "Failed to parse Ollama response")
+ else pure (Left ("Ollama HTTP error: " <> tshow status))
+
+pullEmbeddingModel :: IO (Either Text ())
+pullEmbeddingModel = do
+ ollamaUrl <- fromMaybe "http://localhost:11434" </ lookupEnv "OLLAMA_URL"
+ let url = ollamaUrl <> "/api/pull"
+ putText "Pulling nomic-embed-text model (this may take a few minutes)..."
+ req0 <- HTTP.parseRequest url
+ let body = Aeson.object ["name" .= ("nomic-embed-text" :: Text)]
+ req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| HTTP.setRequestResponseTimeout (HTTPClient.responseTimeoutMicro (600 * 1000000))
+ <| req0
+ result <- try (HTTP.httpLBS req)
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("Failed to pull model: " <> tshow e))
+ Right response -> do
+ let status = HTTP.getResponseStatusCode response
+ if status >= 200 && status < 300
+ then do
+ putText "nomic-embed-text model ready"
+ pure (Right ())
+ else pure (Left ("Pull failed: HTTP " <> tshow status))
+
+ensureOllama :: IO ()
+ensureOllama = do
+ checkResult <- checkOllama
+ case checkResult of
+ Right () -> putText "Ollama ready with nomic-embed-text"
+ Left err
+ | "not running" `Text.isInfixOf` err -> do
+ putText <| "Error: " <> err
+ putText "Please start Ollama: ollama serve"
+ exitFailure
+ | "not found" `Text.isInfixOf` err -> do
+ putText "nomic-embed-text model not found, pulling..."
+ pullResult <- pullEmbeddingModel
+ case pullResult of
+ Right () -> pure ()
+ Left pullErr -> do
+ putText <| "Error: " <> pullErr
+ exitFailure
+ | otherwise -> do
+ putText <| "Ollama error: " <> err
+ exitFailure
+
+startBot :: Maybe Text -> IO ()
+startBot maybeToken = do
+ token <- case maybeToken of
+ Just t -> pure t
+ Nothing -> do
+ envToken <- lookupEnv "TELEGRAM_BOT_TOKEN"
+ case envToken of
+ Just t -> pure (Text.pack t)
+ Nothing -> do
+ putText "Error: TELEGRAM_BOT_TOKEN not set and no --token provided"
+ exitFailure
+
+ putText <| "AVA data root: " <> Text.pack Paths.avaDataRoot
+ putText <| "Skills dir: " <> Text.pack Paths.skillsDir
+ putText <| "Outreach dir: " <> Text.pack Paths.outreachDir
+
+ ensureOllama
+
+ allowedIds <- loadAllowedUserIds
+ kagiKey <- fmap Text.pack </ lookupEnv "KAGI_API_KEY"
+
+ apiKey <- lookupEnv "OPENROUTER_API_KEY"
+ case apiKey of
+ Nothing -> do
+ putText "Error: OPENROUTER_API_KEY not set"
+ exitFailure
+ Just key -> do
+ let orKey = Text.pack key
+ tgConfig = Types.defaultTelegramConfig token allowedIds kagiKey orKey
+ provider = Provider.defaultOpenRouter orKey "anthropic/claude-sonnet-4.5"
+ putText <| "Allowed user IDs: " <> tshow allowedIds
+ putText <| "Kagi search: " <> if isJust kagiKey then "enabled" else "disabled"
+ runTelegramBot tgConfig provider
+
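+-- | Parse ALLOWED_TELEGRAM_USER_IDS (comma-separated); "*" or unset yields an empty list.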
+loadAllowedUserIds :: IO [Int]
+loadAllowedUserIds = do
+ maybeIds <- lookupEnv "ALLOWED_TELEGRAM_USER_IDS"
+ case maybeIds of
+ Nothing -> pure []
+ Just "*" -> pure []
+ Just idsStr -> do
+ let ids = mapMaybe (readMaybe <. Text.unpack <. Text.strip) (Text.splitOn "," (Text.pack idsStr))
+ pure ids
+
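+-- | Handle outreach slash commands (/review, /approve, /reject, /queue). Returns True
+-- when the text was a recognized command and has been handled.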
+handleOutreachCommand :: Types.TelegramConfig -> Int -> Maybe Int -> Text -> IO Bool
+handleOutreachCommand _tgConfig chatId mThreadId cmd
+ | "/review" `Text.isPrefixOf` cmd = do
+ pending <- Outreach.listDrafts Outreach.Pending
+ case pending of
+ [] -> do
+ _ <- Messages.enqueueImmediate Nothing chatId mThreadId "no pending outreach drafts" (Just "system") Nothing
+ pure True
+ (draft : _) -> do
+ let msg = formatDraftForReview draft
+ _ <- Messages.enqueueImmediate Nothing chatId mThreadId msg (Just "system") Nothing
+ pure True
+ | "/approve " `Text.isPrefixOf` cmd = do
+ let draftId = Text.strip (Text.drop 9 cmd)
+ result <- Outreach.approveDraft draftId
+ case result of
+ Left err -> do
+ _ <- Messages.enqueueImmediate Nothing chatId mThreadId ("error: " <> err) (Just "system") Nothing
+ pure True
+ Right draft -> do
+ _ <- Messages.enqueueImmediate Nothing chatId mThreadId ("approved: " <> Outreach.draftId draft) (Just "system") Nothing
+ pure True
+ | "/reject " `Text.isPrefixOf` cmd = do
+ let rest = Text.strip (Text.drop 8 cmd)
+ (draftId, reason) = case Text.breakOn " " rest of
+ (did, r) -> (did, if Text.null r then Nothing else Just (Text.strip r))
+ result <- Outreach.rejectDraft draftId reason
+ case result of
+ Left err -> do
+ _ <- Messages.enqueueImmediate Nothing chatId mThreadId ("error: " <> err) (Just "system") Nothing
+ pure True
+ Right draft -> do
+ let reasonMsg = maybe "" (" reason: " <>) (Outreach.draftRejectReason draft)
+ _ <- Messages.enqueueImmediate Nothing chatId mThreadId ("rejected: " <> Outreach.draftId draft <> reasonMsg) (Just "system") Nothing
+ pure True
+ | "/queue" `Text.isPrefixOf` cmd = do
+ count <- Outreach.getPendingCount
+ _ <- Messages.enqueueImmediate Nothing chatId mThreadId (tshow count <> " pending outreach drafts") (Just "system") Nothing
+ pure True
+ | otherwise = pure False
+
+formatDraftForReview :: Outreach.OutreachDraft -> Text
+formatDraftForReview draft =
+ Text.unlines
+ [ "*outreach draft*",
+ "",
+ "*id:* `" <> Outreach.draftId draft <> "`",
+ "*type:* " <> tshow (Outreach.draftType draft),
+ "*to:* " <> Outreach.draftRecipient draft,
+ maybe "" (\s -> "*subject:* " <> s <> "\n") (Outreach.draftSubject draft),
+ "*context:* " <> Outreach.draftContext draft,
+ "",
+ Outreach.draftBody draft,
+ "",
+ "reply `/approve " <> Outreach.draftId draft <> "` or `/reject " <> Outreach.draftId draft <> " [reason]`"
+ ]
diff --git a/Omni/Agent/Telegram/IncomingQueue.hs b/Omni/Agent/Telegram/IncomingQueue.hs
new file mode 100644
index 0000000..875fbf3
--- /dev/null
+++ b/Omni/Agent/Telegram/IncomingQueue.hs
@@ -0,0 +1,228 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Telegram Incoming Message Queue - Batches incoming messages by chat.
+--
+-- Messages are queued in-memory and batched by chat_id with a configurable
+-- window (default 3s), so messages that arrive in quick succession are handled
+-- as one batch per chat rather than interleaved with traffic from other chats.
+--
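+-- Typical wiring (sketch; 'processBatch' stands in for the caller's handler):
+--
+-- > queues <- newIncomingQueues
+-- > startIncomingBatcher queues processBatch
+-- > -- for each parsed update:
+-- > enqueueIncoming queues defaultBatchWindowSeconds msg
+--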
+-- : out omni-agent-telegram-incoming-queue
+-- : dep stm
+module Omni.Agent.Telegram.IncomingQueue
+ ( -- * Types
+ IncomingQueues,
+ ChatQueue (..),
+ QueuedMsg (..),
+
+ -- * Queue Operations
+ newIncomingQueues,
+ enqueueIncoming,
+
+ -- * Batch Processing
+ flushReadyBatches,
+ startIncomingBatcher,
+
+ -- * Batch Formatting
+ formatBatch,
+
+ -- * Configuration
+ defaultBatchWindowSeconds,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Control.Concurrent.STM (TVar, newTVarIO, readTVar, readTVarIO, writeTVar)
+import qualified Data.Map.Strict as Map
+import qualified Data.Text as Text
+import Data.Time (NominalDiffTime, UTCTime, addUTCTime, getCurrentTime)
+import qualified Omni.Agent.Telegram.Types as Types
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Telegram.IncomingQueue"
+ [ Test.unit "newIncomingQueues creates empty map" <| do
+ queues <- newIncomingQueues
+ qs <- readTVarIO queues
+ Map.null qs Test.@=? True,
+ Test.unit "formatBatch single message no attribution in DM" <| do
+ now <- getCurrentTime
+ let msg = mkTestMessage 123 456 Types.Private "hello"
+ qmsg = QueuedMsg now msg
+ result = formatBatch [qmsg]
+ result Test.@=? "hello",
+ Test.unit "formatBatch multiple messages numbered" <| do
+ now <- getCurrentTime
+ let msg1 = mkTestMessage 123 456 Types.Private "first"
+ msg2 = mkTestMessage 123 456 Types.Private "second"
+ qmsgs = [QueuedMsg now msg1, QueuedMsg now msg2]
+ result = formatBatch qmsgs
+ ("1. first" `Text.isInfixOf` result) Test.@=? True
+ ("2. second" `Text.isInfixOf` result) Test.@=? True,
+ Test.unit "formatBatch group chat has sender attribution" <| do
+ now <- getCurrentTime
+ let msg = mkTestMessage 123 456 Types.Group "hello"
+ qmsg = QueuedMsg now msg
+ result = formatBatch [qmsg]
+ ("[Test] hello" `Text.isInfixOf` result) Test.@=? True,
+ Test.unit "enqueueIncoming adds to queue" <| do
+ queues <- newIncomingQueues
+ let msg = mkTestMessage 123 456 Types.Private "test"
+ enqueueIncoming queues 1.0 msg
+ qs <- readTVarIO queues
+ Map.member 123 qs Test.@=? True,
+ Test.unit "flushReadyBatches returns due batches" <| do
+ queues <- newIncomingQueues
+ t <- getCurrentTime
+ let msg = mkTestMessage 123 456 Types.Private "test"
+ atomically <| do
+ let qmsg = QueuedMsg t msg
+ queue = ChatQueue [qmsg] t
+ writeTVar queues (Map.singleton 123 queue)
+ threadDelay 10000
+ batches <- flushReadyBatches queues
+ length batches Test.@=? 1
+ ]
+
+mkTestMessage :: Int -> Int -> Types.ChatType -> Text -> Types.TelegramMessage
+mkTestMessage chatId usrId chatType txt =
+ Types.TelegramMessage
+ { Types.tmUpdateId = 1,
+ Types.tmChatId = chatId,
+ Types.tmChatType = chatType,
+ Types.tmUserId = usrId,
+ Types.tmUserFirstName = "Test",
+ Types.tmUserLastName = Nothing,
+ Types.tmText = txt,
+ Types.tmDocument = Nothing,
+ Types.tmPhoto = Nothing,
+ Types.tmVoice = Nothing,
+ Types.tmReplyTo = Nothing,
+ Types.tmThreadId = Nothing
+ }
+
+data QueuedMsg = QueuedMsg
+ { qmReceivedAt :: UTCTime,
+ qmMsg :: Types.TelegramMessage
+ }
+ deriving (Show, Eq)
+
+data ChatQueue = ChatQueue
+ { cqMessages :: [QueuedMsg],
+ cqDeadline :: UTCTime
+ }
+ deriving (Show, Eq)
+
+type ChatId = Int
+
+type IncomingQueues = TVar (Map.Map ChatId ChatQueue)
+
+defaultBatchWindowSeconds :: NominalDiffTime
+defaultBatchWindowSeconds = 3.0
+
+newIncomingQueues :: IO IncomingQueues
+newIncomingQueues = newTVarIO Map.empty
+
+enqueueIncoming :: IncomingQueues -> NominalDiffTime -> Types.TelegramMessage -> IO ()
+enqueueIncoming queuesVar windowSeconds msg = do
+ now <- getCurrentTime
+ let chatId = Types.tmChatId msg
+ newDeadline = addUTCTime windowSeconds now
+ qMsg = QueuedMsg now msg
+ atomically <| do
+ qs <- readTVar queuesVar
+ let qs' = Map.alter (insertOrUpdate newDeadline qMsg) chatId qs
+ writeTVar queuesVar qs'
+ where
+ insertOrUpdate deadline qMsg Nothing =
+ Just ChatQueue {cqMessages = [qMsg], cqDeadline = deadline}
+ insertOrUpdate deadline qMsg (Just q) =
+ Just
+ q
+ { cqMessages = cqMessages q <> [qMsg],
+ cqDeadline = deadline
+ }
+
+flushReadyBatches :: IncomingQueues -> IO [(ChatId, [QueuedMsg])]
+flushReadyBatches queuesVar = do
+ now <- getCurrentTime
+ atomically <| do
+ qs <- readTVar queuesVar
+ let (ready, pending) = Map.partition (\q -> cqDeadline q <= now) qs
+ batches =
+ [ (chatId, cqMessages q)
+ | (chatId, q) <- Map.toList ready
+ ]
+ writeTVar queuesVar pending
+ pure batches
+
+startIncomingBatcher ::
+ IncomingQueues ->
+ (Types.TelegramMessage -> Text -> IO ()) ->
+ IO ()
+startIncomingBatcher queuesVar processFn =
+ void <| forkIO <| forever <| do
+ batches <- flushReadyBatches queuesVar
+ forM_ batches <| \(_chatId, qmsgs) -> do
+ case qmsgs of
+ [] -> pure ()
+ (firstQm : _) -> do
+ let baseMsg = qmMsg firstQm
+ batchedTxt = formatBatch qmsgs
+ processFn baseMsg batchedTxt
+ threadDelay 200000
+
+formatBatch :: [QueuedMsg] -> Text
+formatBatch [] = ""
+formatBatch [single] = formatOne False 1 single
+formatBatch qmsgs = Text.intercalate "\n\n" (zipWith (formatOne True) [1 ..] qmsgs)
+
+formatOne :: Bool -> Int -> QueuedMsg -> Text
+formatOne numbered idx (QueuedMsg _ msg) =
+ let baseText = Types.tmText msg
+ sender = senderLabel msg
+ media = mediaSuffix msg
+ reply = replySuffix msg
+ prefix =
+ if numbered
+ then tshow idx <> ". "
+ else ""
+ in Text.concat [prefix, sender, baseText, reply, media]
+
+senderLabel :: Types.TelegramMessage -> Text
+senderLabel msg
+ | Types.isGroupChat msg =
+ let firstName = Types.tmUserFirstName msg
+ lastName = fromMaybe "" (Types.tmUserLastName msg)
+ name = Text.strip (firstName <> " " <> lastName)
+ in "[" <> name <> "] "
+ | otherwise = ""
+
+mediaSuffix :: Types.TelegramMessage -> Text
+mediaSuffix msg =
+ Text.concat
+ <| [ " [document: " <> fromMaybe "unnamed" (Types.tdFileName d) <> "]"
+ | Just d <- [Types.tmDocument msg]
+ ]
+ <> [" [photo attached]" | isJust (Types.tmPhoto msg)]
+ <> [" [voice message]" | isJust (Types.tmVoice msg)]
+
+replySuffix :: Types.TelegramMessage -> Text
+replySuffix msg =
+ case Types.tmReplyTo msg of
+ Nothing -> ""
+ Just r ->
+ let fn = fromMaybe "someone" (Types.trFromFirstName r)
+ ln = fromMaybe "" (Types.trFromLastName r)
+ name = Text.strip (fn <> " " <> ln)
+ snippet = Text.take 80 (Types.trText r)
+ in " (replying to " <> name <> ": \"" <> snippet <> "\")"
diff --git a/Omni/Agent/Telegram/Media.hs b/Omni/Agent/Telegram/Media.hs
new file mode 100644
index 0000000..47fbf91
--- /dev/null
+++ b/Omni/Agent/Telegram/Media.hs
@@ -0,0 +1,327 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Telegram Media Handling - File downloads, image analysis, voice transcription.
+--
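+-- Typical photo flow (sketch; cfg, photo, apiKey, and caption come from the caller):
+--
+-- > bytesResult <- downloadPhoto cfg photo
+-- > case bytesResult of
+-- >   Left err -> putText ("photo download failed: " <> err)
+-- >   Right bytes -> analyzeImage apiKey bytes caption >>= either putText putText
+--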
+-- : out omni-agent-telegram-media
+-- : dep aeson
+-- : dep http-conduit
+-- : dep base64-bytestring
+module Omni.Agent.Telegram.Media
+ ( -- * File Downloads
+ getFile,
+ downloadFile,
+ downloadFileBytes,
+ downloadPhoto,
+ downloadVoice,
+ downloadAndExtractPdf,
+
+ -- * Multimodal Processing
+ analyzeImage,
+ transcribeVoice,
+
+ -- * Size Limits
+ maxImageBytes,
+ maxVoiceBytes,
+ checkPhotoSize,
+ checkVoiceSize,
+
+ -- * HTTP Utilities
+ httpGetBytes,
+ httpPostJson,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.ByteString as BS
+import qualified Data.ByteString.Base64.Lazy as B64
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.CaseInsensitive as CI
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.Lazy as TL
+import qualified Data.Text.Lazy.Encoding as TLE
+import qualified Network.HTTP.Client as HTTPClient
+import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Agent.Telegram.Types as Types
+import qualified Omni.Agent.Tools.Pdf as Pdf
+import qualified Omni.Test as Test
+import System.Environment (lookupEnv)
+import System.IO (hClose)
+import System.IO.Temp (withSystemTempFile)
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Telegram.Media"
+ [ Test.unit "maxImageBytes is 10MB" <| do
+ maxImageBytes Test.@=? 10_000_000,
+ Test.unit "maxVoiceBytes is 20MB" <| do
+ maxVoiceBytes Test.@=? 20_000_000,
+ Test.unit "checkPhotoSize accepts small photos" <| do
+ let photo = Types.TelegramPhoto "id" 800 600 (Just 100_000)
+ checkPhotoSize photo Test.@=? Right (),
+ Test.unit "checkPhotoSize rejects large photos" <| do
+ let photo = Types.TelegramPhoto "id" 800 600 (Just 15_000_000)
+ case checkPhotoSize photo of
+ Left _ -> pure ()
+ Right _ -> Test.assertFailure "Expected rejection",
+ Test.unit "checkVoiceSize accepts small voice" <| do
+ let voice = Types.TelegramVoice "id" 60 (Just "audio/ogg") (Just 500_000)
+ checkVoiceSize voice Test.@=? Right (),
+ Test.unit "checkVoiceSize rejects large voice" <| do
+ let voice = Types.TelegramVoice "id" 60 (Just "audio/ogg") (Just 25_000_000)
+ case checkVoiceSize voice of
+ Left _ -> pure ()
+ Right _ -> Test.assertFailure "Expected rejection"
+ ]
+
+maxImageBytes :: Int
+maxImageBytes = 10_000_000
+
+maxVoiceBytes :: Int
+maxVoiceBytes = 20_000_000
+
+checkPhotoSize :: Types.TelegramPhoto -> Either Text ()
+checkPhotoSize photo =
+ case Types.tpFileSize photo of
+ Just size
+ | size > maxImageBytes ->
+ Left <| "image too large (" <> tshow (size `div` 1_000_000) <> "MB), max " <> tshow (maxImageBytes `div` 1_000_000) <> "MB"
+ _ -> Right ()
+
+checkVoiceSize :: Types.TelegramVoice -> Either Text ()
+checkVoiceSize voice =
+ case Types.tvFileSize voice of
+ Just size
+ | size > maxVoiceBytes ->
+ Left <| "voice message too large (" <> tshow (size `div` 1_000_000) <> "MB), max " <> tshow (maxVoiceBytes `div` 1_000_000) <> "MB"
+ _ -> Right ()
+
+httpGetBytes :: String -> IO (Either Text BL.ByteString)
+httpGetBytes url = do
+ result <-
+ try <| do
+ req <- HTTP.parseRequest url
+ resp <- HTTP.httpLBS req
+ let status = HTTP.getResponseStatusCode resp
+ if status >= 200 && status < 300
+ then pure (Right (HTTP.getResponseBody resp))
+ else pure (Left ("HTTP " <> tshow status))
+ case result of
+ Left (e :: SomeException) -> pure (Left ("HTTP error: " <> tshow e))
+ Right r -> pure r
+
+httpPostJson :: String -> [(ByteString, ByteString)] -> Aeson.Value -> Int -> IO (Either Text BL.ByteString)
+httpPostJson url extraHeaders body timeoutSecs = do
+ result <-
+ try <| do
+ req0 <- HTTP.parseRequest url
+ let baseReq =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| HTTP.setRequestResponseTimeout (HTTPClient.responseTimeoutMicro (timeoutSecs * 1000000))
+ <| req0
+ req = foldr addHeader baseReq extraHeaders
+ addHeader (name, value) = HTTP.addRequestHeader (CI.mk name) value
+ resp <- HTTP.httpLBS req
+ let status = HTTP.getResponseStatusCode resp
+ if status >= 200 && status < 300
+ then pure (Right (HTTP.getResponseBody resp))
+ else pure (Left ("HTTP " <> tshow status <> ": " <> shortBody resp))
+ case result of
+ Left (e :: SomeException) -> pure (Left ("HTTP error: " <> tshow e))
+ Right r -> pure r
+ where
+ shortBody r =
+ let b = BL.toStrict (HTTP.getResponseBody r)
+ in TE.decodeUtf8 (BS.take 200 b)
+
+getFile :: Types.TelegramConfig -> Text -> IO (Either Text Text)
+getFile cfg fileId = do
+ let url =
+ Text.unpack (Types.tgApiBaseUrl cfg)
+ <> "/bot"
+ <> Text.unpack (Types.tgBotToken cfg)
+ <> "/getFile?file_id="
+ <> Text.unpack fileId
+ result <- httpGetBytes url
+ case result of
+ Left err -> pure (Left err)
+ Right body ->
+ case Aeson.decode body of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "result" obj of
+ Just (Aeson.Object resultObj) -> case KeyMap.lookup "file_path" resultObj of
+ Just (Aeson.String path) -> pure (Right path)
+ _ -> pure (Left "No file_path in response")
+ _ -> pure (Left "No result in response")
+ _ -> pure (Left "Failed to parse getFile response")
+
+downloadFileBytes :: Types.TelegramConfig -> Text -> IO (Either Text BL.ByteString)
+downloadFileBytes cfg filePath = do
+ let url =
+ "https://api.telegram.org/file/bot"
+ <> Text.unpack (Types.tgBotToken cfg)
+ <> "/"
+ <> Text.unpack filePath
+ httpGetBytes url
+
+downloadFile :: Types.TelegramConfig -> Text -> FilePath -> IO (Either Text ())
+downloadFile cfg filePath destPath = do
+ result <- downloadFileBytes cfg filePath
+ case result of
+ Left err -> pure (Left err)
+ Right bytes -> do
+ BL.writeFile destPath bytes
+ pure (Right ())
+
+downloadPhoto :: Types.TelegramConfig -> Types.TelegramPhoto -> IO (Either Text BL.ByteString)
+downloadPhoto cfg photo = do
+ filePathResult <- getFile cfg (Types.tpFileId photo)
+ case filePathResult of
+ Left err -> pure (Left err)
+ Right filePath -> downloadFileBytes cfg filePath
+
+downloadVoice :: Types.TelegramConfig -> Types.TelegramVoice -> IO (Either Text BL.ByteString)
+downloadVoice cfg voice = do
+ filePathResult <- getFile cfg (Types.tvFileId voice)
+ case filePathResult of
+ Left err -> pure (Left err)
+ Right filePath -> downloadFileBytes cfg filePath
+
+downloadAndExtractPdf :: Types.TelegramConfig -> Text -> IO (Either Text Text)
+downloadAndExtractPdf cfg fileId = do
+ filePathResult <- getFile cfg fileId
+ case filePathResult of
+ Left err -> pure (Left err)
+ Right filePath ->
+ withSystemTempFile "telegram_pdf.pdf" <| \tmpPath tmpHandle -> do
+ hClose tmpHandle
+ downloadResult <- downloadFile cfg filePath tmpPath
+ case downloadResult of
+ Left err -> pure (Left err)
+ Right () -> Pdf.extractPdfText tmpPath
+
+parseOpenRouterResponse :: BL.ByteString -> Either Text Text
+parseOpenRouterResponse body =
+ case Aeson.decode body of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "choices" obj of
+ Just (Aeson.Array choices) | not (null choices) ->
+ case toList choices of
+ (Aeson.Object choice : _) -> case KeyMap.lookup "message" choice of
+ Just (Aeson.Object msg) -> case KeyMap.lookup "content" msg of
+ Just (Aeson.String content) -> Right content
+ Just Aeson.Null -> Left "No content in response"
+ _ -> Left "Unexpected content type in response"
+ _ -> Left "No message in choice"
+ _ -> Left "Empty choices array"
+ _ -> Left "No choices in response"
+ _ -> Left "Failed to parse response"
+
+analyzeImage :: Text -> BL.ByteString -> Text -> IO (Either Text Text)
+analyzeImage apiKey imageBytes userPrompt = do
+ let base64Data = TL.toStrict (TLE.decodeUtf8 (B64.encode imageBytes))
+ dataUrl = "data:image/jpeg;base64," <> base64Data
+ prompt =
+ if Text.null userPrompt
+ then "describe this image objectively in third person. do not use first person pronouns like 'I can see'. just describe what is shown."
+ else userPrompt <> "\n\n(describe objectively in third person, no first person pronouns)"
+ body =
+ Aeson.object
+ [ "model" .= ("anthropic/claude-sonnet-4.5" :: Text),
+ "messages"
+ .= [ Aeson.object
+ [ "role" .= ("user" :: Text),
+ "content"
+ .= [ Aeson.object
+ [ "type" .= ("text" :: Text),
+ "text" .= prompt
+ ],
+ Aeson.object
+ [ "type" .= ("image_url" :: Text),
+ "image_url"
+ .= Aeson.object
+ [ "url" .= dataUrl
+ ]
+ ]
+ ]
+ ]
+ ]
+ ]
+ headers =
+ [ ("Authorization", "Bearer " <> encodeUtf8 apiKey),
+ ("HTTP-Referer", "https://omni.dev"),
+ ("X-Title", "Omni Agent")
+ ]
+ result <- httpPostJson "https://openrouter.ai/api/v1/chat/completions" headers body 120
+ case result of
+ Left err -> pure (Left ("Vision API error: " <> err))
+ Right respBody -> pure (first ("Vision API: " <>) (parseOpenRouterResponse respBody))
+
+transcribeVoice :: Text -> BL.ByteString -> IO (Either Text Text)
+transcribeVoice _unusedApiKey audioBytes = do
+ maybeKey <- lookupEnv "OPENAI_API_KEY"
+ case maybeKey of
+ Nothing -> pure (Left "OPENAI_API_KEY not set - required for voice transcription")
+ Just key -> transcribeWithWhisper (Text.pack key) audioBytes
+
+transcribeWithWhisper :: Text -> BL.ByteString -> IO (Either Text Text)
+transcribeWithWhisper apiKey audioBytes = do
+ result <-
+ try <| do
+ req0 <- HTTP.parseRequest "https://api.openai.com/v1/audio/transcriptions"
+ let boundary = "----WebKitFormBoundary7MA4YWxkTrZu0gW"
+ body = buildMultipartBody boundary audioBytes
+ req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Authorization" ["Bearer " <> encodeUtf8 apiKey]
+ <| HTTP.setRequestHeader "Content-Type" ["multipart/form-data; boundary=" <> boundary]
+ <| HTTP.setRequestBodyLBS body
+ <| HTTP.setRequestResponseTimeout (HTTPClient.responseTimeoutMicro (120 * 1000000))
+ <| req0
+ resp <- HTTP.httpLBS req
+ let status = HTTP.getResponseStatusCode resp
+ if status >= 200 && status < 300
+ then pure (Right (HTTP.getResponseBody resp))
+ else pure (Left ("HTTP " <> tshow status <> ": " <> TL.toStrict (TLE.decodeUtf8 (BL.take 500 (HTTP.getResponseBody resp)))))
+ case result of
+ Left (e :: SomeException) -> pure (Left ("Whisper API error: " <> tshow e))
+ Right (Left err) -> pure (Left ("Whisper API error: " <> err))
+ Right (Right respBody) ->
+ case Aeson.decode respBody of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "text" obj of
+ Just (Aeson.String transcription) -> pure (Right transcription)
+ _ -> pure (Left "No 'text' field in Whisper response")
+ _ -> pure (Left "Failed to parse Whisper response")
+
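+-- | Build a minimal multipart/form-data body for the Whisper API: the OGG audio as the
+-- "file" field plus a "model" field set to whisper-1, using a fixed boundary.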
+buildMultipartBody :: ByteString -> BL.ByteString -> BL.ByteString
+buildMultipartBody boundary audioBytes =
+ BL.concat
+ [ "--",
+ BL.fromStrict boundary,
+ "\r\n",
+ "Content-Disposition: form-data; name=\"file\"; filename=\"audio.ogg\"\r\n",
+ "Content-Type: audio/ogg\r\n\r\n",
+ audioBytes,
+ "\r\n",
+ "--",
+ BL.fromStrict boundary,
+ "\r\n",
+ "Content-Disposition: form-data; name=\"model\"\r\n\r\n",
+ "whisper-1\r\n",
+ "--",
+ BL.fromStrict boundary,
+ "--\r\n"
+ ]
diff --git a/Omni/Agent/Telegram/Messages.hs b/Omni/Agent/Telegram/Messages.hs
new file mode 100644
index 0000000..eab9668
--- /dev/null
+++ b/Omni/Agent/Telegram/Messages.hs
@@ -0,0 +1,551 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Telegram Message Queue - Unified async message delivery.
+--
+-- All outbound Telegram messages go through this queue, enabling:
+-- - Immediate sends (sub-second latency via 1s polling)
+-- - Scheduled/delayed sends (up to 30 days)
+-- - Unified retry handling and error logging
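+--
+-- Typical callers only enqueue; 'messageDispatchLoop' handles delivery. Sketch
+-- (identifiers such as uid and chatId come from the caller):
+--
+-- > _ <- enqueueImmediate (Just uid) chatId Nothing "on it!" (Just "system") Nothing
+-- > _ <- enqueueDelayed (Just uid) chatId Nothing "reminder: standup" 3600 (Just "reminder") Nothing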
+--
+-- : out omni-agent-telegram-messages
+-- : dep aeson
+-- : dep sqlite-simple
+-- : dep uuid
+module Omni.Agent.Telegram.Messages
+ ( -- * Types
+ ScheduledMessage (..),
+ MessageStatus (..),
+
+ -- * Database
+ initScheduledMessagesTable,
+
+ -- * Queueing
+ queueMessage,
+ enqueueImmediate,
+ enqueueDelayed,
+
+ -- * Fetching
+ fetchDueMessages,
+ listPendingMessages,
+ getMessageById,
+
+ -- * Status Updates
+ markSending,
+ markSent,
+ markFailed,
+ cancelMessage,
+
+ -- * Dispatch Loop
+ messageDispatchLoop,
+
+ -- * Agent Tools
+ sendMessageTool,
+ listPendingMessagesTool,
+ cancelMessageTool,
+
+ -- * Constants
+ maxDelaySeconds,
+ maxRetries,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.Text as Text
+import Data.Time (NominalDiffTime, UTCTime, addUTCTime, getCurrentTime)
+import Data.Time.Format (defaultTimeLocale, formatTime)
+import qualified Data.UUID as UUID
+import qualified Data.UUID.V4 as UUID
+import qualified Database.SQLite.Simple as SQL
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Memory as Memory
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Telegram.Messages"
+ [ Test.unit "initScheduledMessagesTable is idempotent" <| do
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ initScheduledMessagesTable conn
+ pure (),
+ Test.unit "MessageStatus JSON roundtrip" <| do
+ let statuses = [Pending, Sending, Sent, Failed, Cancelled]
+ forM_ statuses <| \s ->
+ case Aeson.decode (Aeson.encode s) of
+ Nothing -> Test.assertFailure ("Failed to decode MessageStatus: " <> show s)
+ Just decoded -> decoded Test.@=? s,
+ Test.unit "maxDelaySeconds is 30 days" <| do
+ maxDelaySeconds Test.@=? (30 * 24 * 60 * 60)
+ ]
+
+data MessageStatus
+ = Pending
+ | Sending
+ | Sent
+ | Failed
+ | Cancelled
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON MessageStatus where
+ toJSON Pending = Aeson.String "pending"
+ toJSON Sending = Aeson.String "sending"
+ toJSON Sent = Aeson.String "sent"
+ toJSON Failed = Aeson.String "failed"
+ toJSON Cancelled = Aeson.String "cancelled"
+
+instance Aeson.FromJSON MessageStatus where
+ parseJSON = Aeson.withText "MessageStatus" parseStatus
+ where
+ parseStatus "pending" = pure Pending
+ parseStatus "sending" = pure Sending
+ parseStatus "sent" = pure Sent
+ parseStatus "failed" = pure Failed
+ parseStatus "cancelled" = pure Cancelled
+ parseStatus _ = empty
+
+textToStatus :: Text -> Maybe MessageStatus
+textToStatus "pending" = Just Pending
+textToStatus "sending" = Just Sending
+textToStatus "sent" = Just Sent
+textToStatus "failed" = Just Failed
+textToStatus "cancelled" = Just Cancelled
+textToStatus _ = Nothing
+
+data ScheduledMessage = ScheduledMessage
+ { smId :: Text,
+ smUserId :: Maybe Text,
+ smChatId :: Int,
+ smThreadId :: Maybe Int,
+ smContent :: Text,
+ smSendAt :: UTCTime,
+ smCreatedAt :: UTCTime,
+ smStatus :: MessageStatus,
+ smRetryCount :: Int,
+ smLastAttemptAt :: Maybe UTCTime,
+ smLastError :: Maybe Text,
+ smMessageType :: Maybe Text,
+ smCorrelationId :: Maybe Text,
+ smTelegramMessageId :: Maybe Int
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON ScheduledMessage where
+ toJSON m =
+ Aeson.object
+ [ "id" .= smId m,
+ "user_id" .= smUserId m,
+ "chat_id" .= smChatId m,
+ "thread_id" .= smThreadId m,
+ "content" .= smContent m,
+ "send_at" .= smSendAt m,
+ "created_at" .= smCreatedAt m,
+ "status" .= smStatus m,
+ "retry_count" .= smRetryCount m,
+ "last_attempt_at" .= smLastAttemptAt m,
+ "last_error" .= smLastError m,
+ "message_type" .= smMessageType m,
+ "correlation_id" .= smCorrelationId m,
+ "telegram_message_id" .= smTelegramMessageId m
+ ]
+
+instance SQL.FromRow ScheduledMessage where
+ fromRow = do
+ id' <- SQL.field
+ userId <- SQL.field
+ chatId <- SQL.field
+ threadId <- SQL.field
+ content <- SQL.field
+ sendAt <- SQL.field
+ createdAt <- SQL.field
+ statusText <- SQL.field
+ retryCount <- SQL.field
+ lastAttemptAt <- SQL.field
+ lastError <- SQL.field
+ messageType <- SQL.field
+ correlationId <- SQL.field
+ telegramMessageId <- SQL.field
+ let status = fromMaybe Pending (textToStatus (statusText :: Text))
+ pure
+ ScheduledMessage
+ { smId = id',
+ smUserId = userId,
+ smChatId = chatId,
+ smThreadId = threadId,
+ smContent = content,
+ smSendAt = sendAt,
+ smCreatedAt = createdAt,
+ smStatus = status,
+ smRetryCount = retryCount,
+ smLastAttemptAt = lastAttemptAt,
+ smLastError = lastError,
+ smMessageType = messageType,
+ smCorrelationId = correlationId,
+ smTelegramMessageId = telegramMessageId
+ }
+
+maxDelaySeconds :: Int
+maxDelaySeconds = 30 * 24 * 60 * 60
+
+maxRetries :: Int
+maxRetries = 5
+
+initScheduledMessagesTable :: SQL.Connection -> IO ()
+initScheduledMessagesTable conn = do
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS scheduled_messages (\
+ \ id TEXT PRIMARY KEY,\
+ \ user_id TEXT,\
+ \ chat_id INTEGER NOT NULL,\
+ \ thread_id INTEGER,\
+ \ content TEXT NOT NULL,\
+ \ send_at TIMESTAMP NOT NULL,\
+ \ created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\
+ \ status TEXT NOT NULL DEFAULT 'pending',\
+ \ retry_count INTEGER NOT NULL DEFAULT 0,\
+ \ last_attempt_at TIMESTAMP,\
+ \ last_error TEXT,\
+ \ message_type TEXT,\
+ \ correlation_id TEXT,\
+ \ telegram_message_id INTEGER\
+ \)"
+ migrateAddThreadId conn
+
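+-- | Best-effort migration: the ALTER TABLE fails when the thread_id column
+-- already exists, and that error is deliberately swallowed.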
+migrateAddThreadId :: SQL.Connection -> IO ()
+migrateAddThreadId conn = do
+ result <- try @SomeException <| SQL.execute_ conn "ALTER TABLE scheduled_messages ADD COLUMN thread_id INTEGER"
+ case result of
+ Left _ -> pure ()
+ Right () -> pure ()
+
+queueMessage ::
+ Maybe Text ->
+ Int ->
+ Maybe Int ->
+ Text ->
+ UTCTime ->
+ Maybe Text ->
+ Maybe Text ->
+ IO Text
+queueMessage mUserId chatId mThreadId content sendAt msgType correlationId = do
+ uuid <- UUID.nextRandom
+ now <- getCurrentTime
+ let msgId = UUID.toText uuid
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ SQL.execute
+ conn
+ "INSERT INTO scheduled_messages \
+ \(id, user_id, chat_id, thread_id, content, send_at, created_at, status, retry_count, message_type, correlation_id) \
+ \VALUES (?, ?, ?, ?, ?, ?, ?, 'pending', 0, ?, ?)"
+ (msgId, mUserId, chatId, mThreadId, content, sendAt, now, msgType, correlationId)
+ pure msgId
+
+enqueueImmediate ::
+ Maybe Text ->
+ Int ->
+ Maybe Int ->
+ Text ->
+ Maybe Text ->
+ Maybe Text ->
+ IO Text
+enqueueImmediate mUserId chatId mThreadId content msgType correlationId = do
+ now <- getCurrentTime
+ queueMessage mUserId chatId mThreadId content now msgType correlationId
+
+enqueueDelayed ::
+ Maybe Text ->
+ Int ->
+ Maybe Int ->
+ Text ->
+ NominalDiffTime ->
+ Maybe Text ->
+ Maybe Text ->
+ IO Text
+enqueueDelayed mUserId chatId mThreadId content delay msgType correlationId = do
+ now <- getCurrentTime
+ let sendAt = addUTCTime delay now
+ queueMessage mUserId chatId mThreadId content sendAt msgType correlationId
+
+fetchDueMessages :: UTCTime -> Int -> IO [ScheduledMessage]
+fetchDueMessages now batchSize =
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ SQL.query
+ conn
+ "SELECT id, user_id, chat_id, thread_id, content, send_at, created_at, status, \
+ \retry_count, last_attempt_at, last_error, message_type, correlation_id, telegram_message_id \
+ \FROM scheduled_messages \
+ \WHERE status = 'pending' AND send_at <= ? \
+ \ORDER BY send_at ASC \
+ \LIMIT ?"
+ (now, batchSize)
+
+listPendingMessages :: Maybe Text -> Int -> IO [ScheduledMessage]
+listPendingMessages mUserId chatId =
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ case mUserId of
+ Just uid ->
+ SQL.query
+ conn
+ "SELECT id, user_id, chat_id, thread_id, content, send_at, created_at, status, \
+ \retry_count, last_attempt_at, last_error, message_type, correlation_id, telegram_message_id \
+ \FROM scheduled_messages \
+ \WHERE user_id = ? AND chat_id = ? AND status = 'pending' AND send_at > datetime('now') \
+ \ORDER BY send_at ASC"
+ (uid, chatId)
+ Nothing ->
+ SQL.query
+ conn
+ "SELECT id, user_id, chat_id, thread_id, content, send_at, created_at, status, \
+ \retry_count, last_attempt_at, last_error, message_type, correlation_id, telegram_message_id \
+ \FROM scheduled_messages \
+ \WHERE chat_id = ? AND status = 'pending' AND send_at > datetime('now') \
+ \ORDER BY send_at ASC"
+ (SQL.Only chatId)
+
+getMessageById :: Text -> IO (Maybe ScheduledMessage)
+getMessageById msgId =
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ results <-
+ SQL.query
+ conn
+ "SELECT id, user_id, chat_id, thread_id, content, send_at, created_at, status, \
+ \retry_count, last_attempt_at, last_error, message_type, correlation_id, telegram_message_id \
+ \FROM scheduled_messages \
+ \WHERE id = ?"
+ (SQL.Only msgId)
+ pure (listToMaybe results)
+
+markSending :: Text -> UTCTime -> IO ()
+markSending msgId now =
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ SQL.execute
+ conn
+ "UPDATE scheduled_messages SET status = 'sending', last_attempt_at = ? WHERE id = ?"
+ (now, msgId)
+
+markSent :: Text -> Maybe Int -> UTCTime -> IO ()
+markSent msgId telegramMsgId now =
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ SQL.execute
+ conn
+ "UPDATE scheduled_messages SET status = 'sent', telegram_message_id = ?, last_attempt_at = ? WHERE id = ?"
+ (telegramMsgId, now, msgId)
+
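+-- | Record a failed send. Below 'maxRetries' the message is re-queued with
+-- exponential backoff (2 ^ retry_count seconds: 1s, 2s, 4s, 8s, 16s); once
+-- the cap is reached it is marked permanently failed.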
+markFailed :: Text -> UTCTime -> Text -> IO ()
+markFailed msgId now errorMsg =
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ results <-
+ SQL.query
+ conn
+ "SELECT retry_count FROM scheduled_messages WHERE id = ?"
+ (SQL.Only msgId) ::
+ IO [SQL.Only Int]
+ case results of
+ [SQL.Only retryCount] ->
+ if retryCount < maxRetries
+ then do
+ let backoffSeconds = 2 ^ retryCount :: Int
+ nextAttempt = addUTCTime (fromIntegral backoffSeconds) now
+ SQL.execute
+ conn
+ "UPDATE scheduled_messages SET \
+ \status = 'pending', \
+ \retry_count = retry_count + 1, \
+ \last_attempt_at = ?, \
+ \last_error = ?, \
+ \send_at = ? \
+ \WHERE id = ?"
+ (now, errorMsg, nextAttempt, msgId)
+ putText <| "Message " <> msgId <> " failed, retry " <> tshow (retryCount + 1) <> " in " <> tshow backoffSeconds <> "s"
+ else do
+ SQL.execute
+ conn
+ "UPDATE scheduled_messages SET status = 'failed', last_attempt_at = ?, last_error = ? WHERE id = ?"
+ (now, errorMsg, msgId)
+ putText <| "Message " <> msgId <> " permanently failed after " <> tshow maxRetries <> " retries"
+ _ -> pure ()
+
+cancelMessage :: Text -> IO Bool
+cancelMessage msgId =
+ Memory.withMemoryDb <| \conn -> do
+ initScheduledMessagesTable conn
+ SQL.execute
+ conn
+ "UPDATE scheduled_messages SET status = 'cancelled' WHERE id = ? AND status = 'pending'"
+ (SQL.Only msgId)
+ changes <- SQL.changes conn
+ pure (changes > 0)
+
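+-- | Poll for due messages (roughly once per second) and hand each one to the
+-- supplied sender, recording success or failure. A wiring sketch with a
+-- stand-in sender (the real Telegram sender lives elsewhere):
+--
+-- > messageDispatchLoop <| \chatId _thread text -> do
+-- >   putText ("send to " <> tshow chatId <> ": " <> text)
+-- >   pure Nothing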
+messageDispatchLoop :: (Int -> Maybe Int -> Text -> IO (Maybe Int)) -> IO ()
+messageDispatchLoop sendFn =
+ forever <| do
+ now <- getCurrentTime
+ due <- fetchDueMessages now 10
+ if null due
+ then threadDelay 1000000
+ else do
+ forM_ due <| \m -> dispatchOne sendFn m
+ when (length due < 10) <| threadDelay 1000000
+
+dispatchOne :: (Int -> Maybe Int -> Text -> IO (Maybe Int)) -> ScheduledMessage -> IO ()
+dispatchOne sendFn m = do
+ now <- getCurrentTime
+ markSending (smId m) now
+ result <- try (sendFn (smChatId m) (smThreadId m) (smContent m))
+ case result of
+ Left (e :: SomeException) -> do
+ let err = "Exception sending Telegram message: " <> tshow e
+ markFailed (smId m) now err
+ Right Nothing -> do
+ now' <- getCurrentTime
+ markSent (smId m) Nothing now'
+ putText <| "Sent message " <> smId m <> " (no message_id returned)"
+ Right (Just telegramMsgId) -> do
+ now' <- getCurrentTime
+ markSent (smId m) (Just telegramMsgId) now'
+ putText <| "Sent message " <> smId m <> " -> telegram_id " <> tshow telegramMsgId
+
+sendMessageTool :: Text -> Int -> Maybe Int -> Engine.Tool
+sendMessageTool uid chatId mThreadId =
+ Engine.Tool
+ { Engine.toolName = "send_message",
+ Engine.toolDescription =
+ "Send a message to the user, optionally delayed. Use for reminders, follow-ups, or multi-part responses. "
+ <> "delay_seconds=0 sends immediately; max delay is 30 days (2592000 seconds). "
+ <> "Returns a message_id you can use to cancel the message before it's sent.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "text"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The message text to send (Telegram basic markdown supported)" :: Text)
+ ],
+ "delay_seconds"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "minimum" .= (0 :: Int),
+ "maximum" .= maxDelaySeconds,
+ "description" .= ("Seconds to wait before sending (0 or omit for immediate)" :: Text)
+ ]
+ ],
+ "required" .= (["text"] :: [Text])
+ ],
+ Engine.toolExecute = \argsVal -> do
+ case argsVal of
+ Aeson.Object obj -> do
+ let textM = case KeyMap.lookup "text" obj of
+ Just (Aeson.String t) -> Just t
+ _ -> Nothing
+ delaySeconds = case KeyMap.lookup "delay_seconds" obj of
+ Just (Aeson.Number n) -> Just (round n :: Int)
+ _ -> Nothing
+ case textM of
+ Nothing ->
+ pure <| Aeson.object ["status" .= ("error" :: Text), "error" .= ("missing 'text' field" :: Text)]
+ Just text -> do
+ let delay = fromIntegral (fromMaybe 0 delaySeconds)
+ now <- getCurrentTime
+ let sendAt = addUTCTime delay now
+ msgId <- queueMessage (Just uid) chatId mThreadId text sendAt (Just "agent_tool") Nothing
+ pure
+ <| Aeson.object
+ [ "status" .= ("queued" :: Text),
+ "message_id" .= msgId,
+ "scheduled_for" .= formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%SZ" sendAt,
+ "delay_seconds" .= fromMaybe 0 delaySeconds
+ ]
+ _ ->
+ pure <| Aeson.object ["status" .= ("error" :: Text), "error" .= ("invalid arguments" :: Text)]
+ }
+
+listPendingMessagesTool :: Text -> Int -> Engine.Tool
+listPendingMessagesTool uid chatId =
+ Engine.Tool
+ { Engine.toolName = "list_pending_messages",
+ Engine.toolDescription =
+ "List all pending scheduled messages that haven't been sent yet. "
+ <> "Shows message_id, content preview, and scheduled send time.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties" .= Aeson.object []
+ ],
+ Engine.toolExecute = \_ -> do
+ msgs <- listPendingMessages (Just uid) chatId
+ let formatted =
+ [ Aeson.object
+ [ "message_id" .= smId m,
+ "content_preview" .= Text.take 50 (smContent m),
+ "scheduled_for" .= formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%SZ" (smSendAt m),
+ "message_type" .= smMessageType m
+ ]
+ | m <- msgs
+ ]
+ pure
+ <| Aeson.object
+ [ "status" .= ("ok" :: Text),
+ "count" .= length msgs,
+ "messages" .= formatted
+ ]
+ }
+
+cancelMessageTool :: Engine.Tool
+cancelMessageTool =
+ Engine.Tool
+ { Engine.toolName = "cancel_message",
+ Engine.toolDescription =
+ "Cancel a pending scheduled message by its message_id. "
+ <> "Only works for messages that haven't been sent yet.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "message_id"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The message_id returned by send_message" :: Text)
+ ]
+ ],
+ "required" .= (["message_id"] :: [Text])
+ ],
+ Engine.toolExecute = \argsVal -> do
+ case argsVal of
+ Aeson.Object obj -> do
+ let msgIdM = case KeyMap.lookup "message_id" obj of
+ Just (Aeson.String t) -> Just t
+ _ -> Nothing
+ case msgIdM of
+ Nothing ->
+ pure <| Aeson.object ["status" .= ("error" :: Text), "error" .= ("missing 'message_id' field" :: Text)]
+ Just msgId -> do
+ success <- cancelMessage msgId
+ if success
+ then pure <| Aeson.object ["status" .= ("cancelled" :: Text), "message_id" .= msgId]
+ else pure <| Aeson.object ["status" .= ("not_found" :: Text), "message_id" .= msgId, "error" .= ("message not found or already sent" :: Text)]
+ _ ->
+ pure <| Aeson.object ["status" .= ("error" :: Text), "error" .= ("invalid arguments" :: Text)]
+ }
diff --git a/Omni/Agent/Telegram/Reminders.hs b/Omni/Agent/Telegram/Reminders.hs
new file mode 100644
index 0000000..88aab0a
--- /dev/null
+++ b/Omni/Agent/Telegram/Reminders.hs
@@ -0,0 +1,108 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Telegram Reminders - Background reminder loop and user chat persistence.
+--
+-- : out omni-agent-telegram-reminders
+-- : dep sqlite-simple
+module Omni.Agent.Telegram.Reminders
+ ( -- * User Chat Persistence
+ initUserChatsTable,
+ recordUserChat,
+ lookupChatId,
+
+ -- * Reminder Loop
+ reminderLoop,
+ checkAndSendReminders,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Time (getCurrentTime)
+import qualified Database.SQLite.Simple as SQL
+import qualified Omni.Agent.Memory as Memory
+import qualified Omni.Agent.Telegram.Messages as Messages
+import qualified Omni.Agent.Tools.Todos as Todos
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Telegram.Reminders"
+ [ Test.unit "initUserChatsTable is idempotent" <| do
+ Memory.withMemoryDb <| \conn -> do
+ initUserChatsTable conn
+ initUserChatsTable conn
+ pure ()
+ ]
+
+initUserChatsTable :: SQL.Connection -> IO ()
+initUserChatsTable conn =
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS user_chats (\
+ \ user_id TEXT PRIMARY KEY,\
+ \ chat_id INTEGER NOT NULL,\
+ \ last_seen_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\
+ \)"
+
+recordUserChat :: Text -> Int -> IO ()
+recordUserChat uid chatId = do
+ now <- getCurrentTime
+ Memory.withMemoryDb <| \conn -> do
+ initUserChatsTable conn
+ SQL.execute
+ conn
+ "INSERT INTO user_chats (user_id, chat_id, last_seen_at) \
+ \VALUES (?, ?, ?) \
+ \ON CONFLICT(user_id) DO UPDATE SET \
+ \ chat_id = excluded.chat_id, \
+ \ last_seen_at = excluded.last_seen_at"
+ (uid, chatId, now)
+
+lookupChatId :: Text -> IO (Maybe Int)
+lookupChatId uid =
+ Memory.withMemoryDb <| \conn -> do
+ initUserChatsTable conn
+ rows <-
+ SQL.query
+ conn
+ "SELECT chat_id FROM user_chats WHERE user_id = ?"
+ (SQL.Only uid)
+ pure (listToMaybe (map SQL.fromOnly rows))
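+
+-- Usage sketch (the ids are made up): record the chat whenever the user
+-- messages the bot, then look it up when a reminder needs a destination.
+--
+-- > recordUserChat "user-42" 987654
+-- > mChat <- lookupChatId "user-42"   -- Just 987654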
+
+reminderLoop :: IO ()
+reminderLoop =
+ forever <| do
+ threadDelay (5 * 60 * 1000000)
+ checkAndSendReminders
+
+checkAndSendReminders :: IO ()
+checkAndSendReminders = do
+ todos <- Todos.listTodosDueForReminder
+ forM_ todos <| \td -> do
+ mChatId <- lookupChatId (Todos.todoUserId td)
+ case mChatId of
+ Nothing -> pure ()
+ Just chatId -> do
+ let title = Todos.todoTitle td
+ uid = Todos.todoUserId td
+ dueStr = case Todos.todoDueDate td of
+ Just d -> " (due: " <> tshow d <> ")"
+ Nothing -> ""
+ msg =
+ "⏰ reminder: \""
+ <> title
+ <> "\""
+ <> dueStr
+ <> "\nreply when you finish and i'll mark it complete."
+ _ <- Messages.enqueueImmediate (Just uid) chatId Nothing msg (Just "reminder") Nothing
+ Todos.markReminderSent (Todos.todoId td)
+ putText <| "Queued reminder for todo " <> tshow (Todos.todoId td) <> " to chat " <> tshow chatId
diff --git a/Omni/Agent/Telegram/Types.hs b/Omni/Agent/Telegram/Types.hs
new file mode 100644
index 0000000..7a91df3
--- /dev/null
+++ b/Omni/Agent/Telegram/Types.hs
@@ -0,0 +1,654 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Telegram Bot Types - Data types and JSON parsing for Telegram API.
+--
+-- : out omni-agent-telegram-types
+-- : dep aeson
+module Omni.Agent.Telegram.Types
+ ( -- * Configuration
+ TelegramConfig (..),
+ defaultTelegramConfig,
+ isUserAllowed,
+
+ -- * Message Types
+ TelegramMessage (..),
+ TelegramUpdate (..),
+ TelegramDocument (..),
+ TelegramPhoto (..),
+ TelegramVoice (..),
+ TelegramReplyMessage (..),
+ BotAddedToGroup (..),
+ ChatType (..),
+
+ -- * Parsing
+ parseUpdate,
+ parseBotAddedToGroup,
+ parseDocument,
+ parseLargestPhoto,
+ parsePhotoSize,
+ parseVoice,
+ parseReplyMessage,
+
+ -- * Utilities
+ isPdf,
+ isSupportedVoiceFormat,
+ isGroupChat,
+ shouldRespondInGroup,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.Text as Text
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Telegram.Types"
+ [ Test.unit "TelegramConfig JSON roundtrip" <| do
+ let cfg =
+ TelegramConfig
+ { tgBotToken = "test-token",
+ tgPollingTimeout = 30,
+ tgApiBaseUrl = "https://api.telegram.org",
+ tgAllowedUserIds = [123, 456],
+ tgKagiApiKey = Just "kagi-key",
+ tgOpenRouterApiKey = "or-key"
+ }
+ case Aeson.decode (Aeson.encode cfg) of
+ Nothing -> Test.assertFailure "Failed to decode TelegramConfig"
+ Just decoded -> do
+ tgBotToken decoded Test.@=? "test-token"
+ tgAllowedUserIds decoded Test.@=? [123, 456]
+ tgKagiApiKey decoded Test.@=? Just "kagi-key",
+ Test.unit "isUserAllowed checks whitelist" <| do
+ let cfg = defaultTelegramConfig "token" [100, 200, 300] Nothing "key"
+ isUserAllowed cfg 100 Test.@=? True
+ isUserAllowed cfg 200 Test.@=? True
+ isUserAllowed cfg 999 Test.@=? False,
+ Test.unit "isUserAllowed allows all when empty" <| do
+ let cfg = defaultTelegramConfig "token" [] Nothing "key"
+ isUserAllowed cfg 12345 Test.@=? True,
+ Test.unit "TelegramMessage JSON roundtrip" <| do
+ let msg =
+ TelegramMessage
+ { tmUpdateId = 123,
+ tmChatId = 456,
+ tmChatType = Private,
+ tmUserId = 789,
+ tmUserFirstName = "Test",
+ tmUserLastName = Just "User",
+ tmText = "Hello bot",
+ tmDocument = Nothing,
+ tmPhoto = Nothing,
+ tmVoice = Nothing,
+ tmReplyTo = Nothing,
+ tmThreadId = Nothing
+ }
+ case Aeson.decode (Aeson.encode msg) of
+ Nothing -> Test.assertFailure "Failed to decode TelegramMessage"
+ Just decoded -> do
+ tmUpdateId decoded Test.@=? 123
+ tmText decoded Test.@=? "Hello bot",
+ Test.unit "parseUpdate extracts message correctly" <| do
+ let json =
+ Aeson.object
+ [ "update_id" .= (123 :: Int),
+ "message"
+ .= Aeson.object
+ [ "message_id" .= (1 :: Int),
+ "chat" .= Aeson.object ["id" .= (456 :: Int)],
+ "from"
+ .= Aeson.object
+ [ "id" .= (789 :: Int),
+ "first_name" .= ("Test" :: Text)
+ ],
+ "text" .= ("Hello" :: Text)
+ ]
+ ]
+ case parseUpdate json of
+ Nothing -> Test.assertFailure "Failed to parse update"
+ Just msg -> do
+ tmUpdateId msg Test.@=? 123
+ tmChatId msg Test.@=? 456
+ tmUserId msg Test.@=? 789
+ tmText msg Test.@=? "Hello"
+ tmDocument msg Test.@=? Nothing,
+ Test.unit "parseUpdate extracts document correctly" <| do
+ let json =
+ Aeson.object
+ [ "update_id" .= (124 :: Int),
+ "message"
+ .= Aeson.object
+ [ "message_id" .= (2 :: Int),
+ "chat" .= Aeson.object ["id" .= (456 :: Int)],
+ "from"
+ .= Aeson.object
+ [ "id" .= (789 :: Int),
+ "first_name" .= ("Test" :: Text)
+ ],
+ "caption" .= ("check this out" :: Text),
+ "document"
+ .= Aeson.object
+ [ "file_id" .= ("abc123" :: Text),
+ "file_name" .= ("test.pdf" :: Text),
+ "mime_type" .= ("application/pdf" :: Text),
+ "file_size" .= (12345 :: Int)
+ ]
+ ]
+ ]
+ case parseUpdate json of
+ Nothing -> Test.assertFailure "Failed to parse document update"
+ Just msg -> do
+ tmUpdateId msg Test.@=? 124
+ tmText msg Test.@=? "check this out"
+ case tmDocument msg of
+ Nothing -> Test.assertFailure "Expected document"
+ Just doc -> do
+ tdFileId doc Test.@=? "abc123"
+ tdFileName doc Test.@=? Just "test.pdf"
+ tdMimeType doc Test.@=? Just "application/pdf",
+ Test.unit "isPdf detects PDFs by mime type" <| do
+ let doc = TelegramDocument "id" (Just "doc.pdf") (Just "application/pdf") Nothing
+ isPdf doc Test.@=? True,
+ Test.unit "isPdf detects PDFs by filename" <| do
+ let doc = TelegramDocument "id" (Just "report.PDF") Nothing Nothing
+ isPdf doc Test.@=? True,
+ Test.unit "isPdf rejects non-PDFs" <| do
+ let doc = TelegramDocument "id" (Just "image.jpg") (Just "image/jpeg") Nothing
+ isPdf doc Test.@=? False,
+ Test.unit "isSupportedVoiceFormat accepts ogg" <| do
+ let voice = TelegramVoice "id" 10 (Just "audio/ogg") Nothing
+ isSupportedVoiceFormat voice Test.@=? True,
+ Test.unit "isSupportedVoiceFormat accepts opus" <| do
+ let voice = TelegramVoice "id" 10 (Just "audio/opus") Nothing
+ isSupportedVoiceFormat voice Test.@=? True,
+ Test.unit "isSupportedVoiceFormat defaults to True for unknown" <| do
+ let voice = TelegramVoice "id" 10 Nothing Nothing
+ isSupportedVoiceFormat voice Test.@=? True
+ ]
+
+data TelegramConfig = TelegramConfig
+ { tgBotToken :: Text,
+ tgPollingTimeout :: Int,
+ tgApiBaseUrl :: Text,
+ tgAllowedUserIds :: [Int],
+ tgKagiApiKey :: Maybe Text,
+ tgOpenRouterApiKey :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON TelegramConfig where
+ toJSON c =
+ Aeson.object
+ [ "bot_token" .= tgBotToken c,
+ "polling_timeout" .= tgPollingTimeout c,
+ "api_base_url" .= tgApiBaseUrl c,
+ "allowed_user_ids" .= tgAllowedUserIds c,
+ "kagi_api_key" .= tgKagiApiKey c,
+ "openrouter_api_key" .= tgOpenRouterApiKey c
+ ]
+
+instance Aeson.FromJSON TelegramConfig where
+ parseJSON =
+ Aeson.withObject "TelegramConfig" <| \v ->
+ (TelegramConfig </ (v .: "bot_token"))
+ <*> (v .:? "polling_timeout" .!= 30)
+ <*> (v .:? "api_base_url" .!= "https://api.telegram.org")
+ <*> (v .:? "allowed_user_ids" .!= [])
+ <*> (v .:? "kagi_api_key")
+ <*> (v .: "openrouter_api_key")
+
+defaultTelegramConfig :: Text -> [Int] -> Maybe Text -> Text -> TelegramConfig
+defaultTelegramConfig token allowedIds kagiKey openRouterKey =
+ TelegramConfig
+ { tgBotToken = token,
+ tgPollingTimeout = 30,
+ tgApiBaseUrl = "https://api.telegram.org",
+ tgAllowedUserIds = allowedIds,
+ tgKagiApiKey = kagiKey,
+ tgOpenRouterApiKey = openRouterKey
+ }
+
+isUserAllowed :: TelegramConfig -> Int -> Bool
+isUserAllowed cfg usrId =
+ null (tgAllowedUserIds cfg) || usrId `elem` tgAllowedUserIds cfg
+
+data TelegramDocument = TelegramDocument
+ { tdFileId :: Text,
+ tdFileName :: Maybe Text,
+ tdMimeType :: Maybe Text,
+ tdFileSize :: Maybe Int
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON TelegramDocument where
+ toJSON d =
+ Aeson.object
+ [ "file_id" .= tdFileId d,
+ "file_name" .= tdFileName d,
+ "mime_type" .= tdMimeType d,
+ "file_size" .= tdFileSize d
+ ]
+
+instance Aeson.FromJSON TelegramDocument where
+ parseJSON =
+ Aeson.withObject "TelegramDocument" <| \v ->
+ (TelegramDocument </ (v .: "file_id"))
+ <*> (v .:? "file_name")
+ <*> (v .:? "mime_type")
+ <*> (v .:? "file_size")
+
+data TelegramPhoto = TelegramPhoto
+ { tpFileId :: Text,
+ tpWidth :: Int,
+ tpHeight :: Int,
+ tpFileSize :: Maybe Int
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON TelegramPhoto where
+ toJSON p =
+ Aeson.object
+ [ "file_id" .= tpFileId p,
+ "width" .= tpWidth p,
+ "height" .= tpHeight p,
+ "file_size" .= tpFileSize p
+ ]
+
+instance Aeson.FromJSON TelegramPhoto where
+ parseJSON =
+ Aeson.withObject "TelegramPhoto" <| \v ->
+ (TelegramPhoto </ (v .: "file_id"))
+ <*> (v .: "width")
+ <*> (v .: "height")
+ <*> (v .:? "file_size")
+
+data TelegramVoice = TelegramVoice
+ { tvFileId :: Text,
+ tvDuration :: Int,
+ tvMimeType :: Maybe Text,
+ tvFileSize :: Maybe Int
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON TelegramVoice where
+ toJSON v =
+ Aeson.object
+ [ "file_id" .= tvFileId v,
+ "duration" .= tvDuration v,
+ "mime_type" .= tvMimeType v,
+ "file_size" .= tvFileSize v
+ ]
+
+instance Aeson.FromJSON TelegramVoice where
+ parseJSON =
+ Aeson.withObject "TelegramVoice" <| \v ->
+ (TelegramVoice </ (v .: "file_id"))
+ <*> (v .: "duration")
+ <*> (v .:? "mime_type")
+ <*> (v .:? "file_size")
+
+data TelegramReplyMessage = TelegramReplyMessage
+ { trMessageId :: Int,
+ trFromFirstName :: Maybe Text,
+ trFromLastName :: Maybe Text,
+ trText :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON TelegramReplyMessage where
+ toJSON r =
+ Aeson.object
+ [ "message_id" .= trMessageId r,
+ "from_first_name" .= trFromFirstName r,
+ "from_last_name" .= trFromLastName r,
+ "text" .= trText r
+ ]
+
+instance Aeson.FromJSON TelegramReplyMessage where
+ parseJSON =
+ Aeson.withObject "TelegramReplyMessage" <| \v ->
+ (TelegramReplyMessage </ (v .: "message_id"))
+ <*> (v .:? "from_first_name")
+ <*> (v .:? "from_last_name")
+ <*> (v .:? "text" .!= "")
+
+data BotAddedToGroup = BotAddedToGroup
+ { bagUpdateId :: Int,
+ bagChatId :: Int,
+ bagAddedByUserId :: Int,
+ bagAddedByFirstName :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+data ChatType = Private | Group | Supergroup | Channel
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON ChatType where
+ toJSON Private = Aeson.String "private"
+ toJSON Group = Aeson.String "group"
+ toJSON Supergroup = Aeson.String "supergroup"
+ toJSON Channel = Aeson.String "channel"
+
+instance Aeson.FromJSON ChatType where
+ parseJSON = Aeson.withText "ChatType" parseChatType
+ where
+ parseChatType "private" = pure Private
+ parseChatType "group" = pure Group
+ parseChatType "supergroup" = pure Supergroup
+ parseChatType "channel" = pure Channel
+ parseChatType _ = pure Private
+
+data TelegramMessage = TelegramMessage
+ { tmUpdateId :: Int,
+ tmChatId :: Int,
+ tmChatType :: ChatType,
+ tmThreadId :: Maybe Int,
+ tmUserId :: Int,
+ tmUserFirstName :: Text,
+ tmUserLastName :: Maybe Text,
+ tmText :: Text,
+ tmDocument :: Maybe TelegramDocument,
+ tmPhoto :: Maybe TelegramPhoto,
+ tmVoice :: Maybe TelegramVoice,
+ tmReplyTo :: Maybe TelegramReplyMessage
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON TelegramMessage where
+ toJSON m =
+ Aeson.object
+ [ "update_id" .= tmUpdateId m,
+ "chat_id" .= tmChatId m,
+ "chat_type" .= tmChatType m,
+ "thread_id" .= tmThreadId m,
+ "user_id" .= tmUserId m,
+ "user_first_name" .= tmUserFirstName m,
+ "user_last_name" .= tmUserLastName m,
+ "text" .= tmText m,
+ "document" .= tmDocument m,
+ "photo" .= tmPhoto m,
+ "voice" .= tmVoice m,
+ "reply_to" .= tmReplyTo m
+ ]
+
+instance Aeson.FromJSON TelegramMessage where
+ parseJSON =
+ Aeson.withObject "TelegramMessage" <| \v ->
+ (TelegramMessage </ (v .: "update_id"))
+ <*> (v .: "chat_id")
+ <*> (v .:? "chat_type" .!= Private)
+ <*> (v .:? "thread_id")
+ <*> (v .: "user_id")
+ <*> (v .: "user_first_name")
+ <*> (v .:? "user_last_name")
+ <*> (v .: "text")
+ <*> (v .:? "document")
+ <*> (v .:? "photo")
+ <*> (v .:? "voice")
+ <*> (v .:? "reply_to")
+
+data TelegramUpdate = TelegramUpdate
+ { tuUpdateId :: Int,
+ tuMessage :: Maybe Aeson.Value
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON TelegramUpdate where
+ parseJSON =
+ Aeson.withObject "TelegramUpdate" <| \v ->
+ (TelegramUpdate </ (v .: "update_id"))
+ <*> (v .:? "message")
+
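+-- | Extract a normalized 'TelegramMessage' from a raw getUpdates payload.
+-- A caption stands in for missing text, and updates with no text, caption,
+-- document, photo, or voice yield Nothing.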
+parseUpdate :: Aeson.Value -> Maybe TelegramMessage
+parseUpdate val = do
+ Aeson.Object obj <- pure val
+ updateId <- case KeyMap.lookup "update_id" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ Aeson.Object msgObj <- KeyMap.lookup "message" obj
+ Aeson.Object chatObj <- KeyMap.lookup "chat" msgObj
+ chatId <- case KeyMap.lookup "id" chatObj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ let chatType = case KeyMap.lookup "type" chatObj of
+ Just (Aeson.String "private") -> Private
+ Just (Aeson.String "group") -> Group
+ Just (Aeson.String "supergroup") -> Supergroup
+ Just (Aeson.String "channel") -> Channel
+ _ -> Private
+ let threadId = case KeyMap.lookup "message_thread_id" msgObj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ Aeson.Object fromObj <- KeyMap.lookup "from" msgObj
+ userId <- case KeyMap.lookup "id" fromObj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ firstName <- case KeyMap.lookup "first_name" fromObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ let lastName = case KeyMap.lookup "last_name" fromObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ let text = case KeyMap.lookup "text" msgObj of
+ Just (Aeson.String s) -> s
+ _ -> ""
+ let caption = case KeyMap.lookup "caption" msgObj of
+ Just (Aeson.String s) -> s
+ _ -> ""
+ let document = case KeyMap.lookup "document" msgObj of
+ Just (Aeson.Object docObj) -> parseDocument docObj
+ _ -> Nothing
+ let photo = case KeyMap.lookup "photo" msgObj of
+ Just (Aeson.Array photos) -> parseLargestPhoto (toList photos)
+ _ -> Nothing
+ let voice = case KeyMap.lookup "voice" msgObj of
+ Just (Aeson.Object voiceObj) -> parseVoice voiceObj
+ _ -> Nothing
+ let replyTo = case KeyMap.lookup "reply_to_message" msgObj of
+ Just (Aeson.Object replyObj) -> parseReplyMessage replyObj
+ _ -> Nothing
+ let hasContent = not (Text.null text) || not (Text.null caption) || isJust document || isJust photo || isJust voice
+ guard hasContent
+ pure
+ TelegramMessage
+ { tmUpdateId = updateId,
+ tmChatId = chatId,
+ tmChatType = chatType,
+ tmThreadId = threadId,
+ tmUserId = userId,
+ tmUserFirstName = firstName,
+ tmUserLastName = lastName,
+ tmText = if Text.null text then caption else text,
+ tmDocument = document,
+ tmPhoto = photo,
+ tmVoice = voice,
+ tmReplyTo = replyTo
+ }
+
+parseBotAddedToGroup :: Text -> Aeson.Value -> Maybe BotAddedToGroup
+parseBotAddedToGroup botUsername val = do
+ Aeson.Object obj <- pure val
+ updateId <- case KeyMap.lookup "update_id" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ Aeson.Object msgObj <- KeyMap.lookup "message" obj
+ Aeson.Object chatObj <- KeyMap.lookup "chat" msgObj
+ chatId <- case KeyMap.lookup "id" chatObj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ let chatType = case KeyMap.lookup "type" chatObj of
+ Just (Aeson.String t) -> t
+ _ -> "private"
+ guard (chatType == "group" || chatType == "supergroup")
+ Aeson.Object fromObj <- KeyMap.lookup "from" msgObj
+ addedByUserId <- case KeyMap.lookup "id" fromObj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ addedByFirstName <- case KeyMap.lookup "first_name" fromObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ Aeson.Array newMembers <- KeyMap.lookup "new_chat_members" msgObj
+ let botWasAdded = any (isBotUser botUsername) (toList newMembers)
+ guard botWasAdded
+ pure
+ BotAddedToGroup
+ { bagUpdateId = updateId,
+ bagChatId = chatId,
+ bagAddedByUserId = addedByUserId,
+ bagAddedByFirstName = addedByFirstName
+ }
+ where
+ isBotUser :: Text -> Aeson.Value -> Bool
+ isBotUser username (Aeson.Object userObj) =
+ case KeyMap.lookup "username" userObj of
+ Just (Aeson.String u) -> Text.toLower u == Text.toLower username
+ _ -> False
+ isBotUser _ _ = False
+
+parseDocument :: Aeson.Object -> Maybe TelegramDocument
+parseDocument docObj = do
+ fileId <- case KeyMap.lookup "file_id" docObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ let fileName = case KeyMap.lookup "file_name" docObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ mimeType = case KeyMap.lookup "mime_type" docObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ fileSize = case KeyMap.lookup "file_size" docObj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ pure
+ TelegramDocument
+ { tdFileId = fileId,
+ tdFileName = fileName,
+ tdMimeType = mimeType,
+ tdFileSize = fileSize
+ }
+
+parseLargestPhoto :: [Aeson.Value] -> Maybe TelegramPhoto
+parseLargestPhoto photos = do
+ let parsed = mapMaybe parsePhotoSize photos
+ case parsed of
+ [] -> Nothing
+ ps -> Just (maximumBy (comparing tpWidth) ps)
+
+parsePhotoSize :: Aeson.Value -> Maybe TelegramPhoto
+parsePhotoSize val = do
+ Aeson.Object obj <- pure val
+ fileId <- case KeyMap.lookup "file_id" obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ width <- case KeyMap.lookup "width" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ height <- case KeyMap.lookup "height" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ let fileSize = case KeyMap.lookup "file_size" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ pure
+ TelegramPhoto
+ { tpFileId = fileId,
+ tpWidth = width,
+ tpHeight = height,
+ tpFileSize = fileSize
+ }
+
+parseVoice :: Aeson.Object -> Maybe TelegramVoice
+parseVoice obj = do
+ fileId <- case KeyMap.lookup "file_id" obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ duration <- case KeyMap.lookup "duration" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ let mimeType = case KeyMap.lookup "mime_type" obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ fileSize = case KeyMap.lookup "file_size" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ pure
+ TelegramVoice
+ { tvFileId = fileId,
+ tvDuration = duration,
+ tvMimeType = mimeType,
+ tvFileSize = fileSize
+ }
+
+parseReplyMessage :: Aeson.Object -> Maybe TelegramReplyMessage
+parseReplyMessage obj = do
+ messageId <- case KeyMap.lookup "message_id" obj of
+ Just (Aeson.Number n) -> Just (round n)
+ _ -> Nothing
+ let fromFirstName = case KeyMap.lookup "from" obj of
+ Just (Aeson.Object fromObj) -> case KeyMap.lookup "first_name" fromObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ _ -> Nothing
+ fromLastName = case KeyMap.lookup "from" obj of
+ Just (Aeson.Object fromObj) -> case KeyMap.lookup "last_name" fromObj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ _ -> Nothing
+ text = case KeyMap.lookup "text" obj of
+ Just (Aeson.String s) -> s
+ _ -> case KeyMap.lookup "caption" obj of
+ Just (Aeson.String s) -> s
+ _ -> ""
+ pure
+ TelegramReplyMessage
+ { trMessageId = messageId,
+ trFromFirstName = fromFirstName,
+ trFromLastName = fromLastName,
+ trText = text
+ }
+
+isPdf :: TelegramDocument -> Bool
+isPdf doc =
+ case tdMimeType doc of
+ Just mime -> mime == "application/pdf"
+ Nothing -> case tdFileName doc of
+ Just name -> ".pdf" `Text.isSuffixOf` Text.toLower name
+ Nothing -> False
+
+isSupportedVoiceFormat :: TelegramVoice -> Bool
+isSupportedVoiceFormat voice =
+ case tvMimeType voice of
+ Just "audio/ogg" -> True
+ Just "audio/opus" -> True
+ Just "audio/x-opus+ogg" -> True
+ Nothing -> True
+ _ -> False
+
+isGroupChat :: TelegramMessage -> Bool
+isGroupChat msg = tmChatType msg `elem` [Group, Supergroup]
+
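+-- | In private chats the bot always responds; in groups it responds only when
+-- mentioned by username or when the message is a reply (note: any reply
+-- counts, the replied-to sender is not checked against the bot).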
+shouldRespondInGroup :: Text -> TelegramMessage -> Bool
+shouldRespondInGroup botUsername msg
+ | not (isGroupChat msg) = True
+ | isMentioned = True
+ | isReplyToBot = True
+ | otherwise = False
+ where
+ msgText = Text.toLower (tmText msg)
+ mention = "@" <> Text.toLower botUsername
+ isMentioned = mention `Text.isInfixOf` msgText
+ isReplyToBot = isJust (tmReplyTo msg)
diff --git a/Omni/Agent/Tools/Calendar.hs b/Omni/Agent/Tools/Calendar.hs
new file mode 100644
index 0000000..805916f
--- /dev/null
+++ b/Omni/Agent/Tools/Calendar.hs
@@ -0,0 +1,322 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Calendar tool using khal CLI.
+--
+-- Provides calendar access for agents via local khal/CalDAV.
+--
+-- : out omni-agent-tools-calendar
+-- : dep aeson
+-- : dep process
+module Omni.Agent.Tools.Calendar
+ ( -- * Tools
+ calendarListTool,
+ calendarAddTool,
+ calendarSearchTool,
+
+ -- * Direct API
+ listEvents,
+ addEvent,
+ searchEvents,
+ listCalendars,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Text as Text
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Test as Test
+import System.Process (readProcessWithExitCode)
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Calendar"
+ [ Test.unit "calendarListTool has correct schema" <| do
+ let tool = calendarListTool
+ Engine.toolName tool Test.@=? "calendar_list",
+ Test.unit "calendarAddTool has correct schema" <| do
+ let tool = calendarAddTool
+ Engine.toolName tool Test.@=? "calendar_add",
+ Test.unit "calendarSearchTool has correct schema" <| do
+ let tool = calendarSearchTool
+ Engine.toolName tool Test.@=? "calendar_search",
+ Test.unit "listCalendars returns calendars" <| do
+ result <- listCalendars
+ case result of
+ Left _ -> pure ()
+ Right cals -> (not (null cals) || null cals) Test.@=? True
+ ]
+
+defaultCalendars :: [String]
+defaultCalendars = ["BenSimaShared", "Kate"]
+
+listEvents :: Text -> Maybe Text -> IO (Either Text Text)
+listEvents range maybeCalendar = do
+ let rangeArg = if Text.null range then "today 7d" else Text.unpack range
+ calArgs = case maybeCalendar of
+ Just cal -> ["-a", Text.unpack cal]
+ Nothing -> concatMap (\c -> ["-a", c]) defaultCalendars
+ formatArg = ["-f", "[{calendar}] {title} | {start-time} - {end-time}"]
+ result <-
+ try <| readProcessWithExitCode "khal" (["list"] <> calArgs <> formatArg <> [rangeArg, "-o"]) ""
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("khal error: " <> tshow e))
+ Right (exitCode, stdoutStr, stderrStr) ->
+ case exitCode of
+ ExitSuccess -> pure (Right (Text.pack stdoutStr))
+ ExitFailure code ->
+ pure (Left ("khal failed (" <> tshow code <> "): " <> Text.pack stderrStr))
+
+addEvent :: Text -> Text -> Maybe Text -> Maybe Text -> Maybe Text -> IO (Either Text Text)
+addEvent calendarName eventSpec location alarm description = do
+ let baseArgs = ["new", "-a", Text.unpack calendarName]
+ locArgs = maybe [] (\l -> ["-l", Text.unpack l]) location
+ alarmArgs = maybe [] (\a -> ["-m", Text.unpack a]) alarm
+ specParts = Text.unpack eventSpec
+ descParts = maybe [] (\d -> ["::", Text.unpack d]) description
+ allArgs = baseArgs <> locArgs <> alarmArgs <> [specParts] <> descParts
+ result <- try <| readProcessWithExitCode "khal" allArgs ""
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("khal error: " <> tshow e))
+ Right (exitCode, stdoutStr, stderrStr) ->
+ case exitCode of
+ ExitSuccess ->
+ pure (Right ("Event created: " <> Text.pack stdoutStr))
+ ExitFailure code ->
+ pure (Left ("khal failed (" <> tshow code <> "): " <> Text.pack stderrStr))
+
+searchEvents :: Text -> IO (Either Text Text)
+searchEvents query = do
+ let calArgs = concatMap (\c -> ["-a", c]) defaultCalendars
+ result <-
+ try <| readProcessWithExitCode "khal" (["search"] <> calArgs <> [Text.unpack query]) ""
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("khal error: " <> tshow e))
+ Right (exitCode, stdoutStr, stderrStr) ->
+ case exitCode of
+ ExitSuccess -> pure (Right (Text.pack stdoutStr))
+ ExitFailure code ->
+ pure (Left ("khal failed (" <> tshow code <> "): " <> Text.pack stderrStr))
+
+listCalendars :: IO (Either Text [Text])
+listCalendars = do
+ result <-
+ try <| readProcessWithExitCode "khal" ["printcalendars"] ""
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("khal error: " <> tshow e))
+ Right (exitCode, stdoutStr, stderrStr) ->
+ case exitCode of
+ ExitSuccess ->
+ pure (Right (filter (not <. Text.null) (Text.lines (Text.pack stdoutStr))))
+ ExitFailure code ->
+ pure (Left ("khal failed (" <> tshow code <> "): " <> Text.pack stderrStr))
+
+calendarListTool :: Engine.Tool
+calendarListTool =
+ Engine.Tool
+ { Engine.toolName = "calendar_list",
+ Engine.toolDescription =
+ "List upcoming calendar events. Use to check what's scheduled. "
+ <> "Range can be like 'today', 'tomorrow', 'today 7d', 'next week', etc. "
+ <> "Available calendars: BenSimaShared, Kate.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "range"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Time range like 'today 7d', 'tomorrow', 'next week' (default: today 7d)" :: Text)
+ ],
+ "calendar"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Filter to specific calendar: 'BenSimaShared' or 'Kate' (default: both)" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
+ Engine.toolExecute = executeCalendarList
+ }
+
+executeCalendarList :: Aeson.Value -> IO Aeson.Value
+executeCalendarList v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: CalendarListArgs) -> do
+ result <- listEvents (clRange args) (clCalendar args)
+ case result of
+ Left err ->
+ pure (Aeson.object ["error" .= err])
+ Right events ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "events" .= events
+ ]
+ )
+
+data CalendarListArgs = CalendarListArgs
+ { clRange :: Text,
+ clCalendar :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON CalendarListArgs where
+ parseJSON =
+ Aeson.withObject "CalendarListArgs" <| \v ->
+ (CalendarListArgs </ (v .:? "range" .!= "today 7d"))
+ <*> (v .:? "calendar")
+
+calendarAddTool :: Engine.Tool
+calendarAddTool =
+ Engine.Tool
+ { Engine.toolName = "calendar_add",
+ Engine.toolDescription =
+ "Add a new calendar event. The event_spec format is: "
+ <> "'START [END] SUMMARY' where START/END are dates or times. "
+ <> "Examples: '2024-12-25 Christmas', 'tomorrow 10:00 11:00 Meeting', "
+ <> "'friday 14:00 1h Doctor appointment'.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "calendar"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Calendar name to add to (e.g., 'BenSimaShared', 'Kate')" :: Text)
+ ],
+ "event_spec"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Event specification: 'START [END] SUMMARY' (e.g., 'tomorrow 10:00 11:00 Team meeting')" :: Text)
+ ],
+ "location"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Location of the event (optional)" :: Text)
+ ],
+ "alarm"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Alarm time before event, e.g., '15m', '1h', '1d' (optional)" :: Text)
+ ],
+ "description"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Detailed description of the event (optional)" :: Text)
+ ]
+ ],
+ "required" .= (["calendar", "event_spec"] :: [Text])
+ ],
+ Engine.toolExecute = executeCalendarAdd
+ }
+
+executeCalendarAdd :: Aeson.Value -> IO Aeson.Value
+executeCalendarAdd v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: CalendarAddArgs) -> do
+ result <-
+ addEvent
+ (caCalendar args)
+ (caEventSpec args)
+ (caLocation args)
+ (caAlarm args)
+ (caDescription args)
+ case result of
+ Left err ->
+ pure (Aeson.object ["error" .= err])
+ Right msg ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message" .= msg
+ ]
+ )
+
+data CalendarAddArgs = CalendarAddArgs
+ { caCalendar :: Text,
+ caEventSpec :: Text,
+ caLocation :: Maybe Text,
+ caAlarm :: Maybe Text,
+ caDescription :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON CalendarAddArgs where
+ parseJSON =
+ Aeson.withObject "CalendarAddArgs" <| \v ->
+ (CalendarAddArgs </ (v .: "calendar"))
+ <*> (v .: "event_spec")
+ <*> (v .:? "location")
+ <*> (v .:? "alarm")
+ <*> (v .:? "description")
+
+calendarSearchTool :: Engine.Tool
+calendarSearchTool =
+ Engine.Tool
+ { Engine.toolName = "calendar_search",
+ Engine.toolDescription =
+ "Search for calendar events by text. Finds events matching the query "
+ <> "in title, description, or location.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "query"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Search text to find in events" :: Text)
+ ]
+ ],
+ "required" .= (["query"] :: [Text])
+ ],
+ Engine.toolExecute = executeCalendarSearch
+ }
+
+executeCalendarSearch :: Aeson.Value -> IO Aeson.Value
+executeCalendarSearch v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: CalendarSearchArgs) -> do
+ result <- searchEvents (csQuery args)
+ case result of
+ Left err ->
+ pure (Aeson.object ["error" .= err])
+ Right events ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "results" .= events
+ ]
+ )
+
+newtype CalendarSearchArgs = CalendarSearchArgs
+ { csQuery :: Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON CalendarSearchArgs where
+ parseJSON =
+ Aeson.withObject "CalendarSearchArgs" <| \v ->
+ CalendarSearchArgs </ (v .: "query")
diff --git a/Omni/Agent/Tools/Email.hs b/Omni/Agent/Tools/Email.hs
new file mode 100644
index 0000000..7a9bc64
--- /dev/null
+++ b/Omni/Agent/Tools/Email.hs
@@ -0,0 +1,675 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Email tools for IMAP and SMTP access via Telegram bot.
+--
+-- Provides email management for agents:
+-- - Check for urgent/time-sensitive emails
+-- - Identify emails needing response vs FYI
+-- - Auto-unsubscribe from marketing
+-- - Send approved outreach emails via SMTP
+--
+-- Uses HaskellNet for IMAP/SMTP client support.
+-- Password retrieved via `pass ben@bensima.com`.
+--
+-- : out omni-agent-tools-email
+-- : dep aeson
+-- : dep process
+-- : dep regex-applicative
+-- : dep http-conduit
+-- : dep HaskellNet
+-- : dep HaskellNet-SSL
+module Omni.Agent.Tools.Email
+ ( -- * Tools
+ emailCheckTool,
+ emailReadTool,
+ emailUnsubscribeTool,
+ emailArchiveTool,
+ emailSendTool,
+
+ -- * All tools
+ allEmailTools,
+
+ -- * Direct API
+ checkNewEmails,
+ readEmail,
+ unsubscribeFromEmail,
+ archiveEmail,
+ getPassword,
+ sendApprovedEmail,
+
+ -- * Scheduled Check
+ emailCheckLoop,
+ performScheduledCheck,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.ByteString.Char8 as BS8
+import qualified Data.List as List
+import qualified Data.Text as Text
+import qualified Data.Text.Lazy as LText
+import Data.Time (NominalDiffTime, UTCTime, addUTCTime, getCurrentTime)
+import Data.Time.Format (defaultTimeLocale, formatTime, parseTimeM)
+import Data.Time.LocalTime (TimeZone (..), utcToZonedTime)
+import qualified Network.HTTP.Simple as HTTP
+import qualified Network.HaskellNet.IMAP as IMAP
+import Network.HaskellNet.IMAP.Connection (IMAPConnection)
+import qualified Network.HaskellNet.IMAP.SSL as IMAPSSL
+import qualified Network.HaskellNet.SMTP as SMTP
+import qualified Network.HaskellNet.SMTP.SSL as SMTPSSL
+import Network.Mail.Mime (Address (..), simpleMail')
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Tools.Outreach as Outreach
+import qualified Omni.Test as Test
+import System.Process (readProcessWithExitCode)
+import Text.Regex.Applicative (RE, anySym, few, (=~))
+import qualified Text.Regex.Applicative as RE
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Email"
+ [ Test.unit "emailCheckTool has correct name" <| do
+ Engine.toolName emailCheckTool Test.@=? "email_check",
+ Test.unit "emailReadTool has correct name" <| do
+ Engine.toolName emailReadTool Test.@=? "email_read",
+ Test.unit "emailUnsubscribeTool has correct name" <| do
+ Engine.toolName emailUnsubscribeTool Test.@=? "email_unsubscribe",
+ Test.unit "emailArchiveTool has correct name" <| do
+ Engine.toolName emailArchiveTool Test.@=? "email_archive",
+ Test.unit "emailSendTool has correct name" <| do
+ Engine.toolName emailSendTool Test.@=? "email_send",
+ Test.unit "allEmailTools has 5 tools" <| do
+ length allEmailTools Test.@=? 5,
+ Test.unit "parseEmailHeaders extracts fields" <| do
+ let headers =
+ "From: test@example.com\r\n\
+ \Subject: Test Subject\r\n\
+ \Date: Mon, 1 Jan 2024 12:00:00 +0000\r\n\
+ \\r\n"
+ case parseEmailHeaders headers of
+ Nothing -> Test.assertFailure "Failed to parse headers"
+ Just email -> do
+ emailFrom email Test.@=? "test@example.com"
+ emailSubject email Test.@=? "Test Subject",
+ Test.unit "parseUnsubscribeHeader extracts URL" <| do
+ let header = "<https://example.com/unsubscribe>, <mailto:unsub@example.com>"
+ case parseUnsubscribeUrl header of
+ Nothing -> Test.assertFailure "Failed to parse unsubscribe URL"
+ Just url -> ("https://example.com" `Text.isPrefixOf` url) Test.@=? True
+ ]
+
+imapServer :: String
+imapServer = "bensima.com"
+
+imapUser :: String
+imapUser = "ben@bensima.com"
+
+getPassword :: IO (Either Text Text)
+getPassword = do
+ result <- try <| readProcessWithExitCode "pass" ["ben@bensima.com"] ""
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("Failed to get password: " <> tshow e))
+ Right (exitCode, stdoutStr, stderrStr) ->
+ case exitCode of
+ ExitSuccess -> pure (Right (Text.strip (Text.pack stdoutStr)))
+ ExitFailure code ->
+ pure (Left ("pass failed (" <> tshow code <> "): " <> Text.pack stderrStr))
+
+withImapConnection :: (IMAPConnection -> IO a) -> IO (Either Text a)
+withImapConnection action = do
+ pwResult <- getPassword
+ case pwResult of
+ Left err -> pure (Left err)
+ Right pw -> do
+ result <-
+ try <| do
+ conn <- IMAPSSL.connectIMAPSSL imapServer
+ IMAP.login conn imapUser (Text.unpack pw)
+ r <- action conn
+ IMAP.logout conn
+ pure r
+ case result of
+ Left (e :: SomeException) -> pure (Left ("IMAP error: " <> tshow e))
+ Right r -> pure (Right r)
+
+data EmailSummary = EmailSummary
+ { emailUid :: Int,
+ emailFrom :: Text,
+ emailSubject :: Text,
+ emailDate :: Text,
+ emailUnsubscribe :: Maybe Text
+ }
+ deriving (Show, Generic)
+
+instance Aeson.ToJSON EmailSummary where
+ toJSON e =
+ Aeson.object
+ [ "uid" .= emailUid e,
+ "from" .= emailFrom e,
+ "subject" .= emailSubject e,
+ "date" .= formatDateAsEst (emailDate e),
+ "has_unsubscribe" .= isJust (emailUnsubscribe e)
+ ]
+
+estTimezone :: TimeZone
+estTimezone = TimeZone (-300) False "EST"
+
+formatDateAsEst :: Text -> Text
+formatDateAsEst dateStr =
+ case parseEmailDate dateStr of
+ Nothing -> dateStr
+ Just utcTime ->
+ let zonedTime = utcToZonedTime estTimezone utcTime
+ in Text.pack (formatTime defaultTimeLocale "%a %b %d %H:%M EST" zonedTime)
+
+parseEmailHeaders :: Text -> Maybe EmailSummary
+parseEmailHeaders raw = do
+ let headerLines = Text.lines raw
+ fromLine = findHeader "From:" headerLines
+ subjectLine = findHeader "Subject:" headerLines
+ dateLine = findHeader "Date:" headerLines
+ unsubLine = findHeader "List-Unsubscribe:" headerLines
+ fromVal <- fromLine
+ subject <- subjectLine
+ dateVal <- dateLine
+ pure
+ EmailSummary
+ { emailUid = 0,
+ emailFrom = Text.strip (Text.drop 5 fromVal),
+ emailSubject = Text.strip (Text.drop 8 subject),
+ emailDate = Text.strip (Text.drop 5 dateVal),
+ emailUnsubscribe = (parseUnsubscribeUrl <. Text.drop 16) =<< unsubLine
+ }
+ where
+ findHeader :: Text -> [Text] -> Maybe Text
+ findHeader prefix = List.find (prefix `Text.isPrefixOf`)
+
+parseUnsubscribeUrl :: Text -> Maybe Text
+parseUnsubscribeUrl header =
+  Text.pack </ (Text.unpack header =~ urlInBrackets)
+  where
+    -- match the first <...> bracket whose contents are an http(s) link,
+    -- ignoring mailto: entries and anything after the closing '>'
+    urlInBrackets :: RE Char String
+    urlInBrackets =
+      few anySym *> RE.sym '<' *> ((<>) </ RE.string "http" <*> few (RE.psym (/= '>'))) <* RE.sym '>' <* few anySym
+
+checkNewEmails :: Maybe Int -> Maybe Int -> IO (Either Text [EmailSummary])
+checkNewEmails maybeLimit maybeHours = do
+ withImapConnection <| \conn -> do
+ IMAP.select conn "INBOX"
+ uids <- IMAP.search conn [IMAP.UNFLAG IMAP.Seen]
+ let limit = fromMaybe 20 maybeLimit
+ recentUids = take limit (reverse (map fromIntegral uids))
+ if null recentUids
+ then pure []
+ else do
+ emails <-
+ forM recentUids <| \uid -> do
+ headerBytes <- IMAP.fetchHeader conn (fromIntegral uid)
+ let headerText = Text.pack (BS8.unpack headerBytes)
+ pure (parseEmailHeaders headerText, uid)
+ let parsed =
+ [ e {emailUid = uid}
+ | (Just e, uid) <- emails
+ ]
+ case maybeHours of
+ Nothing -> pure parsed
+ Just hours -> do
+ now <- getCurrentTime
+ let cutoff = addUTCTime (negate (fromIntegral hours * 3600 :: NominalDiffTime)) now
+ pure (filter (isAfterCutoff cutoff) parsed)
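+
+-- Usage sketch: unread mail from the last six hours, at most ten messages:
+--
+-- > result <- checkNewEmails (Just 10) (Just 6)
+-- > case result of
+-- >   Left err -> putText err
+-- >   Right emails -> forM_ emails (putText <. emailSubject)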
+
+isAfterCutoff :: UTCTime -> EmailSummary -> Bool
+isAfterCutoff cutoff email =
+ case parseEmailDate (emailDate email) of
+ Nothing -> False
+ Just emailTime -> emailTime >= cutoff
+
+parseEmailDate :: Text -> Maybe UTCTime
+parseEmailDate dateStr =
+ let cleaned = stripParenTz (Text.strip dateStr)
+ formats =
+ [ "%a, %d %b %Y %H:%M:%S %z",
+ "%a, %d %b %Y %H:%M:%S %Z",
+ "%d %b %Y %H:%M:%S %z",
+ "%a, %d %b %Y %H:%M %z",
+ "%a, %d %b %Y %H:%M:%S %z (%Z)"
+ ]
+ tryParse [] = Nothing
+ tryParse (fmt : rest) =
+ case parseTimeM True defaultTimeLocale fmt (Text.unpack cleaned) of
+ Just t -> Just t
+ Nothing -> tryParse rest
+ in tryParse formats
+
+stripParenTz :: Text -> Text
+stripParenTz t =
+ case Text.breakOn " (" t of
+ (before, after)
+ | Text.null after -> t
+ | ")" `Text.isSuffixOf` after -> before
+ | otherwise -> t
+
+readEmail :: Int -> IO (Either Text Text)
+readEmail uid =
+ withImapConnection <| \conn -> do
+ IMAP.select conn "INBOX"
+ bodyBytes <- IMAP.fetch conn (fromIntegral uid)
+ let bodyText = Text.pack (BS8.unpack bodyBytes)
+ pure (Text.take 10000 bodyText)
+
+unsubscribeFromEmail :: Int -> IO (Either Text Text)
+unsubscribeFromEmail uid = do
+ headerResult <-
+ withImapConnection <| \conn -> do
+ IMAP.select conn "INBOX"
+ headerBytes <- IMAP.fetchHeader conn (fromIntegral uid)
+ pure (Text.pack (BS8.unpack headerBytes))
+ case headerResult of
+ Left err -> pure (Left err)
+ Right headerText ->
+ case extractUnsubscribeUrl headerText of
+ Nothing -> pure (Left "No unsubscribe URL found in this email")
+ Just url -> do
+ clickResult <- clickUnsubscribeLink url
+ case clickResult of
+ Left err -> pure (Left ("Failed to unsubscribe: " <> err))
+ Right () -> do
+ _ <- archiveEmail uid
+ pure (Right ("Unsubscribed and archived email " <> tshow uid))
+
+extractUnsubscribeUrl :: Text -> Maybe Text
+extractUnsubscribeUrl headerText =
+ let unsubLine = List.find ("List-Unsubscribe:" `Text.isInfixOf`) (Text.lines headerText)
+ in (parseUnsubscribeUrl <. Text.drop 16 <. Text.strip) =<< unsubLine
+
+clickUnsubscribeLink :: Text -> IO (Either Text ())
+clickUnsubscribeLink url = do
+ result <-
+ try <| do
+ req <- HTTP.parseRequest (Text.unpack url)
+ _ <- HTTP.httpLBS req
+ pure ()
+ case result of
+ Left (e :: SomeException) -> pure (Left (tshow e))
+ Right () -> pure (Right ())
+
+archiveEmail :: Int -> IO (Either Text Text)
+archiveEmail uid =
+ withImapConnection <| \conn -> do
+ IMAP.select conn "INBOX"
+ IMAP.copy conn (fromIntegral uid) "Archives.2025"
+ IMAP.store conn (fromIntegral uid) (IMAP.PlusFlags [IMAP.Deleted])
+ _ <- IMAP.expunge conn
+ pure ("Archived email " <> tshow uid)
+
+allEmailTools :: [Engine.Tool]
+allEmailTools =
+ [ emailCheckTool,
+ emailReadTool,
+ emailUnsubscribeTool,
+ emailArchiveTool,
+ emailSendTool
+ ]
+
+emailCheckTool :: Engine.Tool
+emailCheckTool =
+ Engine.Tool
+ { Engine.toolName = "email_check",
+ Engine.toolDescription =
+ "Check for new/unread emails. Returns a summary of recent unread emails "
+ <> "including sender, subject, date, and whether they have an unsubscribe link. "
+ <> "Use this to identify urgent items or emails needing response. "
+ <> "Use 'hours' to filter to emails received in the last N hours (e.g., hours=6 for last 6 hours).",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "limit"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Max emails to return (default: 20)" :: Text)
+ ],
+ "hours"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Only return emails from the last N hours (e.g., 6 for last 6 hours)" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
+ Engine.toolExecute = executeEmailCheck
+ }
+
+executeEmailCheck :: Aeson.Value -> IO Aeson.Value
+executeEmailCheck v = do
+ let (limit, hours) = case v of
+ Aeson.Object obj ->
+ let l = case KeyMap.lookup "limit" obj of
+ Just (Aeson.Number n) -> Just (round n :: Int)
+ _ -> Nothing
+ h = case KeyMap.lookup "hours" obj of
+ Just (Aeson.Number n) -> Just (round n :: Int)
+ _ -> Nothing
+ in (l, h)
+ _ -> (Nothing, Nothing)
+ result <- checkNewEmails limit hours
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right emails ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "count" .= length emails,
+ "emails" .= emails
+ ]
+ )
+
+emailReadTool :: Engine.Tool
+emailReadTool =
+ Engine.Tool
+ { Engine.toolName = "email_read",
+ Engine.toolDescription =
+ "Read the full content of an email by its UID. "
+ <> "Use after email_check to read emails that seem important or need a response.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "uid"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Email UID from email_check" :: Text)
+ ]
+ ],
+ "required" .= (["uid"] :: [Text])
+ ],
+ Engine.toolExecute = executeEmailRead
+ }
+
+executeEmailRead :: Aeson.Value -> IO Aeson.Value
+executeEmailRead v = do
+ let uidM = case v of
+ Aeson.Object obj -> case KeyMap.lookup "uid" obj of
+ Just (Aeson.Number n) -> Just (round n :: Int)
+ _ -> Nothing
+ _ -> Nothing
+ case uidM of
+ Nothing -> pure (Aeson.object ["error" .= ("Missing uid parameter" :: Text)])
+ Just uid -> do
+ result <- readEmail uid
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right body ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "uid" .= uid,
+ "body" .= body
+ ]
+ )
+
+emailUnsubscribeTool :: Engine.Tool
+emailUnsubscribeTool =
+ Engine.Tool
+ { Engine.toolName = "email_unsubscribe",
+ Engine.toolDescription =
+ "Unsubscribe from a mailing list by clicking the List-Unsubscribe link. "
+ <> "Use for marketing/newsletter emails. Automatically archives the email after unsubscribing.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "uid"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Email UID to unsubscribe from" :: Text)
+ ]
+ ],
+ "required" .= (["uid"] :: [Text])
+ ],
+ Engine.toolExecute = executeEmailUnsubscribe
+ }
+
+executeEmailUnsubscribe :: Aeson.Value -> IO Aeson.Value
+executeEmailUnsubscribe v = do
+ let uidM = case v of
+ Aeson.Object obj -> case KeyMap.lookup "uid" obj of
+ Just (Aeson.Number n) -> Just (round n :: Int)
+ _ -> Nothing
+ _ -> Nothing
+ case uidM of
+ Nothing -> pure (Aeson.object ["error" .= ("Missing uid parameter" :: Text)])
+ Just uid -> do
+ result <- unsubscribeFromEmail uid
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right msg ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message" .= msg
+ ]
+ )
+
+emailArchiveTool :: Engine.Tool
+emailArchiveTool =
+ Engine.Tool
+ { Engine.toolName = "email_archive",
+ Engine.toolDescription =
+ "Archive an email (move to Archives.2025 folder). "
+ <> "Use for emails that don't need a response and are just FYI.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "uid"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Email UID to archive" :: Text)
+ ]
+ ],
+ "required" .= (["uid"] :: [Text])
+ ],
+ Engine.toolExecute = executeEmailArchive
+ }
+
+executeEmailArchive :: Aeson.Value -> IO Aeson.Value
+executeEmailArchive v = do
+ let uidM = case v of
+ Aeson.Object obj -> case KeyMap.lookup "uid" obj of
+ Just (Aeson.Number n) -> Just (round n :: Int)
+ _ -> Nothing
+ _ -> Nothing
+ case uidM of
+ Nothing -> pure (Aeson.object ["error" .= ("Missing uid parameter" :: Text)])
+ Just uid -> do
+ result <- archiveEmail uid
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right msg ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message" .= msg
+ ]
+ )
+
+emailCheckLoop :: (Int -> Maybe Int -> Text -> IO (Maybe Int)) -> Int -> IO ()
+emailCheckLoop sendFn chatId =
+ forever <| do
+    let sixHours = 6 * 60 * 60 * 1000000 -- 6 hours in microseconds, for threadDelay
+ threadDelay sixHours
+ performScheduledCheck sendFn chatId
+
+performScheduledCheck :: (Int -> Maybe Int -> Text -> IO (Maybe Int)) -> Int -> IO ()
+performScheduledCheck sendFn chatId = do
+ putText "Running scheduled email check..."
+ result <- checkNewEmails (Just 50) (Just 6)
+ case result of
+ Left err -> putText ("Email check failed: " <> err)
+ Right emails -> do
+ let urgent = filter isUrgent emails
+ needsResponse = filter needsResponsePred emails
+ marketing = filter hasUnsubscribe emails
+ when (not (null urgent) || not (null needsResponse)) <| do
+ let msg = formatEmailSummary urgent needsResponse (length marketing)
+ _ <- sendFn chatId Nothing msg
+ pure ()
+ where
+ isUrgent :: EmailSummary -> Bool
+ isUrgent email =
+ let subj = Text.toLower (emailSubject email)
+ in "urgent"
+ `Text.isInfixOf` subj
+ || "asap"
+ `Text.isInfixOf` subj
+ || "important"
+ `Text.isInfixOf` subj
+ || "action required"
+ `Text.isInfixOf` subj
+
+ needsResponsePred :: EmailSummary -> Bool
+ needsResponsePred email =
+ let sender = Text.toLower (emailFrom email)
+ subj = Text.toLower (emailSubject email)
+ in not (hasUnsubscribe email)
+ && not (isUrgent email)
+ && not ("noreply" `Text.isInfixOf` sender)
+ && not ("no-reply" `Text.isInfixOf` sender)
+ && ("?" `Text.isInfixOf` subj || "reply" `Text.isInfixOf` subj || "response" `Text.isInfixOf` subj)
+
+ hasUnsubscribe :: EmailSummary -> Bool
+ hasUnsubscribe = isJust <. emailUnsubscribe
+
+ formatEmailSummary :: [EmailSummary] -> [EmailSummary] -> Int -> Text
+ formatEmailSummary urgent needs marketingCount =
+ Text.unlines
+ <| ["📧 *email check*", ""]
+ <> (if null urgent then [] else ["*urgent:*"] <> map formatOne urgent <> [""])
+ <> (if null needs then [] else ["*may need response:*"] <> map formatOne needs <> [""])
+ <> [tshow marketingCount <> " marketing emails (use email_check to review)"]
+
+ formatOne :: EmailSummary -> Text
+ formatOne e =
+ "• " <> emailSubject e <> " (from: " <> emailFrom e <> ", uid: " <> tshow (emailUid e) <> ")"
+
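+-- An illustrative summary message as produced by 'formatEmailSummary' above
+-- (subjects, senders, and uids are made up):
+--
+-- > 📧 *email check*
+-- >
+-- > *urgent:*
+-- > • Action required: invoice overdue (from: billing@example.com, uid: 42)
+-- >
+-- > 3 marketing emails (use email_check to review)
+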
+smtpServer :: String
+smtpServer = "bensima.com"
+
+smtpUser :: String
+smtpUser = "ben@bensima.com"
+
+withSmtpConnection :: (SMTP.SMTPConnection -> IO a) -> IO (Either Text a)
+withSmtpConnection action = do
+ pwResult <- getPassword
+ case pwResult of
+ Left err -> pure (Left err)
+ Right pw -> do
+ result <-
+ try <| do
+ conn <- SMTPSSL.connectSMTPSSL smtpServer
+ authSuccess <- SMTP.authenticate SMTP.LOGIN smtpUser (Text.unpack pw) conn
+ if authSuccess
+ then do
+ r <- action conn
+ SMTP.closeSMTP conn
+ pure r
+ else do
+ SMTP.closeSMTP conn
+ panic "SMTP authentication failed"
+ case result of
+ Left (e :: SomeException) -> pure (Left ("SMTP error: " <> tshow e))
+ Right r -> pure (Right r)
+
+sendApprovedEmail :: Text -> IO (Either Text Text)
+sendApprovedEmail draftId = do
+ mDraft <- Outreach.getDraft draftId
+ case mDraft of
+ Nothing -> pure (Left "Draft not found")
+ Just draft -> do
+ case Outreach.draftStatus draft of
+ Outreach.Approved -> do
+ let recipientAddr = Address Nothing (Outreach.draftRecipient draft)
+ senderAddr = Address (Just "Ben Sima") "ben@bensima.com"
+ subject = fromMaybe "" (Outreach.draftSubject draft)
+ body = LText.fromStrict (Outreach.draftBody draft)
+ footer = "\n\n---\nSent by Ava on behalf of Ben"
+ fullBody = body <> footer
+ mail = simpleMail' recipientAddr senderAddr subject fullBody
+ sendResult <-
+ withSmtpConnection <| \conn -> do
+ SMTP.sendMail mail conn
+ case sendResult of
+ Left err -> pure (Left err)
+ Right () -> do
+ _ <- Outreach.markSent draftId
+ pure (Right ("Email sent to " <> Outreach.draftRecipient draft))
+ Outreach.Pending -> pure (Left "Draft is still pending approval")
+ Outreach.Rejected -> pure (Left "Draft was rejected")
+ Outreach.Sent -> pure (Left "Draft was already sent")
+
+emailSendTool :: Engine.Tool
+emailSendTool =
+ Engine.Tool
+ { Engine.toolName = "email_send",
+ Engine.toolDescription =
+ "Send an approved outreach email. Only sends emails that have been approved "
+ <> "by Ben in the outreach queue. Use outreach_draft to create drafts first, "
+ <> "wait for approval, then use this to send.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "draft_id"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("ID of the approved draft to send" :: Text)
+ ]
+ ],
+ "required" .= (["draft_id"] :: [Text])
+ ],
+ Engine.toolExecute = executeEmailSend
+ }
+
+executeEmailSend :: Aeson.Value -> IO Aeson.Value
+executeEmailSend v = do
+ let draftIdM = case v of
+ Aeson.Object obj -> case KeyMap.lookup "draft_id" obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ _ -> Nothing
+ case draftIdM of
+ Nothing -> pure (Aeson.object ["error" .= ("Missing draft_id parameter" :: Text)])
+ Just draftId -> do
+ result <- sendApprovedEmail draftId
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right msg ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message" .= msg
+ ]
+ )
diff --git a/Omni/Agent/Tools/Feedback.hs b/Omni/Agent/Tools/Feedback.hs
new file mode 100644
index 0000000..1ec684c
--- /dev/null
+++ b/Omni/Agent/Tools/Feedback.hs
@@ -0,0 +1,204 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Feedback query tool for PodcastItLater user research.
+--
+-- Allows the agent to query collected feedback from the PIL database.
+-- Feedback is submitted via /feedback on the PIL web app.
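+--
+-- Example tool call and response shape (values are illustrative):
+--
+-- > feedback_list {"limit": 10, "since": "2024-01-01"}
+-- > => {"success": true, "count": 2, "entries": [...]}
+--
+-- Entries are fetched from @$PIL_BASE_URL/api/feedback@ (default
+-- @http://localhost:8000@).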
+--
+-- : out omni-agent-tools-feedback
+-- : dep aeson
+-- : dep http-conduit
+module Omni.Agent.Tools.Feedback
+ ( -- * Tools
+ feedbackListTool,
+ allFeedbackTools,
+
+ -- * Types
+ FeedbackEntry (..),
+ ListFeedbackArgs (..),
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Text as Text
+import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Test as Test
+import System.Environment (lookupEnv)
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Feedback"
+ [ Test.unit "feedbackListTool has correct name" <| do
+ Engine.toolName feedbackListTool Test.@=? "feedback_list",
+ Test.unit "allFeedbackTools has 1 tool" <| do
+ length allFeedbackTools Test.@=? 1,
+ Test.unit "ListFeedbackArgs parses correctly" <| do
+ let json = Aeson.object ["limit" .= (10 :: Int)]
+ case Aeson.fromJSON json of
+ Aeson.Success (args :: ListFeedbackArgs) -> lfaLimit args Test.@=? 10
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "ListFeedbackArgs parses with since" <| do
+ let json =
+ Aeson.object
+ [ "limit" .= (20 :: Int),
+ "since" .= ("2024-01-01" :: Text)
+ ]
+ case Aeson.fromJSON json of
+ Aeson.Success (args :: ListFeedbackArgs) -> do
+ lfaLimit args Test.@=? 20
+ lfaSince args Test.@=? Just "2024-01-01"
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "FeedbackEntry JSON roundtrip" <| do
+ let entry =
+ FeedbackEntry
+ { feId = "abc123",
+ feEmail = Just "test@example.com",
+ feSource = Just "outreach",
+ feCampaignId = Nothing,
+ feRating = Just 4,
+ feFeedbackText = Just "Great product!",
+ feUseCase = Just "Commute listening",
+ feCreatedAt = "2024-01-15T10:00:00Z"
+ }
+ case Aeson.decode (Aeson.encode entry) of
+ Nothing -> Test.assertFailure "Failed to decode FeedbackEntry"
+ Just decoded -> do
+ feId decoded Test.@=? "abc123"
+ feEmail decoded Test.@=? Just "test@example.com"
+ feRating decoded Test.@=? Just 4
+ ]
+
+data FeedbackEntry = FeedbackEntry
+ { feId :: Text,
+ feEmail :: Maybe Text,
+ feSource :: Maybe Text,
+ feCampaignId :: Maybe Text,
+ feRating :: Maybe Int,
+ feFeedbackText :: Maybe Text,
+ feUseCase :: Maybe Text,
+ feCreatedAt :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON FeedbackEntry where
+ toJSON e =
+ Aeson.object
+ [ "id" .= feId e,
+ "email" .= feEmail e,
+ "source" .= feSource e,
+ "campaign_id" .= feCampaignId e,
+ "rating" .= feRating e,
+ "feedback_text" .= feFeedbackText e,
+ "use_case" .= feUseCase e,
+ "created_at" .= feCreatedAt e
+ ]
+
+instance Aeson.FromJSON FeedbackEntry where
+ parseJSON =
+ Aeson.withObject "FeedbackEntry" <| \v ->
+ (FeedbackEntry </ (v .: "id"))
+ <*> (v .:? "email")
+ <*> (v .:? "source")
+ <*> (v .:? "campaign_id")
+ <*> (v .:? "rating")
+ <*> (v .:? "feedback_text")
+ <*> (v .:? "use_case")
+ <*> (v .: "created_at")
+
+data ListFeedbackArgs = ListFeedbackArgs
+ { lfaLimit :: Int,
+ lfaSince :: Maybe Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON ListFeedbackArgs where
+ parseJSON =
+ Aeson.withObject "ListFeedbackArgs" <| \v ->
+ (ListFeedbackArgs </ (v .:? "limit" .!= 20))
+ <*> (v .:? "since")
+
+allFeedbackTools :: [Engine.Tool]
+allFeedbackTools = [feedbackListTool]
+
+feedbackListTool :: Engine.Tool
+feedbackListTool =
+ Engine.Tool
+ { Engine.toolName = "feedback_list",
+ Engine.toolDescription =
+ "List feedback entries from PodcastItLater users. "
+ <> "Use to review user research data and understand what potential "
+ <> "customers want from the product.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "limit"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Max entries to return (default: 20)" :: Text)
+ ],
+ "since"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("ISO date to filter by (entries after this date)" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
+ Engine.toolExecute = executeFeedbackList
+ }
+
+executeFeedbackList :: Aeson.Value -> IO Aeson.Value
+executeFeedbackList v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: ListFeedbackArgs) -> do
+ mBaseUrl <- lookupEnv "PIL_BASE_URL"
+ let baseUrl = maybe "http://localhost:8000" Text.pack mBaseUrl
+ limit = min 100 (max 1 (lfaLimit args))
+ sinceParam = case lfaSince args of
+ Nothing -> ""
+ Just since -> "&since=" <> since
+ url = baseUrl <> "/api/feedback?limit=" <> tshow limit <> sinceParam
+ result <- fetchFeedback url
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right entries ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "count" .= length entries,
+ "entries" .= entries
+ ]
+ )
+
+fetchFeedback :: Text -> IO (Either Text [FeedbackEntry])
+fetchFeedback url = do
+ result <-
+ try <| do
+ req <- HTTP.parseRequest (Text.unpack url)
+ resp <- HTTP.httpLBS req
+ pure (HTTP.getResponseStatusCode resp, HTTP.getResponseBody resp)
+ case result of
+ Left (e :: SomeException) -> pure (Left ("Request failed: " <> tshow e))
+ Right (status, body) ->
+ if status /= 200
+ then pure (Left ("HTTP " <> tshow status))
+ else case Aeson.decode body of
+ Nothing -> pure (Left "Failed to parse response")
+ Just entries -> pure (Right entries)
diff --git a/Omni/Agent/Tools/Hledger.hs b/Omni/Agent/Tools/Hledger.hs
new file mode 100644
index 0000000..59e0c05
--- /dev/null
+++ b/Omni/Agent/Tools/Hledger.hs
@@ -0,0 +1,489 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Hledger tools for personal finance queries and transaction entry.
+--
+-- Provides hledger access for agents via the nix-shell in ~/fund.
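+--
+-- Example tool calls (argument values are illustrative):
+--
+-- > hledger_balance {"account_pattern": "ex", "period": "thismonth"}
+-- > hledger_add {"description": "Haircut at Joe's Barber",
+-- >              "from_account": "as:me:cash:checking",
+-- >              "to_account": "ex:me:want:grooming", "amount": "$30.00"}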
+--
+-- : out omni-agent-tools-hledger
+-- : dep aeson
+-- : dep process
+-- : dep directory
+module Omni.Agent.Tools.Hledger
+ ( -- * Tools
+ hledgerBalanceTool,
+ hledgerRegisterTool,
+ hledgerAddTool,
+ hledgerIncomeStatementTool,
+ hledgerBalanceSheetTool,
+
+ -- * All tools (for easy import)
+ allHledgerTools,
+
+ -- * Direct API
+ queryBalance,
+ queryRegister,
+ addTransaction,
+ incomeStatement,
+ balanceSheet,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.List as List
+import qualified Data.Text as Text
+import qualified Data.Text.IO as TextIO
+import Data.Time (getCurrentTime, utcToLocalTime)
+import Data.Time.Format (defaultTimeLocale, formatTime)
+import Data.Time.LocalTime (getCurrentTimeZone)
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Test as Test
+import System.Directory (doesFileExist)
+import System.Process (readProcessWithExitCode)
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Hledger"
+ [ Test.unit "hledgerBalanceTool has correct name" <| do
+ Engine.toolName hledgerBalanceTool Test.@=? "hledger_balance",
+ Test.unit "hledgerRegisterTool has correct name" <| do
+ Engine.toolName hledgerRegisterTool Test.@=? "hledger_register",
+ Test.unit "hledgerAddTool has correct name" <| do
+ Engine.toolName hledgerAddTool Test.@=? "hledger_add",
+ Test.unit "hledgerIncomeStatementTool has correct name" <| do
+ Engine.toolName hledgerIncomeStatementTool Test.@=? "hledger_income_statement",
+ Test.unit "hledgerBalanceSheetTool has correct name" <| do
+ Engine.toolName hledgerBalanceSheetTool Test.@=? "hledger_balance_sheet",
+ Test.unit "allHledgerTools has 5 tools" <| do
+ length allHledgerTools Test.@=? 5
+ ]
+
+fundDir :: FilePath
+fundDir = "/home/ben/fund"
+
+journalFile :: FilePath
+journalFile = fundDir <> "/ledger.journal"
+
+transactionsFile :: FilePath
+transactionsFile = fundDir <> "/telegram-transactions.journal"
+
+runHledgerInFund :: [String] -> IO (Either Text Text)
+runHledgerInFund args = do
+ let fullArgs :: [String]
+ fullArgs = ["-f", journalFile] <> args
+ hledgerCmd :: String
+ hledgerCmd = "hledger " ++ List.unwords fullArgs
+ cmd :: String
+ cmd = "cd " ++ fundDir ++ " && " ++ hledgerCmd
+ result <-
+ try <| readProcessWithExitCode "nix-shell" [fundDir ++ "/shell.nix", "--run", cmd] ""
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("hledger error: " <> tshow e))
+ Right (exitCode, stdoutStr, stderrStr) ->
+ case exitCode of
+ ExitSuccess -> pure (Right (Text.pack stdoutStr))
+ ExitFailure code ->
+ pure (Left ("hledger failed (" <> tshow code <> "): " <> Text.pack stderrStr))
+
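+-- As an illustration, @runHledgerInFund ["bal", "-1", "--flat", "-X", "USD"]@
+-- ends up running roughly:
+--
+-- > nix-shell /home/ben/fund/shell.nix --run \
+-- >   "cd /home/ben/fund && hledger -f /home/ben/fund/ledger.journal bal -1 --flat -X USD"
+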
+allHledgerTools :: [Engine.Tool]
+allHledgerTools =
+ [ hledgerBalanceTool,
+ hledgerRegisterTool,
+ hledgerAddTool,
+ hledgerIncomeStatementTool,
+ hledgerBalanceSheetTool
+ ]
+
+queryBalance :: Maybe Text -> Maybe Text -> Maybe Text -> IO (Either Text Text)
+queryBalance maybePattern maybePeriod maybeCurrency = do
+ let patternArg = maybe [] (\p -> [Text.unpack p]) maybePattern
+ periodArg = maybe [] (\p -> ["-p", "'" ++ Text.unpack p ++ "'"]) maybePeriod
+ currency = maybe "USD" Text.unpack maybeCurrency
+ currencyArg = ["-X", currency]
+ runHledgerInFund (["bal", "-1", "--flat"] <> currencyArg <> patternArg <> periodArg)
+
+queryRegister :: Text -> Maybe Int -> Maybe Text -> Maybe Text -> IO (Either Text Text)
+queryRegister accountPattern maybeLimit maybeCurrency maybePeriod = do
+ let limitArg = maybe ["-n", "10"] (\n -> ["-n", show n]) maybeLimit
+ currency = maybe "USD" Text.unpack maybeCurrency
+ currencyArg = ["-X", currency]
+ periodArg = maybe [] (\p -> ["-p", "'" ++ Text.unpack p ++ "'"]) maybePeriod
+ runHledgerInFund (["reg", Text.unpack accountPattern] <> currencyArg <> periodArg <> limitArg)
+
+incomeStatement :: Maybe Text -> Maybe Text -> IO (Either Text Text)
+incomeStatement maybePeriod maybeCurrency = do
+ let periodArg = maybe ["-p", "thismonth"] (\p -> ["-p", "'" ++ Text.unpack p ++ "'"]) maybePeriod
+ currency = maybe "USD" Text.unpack maybeCurrency
+ currencyArg = ["-X", currency]
+ runHledgerInFund (["is"] <> currencyArg <> periodArg)
+
+balanceSheet :: Maybe Text -> IO (Either Text Text)
+balanceSheet maybeCurrency = do
+ let currency = maybe "USD" Text.unpack maybeCurrency
+ currencyArg = ["-X", currency]
+ runHledgerInFund (["bs"] <> currencyArg)
+
+addTransaction :: Text -> Text -> Text -> Text -> Maybe Text -> IO (Either Text Text)
+addTransaction description fromAccount toAccount amount maybeDate = do
+ now <- getCurrentTime
+ tz <- getCurrentTimeZone
+ let localTime = utcToLocalTime tz now
+ todayStr = formatTime defaultTimeLocale "%Y-%m-%d" localTime
+ dateStr = maybe todayStr Text.unpack maybeDate
+ transaction =
+ Text.unlines
+ [ "",
+ Text.pack dateStr <> " " <> description,
+ " " <> toAccount <> " " <> amount,
+ " " <> fromAccount
+ ]
+ exists <- doesFileExist transactionsFile
+ unless exists <| do
+ TextIO.writeFile transactionsFile "; Transactions added via Telegram bot\n"
+ TextIO.appendFile transactionsFile transaction
+ pure (Right ("Transaction added:\n" <> transaction))
+
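+-- The entry appended to telegram-transactions.journal looks roughly like
+-- this (illustrative values):
+--
+-- > 2025-01-15 Haircut at Joe's Barber
+-- >   ex:me:want:grooming  $30.00
+-- >   as:me:cash:checking
+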
+hledgerBalanceTool :: Engine.Tool
+hledgerBalanceTool =
+ Engine.Tool
+ { Engine.toolName = "hledger_balance",
+ Engine.toolDescription =
+ "Query account balances from hledger. "
+ <> "Account patterns: 'as' (assets), 'li' (liabilities), 'ex' (expenses), 'in' (income), 'eq' (equity). "
+ <> "Can drill down like 'as:me:cash' or 'ex:us:need'. "
+ <> "Currency defaults to USD but can be changed (e.g., 'BTC', 'ETH'). "
+ <> "Period uses hledger syntax: 'thismonth', 'lastmonth', 'thisyear', '2024', '2024-06', "
+ <> "'from 2024-01-01 to 2024-06-30', 'from 2024-06-01'.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "account_pattern"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Account pattern to filter (e.g., 'as:me:cash', 'ex', 'li:us:cred')" :: Text)
+ ],
+ "period"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("hledger period: 'thismonth', 'lastmonth', '2024', '2024-06', 'from 2024-01-01 to 2024-06-30'" :: Text)
+ ],
+ "currency"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Currency to display values in (default: 'USD'). Examples: 'BTC', 'ETH', 'EUR'" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
+ Engine.toolExecute = executeBalance
+ }
+
+executeBalance :: Aeson.Value -> IO Aeson.Value
+executeBalance v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: BalanceArgs) -> do
+ result <- queryBalance (baPattern args) (baPeriod args) (baCurrency args)
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right output ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "balances" .= output
+ ]
+ )
+
+data BalanceArgs = BalanceArgs
+ { baPattern :: Maybe Text,
+ baPeriod :: Maybe Text,
+ baCurrency :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON BalanceArgs where
+ parseJSON =
+ Aeson.withObject "BalanceArgs" <| \v ->
+ (BalanceArgs </ (v .:? "account_pattern"))
+ <*> (v .:? "period")
+ <*> (v .:? "currency")
+
+hledgerRegisterTool :: Engine.Tool
+hledgerRegisterTool =
+ Engine.Tool
+ { Engine.toolName = "hledger_register",
+ Engine.toolDescription =
+ "Show recent transactions for an account. "
+ <> "Useful for seeing transaction history and checking recent spending. "
+ <> "Currency defaults to USD.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "account_pattern"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Account pattern to show transactions for (e.g., 'ex:us:need:grocery')" :: Text)
+ ],
+ "limit"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Max transactions to show (default: 10)" :: Text)
+ ],
+ "currency"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Currency to display values in (default: 'USD')" :: Text)
+ ],
+ "period"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("hledger period: 'thismonth', 'lastmonth', '2024', '2024-06', 'from 2024-06-01 to 2024-12-31'" :: Text)
+ ]
+ ],
+ "required" .= (["account_pattern"] :: [Text])
+ ],
+ Engine.toolExecute = executeRegister
+ }
+
+executeRegister :: Aeson.Value -> IO Aeson.Value
+executeRegister v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: RegisterArgs) -> do
+ result <- queryRegister (raPattern args) (raLimit args) (raCurrency args) (raPeriod args)
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right output ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "transactions" .= output
+ ]
+ )
+
+data RegisterArgs = RegisterArgs
+ { raPattern :: Text,
+ raLimit :: Maybe Int,
+ raCurrency :: Maybe Text,
+ raPeriod :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON RegisterArgs where
+ parseJSON =
+ Aeson.withObject "RegisterArgs" <| \v ->
+ (RegisterArgs </ (v .: "account_pattern"))
+ <*> (v .:? "limit")
+ <*> (v .:? "currency")
+ <*> (v .:? "period")
+
+hledgerAddTool :: Engine.Tool
+hledgerAddTool =
+ Engine.Tool
+ { Engine.toolName = "hledger_add",
+ Engine.toolDescription =
+ "Add a new transaction to the ledger. "
+ <> "Use for recording expenses like 'I spent $30 at the barber'. "
+ <> "Account naming: ex:me:want (personal discretionary), ex:us:need (shared necessities), "
+ <> "as:me:cash:checking (bank account), li:us:cred:chase (credit card). "
+ <> "Common expense accounts: ex:us:need:grocery, ex:us:need:utilities, ex:me:want:dining, ex:me:want:grooming.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "description"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Transaction description (e.g., 'Haircut at Joe's Barber')" :: Text)
+ ],
+ "from_account"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Account paying (e.g., 'as:me:cash:checking', 'li:us:cred:chase')" :: Text)
+ ],
+ "to_account"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Account receiving (e.g., 'ex:me:want:grooming')" :: Text)
+ ],
+ "amount"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Amount with currency (e.g., '$30.00', '30 USD')" :: Text)
+ ],
+ "date"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Transaction date YYYY-MM-DD (default: today)" :: Text)
+ ]
+ ],
+ "required" .= (["description", "from_account", "to_account", "amount"] :: [Text])
+ ],
+ Engine.toolExecute = executeAdd
+ }
+
+executeAdd :: Aeson.Value -> IO Aeson.Value
+executeAdd v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: AddArgs) -> do
+ result <-
+ addTransaction
+ (aaDescription args)
+ (aaFromAccount args)
+ (aaToAccount args)
+ (aaAmount args)
+ (aaDate args)
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right msg ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message" .= msg
+ ]
+ )
+
+data AddArgs = AddArgs
+ { aaDescription :: Text,
+ aaFromAccount :: Text,
+ aaToAccount :: Text,
+ aaAmount :: Text,
+ aaDate :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON AddArgs where
+ parseJSON =
+ Aeson.withObject "AddArgs" <| \v ->
+ (AddArgs </ (v .: "description"))
+ <*> (v .: "from_account")
+ <*> (v .: "to_account")
+ <*> (v .: "amount")
+ <*> (v .:? "date")
+
+hledgerIncomeStatementTool :: Engine.Tool
+hledgerIncomeStatementTool =
+ Engine.Tool
+ { Engine.toolName = "hledger_income_statement",
+ Engine.toolDescription =
+ "Show income statement (income vs expenses) for a period. "
+ <> "Good for seeing 'how much did I spend this month' or 'what's my net income'. "
+ <> "Currency defaults to USD. "
+ <> "Period uses hledger syntax: 'thismonth', 'lastmonth', '2024', 'from 2024-01-01 to 2024-06-30'.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "period"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("hledger period (default: 'thismonth'): 'lastmonth', '2024', '2024-06', 'from 2024-01-01 to 2024-06-30'" :: Text)
+ ],
+ "currency"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Currency to display values in (default: 'USD')" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
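+
+-- For example, given a header line like
+--
+-- > List-Unsubscribe: <https://lists.example.com/unsubscribe/abc123>
+--
+-- 'extractUnsubscribeUrl' should yield the https URL, which
+-- 'clickUnsubscribeLink' below then requests.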
+ Engine.toolExecute = executeIncomeStatement
+ }
+
+executeIncomeStatement :: Aeson.Value -> IO Aeson.Value
+executeIncomeStatement v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: IncomeStatementArgs) -> do
+ result <- incomeStatement (isaPeriod args) (isaCurrency args)
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right output ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "income_statement" .= output
+ ]
+ )
+
+data IncomeStatementArgs = IncomeStatementArgs
+ { isaPeriod :: Maybe Text,
+ isaCurrency :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON IncomeStatementArgs where
+ parseJSON =
+ Aeson.withObject "IncomeStatementArgs" <| \v ->
+ (IncomeStatementArgs </ (v .:? "period"))
+ <*> (v .:? "currency")
+
+hledgerBalanceSheetTool :: Engine.Tool
+hledgerBalanceSheetTool =
+ Engine.Tool
+ { Engine.toolName = "hledger_balance_sheet",
+ Engine.toolDescription =
+ "Show current balance sheet (assets, liabilities, net worth). "
+ <> "Good for seeing 'what's my net worth' or 'how much do I have'. "
+ <> "Currency defaults to USD.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "currency"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Currency to display values in (default: 'USD')" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
+ Engine.toolExecute = executeBalanceSheet
+ }
+
+executeBalanceSheet :: Aeson.Value -> IO Aeson.Value
+executeBalanceSheet v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: BalanceSheetArgs) -> do
+ result <- balanceSheet (bsCurrency args)
+ case result of
+ Left err -> pure (Aeson.object ["error" .= err])
+ Right output ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "balance_sheet" .= output
+ ]
+ )
+
+newtype BalanceSheetArgs = BalanceSheetArgs
+ { bsCurrency :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON BalanceSheetArgs where
+ parseJSON =
+ Aeson.withObject "BalanceSheetArgs" <| \v ->
+ BalanceSheetArgs </ (v .:? "currency")
diff --git a/Omni/Agent/Tools/Http.hs b/Omni/Agent/Tools/Http.hs
new file mode 100644
index 0000000..d996ff5
--- /dev/null
+++ b/Omni/Agent/Tools/Http.hs
@@ -0,0 +1,338 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | HTTP request tools for agent API interactions.
+--
+-- Provides http_get and http_post tools for making HTTP requests.
+-- Supports headers, query params, and JSON body.
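+--
+-- Example tool calls (URLs and values are illustrative):
+--
+-- > http_get  {"url": "https://httpbin.org/get", "params": {"q": "hello"}}
+-- > http_post {"url": "https://httpbin.org/post", "body": {"key": "value"}}
+--
+-- Both return an object of the shape @{"status": ..., "headers": {...}, "body": "..."}@.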
+--
+-- : out omni-agent-tools-http
+-- : dep aeson
+-- : dep http-conduit
+module Omni.Agent.Tools.Http
+ ( -- * Tools
+ httpGetTool,
+ httpPostTool,
+ allHttpTools,
+
+ -- * Types
+ HttpGetArgs (..),
+ HttpPostArgs (..),
+ HttpResult (..),
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.Key as Key
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.CaseInsensitive as CI
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import qualified Network.HTTP.Client as HTTPClient
+import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Test as Test
+import System.Timeout (timeout)
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Http"
+ [ Test.unit "httpGetTool has correct name" <| do
+ Engine.toolName httpGetTool Test.@=? "http_get",
+ Test.unit "httpPostTool has correct name" <| do
+ Engine.toolName httpPostTool Test.@=? "http_post",
+ Test.unit "allHttpTools has 2 tools" <| do
+ length allHttpTools Test.@=? 2,
+ Test.unit "HttpGetArgs parses correctly" <| do
+ let json = Aeson.object ["url" .= ("https://example.com" :: Text)]
+ case Aeson.fromJSON json of
+ Aeson.Success (args :: HttpGetArgs) -> httpGetUrl args Test.@=? "https://example.com"
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "HttpGetArgs parses with headers" <| do
+ let json =
+ Aeson.object
+ [ "url" .= ("https://api.example.com" :: Text),
+ "headers" .= Aeson.object ["Authorization" .= ("Bearer token" :: Text)]
+ ]
+ case Aeson.fromJSON json of
+ Aeson.Success (args :: HttpGetArgs) -> do
+ httpGetUrl args Test.@=? "https://api.example.com"
+ isJust (httpGetHeaders args) Test.@=? True
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "HttpPostArgs parses correctly" <| do
+ let json =
+ Aeson.object
+ [ "url" .= ("https://api.example.com" :: Text),
+ "body" .= Aeson.object ["key" .= ("value" :: Text)]
+ ]
+ case Aeson.fromJSON json of
+ Aeson.Success (args :: HttpPostArgs) -> do
+ httpPostUrl args Test.@=? "https://api.example.com"
+ isJust (httpPostBody args) Test.@=? True
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "HttpResult JSON roundtrip" <| do
+ let result =
+ HttpResult
+ { httpResultStatus = 200,
+ httpResultHeaders = Aeson.object ["Content-Type" .= ("application/json" :: Text)],
+ httpResultBody = "{\"ok\": true}"
+ }
+ case Aeson.decode (Aeson.encode result) of
+ Nothing -> Test.assertFailure "Failed to decode HttpResult"
+ Just decoded -> httpResultStatus decoded Test.@=? 200,
+ Test.unit "http_get fetches real URL" <| do
+ let args = Aeson.object ["url" .= ("https://httpbin.org/get" :: Text)]
+ result <- Engine.toolExecute httpGetTool args
+ case Aeson.fromJSON result of
+ Aeson.Success (r :: HttpResult) -> do
+ httpResultStatus r Test.@=? 200
+ ("httpbin.org" `Text.isInfixOf` httpResultBody r) Test.@=? True
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "http_post with JSON body" <| do
+ let args =
+ Aeson.object
+ [ "url" .= ("https://httpbin.org/post" :: Text),
+ "body" .= Aeson.object ["test" .= ("value" :: Text)]
+ ]
+ result <- Engine.toolExecute httpPostTool args
+ case Aeson.fromJSON result of
+ Aeson.Success (r :: HttpResult) -> do
+ httpResultStatus r Test.@=? 200
+ ("test" `Text.isInfixOf` httpResultBody r) Test.@=? True
+ Aeson.Error e -> Test.assertFailure e
+ ]
+
+data HttpGetArgs = HttpGetArgs
+ { httpGetUrl :: Text,
+ httpGetHeaders :: Maybe Aeson.Object,
+ httpGetParams :: Maybe Aeson.Object
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON HttpGetArgs where
+ parseJSON =
+ Aeson.withObject "HttpGetArgs" <| \v ->
+ (HttpGetArgs </ (v .: "url"))
+ <*> (v .:? "headers")
+ <*> (v .:? "params")
+
+data HttpPostArgs = HttpPostArgs
+ { httpPostUrl :: Text,
+ httpPostHeaders :: Maybe Aeson.Object,
+ httpPostBody :: Maybe Aeson.Value,
+ httpPostContentType :: Maybe Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON HttpPostArgs where
+ parseJSON =
+ Aeson.withObject "HttpPostArgs" <| \v ->
+ (HttpPostArgs </ (v .: "url"))
+ <*> (v .:? "headers")
+ <*> (v .:? "body")
+ <*> (v .:? "content_type")
+
+data HttpResult = HttpResult
+ { httpResultStatus :: Int,
+ httpResultHeaders :: Aeson.Value,
+ httpResultBody :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON HttpResult where
+ toJSON r =
+ Aeson.object
+ [ "status" .= httpResultStatus r,
+ "headers" .= httpResultHeaders r,
+ "body" .= httpResultBody r
+ ]
+
+instance Aeson.FromJSON HttpResult where
+ parseJSON =
+ Aeson.withObject "HttpResult" <| \v ->
+ (HttpResult </ (v .: "status"))
+ <*> (v .: "headers")
+ <*> (v .: "body")
+
+allHttpTools :: [Engine.Tool]
+allHttpTools = [httpGetTool, httpPostTool]
+
+httpGetTool :: Engine.Tool
+httpGetTool =
+ Engine.Tool
+ { Engine.toolName = "http_get",
+ Engine.toolDescription =
+ "Make an HTTP GET request. Returns status code, headers, and response body. "
+ <> "Use for fetching data from APIs or web pages.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "url"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The URL to request" :: Text)
+ ],
+ "headers"
+ .= Aeson.object
+ [ "type" .= ("object" :: Text),
+ "description" .= ("Optional headers as key-value pairs" :: Text)
+ ],
+ "params"
+ .= Aeson.object
+ [ "type" .= ("object" :: Text),
+ "description" .= ("Optional query parameters as key-value pairs" :: Text)
+ ]
+ ],
+ "required" .= (["url"] :: [Text])
+ ],
+ Engine.toolExecute = executeHttpGet
+ }
+
+executeHttpGet :: Aeson.Value -> IO Aeson.Value
+executeHttpGet v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure <| mkError ("Invalid arguments: " <> Text.pack e)
+ Aeson.Success args -> do
+ let urlWithParams = case httpGetParams args of
+ Nothing -> httpGetUrl args
+ Just params ->
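+            -- NOTE: parameter values are appended to the URL verbatim (no percent-encoding)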
+ let paramList = [(k, v') | (k, v') <- KeyMap.toList params]
+ paramStr = Text.intercalate "&" [Key.toText k <> "=" <> valueToText v' | (k, v') <- paramList]
+ in if Text.null paramStr
+ then httpGetUrl args
+ else httpGetUrl args <> "?" <> paramStr
+ doHttpRequest "GET" urlWithParams (httpGetHeaders args) Nothing
+
+httpPostTool :: Engine.Tool
+httpPostTool =
+ Engine.Tool
+ { Engine.toolName = "http_post",
+ Engine.toolDescription =
+ "Make an HTTP POST request. Returns status code, headers, and response body. "
+ <> "Use for submitting data to APIs or forms.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "url"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The URL to request" :: Text)
+ ],
+ "headers"
+ .= Aeson.object
+ [ "type" .= ("object" :: Text),
+ "description" .= ("Optional headers as key-value pairs" :: Text)
+ ],
+ "body"
+ .= Aeson.object
+ [ "type" .= ("object" :: Text),
+ "description" .= ("Optional JSON body (object or string)" :: Text)
+ ],
+ "content_type"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Content type (default: application/json)" :: Text)
+ ]
+ ],
+ "required" .= (["url"] :: [Text])
+ ],
+ Engine.toolExecute = executeHttpPost
+ }
+
+executeHttpPost :: Aeson.Value -> IO Aeson.Value
+executeHttpPost v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure <| mkError ("Invalid arguments: " <> Text.pack e)
+ Aeson.Success args -> do
+ let contentType = fromMaybe "application/json" (httpPostContentType args)
+ body = case httpPostBody args of
+ Nothing -> Nothing
+ Just b -> Just (contentType, BL.toStrict (Aeson.encode b))
+ doHttpRequest "POST" (httpPostUrl args) (httpPostHeaders args) body
+
+doHttpRequest ::
+ ByteString ->
+ Text ->
+ Maybe Aeson.Object ->
+ Maybe (Text, ByteString) ->
+ IO Aeson.Value
+doHttpRequest method url mHeaders mBody = do
+ let timeoutMicros = 30 * 1000000
+ result <-
+ try <| do
+ req0 <- HTTP.parseRequest (Text.unpack url)
+ let req1 =
+ HTTP.setRequestMethod method
+ <| HTTP.setRequestHeader "User-Agent" ["OmniAgent/1.0"]
+ <| HTTP.setRequestResponseTimeout (HTTPClient.responseTimeoutMicro timeoutMicros)
+ <| req0
+ req2 = case mHeaders of
+ Nothing -> req1
+ Just hdrs -> foldr addHeader req1 (KeyMap.toList hdrs)
+ req3 = case mBody of
+ Nothing -> req2
+ Just (ct, bodyBytes) ->
+ HTTP.setRequestHeader "Content-Type" [TE.encodeUtf8 ct]
+ <| HTTP.setRequestBodyLBS (BL.fromStrict bodyBytes)
+ <| req2
+ mResp <- timeout timeoutMicros (HTTP.httpLBS req3)
+ case mResp of
+ Nothing -> pure (Left "Request timed out after 30 seconds")
+ Just resp -> pure (Right resp)
+ case result of
+ Left (e :: SomeException) -> pure <| mkError ("Request failed: " <> tshow e)
+ Right (Left err) -> pure <| mkError err
+ Right (Right response) -> do
+ let status = HTTP.getResponseStatusCode response
+ respHeaders = HTTP.getResponseHeaders response
+ headerObj =
+ Aeson.object
+ [ Key.fromText (TE.decodeUtf8 (CI.original k)) .= TE.decodeUtf8 v
+ | (k, v) <- respHeaders
+ ]
+ body = TE.decodeUtf8With (\_ _ -> Just '?') (BL.toStrict (HTTP.getResponseBody response))
+ pure
+ <| Aeson.toJSON
+ <| HttpResult
+ { httpResultStatus = status,
+ httpResultHeaders = headerObj,
+ httpResultBody = body
+ }
+ where
+ addHeader :: (Aeson.Key, Aeson.Value) -> HTTP.Request -> HTTP.Request
+ addHeader (k, v) req =
+ let headerName = CI.mk (TE.encodeUtf8 (Key.toText k))
+ headerValue = TE.encodeUtf8 (valueToText v)
+ in HTTP.addRequestHeader headerName headerValue req
+
+valueToText :: Aeson.Value -> Text
+valueToText (Aeson.String s) = s
+valueToText (Aeson.Number n) = tshow n
+valueToText (Aeson.Bool b) = if b then "true" else "false"
+valueToText Aeson.Null = ""
+valueToText other = TE.decodeUtf8 (BL.toStrict (Aeson.encode other))
+
+mkError :: Text -> Aeson.Value
+mkError err =
+ Aeson.object
+ [ "status" .= (-1 :: Int),
+ "headers" .= Aeson.object [],
+ "body" .= err
+ ]
diff --git a/Omni/Agent/Tools/Notes.hs b/Omni/Agent/Tools/Notes.hs
new file mode 100644
index 0000000..e3cef5d
--- /dev/null
+++ b/Omni/Agent/Tools/Notes.hs
@@ -0,0 +1,357 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Quick notes tool for agents.
+--
+-- Provides simple CRUD for tagged notes stored in memory.db.
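+--
+-- Example tool calls (values are illustrative):
+--
+-- > note_add    {"topic": "groceries", "content": "Buy milk"}
+-- > note_list   {"topic": "groceries", "limit": 10}
+-- > note_delete {"note_id": 1}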
+--
+-- : out omni-agent-tools-notes
+-- : dep aeson
+-- : dep sqlite-simple
+module Omni.Agent.Tools.Notes
+ ( -- * Tools
+ noteAddTool,
+ noteListTool,
+ noteDeleteTool,
+
+ -- * Direct API
+ Note (..),
+ createNote,
+ listNotes,
+ listNotesByTopic,
+ deleteNote,
+
+ -- * Database
+ initNotesTable,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Text as Text
+import Data.Time (UTCTime, getCurrentTime)
+import qualified Database.SQLite.Simple as SQL
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Memory as Memory
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Notes"
+ [ Test.unit "noteAddTool has correct schema" <| do
+ let tool = noteAddTool "test-user-id"
+ Engine.toolName tool Test.@=? "note_add",
+ Test.unit "noteListTool has correct schema" <| do
+ let tool = noteListTool "test-user-id"
+ Engine.toolName tool Test.@=? "note_list",
+ Test.unit "noteDeleteTool has correct schema" <| do
+ let tool = noteDeleteTool "test-user-id"
+ Engine.toolName tool Test.@=? "note_delete",
+ Test.unit "Note JSON roundtrip" <| do
+ now <- getCurrentTime
+ let n =
+ Note
+ { noteId = 1,
+ noteUserId = "user-123",
+ noteTopic = "groceries",
+ noteContent = "Buy milk",
+ noteCreatedAt = now
+ }
+ case Aeson.decode (Aeson.encode n) of
+ Nothing -> Test.assertFailure "Failed to decode Note"
+ Just decoded -> do
+ noteContent decoded Test.@=? "Buy milk"
+ noteTopic decoded Test.@=? "groceries"
+ ]
+
+data Note = Note
+ { noteId :: Int,
+ noteUserId :: Text,
+ noteTopic :: Text,
+ noteContent :: Text,
+ noteCreatedAt :: UTCTime
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON Note where
+ toJSON n =
+ Aeson.object
+ [ "id" .= noteId n,
+ "user_id" .= noteUserId n,
+ "topic" .= noteTopic n,
+ "content" .= noteContent n,
+ "created_at" .= noteCreatedAt n
+ ]
+
+instance Aeson.FromJSON Note where
+ parseJSON =
+ Aeson.withObject "Note" <| \v ->
+ (Note </ (v .: "id"))
+ <*> (v .: "user_id")
+ <*> (v .: "topic")
+ <*> (v .: "content")
+ <*> (v .: "created_at")
+
+instance SQL.FromRow Note where
+ fromRow =
+ (Note </ SQL.field)
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+
+initNotesTable :: SQL.Connection -> IO ()
+initNotesTable conn = do
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS notes (\
+ \ id INTEGER PRIMARY KEY AUTOINCREMENT,\
+ \ user_id TEXT NOT NULL,\
+ \ topic TEXT NOT NULL,\
+ \ content TEXT NOT NULL,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_notes_user ON notes(user_id)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_notes_topic ON notes(user_id, topic)"
+
+createNote :: Text -> Text -> Text -> IO Note
+createNote uid topic content = do
+ now <- getCurrentTime
+ Memory.withMemoryDb <| \conn -> do
+ initNotesTable conn
+ SQL.execute
+ conn
+ "INSERT INTO notes (user_id, topic, content, created_at) VALUES (?, ?, ?, ?)"
+ (uid, topic, content, now)
+ rowId <- SQL.lastInsertRowId conn
+ pure
+ Note
+ { noteId = fromIntegral rowId,
+ noteUserId = uid,
+ noteTopic = topic,
+ noteContent = content,
+ noteCreatedAt = now
+ }
+
+listNotes :: Text -> Int -> IO [Note]
+listNotes uid limit =
+ Memory.withMemoryDb <| \conn -> do
+ initNotesTable conn
+ SQL.query
+ conn
+ "SELECT id, user_id, topic, content, created_at \
+ \FROM notes WHERE user_id = ? \
+ \ORDER BY created_at DESC LIMIT ?"
+ (uid, limit)
+
+listNotesByTopic :: Text -> Text -> Int -> IO [Note]
+listNotesByTopic uid topic limit =
+ Memory.withMemoryDb <| \conn -> do
+ initNotesTable conn
+ SQL.query
+ conn
+ "SELECT id, user_id, topic, content, created_at \
+ \FROM notes WHERE user_id = ? AND topic = ? \
+ \ORDER BY created_at DESC LIMIT ?"
+ (uid, topic, limit)
+
+deleteNote :: Text -> Int -> IO Bool
+deleteNote uid nid =
+ Memory.withMemoryDb <| \conn -> do
+ initNotesTable conn
+ SQL.execute
+ conn
+ "DELETE FROM notes WHERE id = ? AND user_id = ?"
+ (nid, uid)
+ changes <- SQL.changes conn
+ pure (changes > 0)
+
+noteAddTool :: Text -> Engine.Tool
+noteAddTool uid =
+ Engine.Tool
+ { Engine.toolName = "note_add",
+ Engine.toolDescription =
+ "Add a quick note on a topic. Use for reminders, lists, ideas, or anything "
+ <> "the user wants to jot down. Topics help organize notes (e.g., 'groceries', "
+ <> "'ideas', 'todo', 'recipes').",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "topic"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Topic/category for the note (e.g., 'groceries', 'todo')" :: Text)
+ ],
+ "content"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The note content" :: Text)
+ ]
+ ],
+ "required" .= (["topic", "content"] :: [Text])
+ ],
+ Engine.toolExecute = executeNoteAdd uid
+ }
+
+executeNoteAdd :: Text -> Aeson.Value -> IO Aeson.Value
+executeNoteAdd uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: NoteAddArgs) -> do
+ newNote <- createNote uid (naTopic args) (naContent args)
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "note_id" .= noteId newNote,
+ "message" .= ("Added note to '" <> noteTopic newNote <> "': " <> noteContent newNote)
+ ]
+ )
+
+data NoteAddArgs = NoteAddArgs
+ { naTopic :: Text,
+ naContent :: Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON NoteAddArgs where
+ parseJSON =
+ Aeson.withObject "NoteAddArgs" <| \v ->
+ (NoteAddArgs </ (v .: "topic"))
+ <*> (v .: "content")
+
+noteListTool :: Text -> Engine.Tool
+noteListTool uid =
+ Engine.Tool
+ { Engine.toolName = "note_list",
+ Engine.toolDescription =
+ "List notes, optionally filtered by topic. Use to show the user their "
+ <> "saved notes or check what's on a specific list.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "topic"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Filter by topic (optional, omit to list all)" :: Text)
+ ],
+ "limit"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Max notes to return (default: 20)" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
+ Engine.toolExecute = executeNoteList uid
+ }
+
+executeNoteList :: Text -> Aeson.Value -> IO Aeson.Value
+executeNoteList uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: NoteListArgs) -> do
+ let lim = min 50 (max 1 (nlLimit args))
+ notes <- case nlTopic args of
+ Just topic -> listNotesByTopic uid topic lim
+ Nothing -> listNotes uid lim
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "count" .= length notes,
+ "notes" .= formatNotesForLLM notes
+ ]
+ )
+
+formatNotesForLLM :: [Note] -> Text
+formatNotesForLLM [] = "No notes found."
+formatNotesForLLM notes =
+ Text.unlines (map formatNote notes)
+ where
+ formatNote n =
+ "[" <> noteTopic n <> "] " <> noteContent n <> " (id: " <> tshow (noteId n) <> ")"
+
+data NoteListArgs = NoteListArgs
+ { nlTopic :: Maybe Text,
+ nlLimit :: Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON NoteListArgs where
+ parseJSON =
+ Aeson.withObject "NoteListArgs" <| \v ->
+ (NoteListArgs </ (v .:? "topic"))
+ <*> (v .:? "limit" .!= 20)
+
+noteDeleteTool :: Text -> Engine.Tool
+noteDeleteTool uid =
+ Engine.Tool
+ { Engine.toolName = "note_delete",
+ Engine.toolDescription =
+ "Delete a note by its ID. Use after the user says they've completed an item "
+ <> "or no longer need a note.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "note_id"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("The ID of the note to delete" :: Text)
+ ]
+ ],
+ "required" .= (["note_id"] :: [Text])
+ ],
+ Engine.toolExecute = executeNoteDelete uid
+ }
+
+executeNoteDelete :: Text -> Aeson.Value -> IO Aeson.Value
+executeNoteDelete uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: NoteDeleteArgs) -> do
+ deleted <- deleteNote uid (ndNoteId args)
+ if deleted
+ then
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message" .= ("Note deleted" :: Text)
+ ]
+ )
+ else
+ pure
+ ( Aeson.object
+ [ "success" .= False,
+ "error" .= ("Note not found or already deleted" :: Text)
+ ]
+ )
+
+newtype NoteDeleteArgs = NoteDeleteArgs
+ { ndNoteId :: Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON NoteDeleteArgs where
+ parseJSON =
+ Aeson.withObject "NoteDeleteArgs" <| \v ->
+ NoteDeleteArgs </ (v .: "note_id")
diff --git a/Omni/Agent/Tools/Outreach.hs b/Omni/Agent/Tools/Outreach.hs
new file mode 100644
index 0000000..e576cbd
--- /dev/null
+++ b/Omni/Agent/Tools/Outreach.hs
@@ -0,0 +1,513 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Outreach approval queue for agent use.
+--
+-- Provides tools for creating and tracking outreach drafts that require
+-- human approval before sending (emails, messages, etc.).
+--
+-- Drafts flow: pending -> approved -> sent (or rejected)
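+--
+-- Drafts are stored as JSON files, one subdirectory per status under
+-- 'outreachDir' (illustrative layout):
+--
+-- > <outreachDir>/
+-- >   pending/<uuid>.json
+-- >   approved/<uuid>.json
+-- >   rejected/<uuid>.json
+-- >   sent/<uuid>.json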
+--
+-- : out omni-agent-tools-outreach
+-- : dep aeson
+-- : dep uuid
+-- : dep directory
+module Omni.Agent.Tools.Outreach
+ ( -- * Tools
+ outreachDraftTool,
+ outreachListTool,
+ outreachStatusTool,
+ allOutreachTools,
+
+ -- * Types
+ OutreachDraft (..),
+ OutreachType (..),
+ OutreachStatus (..),
+
+ -- * Direct API
+ createDraft,
+ listDrafts,
+ getDraft,
+ approveDraft,
+ rejectDraft,
+ markSent,
+ getPendingCount,
+
+ -- * Paths
+ outreachDir,
+ pendingDir,
+ approvedDir,
+ rejectedDir,
+ sentDir,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Control.Monad.Fail (MonadFail (fail))
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.IO as TextIO
+import Data.Time (UTCTime, getCurrentTime)
+import qualified Data.UUID as UUID
+import qualified Data.UUID.V4 as UUID
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Paths as Paths
+import qualified Omni.Test as Test
+import qualified System.Directory as Directory
+import System.FilePath ((</>))
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Outreach"
+ [ Test.unit "outreachDraftTool has correct name" <| do
+ Engine.toolName outreachDraftTool Test.@=? "outreach_draft",
+ Test.unit "outreachListTool has correct name" <| do
+ Engine.toolName outreachListTool Test.@=? "outreach_list",
+ Test.unit "outreachStatusTool has correct name" <| do
+ Engine.toolName outreachStatusTool Test.@=? "outreach_status",
+ Test.unit "allOutreachTools has 3 tools" <| do
+ length allOutreachTools Test.@=? 3,
+ Test.unit "OutreachDraft JSON roundtrip" <| do
+ now <- getCurrentTime
+ let draft =
+ OutreachDraft
+ { draftId = "test-id-123",
+ draftType = Email,
+ draftCreatedAt = now,
+ draftSubject = Just "Test subject",
+ draftRecipient = "test@example.com",
+ draftBody = "Hello, this is a test.",
+ draftContext = "Testing outreach system",
+ draftStatus = Pending,
+ draftRejectReason = Nothing
+ }
+ case Aeson.decode (Aeson.encode draft) of
+ Nothing -> Test.assertFailure "Failed to decode OutreachDraft"
+ Just decoded -> do
+ draftId decoded Test.@=? "test-id-123"
+ draftType decoded Test.@=? Email
+ draftRecipient decoded Test.@=? "test@example.com",
+ Test.unit "OutreachType JSON roundtrip" <| do
+ case Aeson.decode (Aeson.encode Email) of
+ Just Email -> pure ()
+ _ -> Test.assertFailure "Failed to decode Email"
+ case Aeson.decode (Aeson.encode Message) of
+ Just Message -> pure ()
+ _ -> Test.assertFailure "Failed to decode Message",
+ Test.unit "OutreachStatus JSON roundtrip" <| do
+ let statuses = [Pending, Approved, Rejected, Sent]
+ forM_ statuses <| \s ->
+ case Aeson.decode (Aeson.encode s) of
+ Nothing -> Test.assertFailure ("Failed to decode " <> show s)
+ Just decoded -> decoded Test.@=? s
+ ]
+
+outreachDir :: FilePath
+outreachDir = Paths.outreachDir
+
+pendingDir :: FilePath
+pendingDir = outreachDir </> "pending"
+
+approvedDir :: FilePath
+approvedDir = outreachDir </> "approved"
+
+rejectedDir :: FilePath
+rejectedDir = outreachDir </> "rejected"
+
+sentDir :: FilePath
+sentDir = outreachDir </> "sent"
+
+data OutreachType = Email | Message
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON OutreachType where
+ toJSON Email = Aeson.String "email"
+ toJSON Message = Aeson.String "message"
+
+instance Aeson.FromJSON OutreachType where
+ parseJSON =
+ Aeson.withText "OutreachType" <| \t ->
+ case Text.toLower t of
+ "email" -> pure Email
+ "message" -> pure Message
+ _ -> fail "OutreachType must be 'email' or 'message'"
+
+data OutreachStatus = Pending | Approved | Rejected | Sent
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON OutreachStatus where
+ toJSON Pending = Aeson.String "pending"
+ toJSON Approved = Aeson.String "approved"
+ toJSON Rejected = Aeson.String "rejected"
+ toJSON Sent = Aeson.String "sent"
+
+instance Aeson.FromJSON OutreachStatus where
+ parseJSON =
+ Aeson.withText "OutreachStatus" <| \t ->
+ case Text.toLower t of
+ "pending" -> pure Pending
+ "approved" -> pure Approved
+ "rejected" -> pure Rejected
+ "sent" -> pure Sent
+ _ -> fail "OutreachStatus must be 'pending', 'approved', 'rejected', or 'sent'"
+
+data OutreachDraft = OutreachDraft
+ { draftId :: Text,
+ draftType :: OutreachType,
+ draftCreatedAt :: UTCTime,
+ draftSubject :: Maybe Text,
+ draftRecipient :: Text,
+ draftBody :: Text,
+ draftContext :: Text,
+ draftStatus :: OutreachStatus,
+ draftRejectReason :: Maybe Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON OutreachDraft where
+ toJSON d =
+ Aeson.object
+ [ "id" .= draftId d,
+ "type" .= draftType d,
+ "created_at" .= draftCreatedAt d,
+ "subject" .= draftSubject d,
+ "recipient" .= draftRecipient d,
+ "body" .= draftBody d,
+ "context" .= draftContext d,
+ "status" .= draftStatus d,
+ "reject_reason" .= draftRejectReason d
+ ]
+
+instance Aeson.FromJSON OutreachDraft where
+ parseJSON =
+ Aeson.withObject "OutreachDraft" <| \v ->
+ (OutreachDraft </ (v .: "id"))
+ <*> (v .: "type")
+ <*> (v .: "created_at")
+ <*> (v .:? "subject")
+ <*> (v .: "recipient")
+ <*> (v .: "body")
+ <*> (v .: "context")
+ <*> (v .: "status")
+ <*> (v .:? "reject_reason")
+
+ensureDirs :: IO ()
+ensureDirs = do
+ Directory.createDirectoryIfMissing True pendingDir
+ Directory.createDirectoryIfMissing True approvedDir
+ Directory.createDirectoryIfMissing True rejectedDir
+ Directory.createDirectoryIfMissing True sentDir
+
+draftPath :: FilePath -> Text -> FilePath
+draftPath dir draftId' = dir </> (Text.unpack draftId' <> ".json")
+
+saveDraft :: OutreachDraft -> IO ()
+saveDraft draft = do
+ ensureDirs
+ let dir = case draftStatus draft of
+ Pending -> pendingDir
+ Approved -> approvedDir
+ Rejected -> rejectedDir
+ Sent -> sentDir
+ path = draftPath dir (draftId draft)
+ TextIO.writeFile path (TE.decodeUtf8 (BL.toStrict (Aeson.encode draft)))
+
+createDraft :: OutreachType -> Text -> Maybe Text -> Text -> Text -> IO OutreachDraft
+createDraft otype recipient subject body context = do
+ uuid <- UUID.nextRandom
+ now <- getCurrentTime
+ let draft =
+ OutreachDraft
+ { draftId = UUID.toText uuid,
+ draftType = otype,
+ draftCreatedAt = now,
+ draftSubject = subject,
+ draftRecipient = recipient,
+ draftBody = body,
+ draftContext = context,
+ draftStatus = Pending,
+ draftRejectReason = Nothing
+ }
+ saveDraft draft
+ pure draft
+
+listDrafts :: OutreachStatus -> IO [OutreachDraft]
+listDrafts status = do
+ ensureDirs
+ let dir = case status of
+ Pending -> pendingDir
+ Approved -> approvedDir
+ Rejected -> rejectedDir
+ Sent -> sentDir
+ files <- Directory.listDirectory dir
+ let jsonFiles = filter (".json" `isSuffixOf`) files
+ drafts <-
+ forM jsonFiles <| \f -> do
+ content <- TextIO.readFile (dir </> f)
+ pure (Aeson.decode (BL.fromStrict (TE.encodeUtf8 content)))
+ pure (catMaybes drafts)
+
+getDraft :: Text -> IO (Maybe OutreachDraft)
+getDraft draftId' = do
+ ensureDirs
+ let dirs = [pendingDir, approvedDir, rejectedDir, sentDir]
+ findFirst dirs
+ where
+ findFirst [] = pure Nothing
+ findFirst (dir : rest) = do
+ let path = draftPath dir draftId'
+ exists <- Directory.doesFileExist path
+ if exists
+ then do
+ content <- TextIO.readFile path
+ pure (Aeson.decode (BL.fromStrict (TE.encodeUtf8 content)))
+ else findFirst rest
+
+moveDraft :: Text -> OutreachStatus -> OutreachStatus -> Maybe Text -> IO (Either Text OutreachDraft)
+moveDraft draftId' fromStatus toStatus reason = do
+ ensureDirs
+ let fromDir = case fromStatus of
+ Pending -> pendingDir
+ Approved -> approvedDir
+ Rejected -> rejectedDir
+ Sent -> sentDir
+ fromPath = draftPath fromDir draftId'
+ exists <- Directory.doesFileExist fromPath
+ if not exists
+ then pure (Left ("Draft not found in " <> tshow fromStatus <> " queue"))
+ else do
+ content <- TextIO.readFile fromPath
+ case Aeson.decode (BL.fromStrict (TE.encodeUtf8 content)) of
+ Nothing -> pure (Left "Failed to parse draft")
+ Just draft -> do
+ let updated = draft {draftStatus = toStatus, draftRejectReason = reason}
+ Directory.removeFile fromPath
+ saveDraft updated
+ pure (Right updated)
+
+approveDraft :: Text -> IO (Either Text OutreachDraft)
+approveDraft draftId' = moveDraft draftId' Pending Approved Nothing
+
+rejectDraft :: Text -> Maybe Text -> IO (Either Text OutreachDraft)
+rejectDraft draftId' = moveDraft draftId' Pending Rejected
+
+markSent :: Text -> IO (Either Text OutreachDraft)
+markSent draftId' = moveDraft draftId' Approved Sent Nothing
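+
+-- Lifecycle sketch using the Direct API (the address and texts are illustrative):
+--
+-- > draft <- createDraft Email "alice@example.com" (Just "Intro") "Hi Alice, ..." "warm lead follow-up"
+-- > _ <- approveDraft (draftId draft)   -- pending  -> approved
+-- > _ <- markSent (draftId draft)       -- approved -> sent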
+
+getPendingCount :: IO Int
+getPendingCount = do
+ ensureDirs
+ files <- Directory.listDirectory pendingDir
+ pure (length (filter (".json" `isSuffixOf`) files))
+
+allOutreachTools :: [Engine.Tool]
+allOutreachTools =
+ [ outreachDraftTool,
+ outreachListTool,
+ outreachStatusTool
+ ]
+
+outreachDraftTool :: Engine.Tool
+outreachDraftTool =
+ Engine.Tool
+ { Engine.toolName = "outreach_draft",
+ Engine.toolDescription =
+ "Create a new outreach draft for Ben to review before sending. "
+ <> "Use this when you want to send an email or message on behalf of the business. "
+ <> "All outreach requires approval before it goes out.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "type"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "enum" .= (["email", "message"] :: [Text]),
+ "description" .= ("Type of outreach: 'email' or 'message'" :: Text)
+ ],
+ "recipient"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Email address or identifier of the recipient" :: Text)
+ ],
+ "subject"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Subject line (required for emails)" :: Text)
+ ],
+ "body"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The message content" :: Text)
+ ],
+ "context"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Explain why you're sending this - helps Ben review" :: Text)
+ ]
+ ],
+ "required" .= (["type", "recipient", "body", "context"] :: [Text])
+ ],
+ Engine.toolExecute = executeOutreachDraft
+ }
+
+executeOutreachDraft :: Aeson.Value -> IO Aeson.Value
+executeOutreachDraft v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: DraftArgs) -> do
+ let otype = case daType args of
+ "email" -> Email
+ _ -> Message
+ draft <- createDraft otype (daRecipient args) (daSubject args) (daBody args) (daContext args)
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "draft_id" .= draftId draft,
+ "message" .= ("Draft created and queued for review. ID: " <> draftId draft)
+ ]
+ )
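+
+-- Example tool call (a sketch; the fields mirror the JSON schema above):
+--
+-- > _ <- executeOutreachDraft <| Aeson.object
+-- >   [ "type" .= ("email" :: Text),
+-- >     "recipient" .= ("alice@example.com" :: Text),
+-- >     "subject" .= ("Quick intro" :: Text),
+-- >     "body" .= ("Hi Alice, ..." :: Text),
+-- >     "context" .= ("Follow-up from Tuesday's call" :: Text)
+-- >   ]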
+
+data DraftArgs = DraftArgs
+ { daType :: Text,
+ daRecipient :: Text,
+ daSubject :: Maybe Text,
+ daBody :: Text,
+ daContext :: Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON DraftArgs where
+ parseJSON =
+ Aeson.withObject "DraftArgs" <| \v ->
+ (DraftArgs </ (v .: "type"))
+ <*> (v .: "recipient")
+ <*> (v .:? "subject")
+ <*> (v .: "body")
+ <*> (v .: "context")
+
+outreachListTool :: Engine.Tool
+outreachListTool =
+ Engine.Tool
+ { Engine.toolName = "outreach_list",
+ Engine.toolDescription =
+ "List outreach drafts by status. Use to check what's pending approval, "
+ <> "what's been approved, or review past outreach.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "status"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "enum" .= (["pending", "approved", "rejected", "sent"] :: [Text]),
+ "description" .= ("Filter by status (default: pending)" :: Text)
+ ],
+ "limit"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Max drafts to return (default: 20)" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
+ Engine.toolExecute = executeOutreachList
+ }
+
+executeOutreachList :: Aeson.Value -> IO Aeson.Value
+executeOutreachList v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: ListArgs) -> do
+ let status = case laStatus args of
+ Just "approved" -> Approved
+ Just "rejected" -> Rejected
+ Just "sent" -> Sent
+ _ -> Pending
+ limit = min 50 (max 1 (laLimit args))
+ drafts <- listDrafts status
+ let limited = take limit drafts
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "status" .= tshow status,
+ "count" .= length limited,
+ "drafts" .= limited
+ ]
+ )
+
+data ListArgs = ListArgs
+ { laStatus :: Maybe Text,
+ laLimit :: Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON ListArgs where
+ parseJSON =
+ Aeson.withObject "ListArgs" <| \v ->
+ (ListArgs </ (v .:? "status"))
+ <*> (v .:? "limit" .!= 20)
+
+outreachStatusTool :: Engine.Tool
+outreachStatusTool =
+ Engine.Tool
+ { Engine.toolName = "outreach_status",
+ Engine.toolDescription =
+ "Check the status of a specific outreach draft by ID.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "draft_id"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The draft ID to check" :: Text)
+ ]
+ ],
+ "required" .= (["draft_id"] :: [Text])
+ ],
+ Engine.toolExecute = executeOutreachStatus
+ }
+
+executeOutreachStatus :: Aeson.Value -> IO Aeson.Value
+executeOutreachStatus v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: StatusArgs) -> do
+ mDraft <- getDraft (saId args)
+ case mDraft of
+ Nothing ->
+ pure (Aeson.object ["error" .= ("Draft not found" :: Text)])
+ Just draft ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "draft" .= draft
+ ]
+ )
+
+newtype StatusArgs = StatusArgs
+ { saId :: Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON StatusArgs where
+ parseJSON =
+ Aeson.withObject "StatusArgs" <| \v ->
+ StatusArgs </ (v .: "draft_id")
diff --git a/Omni/Agent/Tools/Pdf.hs b/Omni/Agent/Tools/Pdf.hs
new file mode 100644
index 0000000..7687234
--- /dev/null
+++ b/Omni/Agent/Tools/Pdf.hs
@@ -0,0 +1,180 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | PDF extraction tool using poppler-utils (pdftotext).
+--
+-- Extracts text from PDF files for LLM consumption.
+--
+-- : out omni-agent-tools-pdf
+-- : dep aeson
+-- : dep http-conduit
+-- : dep directory
+-- : dep process
+-- : dep temporary
+module Omni.Agent.Tools.Pdf
+ ( -- * Tool
+ pdfTool,
+
+ -- * Direct API
+ extractPdfText,
+ downloadAndExtract,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as Text
+import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Test as Test
+import System.IO (hClose)
+import System.IO.Temp (withSystemTempFile)
+import System.Process (readProcessWithExitCode)
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Pdf"
+ [ Test.unit "pdfTool has correct schema" <| do
+ let tool = pdfTool
+ Engine.toolName tool Test.@=? "read_pdf",
+ Test.unit "extractPdfText handles missing file" <| do
+ result <- extractPdfText "/nonexistent/file.pdf"
+ case result of
+ Left err -> ("No such file" `Text.isInfixOf` err || "pdftotext" `Text.isInfixOf` err) Test.@=? True
+ Right _ -> Test.assertFailure "Expected error for missing file",
+ Test.unit "chunkText splits correctly" <| do
+ let text = Text.replicate 5000 "a"
+ chunks = chunkText 1000 text
+ length chunks Test.@=? 5
+ all (\c -> Text.length c <= 1000) chunks Test.@=? True,
+ Test.unit "chunkText handles small text" <| do
+ let text = "small text"
+ chunks = chunkText 1000 text
+ chunks Test.@=? ["small text"]
+ ]
+
+data PdfArgs = PdfArgs
+ { pdfPath :: Text,
+ pdfMaxChars :: Maybe Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON PdfArgs where
+ parseJSON =
+ Aeson.withObject "PdfArgs" <| \v ->
+ (PdfArgs </ (v Aeson..: "path"))
+ <*> (v Aeson..:? "max_chars")
+
+pdfTool :: Engine.Tool
+pdfTool =
+ Engine.Tool
+ { Engine.toolName = "read_pdf",
+ Engine.toolDescription =
+ "Extract text from a PDF file. Use this when you receive a PDF document "
+ <> "and need to read its contents. Returns the extracted text.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "path"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Path to the PDF file" :: Text)
+ ],
+ "max_chars"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Maximum characters to return (default: 50000)" :: Text)
+ ]
+ ],
+ "required" .= (["path"] :: [Text])
+ ],
+ Engine.toolExecute = executePdf
+ }
+
+executePdf :: Aeson.Value -> IO Aeson.Value
+executePdf v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: PdfArgs) -> do
+ let maxChars = maybe 50000 (min 100000 <. max 1000) (pdfMaxChars args)
+ result <- extractPdfText (Text.unpack (pdfPath args))
+ case result of
+ Left err ->
+ pure (Aeson.object ["error" .= err])
+ Right text -> do
+ let truncated = Text.take maxChars text
+ wasTruncated = Text.length text > maxChars
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "text" .= truncated,
+ "chars" .= Text.length truncated,
+ "truncated" .= wasTruncated
+ ]
+ )
+
+extractPdfText :: FilePath -> IO (Either Text Text)
+extractPdfText path = do
+ result <-
+ try <| readProcessWithExitCode "pdftotext" ["-layout", path, "-"] ""
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("pdftotext error: " <> tshow e))
+ Right (exitCode, stdoutStr, stderrStr) ->
+ case exitCode of
+ ExitSuccess -> pure (Right (Text.pack stdoutStr))
+ ExitFailure code ->
+ pure (Left ("pdftotext failed (" <> tshow code <> "): " <> Text.pack stderrStr))
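+
+-- Usage sketch (the path is hypothetical; requires pdftotext on PATH):
+--
+-- > result <- extractPdfText "/tmp/report.pdf"
+-- > case result of
+-- >   Left err  -> putText ("extract failed: " <> err)
+-- >   Right txt -> putText (Text.take 500 txt)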
+
+downloadAndExtract :: Text -> Text -> Text -> IO (Either Text Text)
+downloadAndExtract botToken filePath maxCharsText = do
+ let url =
+ "https://api.telegram.org/file/bot"
+ <> Text.unpack botToken
+ <> "/"
+ <> Text.unpack filePath
+ maxChars = maybe 50000 identity (readMaybe (Text.unpack maxCharsText) :: Maybe Int)
+ withSystemTempFile "telegram_pdf.pdf" <| \tmpPath tmpHandle -> do
+ hClose tmpHandle
+ downloadResult <-
+ try <| do
+ req <- HTTP.parseRequest url
+ response <- HTTP.httpLBS req
+ let status = HTTP.getResponseStatusCode response
+ if status >= 200 && status < 300
+ then do
+ BL.writeFile tmpPath (HTTP.getResponseBody response)
+ pure (Right ())
+ else pure (Left ("Download failed: HTTP " <> tshow status))
+ case downloadResult of
+ Left (e :: SomeException) ->
+ pure (Left ("Download error: " <> tshow e))
+ Right (Left err) -> pure (Left err)
+ Right (Right ()) -> do
+ result <- extractPdfText tmpPath
+ case result of
+ Left err -> pure (Left err)
+ Right text -> do
+ let truncated = Text.take maxChars text
+ pure (Right truncated)
+
+chunkText :: Int -> Text -> [Text]
+chunkText chunkSize text
+ | Text.null text = []
+ | Text.length text <= chunkSize = [text]
+ | otherwise =
+ let (chunk, rest) = Text.splitAt chunkSize text
+ in chunk : chunkText chunkSize rest
diff --git a/Omni/Agent/Tools/Python.hs b/Omni/Agent/Tools/Python.hs
new file mode 100644
index 0000000..99f3f7d
--- /dev/null
+++ b/Omni/Agent/Tools/Python.hs
@@ -0,0 +1,217 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Python execution tool for agent use.
+--
+-- Executes Python snippets via subprocess with timeout support.
+-- Writes code to temp file, executes with python3, cleans up after.
+--
+-- Available libraries: requests (third-party), plus stdlib json, csv, re, datetime, urllib
+--
+-- : out omni-agent-tools-python
+-- : dep aeson
+-- : dep process
+-- : dep directory
+-- : dep temporary
+module Omni.Agent.Tools.Python
+ ( pythonExecTool,
+ PythonExecArgs (..),
+ PythonResult (..),
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Text as Text
+import qualified Data.Text.IO as TextIO
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Test as Test
+import qualified System.Directory as Directory
+import qualified System.Exit as Exit
+import qualified System.Process as Process
+import System.Timeout (timeout)
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Python"
+ [ Test.unit "pythonExecTool has correct name" <| do
+ Engine.toolName pythonExecTool Test.@=? "python_exec",
+ Test.unit "pythonExecTool schema is valid" <| do
+ let schema = Engine.toolJsonSchema pythonExecTool
+ case schema of
+ Aeson.Object _ -> pure ()
+ _ -> Test.assertFailure "Schema should be an object",
+ Test.unit "PythonExecArgs parses correctly" <| do
+ let json = Aeson.object ["code" .= ("print('hello')" :: Text)]
+ case Aeson.fromJSON json of
+ Aeson.Success (args :: PythonExecArgs) -> pythonCode args Test.@=? "print('hello')"
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "PythonExecArgs parses with timeout" <| do
+ let json = Aeson.object ["code" .= ("x = 1" :: Text), "timeout" .= (10 :: Int)]
+ case Aeson.fromJSON json of
+ Aeson.Success (args :: PythonExecArgs) -> do
+ pythonCode args Test.@=? "x = 1"
+ pythonTimeout args Test.@=? Just 10
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "simple print statement" <| do
+ let args = Aeson.object ["code" .= ("print('hello world')" :: Text)]
+ result <- Engine.toolExecute pythonExecTool args
+ case Aeson.fromJSON result of
+ Aeson.Success (r :: PythonResult) -> do
+ pythonResultExitCode r Test.@=? 0
+ ("hello world" `Text.isInfixOf` pythonResultStdout r) Test.@=? True
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "syntax error handling" <| do
+ let args = Aeson.object ["code" .= ("def broken(" :: Text)]
+ result <- Engine.toolExecute pythonExecTool args
+ case Aeson.fromJSON result of
+ Aeson.Success (r :: PythonResult) -> do
+ (pythonResultExitCode r /= 0) Test.@=? True
+ not (Text.null (pythonResultStderr r)) Test.@=? True
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "import json works" <| do
+ let code = "import json\nprint(json.dumps({'a': 1}))"
+ args = Aeson.object ["code" .= (code :: Text)]
+ result <- Engine.toolExecute pythonExecTool args
+ case Aeson.fromJSON result of
+ Aeson.Success (r :: PythonResult) -> do
+ pythonResultExitCode r Test.@=? 0
+ ("{\"a\": 1}" `Text.isInfixOf` pythonResultStdout r) Test.@=? True
+ Aeson.Error e -> Test.assertFailure e,
+ Test.unit "timeout handling" <| do
+ let code = "import time\ntime.sleep(5)"
+ args = Aeson.object ["code" .= (code :: Text), "timeout" .= (1 :: Int)]
+ result <- Engine.toolExecute pythonExecTool args
+ case Aeson.fromJSON result of
+ Aeson.Success (r :: PythonResult) -> do
+ pythonResultExitCode r Test.@=? (-1)
+ ("timeout" `Text.isInfixOf` Text.toLower (pythonResultStderr r)) Test.@=? True
+ Aeson.Error e -> Test.assertFailure e
+ ]
+
+data PythonExecArgs = PythonExecArgs
+ { pythonCode :: Text,
+ pythonTimeout :: Maybe Int
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON PythonExecArgs where
+ parseJSON =
+ Aeson.withObject "PythonExecArgs" <| \v ->
+ (PythonExecArgs </ (v .: "code"))
+ <*> (v .:? "timeout")
+
+data PythonResult = PythonResult
+ { pythonResultStdout :: Text,
+ pythonResultStderr :: Text,
+ pythonResultExitCode :: Int
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON PythonResult where
+ toJSON r =
+ Aeson.object
+ [ "stdout" .= pythonResultStdout r,
+ "stderr" .= pythonResultStderr r,
+ "exit_code" .= pythonResultExitCode r
+ ]
+
+instance Aeson.FromJSON PythonResult where
+ parseJSON =
+ Aeson.withObject "PythonResult" <| \v ->
+ (PythonResult </ (v .: "stdout"))
+ <*> (v .: "stderr")
+ <*> (v .: "exit_code")
+
+pythonExecTool :: Engine.Tool
+pythonExecTool =
+ Engine.Tool
+ { Engine.toolName = "python_exec",
+ Engine.toolDescription =
+ "Execute Python code and return the output. "
+ <> "Use for data processing, API calls, calculations, or any task requiring Python. "
+ <> "Available libraries: requests, json, csv, re, datetime, urllib. "
+ <> "Code runs in a subprocess with a 30 second default timeout.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "code"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Python code to execute" :: Text)
+ ],
+ "timeout"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Timeout in seconds (default: 30)" :: Text)
+ ]
+ ],
+ "required" .= (["code"] :: [Text])
+ ],
+ Engine.toolExecute = executePythonExec
+ }
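+
+-- Example invocation (a sketch; the arguments mirror the JSON schema above):
+--
+-- > out <- Engine.toolExecute pythonExecTool
+-- >   (Aeson.object ["code" .= ("print(2 + 2)" :: Text), "timeout" .= (5 :: Int)])
+-- > -- out decodes to a PythonResult with stdout "4\n" and exit_code 0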
+
+executePythonExec :: Aeson.Value -> IO Aeson.Value
+executePythonExec v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure <| mkError ("Invalid arguments: " <> Text.pack e)
+ Aeson.Success args -> do
+ let code = pythonCode args
+ timeoutSecs = fromMaybe 30 (pythonTimeout args)
+ timeoutMicros = timeoutSecs * 1000000
+ tmpDir <- Directory.getTemporaryDirectory
+ let tmpFile = tmpDir <> "/python_exec_" <> show (codeHash code) <> ".py"
+ result <-
+ try <| do
+ TextIO.writeFile tmpFile code
+ let proc = Process.proc "python3" [tmpFile]
+ mResult <- timeout timeoutMicros <| Process.readCreateProcessWithExitCode proc ""
+ Directory.removeFile tmpFile
+ pure mResult
+ case result of
+ Left (e :: SomeException) -> do
+ _ <- try @SomeException <| Directory.removeFile tmpFile
+ pure <| mkError ("Execution failed: " <> tshow e)
+ Right Nothing -> do
+ _ <- try @SomeException <| Directory.removeFile tmpFile
+ pure
+ <| Aeson.toJSON
+ <| PythonResult
+ { pythonResultStdout = "",
+ pythonResultStderr = "Timeout: execution exceeded " <> tshow timeoutSecs <> " seconds",
+ pythonResultExitCode = -1
+ }
+ Right (Just (exitCode, stdoutStr, stderrStr)) ->
+ pure
+ <| Aeson.toJSON
+ <| PythonResult
+ { pythonResultStdout = Text.pack stdoutStr,
+ pythonResultStderr = Text.pack stderrStr,
+ pythonResultExitCode = exitCodeToInt exitCode
+ }
+
+exitCodeToInt :: Exit.ExitCode -> Int
+exitCodeToInt Exit.ExitSuccess = 0
+exitCodeToInt (Exit.ExitFailure n) = n
+
+mkError :: Text -> Aeson.Value
+mkError err =
+ Aeson.toJSON
+ <| PythonResult
+ { pythonResultStdout = "",
+ pythonResultStderr = err,
+ pythonResultExitCode = -1
+ }
+
+codeHash :: Text -> Int
+codeHash = Text.foldl' (\h c -> 31 * h + fromEnum c) 0
diff --git a/Omni/Agent/Tools/Todos.hs b/Omni/Agent/Tools/Todos.hs
new file mode 100644
index 0000000..2aacacc
--- /dev/null
+++ b/Omni/Agent/Tools/Todos.hs
@@ -0,0 +1,527 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Todo tool with due dates and reminders.
+--
+-- Provides user-scoped todos with optional due dates.
+--
+-- : out omni-agent-tools-todos
+-- : dep aeson
+-- : dep sqlite-simple
+-- : dep time
+module Omni.Agent.Tools.Todos
+ ( -- * Tools
+ todoAddTool,
+ todoListTool,
+ todoCompleteTool,
+ todoDeleteTool,
+
+ -- * Direct API
+ Todo (..),
+ createTodo,
+ listTodos,
+ listPendingTodos,
+ listOverdueTodos,
+ completeTodo,
+ deleteTodo,
+
+ -- * Reminders
+ listTodosDueForReminder,
+ markReminderSent,
+ reminderInterval,
+
+ -- * Database
+ initTodosTable,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Text as Text
+import Data.Time (LocalTime, NominalDiffTime, TimeZone, UTCTime, addUTCTime, getCurrentTime, localTimeToUTC, minutesToTimeZone, utcToLocalTime)
+import Data.Time.Format (defaultTimeLocale, formatTime, parseTimeM)
+import qualified Database.SQLite.Simple as SQL
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Memory as Memory
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.Todos"
+ [ Test.unit "todoAddTool has correct schema" <| do
+ let tool = todoAddTool "test-user-id"
+ Engine.toolName tool Test.@=? "todo_add",
+ Test.unit "todoListTool has correct schema" <| do
+ let tool = todoListTool "test-user-id"
+ Engine.toolName tool Test.@=? "todo_list",
+ Test.unit "todoCompleteTool has correct schema" <| do
+ let tool = todoCompleteTool "test-user-id"
+ Engine.toolName tool Test.@=? "todo_complete",
+ Test.unit "todoDeleteTool has correct schema" <| do
+ let tool = todoDeleteTool "test-user-id"
+ Engine.toolName tool Test.@=? "todo_delete",
+ Test.unit "Todo JSON roundtrip" <| do
+ now <- getCurrentTime
+ let td =
+ Todo
+ { todoId = 1,
+ todoUserId = "user-123",
+ todoTitle = "Buy milk",
+ todoDueDate = Just now,
+ todoCompleted = False,
+ todoCreatedAt = now,
+ todoLastRemindedAt = Nothing
+ }
+ case Aeson.decode (Aeson.encode td) of
+ Nothing -> Test.assertFailure "Failed to decode Todo"
+ Just decoded -> do
+ todoTitle decoded Test.@=? "Buy milk"
+ todoCompleted decoded Test.@=? False,
+ Test.unit "parseDueDate handles various formats" <| do
+ isJust (parseDueDate "2024-12-25") Test.@=? True
+ isJust (parseDueDate "2024-12-25 14:00") Test.@=? True
+ ]
+
+data Todo = Todo
+ { todoId :: Int,
+ todoUserId :: Text,
+ todoTitle :: Text,
+ todoDueDate :: Maybe UTCTime,
+ todoCompleted :: Bool,
+ todoCreatedAt :: UTCTime,
+ todoLastRemindedAt :: Maybe UTCTime
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON Todo where
+ toJSON td =
+ Aeson.object
+ [ "id" .= todoId td,
+ "user_id" .= todoUserId td,
+ "title" .= todoTitle td,
+ "due_date" .= todoDueDate td,
+ "completed" .= todoCompleted td,
+ "created_at" .= todoCreatedAt td,
+ "last_reminded_at" .= todoLastRemindedAt td
+ ]
+
+instance Aeson.FromJSON Todo where
+ parseJSON =
+ Aeson.withObject "Todo" <| \v ->
+ (Todo </ (v .: "id"))
+ <*> (v .: "user_id")
+ <*> (v .: "title")
+ <*> (v .:? "due_date")
+ <*> (v .: "completed")
+ <*> (v .: "created_at")
+ <*> (v .:? "last_reminded_at")
+
+instance SQL.FromRow Todo where
+ fromRow =
+ (Todo </ SQL.field)
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+ <*> SQL.field
+
+initTodosTable :: SQL.Connection -> IO ()
+initTodosTable conn = do
+ SQL.execute_
+ conn
+ "CREATE TABLE IF NOT EXISTS todos (\
+ \ id INTEGER PRIMARY KEY AUTOINCREMENT,\
+ \ user_id TEXT NOT NULL,\
+ \ title TEXT NOT NULL,\
+ \ due_date TIMESTAMP,\
+ \ completed INTEGER NOT NULL DEFAULT 0,\
+ \ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\
+ \ last_reminded_at TIMESTAMP\
+ \)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_todos_user ON todos(user_id)"
+ SQL.execute_
+ conn
+ "CREATE INDEX IF NOT EXISTS idx_todos_due ON todos(user_id, due_date)"
+ migrateTodosTable conn
+
+migrateTodosTable :: SQL.Connection -> IO ()
+migrateTodosTable conn = do
+ cols <- SQL.query_ conn "PRAGMA table_info(todos)" :: IO [(Int, Text, Text, Int, Maybe Text, Int)]
+ let colNames = map (\(_, name, _, _, _, _) -> name) cols
+ unless ("last_reminded_at" `elem` colNames) <| do
+ SQL.execute_ conn "ALTER TABLE todos ADD COLUMN last_reminded_at TIMESTAMP"
+
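+-- Fixed UTC-5 offset (EST); DST is not applied, so due dates entered during daylight saving time are off by one hour.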
+easternTimeZone :: TimeZone
+easternTimeZone = minutesToTimeZone (-300)
+
+parseDueDate :: Text -> Maybe UTCTime
+parseDueDate txt =
+ let s = Text.unpack txt
+ parseLocal :: Maybe LocalTime
+ parseLocal =
+ parseTimeM True defaultTimeLocale "%Y-%m-%d %H:%M" s
+ <|> parseTimeM True defaultTimeLocale "%Y-%m-%d" s
+ <|> parseTimeM True defaultTimeLocale "%Y-%m-%dT%H:%M:%S" s
+ in fmap (localTimeToUTC easternTimeZone) parseLocal
+ <|> parseTimeM True defaultTimeLocale "%Y-%m-%dT%H:%M:%SZ" s
+
+createTodo :: Text -> Text -> Maybe Text -> IO Todo
+createTodo uid title maybeDueDateStr = do
+ now <- getCurrentTime
+ let dueDate = maybeDueDateStr +> parseDueDate
+ Memory.withMemoryDb <| \conn -> do
+ initTodosTable conn
+ SQL.execute
+ conn
+ "INSERT INTO todos (user_id, title, due_date, completed, created_at) VALUES (?, ?, ?, 0, ?)"
+ (uid, title, dueDate, now)
+ rowId <- SQL.lastInsertRowId conn
+ pure
+ Todo
+ { todoId = fromIntegral rowId,
+ todoUserId = uid,
+ todoTitle = title,
+ todoDueDate = dueDate,
+ todoCompleted = False,
+ todoCreatedAt = now,
+ todoLastRemindedAt = Nothing
+ }
+
+listTodos :: Text -> Int -> IO [Todo]
+listTodos uid limit =
+ Memory.withMemoryDb <| \conn -> do
+ initTodosTable conn
+ SQL.query
+ conn
+ "SELECT id, user_id, title, due_date, completed, created_at, last_reminded_at \
+ \FROM todos WHERE user_id = ? \
+ \ORDER BY completed ASC, due_date ASC NULLS LAST, created_at DESC LIMIT ?"
+ (uid, limit)
+
+listPendingTodos :: Text -> Int -> IO [Todo]
+listPendingTodos uid limit =
+ Memory.withMemoryDb <| \conn -> do
+ initTodosTable conn
+ SQL.query
+ conn
+ "SELECT id, user_id, title, due_date, completed, created_at, last_reminded_at \
+ \FROM todos WHERE user_id = ? AND completed = 0 \
+ \ORDER BY due_date ASC NULLS LAST, created_at DESC LIMIT ?"
+ (uid, limit)
+
+listOverdueTodos :: Text -> IO [Todo]
+listOverdueTodos uid = do
+ now <- getCurrentTime
+ Memory.withMemoryDb <| \conn -> do
+ initTodosTable conn
+ SQL.query
+ conn
+ "SELECT id, user_id, title, due_date, completed, created_at, last_reminded_at \
+ \FROM todos WHERE user_id = ? AND completed = 0 AND due_date < ? \
+ \ORDER BY due_date ASC"
+ (uid, now)
+
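+-- | Minimum gap between repeat reminders for the same overdue todo (24 hours).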
+reminderInterval :: NominalDiffTime
+reminderInterval = 24 * 60 * 60
+
+listTodosDueForReminder :: IO [Todo]
+listTodosDueForReminder = do
+ now <- getCurrentTime
+ let cutoff = addUTCTime (negate reminderInterval) now
+ Memory.withMemoryDb <| \conn -> do
+ initTodosTable conn
+ SQL.query
+ conn
+ "SELECT id, user_id, title, due_date, completed, created_at, last_reminded_at \
+ \FROM todos \
+ \WHERE completed = 0 \
+ \ AND due_date IS NOT NULL \
+ \ AND due_date < ? \
+ \ AND (last_reminded_at IS NULL OR last_reminded_at < ?)"
+ (now, cutoff)
+
+markReminderSent :: Int -> IO ()
+markReminderSent tid = do
+ now <- getCurrentTime
+ Memory.withMemoryDb <| \conn -> do
+ initTodosTable conn
+ SQL.execute
+ conn
+ "UPDATE todos SET last_reminded_at = ? WHERE id = ?"
+ (now, tid)
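+
+-- Reminder-loop sketch (assumes a caller that can message the user; 'notifyUser' is
+-- hypothetical, the Telegram reminder worker plays that role):
+--
+-- > due <- listTodosDueForReminder
+-- > forM_ due <| \td -> do
+-- >   notifyUser (todoUserId td) ("Overdue: " <> todoTitle td)
+-- >   markReminderSent (todoId td)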
+
+completeTodo :: Text -> Int -> IO Bool
+completeTodo uid tid =
+ Memory.withMemoryDb <| \conn -> do
+ initTodosTable conn
+ SQL.execute
+ conn
+ "UPDATE todos SET completed = 1 WHERE id = ? AND user_id = ?"
+ (tid, uid)
+ changes <- SQL.changes conn
+ pure (changes > 0)
+
+deleteTodo :: Text -> Int -> IO Bool
+deleteTodo uid tid =
+ Memory.withMemoryDb <| \conn -> do
+ initTodosTable conn
+ SQL.execute
+ conn
+ "DELETE FROM todos WHERE id = ? AND user_id = ?"
+ (tid, uid)
+ changes <- SQL.changes conn
+ pure (changes > 0)
+
+todoAddTool :: Text -> Engine.Tool
+todoAddTool uid =
+ Engine.Tool
+ { Engine.toolName = "todo_add",
+ Engine.toolDescription =
+ "Add a todo item with optional due date. Use for tasks, reminders, "
+ <> "or anything the user needs to remember to do. "
+ <> "Due date format: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM'.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "title"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("What needs to be done" :: Text)
+ ],
+ "due_date"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Optional due date in Eastern time: 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM'" :: Text)
+ ]
+ ],
+ "required" .= (["title"] :: [Text])
+ ],
+ Engine.toolExecute = executeTodoAdd uid
+ }
+
+executeTodoAdd :: Text -> Aeson.Value -> IO Aeson.Value
+executeTodoAdd uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: TodoAddArgs) -> do
+ td <- createTodo uid (taTitle args) (taDueDate args)
+ let dueDateMsg = case todoDueDate td of
+ Just d ->
+ let localTime = utcToLocalTime easternTimeZone d
+ in " (due: " <> Text.pack (formatTime defaultTimeLocale "%Y-%m-%d %H:%M ET" localTime) <> ")"
+ Nothing -> ""
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "todo_id" .= todoId td,
+ "message" .= ("Added todo: " <> todoTitle td <> dueDateMsg)
+ ]
+ )
+
+data TodoAddArgs = TodoAddArgs
+ { taTitle :: Text,
+ taDueDate :: Maybe Text
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON TodoAddArgs where
+ parseJSON =
+ Aeson.withObject "TodoAddArgs" <| \v ->
+ (TodoAddArgs </ (v .: "title"))
+ <*> (v .:? "due_date")
+
+todoListTool :: Text -> Engine.Tool
+todoListTool uid =
+ Engine.Tool
+ { Engine.toolName = "todo_list",
+ Engine.toolDescription =
+ "List todos. By default shows pending (incomplete) todos. "
+ <> "Can show all todos or just overdue ones.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "filter"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("Filter: 'pending' (default), 'all', or 'overdue'" :: Text)
+ ],
+ "limit"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Max todos to return (default: 20)" :: Text)
+ ]
+ ],
+ "required" .= ([] :: [Text])
+ ],
+ Engine.toolExecute = executeTodoList uid
+ }
+
+executeTodoList :: Text -> Aeson.Value -> IO Aeson.Value
+executeTodoList uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: TodoListArgs) -> do
+ let lim = min 50 (max 1 (tlLimit args))
+ todos <- case tlFilter args of
+ "all" -> listTodos uid lim
+ "overdue" -> listOverdueTodos uid
+ _ -> listPendingTodos uid lim
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "count" .= length todos,
+ "todos" .= formatTodosForLLM todos
+ ]
+ )
+
+formatTodosForLLM :: [Todo] -> Text
+formatTodosForLLM [] = "No todos found."
+formatTodosForLLM todos =
+ Text.unlines (map formatTodo todos)
+ where
+ formatTodo td =
+ let status = if todoCompleted td then "[x]" else "[ ]"
+ dueStr = case todoDueDate td of
+ Just d ->
+ let localTime = utcToLocalTime easternTimeZone d
+ in " (due: " <> Text.pack (formatTime defaultTimeLocale "%Y-%m-%d %H:%M ET" localTime) <> ")"
+ Nothing -> ""
+ in status <> " " <> todoTitle td <> dueStr <> " (id: " <> tshow (todoId td) <> ")"
+
+data TodoListArgs = TodoListArgs
+ { tlFilter :: Text,
+ tlLimit :: Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON TodoListArgs where
+ parseJSON =
+ Aeson.withObject "TodoListArgs" <| \v ->
+ (TodoListArgs </ (v .:? "filter" .!= "pending"))
+ <*> (v .:? "limit" .!= 20)
+
+todoCompleteTool :: Text -> Engine.Tool
+todoCompleteTool uid =
+ Engine.Tool
+ { Engine.toolName = "todo_complete",
+ Engine.toolDescription =
+ "Mark a todo as completed. Use when the user says they finished something.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "todo_id"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("The ID of the todo to complete" :: Text)
+ ]
+ ],
+ "required" .= (["todo_id"] :: [Text])
+ ],
+ Engine.toolExecute = executeTodoComplete uid
+ }
+
+executeTodoComplete :: Text -> Aeson.Value -> IO Aeson.Value
+executeTodoComplete uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: TodoCompleteArgs) -> do
+ completed <- completeTodo uid (tcTodoId args)
+ if completed
+ then
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message" .= ("Todo marked as complete" :: Text)
+ ]
+ )
+ else
+ pure
+ ( Aeson.object
+ [ "success" .= False,
+ "error" .= ("Todo not found" :: Text)
+ ]
+ )
+
+newtype TodoCompleteArgs = TodoCompleteArgs
+ { tcTodoId :: Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON TodoCompleteArgs where
+ parseJSON =
+ Aeson.withObject "TodoCompleteArgs" <| \v ->
+ TodoCompleteArgs </ (v .: "todo_id")
+
+todoDeleteTool :: Text -> Engine.Tool
+todoDeleteTool uid =
+ Engine.Tool
+ { Engine.toolName = "todo_delete",
+ Engine.toolDescription =
+ "Delete a todo permanently. Use when a todo is no longer needed.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "todo_id"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("The ID of the todo to delete" :: Text)
+ ]
+ ],
+ "required" .= (["todo_id"] :: [Text])
+ ],
+ Engine.toolExecute = executeTodoDelete uid
+ }
+
+executeTodoDelete :: Text -> Aeson.Value -> IO Aeson.Value
+executeTodoDelete uid v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: TodoDeleteArgs) -> do
+ deleted <- deleteTodo uid (tdTodoId args)
+ if deleted
+ then
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "message" .= ("Todo deleted" :: Text)
+ ]
+ )
+ else
+ pure
+ ( Aeson.object
+ [ "success" .= False,
+ "error" .= ("Todo not found" :: Text)
+ ]
+ )
+
+newtype TodoDeleteArgs = TodoDeleteArgs
+ { tdTodoId :: Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON TodoDeleteArgs where
+ parseJSON =
+ Aeson.withObject "TodoDeleteArgs" <| \v ->
+ TodoDeleteArgs </ (v .: "todo_id")
diff --git a/Omni/Agent/Tools/WebReader.hs b/Omni/Agent/Tools/WebReader.hs
new file mode 100644
index 0000000..a69e3cf
--- /dev/null
+++ b/Omni/Agent/Tools/WebReader.hs
@@ -0,0 +1,308 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Web page reader tool - fetches and summarizes web pages.
+--
+-- : out omni-agent-tools-webreader
+-- : dep aeson
+-- : dep http-conduit
+-- : run trafilatura
+module Omni.Agent.Tools.WebReader
+ ( -- * Tool
+ webReaderTool,
+
+ -- * Direct API
+ fetchWebpage,
+ extractText,
+ fetchAndSummarize,
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import qualified Control.Concurrent.Sema as Sema
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.IO as TIO
+import Data.Time.Clock (diffUTCTime, getCurrentTime)
+import qualified Network.HTTP.Client as HTTPClient
+import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Agent.Provider as Provider
+import qualified Omni.Test as Test
+import qualified System.Exit as Exit
+import qualified System.IO as IO
+import qualified System.Process as Process
+import qualified System.Timeout as Timeout
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.WebReader"
+ [ Test.unit "extractText removes HTML tags" <| do
+ let html = "<html><body><p>Hello world</p></body></html>"
+ result = extractText html
+ ("Hello world" `Text.isInfixOf` result) Test.@=? True,
+ Test.unit "extractText removes script tags" <| do
+ let html = "<html><script>alert('hi')</script><p>Content</p></html>"
+ result = extractText html
+ ("alert" `Text.isInfixOf` result) Test.@=? False
+ ("Content" `Text.isInfixOf` result) Test.@=? True,
+ Test.unit "webReaderTool has correct schema" <| do
+ let tool = webReaderTool "test-key"
+ Engine.toolName tool Test.@=? "read_webpages"
+ ]
+
+-- | Fetch timeout in microseconds (15 seconds - short because blocked sites won't respond anyway)
+fetchTimeoutMicros :: Int
+fetchTimeoutMicros = 15 * 1000000
+
+-- | Summarization timeout in microseconds (30 seconds)
+summarizeTimeoutMicros :: Int
+summarizeTimeoutMicros = 30 * 1000000
+
+-- | Maximum concurrent fetches
+maxConcurrentFetches :: Int
+maxConcurrentFetches = 10
+
+-- | Simple debug logging to stderr
+dbg :: Text -> IO ()
+dbg = TIO.hPutStrLn IO.stderr
+
+fetchWebpage :: Text -> IO (Either Text Text)
+fetchWebpage url = do
+ dbg ("[webreader] Fetching: " <> url)
+ result <-
+ Timeout.timeout fetchTimeoutMicros <| do
+ innerResult <-
+ try <| do
+ req0 <- HTTP.parseRequest (Text.unpack url)
+ let req =
+ HTTP.setRequestMethod "GET"
+ <| HTTP.setRequestHeader "User-Agent" ["Mozilla/5.0 (compatible; OmniBot/1.0)"]
+ <| HTTP.setRequestHeader "Accept" ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"]
+ <| HTTP.setRequestResponseTimeout (HTTPClient.responseTimeoutMicro fetchTimeoutMicros)
+ <| req0
+ HTTP.httpLBS req
+ case innerResult of
+ Left (e :: SomeException) -> do
+ dbg ("[webreader] Fetch error: " <> url <> " - " <> tshow e)
+ pure (Left ("Failed to fetch URL: " <> tshow e))
+ Right response -> do
+ let status = HTTP.getResponseStatusCode response
+ if status >= 200 && status < 300
+ then do
+ let body = HTTP.getResponseBody response
+ text = TE.decodeUtf8With (\_ _ -> Just '?') (BL.toStrict body)
+ len = Text.length text
+ dbg ("[webreader] Fetched: " <> url <> " (" <> tshow len <> " chars)")
+ pure (Right text)
+ else do
+ dbg ("[webreader] HTTP " <> tshow status <> ": " <> url)
+ pure (Left ("HTTP error: " <> tshow status))
+ case result of
+ Nothing -> do
+ dbg ("[webreader] Timeout: " <> url)
+ pure (Left ("Timeout fetching " <> url))
+ Just r -> pure r
+
+-- | Fast single-pass text extraction from HTML
+-- Strips all tags in one pass, no expensive operations
+extractText :: Text -> Text
+extractText html = collapseWhitespace (stripAllTags html)
+ where
+ -- Single pass: accumulate text outside of tags
+ stripAllTags :: Text -> Text
+ stripAllTags txt = Text.pack (go (Text.unpack txt) False [])
+ where
+ go :: [Char] -> Bool -> [Char] -> [Char]
+ go [] _ acc = reverse acc
+ go ('<' : rest) _ acc = go rest True acc -- Enter tag
+ go ('>' : rest) True acc = go rest False (' ' : acc) -- Exit tag, add space
+ go (_ : rest) True acc = go rest True acc -- Inside tag, skip
+ go (c : rest) False acc = go rest False (c : acc) -- Outside tag, keep
+ collapseWhitespace = Text.unwords <. Text.words
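+
+-- For instance, extractText "<p>Hello <b>world</b></p>" evaluates to "Hello world".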
+
+-- | Maximum chars to send for summarization (keep it small for fast LLM response)
+maxContentForSummary :: Int
+maxContentForSummary = 15000
+
+-- | Maximum summary length to return
+maxSummaryLength :: Int
+maxSummaryLength = 1000
+
+-- | Timeout for trafilatura extraction in microseconds (10 seconds)
+extractTimeoutMicros :: Int
+extractTimeoutMicros = 10 * 1000000
+
+-- | Extract article content using trafilatura (Python library)
+-- Falls back to naive extractText if trafilatura fails
+extractWithTrafilatura :: Text -> IO Text
+extractWithTrafilatura html = do
+ let pythonScript =
+ "import sys; import trafilatura; "
+ <> "html = sys.stdin.read(); "
+ <> "result = trafilatura.extract(html, include_comments=False, include_tables=False); "
+ <> "print(result if result else '')"
+ proc =
+ (Process.proc "python3" ["-c", Text.unpack pythonScript])
+ { Process.std_in = Process.CreatePipe,
+ Process.std_out = Process.CreatePipe,
+ Process.std_err = Process.CreatePipe
+ }
+ result <-
+ Timeout.timeout extractTimeoutMicros <| do
+ (exitCode, stdoutStr, _stderrStr) <- Process.readCreateProcessWithExitCode proc (Text.unpack html)
+ case exitCode of
+ Exit.ExitSuccess -> pure (Text.strip (Text.pack stdoutStr))
+ Exit.ExitFailure _ -> pure ""
+ case result of
+ Just txt | not (Text.null txt) -> pure txt
+ _ -> do
+ dbg "[webreader] trafilatura failed, falling back to naive extraction"
+ pure (extractText (Text.take 100000 html))
+
+summarizeContent :: Text -> Text -> Text -> IO (Either Text Text)
+summarizeContent apiKey url content = do
+ let truncatedContent = Text.take maxContentForSummary content
+ haiku = Provider.defaultOpenRouter apiKey "anthropic/claude-haiku-4.5"
+ dbg ("[webreader] Summarizing: " <> url <> " (" <> tshow (Text.length truncatedContent) <> " chars)")
+ dbg "[webreader] Calling LLM for summarization..."
+ startTime <- getCurrentTime
+ result <-
+ Timeout.timeout summarizeTimeoutMicros
+ <| Provider.chat
+ haiku
+ []
+ [ Provider.Message
+ Provider.System
+ ( "You are a webpage summarizer. Extract the key information in 3-5 bullet points. "
+ <> "Be extremely concise - max 500 characters total. No preamble, just bullets."
+ )
+ Nothing
+ Nothing,
+ Provider.Message
+ Provider.User
+ ("Summarize: " <> url <> "\n\n" <> truncatedContent)
+ Nothing
+ Nothing
+ ]
+ endTime <- getCurrentTime
+ let elapsed = diffUTCTime endTime startTime
+ dbg ("[webreader] LLM call completed in " <> tshow elapsed)
+ case result of
+ Nothing -> do
+ dbg ("[webreader] Summarize timeout after " <> tshow elapsed <> ": " <> url)
+ pure (Left ("Timeout summarizing " <> url))
+ Just (Left err) -> do
+ dbg ("[webreader] Summarize error: " <> url <> " - " <> err)
+ pure (Left ("Summarization failed: " <> err))
+ Just (Right msg) -> do
+ let summary = Text.take maxSummaryLength (Provider.msgContent msg)
+ dbg ("[webreader] Summarized: " <> url <> " (" <> tshow (Text.length summary) <> " chars)")
+ pure (Right summary)
+
+-- | Fetch and summarize a single URL, returning a result object
+-- This is the core function used by both single and batch tools
+fetchAndSummarize :: Text -> Text -> IO Aeson.Value
+fetchAndSummarize apiKey url = do
+ fetchResult <- fetchWebpage url
+ case fetchResult of
+ Left err ->
+ pure (Aeson.object ["url" .= url, "error" .= err])
+ Right html -> do
+ dbg ("[webreader] Extracting article from: " <> url <> " (" <> tshow (Text.length html) <> " chars HTML)")
+ extractStart <- getCurrentTime
+ textContent <- extractWithTrafilatura html
+ extractEnd <- getCurrentTime
+ let extractElapsed = diffUTCTime extractEnd extractStart
+ dbg ("[webreader] Extracted: " <> url <> " (" <> tshow (Text.length textContent) <> " chars text) in " <> tshow extractElapsed)
+ if Text.null (Text.strip textContent)
+ then pure (Aeson.object ["url" .= url, "error" .= ("Page appears to be empty or JavaScript-only" :: Text)])
+ else do
+ summaryResult <- summarizeContent apiKey url textContent
+ case summaryResult of
+ Left err ->
+ pure
+ ( Aeson.object
+ [ "url" .= url,
+ "error" .= err,
+ "raw_content" .= Text.take 2000 textContent
+ ]
+ )
+ Right summary ->
+ pure
+ ( Aeson.object
+ [ "url" .= url,
+ "success" .= True,
+ "summary" .= summary
+ ]
+ )
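+
+-- Batch usage sketch (the key and URLs are placeholders), mirroring executeWebReader below:
+--
+-- > results <- Sema.mapPool maxConcurrentFetches
+-- >   (fetchAndSummarize "openrouter-api-key")
+-- >   ["https://example.com", "https://example.org"]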
+
+-- | Web reader tool - fetches and summarizes webpages in parallel
+webReaderTool :: Text -> Engine.Tool
+webReaderTool apiKey =
+ Engine.Tool
+ { Engine.toolName = "read_webpages",
+ Engine.toolDescription =
+ "Fetch and summarize webpages in parallel. Each page is processed independently - "
+ <> "failures on one page won't affect others. Returns a list of summaries.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "urls"
+ .= Aeson.object
+ [ "type" .= ("array" :: Text),
+ "items" .= Aeson.object ["type" .= ("string" :: Text)],
+ "description" .= ("List of URLs to read and summarize" :: Text)
+ ]
+ ],
+ "required" .= (["urls"] :: [Text])
+ ],
+ Engine.toolExecute = executeWebReader apiKey
+ }
+
+executeWebReader :: Text -> Aeson.Value -> IO Aeson.Value
+executeWebReader apiKey v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: WebReaderArgs) -> do
+ let urls = wrUrls args
+ dbg ("[webreader] Starting batch: " <> tshow (length urls) <> " URLs")
+ results <- Sema.mapPool maxConcurrentFetches (fetchAndSummarize apiKey) urls
+ let succeeded = length (filter isSuccess results)
+ dbg ("[webreader] Batch complete: " <> tshow succeeded <> "/" <> tshow (length urls) <> " succeeded")
+ pure
+ ( Aeson.object
+ [ "results" .= results,
+ "total" .= length urls,
+ "succeeded" .= succeeded
+ ]
+ )
+ where
+ isSuccess (Aeson.Object obj) = KeyMap.member "success" obj
+ isSuccess _ = False
+
+newtype WebReaderArgs = WebReaderArgs
+ { wrUrls :: [Text]
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON WebReaderArgs where
+ parseJSON =
+ Aeson.withObject "WebReaderArgs" <| \v ->
+ WebReaderArgs </ (v Aeson..: "urls")
diff --git a/Omni/Agent/Tools/WebReaderTest.hs b/Omni/Agent/Tools/WebReaderTest.hs
new file mode 100644
index 0000000..ca4c119
--- /dev/null
+++ b/Omni/Agent/Tools/WebReaderTest.hs
@@ -0,0 +1,53 @@
+{-# LANGUAGE BangPatterns #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Quick test for WebReader to debug hangs
+--
+-- : out webreader-test
+-- : dep http-conduit
+-- : run trafilatura
+module Omni.Agent.Tools.WebReaderTest where
+
+import Alpha
+import qualified Data.Text as Text
+import qualified Data.Text.IO as TIO
+import Data.Time.Clock (diffUTCTime, getCurrentTime)
+import qualified Omni.Agent.Tools.WebReader as WebReader
+
+main :: IO ()
+main = do
+ TIO.putStrLn "=== WebReader Debug Test ==="
+
+ TIO.putStrLn "\n--- Test 1: Small page (httpbin) ---"
+ testUrl "https://httpbin.org/html"
+
+ TIO.putStrLn "\n--- Test 2: Medium page (example.com) ---"
+ testUrl "https://example.com"
+
+ TIO.putStrLn "\n--- Test 3: Large page (github) ---"
+ testUrl "https://github.com/anthropics/skills"
+
+ TIO.putStrLn "\n=== Done ==="
+
+testUrl :: Text -> IO ()
+testUrl url = do
+ TIO.putStrLn ("Fetching: " <> url)
+
+ startFetch <- getCurrentTime
+ result <- WebReader.fetchWebpage url
+ endFetch <- getCurrentTime
+ TIO.putStrLn ("Fetch took: " <> tshow (diffUTCTime endFetch startFetch))
+
+ case result of
+ Left err -> TIO.putStrLn ("Fetch error: " <> err)
+ Right html -> do
+ TIO.putStrLn ("HTML size: " <> tshow (Text.length html) <> " chars")
+
+ TIO.putStrLn "Extracting text (naive, 100k truncated)..."
+ startExtract <- getCurrentTime
+ let !text = WebReader.extractText (Text.take 100000 html)
+ endExtract <- getCurrentTime
+ TIO.putStrLn ("Extract took: " <> tshow (diffUTCTime endExtract startExtract))
+ TIO.putStrLn ("Text size: " <> tshow (Text.length text) <> " chars")
+ TIO.putStrLn ("Preview: " <> Text.take 200 text)
diff --git a/Omni/Agent/Tools/WebSearch.hs b/Omni/Agent/Tools/WebSearch.hs
new file mode 100644
index 0000000..58c945c
--- /dev/null
+++ b/Omni/Agent/Tools/WebSearch.hs
@@ -0,0 +1,212 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Web search tool using Kagi Search API.
+--
+-- Provides web search capabilities for agents.
+--
+-- : out omni-agent-tools-websearch
+-- : dep aeson
+-- : dep http-conduit
+module Omni.Agent.Tools.WebSearch
+ ( -- * Tool
+ webSearchTool,
+
+ -- * Direct API
+ kagiSearch,
+ SearchResult (..),
+
+ -- * Testing
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.KeyMap as KeyMap
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import qualified Network.HTTP.Simple as HTTP
+import qualified Network.HTTP.Types.URI as URI
+import qualified Omni.Agent.Engine as Engine
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Tools.WebSearch"
+ [ Test.unit "SearchResult JSON parsing" <| do
+ let json =
+ Aeson.object
+ [ "t" .= (0 :: Int),
+ "url" .= ("https://example.com" :: Text),
+ "title" .= ("Example Title" :: Text),
+ "snippet" .= ("This is a snippet" :: Text)
+ ]
+ case parseSearchResult json of
+ Nothing -> Test.assertFailure "Failed to parse search result"
+ Just sr -> do
+ srUrl sr Test.@=? "https://example.com"
+ srTitle sr Test.@=? "Example Title"
+ srSnippet sr Test.@=? Just "This is a snippet",
+ Test.unit "webSearchTool has correct schema" <| do
+ let tool = webSearchTool "test-key"
+ Engine.toolName tool Test.@=? "web_search",
+ Test.unit "formatResultsForLLM formats correctly" <| do
+ let results =
+ [ SearchResult "https://a.com" "Title A" (Just "Snippet A") Nothing,
+ SearchResult "https://b.com" "Title B" Nothing Nothing
+ ]
+ formatted = formatResultsForLLM results
+ ("Title A" `Text.isInfixOf` formatted) Test.@=? True
+ ("https://a.com" `Text.isInfixOf` formatted) Test.@=? True
+ ]
+
+data SearchResult = SearchResult
+ { srUrl :: Text,
+ srTitle :: Text,
+ srSnippet :: Maybe Text,
+ srPublished :: Maybe Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON SearchResult where
+ toJSON r =
+ Aeson.object
+ [ "url" .= srUrl r,
+ "title" .= srTitle r,
+ "snippet" .= srSnippet r,
+ "published" .= srPublished r
+ ]
+
+parseSearchResult :: Aeson.Value -> Maybe SearchResult
+parseSearchResult val = do
+ Aeson.Object obj <- pure val
+ t <- case KeyMap.lookup "t" obj of
+ Just (Aeson.Number n) -> Just (round n :: Int)
+ _ -> Nothing
+ guard (t == 0)
+ url <- case KeyMap.lookup "url" obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ title <- case KeyMap.lookup "title" obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ let snippet = case KeyMap.lookup "snippet" obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ published = case KeyMap.lookup "published" obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ pure SearchResult {srUrl = url, srTitle = title, srSnippet = snippet, srPublished = published}
+
+kagiSearch :: Text -> Text -> Int -> IO (Either Text [SearchResult])
+kagiSearch apiKey query limit = do
+ let encodedQuery = TE.decodeUtf8 (URI.urlEncode False (TE.encodeUtf8 query))
+ url = "https://kagi.com/api/v0/search?q=" <> Text.unpack encodedQuery <> "&limit=" <> show limit
+ result <-
+ try <| do
+ req0 <- HTTP.parseRequest url
+ let req =
+ HTTP.setRequestMethod "GET"
+ <| HTTP.setRequestHeader "Authorization" ["Bot " <> TE.encodeUtf8 apiKey]
+ <| req0
+ HTTP.httpLBS req
+ case result of
+ Left (e :: SomeException) ->
+ pure (Left ("Kagi API error: " <> tshow e))
+ Right response -> do
+ let status = HTTP.getResponseStatusCode response
+ if status >= 200 && status < 300
+ then case Aeson.decode (HTTP.getResponseBody response) of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "data" obj of
+ Just (Aeson.Array arr) ->
+ pure (Right (mapMaybe parseSearchResult (toList arr)))
+ _ -> pure (Left "No data in response")
+ _ -> pure (Left "Failed to parse Kagi response")
+ else case Aeson.decode (HTTP.getResponseBody response) of
+ Just (Aeson.Object obj) -> case KeyMap.lookup "error" obj of
+ Just errArr -> pure (Left ("Kagi error: " <> tshow errArr))
+ _ -> pure (Left ("Kagi HTTP error: " <> tshow status))
+ _ -> pure (Left ("Kagi HTTP error: " <> tshow status))
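+
+-- Usage sketch (the API key is a placeholder):
+--
+-- > hits <- kagiSearch "kagi-api-key" "haskell aeson tutorial" 5
+-- > either putText (putText <. formatResultsForLLM) hits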
+
+formatResultsForLLM :: [SearchResult] -> Text
+formatResultsForLLM [] = "No results found."
+formatResultsForLLM results =
+ Text.unlines (zipWith formatResult [1 ..] results)
+ where
+ formatResult :: Int -> SearchResult -> Text
+ formatResult n r =
+ tshow n
+ <> ". "
+ <> srTitle r
+ <> "\n "
+ <> srUrl r
+ <> maybe "" (\s -> "\n " <> Text.take 200 s) (srSnippet r)
+
+webSearchTool :: Text -> Engine.Tool
+webSearchTool apiKey =
+ Engine.Tool
+ { Engine.toolName = "web_search",
+ Engine.toolDescription =
+ "Search the web using Kagi. Use this to find current information, "
+ <> "verify facts, look up documentation, or research topics. "
+ <> "Returns titles, URLs, and snippets from search results.",
+ Engine.toolJsonSchema =
+ Aeson.object
+ [ "type" .= ("object" :: Text),
+ "properties"
+ .= Aeson.object
+ [ "query"
+ .= Aeson.object
+ [ "type" .= ("string" :: Text),
+ "description" .= ("The search query" :: Text)
+ ],
+ "limit"
+ .= Aeson.object
+ [ "type" .= ("integer" :: Text),
+ "description" .= ("Max results to return (default: 10, max: 20)" :: Text)
+ ]
+ ],
+ "required" .= (["query"] :: [Text])
+ ],
+ Engine.toolExecute = executeWebSearch apiKey
+ }
+
+executeWebSearch :: Text -> Aeson.Value -> IO Aeson.Value
+executeWebSearch apiKey v =
+ case Aeson.fromJSON v of
+ Aeson.Error e -> pure (Aeson.object ["error" .= Text.pack e])
+ Aeson.Success (args :: WebSearchArgs) -> do
+ let lim = min 20 (max 1 (wsLimit args))
+ result <- kagiSearch apiKey (wsQuery args) lim
+ case result of
+ Left err ->
+ pure (Aeson.object ["error" .= err])
+ Right results ->
+ pure
+ ( Aeson.object
+ [ "success" .= True,
+ "count" .= length results,
+ "results" .= formatResultsForLLM results
+ ]
+ )
+
+data WebSearchArgs = WebSearchArgs
+ { wsQuery :: Text,
+ wsLimit :: Int
+ }
+ deriving (Generic)
+
+instance Aeson.FromJSON WebSearchArgs where
+ parseJSON =
+ Aeson.withObject "WebSearchArgs" <| \v ->
+ (WebSearchArgs </ (v Aeson..: "query"))
+ <*> (v Aeson..:? "limit" Aeson..!= 10)
diff --git a/Omni/Agent/Worker.hs b/Omni/Agent/Worker.hs
index 66f894d..d6afb73 100644
--- a/Omni/Agent/Worker.hs
+++ b/Omni/Agent/Worker.hs
@@ -20,7 +20,8 @@ import qualified Data.Text.Encoding as TE
import qualified Data.Time
import qualified Omni.Agent.Core as Core
import qualified Omni.Agent.Engine as Engine
-import qualified Omni.Agent.Log as AgentLog
+import qualified Omni.Agent.Provider as Provider
+import qualified Omni.Agent.Status as AgentStatus
import qualified Omni.Agent.Tools as Tools
import qualified Omni.Fact as Fact
import qualified Omni.Task.Core as TaskCore
@@ -35,8 +36,8 @@ start worker maybeTaskId = do
if Core.workerQuiet worker
then putText ("[worker] Starting for " <> Core.workerName worker)
else do
- AgentLog.init (Core.workerName worker)
- AgentLog.log ("[worker] Starting for " <> Core.workerName worker)
+ AgentStatus.init (Core.workerName worker)
+ AgentStatus.log ("[worker] Starting for " <> Core.workerName worker)
case maybeTaskId of
Just tid -> logMsg worker ("[worker] Target task: " <> tid)
Nothing -> logMsg worker "[worker] No specific task, will pick from ready queue"
@@ -47,7 +48,7 @@ logMsg :: Core.Worker -> Text -> IO ()
logMsg worker msg =
if Core.workerQuiet worker
then putText msg
- else AgentLog.log msg
+ else AgentStatus.log msg
-- | Convert key-value pairs to JSON metadata string
toMetadata :: [(Text, Text)] -> Text
@@ -85,10 +86,10 @@ runOnce worker maybeTaskId = do
Nothing -> do
case maybeTaskId of
Just tid -> do
- unless (Core.workerQuiet worker) <| AgentLog.updateActivity ("Task " <> tid <> " not found.")
+ unless (Core.workerQuiet worker) <| AgentStatus.updateActivity ("Task " <> tid <> " not found.")
logMsg worker ("[worker] Task " <> tid <> " not found.")
Nothing -> do
- unless (Core.workerQuiet worker) <| AgentLog.updateActivity "No work found."
+ unless (Core.workerQuiet worker) <| AgentStatus.updateActivity "No work found."
logMsg worker "[worker] No ready tasks found."
Just task -> do
processTask worker task
@@ -100,7 +101,7 @@ processTask worker task = do
let quiet = Core.workerQuiet worker
let say = logMsg worker
- unless quiet <| AgentLog.update (\s -> s {AgentLog.statusTask = Just tid})
+ unless quiet <| AgentStatus.update (\s -> s {AgentStatus.statusTask = Just tid})
say ("[worker] Claiming task " <> tid)
-- Claim task
@@ -173,13 +174,13 @@ processTask worker task = do
TaskCore.logActivity tid TaskCore.Completed (Just (toMetadata [("result", "no_changes")]))
TaskCore.updateTaskStatusWithActor tid TaskCore.Done [] TaskCore.Junior
say ("[worker] ✓ Task " <> tid <> " -> Done (no changes)")
- unless quiet <| AgentLog.update (\s -> s {AgentLog.statusTask = Nothing})
+ unless quiet <| AgentStatus.update (\s -> s {AgentStatus.statusTask = Nothing})
CommitSuccess -> do
-- Commit succeeded, set to Review
TaskCore.logActivity tid TaskCore.Completed (Just (toMetadata [("result", "committed")]))
TaskCore.updateTaskStatusWithActor tid TaskCore.Review [] TaskCore.Junior
say ("[worker] ✓ Task " <> tid <> " -> Review")
- unless quiet <| AgentLog.update (\s -> s {AgentLog.statusTask = Nothing})
+ unless quiet <| AgentStatus.update (\s -> s {AgentStatus.statusTask = Nothing})
EngineGuardrailViolation errMsg _ -> do
say ("[worker] Guardrail violation: " <> errMsg)
TaskCore.logActivity tid TaskCore.Failed (Just (toMetadata [("reason", "guardrail_violation")]))
@@ -188,7 +189,7 @@ processTask worker task = do
-- Set to NeedsHelp so human can review
TaskCore.updateTaskStatusWithActor tid TaskCore.NeedsHelp [] TaskCore.Junior
say ("[worker] Task " <> tid <> " -> NeedsHelp (guardrail violation)")
- unless quiet <| AgentLog.update (\s -> s {AgentLog.statusTask = Nothing})
+ unless quiet <| AgentStatus.update (\s -> s {AgentStatus.statusTask = Nothing})
EngineError errMsg _ -> do
say ("[worker] Engine error: " <> errMsg)
TaskCore.logActivity tid TaskCore.Failed (Just (toMetadata [("reason", "engine_error")]))
@@ -302,7 +303,7 @@ runWithEngine worker repo task = do
-- Build Engine config with callbacks
totalCostRef <- newIORef (0 :: Double)
let quiet = Core.workerQuiet worker
- sayLog msg = if quiet then putText msg else AgentLog.log msg
+ sayLog msg = if quiet then putText msg else AgentStatus.log msg
engineCfg =
Engine.EngineConfig
{ Engine.engineLLM =
@@ -357,8 +358,14 @@ runWithEngine worker repo task = do
Engine.agentGuardrails = guardrails
}
- -- Run the agent
- result <- Engine.runAgent engineCfg agentCfg userPrompt
+ -- Run the agent with appropriate provider
+ result <- case Core.workerEngine worker of
+ Core.EngineOpenRouter -> Engine.runAgent engineCfg agentCfg userPrompt
+ Core.EngineOllama -> do
+ ollamaModel <- fromMaybe "llama3.1:8b" </ Env.lookupEnv "OLLAMA_MODEL"
+ let provider = Provider.defaultOllama (Text.pack ollamaModel)
+ Engine.runAgentWithProvider engineCfg provider agentCfg userPrompt
+ Core.EngineAmp -> pure (Left "Amp engine not yet implemented")
totalCost <- readIORef totalCostRef
case result of
diff --git a/Omni/Ava.hs b/Omni/Ava.hs
new file mode 100755
index 0000000..2dfecb1
--- /dev/null
+++ b/Omni/Ava.hs
@@ -0,0 +1,66 @@
+#!/usr/bin/env run.sh
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Ava - AI assistant via Telegram.
+--
+-- Usage:
+-- ava # Uses TELEGRAM_BOT_TOKEN env var
+-- ava --token=XXX # Explicit token
+-- ava --model=MODEL # Override LLM model
+--
+-- : out ava
+-- : dep aeson
+-- : dep http-conduit
+-- : dep stm
+module Omni.Ava where
+
+import Alpha
+import qualified Data.Text as Text
+import qualified Omni.Agent.Telegram as Telegram
+import qualified Omni.Cli as Cli
+import qualified Omni.Test as Test
+import qualified System.Console.Docopt as Docopt
+
+main :: IO ()
+main = Cli.main plan
+
+plan :: Cli.Plan ()
+plan =
+ Cli.Plan
+ { Cli.help = help,
+ Cli.move = move,
+ Cli.test = test,
+ Cli.tidy = \_ -> pure ()
+ }
+
+help :: Cli.Docopt
+help =
+ [Cli.docopt|
+ava - AI assistant via Telegram
+
+Usage:
+ ava [--token=TOKEN] [--model=MODEL]
+ ava test
+ ava (-h | --help)
+
+Options:
+ -h --help Show this help
+ --token=TOKEN Telegram bot token (or use TELEGRAM_BOT_TOKEN env)
+ --model=MODEL LLM model to use [default: anthropic/claude-sonnet-4]
+|]
+
+move :: Cli.Arguments -> IO ()
+move args = do
+ let maybeToken = fmap Text.pack (Cli.getArg args (Cli.longOption "token"))
+ Telegram.startBot maybeToken
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Ava"
+ [ Test.unit "help is non-empty" <| do
+ let usage = str (Docopt.usage help) :: String
+ null usage Test.@=? False
+ ]
diff --git a/Omni/Bild.hs b/Omni/Bild.hs
index e1f5c46..1ebeb05 100644
--- a/Omni/Bild.hs
+++ b/Omni/Bild.hs
@@ -249,7 +249,7 @@ move args = do
forM_ skippedNamespaces <| \ns -> LogC.updateLineState ns LogC.Skipped
action
runWithManager <| do
- pipelineBuild isTest isLoud 8 jobs (cpus nproc) namespaces analyzeOne
+ pipelineBuild isTest isLoud (not noCache) 8 jobs (cpus nproc) namespaces analyzeOne
|> Timeout.timeout (toMillis minutes)
+> \case
Nothing ->
@@ -285,6 +285,7 @@ move args = do
Just n -> n
isTest = args `Cli.has` Cli.longOption "test"
isLoud = args `Cli.has` Cli.longOption "loud"
+ noCache = args `Cli.has` Cli.longOption "no-cache"
putJSON = Aeson.encode .> ByteString.Lazy.toStrict .> Char8.putStrLn
-- | Don't try to build stuff that isn't part of the git repo.
@@ -363,6 +364,7 @@ Options:
--test, -t Run tests on a target after building
--loud, -l Show all output from compiler
--plan, -p Print the build plan as JSON, don't build
+ --no-cache Skip signing and pushing to S3 binary cache
--time N Set timeout to N minutes, 0 means never timeout [default: 10]
--jobs N, -j N Build up to N jobs at once [default: 6]
--cpus N, -c N Allocate up to N cpu cores per job (default: (nproc-4)/jobs)
@@ -1297,8 +1299,8 @@ pipelineAnalysisWorker coord@Coordinator {..} analyzeFn = loop
else modifyTVar' coStates (Map.insert ns (TSWaitingForDeps target pendingDeps))
loop
-pipelineBuildWorker :: Bool -> Bool -> Int -> Int -> Coordinator -> IO ()
-pipelineBuildWorker andTest loud jobs cpus coord@Coordinator {..} = loop
+pipelineBuildWorker :: Bool -> Bool -> Bool -> Int -> Int -> Coordinator -> IO ()
+pipelineBuildWorker andTest loud andCache jobs cpus coord@Coordinator {..} = loop
where
loop = do
remaining <- readTVarIO coRemaining
@@ -1319,7 +1321,7 @@ pipelineBuildWorker andTest loud jobs cpus coord@Coordinator {..} = loop
Nothing -> loop
Just target -> do
LogC.updateLineState ns LogC.Building
- exitCode <- pipelineBuildOne andTest loud jobs cpus target
+ exitCode <- pipelineBuildOne andTest loud andCache jobs cpus target
atomically <| do
modifyTVar' coStates (Map.insert ns (TSComplete target exitCode))
modifyTVar' coResults (exitCode :)
@@ -1342,8 +1344,8 @@ promoteWaiters Coordinator {..} completedNs = do
else modifyTVar' coStates (Map.insert ns (TSWaitingForDeps target deps'))
_ -> pure ()
-pipelineBuildOne :: Bool -> Bool -> Int -> Int -> Target -> IO Exit.ExitCode
-pipelineBuildOne andTest loud jobs cpus target@Target {..} = do
+pipelineBuildOne :: Bool -> Bool -> Bool -> Int -> Int -> Target -> IO Exit.ExitCode
+pipelineBuildOne andTest loud andCache jobs cpus target@Target {..} = do
root <- getCoderoot
result <- case compiler of
CPython -> case out of
@@ -1392,14 +1394,50 @@ pipelineBuildOne andTest loud jobs cpus target@Target {..} = do
nixBuild loud jobs cpus target
Sbcl ->
proc loud namespace (toNixFlag compiler) compilerFlags
- pure (fst result)
-
-pipelineBuild :: Bool -> Bool -> Int -> Int -> Int -> [Namespace] -> (Namespace -> IO (Maybe Target)) -> IO [Exit.ExitCode]
-pipelineBuild andTest loud analysisWorkers buildWorkers cpus namespaces analyzeFn = do
+ let exitCode = fst result
+ when (andCache && isSuccess exitCode) <| do
+ storePath <- Dir.canonicalizePath (nixdir </> outname out)
+ cacheStorePath loud namespace storePath
+ pure exitCode
+
+cacheStorePath :: Bool -> Namespace -> FilePath -> IO ()
+cacheStorePath loud ns storePath = do
+ mKeyPath <- Env.lookupEnv "NIX_CACHE_KEY"
+ case mKeyPath of
+ Nothing -> Log.warn ["cache", "NIX_CACHE_KEY not set, skipping"]
+ Just keyPath -> do
+ let s3Url = "s3://omni-nix-cache?profile=digitalocean&scheme=https&endpoint=nyc3.digitaloceanspaces.com"
+ LogC.updateLine ns "signing..."
+ (signExit, _, signErr) <-
+ Process.readProcessWithExitCode
+ "nix"
+ ["store", "sign", "--key-file", keyPath, storePath]
+ ""
+ case signExit of
+ Exit.ExitSuccess -> do
+ LogC.updateLine ns "pushing to cache..."
+ (pushExit, _, pushErr) <-
+ Process.readProcessWithExitCode
+ "nix"
+ ["copy", "--to", s3Url, storePath]
+ ""
+ case pushExit of
+ Exit.ExitSuccess -> do
+ loud ?| Log.good ["cache", "pushed", Text.pack storePath]
+ Text.IO.putStrLn <| "STORE_PATH=" <> Text.pack storePath
+ Exit.ExitFailure _ -> do
+ Log.fail ["cache", "push failed", Text.pack storePath]
+ loud ?| putStrLn pushErr
+ Exit.ExitFailure _ -> do
+ Log.fail ["cache", "sign failed", Text.pack storePath]
+ loud ?| putStrLn signErr
+
+pipelineBuild :: Bool -> Bool -> Bool -> Int -> Int -> Int -> [Namespace] -> (Namespace -> IO (Maybe Target)) -> IO [Exit.ExitCode]
+pipelineBuild andTest loud andCache analysisWorkers buildWorkers cpus namespaces analyzeFn = do
root <- getCoderoot
coord <- initCoordinator root namespaces
let spawnAnalysis = replicateM analysisWorkers (Async.async (pipelineAnalysisWorker coord analyzeFn))
- let spawnBuild = replicateM buildWorkers (Async.async (pipelineBuildWorker andTest loud buildWorkers cpus coord))
+ let spawnBuild = replicateM buildWorkers (Async.async (pipelineBuildWorker andTest loud andCache buildWorkers cpus coord))
threads <- (<>) </ spawnAnalysis <*> spawnBuild
let waitLoop = do
remaining <- readTVarIO (coRemaining coord)
diff --git a/Omni/Bild.nix b/Omni/Bild.nix
index b7e0801..ca70ae8 100644
--- a/Omni/Bild.nix
+++ b/Omni/Bild.nix
@@ -110,6 +110,7 @@
pkgs = with stable.pkgs; {
inherit
alejandra
+ awscli2
bat
bc
cmark
@@ -117,6 +118,7 @@
universal-ctags
datasette
deadnix
+ doctl
fd
figlet
findutils
@@ -146,6 +148,7 @@
llama-cpp = unstable.llama-cpp;
llm = unstable.python312.withPackages (p: [p.llm]);
ollama = unstable.ollama;
+ trafilatura = unstable.python312.withPackages (p: [p.trafilatura]);
ruff = unstable.ruff;
shellcheck = unstable.shellcheck;
};
@@ -256,6 +259,7 @@
bc
self.bild
datasette
+ doctl
universal-ctags
fd
figlet
diff --git a/Omni/Bild/Builder.nix b/Omni/Bild/Builder.nix
index 9ede3b8..9356d97 100644
--- a/Omni/Bild/Builder.nix
+++ b/Omni/Bild/Builder.nix
@@ -280,8 +280,7 @@ with bild; let
python = python.buildPythonApplication rec {
inherit name src CODEROOT;
- pyproject = true;
- build-system = [python.packages.setuptools];
+ format = "setuptools";
nativeBuildInputs = [makeWrapper];
propagatedBuildInputs = langdeps_ ++ sysdeps_ ++ rundeps_;
buildInputs = sysdeps_;
diff --git a/Omni/Bild/Deps/Haskell.nix b/Omni/Bild/Deps/Haskell.nix
index 7e3650a..138a80e 100644
--- a/Omni/Bild/Deps/Haskell.nix
+++ b/Omni/Bild/Deps/Haskell.nix
@@ -22,6 +22,8 @@
"fast-logger"
"filepath"
"github"
+ "HaskellNet"
+ "HaskellNet-SSL"
"haskeline"
"hostname"
"http-types"
@@ -53,6 +55,7 @@
"sqids"
"sqlite-simple"
"stm"
+ "tagsoup"
"tasty"
"tasty-hunit"
"tasty-quickcheck"
diff --git a/Omni/Bild/Haskell.nix b/Omni/Bild/Haskell.nix
index e55dee9..5754253 100644
--- a/Omni/Bild/Haskell.nix
+++ b/Omni/Bild/Haskell.nix
@@ -21,6 +21,8 @@ in rec {
cmark = doJailbreak sup.cmark;
docopt = buildCabal sel "docopt";
filelock = dontCheck sup.filelock;
+ HaskellNet = doJailbreak sup.HaskellNet;
+ HaskellNet-SSL = doJailbreak sup.HaskellNet-SSL;
linear-generics = doJailbreak sup.linear-generics;
req = doJailbreak sup.req;
servant-auth = doJailbreak sup.servant-auth;
diff --git a/Omni/Bot.scm b/Omni/Bot.scm
deleted file mode 100755
index ff81c53..0000000
--- a/Omni/Bot.scm
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env run.sh
-!#
-;; : out omnibot
-;;
-;; Usage with ii:
-;;
-;; tail -f \#omni/out | guile -L $CODEROOT -s Omni/Bot.scm
-;;
-(define-module (Omni Bot) #:export (main))
-
-(import (ice-9 rdelim))
-(import (ice-9 match))
-(import (ice-9 regex))
-(import (ice-9 receive))
-(import (bs core))
-(import (prefix (bs string) string.))
-
-(define (log msg)
- (display msg (current-error-port)))
-
-(define (is-command? msg)
- (string.prefix? msg "omnibot:"))
-
-(define (parse-line line)
- (if (eof-object? line)
- (exit)
- (let ([matches (regexp-exec
- (make-regexp "<(\\S*)>(.*)" regexp/extended)
- (string-drop line 11))])
- (if matches
- `(user
- ,(match:substring matches 1)
- ,(string.lstrip (match:substring matches 2) #\space))
- `(system ,(string-drop line 11))))))
-
-(define (dispatch user msg)
- (let ([msg (-> msg
- (string-drop (string-length "omnibot:"))
- (string.lstrip #\space))])
- (cond
- ((equal? msg "hi")
- (display (fmt "~a: well, hello!" user)))
-
- (else
- (display (fmt "command not understood: ~a" msg))))))
-
-(define (main args)
- (while #t
- (match (parse-line (read-line))
- [('user user msg)
- (if (is-command? msg)
- (dispatch user msg)
- (begin
- (log (fmt "user: ~a " user))
- (log (fmt "message: ~a" msg))))]
-
- [('system msg)
- (log (fmt "system: ~a" msg))])
-
- (newline)
- (force-output)))
diff --git a/Omni/Deploy/Caddy.hs b/Omni/Deploy/Caddy.hs
new file mode 100644
index 0000000..6cedf92
--- /dev/null
+++ b/Omni/Deploy/Caddy.hs
@@ -0,0 +1,241 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE TypeApplications #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Caddy admin API integration for the mini-PaaS deployment system.
+--
+-- : out deploy-caddy
+-- : dep aeson
+-- : dep http-conduit
+-- : dep http-types
+module Omni.Deploy.Caddy
+ ( buildRoute,
+ getCurrentRoutes,
+ upsertRoute,
+ deleteRoute,
+ syncRoutes,
+ getRouteById,
+ caddyAdmin,
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Map as Map
+import qualified Data.Text as Text
+import qualified Network.HTTP.Simple as HTTP
+import qualified Network.HTTP.Types.Status as Status
+import Omni.Deploy.Manifest (Artifact (..), Exec (..), Hardening (..), Http (..), Service (..), Systemd (..))
+import qualified Omni.Test as Test
+
+caddyAdmin :: Text
+caddyAdmin = "http://localhost:2019"
+
+data Route = Route
+ { routeId :: Text,
+ routeMatch :: [RouteMatch],
+ routeHandle :: [RouteHandler],
+ routeTerminal :: Bool
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON Route where
+ toJSON Route {..} =
+ Aeson.object
+ [ "@id" .= routeId,
+ "match" .= routeMatch,
+ "handle" .= routeHandle,
+ "terminal" .= routeTerminal
+ ]
+
+newtype RouteMatch = RouteMatch
+ { matchHost :: [Text]
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON RouteMatch where
+ toJSON RouteMatch {..} =
+ Aeson.object ["host" .= matchHost]
+
+data RouteHandler = RouteHandler
+ { handlerType :: Text,
+ handlerUpstreams :: [Upstream]
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON RouteHandler where
+ toJSON RouteHandler {..} =
+ Aeson.object
+ [ "handler" .= handlerType,
+ "upstreams" .= handlerUpstreams
+ ]
+
+newtype Upstream = Upstream
+ { upstreamDial :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON Upstream where
+ toJSON Upstream {..} =
+ Aeson.object ["dial" .= upstreamDial]
+
+buildRoute :: Service -> Maybe Route
+buildRoute Service {..} = case serviceHttp of
+ Nothing -> Nothing
+ Just Http {..} ->
+ Just
+ <| Route
+ { routeId = "biz-" <> serviceName,
+ routeMatch = [RouteMatch [httpDomain]],
+ routeHandle =
+ [ RouteHandler
+ "reverse_proxy"
+ [Upstream <| "localhost:" <> tshow httpInternalPort]
+ ],
+ routeTerminal = True
+ }
+
+getCurrentRoutes :: Text -> IO [Aeson.Value]
+getCurrentRoutes adminUrl = do
+ let url = Text.unpack adminUrl <> "/config/apps/http/servers/srv0/routes"
+ request <- HTTP.parseRequest url
+ result <- try @SomeException <| HTTP.httpLBS request
+ case result of
+ Left _ -> pure []
+ Right response ->
+ if Status.statusIsSuccessful (HTTP.getResponseStatus response)
+ then case Aeson.decode (HTTP.getResponseBody response) of
+ Just routes -> pure routes
+ Nothing -> pure []
+ else pure []
+
+upsertRoute :: Text -> Service -> IO Bool
+upsertRoute adminUrl svc = case buildRoute svc of
+ Nothing -> pure False
+ Just route -> do
+ let routeId' = "biz-" <> serviceName svc
+ patchUrl = Text.unpack adminUrl <> "/id/" <> Text.unpack routeId'
+ postUrl = Text.unpack adminUrl <> "/config/apps/http/servers/srv0/routes"
+ body = Aeson.encode route
+
+ patchRequest <-
+ HTTP.parseRequest patchUrl
+ /> HTTP.setRequestMethod "PATCH"
+ /> HTTP.setRequestBodyLBS body
+ /> HTTP.setRequestHeader "Content-Type" ["application/json"]
+ patchResult <- try @SomeException <| HTTP.httpLBS patchRequest
+
+ case patchResult of
+ Right resp
+ | Status.statusIsSuccessful (HTTP.getResponseStatus resp) ->
+ pure True
+ _ -> do
+ postRequest <-
+ HTTP.parseRequest postUrl
+ /> HTTP.setRequestMethod "POST"
+ /> HTTP.setRequestBodyLBS body
+ /> HTTP.setRequestHeader "Content-Type" ["application/json"]
+ postResult <- try @SomeException <| HTTP.httpLBS postRequest
+ case postResult of
+ Right resp -> pure <| Status.statusIsSuccessful (HTTP.getResponseStatus resp)
+ Left _ -> pure False
+
+deleteRoute :: Text -> Text -> IO Bool
+deleteRoute adminUrl serviceName' = do
+ let routeId' = "biz-" <> serviceName'
+ url = Text.unpack adminUrl <> "/id/" <> Text.unpack routeId'
+ request <-
+ HTTP.parseRequest url
+ /> HTTP.setRequestMethod "DELETE"
+ result <- try @SomeException <| HTTP.httpLBS request
+ case result of
+ Right resp -> pure <| Status.statusIsSuccessful (HTTP.getResponseStatus resp)
+ Left _ -> pure False
+
+syncRoutes :: Text -> [Service] -> IO (Map Text Bool)
+syncRoutes adminUrl services = do
+ results <-
+ forM services <| \svc ->
+ case serviceHttp svc of
+ Nothing -> pure Nothing
+ Just _ -> do
+ success <- upsertRoute adminUrl svc
+ pure <| Just (serviceName svc, success)
+ pure <| Map.fromList <| catMaybes results
+
+getRouteById :: Text -> Text -> IO (Maybe Aeson.Value)
+getRouteById adminUrl routeId' = do
+ let url = Text.unpack adminUrl <> "/id/" <> Text.unpack routeId'
+ request <- HTTP.parseRequest url
+ result <- try @SomeException <| HTTP.httpLBS request
+ case result of
+ Right resp
+ | Status.statusIsSuccessful (HTTP.getResponseStatus resp) ->
+ pure <| Aeson.decode (HTTP.getResponseBody resp)
+ _ -> pure Nothing
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Deploy.Caddy"
+ [ test_buildRouteWithHttp,
+ test_buildRouteWithoutHttp,
+ test_buildRouteWithPath
+ ]
+
+mkTestService :: Text -> Text -> Maybe Http -> Service
+mkTestService name path http =
+ Service
+ { serviceName = name,
+ serviceArtifact = Artifact "nix-closure" path,
+ serviceHosts = ["biz"],
+ serviceExec = Exec Nothing "root" "root",
+ serviceEnv = mempty,
+ serviceEnvFile = Nothing,
+ serviceHttp = http,
+ serviceSystemd = Systemd ["network-online.target"] [] "on-failure" 5,
+ serviceHardening = Hardening False True "strict" True,
+ serviceRevision = Nothing
+ }
+
+test_buildRouteWithHttp :: Test.Tree
+test_buildRouteWithHttp =
+ Test.unit "builds route for service with HTTP" <| do
+ let svc = mkTestService "test-svc" "/nix/store/abc" (Just <| Http "example.com" "/" 8000)
+ case buildRoute svc of
+ Nothing -> Test.assertFailure "expected route"
+ Just route -> do
+ routeId route Test.@=? "biz-test-svc"
+ case (head <| routeMatch route, head <| routeHandle route) of
+ (Just m, Just h) -> do
+ matchHost m Test.@=? ["example.com"]
+ case head <| handlerUpstreams h of
+ Just u -> upstreamDial u Test.@=? "localhost:8000"
+ Nothing -> Test.assertFailure "no upstreams"
+ _ -> Test.assertFailure "no match/handle"
+
+test_buildRouteWithoutHttp :: Test.Tree
+test_buildRouteWithoutHttp =
+ Test.unit "returns Nothing for service without HTTP" <| do
+ let svc = mkTestService "worker" "/nix/store/xyz" Nothing
+ case buildRoute svc of
+ Nothing -> pure ()
+ Just _ -> Test.assertFailure "expected Nothing"
+
+test_buildRouteWithPath :: Test.Tree
+test_buildRouteWithPath =
+ Test.unit "builds route with custom path" <| do
+ let svc = mkTestService "api" "/nix/store/abc" (Just <| Http "api.example.com" "/v1" 8080)
+ case buildRoute svc of
+ Nothing -> Test.assertFailure "expected route"
+ Just route -> case head <| routeMatch route of
+ Nothing -> Test.assertFailure "no match"
+ Just m -> matchHost m Test.@=? ["api.example.com"]
+
+main :: IO ()
+main = Test.run test
diff --git a/Omni/Deploy/Deployer.hs b/Omni/Deploy/Deployer.hs
new file mode 100644
index 0000000..7e57b34
--- /dev/null
+++ b/Omni/Deploy/Deployer.hs
@@ -0,0 +1,317 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Mini-PaaS deployer service.
+--
+-- Polls manifest from S3, compares to local state, pulls changed closures,
+-- generates systemd units, updates Caddy routes, and manages GC roots.
+--
+-- : out biz-deployer
+-- : dep aeson
+-- : dep amazonka
+-- : dep amazonka-core
+-- : dep amazonka-s3
+-- : dep directory
+-- : dep http-conduit
+-- : dep http-types
+-- : dep time
+module Omni.Deploy.Deployer
+ ( DeployerState (..),
+ loadState,
+ saveState,
+ pullClosure,
+ createGcRoot,
+ removeGcRoot,
+ deployService,
+ removeService,
+ reconcile,
+ runOnce,
+ runDaemon,
+ stateDir,
+ stateFile,
+ gcrootsDir,
+ main,
+ test,
+ )
+where
+
+import Alpha
+import qualified Control.Concurrent as Concurrent
+import qualified Data.Aeson as Aeson
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Map as Map
+import qualified Data.Set as Set
+import qualified Data.Text as Text
+import qualified Network.HostName as HostName
+import qualified Omni.Cli as Cli
+import qualified Omni.Deploy.Caddy as Caddy
+import qualified Omni.Deploy.Manifest as Manifest
+import qualified Omni.Deploy.Systemd as Systemd
+import qualified Omni.Log as Log
+import qualified Omni.Test as Test
+import qualified System.Directory as Dir
+import qualified System.Exit as Exit
+import System.FilePath ((</>))
+import qualified System.Process as Process
+
+stateDir :: FilePath
+stateDir = "/var/lib/biz-deployer"
+
+stateFile :: FilePath
+stateFile = stateDir </> "state.json"
+
+gcrootsDir :: FilePath
+gcrootsDir = "/nix/var/nix/gcroots/biz"
+
+s3Url :: String
+s3Url = "s3://omni-nix-cache?profile=digitalocean&scheme=https&endpoint=nyc3.digitaloceanspaces.com"
+
+newtype DeployerState = DeployerState
+ { stateServices :: Map Text Text
+ }
+ deriving (Show, Eq, Generic)
+ deriving anyclass (Aeson.FromJSON, Aeson.ToJSON)
+
+emptyState :: DeployerState
+emptyState = DeployerState mempty
+
+loadState :: IO DeployerState
+loadState = do
+ exists <- Dir.doesFileExist stateFile
+ if exists
+ then do
+ contents <- BL.readFile stateFile
+ case Aeson.eitherDecode contents of
+ Left _ -> pure emptyState
+ Right s -> pure s
+ else pure emptyState
+
+saveState :: DeployerState -> IO ()
+saveState st = do
+ Dir.createDirectoryIfMissing True stateDir
+ BL.writeFile stateFile (Aeson.encode st)
+
+getHostname :: IO Text
+getHostname = HostName.getHostName /> Text.pack
+
+pullClosure :: Text -> IO Bool
+pullClosure storePath = do
+ -- First check if the path already exists locally
+ exists <- Dir.doesDirectoryExist (Text.unpack storePath)
+ if exists
+ then do
+ Log.info ["deployer", "path already exists locally", storePath]
+ pure True
+ else do
+ (exitCode, _, stderr') <-
+ Process.readProcessWithExitCode
+ "nix"
+ [ "copy",
+ "--extra-experimental-features",
+ "nix-command",
+ "--from",
+ s3Url,
+ Text.unpack storePath
+ ]
+ ""
+ case exitCode of
+ Exit.ExitSuccess -> pure True
+ Exit.ExitFailure _ -> do
+ Log.fail ["deployer", "pull failed", storePath, Text.pack stderr']
+ pure False
+
+createGcRoot :: Text -> Text -> IO FilePath
+createGcRoot serviceName storePath = do
+ Dir.createDirectoryIfMissing True gcrootsDir
+ let rootPath = gcrootsDir </> Text.unpack serviceName
+ exists <- Dir.doesPathExist rootPath
+ when exists <| Dir.removeFile rootPath
+ Dir.createFileLink (Text.unpack storePath) rootPath
+ pure rootPath
+
+removeGcRoot :: Text -> IO ()
+removeGcRoot serviceName = do
+ let rootPath = gcrootsDir </> Text.unpack serviceName
+ exists <- Dir.doesPathExist rootPath
+ when exists <| Dir.removeFile rootPath
+
+deployService :: Manifest.Service -> DeployerState -> IO (Bool, DeployerState)
+deployService svc st = do
+ let name = Manifest.serviceName svc
+ path = Manifest.storePath (Manifest.serviceArtifact svc)
+
+ -- Check what's actually running in systemd instead of in-memory state
+ runningPath <- Systemd.getRunningStorePath name
+
+ if runningPath == Just path
+ then do
+ Log.info ["deployer", name, "already at", path]
+ pure (True, st)
+ else do
+ Log.info ["deployer", "deploying", name, fromMaybe "new" runningPath, "->", path]
+
+ pulled <- pullClosure path
+ if don't pulled
+ then do
+ Log.fail ["deployer", "failed to pull", name]
+ pure (False, st)
+ else do
+ _ <- createGcRoot name path
+
+ _ <- Systemd.writeUnit Systemd.servicesDir svc
+ _ <- Systemd.createSymlink Systemd.servicesDir "/run/systemd/system" svc
+ Systemd.reloadAndRestart name
+
+ case Manifest.serviceHttp svc of
+ Just _ -> void <| Caddy.upsertRoute Caddy.caddyAdmin svc
+ Nothing -> pure ()
+
+ let newSt = st {stateServices = Map.insert name path (stateServices st)}
+ Log.good ["deployer", "deployed", name]
+ pure (True, newSt)
+
+removeService :: Text -> DeployerState -> IO DeployerState
+removeService svcName st = do
+ Log.info ["deployer", "removing", svcName]
+
+ Systemd.stopAndDisable svcName
+ Systemd.removeUnit Systemd.servicesDir "/run/systemd/system" svcName
+ _ <- Caddy.deleteRoute Caddy.caddyAdmin svcName
+ removeGcRoot svcName
+
+ pure <| st {stateServices = Map.delete svcName (stateServices st)}
+
+reconcile :: Manifest.Manifest -> DeployerState -> IO DeployerState
+reconcile manifest st = do
+ hostname <- getHostname
+
+ let mfstServices =
+ Set.fromList
+ [ Manifest.serviceName svc
+ | svc <- Manifest.manifestServices manifest,
+ hostname `elem` Manifest.serviceHosts svc
+ ]
+ localServices = Set.fromList <| Map.keys (stateServices st)
+ toRemove = localServices Set.\\ mfstServices
+
+ st' <- foldM (flip removeService) st (Set.toList toRemove)
+
+ foldM
+ ( \s svc ->
+ if hostname `elem` Manifest.serviceHosts svc
+ then do
+ (_, newSt) <- deployService svc s
+ pure newSt
+ else pure s
+ )
+ st'
+ (Manifest.manifestServices manifest)
+
+runOnce :: IO Bool
+runOnce = do
+ Log.info ["deployer", "starting reconciliation"]
+
+ manifest <- Manifest.loadManifestFromS3
+ case manifest of
+ Nothing -> do
+ Log.warn ["deployer", "no manifest found in S3"]
+ pure False
+ Just m -> do
+ st <- loadState
+ st' <- reconcile m st
+ saveState st'
+ Log.good ["deployer", "reconciliation complete"]
+ pure True
+
+runDaemon :: Int -> IO ()
+runDaemon intervalSeconds = do
+ Log.info ["deployer", "starting daemon", "interval=" <> tshow intervalSeconds <> "s"]
+ forever <| do
+ result <- try runOnce
+ case result of
+ Left (e :: SomeException) ->
+ Log.fail ["deployer", "error in reconciliation", tshow e]
+ Right _ -> pure ()
+ Concurrent.threadDelay (intervalSeconds * 1_000_000)
+
+help :: Cli.Docopt
+help =
+ [Cli.docopt|
+biz-deployer - Mini-PaaS deployment agent
+
+Usage:
+ biz-deployer test
+ biz-deployer once
+ biz-deployer daemon [<interval>]
+ biz-deployer status
+ biz-deployer (-h | --help)
+
+Commands:
+ test Run tests
+ once Run a single reconciliation cycle
+ daemon Run as daemon with interval in seconds (default: 300)
+ status Show current deployer state
+
+Options:
+ -h --help Show this help
+|]
+
+move :: Cli.Arguments -> IO ()
+move args
+ | args `Cli.has` Cli.command "once" = do
+ success <- runOnce
+ if success
+ then Exit.exitSuccess
+ else Exit.exitWith (Exit.ExitFailure 1)
+ | args `Cli.has` Cli.command "daemon" = do
+ let interval =
+ Cli.getArg args (Cli.argument "interval")
+ +> readMaybe
+ |> fromMaybe 300
+ runDaemon interval
+ | args `Cli.has` Cli.command "status" = do
+ st <- loadState
+ BL.putStr <| Aeson.encode st
+ putStrLn ("" :: String)
+ | otherwise = do
+ Log.fail ["deployer", "unknown command"]
+ Exit.exitWith (Exit.ExitFailure 1)
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Deploy.Deployer"
+ [ test_emptyState,
+ test_stateJsonRoundtrip
+ ]
+
+test_emptyState :: Test.Tree
+test_emptyState =
+ Test.unit "empty state has no services" <| do
+ let st = emptyState
+ Map.null (stateServices st) Test.@=? True
+
+test_stateJsonRoundtrip :: Test.Tree
+test_stateJsonRoundtrip =
+ Test.unit "state JSON roundtrip" <| do
+ let testState =
+ DeployerState
+ { stateServices =
+ Map.fromList
+ [ ("svc-a", "/nix/store/abc"),
+ ("svc-b", "/nix/store/xyz")
+ ]
+ }
+ let encoded = Aeson.encode testState
+ case Aeson.eitherDecode encoded of
+ Left err -> Test.assertFailure err
+ Right decoded -> stateServices decoded Test.@=? stateServices testState
+
+main :: IO ()
+main = Cli.main <| Cli.Plan help move test pure
diff --git a/Omni/Deploy/Deployer.nix b/Omni/Deploy/Deployer.nix
new file mode 100644
index 0000000..091b43b
--- /dev/null
+++ b/Omni/Deploy/Deployer.nix
@@ -0,0 +1,104 @@
+{
+ options,
+ lib,
+ config,
+ pkgs,
+ ...
+}: let
+ cfg = config.services.biz-deployer;
+in {
+ options.services.biz-deployer = {
+ enable = lib.mkEnableOption "Enable the biz-deployer mini-PaaS service";
+
+ package = lib.mkOption {
+ type = lib.types.package;
+ description = "The biz-deployer package to use";
+ };
+
+ manifestPackage = lib.mkOption {
+ type = lib.types.package;
+ description = "The deploy-manifest package for CLI operations";
+ };
+
+ interval = lib.mkOption {
+ type = lib.types.int;
+ default = 300;
+ description = "Interval in seconds between reconciliation cycles";
+ };
+
+ stateDir = lib.mkOption {
+ type = lib.types.path;
+ default = "/var/lib/biz-deployer";
+ description = "Directory for deployer state and generated unit files";
+ };
+
+ secretsDir = lib.mkOption {
+ type = lib.types.path;
+ default = "/var/lib/biz-secrets";
+ description = "Directory containing service secret .env files";
+ };
+
+ gcrootsDir = lib.mkOption {
+ type = lib.types.path;
+ default = "/nix/var/nix/gcroots/biz";
+ description = "Directory for GC roots to prevent closure garbage collection";
+ };
+ };
+
+ config = lib.mkIf cfg.enable {
+ # Create required directories
+ systemd.tmpfiles.rules = [
+ "d ${cfg.stateDir} 0755 root root -"
+ "d ${cfg.stateDir}/services 0755 root root -"
+ "d ${cfg.secretsDir} 0700 root root -"
+ "d ${cfg.gcrootsDir} 0755 root root -"
+ ];
+
+ # The deployer service runs as a timer-triggered oneshot
+ systemd.services.biz-deployer = {
+ description = "Mini-PaaS deployment agent";
+ after = ["network-online.target"];
+ wants = ["network-online.target"];
+ path = [cfg.package cfg.manifestPackage pkgs.nix pkgs.awscli2];
+
+ serviceConfig = {
+ Type = "oneshot";
+ ExecStart = "${cfg.package}/bin/biz-deployer once";
+ Environment = [
+ "HOME=/root"
+ "AWS_SHARED_CREDENTIALS_FILE=/root/.aws/credentials"
+ ];
+
+ # Note: Hardening disabled because deployer needs write access to
+ # /etc/systemd/system, /nix/store, /nix/var, /root/.cache/nix
+ PrivateTmp = true;
+ };
+ };
+
+ # Timer to run deployer every N seconds
+ systemd.timers.biz-deployer = {
+ description = "Timer for biz-deployer reconciliation";
+ wantedBy = ["timers.target"];
+ timerConfig = {
+ OnBootSec = "1min";
+ OnUnitActiveSec = "${toString cfg.interval}s";
+ Unit = "biz-deployer.service";
+ };
+ };
+
+ # Caddy reverse proxy for deployed services
+ # TODO: Generate this dynamically from manifest in the future
+ services.caddy = {
+ enable = true;
+ globalConfig = ''
+ admin localhost:2019
+ '';
+ virtualHosts."podcastitlater.bensima.com".extraConfig = ''
+ reverse_proxy localhost:8000
+ '';
+ };
+
+ # Open firewall for HTTP/HTTPS
+ networking.firewall.allowedTCPPorts = [80 443];
+ };
+}
diff --git a/Omni/Deploy/Manifest.hs b/Omni/Deploy/Manifest.hs
new file mode 100644
index 0000000..e0d0b78
--- /dev/null
+++ b/Omni/Deploy/Manifest.hs
@@ -0,0 +1,673 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Manifest schema and S3 operations for the mini-PaaS deployment system.
+--
+-- Uses aws CLI for S3 operations (simpler than amazonka, already available).
+--
+-- : out deploy-manifest
+-- : dep aeson
+-- : dep time
+-- : dep directory
+-- : dep temporary
+-- : run awscli2
+module Omni.Deploy.Manifest
+ ( Artifact (..),
+ Exec (..),
+ Http (..),
+ Systemd (..),
+ Hardening (..),
+ Service (..),
+ Manifest (..),
+ findService,
+ updateService,
+ createEmptyManifest,
+ loadManifestFromS3,
+ saveManifestToS3,
+ archiveManifest,
+ listArchivedManifests,
+ rollbackToManifest,
+ s3Bucket,
+ s3Endpoint,
+ main,
+ test,
+ )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import Data.Time (UTCTime, getCurrentTime)
+import Data.Time.Format.ISO8601 (iso8601Show)
+import qualified Omni.Cli as Cli
+import qualified Omni.Log as Log
+import qualified Omni.Test as Test
+import qualified System.Exit as Exit
+import qualified System.IO as IO
+import qualified System.IO.Temp as Temp
+import qualified System.Process as Process
+
+s3Bucket :: Text
+s3Bucket = "omni-nix-cache"
+
+s3Endpoint :: Text
+s3Endpoint = "https://nyc3.digitaloceanspaces.com"
+
+data Artifact = Artifact
+ { artifactType :: Text,
+ storePath :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Artifact where
+ parseJSON =
+ Aeson.withObject "Artifact" <| \o ->
+ Artifact
+ </ (o .:? "type" .!= "nix-closure")
+ <*> o
+ .: "storePath"
+
+instance Aeson.ToJSON Artifact where
+ toJSON Artifact {..} =
+ Aeson.object
+ [ "type" .= artifactType,
+ "storePath" .= storePath
+ ]
+
+data Exec = Exec
+ { execCommand :: Maybe Text,
+ execUser :: Text,
+ execGroup :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Exec where
+ parseJSON =
+ Aeson.withObject "Exec" <| \o ->
+ Exec
+ </ (o .:? "command")
+ <*> o
+ .:? "user"
+ .!= "root"
+ <*> o
+ .:? "group"
+ .!= "root"
+
+instance Aeson.ToJSON Exec where
+ toJSON Exec {..} =
+ Aeson.object
+ [ "command" .= execCommand,
+ "user" .= execUser,
+ "group" .= execGroup
+ ]
+
+defaultExec :: Exec
+defaultExec = Exec Nothing "root" "root"
+
+data Http = Http
+ { httpDomain :: Text,
+ httpPath :: Text,
+ httpInternalPort :: Int
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Http where
+ parseJSON =
+ Aeson.withObject "Http" <| \o ->
+ Http
+ </ (o .: "domain")
+ <*> o
+ .:? "path"
+ .!= "/"
+ <*> o
+ .: "internalPort"
+
+instance Aeson.ToJSON Http where
+ toJSON Http {..} =
+ Aeson.object
+ [ "domain" .= httpDomain,
+ "path" .= httpPath,
+ "internalPort" .= httpInternalPort
+ ]
+
+data Systemd = Systemd
+ { systemdAfter :: [Text],
+ systemdRequires :: [Text],
+ systemdRestart :: Text,
+ systemdRestartSec :: Int
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Systemd where
+ parseJSON =
+ Aeson.withObject "Systemd" <| \o ->
+ Systemd
+ </ (o .:? "after" .!= ["network-online.target"])
+ <*> o
+ .:? "requires"
+ .!= []
+ <*> o
+ .:? "restart"
+ .!= "on-failure"
+ <*> o
+ .:? "restartSec"
+ .!= 5
+
+instance Aeson.ToJSON Systemd where
+ toJSON Systemd {..} =
+ Aeson.object
+ [ "after" .= systemdAfter,
+ "requires" .= systemdRequires,
+ "restart" .= systemdRestart,
+ "restartSec" .= systemdRestartSec
+ ]
+
+defaultSystemd :: Systemd
+defaultSystemd = Systemd ["network-online.target"] [] "on-failure" 5
+
+data Hardening = Hardening
+ { hardeningDynamicUser :: Bool,
+ hardeningPrivateTmp :: Bool,
+ hardeningProtectSystem :: Text,
+ hardeningProtectHome :: Bool
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Hardening where
+ parseJSON =
+ Aeson.withObject "Hardening" <| \o ->
+ Hardening
+ </ (o .:? "dynamicUser" .!= False)
+ <*> o
+ .:? "privateTmp"
+ .!= True
+ <*> o
+ .:? "protectSystem"
+ .!= "strict"
+ <*> o
+ .:? "protectHome"
+ .!= True
+
+instance Aeson.ToJSON Hardening where
+ toJSON Hardening {..} =
+ Aeson.object
+ [ "dynamicUser" .= hardeningDynamicUser,
+ "privateTmp" .= hardeningPrivateTmp,
+ "protectSystem" .= hardeningProtectSystem,
+ "protectHome" .= hardeningProtectHome
+ ]
+
+defaultHardening :: Hardening
+defaultHardening = Hardening False True "strict" True
+
+data Service = Service
+ { serviceName :: Text,
+ serviceArtifact :: Artifact,
+ serviceHosts :: [Text],
+ serviceExec :: Exec,
+ serviceEnv :: Map Text Text,
+ serviceEnvFile :: Maybe Text,
+ serviceHttp :: Maybe Http,
+ serviceSystemd :: Systemd,
+ serviceHardening :: Hardening,
+ serviceRevision :: Maybe Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Service where
+ parseJSON =
+ Aeson.withObject "Service" <| \o ->
+ Service
+ </ (o .: "name")
+ <*> o
+ .: "artifact"
+ <*> o
+ .:? "hosts"
+ .!= ["biz"]
+ <*> o
+ .:? "exec"
+ .!= defaultExec
+ <*> o
+ .:? "env"
+ .!= mempty
+ <*> o
+ .:? "envFile"
+ <*> o
+ .:? "http"
+ <*> o
+ .:? "systemd"
+ .!= defaultSystemd
+ <*> o
+ .:? "hardening"
+ .!= defaultHardening
+ <*> o
+ .:? "revision"
+
+instance Aeson.ToJSON Service where
+ toJSON Service {..} =
+ Aeson.object
+ [ "name" .= serviceName,
+ "artifact" .= serviceArtifact,
+ "hosts" .= serviceHosts,
+ "exec" .= serviceExec,
+ "env" .= serviceEnv,
+ "envFile" .= serviceEnvFile,
+ "http" .= serviceHttp,
+ "systemd" .= serviceSystemd,
+ "hardening" .= serviceHardening,
+ "revision" .= serviceRevision
+ ]
+
+data Manifest = Manifest
+ { manifestVersion :: Int,
+ manifestGeneration :: UTCTime,
+ manifestServices :: [Service]
+ }
+ deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Manifest where
+ parseJSON =
+ Aeson.withObject "Manifest" <| \o ->
+ Manifest
+ </ (o .:? "version" .!= 1)
+ <*> o
+ .: "generation"
+ <*> o
+ .:? "services"
+ .!= []
+
+instance Aeson.ToJSON Manifest where
+ toJSON Manifest {..} =
+ Aeson.object
+ [ "version" .= manifestVersion,
+ "generation" .= manifestGeneration,
+ "services" .= manifestServices
+ ]
+
+findService :: Text -> Manifest -> Maybe Service
+findService name manifest =
+ find (\s -> serviceName s == name) (manifestServices manifest)
+
+updateService :: Text -> Text -> Maybe Text -> Manifest -> Either Text Manifest
+updateService name newStorePath revision manifest =
+ case findService name manifest of
+ Nothing -> Left <| "Service '" <> name <> "' not found in manifest"
+ Just _ -> Right <| manifest {manifestServices = updatedServices}
+ where
+ updatedServices = map updateIfMatch (manifestServices manifest)
+ updateIfMatch svc
+ | serviceName svc == name =
+ svc
+ { serviceArtifact = (serviceArtifact svc) {storePath = newStorePath},
+ serviceRevision = revision
+ }
+ | otherwise = svc
+
+createEmptyManifest :: IO Manifest
+createEmptyManifest = do
+ now <- getCurrentTime
+ pure <| Manifest 1 now []
+
+awsS3Args :: [String]
+awsS3Args =
+ [ "--endpoint-url",
+ Text.unpack s3Endpoint,
+ "--profile",
+ "digitalocean"
+ ]
+
+s3Get :: Text -> FilePath -> IO Bool
+s3Get key destPath = do
+ let url = "s3://" <> Text.unpack s3Bucket <> "/" <> Text.unpack key
+ args = ["s3", "cp"] ++ awsS3Args ++ [url, destPath]
+ (exitCode, _, _) <- Process.readProcessWithExitCode "aws" args ""
+ pure <| exitCode == Exit.ExitSuccess
+
+s3Put :: FilePath -> Text -> IO Bool
+s3Put srcPath key = do
+ let url = "s3://" <> Text.unpack s3Bucket <> "/" <> Text.unpack key
+ args = ["s3", "cp"] ++ awsS3Args ++ [srcPath, url]
+ (exitCode, _, _) <- Process.readProcessWithExitCode "aws" args ""
+ pure <| exitCode == Exit.ExitSuccess
+
+s3List :: Text -> IO [Text]
+s3List prefix = do
+ let url = "s3://" <> Text.unpack s3Bucket <> "/" <> Text.unpack prefix
+ args = ["s3", "ls"] ++ awsS3Args ++ [url]
+ (exitCode, stdout', _) <- Process.readProcessWithExitCode "aws" args ""
+ case exitCode of
+ Exit.ExitSuccess ->
+ pure <| parseS3ListOutput (Text.pack stdout')
+ Exit.ExitFailure _ -> pure []
+
+parseS3ListOutput :: Text -> [Text]
+parseS3ListOutput output =
+ output
+ |> Text.lines
+ |> map extractFilename
+ |> filter (not <. Text.null)
+ where
+ extractFilename line =
+ case Text.words line of
+ [_, _, _, filename] -> filename
+ _ -> ""
+
+loadManifestFromS3 :: IO (Maybe Manifest)
+loadManifestFromS3 = loadManifestFromS3' "manifest.json"
+
+loadManifestFromS3' :: Text -> IO (Maybe Manifest)
+loadManifestFromS3' key = do
+ Temp.withSystemTempFile "manifest.json" <| \tmpPath tmpHandle -> do
+ IO.hClose tmpHandle
+ success <- s3Get key tmpPath
+ if success
+ then do
+ contents <- BL.readFile tmpPath
+ case Aeson.eitherDecode contents of
+ Left _ -> pure Nothing
+ Right manifest -> pure <| Just manifest
+ else pure Nothing
+
+archiveManifest :: Manifest -> IO Text
+archiveManifest manifest = do
+ let timestamp =
+ iso8601Show (manifestGeneration manifest)
+ |> filter (\c -> c /= ':' && c /= '-')
+ |> Text.pack
+ archiveKey = "manifests/manifest-" <> timestamp <> ".json"
+ Temp.withSystemTempFile "manifest.json" <| \tmpPath tmpHandle -> do
+ BL.hPut tmpHandle (Aeson.encode manifest)
+ IO.hClose tmpHandle
+ _ <- s3Put tmpPath archiveKey
+ pure archiveKey
+
+listArchivedManifests :: IO [Text]
+listArchivedManifests = do
+ files <- s3List "manifests/"
+ pure <| filter (Text.isSuffixOf ".json") files
+
+rollbackToManifest :: Text -> IO Bool
+rollbackToManifest archiveKey = do
+ let fullKey =
+ if "manifests/" `Text.isPrefixOf` archiveKey
+ then archiveKey
+ else "manifests/" <> archiveKey
+ archived <- loadManifestFromS3' fullKey
+ case archived of
+ Nothing -> pure False
+ Just manifest -> do
+ saveManifestToS3 manifest
+ pure True
+
+saveManifestToS3 :: Manifest -> IO ()
+saveManifestToS3 = saveManifestToS3' "manifest.json"
+
+saveManifestToS3' :: Text -> Manifest -> IO ()
+saveManifestToS3' key manifest = do
+ existing <- loadManifestFromS3' key
+ case existing of
+ Just old -> void <| archiveManifest old
+ Nothing -> pure ()
+
+ now <- getCurrentTime
+ let updatedManifest = manifest {manifestGeneration = now}
+ Temp.withSystemTempFile "manifest.json" <| \tmpPath tmpHandle -> do
+ BL.hPut tmpHandle (Aeson.encode updatedManifest)
+ IO.hClose tmpHandle
+ _ <- s3Put tmpPath key
+ pure ()
+
+help :: Cli.Docopt
+help =
+ [Cli.docopt|
+deploy-manifest - Manage deployment manifest in S3
+
+Usage:
+ deploy-manifest test
+ deploy-manifest init
+ deploy-manifest show
+ deploy-manifest update <name> <store-path> [<revision>]
+ deploy-manifest add-service <json>
+ deploy-manifest list
+ deploy-manifest rollback <archive>
+ deploy-manifest (-h | --help)
+
+Commands:
+ test Run tests
+ init Initialize empty manifest in S3
+ show Show current manifest
+ update Update service store path in manifest
+ add-service Add a new service from JSON
+ list List archived manifest generations
+ rollback Restore a previous manifest version
+
+Options:
+ -h --help Show this help
+|]
+
+move :: Cli.Arguments -> IO ()
+move args
+ | args `Cli.has` Cli.command "init" = do
+ existing <- loadManifestFromS3
+ case existing of
+ Just _ -> do
+ Log.fail ["manifest", "already exists"]
+ Exit.exitWith (Exit.ExitFailure 1)
+ Nothing -> do
+ manifest <- createEmptyManifest
+ saveManifestToS3 manifest
+ Log.good ["manifest", "initialized empty manifest"]
+ | args `Cli.has` Cli.command "show" = do
+ manifest <- loadManifestFromS3
+ case manifest of
+ Nothing -> putStrLn ("no manifest found" :: String)
+ Just m -> BL.putStr <| Aeson.encode m
+ | args `Cli.has` Cli.command "update" = do
+ let name =
+ Cli.getArg args (Cli.argument "name")
+ |> fromMaybe ""
+ |> Text.pack
+ storePath' =
+ Cli.getArg args (Cli.argument "store-path")
+ |> fromMaybe ""
+ |> Text.pack
+ revision =
+ Cli.getArg args (Cli.argument "revision")
+ /> Text.pack
+ manifest <- loadManifestFromS3
+ case manifest of
+ Nothing -> do
+ Log.fail ["manifest", "no manifest found in S3"]
+ Exit.exitWith (Exit.ExitFailure 1)
+ Just m -> case updateService name storePath' revision m of
+ Left err -> do
+ Log.fail ["manifest", err]
+ Exit.exitWith (Exit.ExitFailure 1)
+ Right updated -> do
+ saveManifestToS3 updated
+ Log.good ["manifest", "updated", name, "->", storePath']
+ | args `Cli.has` Cli.command "add-service" = do
+ let jsonStr =
+ Cli.getArg args (Cli.argument "json")
+ |> fromMaybe ""
+ case Aeson.eitherDecode (BL.fromStrict <| TE.encodeUtf8 <| Text.pack jsonStr) of
+ Left err -> do
+ Log.fail ["manifest", "invalid JSON:", Text.pack err]
+ Exit.exitWith (Exit.ExitFailure 1)
+ Right svc -> do
+ manifest <- loadManifestFromS3
+ m <- maybe createEmptyManifest pure manifest
+ case findService (serviceName svc) m of
+ Just _ -> do
+ Log.fail ["manifest", "service already exists:", serviceName svc]
+ Exit.exitWith (Exit.ExitFailure 1)
+ Nothing -> do
+ let newManifest = m {manifestServices = manifestServices m ++ [svc]}
+ saveManifestToS3 newManifest
+ Log.good ["manifest", "added service", serviceName svc]
+ | args `Cli.has` Cli.command "list" = do
+ archives <- listArchivedManifests
+ if null archives
+ then putStrLn ("no archived manifests found" :: String)
+ else
+ forM_ archives <| \archive -> do
+ putStrLn <| Text.unpack archive
+ | args `Cli.has` Cli.command "rollback" = do
+ let archive =
+ Cli.getArg args (Cli.argument "archive")
+ |> fromMaybe ""
+ |> Text.pack
+ success <- rollbackToManifest archive
+ if success
+ then Log.good ["manifest", "rolled back to", archive]
+ else do
+ Log.fail ["manifest", "failed to rollback to", archive]
+ Exit.exitWith (Exit.ExitFailure 1)
+ | otherwise = do
+ Log.fail ["manifest", "unknown command"]
+ Exit.exitWith (Exit.ExitFailure 1)
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Deploy.Manifest"
+ [ test_artifactDefaults,
+ test_serviceDefaults,
+ test_manifestJsonRoundtrip,
+ test_updateService,
+ test_findService
+ ]
+
+test_artifactDefaults :: Test.Tree
+test_artifactDefaults =
+ Test.unit "artifact defaults type to nix-closure" <| do
+ let json = "{\"storePath\": \"/nix/store/abc123\"}"
+ case Aeson.eitherDecode json of
+ Left err -> Test.assertFailure err
+ Right (artifact :: Artifact) ->
+ artifactType artifact Test.@=? "nix-closure"
+
+test_serviceDefaults :: Test.Tree
+test_serviceDefaults =
+ Test.unit "service has correct defaults" <| do
+ let json = "{\"name\": \"test-svc\", \"artifact\": {\"storePath\": \"/nix/store/xyz\"}}"
+ case Aeson.eitherDecode json of
+ Left err -> Test.assertFailure err
+ Right (svc :: Service) -> do
+ serviceHosts svc Test.@=? ["biz"]
+ execUser (serviceExec svc) Test.@=? "root"
+ systemdRestart (serviceSystemd svc) Test.@=? "on-failure"
+ hardeningPrivateTmp (serviceHardening svc) Test.@=? True
+
+test_manifestJsonRoundtrip :: Test.Tree
+test_manifestJsonRoundtrip =
+ Test.unit "manifest JSON roundtrip" <| do
+ now <- getCurrentTime
+ let manifest =
+ Manifest
+ { manifestVersion = 1,
+ manifestGeneration = now,
+ manifestServices =
+ [ Service
+ { serviceName = "test-svc",
+ serviceArtifact = Artifact "nix-closure" "/nix/store/abc123",
+ serviceHosts = ["biz"],
+ serviceExec = defaultExec,
+ serviceEnv = mempty,
+ serviceEnvFile = Nothing,
+ serviceHttp = Just (Http "example.com" "/" 8000),
+ serviceSystemd = defaultSystemd,
+ serviceHardening = defaultHardening,
+ serviceRevision = Nothing
+ }
+ ]
+ }
+ encoded = Aeson.encode manifest
+ case Aeson.eitherDecode encoded of
+ Left err -> Test.assertFailure err
+ Right decoded -> do
+ length (manifestServices decoded) Test.@=? 1
+ case head <| manifestServices decoded of
+ Nothing -> Test.assertFailure "no services"
+ Just svc -> serviceName svc Test.@=? "test-svc"
+
+test_updateService :: Test.Tree
+test_updateService =
+ Test.unit "updateService updates store path" <| do
+ now <- getCurrentTime
+ let manifest =
+ Manifest
+ { manifestVersion = 1,
+ manifestGeneration = now,
+ manifestServices =
+ [ Service
+ "svc-a"
+ (Artifact "nix-closure" "/nix/store/old")
+ ["biz"]
+ defaultExec
+ mempty
+ Nothing
+ Nothing
+ defaultSystemd
+ defaultHardening
+ Nothing,
+ Service
+ "svc-b"
+ (Artifact "nix-closure" "/nix/store/other")
+ ["biz"]
+ defaultExec
+ mempty
+ Nothing
+ Nothing
+ defaultSystemd
+ defaultHardening
+ Nothing
+ ]
+ }
+ case updateService "svc-a" "/nix/store/new" (Just "abc123") manifest of
+ Left err -> Test.assertFailure (Text.unpack err)
+ Right updated -> case head <| manifestServices updated of
+ Nothing -> Test.assertFailure "no services"
+ Just svcA -> do
+ storePath (serviceArtifact svcA) Test.@=? "/nix/store/new"
+ serviceRevision svcA Test.@=? Just "abc123"
+
+test_findService :: Test.Tree
+test_findService =
+ Test.unit "findService finds service by name" <| do
+ now <- getCurrentTime
+ let manifest =
+ Manifest
+ { manifestVersion = 1,
+ manifestGeneration = now,
+ manifestServices =
+ [ Service
+ "svc-a"
+ (Artifact "nix-closure" "/nix/store/a")
+ ["biz"]
+ defaultExec
+ mempty
+ Nothing
+ Nothing
+ defaultSystemd
+ defaultHardening
+ Nothing
+ ]
+ }
+ case findService "svc-a" manifest of
+ Nothing -> Test.assertFailure "service not found"
+ Just svc -> serviceName svc Test.@=? "svc-a"
+ case findService "nonexistent" manifest of
+ Nothing -> pure ()
+ Just _ -> Test.assertFailure "found nonexistent service"
+
+main :: IO ()
+main = Cli.main <| Cli.Plan help move test pure
diff --git a/Omni/Deploy/PLAN.md b/Omni/Deploy/PLAN.md
new file mode 100644
index 0000000..1870ebd
--- /dev/null
+++ b/Omni/Deploy/PLAN.md
@@ -0,0 +1,299 @@
+# Mini-PaaS Deployment System
+
+## Overview
+
+A pull-based deployment system that allows deploying Nix-built services without full NixOS rebuilds. Services are defined in a manifest, pulled from an S3 binary cache, and managed as systemd units with Caddy for reverse proxying.
+
+## Problem Statement
+
+Current deployment (`push.sh` + full NixOS rebuild) is slow and heavyweight:
+- Every service change requires rebuilding the entire NixOS configuration
+- Adding a new service requires modifying Biz.nix and doing a full rebuild
+- Deploy time from "code ready" to "running in prod" is too long
+
+## Goals
+
+1. **Fast deploys**: Update a single service in <5 minutes without touching others
+2. **Independent services**: Deploy services without NixOS rebuild
+3. **Add services dynamically**: New services via manifest, no NixOS changes needed
+4. **Maintain NixOS for base OS**: Keep NixOS for infra (Postgres, SSH, firewall)
+5. **Clear scale-up path**: Single host now, easy migration to Nomad later
+
+## Key Design Decisions
+
+1. **Nix closures, not Docker**: Deploy Nix store paths directly, not containers. Simpler, no Docker daemon needed. Use systemd hardening for isolation.
+
+2. **Pull-based, not push-based**: The target host polls S3 for manifest changes every 5 min. No SSH needed for deploys; just update the manifest.
+
+3. **Caddy, not nginx**: Caddy has admin API for dynamic route updates and automatic HTTPS. No config file regeneration needed.
+
+4. **Separation of concerns**:
+ - `bild`: Build tool, adds `--cache` flag to sign+push closures
+ - `push.sh`: Deploy orchestrator, handles both NixOS and service deploys
+ - `deployer`: Runs on target, polls manifest, manages services
+
+5. **Out-of-band secrets**: Secrets live in `/var/lib/biz-secrets/*.env`; the manifest only references paths. No secrets in S3.
+
+6. **Nix profiles for rollback**: Each service gets a Nix profile, enabling `nix-env --rollback`.
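+
+To illustrate decision 3: for each service with an `http` block, the deployer keeps one Caddy route up to date through the admin API on `localhost:2019`. The curl calls below are a hand-run sketch of what `Omni/Deploy/Caddy.hs` does programmatically (service name, domain, and port are taken from the manifest example later in this document):
+
+```bash
+# Upsert the route; the deployer PATCHes by @id and falls back to POST
+# on /config/apps/http/servers/srv0/routes when the id does not exist yet.
+curl -X PATCH localhost:2019/id/biz-podcastitlater-web \
+  -H 'Content-Type: application/json' \
+  -d '{"@id":"biz-podcastitlater-web","match":[{"host":["podcastitlater.com"]}],"handle":[{"handler":"reverse_proxy","upstreams":[{"dial":"localhost:8000"}]}],"terminal":true}'
+
+# Remove the route when the service disappears from the manifest.
+curl -X DELETE localhost:2019/id/biz-podcastitlater-web
+```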
+
+## Relevant Existing Files
+
+- `Omni/Bild.hs` - Build tool, modify to add `--cache` flag
+- `Omni/Bild.nix` - Nix build library, has `bild.run` for building packages
+- `Omni/Ide/push.sh` - Current deploy script, enhance for service deploys
+- `Biz.nix` - Current NixOS config for biz host
+- `Biz/Packages.nix` - Builds all Biz packages
+- `Biz/PodcastItLater/Web.nix` - Example NixOS service module (to be replaced)
+- `Biz/PodcastItLater/Web.py` - Example Python service (deploy target)
+- `Omni/Os/Base.nix` - Base NixOS config, add S3 substituter here
+
+## Architecture
+
+```
+┌─────────────────────────────────────────────────────────────────────────────┐
+│ DEV MACHINE │
+│ │
+│ ┌─────────────────────────────────────────────────────────────────────┐ │
+│ │ push.sh <target> │ │
+│ │ │ │
+│ │ if target.nix: (NixOS deploy - existing behavior) │ │
+│ │ bild <target> │ │
+│ │ nix copy --to ssh://host │ │
+│ │ ssh host switch-to-configuration │ │
+│ │ │ │
+│ │ else: (Service deploy - new behavior) │ │
+│ │ bild <target> --cache ──▶ sign + push closure to S3 │ │
+│ │ update manifest.json in S3 with new storePath │ │
+│ │ (deployer on target will pick up changes) │ │
+│ └─────────────────────────────────────────────────────────────────────┘ │
+│ │
+│ Separation of concerns: │
+│ - bild: Build + sign + push to S3 cache (--cache flag) │
+│ - push.sh: Orchestrates deploy, updates manifest, handles both modes │
+└─────────────────────────────────────────────────────────────────────────────┘
+ │
+ ▼
+┌─────────────────────────────────────────────────────────────────────────────┐
+│ DO SPACES (S3 BINARY CACHE) - PRIVATE │
+│ │
+│ /nar/*.nar.xz ← Compressed Nix store paths │
+│ /*.narinfo ← Metadata + signatures │
+│ /nix-cache-info ← Cache metadata │
+│ /manifest.json ← Current deployment state │
+│ /manifests/ ← Historical manifests for rollback │
+│ manifest-<ts>.json │
+│ │
+│ Authentication: AWS credentials (Spaces access key) │
+│ - Dev machine: write access for pushing │
+│ - Target host: read access for pulling │
+└─────────────────────────────────────────────────────────────────────────────┘
+ │
+ poll every 5 min
+ ▼
+┌─────────────────────────────────────────────────────────────────────────────┐
+│ TARGET HOST (biz) │
+│ │
+│ ┌──────────────────────────────────────────────────────────────────────┐ │
+│ │ biz-deployer │ │
+│ │ (Python systemd service, runs every 5 min via timer) │ │
+│ │ │ │
+│ │ 1. Fetch manifest.json from S3 │ │
+│ │ 2. Compare to local state │ │
+│ │ 3. For changed services: │ │
+│ │ - nix copy --from s3://... <storePath> │ │
+│ │ - Generate systemd unit file │ │
+│ │ - Create GC root │ │
+│ │ - systemctl daemon-reload && restart │ │
+│ │ 4. Update Caddy routes via API │ │
+│ │ 5. Save local state │ │
+│ └──────────────────────────────────────────────────────────────────────┘ │
+│ │
+│ Directories: │
+│ - /var/lib/biz-deployer/services/*.service (generated units) │
+│ - /var/lib/biz-deployer/state.json (local state) │
+│ - /var/lib/biz-secrets/*.env (secret env files) │
+│ - /nix/var/nix/gcroots/biz/* (GC roots) │
+│ │
+│ NixOS manages: │
+│ - Base OS, SSH, firewall │
+│ - Caddy with admin API enabled │
+│ - PostgreSQL, Redis (infra services) │
+│ - biz-deployer service itself │
+└─────────────────────────────────────────────────────────────────────────────┘
+```
+
+## Components
+
+### 1. S3 Binary Cache (DO Spaces)
+
+**Bucket**: `omni-nix-cache` (private)
+**Region**: `nyc3` (or nearest)
+
+**Credentials**:
+- Dev machine: `~/.aws/credentials` with `[digitalocean]` profile
+- Target host: `/root/.aws/credentials` with same profile
+
+**Signing key**:
+- Generate: `nix-store --generate-binary-cache-key omni-cache cache-priv-key.pem cache-pub-key.pem`
+- Private key: `~/.config/nix/cache-priv-key.pem` (dev machine only)
+- Public key: Added to target's `nix.settings.trusted-public-keys`
+
+**S3 URL format**:
+```
+s3://omni-nix-cache?profile=digitalocean&scheme=https&endpoint=nyc3.digitaloceanspaces.com
+```
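+
+A quick manual round trip to sanity-check the cache (sketch; the store path is a placeholder, and the `[digitalocean]` profile plus the signing key above are assumed to be configured):
+
+```bash
+# Dev machine: sign and push one store path.
+nix store sign --key-file ~/.config/nix/cache-priv-key.pem /nix/store/<hash>-example
+nix copy --to 's3://omni-nix-cache?profile=digitalocean&scheme=https&endpoint=nyc3.digitaloceanspaces.com' /nix/store/<hash>-example
+
+# Target host: pull it back out (this is what the deployer does for each service).
+nix copy --from 's3://omni-nix-cache?profile=digitalocean&scheme=https&endpoint=nyc3.digitaloceanspaces.com' /nix/store/<hash>-example
+```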
+
+### 2. Manifest Schema (v1)
+
+```json
+{
+ "version": 1,
+ "generation": "2025-01-15T12:34:56Z",
+ "services": [
+ {
+ "name": "podcastitlater-web",
+ "artifact": {
+ "type": "nix-closure",
+ "storePath": "/nix/store/abc123-podcastitlater-web-1.2.3"
+ },
+ "hosts": ["biz"],
+ "exec": {
+ "command": "podcastitlater-web",
+ "user": "pil-web",
+ "group": "pil"
+ },
+ "env": {
+ "PORT": "8000",
+ "AREA": "Live",
+ "DATA_DIR": "/var/podcastitlater",
+ "BASE_URL": "https://podcastitlater.com"
+ },
+ "envFile": "/var/lib/biz-secrets/podcastitlater-web.env",
+ "http": {
+ "domain": "podcastitlater.com",
+ "path": "/",
+ "internalPort": 8000
+ },
+ "systemd": {
+ "after": ["network-online.target", "postgresql.service"],
+ "requires": [],
+ "restart": "on-failure",
+ "restartSec": 5
+ },
+ "hardening": {
+ "dynamicUser": false,
+ "privateTmp": true,
+ "protectSystem": "strict",
+ "protectHome": true
+ },
+ "revision": "abc123def"
+ }
+ ]
+}
+```
+
+### 3. Deployer Service (Omni/Deploy/Deployer.hs)
+
+A Haskell service that:
+- Polls manifest from S3
+- Pulls Nix closures
+- Generates systemd units
+- Updates Caddy via API
+- Manages GC roots
+- Tracks local state
+
+### 4. NixOS Module (Omni/Deploy/Deployer.nix)
+
+Configures:
+- biz-deployer systemd service + timer
+- Caddy with admin API
+- S3 substituter configuration
+- Required directories and permissions
+
+### 5. Bild Integration (Omni/Bild.hs)
+
+New `--cache` flag that:
+1. Builds the target
+2. Signs the closure with cache key (using NIX_CACHE_KEY env var)
+3. Pushes to S3 cache
+4. Outputs the store path for push.sh to use
+
+Does NOT update manifest - that's push.sh's responsibility.
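+
+As implemented in `Omni/Bild.hs` in this change, the sign-and-push step runs automatically after a successful build whenever `NIX_CACHE_KEY` is set (pass `--no-cache` to skip it), and the resulting path is echoed as `STORE_PATH=...` for push.sh to capture. A sketch (target and hash are illustrative):
+
+```bash
+export NIX_CACHE_KEY=~/.config/nix/cache-priv-key.pem
+bild Biz/PodcastItLater/Web.py
+# ...build output...
+# STORE_PATH=/nix/store/<hash>-podcastitlater-web
+```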
+
+### 6. Push.sh Enhancement (Omni/Ide/push.sh)
+
+Detect deploy mode from target extension:
+- `.nix` → NixOS deploy (existing behavior)
+- `.py`, `.hs`, etc. → Service deploy (new behavior)
+
+For service deploys:
+1. Call `bild <target> --cache`
+2. Capture store path from bild output
+3. Fetch current manifest.json from S3
+4. Archive current manifest to manifests/manifest-<timestamp>.json
+5. Update manifest with new storePath for this service
+6. Upload new manifest.json to S3
+7. Deployer on target picks up change within 5 minutes
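+
+Put together, a manual service deploy (the same steps push.sh automates; the store path and revision are illustrative) looks roughly like:
+
+```bash
+bild Biz/PodcastItLater/Web.py        # prints STORE_PATH=/nix/store/<hash>-podcastitlater-web
+deploy-manifest update podcastitlater-web /nix/store/<hash>-podcastitlater-web "$(git rev-parse --short HEAD)"
+# The deployer on the target converges within one poll interval (~5 minutes).
+```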
+
+## Migration Path
+
+### Phase 1: Infrastructure Setup
+1. Create DO Spaces bucket
+2. Generate signing keys
+3. Configure S3 substituter on target
+4. Deploy base deployer service (empty manifest)
+
+### Phase 2: Migrate First Service
+1. Choose non-critical service (e.g., podcastitlater-worker)
+2. Add to manifest with different port
+3. Verify via staging route
+4. Flip Caddy to new service
+5. Disable old NixOS-managed service
+
+### Phase 3: Migrate Remaining Services
+- Repeat Phase 2 for each service
+- Order: worker → web → storybook
+
+### Phase 4: Cleanup
+- Remove service-specific NixOS modules
+- Simplify Biz.nix to base OS only
+
+## Rollback Strategy
+
+1. Each deploy archives current manifest to `/manifests/manifest-<ts>.json`
+2. Rollback = copy old manifest back to `manifest.json`
+3. Deployer sees new generation, converges to old state
+4. GC roots keep old closures alive (last 5 versions per service)
+
+## Scale-up Path
+
+| Stage | Hosts | Changes |
+|-------|-------|---------|
+| Current | 1 | Full architecture as described |
+| 2-3 hosts | 2-3 | Add `hosts` filtering, each host runs deployer |
+| 4+ hosts | 4+ | Consider Nomad with nix-nomad for job definitions |
+
+## Security Considerations
+
+- S3 bucket is private (authenticated reads/writes)
+- Signing key never leaves dev machine
+- Secrets stored out-of-band in `/var/lib/biz-secrets/`
+- systemd hardening for service isolation
+- Deployer validates manifest schema before applying
+
+## File Locations
+
+```
+Omni/
+ Deploy/
+ PLAN.md # This document
+ Deployer.py # Main deployer service
+ Deployer.nix # NixOS module
+ Manifest.py # Manifest schema/validation
+ Systemd.py # Unit file generation
+ Caddy.py # Caddy API integration
+ S3.py # S3 operations (for deployer)
+ Bild.hs # Add --cache flag for sign+push
+ Ide/
+ push.sh # Enhanced: NixOS deploy OR service deploy + manifest update
+```
diff --git a/Omni/Deploy/Packages.nix b/Omni/Deploy/Packages.nix
new file mode 100644
index 0000000..4cc42e9
--- /dev/null
+++ b/Omni/Deploy/Packages.nix
@@ -0,0 +1,11 @@
+# Build all deployer packages independently, outside NixOS context.
+#
+# Usage:
+# nix-build Omni/Deploy/Packages.nix # builds all packages
+# nix-build Omni/Deploy/Packages.nix -A biz-deployer # builds one package
+{bild ? import ../Bild.nix {}}: {
+ biz-deployer = bild.run ./Deployer.hs;
+ deploy-manifest = bild.run ./Manifest.hs;
+ deploy-systemd = bild.run ./Systemd.hs;
+ deploy-caddy = bild.run ./Caddy.hs;
+}
diff --git a/Omni/Deploy/README.md b/Omni/Deploy/README.md
new file mode 100644
index 0000000..cabad43
--- /dev/null
+++ b/Omni/Deploy/README.md
@@ -0,0 +1,211 @@
+# Mini-PaaS Deployment System
+
+A pull-based system for deploying Nix-built services without full NixOS rebuilds.
+
+## Quick Start
+
+### Deploy a Service
+
+```bash
+# Build, cache to S3, and update manifest
+Omni/Ide/push.sh Biz/PodcastItLater/Web.py
+
+# The deployer on the target host polls every 5 minutes
+# To force immediate deployment, SSH to host and run:
+ssh biz sudo systemctl start biz-deployer
+```
+
+### View Current State
+
+```bash
+# Show current manifest
+deploy-manifest show
+
+# List archived manifests (for rollback)
+deploy-manifest list
+
+# Check deployer status on target
+ssh biz sudo systemctl status biz-deployer
+ssh biz cat /var/lib/biz-deployer/state.json
+```
+
+## Deployment Workflow
+
+```
+Developer Machine S3 Cache Target Host (biz)
+ │ │ │
+ │ push.sh Biz/App.py │ │
+ ├───────────────────────────────►│ │
+ │ 1. bild builds + caches │ │
+ │ 2. deploy-manifest update │ │
+ │ │ poll every 5 min │
+ │ │◄─────────────────────────────┤
+ │ │ │
+ │ │ manifest changed? │
+ │ │ - pull closure │
+ │ │ - generate systemd unit │
+ │ │ - update Caddy route │
+ │ │ - restart service │
+ │ │─────────────────────────────►│
+ │ │ │
+```
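+
+Per service, the convergence step on the target is roughly the cycle below.
+This is a sketch only; Deployer.hs is the source of truth, and both
+`Caddy.updateRoute` and the `/etc/systemd/system` link directory are
+assumptions made for illustration:
+
+```haskell
+import Alpha
+import qualified Data.Text as Text
+import qualified Omni.Deploy.Caddy as Caddy
+import Omni.Deploy.Manifest (Artifact (..), Service (..))
+import qualified Omni.Deploy.Systemd as Systemd
+import qualified System.Process as Process
+
+cacheUrl :: String
+cacheUrl = "s3://omni-nix-cache?profile=digitalocean&scheme=https&endpoint=nyc3.digitaloceanspaces.com"
+
+-- | Converge one service: only act when the wanted closure differs from the
+-- one referenced by the currently installed unit file.
+convergeService :: Service -> IO ()
+convergeService svc = do
+  let wanted = storePath (serviceArtifact svc)
+  running <- Systemd.getRunningStorePath (serviceName svc)
+  when (running /= Just wanted) <| do
+    -- Pull the signed closure from the S3 binary cache.
+    Process.callProcess "nix" ["copy", "--from", cacheUrl, Text.unpack wanted]
+    -- Regenerate the unit, link it where systemd can find it, and restart.
+    _ <- Systemd.writeUnit Systemd.servicesDir svc
+    _ <- Systemd.createSymlink Systemd.servicesDir "/etc/systemd/system" svc
+    Systemd.reloadAndRestart (serviceName svc)
+    -- Update the reverse-proxy route (hypothetical Caddy helper).
+    for_ (serviceHttp svc) Caddy.updateRoute
+```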
+
+## Adding a New Service
+
+### 1. Create the Service Definition
+
+```bash
+deploy-manifest add-service '{
+ "name": "my-new-service",
+ "artifact": {"storePath": "/nix/store/placeholder"},
+ "hosts": ["biz"],
+ "exec": {"command": null, "user": "root", "group": "root"},
+ "env": {"PORT": "8080", "AREA": "Live"},
+ "envFile": "/var/lib/biz-secrets/my-new-service.env",
+ "http": {"domain": "myservice.bensima.com", "path": "/", "internalPort": 8080}
+}'
+```
+
+### 2. Create Secrets File on Target
+
+```bash
+ssh biz
+sudo mkdir -p /var/lib/biz-secrets
+sudo tee /var/lib/biz-secrets/my-new-service.env << 'EOF'
+SECRET_KEY=your-secret-here
+DATABASE_URL=postgres://...
+EOF
+sudo chmod 600 /var/lib/biz-secrets/my-new-service.env
+```
+
+### 3. Deploy the Service
+
+```bash
+Omni/Ide/push.sh Biz/MyService.py
+```
+
+## Secrets Management
+
+Secrets are stored out-of-band on the target host, never in S3 or the manifest.
+
+**Location**: `/var/lib/biz-secrets/<service-name>.env`
+
+**Format**: Standard environment file
+```
+SECRET_KEY=abc123
+DATABASE_URL=postgres://user:pass@localhost/db
+STRIPE_API_KEY=sk_live_...
+```
+
+**Permissions**: `chmod 600`, owned by root
+
+**Referencing in manifest**: Set `envFile` field to the path
+
+## Rollback
+
+### List Available Versions
+
+```bash
+deploy-manifest list
+# Output:
+# manifest-20251216T045211Z.json
+# manifest-20251216T033000Z.json
+# manifest-20251215T120000Z.json
+```
+
+### Rollback to Previous Version
+
+```bash
+# Restore a specific archived manifest
+deploy-manifest rollback manifest-20251215T120000Z.json
+
+# Force immediate deployment
+ssh biz sudo systemctl start biz-deployer
+```
+
+The rollback archives the current manifest before restoring, so you can always roll back the rollback.
+
+## Troubleshooting
+
+### Service Not Starting
+
+```bash
+# Check deployer logs
+ssh biz sudo journalctl -u biz-deployer -f
+
+# Check service logs
+ssh biz sudo journalctl -u <service-name> -f
+
+# Check deployer state
+ssh biz cat /var/lib/biz-deployer/state.json
+```
+
+### Manifest Update Failed
+
+```bash
+# Verify AWS credentials
+aws s3 ls s3://omni-nix-cache/ --endpoint-url https://nyc3.digitaloceanspaces.com --profile digitalocean
+
+# Check manifest exists
+deploy-manifest show
+```
+
+### Closure Not Pulling
+
+```bash
+# Check if store path exists in cache
+aws s3 ls s3://omni-nix-cache/<hash>.narinfo --endpoint-url https://nyc3.digitaloceanspaces.com --profile digitalocean
+
+# Check target can access cache
+ssh biz nix copy --from 's3://omni-nix-cache?profile=digitalocean&scheme=https&endpoint=nyc3.digitaloceanspaces.com' /nix/store/<path>
+```
+
+### Caddy Route Issues
+
+```bash
+# Check Caddy config
+ssh biz curl -s localhost:2019/config/ | jq .
+
+# Check Caddy logs
+ssh biz sudo journalctl -u caddy -f
+```
+
+## Architecture
+
+| Component | Location | Purpose |
+|-----------|----------|---------|
+| `bild` | Dev machine | Build tool, caches to S3 |
+| `push.sh` | Dev machine | Orchestrates deploys |
+| `deploy-manifest` | Dev machine | Manage manifest in S3 |
+| `biz-deployer` | Target host | Polls manifest, deploys services |
+| Caddy | Target host | Reverse proxy with auto-HTTPS |
+
+### File Locations on Target
+
+| Path | Purpose |
+|------|---------|
+| `/var/lib/biz-deployer/state.json` | Local deployer state |
+| `/var/lib/biz-deployer/services/` | Generated systemd units |
+| `/var/lib/biz-secrets/` | Service secret env files |
+| `/nix/var/nix/gcroots/biz/` | GC roots for deployed closures |
+| `/root/.aws/credentials` | S3 credentials |
+
+## Scale-Up Path
+
+| Stage | Hosts | Changes Needed |
+|-------|-------|----------------|
+| Current | 1 | Full architecture as described |
+| 2-3 hosts | 2-3 | Add `hosts` filtering (already supported) |
+| 4+ hosts | 4+ | Consider migrating to Nomad + nix-nomad |
+
+The manifest already supports multi-host deployments via the `hosts` array. Each host runs its own deployer and only deploys services where its hostname appears in the `hosts` list.
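+
+A sketch of that filter, using the field names from Manifest.hs (how the
+deployer obtains its own hostname is left out here):
+
+```haskell
+import Data.Text (Text)
+import Omni.Deploy.Manifest (Service (..))
+
+-- | Keep only the manifest entries whose hosts list names this machine.
+servicesForHost :: Text -> [Service] -> [Service]
+servicesForHost hostname = filter (\svc -> hostname `elem` serviceHosts svc)
+```
+
+For example, `servicesForHost "biz" services` yields only the services this
+host should run.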
+
+## Related Files
+
+- [Omni/Deploy/Manifest.hs](Manifest.hs) - Manifest CLI and schema
+- [Omni/Deploy/Deployer.hs](Deployer.hs) - Deployer service
+- [Omni/Deploy/Deployer.nix](Deployer.nix) - NixOS module
+- [Omni/Deploy/Systemd.hs](Systemd.hs) - Systemd unit generation
+- [Omni/Deploy/Caddy.hs](Caddy.hs) - Caddy API integration
+- [Omni/Ide/push.sh](../Ide/push.sh) - Deploy script
+- [Omni/Deploy/PLAN.md](PLAN.md) - Original design document
diff --git a/Omni/Deploy/Systemd.hs b/Omni/Deploy/Systemd.hs
new file mode 100644
index 0000000..7b64d1f
--- /dev/null
+++ b/Omni/Deploy/Systemd.hs
@@ -0,0 +1,269 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Systemd unit file generator for the mini-PaaS deployment system.
+--
+-- : out deploy-systemd
+-- : dep directory
+module Omni.Deploy.Systemd
+ ( generateUnit,
+ writeUnit,
+ createSymlink,
+ reloadAndRestart,
+ stopAndDisable,
+ removeUnit,
+ getRunningStorePath,
+ servicesDir,
+ main,
+ test,
+ )
+where
+
+import Alpha
+import qualified Data.Map as Map
+import qualified Data.Text as Text
+import qualified Data.Text.IO as Text.IO
+import Omni.Deploy.Manifest (Artifact (..), Exec (..), Hardening (..), Service (..), Systemd (..))
+import qualified Omni.Test as Test
+import qualified System.Directory as Dir
+import System.FilePath ((</>))
+import qualified System.Process as Process
+
+servicesDir :: FilePath
+servicesDir = "/var/lib/biz-deployer/services"
+
+generateUnit :: Service -> Text
+generateUnit Service {..} =
+ Text.unlines <| unitSection ++ serviceSection ++ hardeningSection ++ installSection
+ where
+ binary = fromMaybe serviceName (execCommand serviceExec)
+ execStart = storePath serviceArtifact <> "/bin/" <> binary
+
+ unitSection =
+ [ "[Unit]",
+ "Description=" <> serviceName,
+ "After=" <> Text.intercalate " " (systemdAfter serviceSystemd)
+ ]
+ ++ requiresLine
+
+ requiresLine =
+ ["Requires=" <> Text.intercalate " " (systemdRequires serviceSystemd) | not (null (systemdRequires serviceSystemd))]
+
+ serviceSection =
+ [ "",
+ "[Service]",
+ "Type=simple",
+ "ExecStart=" <> execStart,
+ "User=" <> execUser serviceExec,
+ "Group=" <> execGroup serviceExec,
+ "Restart=" <> systemdRestart serviceSystemd,
+ "RestartSec=" <> tshow (systemdRestartSec serviceSystemd)
+ ]
+ ++ envLines
+ ++ envFileLine
+
+ envLines =
+ Map.toList serviceEnv
+ |> map (\(k, v) -> "Environment=\"" <> k <> "=" <> v <> "\"")
+
+ envFileLine = case serviceEnvFile of
+ Nothing -> []
+ Just path -> ["EnvironmentFile=" <> path]
+
+ hardeningSection =
+ [ "",
+ "# Hardening",
+ "PrivateTmp=" <> boolToYesNo (hardeningPrivateTmp serviceHardening),
+ "ProtectSystem=" <> hardeningProtectSystem serviceHardening,
+ "ProtectHome=" <> boolToYesNo (hardeningProtectHome serviceHardening),
+ "NoNewPrivileges=yes"
+ ]
+ ++ readWritePathsLine
+
+ readWritePathsLine =
+ case Map.lookup "DATA_DIR" serviceEnv of
+ Just dataDir -> ["ReadWritePaths=" <> dataDir]
+ Nothing -> []
+
+ installSection =
+ [ "",
+ "[Install]",
+ "WantedBy=multi-user.target"
+ ]
+
+ boolToYesNo True = "yes"
+ boolToYesNo False = "no"
+
+writeUnit :: FilePath -> Service -> IO FilePath
+writeUnit baseDir svc = do
+ Dir.createDirectoryIfMissing True baseDir
+ let path = baseDir </> Text.unpack (serviceName svc) <> ".service"
+ content = generateUnit svc
+ Text.IO.writeFile path content
+ pure path
+
+createSymlink :: FilePath -> FilePath -> Service -> IO FilePath
+createSymlink baseDir sysDir svc = do
+ let unitPath = baseDir </> Text.unpack (serviceName svc) <> ".service"
+ linkPath = sysDir </> Text.unpack (serviceName svc) <> ".service"
+ exists <- Dir.doesPathExist linkPath
+ when exists <| Dir.removeFile linkPath
+ Dir.createFileLink unitPath linkPath
+ pure linkPath
+
+reloadAndRestart :: Text -> IO ()
+reloadAndRestart serviceName' = do
+ _ <- Process.readProcessWithExitCode "systemctl" ["daemon-reload"] ""
+ _ <-
+ Process.readProcessWithExitCode
+ "systemctl"
+ ["enable", "--now", Text.unpack serviceName' <> ".service"]
+ ""
+ pure ()
+
+stopAndDisable :: Text -> IO ()
+stopAndDisable serviceName' = do
+ _ <-
+ Process.readProcessWithExitCode
+ "systemctl"
+ ["disable", "--now", Text.unpack serviceName' <> ".service"]
+ ""
+ pure ()
+
+removeUnit :: FilePath -> FilePath -> Text -> IO ()
+removeUnit baseDir sysDir serviceName' = do
+ let unitPath = baseDir </> Text.unpack serviceName' <> ".service"
+ linkPath = sysDir </> Text.unpack serviceName' <> ".service"
+ linkExists <- Dir.doesPathExist linkPath
+ when linkExists <| Dir.removeFile linkPath
+ unitExists <- Dir.doesPathExist unitPath
+ when unitExists <| Dir.removeFile unitPath
+ _ <- Process.readProcessWithExitCode "systemctl" ["daemon-reload"] ""
+ pure ()
+
+-- | Get the store path of the currently running service by reading its unit file.
+getRunningStorePath :: Text -> IO (Maybe Text)
+getRunningStorePath serviceName' = do
+ let unitPath = servicesDir </> Text.unpack serviceName' <> ".service"
+ exists <- Dir.doesFileExist unitPath
+ if not exists
+ then pure Nothing
+ else do
+ content <- Text.IO.readFile unitPath
+ pure <| extractStorePath content
+ where
+ -- Extract /nix/store/...-service-name from ExecStart=/nix/store/.../bin/...
+ extractStorePath content =
+ content
+ |> Text.lines
+ |> find (Text.isPrefixOf "ExecStart=")
+ |> fmap (Text.drop (Text.length "ExecStart="))
+        |> fmap (Text.drop (Text.length "/nix/store/"))
+ |> fmap (Text.takeWhile (/= '/'))
+ |> fmap ("/nix/store/" <>)
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Deploy.Systemd"
+ [ test_generateBasicUnit,
+ test_generateUnitWithEnv,
+ test_generateUnitWithCustomExec,
+ test_generateUnitWithEnvFile,
+ test_generateUnitWithDependencies,
+ test_generateUnitWithHardening
+ ]
+
+mkTestService :: Text -> Text -> Service
+mkTestService name path =
+ Service
+ { serviceName = name,
+ serviceArtifact = Artifact "nix-closure" path,
+ serviceHosts = ["biz"],
+ serviceExec = Exec Nothing "root" "root",
+ serviceEnv = mempty,
+ serviceEnvFile = Nothing,
+ serviceHttp = Nothing,
+ serviceSystemd = Systemd ["network-online.target"] [] "on-failure" 5,
+ serviceHardening = Hardening False True "strict" True,
+ serviceRevision = Nothing
+ }
+
+test_generateBasicUnit :: Test.Tree
+test_generateBasicUnit =
+ Test.unit "generates basic unit file" <| do
+ let svc = mkTestService "test-service" "/nix/store/abc123-test"
+ unit = generateUnit svc
+ Text.isInfixOf "[Unit]" unit Test.@=? True
+ Text.isInfixOf "Description=test-service" unit Test.@=? True
+ Text.isInfixOf "[Service]" unit Test.@=? True
+ Text.isInfixOf "ExecStart=/nix/store/abc123-test/bin/test-service" unit Test.@=? True
+ Text.isInfixOf "[Install]" unit Test.@=? True
+ Text.isInfixOf "WantedBy=multi-user.target" unit Test.@=? True
+
+test_generateUnitWithEnv :: Test.Tree
+test_generateUnitWithEnv =
+ Test.unit "generates unit with environment" <| do
+ let svc =
+ (mkTestService "env-test" "/nix/store/xyz")
+ { serviceEnv = Map.fromList [("PORT", "8000"), ("DEBUG", "true")]
+ }
+ unit = generateUnit svc
+ Text.isInfixOf "Environment=\"PORT=8000\"" unit Test.@=? True
+ Text.isInfixOf "Environment=\"DEBUG=true\"" unit Test.@=? True
+
+test_generateUnitWithCustomExec :: Test.Tree
+test_generateUnitWithCustomExec =
+ Test.unit "generates unit with custom exec" <| do
+ let svc =
+ (mkTestService "custom-exec" "/nix/store/abc")
+ { serviceExec = Exec (Just "my-binary") "www-data" "www-data"
+ }
+ unit = generateUnit svc
+ Text.isInfixOf "ExecStart=/nix/store/abc/bin/my-binary" unit Test.@=? True
+ Text.isInfixOf "User=www-data" unit Test.@=? True
+ Text.isInfixOf "Group=www-data" unit Test.@=? True
+
+test_generateUnitWithEnvFile :: Test.Tree
+test_generateUnitWithEnvFile =
+ Test.unit "generates unit with env file" <| do
+ let svc =
+ (mkTestService "env-file-test" "/nix/store/xyz")
+ { serviceEnvFile = Just "/var/lib/biz-secrets/test.env"
+ }
+ unit = generateUnit svc
+ Text.isInfixOf "EnvironmentFile=/var/lib/biz-secrets/test.env" unit Test.@=? True
+
+test_generateUnitWithDependencies :: Test.Tree
+test_generateUnitWithDependencies =
+ Test.unit "generates unit with dependencies" <| do
+ let svc =
+ (mkTestService "dep-test" "/nix/store/abc")
+ { serviceSystemd =
+ Systemd
+ ["network-online.target", "postgresql.service"]
+ ["postgresql.service"]
+ "on-failure"
+ 5
+ }
+ unit = generateUnit svc
+ Text.isInfixOf "After=network-online.target postgresql.service" unit Test.@=? True
+ Text.isInfixOf "Requires=postgresql.service" unit Test.@=? True
+
+test_generateUnitWithHardening :: Test.Tree
+test_generateUnitWithHardening =
+ Test.unit "generates unit with hardening" <| do
+ let svc =
+ (mkTestService "hardened" "/nix/store/abc")
+ { serviceHardening = Hardening False True "full" True
+ }
+ unit = generateUnit svc
+ Text.isInfixOf "PrivateTmp=yes" unit Test.@=? True
+ Text.isInfixOf "ProtectSystem=full" unit Test.@=? True
+ Text.isInfixOf "ProtectHome=yes" unit Test.@=? True
+ Text.isInfixOf "NoNewPrivileges=yes" unit Test.@=? True
+
+main :: IO ()
+main = Test.run test
diff --git a/Omni/Dev/Beryllium.nix b/Omni/Dev/Beryllium.nix
index 023523e..4d9ed09 100755
--- a/Omni/Dev/Beryllium.nix
+++ b/Omni/Dev/Beryllium.nix
@@ -5,6 +5,7 @@ bild.os {
../Os/Base.nix
../Packages.nix
../Users.nix
+ ./Beryllium/Ava.nix
./Beryllium/Configuration.nix
./Beryllium/Hardware.nix
./Beryllium/Ollama.nix
diff --git a/Omni/Dev/Beryllium/AVA.md b/Omni/Dev/Beryllium/AVA.md
new file mode 100644
index 0000000..620592b
--- /dev/null
+++ b/Omni/Dev/Beryllium/AVA.md
@@ -0,0 +1,111 @@
+# Ava Deployment on Beryllium
+
+Ava runs as a systemd service under the `ava` user.
+
+## Architecture
+
+```
+/home/ava/ # Ava's home directory (AVA_DATA_ROOT)
+├── omni/ # Clone of the omni repo
+├── skills/ # Ava's skills directory
+│ ├── shared/ # Skills available to all users
+│ └── <username>/ # User-specific skills
+├── outreach/ # Outreach approval queue
+│ ├── pending/
+│ ├── approved/
+│ ├── rejected/
+│ └── sent/
+├── users/ # Per-user scratch space
+│ └── <username>/
+└── .local/share/omni/
+ └── memory.db # SQLite memory database
+```
+
+## Configuration
+
+The service is configured in `Ava.nix` and requires these environment variables in `/run/secrets/ava.env`:
+
+```bash
+TELEGRAM_BOT_TOKEN=xxx
+OPENROUTER_API_KEY=xxx
+KAGI_API_KEY=xxx # optional
+ALLOWED_TELEGRAM_USER_IDS=xxx,yyy # or * for all
+```
+
+## Commands
+
+```bash
+# View logs
+journalctl -u ava -f
+
+# Restart service
+sudo systemctl restart ava
+
+# Check status
+sudo systemctl status ava
+
+# Stop/Start
+sudo systemctl stop ava
+sudo systemctl start ava
+```
+
+## SSH Access
+
+The Ava private key is at `~/.ssh/ava_ed25519`. Use it to SSH as ava:
+
+```bash
+ssh -i ~/.ssh/ava_ed25519 ava@beryl.bensima.com
+```
+
+Ben can also access ava's workspace via his own SSH key since ava is in the git group.
+
+## Git Setup
+
+Ava has its own clone of the omni repo at `/home/ava/omni`. To fetch changes from Ben's repo (`origin`):
+
+```bash
+# As ava:
+cd /home/ava/omni
+git fetch origin
+git pull origin main
+```
+
+Ben can also push directly to ava's repo if needed:
+
+```bash
+# From /home/ben/omni:
+git remote add ava /home/ava/omni
+git push ava main
+```
+
+## Redeploy
+
+To redeploy Ava with code changes:
+
+```bash
+# 1. Rebuild the NixOS config
+push.sh Omni/Dev/Beryllium.nix
+
+# 2. Or just restart the service if only env changes
+sudo systemctl restart ava
+```
+
+## Migration from tmux
+
+If migrating from the old tmux-based deployment:
+
+1. Deploy the NixOS config with the new ava user
+2. Run the migration script: `sudo ./Omni/Dev/Beryllium/migrate-ava.sh`
+3. Create `/run/secrets/ava.env` with the required secrets
+4. Stop the tmux ava process
+5. Start the systemd service: `sudo systemctl start ava`
+6. Enable on boot: `sudo systemctl enable ava`
+
+## Environment Variable: AVA_DATA_ROOT
+
+The `AVA_DATA_ROOT` environment variable controls where Ava stores its data:
+
+- **Development** (unset): Uses `_/var/ava/` (relative to repo)
+- **Production**: Set to `/home/ava` via the systemd service
+
+This allows the same codebase to run in both environments without changes.
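+
+A minimal sketch of that lookup (the actual resolution in the Ava code may
+differ; the `_/var/ava` default here mirrors the development behavior
+described above):
+
+```haskell
+import Data.Maybe (fromMaybe)
+import System.Environment (lookupEnv)
+
+-- | Resolve the data root: AVA_DATA_ROOT if set, else the in-repo default.
+avaDataRoot :: IO FilePath
+avaDataRoot = fromMaybe "_/var/ava" <$> lookupEnv "AVA_DATA_ROOT"
+```
+
+In production the systemd unit sets `AVA_DATA_ROOT=/home/ava`, so the lookup
+returns `/home/ava`.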
diff --git a/Omni/Dev/Beryllium/Ava.nix b/Omni/Dev/Beryllium/Ava.nix
new file mode 100644
index 0000000..6957352
--- /dev/null
+++ b/Omni/Dev/Beryllium/Ava.nix
@@ -0,0 +1,48 @@
+{...}: let
+ bild = import ../../Bild.nix {};
+ avaPkg = bild.run ../../Ava.hs;
+in {
+ systemd.services.ava = {
+ description = "Ava Telegram assistant";
+ after = ["network-online.target" "ollama.service"];
+ wants = ["network-online.target" "ollama.service"];
+ wantedBy = ["multi-user.target"];
+
+ serviceConfig = {
+ Type = "simple";
+ User = "ava";
+ Group = "users";
+ WorkingDirectory = "/home/ava/omni";
+
+ Environment = [
+ "AVA_DATA_ROOT=/home/ava"
+ "HOME=/home/ava"
+ "OLLAMA_URL=http://localhost:11434"
+ ];
+
+ EnvironmentFile = "/run/secrets/ava.env";
+
+ ExecStart = "${avaPkg}/bin/ava";
+
+ Restart = "on-failure";
+ RestartSec = 5;
+
+ TimeoutStopSec = 90;
+ KillMode = "mixed";
+ KillSignal = "SIGTERM";
+ };
+ };
+
+ systemd.tmpfiles.rules = [
+ "d /home/ava 0755 ava users -"
+ "d /home/ava/omni 0755 ava users -"
+ "d /home/ava/skills 0755 ava users -"
+ "d /home/ava/outreach 0755 ava users -"
+ "d /home/ava/outreach/pending 0755 ava users -"
+ "d /home/ava/outreach/approved 0755 ava users -"
+ "d /home/ava/outreach/rejected 0755 ava users -"
+ "d /home/ava/outreach/sent 0755 ava users -"
+ "d /home/ava/users 0755 ava users -"
+ "d /home/ava/.local/share/omni 0755 ava users -"
+ ];
+}
diff --git a/Omni/Dev/Beryllium/migrate-ava.sh b/Omni/Dev/Beryllium/migrate-ava.sh
new file mode 100755
index 0000000..91d2740
--- /dev/null
+++ b/Omni/Dev/Beryllium/migrate-ava.sh
@@ -0,0 +1,102 @@
+#!/usr/bin/env bash
+# Migration script: move Ava data from _/var/ava to /home/ava
+#
+# Run this ONCE after deploying the NixOS config with the new ava user.
+#
+# Usage:
+# sudo ./migrate-ava.sh
+#
+# This script:
+# 1. Copies existing data from _/var/ava to /home/ava
+# 2. Copies memory.db from ben's .local to ava's .local
+# 3. Clones the omni repo into /home/ava/omni
+# 4. Sets proper ownership
+
+set -euo pipefail
+
+GRN='\033[0;32m'
+YLW='\033[0;33m'
+RED='\033[0;31m'
+NC='\033[0m'
+
+OMNI_REPO="/home/ben/omni"
+AVA_HOME="/home/ava"
+OLD_DATA_ROOT="$OMNI_REPO/_/var/ava"
+
+echo -e "${YLW}=== Ava Migration Script ===${NC}"
+
+# Check we're running as root
+if [[ $EUID -ne 0 ]]; then
+ echo -e "${RED}Error: This script must be run as root${NC}"
+ exit 1
+fi
+
+# Check ava user exists
+if ! id ava &>/dev/null; then
+ echo -e "${RED}Error: ava user does not exist. Deploy the NixOS config first.${NC}"
+ exit 1
+fi
+
+# Create directory structure (tmpfiles should handle this, but just in case)
+echo -e "${YLW}Creating directory structure...${NC}"
+mkdir -p "$AVA_HOME"/{skills,outreach,users,omni,.local/share/omni}
+mkdir -p "$AVA_HOME"/outreach/{pending,approved,rejected,sent}
+
+# Copy skills
+if [[ -d "$OLD_DATA_ROOT/skills" ]]; then
+ echo -e "${YLW}Copying skills...${NC}"
+ rsync -av --progress "$OLD_DATA_ROOT/skills/" "$AVA_HOME/skills/"
+else
+ echo -e "${YLW}No skills to migrate${NC}"
+fi
+
+# Copy outreach
+if [[ -d "$OLD_DATA_ROOT/outreach" ]]; then
+ echo -e "${YLW}Copying outreach data...${NC}"
+ rsync -av --progress "$OLD_DATA_ROOT/outreach/" "$AVA_HOME/outreach/"
+else
+ echo -e "${YLW}No outreach data to migrate${NC}"
+fi
+
+# Copy memory.db if it exists
+BEN_MEMORY="/home/ben/.local/share/omni/memory.db"
+AVA_MEMORY="$AVA_HOME/.local/share/omni/memory.db"
+if [[ -f "$BEN_MEMORY" ]]; then
+ echo -e "${YLW}Copying memory database...${NC}"
+ cp -v "$BEN_MEMORY" "$AVA_MEMORY"
+else
+ echo -e "${YLW}No memory.db found at $BEN_MEMORY${NC}"
+fi
+
+# Clone or update the omni repo
+if [[ -d "$AVA_HOME/omni/.git" ]]; then
+ echo -e "${YLW}Omni repo already exists, updating...${NC}"
+ cd "$AVA_HOME/omni"
+ sudo -u ava git fetch origin
+else
+ echo -e "${YLW}Cloning omni repo...${NC}"
+ sudo -u ava git clone "$OMNI_REPO" "$AVA_HOME/omni"
+fi
+
+# Set ownership
+echo -e "${YLW}Setting ownership...${NC}"
+chown -R ava:users "$AVA_HOME"
+
+# Show summary
+echo ""
+echo -e "${GRN}=== Migration Complete ===${NC}"
+echo ""
+echo "Directory structure:"
+ls -la "$AVA_HOME"
+echo ""
+echo "Next steps:"
+echo "1. Create /run/secrets/ava.env with:"
+echo " TELEGRAM_BOT_TOKEN=xxx"
+echo " OPENROUTER_API_KEY=xxx"
+echo " KAGI_API_KEY=xxx (optional)"
+echo " ALLOWED_TELEGRAM_USER_IDS=xxx (or * for all)"
+echo ""
+echo "2. Stop the tmux Ava process"
+echo "3. Start the systemd service: sudo systemctl start ava"
+echo "4. Watch logs: journalctl -u ava -f"
+echo "5. Enable on boot: sudo systemctl enable ava"
diff --git a/Omni/Ide/push.sh b/Omni/Ide/push.sh
index ce1df3d..f6d67f4 100755
--- a/Omni/Ide/push.sh
+++ b/Omni/Ide/push.sh
@@ -1,35 +1,136 @@
#!/usr/bin/env bash
-# Eventually convert to haskell, see:
-# - https://github.com/awakesecurity/nix-deploy/blob/master/src/Main.hs
-# - http://www.haskellforall.com/2018/08/nixos-in-production.html
-prefix=${PWD/$CODEROOT}
-if [[ "$prefix" == "" ]]
-then
- target="$1"
-else
- target="$prefix.$1"
-fi
-what=$(realpath "${CODEROOT:?}/_/nix/$target")
-# hack: get the domain from the systemd service. there does not seem to be a way
-# to get it from nix-instantiate. (or, maybe i should put this in bild --plan?)
-where=$(rg --only-matching --replace '$2' --regexp '(domainname ")(.*)(")' \
- "$what/etc/systemd/system/domainname.service")
-nix copy --to ssh://"$USER"@"$where" "$what"
-ssh "$USER"@"$where" sudo nix-env --profile /nix/var/nix/profiles/system --set "$what"
-switch_cmd=(
- systemd-run
- -E LOCALE_ARCHIVE
- --setenv=XDG_RUNTIME_DIR=""
- --collect
- --no-ask-password
- --pipe
- --quiet
- --service-type=exec
- --unit=push-switch-to-configuration
- --wait
- "$what/bin/switch-to-configuration"
- "switch"
-)
-# shellcheck disable=SC2029
-ssh "$USER"@"$where" sudo "${switch_cmd[@]}"
-echo "${GRN}good: push: $target${NC}"
+# Deployment script for both NixOS configs and individual services.
+#
+# Usage:
+# push.sh Biz.nix # NixOS deploy (existing behavior)
+# push.sh Biz/PodcastItLater/Web.py # Service deploy (new behavior)
+#
+# For service deploys:
+# 1. Builds the target with bild (caches to S3 by default)
+# 2. Updates the manifest.json in S3 with new store path
+# 3. Deployer on target picks up change within 5 minutes
+#
+# Environment:
+# CODEROOT - Root of the codebase (required)
+# NIX_CACHE_KEY - Path to signing key (required for service deploys)
+
+set -euo pipefail
+
+# Colors
+GRN='\033[0;32m'
+YLW='\033[0;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
+# Derive service name from target path
+# Biz/PodcastItLater/Web.py -> podcastitlater-web
+# Biz/Storybook.py -> storybook
+derive_service_name() {
+ local target="$1"
+ # Remove extension
+ local base="${target%.*}"
+ # Remove Biz/ prefix if present
+ base="${base#Biz/}"
+ # Convert slashes to hyphens and lowercase
+ echo "$base" | tr '/' '-' | tr '[:upper:]' '[:lower:]'
+}
+
+# NixOS deploy (existing behavior)
+nixos_deploy() {
+ local target="$1"
+ prefix=${PWD/$CODEROOT}
+ if [[ "$prefix" == "" ]]; then
+ target="$1"
+ else
+ target="$prefix.$1"
+ fi
+ what=$(realpath "${CODEROOT:?}/_/nix/$target")
+ # hack: get the domain from the systemd service
+ where=$(rg --only-matching --replace '$2' --regexp '(domainname ")(.*)(")' \
+ "$what/etc/systemd/system/domainname.service")
+ nix copy --to ssh://"$USER"@"$where" "$what"
+ ssh "$USER"@"$where" sudo nix-env --profile /nix/var/nix/profiles/system --set "$what"
+ switch_cmd=(
+ systemd-run
+ -E LOCALE_ARCHIVE
+ --setenv=XDG_RUNTIME_DIR=""
+ --collect
+ --no-ask-password
+ --pipe
+ --quiet
+ --service-type=exec
+ --unit=push-switch-to-configuration
+ --wait
+ "$what/bin/switch-to-configuration"
+ "switch"
+ )
+ # shellcheck disable=SC2029
+ ssh "$USER"@"$where" sudo "${switch_cmd[@]}"
+ echo -e "${GRN}good: push: $target${NC}"
+}
+
+# Service deploy (new behavior)
+service_deploy() {
+ local target="$1"
+ local service_name
+ service_name=$(derive_service_name "$target")
+
+ echo -e "${YLW}info: push: deploying service $service_name${NC}"
+
+ # 1. Build and cache
+ echo -e "${YLW}info: push: building $target${NC}"
+ if ! bild "$target"; then
+ echo -e "${RED}fail: push: bild failed${NC}"
+ exit 1
+ fi
+
+ # Get store path from symlink in _/nix/
+ local symlink_path="${CODEROOT}/_/nix/${service_name}"
+ if [[ ! -L "$symlink_path" ]]; then
+ echo -e "${RED}fail: push: symlink not found: $symlink_path${NC}"
+ exit 1
+ fi
+
+ local store_path
+ store_path=$(readlink "$symlink_path")
+
+ if [[ -z "$store_path" ]]; then
+ echo -e "${RED}fail: push: could not read store path from symlink${NC}"
+ exit 1
+ fi
+
+ echo -e "${YLW}info: push: cached $store_path${NC}"
+
+ # 2. Get git revision
+ local revision
+ revision=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")
+
+ # 3. Update manifest in S3
+ echo -e "${YLW}info: push: updating manifest${NC}"
+ "${CODEROOT}/_/nix/deploy-manifest/bin/deploy-manifest" update "$service_name" "$store_path" "$revision" || {
+ echo -e "${RED}fail: push: manifest update failed${NC}"
+ exit 1
+ }
+
+ echo -e "${GRN}good: push: $service_name deployed (deployer will pick up in <5 min)${NC}"
+}
+
+# Main
+main() {
+ if [[ $# -lt 1 ]]; then
+ echo "Usage: push.sh <target>"
+ echo " target.nix -> NixOS deploy"
+ echo " target.py/.hs/.. -> Service deploy"
+ exit 1
+ fi
+
+ local target="$1"
+
+ if [[ "$target" == *.nix ]]; then
+ nixos_deploy "$target"
+ else
+ service_deploy "$target"
+ fi
+}
+
+main "$@"
diff --git a/Omni/Jr.hs b/Omni/Jr.hs
index b60a029..48dbf90 100755
--- a/Omni/Jr.hs
+++ b/Omni/Jr.hs
@@ -53,7 +53,7 @@ jr
Usage:
jr task [<args>...]
- jr work [<task-id>]
+ jr work [<task-id>] [--engine=ENGINE]
jr prompt <task-id>
jr web [--port=PORT]
jr review [<task-id>] [--auto]
@@ -77,6 +77,7 @@ Commands:
Options:
-h --help Show this help
--port=PORT Port for web server [default: 8080]
+ --engine=ENGINE LLM engine: openrouter, ollama, amp [default: openrouter]
--auto Auto-review: accept if tests pass, reject if they fail
--delay=SECONDS Delay between loop iterations [default: 5]
--project=PROJECT Filter facts by project
@@ -119,13 +120,20 @@ move args
absPath <- Directory.getCurrentDirectory
let name = Text.pack (takeFileName absPath)
+ -- Parse engine flag
+ let engineType = case Cli.getArg args (Cli.longOption "engine") of
+ Just "ollama" -> AgentCore.EngineOllama
+ Just "amp" -> AgentCore.EngineAmp
+ _ -> AgentCore.EngineOpenRouter
+
let worker =
AgentCore.Worker
{ AgentCore.workerName = name,
AgentCore.workerPid = Nothing,
AgentCore.workerStatus = AgentCore.Idle,
AgentCore.workerPath = path,
- AgentCore.workerQuiet = False -- Show ANSI status bar for manual work
+ AgentCore.workerQuiet = False, -- Show ANSI status bar for manual work
+ AgentCore.workerEngine = engineType
}
let taskId = fmap Text.pack (Cli.getArg args (Cli.argument "task-id"))
@@ -183,7 +191,8 @@ runLoop delaySec = do
AgentCore.workerPid = Nothing,
AgentCore.workerStatus = AgentCore.Idle,
AgentCore.workerPath = ".",
- AgentCore.workerQuiet = True -- No ANSI status bar in loop mode
+ AgentCore.workerQuiet = True, -- No ANSI status bar in loop mode
+ AgentCore.workerEngine = AgentCore.EngineOpenRouter -- Default for loop
}
putText "[loop] Starting worker..."
AgentWorker.start worker (Just (TaskCore.taskId task))
diff --git a/Omni/Keys/Ava.pub b/Omni/Keys/Ava.pub
new file mode 100644
index 0000000..77c314c
--- /dev/null
+++ b/Omni/Keys/Ava.pub
@@ -0,0 +1 @@
+ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOOv/fKFUS4exJtmnWqi5Taa3W5jTxqTmAZBtvisKMKH ava@beryl.bensima.com
diff --git a/Omni/Log.hs b/Omni/Log.hs
index 91fcb55..c42d5e8 100644
--- a/Omni/Log.hs
+++ b/Omni/Log.hs
@@ -15,6 +15,12 @@
-- * often use `br` after `warn`, unless its really unimportant
--
-- * labels should be roughly hierarchical from general->specific
+--
+-- Future improvements to consider:
+-- * Add timestamps (set via LOG_TIMESTAMPS=1 env var)
+-- * Add log level filtering (set via LOG_LEVEL=warn to suppress info)
+-- * Add structured JSON output (set via LOG_FORMAT=json for machine parsing)
+-- * Add a `debug` level below `info` for verbose debugging
module Omni.Log
( Lvl (..),
good,
@@ -22,6 +28,7 @@ module Omni.Log
info,
warn,
fail,
+ debug,
wipe,
-- * Debugging
@@ -50,7 +57,8 @@ import qualified System.Environment as Env
import qualified System.IO as IO
import System.IO.Unsafe (unsafePerformIO)
-data Lvl = Good | Pass | Info | Warn | Fail | Mark
+data Lvl = Debug | Good | Pass | Info | Warn | Fail | Mark
+ deriving (Eq, Ord)
-- | Get the environment. This should probably return 'Omni.App.Area' instead of
-- 'String', but I don't want to depend on everything in 'Omni.App', so some kind
@@ -60,19 +68,36 @@ area =
Env.lookupEnv "AREA"
/> maybe "Test" identity
+-- | Get the minimum log level from LOG_LEVEL env var (default: Info)
+-- Set LOG_LEVEL=debug to see debug messages, LOG_LEVEL=warn to suppress info
+minLogLevel :: Lvl
+minLogLevel =
+ unsafePerformIO <| do
+ Env.lookupEnv "LOG_LEVEL" /> \case
+ Just "debug" -> Debug
+ Just "info" -> Info
+ Just "warn" -> Warn
+ Just "fail" -> Fail
+ _ -> Info
+{-# NOINLINE minLogLevel #-}
+
msg :: Lvl -> [Text] -> IO ()
-msg lvl labels =
- area +> \case
- "Live" -> putDumb
- _ ->
- Env.getEnv "TERM" +> \case
- "dumb" -> putDumb
- _ -> Rainbow.hPutChunks IO.stderr [fore color <| clear <> chunk txt <> "\r"]
+msg lvl labels
+ | lvl < minLogLevel = pure () -- Skip messages below minimum level
+ | otherwise =
+ area +> \case
+ "Live" -> putDumb
+ _ ->
+ Env.lookupEnv "TERM" +> \case
+ Just "dumb" -> putDumb
+ Nothing -> putDumb
+ _ -> Rainbow.hPutChunks IO.stderr [fore color <| clear <> chunk txt <> "\r"]
where
-- For systemd-journal, emacs *compilation* buffers, etc.
putDumb = putStr <| txt <> "\n"
txt = fmt (label : labels)
(color, label) = case lvl of
+ Debug -> (white, "debg")
Good -> (green, "good")
Pass -> (green, "pass")
Info -> (white, "info")
@@ -94,12 +119,13 @@ br = Rainbow.hPutChunks stderr ["\n"] >> IO.hFlush stderr
wipe :: IO ()
wipe = hPutStr stderr ("\r" :: Text) >> IO.hFlush stderr
-good, pass, info, warn, fail :: [Text] -> IO ()
+good, pass, info, warn, fail, debug :: [Text] -> IO ()
good = msg Good
pass = msg Pass
info = msg Info
warn = msg Warn
fail = msg Fail
+debug = msg Debug
-- | Like 'Debug.trace' but follows the patterns in this module
mark :: (Show a) => Text -> a -> a
diff --git a/Omni/Os/Base.nix b/Omni/Os/Base.nix
index 0489b1c..a186772 100644
--- a/Omni/Os/Base.nix
+++ b/Omni/Os/Base.nix
@@ -6,8 +6,17 @@ in {
boot.tmp.cleanOnBoot = true;
networking.firewall.allowPing = true;
networking.firewall.allowedTCPPorts = [ports.et];
- nix.settings.substituters = ["https://cache.nixos.org" "https://nix-community.cachix.org"]; # "ssh://dev.bensima.com" ];
- nix.settings.trusted-public-keys = ["nix-community.cachix.org-1:mB9FSh9qf2dCimDSUo8Zy7bkq5CX+/rkCWyvRCYg3Fs="];
+ nix.settings.substituters = [
+ "https://cache.nixos.org"
+ "https://nix-community.cachix.org"
+ "s3://omni-nix-cache?profile=digitalocean&scheme=https&endpoint=nyc3.digitaloceanspaces.com"
+ ];
+ nix.settings.trusted-public-keys = [
+ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="
+ "nix-community.cachix.org-1:mB9FSh9qf2dCimDSUo8Zy7bkq5CX+/rkCWyvRCYg3Fs="
+ "omni-cache:vyAhEFT7D8si2T1SjKHcg6BpU37Qj5klMDRagfNHpUI="
+ ];
+ nix.settings.experimental-features = ["nix-command" "flakes"];
nix.gc.automatic = true;
nix.gc.dates = "Sunday 02:15";
nix.optimise.automatic = true;
diff --git a/Omni/Users.nix b/Omni/Users.nix
index 3de5712..4ae8c17 100644
--- a/Omni/Users.nix
+++ b/Omni/Users.nix
@@ -30,6 +30,13 @@ in {
openssh.authorizedKeys.keys = readKeys ./Keys/Deploy.pub;
extraGroups = ["wheel"];
};
+ ava = {
+ description = "Ava Telegram bot";
+ isNormalUser = true;
+ home = "/home/ava";
+ openssh.authorizedKeys.keys = readKeys ./Keys/Ava.pub;
+ extraGroups = ["git"];
+ };
#
# humans
#