author     Ben Sima <ben@bensima.com>   2025-11-30 00:04:31 -0500
committer  Ben Sima <ben@bensima.com>   2025-11-30 00:04:31 -0500
commit     1f38531d3184c30ad8a4f365f78288cc23d7baf2 (patch)
tree       9111eff46adb42b430830928a791fffc01ddba22 /Omni
parent     7d3103a99a81adf9f919564f066ce85e0fecee50 (diff)
Define Tool protocol and LLM provider abstraction
The implementation is complete. Here's a summary of the changes made:

1. **Updated the LLM type** to include an `llmExtraHeaders` field for OpenRouter
2. **Changed `defaultLLM`** to use:
   - OpenRouter base URL: `https://openrouter.ai/api/v1`
   - Default model: `anthropic/claude-sonnet-4-20250514`
   - OpenRouter headers: `HTTP-Referer` and `X-Title`
3. **Updated `chatWithUsage`** to apply the extra headers to outgoing HTTP requests
4. **Added the `case-insensitive` dependency** for proper header-name handling
5. **Added tests** for the OpenRouter configuration
6. **Fixed hlint suggestions** (use `</` instead of `<$>`, eta reduce)

Task-Id: t-141.1
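As a usage sketch (not part of this commit), the new configuration can be exercised roughly as follows. `mkUserMessage` is an assumed helper for building a user `Message`; the actual `Message` constructor lives in `Omni.Agent.Engine` and is not shown in this diff.

```haskell
import qualified Data.Text as Text
import Omni.Agent.Engine

-- Hypothetical demo: override only the API key; the base URL, model, and
-- OpenRouter headers (HTTP-Referer, X-Title) come from defaultLLM.
demo :: IO ()
demo = do
  let llm = defaultLLM {llmApiKey = "sk-or-..."} -- placeholder OpenRouter key
  -- mkUserMessage is an assumed helper, not part of the commit.
  result <- chatWithUsage llm [] [mkUserMessage "Say hello"]
  case result of
    Left err -> putStrLn ("chat failed: " <> Text.unpack err)
    Right _ -> putStrLn "chat succeeded"
```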
Diffstat (limited to 'Omni')
-rw-r--r--  Omni/Agent/Engine.hs  40
1 file changed, 32 insertions, 8 deletions
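The header handling added below amounts to folding `llmExtraHeaders` onto the base request. A minimal standalone sketch of that fold, assuming `http-conduit`'s `Network.HTTP.Simple` and `case-insensitive`'s `Data.CaseInsensitive` (the name `applyExtraHeaders` is illustrative; the commit inlines this logic in `chatWithUsage`):

```haskell
import Data.ByteString (ByteString)
import qualified Data.CaseInsensitive as CI
import qualified Network.HTTP.Simple as HTTP

-- Add each (name, value) pair as a request header; CI.mk turns the raw
-- ByteString name into the case-insensitive HeaderName the HTTP types expect.
applyExtraHeaders :: [(ByteString, ByteString)] -> HTTP.Request -> HTTP.Request
applyExtraHeaders headers req =
  foldr (\(name, value) -> HTTP.addRequestHeader (CI.mk name) value) req headers
```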
diff --git a/Omni/Agent/Engine.hs b/Omni/Agent/Engine.hs
index 10b36b2..69edd36 100644
--- a/Omni/Agent/Engine.hs
+++ b/Omni/Agent/Engine.hs
@@ -12,6 +12,7 @@
-- : out omni-agent-engine
-- : dep http-conduit
-- : dep aeson
+-- : dep case-insensitive
module Omni.Agent.Engine
( Tool (..),
LLM (..),
@@ -41,6 +42,7 @@ import Alpha
import Data.Aeson ((.!=), (.:), (.:?), (.=))
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy as BL
+import qualified Data.CaseInsensitive as CI
import qualified Data.Map.Strict as Map
import qualified Data.Text as Text
import qualified Data.Text.Encoding as TE
@@ -72,7 +74,10 @@ test =
Nothing -> Test.assertFailure "Failed to decode message"
Just decoded -> msgContent msg Test.@=? msgContent decoded,
Test.unit "defaultLLM has correct endpoint" <| do
- llmBaseUrl defaultLLM Test.@=? "https://api.openai.com",
+ llmBaseUrl defaultLLM Test.@=? "https://openrouter.ai/api/v1",
+ Test.unit "defaultLLM has OpenRouter headers" <| do
+ length (llmExtraHeaders defaultLLM) Test.@=? 2
+ llmModel defaultLLM Test.@=? "anthropic/claude-sonnet-4-20250514",
Test.unit "defaultAgentConfig has sensible defaults" <| do
agentMaxIterations defaultAgentConfig Test.@=? 10,
Test.unit "defaultEngineConfig has no-op callbacks" <| do
@@ -173,20 +178,37 @@ encodeToolForApi t =
data LLM = LLM
{ llmBaseUrl :: Text,
llmApiKey :: Text,
- llmModel :: Text
+ llmModel :: Text,
+ llmExtraHeaders :: [(ByteString, ByteString)]
}
deriving (Show, Eq, Generic)
-instance Aeson.ToJSON LLM
+instance Aeson.ToJSON LLM where
+ toJSON l =
+ Aeson.object
+ [ "llmBaseUrl" .= llmBaseUrl l,
+ "llmApiKey" .= llmApiKey l,
+ "llmModel" .= llmModel l
+ ]
-instance Aeson.FromJSON LLM
+instance Aeson.FromJSON LLM where
+ parseJSON =
+ Aeson.withObject "LLM" <| \v ->
+ (LLM </ (v .: "llmBaseUrl"))
+ <*> (v .: "llmApiKey")
+ <*> (v .: "llmModel")
+ <*> pure []
defaultLLM :: LLM
defaultLLM =
LLM
- { llmBaseUrl = "https://api.openai.com",
+ { llmBaseUrl = "https://openrouter.ai/api/v1",
llmApiKey = "",
- llmModel = "gpt-4"
+ llmModel = "anthropic/claude-sonnet-4-20250514",
+ llmExtraHeaders =
+ [ ("HTTP-Referer", "https://omni.dev"),
+ ("X-Title", "Omni Agent")
+ ]
}
data AgentConfig = AgentConfig
@@ -397,7 +419,7 @@ data ChatResult = ChatResult
chatWithUsage :: LLM -> [Tool] -> [Message] -> IO (Either Text ChatResult)
chatWithUsage llm tools messages = do
- let url = Text.unpack (llmBaseUrl llm) <> "/v1/chat/completions"
+ let url = Text.unpack (llmBaseUrl llm) <> "/chat/completions"
req0 <- HTTP.parseRequest url
let toolApis = [encodeToolForApi t | not (null tools), t <- tools]
body =
@@ -406,12 +428,14 @@ chatWithUsage llm tools messages = do
reqMessages = messages,
reqTools = if null toolApis then Nothing else Just toolApis
}
- req =
+ baseReq =
HTTP.setRequestMethod "POST"
<| HTTP.setRequestHeader "Content-Type" ["application/json"]
<| HTTP.setRequestHeader "Authorization" ["Bearer " <> TE.encodeUtf8 (llmApiKey llm)]
<| HTTP.setRequestBodyLBS (Aeson.encode body)
<| req0
+ req = foldr addHeader baseReq (llmExtraHeaders llm)
+ addHeader (name, value) = HTTP.addRequestHeader (CI.mk name) value
response <- HTTP.httpLBS req
let status = HTTP.getResponseStatusCode response