diff options
| -rwxr-xr-x | Omni/Jr.hs | 53 |
1 file changed, 36 insertions, 17 deletions
@@ -9,7 +9,6 @@
 -- : dep servant-server
 -- : dep lucid
 -- : dep servant-lucid
--- : run llm
 module Omni.Jr where

 import Alpha
@@ -604,17 +603,25 @@ addCompletionSummary tid commitSha = do
   -- Build prompt for llm
   let prompt = buildCompletionPrompt tid commitMessage diffSummary files

-  -- Call llm CLI to generate summary
-  (llmCode, llmOut, llmErr) <- Process.readProcessWithExitCode "llm" [] (Text.unpack prompt)
-
-  case llmCode of
-    Exit.ExitSuccess -> do
-      let summary = Text.strip (Text.pack llmOut)
-      unless (Text.null summary) <| do
-        _ <- TaskCore.addComment tid ("## Completion Summary\n\n" <> summary)
-        putText "[review] Added completion summary comment"
-    Exit.ExitFailure _ -> do
-      putText ("[review] Failed to generate completion summary: " <> Text.pack llmErr)
+  -- Try to get API key
+  maybeApiKey <- Env.lookupEnv "OPENROUTER_API_KEY"
+  case maybeApiKey of
+    Nothing -> do
+      putText "[review] Warning: OPENROUTER_API_KEY not set, skipping completion summary"
+    Just apiKey -> do
+      -- Call LLM via Engine.chat
+      let llm = Engine.defaultLLM {Engine.llmApiKey = Text.pack apiKey}
+          messages = [Engine.Message Engine.User prompt Nothing Nothing]
+
+      result <- Engine.chat llm [] messages
+      case result of
+        Left err -> do
+          putText ("[review] Failed to generate completion summary: " <> err)
+        Right msg -> do
+          let summary = Text.strip (Engine.msgContent msg)
+          unless (Text.null summary) <| do
+            _ <- TaskCore.addComment tid ("## Completion Summary\n\n" <> summary)
+            putText "[review] Added completion summary comment"

 -- | Build prompt for LLM to generate completion summary
 buildCompletionPrompt :: Text -> Text -> Text -> [String] -> Text
@@ -654,11 +661,23 @@ extractFacts tid commitSha = do
     Nothing -> pure ()
     Just task -> do
       let prompt = buildFactExtractionPrompt task diffOut
-      -- Call llm CLI
-      (code, llmOut, _) <- Process.readProcessWithExitCode "llm" ["-s", Text.unpack prompt] ""
-      case code of
-        Exit.ExitSuccess -> parseFacts tid llmOut
-        _ -> putText "[facts] Failed to extract facts"
+
+      -- Try to get API key
+      maybeApiKey <- Env.lookupEnv "OPENROUTER_API_KEY"
+      case maybeApiKey of
+        Nothing -> do
+          putText "[facts] Warning: OPENROUTER_API_KEY not set, skipping fact extraction"
+        Just apiKey -> do
+          -- Call LLM via Engine.chat
+          let llm = Engine.defaultLLM {Engine.llmApiKey = Text.pack apiKey}
+              messages = [Engine.Message Engine.User prompt Nothing Nothing]
+
+          result <- Engine.chat llm [] messages
+          case result of
+            Left err -> do
+              putText ("[facts] Failed to extract facts: " <> err)
+            Right msg -> do
+              parseFacts tid (Text.unpack (Engine.msgContent msg))

 -- | Build prompt for LLM to extract facts from completed task
 buildFactExtractionPrompt :: TaskCore.Task -> String -> Text |
