Diffstat (limited to 'Omni')
-rw-r--r--   Omni/Agent.hs                      77
-rw-r--r--   Omni/Agent/Core.hs                 37
-rw-r--r--   Omni/Agent/DESIGN.md                9
-rw-r--r--   Omni/Agent/Git.hs                 201
-rw-r--r--   Omni/Agent/Log.hs                 218
-rw-r--r--   Omni/Agent/LogTest.hs              74
-rw-r--r--   Omni/Agent/WORKER_AGENT_GUIDE.md   67
-rw-r--r--   Omni/Agent/Worker.hs              252
-rwxr-xr-x   Omni/Agent/harvest-tasks.sh         9
-rwxr-xr-x   Omni/Agent/monitor.sh              29
-rwxr-xr-x   Omni/Agent/setup-worker.sh          7
-rwxr-xr-x   Omni/Agent/start-worker.sh        171
-rw-r--r--   Omni/Bild.hs                       69
-rwxr-xr-x   Omni/Bild/Audit.py                176
-rw-r--r--   Omni/Bild/Deps/Haskell.nix          1
-rw-r--r--   Omni/Namespace.hs                   5
-rw-r--r--   Omni/Task.hs                      183
-rw-r--r--   Omni/Task/Core.hs                 499
-rw-r--r--   Omni/Task/RaceTest.hs              56
19 files changed, 1766 insertions, 374 deletions
diff --git a/Omni/Agent.hs b/Omni/Agent.hs
new file mode 100644
index 0000000..d53bccd
--- /dev/null
+++ b/Omni/Agent.hs
@@ -0,0 +1,77 @@
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- : out agent
+-- : dep temporary
+module Omni.Agent where
+
+import Alpha
+import qualified Data.Text as Text
+import qualified Omni.Agent.Core as Core
+import qualified Omni.Agent.Worker as Worker
+import qualified Omni.Cli as Cli
+import qualified Omni.Test as Test
+import qualified System.Console.Docopt as Docopt
+
+main :: IO ()
+main = Cli.main plan
+
+plan :: Cli.Plan ()
+plan =
+ Cli.Plan
+ { Cli.help = help,
+ Cli.move = move,
+ Cli.test = test,
+ Cli.tidy = \_ -> pure ()
+ }
+
+help :: Cli.Docopt
+help =
+ [Cli.docopt|
+agent
+
+Usage:
+ agent start <name> [--path=<path>]
+ agent test
+ agent --help
+
+Options:
+ --path=<path> Path to the worker directory [default: .]
+ --help Show this help
+|]
+
+move :: Cli.Arguments -> IO ()
+move args
+ | args `Cli.has` Cli.command "start" = do
+ name <-
+ Cli.getArg args (Cli.argument "name") |> \case
+ Just n -> pure (Text.pack n)
+ Nothing -> panic "Name required"
+ let path = Cli.getArgWithDefault args "." (Cli.longOption "path")
+
+ let worker =
+ Core.Worker
+ { Core.workerName = name,
+ Core.workerPid = Nothing,
+ Core.workerStatus = Core.Idle,
+ Core.workerPath = path
+ }
+
+ Worker.start worker
+ | otherwise = putStrLn (Cli.usage help)
+
+test :: Test.Tree
+test = Test.group "Omni.Agent" [unitTests]
+
+unitTests :: Test.Tree
+unitTests =
+ Test.group
+ "Unit tests"
+ [ Test.unit "can parse start command" <| do
+ let result = Docopt.parseArgs help ["start", "worker-1"]
+ case result of
+ Left err -> Test.assertFailure <| "Failed to parse 'start': " <> show err
+ Right args -> args `Cli.has` Cli.command "start" Test.@?= True
+ ]
diff --git a/Omni/Agent/Core.hs b/Omni/Agent/Core.hs
new file mode 100644
index 0000000..2d09e39
--- /dev/null
+++ b/Omni/Agent/Core.hs
@@ -0,0 +1,37 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- : out omni-agent-core
+module Omni.Agent.Core where
+
+import Alpha
+import Data.Aeson (FromJSON, ToJSON)
+
+-- | Status of a worker agent
+data WorkerStatus
+ = Idle
+ | Syncing
+ | -- | Task ID
+ Working Text
+ | -- | Task ID
+ Submitting Text
+ | -- | Error message
+ Error Text
+ deriving (Show, Eq, Generic)
+
+instance ToJSON WorkerStatus
+
+instance FromJSON WorkerStatus
+
+-- | Representation of a worker agent
+data Worker = Worker
+ { workerName :: Text,
+ workerPid :: Maybe Int,
+ workerStatus :: WorkerStatus,
+ workerPath :: FilePath
+ }
+ deriving (Show, Eq, Generic)
+
+instance ToJSON Worker
+
+instance FromJSON Worker
diff --git a/Omni/Agent/DESIGN.md b/Omni/Agent/DESIGN.md
index eb30ecb..2d1e6e3 100644
--- a/Omni/Agent/DESIGN.md
+++ b/Omni/Agent/DESIGN.md
@@ -77,6 +77,7 @@ The Haskell implementation should replicate the logic of `start-worker.sh` but w
- **Two-line Status**: The CLI should maintain two reserved lines at the bottom (or top) of the output for each worker:
- **Line 1 (Meta)**: `[Worker: omni-worker-1] Task: t-123 | Files: 3 | Credits: $0.45 | Time: 05:23`
- **Line 2 (Activity)**: `[14:05:22] 🤖 Thinking...` (updates in place)
+ - **Task Details**: When claiming a task, print the full task description/details to the log/console so the user can see what is being worked on without looking it up.
- **Completion**: When a task finishes, print a summary line (e.g., `[✓] Task t-123 completed in 12m 30s`) and a hard line break before starting the next loop.
- **History**: Previous log lines (tool outputs, thoughts) scroll up above these two status lines.
@@ -84,13 +85,19 @@ The Haskell implementation should replicate the logic of `start-worker.sh` but w
- Iterate over `.tasks/workers/` or `git worktree list`.
- For each worker, extract `.tasks/tasks.jsonl` via `git show`.
- Run `Task.import`.
+- **Squashing**: If the previous commit on the target branch (live) was a harvest commit, use `git commit --amend` to consolidate updates and reduce commit noise.
### 4.5 Git Robustness (Learnings)
+- **Identity**: Configure `git config user.name "Omni Worker"` and `user.email` in the worktree to clearly distinguish worker commits from human commits.
- **Force Checkout**: The worker must use `git checkout -f` (or equivalent) when switching to task branches to ensure untracked files (like `.tasks/counters.jsonl`) don't block the switch.
- **Base Branch Logic**:
- If the task depends on another task that is *not* yet in `live` (e.g., in `Review`), the worker should branch off the dependency's branch (`task/<dep-id>`).
- - Otherwise, branch off `live` (via the worker's base branch).
+ - Otherwise, branch off `live` directly. Do NOT use the local worker branch (`omni-worker-N`) as the base, as it may contain temporary sync commits that shouldn't be merged.
+- **Commit Hygiene**: Bundle the task status update (marking as 'Review') *inside* the feature implementation commit. This keeps the history clean (one commit per feature) and avoids separate "sync" commits for status changes.
- **Clean State**: The worker should ensure the workspace is clean (no uncommitted changes) before starting a new loop iteration.
+- **Rebase Safety**: Always check the exit code of `git rebase`. If it fails (conflicts), abort immediately (`git rebase --abort`) to avoid leaving the repo in a broken interactive rebase state.
+- **Status Verification**: Verify that task status updates actually succeed. Check `task ready` output against `live` state to prevent "zombie" tasks (completed in live but stuck in local loop) from being re-claimed.
+- **Binary Freshness**: Ensure the `task` binary used by the worker is rebuilt/updated when source code changes, otherwise logic fixes (like `task ready` filtering) won't take effect.
## 5. Migration Strategy
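The "Rebase Safety" learning above is the one most easily gotten wrong, so here is a minimal sketch of what it implies in code, using the same `System.Process` approach as `Omni/Agent/Git.hs` below. The helper name `rebaseOntoLive` is hypothetical and not part of this diff:

```haskell
import System.Exit (ExitCode (..))
import System.Process (CreateProcess (..), proc, readCreateProcessWithExitCode)

-- Rebase the current branch onto live; on conflict, abort immediately so the
-- worktree is never left in a half-finished interactive rebase.
rebaseOntoLive :: FilePath -> IO Bool
rebaseOntoLive repo = do
  let run args = readCreateProcessWithExitCode ((proc "git" args) {cwd = Just repo}) ""
  (code, _, _) <- run ["rebase", "live"]
  case code of
    ExitSuccess -> pure True
    ExitFailure _ -> do
      _ <- run ["rebase", "--abort"]
      pure False

main :: IO ()
main = rebaseOntoLive "." >>= print
```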
diff --git a/Omni/Agent/Git.hs b/Omni/Agent/Git.hs
new file mode 100644
index 0000000..b1978f2
--- /dev/null
+++ b/Omni/Agent/Git.hs
@@ -0,0 +1,201 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Git operations for the agent.
+--
+-- : out omni-agent-git
+-- : dep temporary
+module Omni.Agent.Git
+ ( checkout,
+ syncWithLive,
+ commit,
+ createBranch,
+ getCurrentBranch,
+ branchExists,
+ isMerged,
+ main,
+ test,
+ )
+where
+
+import Alpha
+import qualified Data.Text as Text
+import qualified Omni.Log as Log
+import Omni.Test ((@=?))
+import qualified Omni.Test as Test
+import qualified System.Directory as Directory
+import qualified System.Exit as Exit
+import qualified System.IO.Temp as Temp
+import qualified System.Process as Process
+
+main :: IO ()
+main = Test.run test
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Agent.Git"
+ [ Test.unit "checkout works" <| do
+ Temp.withSystemTempDirectory "omni-agent-git-test" <| \tmpDir -> do
+ let repo = tmpDir <> "/repo"
+ Directory.createDirectory repo
+ -- init repo
+ git repo ["init"]
+ git repo ["branch", "-m", "master"]
+ git repo ["config", "user.email", "you@example.com"]
+ git repo ["config", "user.name", "Your Name"]
+
+ -- commit A
+ writeFile (repo <> "/a.txt") "A"
+ git repo ["add", "a.txt"]
+ git repo ["commit", "-m", "A"]
+ shaA <- getSha repo "HEAD"
+
+ -- create branch dev
+ git repo ["checkout", "-b", "dev"]
+
+ -- commit B
+ writeFile (repo <> "/b.txt") "B"
+ git repo ["add", "b.txt"]
+ git repo ["commit", "-m", "B"]
+ shaB <- getSha repo "HEAD"
+
+ -- switch back to master
+ git repo ["checkout", "master"]
+
+ -- Test 1: checkout dev
+ checkout repo "dev"
+ current <- getSha repo "HEAD"
+ shaB @=? current
+
+ -- Test 2: checkout master
+ checkout repo "master"
+ current' <- getSha repo "HEAD"
+ shaA @=? current'
+
+ -- Test 3: dirty state
+ writeFile (repo <> "/a.txt") "DIRTY"
+ checkout repo "dev"
+ current'' <- getSha repo "HEAD"
+ shaB @=? current''
+      -- Verify the dirty edit was discarded: on dev, a.txt still has its
+      -- committed content "A" (the file exists on both branches).
+ content <- readFile (repo <> "/a.txt")
+ "A" @=? content
+
+ -- Test 4: untracked file
+ writeFile (repo <> "/untracked.txt") "DELETE ME"
+ checkout repo "master"
+ exists <- Directory.doesFileExist (repo <> "/untracked.txt")
+ False @=? exists
+ ]
+
+getSha :: FilePath -> String -> IO String
+getSha dir ref = do
+ let cmd = (Process.proc "git" ["rev-parse", ref]) {Process.cwd = Just dir}
+ (code, out, _) <- Process.readCreateProcessWithExitCode cmd ""
+ case code of
+ Exit.ExitSuccess -> pure <| strip out
+ _ -> panic "getSha failed"
+
+-- | Checkout a specific ref (SHA, branch, tag) in the given repository path.
+-- This function ensures the repository is in the correct state by:
+-- 1. Fetching all updates
+-- 2. Checking out the ref (forcing overwrites of local changes)
+-- 3. Resetting hard to the ref (to ensure clean state)
+-- 4. Cleaning untracked files
+-- 5. Updating submodules
+checkout :: FilePath -> Text -> IO ()
+checkout repoPath ref = do
+ let r = Text.unpack ref
+
+ Log.info ["git", "checkout", ref, "in", Text.pack repoPath]
+
+ -- Fetch all refs to ensure we have the target
+ git repoPath ["fetch", "--all", "--tags"]
+
+ -- Checkout the ref, discarding local changes
+ git repoPath ["checkout", "--force", r]
+
+ -- Reset hard to ensure we are exactly at the target state
+ git repoPath ["reset", "--hard", r]
+
+ -- Remove untracked files and directories
+ git repoPath ["clean", "-fdx"]
+
+ -- Update submodules
+ git repoPath ["submodule", "update", "--init", "--recursive"]
+
+ Log.good ["git", "checkout", "complete"]
+ Log.br
+
+-- | Run a git command in the given directory.
+git :: FilePath -> [String] -> IO ()
+git dir args = do
+ let cmd = (Process.proc "git" args) {Process.cwd = Just dir}
+ (exitCode, out, err) <- Process.readCreateProcessWithExitCode cmd ""
+ case exitCode of
+ Exit.ExitSuccess -> pure ()
+ Exit.ExitFailure code -> do
+ Log.fail ["git command failed", Text.pack (show args), "code: " <> show code]
+ Log.info [Text.pack out]
+ Log.info [Text.pack err]
+ Log.br
+ panic <| "git command failed: git " <> show args
+
+syncWithLive :: FilePath -> IO ()
+syncWithLive repo = do
+ Log.info ["git", "syncing with live"]
+ -- git repo ["fetch", "origin", "live"] -- Optional
+
+ -- Try sync (branchless sync), if fail, panic
+ -- This replaces manual rebase and handles stack movement
+ let cmd = (Process.proc "git" ["sync"]) {Process.cwd = Just repo}
+ (code, out, err) <- Process.readCreateProcessWithExitCode cmd ""
+ case code of
+ Exit.ExitSuccess -> pure ()
+ Exit.ExitFailure _ -> do
+ Log.warn ["git sync failed", Text.pack err]
+ Log.info [Text.pack out]
+ panic "Sync with live failed (git sync)"
+
+commit :: FilePath -> Text -> IO ()
+commit repo msg = do
+ Log.info ["git", "commit", msg]
+ git repo ["add", "."]
+
+ -- Check for changes before committing to avoid error
+ let checkCmd = (Process.proc "git" ["diff", "--cached", "--quiet"]) {Process.cwd = Just repo}
+ (code, _, _) <- Process.readCreateProcessWithExitCode checkCmd ""
+
+ case code of
+ Exit.ExitSuccess -> Log.warn ["git", "nothing to commit", "skipping"]
+ Exit.ExitFailure 1 -> git repo ["commit", "-m", Text.unpack msg]
+ Exit.ExitFailure c -> panic <| "git diff failed with code " <> show c
+
+createBranch :: FilePath -> Text -> IO ()
+createBranch repo branch = do
+ Log.info ["git", "create branch", branch]
+ git repo ["checkout", "-b", Text.unpack branch]
+
+getCurrentBranch :: FilePath -> IO Text
+getCurrentBranch repo = do
+ let cmd = (Process.proc "git" ["branch", "--show-current"]) {Process.cwd = Just repo}
+ (code, out, _) <- Process.readCreateProcessWithExitCode cmd ""
+ case code of
+ Exit.ExitSuccess -> pure <| Text.strip (Text.pack out)
+ _ -> panic "git branch failed"
+
+branchExists :: FilePath -> Text -> IO Bool
+branchExists repo branch = do
+ let cmd = (Process.proc "git" ["show-ref", "--verify", "refs/heads/" <> Text.unpack branch]) {Process.cwd = Just repo}
+ (code, _, _) <- Process.readCreateProcessWithExitCode cmd ""
+ pure (code == Exit.ExitSuccess)
+
+isMerged :: FilePath -> Text -> Text -> IO Bool
+isMerged repo branch target = do
+ -- Check if 'branch' is merged into 'target'
+ -- git merge-base --is-ancestor <branch> <target>
+ let cmd = (Process.proc "git" ["merge-base", "--is-ancestor", Text.unpack branch, Text.unpack target]) {Process.cwd = Just repo}
+ (code, _, _) <- Process.readCreateProcessWithExitCode cmd ""
+ pure (code == Exit.ExitSuccess)
diff --git a/Omni/Agent/Log.hs b/Omni/Agent/Log.hs
new file mode 100644
index 0000000..dd66abc
--- /dev/null
+++ b/Omni/Agent/Log.hs
@@ -0,0 +1,218 @@
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- : out omni-agent-log
+module Omni.Agent.Log where
+
+import Alpha
+import Data.Aeson (Value (..), decode)
+import qualified Data.Aeson.KeyMap as KM
+import qualified Data.ByteString.Lazy as BL
+import Data.IORef (IORef, modifyIORef', newIORef, readIORef, writeIORef)
+import qualified Data.Text.Encoding as TextEnc
+import qualified Data.Text.IO as TIO
+import qualified Data.Vector as V
+import qualified System.Console.ANSI as ANSI
+import qualified System.IO as IO
+import System.IO.Unsafe (unsafePerformIO)
+import Text.Printf (printf)
+
+-- | Parsed log entry
+data LogEntry = LogEntry
+ { leMessage :: Maybe Text,
+ leLevel :: Maybe Text,
+ leToolName :: Maybe Text,
+ leBatches :: Maybe [[Text]],
+ leMethod :: Maybe Text,
+ lePath :: Maybe Text
+ }
+ deriving (Show, Eq)
+
+-- | Status of the agent for the UI
+data Status = Status
+ { statusWorker :: Text,
+ statusTask :: Maybe Text,
+ statusThreadId :: Maybe Text,
+ statusFiles :: Int,
+ statusCredits :: Double,
+ statusTime :: Text, -- formatted time string
+ statusActivity :: Text
+ }
+ deriving (Show, Eq)
+
+emptyStatus :: Text -> Status
+emptyStatus workerName =
+ Status
+ { statusWorker = workerName,
+ statusTask = Nothing,
+ statusThreadId = Nothing,
+ statusFiles = 0,
+ statusCredits = 0.0,
+ statusTime = "00:00",
+ statusActivity = "Idle"
+ }
+
+-- | Global state for the status bar
+{-# NOINLINE currentStatus #-}
+currentStatus :: IORef Status
+currentStatus = unsafePerformIO (newIORef (emptyStatus "Unknown"))
+
+-- | Initialize the status bar system
+init :: Text -> IO ()
+init workerName = do
+ IO.hSetBuffering IO.stderr IO.LineBuffering
+ writeIORef currentStatus (emptyStatus workerName)
+ -- Reserve 5 lines at bottom
+ replicateM_ 5 (IO.hPutStrLn IO.stderr "")
+ ANSI.hCursorUp IO.stderr 5
+
+-- | Update the status
+update :: (Status -> Status) -> IO ()
+update f = do
+ modifyIORef' currentStatus f
+ render
+
+-- | Set the activity message
+updateActivity :: Text -> IO ()
+updateActivity msg = update (\s -> s {statusActivity = msg})
+
+-- | Process a log line from the agent and update status if relevant
+processLogLine :: Text -> IO ()
+processLogLine line = do
+ let entry = parseLine line
+ for_ (entry +> formatLogEntry) updateActivity
+
+-- | Parse a JSON log line into a LogEntry
+parseLine :: Text -> Maybe LogEntry
+parseLine line = do
+ let lbs = BL.fromStrict (TextEnc.encodeUtf8 line)
+ obj <- decode lbs
+ case obj of
+ Object o ->
+ Just
+ LogEntry
+ { leMessage = getString "message" o,
+ leLevel = getString "level" o,
+ leToolName = getString "toolName" o,
+ leBatches = getBatches o,
+ leMethod = getString "method" o,
+ lePath = getString "path" o
+ }
+ _ -> Nothing
+ where
+ getString k o =
+ case KM.lookup k o of
+ Just (String s) -> Just s
+ _ -> Nothing
+
+ getBatches o =
+ case KM.lookup "batches" o of
+ Just (Array b) ->
+ Just
+ <| mapMaybe
+ ( \case
+ Array b0 ->
+ Just
+ <| mapMaybe
+ ( \case
+ String s -> Just s
+ _ -> Nothing
+ )
+ (V.toList b0)
+ _ -> Nothing
+ )
+ (V.toList b)
+ _ -> Nothing
+
+-- | Format a log entry into a user-friendly status message (NO EMOJIS)
+formatLogEntry :: LogEntry -> Maybe Text
+formatLogEntry LogEntry {..} =
+ case leMessage of
+ Just "executing 1 tools in 1 batch(es)" -> do
+ let tools = fromMaybe [] leBatches
+ let firstTool = case tools of
+ ((t : _) : _) -> t
+ _ -> "unknown"
+ Just ("THOUGHT: Planning tool execution (" <> firstTool <> ")")
+ Just "Tool Bash permitted - action: allow" ->
+ Just "TOOL: Bash command executed"
+ Just "Processing tool completion for ledger"
+ | isJust leToolName ->
+ Just ("TOOL: " <> fromMaybe "unknown" leToolName <> " completed")
+ Just "ide-fs" | leMethod == Just "readFile" ->
+ case lePath of
+ Just p -> Just ("READ: " <> p)
+ _ -> Nothing
+ Just "System prompt build complete (no changes)" ->
+ Just "THINKING..."
+ Just "System prompt build complete (first build)" ->
+ Just "STARTING new task context"
+ Just msg
+ | leLevel == Just "error" ->
+ Just ("ERROR: " <> msg)
+ _ -> Nothing
+
+-- | Log a scrolling message (appears above status bars)
+log :: Text -> IO ()
+log msg = do
+ -- Clear status bars (5 lines)
+ ANSI.hClearLine IO.stderr
+ ANSI.hCursorDown IO.stderr 1
+ ANSI.hClearLine IO.stderr
+ ANSI.hCursorDown IO.stderr 1
+ ANSI.hClearLine IO.stderr
+ ANSI.hCursorDown IO.stderr 1
+ ANSI.hClearLine IO.stderr
+ ANSI.hCursorDown IO.stderr 1
+ ANSI.hClearLine IO.stderr
+ ANSI.hCursorUp IO.stderr 4
+
+ -- Print message (scrolls screen)
+ TIO.hPutStrLn IO.stderr msg
+
+ -- Re-render status bars at bottom
+ -- (Since we scrolled, we are now on the line above where the first status line should be)
+ render
+
+-- | Render the 5 status lines (Vertical Layout)
+render :: IO ()
+render = do
+ Status {..} <- readIORef currentStatus
+
+ let taskStr = maybe "None" identity statusTask
+ threadStr = maybe "None" identity statusThreadId
+
+ -- Line 1: Worker + Time
+ ANSI.hSetCursorColumn IO.stderr 0
+ ANSI.hClearLine IO.stderr
+ TIO.hPutStr IO.stderr <| "Worker: " <> statusWorker <> " | Time: " <> statusTime
+
+ -- Line 2: Task
+ ANSI.hCursorDown IO.stderr 1
+ ANSI.hSetCursorColumn IO.stderr 0
+ ANSI.hClearLine IO.stderr
+ TIO.hPutStr IO.stderr <| "Task: " <> taskStr
+
+ -- Line 3: Thread
+ ANSI.hCursorDown IO.stderr 1
+ ANSI.hSetCursorColumn IO.stderr 0
+ ANSI.hClearLine IO.stderr
+ TIO.hPutStr IO.stderr <| "Thread: " <> threadStr
+
+ -- Line 4: Credits
+ ANSI.hCursorDown IO.stderr 1
+ ANSI.hSetCursorColumn IO.stderr 0
+ ANSI.hClearLine IO.stderr
+ TIO.hPutStr IO.stderr <| "Credits: $" <> str (printf "%.2f" statusCredits :: String)
+
+ -- Line 5: Activity
+ ANSI.hCursorDown IO.stderr 1
+ ANSI.hSetCursorColumn IO.stderr 0
+ ANSI.hClearLine IO.stderr
+ TIO.hPutStr IO.stderr ("> " <> statusActivity)
+
+ -- Return cursor to Line 1
+ ANSI.hCursorUp IO.stderr 4
+ IO.hFlush IO.stderr
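A hedged usage sketch (not part of this diff) of how a caller is expected to drive the status bar defined in `Omni.Agent.Log`: reserve the five lines once with `init`, then interleave scrolling history via `log` with in-place updates via `update` and `updateActivity`. The worker name and task id are placeholders.

```haskell
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}

import Alpha
import qualified Omni.Agent.Log as AgentLog

main :: IO ()
main = do
  AgentLog.init "omni-worker-1"                      -- reserve the 5 status lines
  AgentLog.update (\s -> s {AgentLog.statusTask = Just "t-123"})
  AgentLog.log "Claimed task t-123"                  -- scrolls above the status block
  AgentLog.updateActivity "Running Amp agent..."     -- rewrites the activity line in place
```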
diff --git a/Omni/Agent/LogTest.hs b/Omni/Agent/LogTest.hs
new file mode 100644
index 0000000..97b558d
--- /dev/null
+++ b/Omni/Agent/LogTest.hs
@@ -0,0 +1,74 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- : out agent-log-test
+module Omni.Agent.LogTest where
+
+import Alpha
+import Omni.Agent.Log
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Test.run tests
+
+tests :: Test.Tree
+tests =
+ Test.group
+ "Omni.Agent.Log"
+ [ Test.unit "Parse LogEntry" testParse,
+ Test.unit "Format LogEntry" testFormat
+ ]
+
+testParse :: IO ()
+testParse = do
+ let json = "{\"message\": \"executing 1 tools in 1 batch(es)\", \"batches\": [[\"grep\"]]}"
+ let expected =
+ LogEntry
+ { leMessage = Just "executing 1 tools in 1 batch(es)",
+ leLevel = Nothing,
+ leToolName = Nothing,
+ leBatches = Just [["grep"]],
+ leMethod = Nothing,
+ lePath = Nothing
+ }
+ parseLine json @?= Just expected
+
+testFormat :: IO ()
+testFormat = do
+ let entry =
+ LogEntry
+ { leMessage = Just "executing 1 tools in 1 batch(es)",
+ leLevel = Nothing,
+ leToolName = Nothing,
+ leBatches = Just [["grep"]],
+ leMethod = Nothing,
+ lePath = Nothing
+ }
+ -- Expect NO emoji
+ formatLogEntry entry @?= Just "THOUGHT: Planning tool execution (grep)"
+
+ let entry2 =
+ LogEntry
+ { leMessage = Just "some random log",
+ leLevel = Nothing,
+ leToolName = Nothing,
+ leBatches = Nothing,
+ leMethod = Nothing,
+ lePath = Nothing
+ }
+ formatLogEntry entry2 @?= Nothing
+
+ let entry3 =
+ LogEntry
+ { leMessage = Just "some error",
+ leLevel = Just "error",
+ leToolName = Nothing,
+ leBatches = Nothing,
+ leMethod = Nothing,
+ lePath = Nothing
+ }
+ -- Expect NO emoji
+ formatLogEntry entry3 @?= Just "ERROR: some error"
+
+(@?=) :: (Eq a, Show a) => a -> a -> IO ()
+(@?=) = (Test.@?=)
diff --git a/Omni/Agent/WORKER_AGENT_GUIDE.md b/Omni/Agent/WORKER_AGENT_GUIDE.md
index af81bb0..e832a2a 100644
--- a/Omni/Agent/WORKER_AGENT_GUIDE.md
+++ b/Omni/Agent/WORKER_AGENT_GUIDE.md
@@ -55,13 +55,10 @@ task update t-123 in-progress
2. **Check for Unmerged Work**: Look for dependencies that have existing branches (e.g., `task/t-parent-id`) which are NOT yet merged into `live`.
3. **Select Base**:
* If you find an unmerged dependency branch, check it out: `git checkout task/t-parent-id`.
- * Otherwise, start from fresh live code: `git checkout omni-worker-1` (which tracks `live`).
+ * Otherwise, start from fresh live code: `git checkout -b task/t-123 live`.
-4. **Create/Checkout Feature Branch**:
- ```bash
- # Try to switch to existing branch, otherwise create new one
- git checkout task/t-123 || git checkout -b task/t-123
- ```
+4. **Implement**:
+ (Proceed to implementation)
### Step 4: Implement
@@ -70,29 +67,35 @@ task update t-123 in-progress
3. **Run Tests**: `bild --test Omni/YourNamespace.hs`
### Step 5: Submit for Review
-
-1. **Commit Implementation**:
- ```bash
- git add .
- git commit -m "feat: implement t-123"
- ```
-
-2. **Signal Review Readiness**:
- The Planner checks the `omni-worker-X` branch for status updates. You must switch back and update the status there.
-
- ```bash
- # Switch to base branch
- git checkout omni-worker-1
-
- # Sync to get latest state (and any manual merges)
- ./Omni/Agent/sync-tasks.sh
-
- # Mark task for review
- task update t-123 review
-
- # Commit this status change to the worker branch
- ./Omni/Agent/sync-tasks.sh --commit
- ```
+
+ 1. **Update Status and Commit**:
+ Bundle the task status update with your implementation to keep history clean.
+
+ ```bash
+ # 1. Mark task for review (updates .tasks/tasks.jsonl)
+ task update t-123 review
+
+ # 2. Commit changes + task update
+ git add .
+ git commit -m "feat: implement t-123"
+ ```
+
+ 2. **Signal Review Readiness**:
+ Update the worker branch to signal the planner.
+
+ ```bash
+ # Switch to base branch
+ git checkout omni-worker-1
+
+ # Sync to get latest state
+ ./Omni/Agent/sync-tasks.sh
+
+ # Ensure the task is marked review here too (for harvest visibility)
+ task update t-123 review
+
+ # Commit this status change to the worker branch
+ ./Omni/Agent/sync-tasks.sh --commit
+ ```
*Note: The Planner will now see 't-123' in 'Review' when it runs `harvest-tasks.sh`.*
@@ -103,10 +106,14 @@ The Planner Agent (running in the main repo) will:
2. **Find Reviews**: Run `task list --status=review`.
3. **Review Code**:
* Check out the feature branch: `git checkout task/t-123`.
+ * **Rebase onto Live**: Ensure the branch is up-to-date and linear.
+ ```bash
+ git rebase live
+ ```
* Run tests and review code.
4. **Merge**:
* `git checkout live`
- * `git merge task/t-123`
+ * `git merge task/t-123` (This will now be a fast-forward or clean merge)
5. **Complete**:
* `task update t-123 done`
* `git commit -am "task: t-123 done"`
diff --git a/Omni/Agent/Worker.hs b/Omni/Agent/Worker.hs
new file mode 100644
index 0000000..1cc0b8d
--- /dev/null
+++ b/Omni/Agent/Worker.hs
@@ -0,0 +1,252 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- : out omni-agent-worker
+module Omni.Agent.Worker where
+
+import Alpha
+import qualified Data.Text as Text
+import qualified Data.Text.IO as TIO
+import qualified Omni.Agent.Core as Core
+import qualified Omni.Agent.Git as Git
+import qualified Omni.Agent.Log as AgentLog
+import qualified Omni.Task.Core as TaskCore
+import qualified System.Directory as Directory
+import qualified System.Exit as Exit
+import System.FilePath ((</>))
+import qualified System.IO as IO
+import qualified System.Process as Process
+
+start :: Core.Worker -> IO ()
+start worker = do
+ AgentLog.init (Core.workerName worker)
+ AgentLog.log ("Worker starting loop for " <> Core.workerName worker)
+ loop worker
+
+loop :: Core.Worker -> IO ()
+loop worker = do
+ let repo = Core.workerPath worker
+
+ AgentLog.updateActivity "Syncing tasks..."
+ -- Sync with live first to get latest code and tasks
+  -- syncWithLive panics on conflict; ideally we would catch that exception to
+  -- keep the loop alive, but for now we let the process fail and rely on a
+  -- supervisor or a manual restart.
+ Git.syncWithLive repo
+
+  -- Sync the task database: the rebase above already brought .tasks/tasks.jsonl
+  -- up to date with live, so we just rely on TaskCore.loadTasks reading that
+  -- file. (The bash version ran ./Omni/Agent/sync-tasks.sh, i.e. 'task import';
+  -- merge artifacts are unlikely after a rebase, so no consolidation is needed.)
+
+ -- Find ready work
+ readyTasks <- TaskCore.getReadyTasks
+ case readyTasks of
+ [] -> do
+ AgentLog.updateActivity "No work found, sleeping..."
+ threadDelay (60 * 1000000) -- 60 seconds
+ loop worker
+ (task : _) -> do
+ processTask worker task
+ loop worker
+
+processTask :: Core.Worker -> TaskCore.Task -> IO ()
+processTask worker task = do
+ let repo = Core.workerPath worker
+ let tid = TaskCore.taskId task
+
+ AgentLog.update (\s -> s {AgentLog.statusTask = Just tid})
+ AgentLog.updateActivity ("Claiming task " <> tid)
+
+ -- Claim task
+ TaskCore.updateTaskStatus tid TaskCore.InProgress []
+
+ -- Commit claim locally
+ Git.commit repo ("task: claim " <> tid)
+
+ -- Prepare branch
+ let taskBranch = "task/" <> tid
+ currentBranch <- Git.getCurrentBranch repo
+ if currentBranch == taskBranch
+ then AgentLog.log ("Resuming branch " <> taskBranch)
+ else do
+ exists <- Git.branchExists repo taskBranch
+ if exists
+ then do
+ AgentLog.log ("Switching to existing branch " <> taskBranch)
+ Git.checkout repo taskBranch
+ else do
+ -- Determine base branch from dependencies
+ baseBranch <- findBaseBranch repo task
+ if baseBranch /= "live"
+ then do
+ AgentLog.log ("Basing " <> taskBranch <> " on " <> baseBranch)
+ Git.checkout repo baseBranch
+ else AgentLog.log ("Basing " <> taskBranch <> " on live")
+
+ Git.createBranch repo taskBranch
+
+ -- Run Amp
+ AgentLog.updateActivity "Running Amp agent..."
+ (exitCode, output) <- runAmp repo task
+
+ case exitCode of
+ Exit.ExitSuccess -> do
+ AgentLog.log "Agent finished successfully"
+
+ -- Update status to Review (bundled with feature commit)
+ TaskCore.updateTaskStatus tid TaskCore.Review []
+
+ -- Commit changes
+ -- We use the agent's output as the extended commit description
+ let summary = Text.strip output
+ let commitMsg = "feat: implement " <> tid <> "\n\n" <> summary
+ Git.commit repo commitMsg
+
+ -- Submit for review
+ AgentLog.updateActivity "Submitting for review..."
+
+ -- Switch back to worker base
+ let base = Core.workerName worker
+ Git.checkout repo base
+
+ -- Sync again
+ Git.syncWithLive repo
+
+ -- Update status to Review (for signaling)
+ TaskCore.updateTaskStatus tid TaskCore.Review []
+ Git.commit repo ("task: review " <> tid)
+
+ AgentLog.log ("[✓] Task " <> tid <> " completed")
+ AgentLog.update (\s -> s {AgentLog.statusTask = Nothing})
+ Exit.ExitFailure code -> do
+ AgentLog.log ("Agent failed with code " <> tshow code)
+ AgentLog.updateActivity "Agent failed, retrying..."
+ threadDelay (10 * 1000000) -- Sleep 10s
+
+runAmp :: FilePath -> TaskCore.Task -> IO (Exit.ExitCode, Text)
+runAmp repo task = do
+ let prompt =
+ "You are a Worker Agent.\n"
+ <> "Your goal is to implement the following task:\n\n"
+ <> formatTask task
+ <> "\n\nINSTRUCTIONS:\n"
+ <> "1. Analyze the codebase (use finder/Grep) to understand where to make changes.\n"
+ <> "2. Implement the changes by editing files.\n"
+ <> "3. Run tests to verify your work (e.g., 'bild --test Omni/Namespace').\n"
+ <> "4. Fix any errors found during testing.\n"
+ <> "5. Do NOT update the task status or manage git branches (the system handles that).\n"
+ <> "6. Do NOT run 'git commit'. The system will commit your changes automatically.\n"
+ <> "7. When finished and tested, exit.\n\n"
+ <> "Context:\n"
+ <> "- You are working in '"
+ <> Text.pack repo
+ <> "'.\n"
+ <> "- The task is in namespace '"
+ <> fromMaybe "root" (TaskCore.taskNamespace task)
+ <> "'.\n"
+
+ Directory.createDirectoryIfMissing True (repo </> "_/llm")
+ let logPath = repo </> "_/llm/amp.log"
+
+ -- Ensure log file is empty/exists
+ IO.writeFile logPath ""
+
+ -- Read AGENTS.md
+ agentsMd <-
+ fmap (fromMaybe "") <| do
+ exists <- Directory.doesFileExist (repo </> "AGENTS.md")
+ if exists
+ then Just </ readFile (repo </> "AGENTS.md")
+ else pure Nothing
+
+ let fullPrompt =
+ prompt
+ <> "\n\nREPOSITORY GUIDELINES (AGENTS.md):\n"
+ <> agentsMd
+
+ -- Monitor log file
+ tidLog <- forkIO (monitorLog logPath)
+
+ -- Assume amp is in PATH
+ let args = ["--log-level", "debug", "--log-file", "_/llm/amp.log", "--dangerously-allow-all", "-x", Text.unpack fullPrompt]
+
+ let cp = (Process.proc "amp" args) {Process.cwd = Just repo}
+ (exitCode, out, _err) <- Process.readCreateProcessWithExitCode cp ""
+
+ -- Cleanup
+ killThread tidLog
+
+ pure (exitCode, Text.pack out)
+
+formatTask :: TaskCore.Task -> Text
+formatTask t =
+ "Task: "
+ <> TaskCore.taskId t
+ <> "\n"
+ <> "Title: "
+ <> TaskCore.taskTitle t
+ <> "\n"
+ <> "Type: "
+ <> Text.pack (show (TaskCore.taskType t))
+ <> "\n"
+ <> "Status: "
+ <> Text.pack (show (TaskCore.taskStatus t))
+ <> "\n"
+ <> "Priority: "
+ <> Text.pack (show (TaskCore.taskPriority t))
+ <> "\n"
+ <> maybe "" (\p -> "Parent: " <> p <> "\n") (TaskCore.taskParent t)
+ <> maybe "" (\ns -> "Namespace: " <> ns <> "\n") (TaskCore.taskNamespace t)
+ <> "Created: "
+ <> Text.pack (show (TaskCore.taskCreatedAt t))
+ <> "\n"
+ <> "Updated: "
+ <> Text.pack (show (TaskCore.taskUpdatedAt t))
+ <> "\n"
+ <> maybe "" (\d -> "Description:\n" <> d <> "\n\n") (TaskCore.taskDescription t)
+ <> (if null (TaskCore.taskDependencies t) then "" else "\nDependencies:\n" <> Text.unlines (map formatDep (TaskCore.taskDependencies t)))
+ where
+ formatDep dep = " - " <> TaskCore.depId dep <> " [" <> Text.pack (show (TaskCore.depType dep)) <> "]"
+
+findBaseBranch :: FilePath -> TaskCore.Task -> IO Text
+findBaseBranch repo task = do
+ let deps = TaskCore.taskDependencies task
+ -- Filter for blocking dependencies
+ let blockingDeps = filter (\d -> TaskCore.depType d == TaskCore.Blocks || TaskCore.depType d == TaskCore.ParentChild) deps
+
+ -- Check if any have unmerged branches
+ candidates <-
+ flip filterM blockingDeps <| \dep -> do
+ let branch = "task/" <> TaskCore.depId dep
+ exists <- Git.branchExists repo branch
+ if exists
+ then do
+ merged <- Git.isMerged repo branch "live"
+ pure (not merged)
+ else pure False
+
+ case candidates of
+ (candidate : _) -> pure ("task/" <> TaskCore.depId candidate)
+ [] -> pure "live"
+
+monitorLog :: FilePath -> IO ()
+monitorLog path = do
+ -- Wait for file to exist
+ waitForFile path
+
+ IO.withFile path IO.ReadMode <| \h -> do
+ IO.hSetBuffering h IO.LineBuffering
+ forever <| do
+ eof <- IO.hIsEOF h
+ if eof
+ then threadDelay 100000 -- 0.1s
+ else do
+ line <- TIO.hGetLine h
+ AgentLog.processLogLine line
+
+waitForFile :: FilePath -> IO ()
+waitForFile p = do
+ e <- Directory.doesFileExist p
+ if e then pure () else threadDelay 100000 >> waitForFile p
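The comment in `loop` notes that `Git.syncWithLive` panics on conflict and that catching the exception would be preferable to crashing the worker. A minimal sketch of that idea, assuming `Control.Exception` from base and the `Alpha` prelude's `tshow`/`threadDelay`; the helper `iterateSafely` is hypothetical and not part of this diff:

```haskell
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}

import Alpha
import Control.Exception (SomeException, try)
import qualified Omni.Agent.Log as AgentLog

-- Run one loop iteration; if the body panics (e.g. a rebase conflict inside
-- syncWithLive), log it and back off instead of killing the worker process.
iterateSafely :: IO () -> IO ()
iterateSafely body = do
  result <- try body :: IO (Either SomeException ())
  case result of
    Left err -> do
      AgentLog.log ("Iteration failed, backing off: " <> tshow err)
      threadDelay (60 * 1000000)
    Right () -> pure ()
```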
diff --git a/Omni/Agent/harvest-tasks.sh b/Omni/Agent/harvest-tasks.sh
index 282beab..44c2322 100755
--- a/Omni/Agent/harvest-tasks.sh
+++ b/Omni/Agent/harvest-tasks.sh
@@ -45,7 +45,14 @@ if [ "$UPDATED" -eq 1 ]; then
# Commit if there are changes
if [[ -n $(git status --porcelain .tasks/tasks.jsonl) ]]; then
git add .tasks/tasks.jsonl
- git commit -m "task: harvest updates from workers"
+
+ LAST_MSG=$(git log -1 --pretty=%s)
+ if [[ "$LAST_MSG" == "task: harvest updates from workers" ]]; then
+ echo "Squashing with previous harvest commit..."
+ git commit --amend --no-edit
+ else
+ git commit -m "task: harvest updates from workers"
+ fi
echo "Success: Task database updated and committed."
else
echo "No effective changes found."
diff --git a/Omni/Agent/monitor.sh b/Omni/Agent/monitor.sh
new file mode 100755
index 0000000..1626354
--- /dev/null
+++ b/Omni/Agent/monitor.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Omni/Agent/monitor.sh
+# Monitor the logs of a worker agent
+# Usage: ./Omni/Agent/monitor.sh [worker-name]
+
+WORKER="${1:-omni-worker-1}"
+REPO_ROOT="$(git rev-parse --show-toplevel)"
+WORKER_DIR="$REPO_ROOT/../$WORKER"
+
+if [ ! -d "$WORKER_DIR" ]; then
+ echo "Error: Worker directory '$WORKER_DIR' not found."
+ echo "Usage: $0 [worker-name]"
+ exit 1
+fi
+
+LOG_FILE="$WORKER_DIR/_/llm/amp.log"
+
+echo "Monitoring worker: $WORKER"
+echo "Watching log: $LOG_FILE"
+echo "---------------------------------------------------"
+
+# Wait for log file to appear
+while [ ! -f "$LOG_FILE" ]; do
+ echo "Waiting for log file to be created..."
+ sleep 2
+done
+
+# Tail the log file
+tail -f "$LOG_FILE"
diff --git a/Omni/Agent/setup-worker.sh b/Omni/Agent/setup-worker.sh
index 28c29b1..42b7fc9 100755
--- a/Omni/Agent/setup-worker.sh
+++ b/Omni/Agent/setup-worker.sh
@@ -22,3 +22,10 @@ if [ -f "$REPO_ROOT/.envrc.local" ]; then
echo "Copying .envrc.local..."
cp "$REPO_ROOT/.envrc.local" "$WORKTREE_PATH/"
fi
+
+# Configure git identity for the worker
+echo "Configuring git identity for worker..."
+git -C "$WORKTREE_PATH" config user.name "Omni Worker"
+git -C "$WORKTREE_PATH" config user.email "bot@omni.agent"
+
+echo "Worker setup complete at $WORKTREE_PATH"
diff --git a/Omni/Agent/start-worker.sh b/Omni/Agent/start-worker.sh
index ad519a0..310ca56 100755
--- a/Omni/Agent/start-worker.sh
+++ b/Omni/Agent/start-worker.sh
@@ -34,157 +34,30 @@ if [ ! -x "$TASK_BIN" ]; then
echo "Warning: Task binary not found at '$TASK_BIN'. Assuming it's in path or build it first."
fi
-echo "Starting Worker Agent Loop"
-echo " Worker Path: $WORKER_PATH"
-echo " Amp Binary: $AMP_BIN"
-echo " Log File: $WORKER_PATH/_/llm/amp.log"
-echo " Monitor: tail -f $WORKER_PATH/_/llm/amp.log"
-echo " Press Ctrl+C to stop."
-
-# Function to sync tasks safely
-sync_tasks() {
- "$MAIN_REPO/Omni/Agent/sync-tasks.sh" "$@"
-}
+# Ensure worker has local task and agent binaries
+mkdir -p "$WORKER_PATH/_/bin"
-cd "$WORKER_PATH"
-
-# 3. The Worker Loop
-while true; do
- echo "----------------------------------------------------------------"
- echo "$(date): Syncing and checking for work..."
-
- # A. Sync with Live
- # We use 'git rebase' to keep history linear
- # Force checkout to clean up any untracked files from previous runs
- git checkout -f omni-worker-1 >/dev/null 2>&1
-
- # Rebase directly on local live branch (shared repo)
- if ! git rebase live >/dev/null 2>&1; then
- echo "Warning: Rebase conflict at start of loop. Aborting rebase and proceeding with local state."
- git rebase --abort || true
- fi
-
- # B. Sync Tasks
- sync_tasks
-
- # C. Find Ready Work
- # We use jq to parse the first task
- # Note: task ready --json returns an array [...]
- TASK_JSON=$("$TASK_BIN" ready --json 2>/dev/null | jq -r '.[0] // empty')
-
- if [ -z "$TASK_JSON" ]; then
- echo "$(date): No ready tasks. Sleeping for 60s..."
- sleep 60
- continue
- fi
-
- TASK_ID=$(echo "$TASK_JSON" | jq -r '.taskId')
- TASK_TITLE=$(echo "$TASK_JSON" | jq -r '.taskTitle')
- TASK_NS=$(echo "$TASK_JSON" | jq -r '.taskNamespace // "root"')
-
- # Verify against live state to prevent re-claiming completed work
- # (This handles cases where local 'InProgress' timestamp > live 'Review' timestamp due to retries)
- git show live:.tasks/tasks.jsonl > .tasks/temp-live-tasks.jsonl 2>/dev/null
- LIVE_TASK=$(grep "\"taskId\":\"$TASK_ID\"" .tasks/temp-live-tasks.jsonl || true)
- LIVE_STATUS=$(echo "$LIVE_TASK" | jq -r '.taskStatus // empty')
- rm -f .tasks/temp-live-tasks.jsonl
-
- if [[ "$LIVE_STATUS" == "Review" ]] || [[ "$LIVE_STATUS" == "Done" ]]; then
- echo "Task $TASK_ID is already $LIVE_STATUS in live. Skipping and updating local state."
- # Force update local DB to match live for this task
- # We can't easily use 'task update' because it updates timestamp.
- # Instead, we just rely on the loop continuing and hopefully 'task import' eventually winning
- # if we stop touching it. Or we could force import again.
- sleep 60
- continue
- fi
-
- echo "$(date): Claiming task $TASK_ID: $TASK_TITLE"
-
- # D. Claim Task
- "$TASK_BIN" update "$TASK_ID" in-progress >/dev/null
- sync_tasks --commit >/dev/null
-
- # E. Prepare Branch
- BRANCH_NAME="task/$TASK_ID"
- if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME"; then
- echo "Resuming existing branch $BRANCH_NAME"
- # Force checkout to overwrite untracked files (like .tasks/counters.jsonl)
- # that may have been generated by sync tools but are tracked in the branch.
- git checkout -f "$BRANCH_NAME" >/dev/null
- else
- echo "Creating new branch $BRANCH_NAME"
- git checkout -b "$BRANCH_NAME" >/dev/null
- fi
-
- # F. Execute Agent
- echo "Launching Amp to implement task..."
-
- TASK_DETAILS=$("$TASK_BIN" show "$TASK_ID")
-
- # We construct a specific prompt for the agent
- PROMPT="You are a Worker Agent.
-Your goal is to implement the following task:
+echo "Building 'task' in worker..."
+if ! (cd "$WORKER_PATH" && bild Omni/Task.hs); then
+ echo "Error: Failed to build 'task' in worker directory."
+ exit 1
+fi
-$TASK_DETAILS
+echo "Building 'agent' in worker..."
+if ! (cd "$WORKER_PATH" && bild Omni/Agent.hs); then
+ echo "Error: Failed to build 'agent' in worker directory."
+ exit 1
+fi
-INSTRUCTIONS:
-1. Analyze the codebase (use finder/Grep) to understand where to make changes.
-2. Implement the changes by editing files.
-3. Run tests to verify your work (e.g., 'bild --test Omni/Namespace').
-4. Fix any errors found during testing.
-5. Do NOT update the task status or manage git branches (the system handles that).
-6. When finished and tested, exit.
+echo "Starting Worker Agent (Haskell)"
+echo " Worker Path: $WORKER_PATH"
+echo " Agent Bin: $WORKER_PATH/_/bin/agent"
+echo " Log File: $WORKER_PATH/_/llm/amp.log"
+echo " Monitor: ./Omni/Agent/monitor.sh $TARGET"
+echo " Press Ctrl+C to stop."
-Context:
-- You are working in '$WORKER_PATH'.
-- The task is in namespace '$TASK_NS'.
-"
+# Add amp to PATH so the agent can find it
+export PATH="$MAIN_REPO/node_modules/.bin:$PATH"
- mkdir -p _/llm
- "$AMP_BIN" --log-level debug --log-file "_/llm/amp.log" --dangerously-allow-all -x "$PROMPT"
-
- AGENT_EXIT_CODE=$?
-
- if [ $AGENT_EXIT_CODE -eq 0 ]; then
- echo "Agent finished successfully."
-
- # G. Submit Work
- if [ -n "$(git status --porcelain)" ]; then
- echo "Committing changes..."
- git add .
- git commit -m "feat: implement $TASK_ID" || true
- else
- echo "No changes to commit."
- fi
-
- echo "Submitting for review..."
- # Switch back to base
- git checkout omni-worker-1 >/dev/null
-
- # Sync again (rebase on latest live)
- # If rebase fails, we MUST abort to avoid leaving the repo in a broken state
- if ! git rebase live >/dev/null 2>&1; then
- echo "Warning: Rebase conflict. Aborting rebase and proceeding with local state."
- git rebase --abort || true
- fi
-
- sync_tasks
-
- # Update status
- echo "Marking task $TASK_ID as Review..."
- if "$TASK_BIN" update "$TASK_ID" review; then
- sync_tasks --commit >/dev/null
- echo "Task $TASK_ID submitted for review."
- else
- echo "Error: Failed to update task status to Review."
- fi
-
- else
- echo "Agent failed (exit code $AGENT_EXIT_CODE). Sleeping for 10s before retrying..."
- sleep 10
- fi
-
- echo "Cooldown..."
- sleep 5
-done
+# Run the agent
+"$WORKER_PATH/_/bin/agent" start "$TARGET" --path "$WORKER_PATH"
diff --git a/Omni/Bild.hs b/Omni/Bild.hs
index 53d96a5..8d00936 100644
--- a/Omni/Bild.hs
+++ b/Omni/Bild.hs
@@ -185,7 +185,7 @@ main = Cli.Plan help move test_ pure |> Cli.main
test_bildBild :: Test.Tree
test_bildBild =
Test.unit "can bild bild" <| do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
path <- Dir.makeAbsolute "Omni/Bild.hs"
case Namespace.fromPath root path of
Nothing -> Test.assertFailure "can't find ns for bild"
@@ -201,7 +201,7 @@ test_bildBild =
test_bildExamples :: Test.Tree
test_bildExamples =
Test.unit "can bild examples" <| do
- Env.getEnv "CODEROOT" +> \root ->
+ getCoderoot +> \root ->
["c", "hs", "lisp", "rs"]
|> map ("Omni/Bild/Example." <>)
|> traverse Dir.makeAbsolute
@@ -216,7 +216,7 @@ test_bildExamples =
move :: Cli.Arguments -> IO ()
move args = do
IO.hSetBuffering stdout IO.NoBuffering
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
loadGhcPkgCache
namespaces <-
Cli.getAllArgs args (Cli.argument "target")
@@ -322,10 +322,10 @@ test_isGitHook =
Test.group
"isGitHook"
[ Test.unit "filters pre-commit hook" <| do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
True @=? (isGitHook <| root <> "/Omni/Ide/hooks/pre-commit"),
Test.unit "doesn't filter non-hooks" <| do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
False @=? (isGitHook <| root <> "/Omni/Bild.hs")
]
@@ -458,7 +458,7 @@ data HsModuleGraph = HsModuleGraph
-- | Use this to just get a target to play with at the repl.
dev_getTarget :: FilePath -> IO Target
dev_getTarget fp = do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
path <- Dir.makeAbsolute fp
Namespace.fromPath root path
|> \case
@@ -569,7 +569,7 @@ analyzeAll isPlanMode nss = do
analyzeOne :: Namespace -> IO (Maybe Target)
analyzeOne namespace@(Namespace parts ext) = do
let path = Namespace.toPath namespace
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
let abspath = root </> path
let quapath = path
user <- Env.getEnv "USER" /> Text.pack
@@ -653,7 +653,8 @@ analyzeAll isPlanMode nss = do
contentLines
|> Meta.detectAll "--"
|> \Meta.Parsed {..} ->
- detectHaskellImports mempty contentLines +> \(langdeps, srcs) -> do
+ detectHaskellImports mempty contentLines +> \(autoDeps, srcs) -> do
+ let langdeps = autoDeps <> pdep
graph <- buildHsModuleGraph namespace quapath srcs
pure
<| Just
@@ -833,7 +834,7 @@ analyzeAll isPlanMode nss = do
detectHaskellImports :: Analysis -> [Text] -> IO (Set Meta.Dep, Set FilePath)
detectHaskellImports _ contentLines = do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
let initialMods = catMaybes (Regex.match haskellImports </ (Text.unpack </ contentLines))
initialLocals <- toLocalFiles root initialMods
let initialLocalsSet = Set.fromList initialLocals
@@ -884,7 +885,7 @@ detectLispImports contentLines =
-- 'detectHaskellImports'.
detectPythonImports :: Analysis -> [Text] -> IO (Set FilePath)
detectPythonImports _ contentLines = do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
let initialMods = catMaybes (Regex.match pythonImport </ (Text.unpack </ contentLines))
initialLocals <- toLocalFiles root initialMods
bfs root (Set.fromList initialLocals) Set.empty
@@ -984,7 +985,7 @@ ghcPkgCacheHash = do
ghcPkgCachePath :: IO (Maybe FilePath)
ghcPkgCachePath = do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
fmap (\h -> root </> vardir </> ("ghc-pkg-cache-" <> h <> ".json")) </ ghcPkgCacheHash
loadGhcPkgCache :: IO ()
@@ -1041,7 +1042,7 @@ ghcPkgFindModule acc m =
-- | Build module graph for Haskell targets, returns Nothing if TH or cycles detected
buildHsModuleGraph :: Namespace -> FilePath -> Set FilePath -> IO (Maybe HsModuleGraph)
buildHsModuleGraph namespace entryPoint deps = do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
-- Analyze all dependencies first
depNodes <- foldM (analyzeModule root) Map.empty (Set.toList deps)
-- Then analyze the entry point itself
@@ -1123,7 +1124,7 @@ isSuccess _ = False
test :: Bool -> Target -> IO (Exit.ExitCode, ByteString)
test loud Target {..} =
- Env.getEnv "CODEROOT"
+ getCoderoot
+> \root -> case compiler of
Ghc ->
Proc
@@ -1152,7 +1153,7 @@ test loud Target {..} =
build :: Bool -> Bool -> Int -> Int -> Analysis -> IO [Exit.ExitCode]
build andTest loud jobs cpus analysis = do
- root <- Env.getEnv "CODEROOT"
+ root <- getCoderoot
let targets = Map.elems analysis
let namespaces = map (\Target {..} -> namespace) targets
-- Use adaptive concurrent UI unless --loud is set
@@ -1344,7 +1345,7 @@ lispRequires =
nixBuild :: Bool -> Int -> Int -> Target -> IO (Exit.ExitCode, ByteString)
nixBuild loud maxJobs cores target@(Target {..}) =
- Env.getEnv "CODEROOT" +> \root ->
+ getCoderoot +> \root ->
instantiate root |> run +> \case
(_, "") -> panic "instantiate did not produce a drv"
(Exit.ExitSuccess, drv) ->
@@ -1410,3 +1411,41 @@ nixBuild loud maxJobs cores target@(Target {..}) =
onFailure = Log.fail ["bild", "symlink", nschunk namespace] >> Log.br,
onSuccess = pure ()
}
+
+getCoderoot :: IO FilePath
+getCoderoot = do
+ mEnvRoot <- Env.lookupEnv "CODEROOT"
+ cwd <- Dir.getCurrentDirectory
+ case mEnvRoot of
+ Just envRoot -> do
+ let isPrefix = envRoot `List.isPrefixOf` cwd
+ let validPrefix =
+ isPrefix
+ && ( length envRoot
+ == length cwd
+ || (length cwd > length envRoot && (List.!!) cwd (length envRoot) == '/')
+ )
+ if validPrefix
+ then pure envRoot
+ else do
+ mRealRoot <- findRoot cwd
+ case mRealRoot of
+ Just realRoot -> pure realRoot
+ Nothing -> pure envRoot
+ Nothing -> do
+ mRealRoot <- findRoot cwd
+ case mRealRoot of
+ Just realRoot -> pure realRoot
+ Nothing -> panic "CODEROOT not set and could not find root"
+
+findRoot :: FilePath -> IO (Maybe FilePath)
+findRoot dir = do
+ let marker = dir </> "Omni"
+ exists <- Dir.doesDirectoryExist marker
+ if exists
+ then pure (Just dir)
+ else do
+ let parent = takeDirectory dir
+ if parent == dir
+ then pure Nothing
+ else findRoot parent
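The subtle part of the new `getCoderoot` is the prefix check: `CODEROOT` is only trusted when it is a prefix of the current directory at a path-component boundary, so a sibling worker checkout such as `omni-worker-1` does not inherit the main repo's root. A hedged illustration of that predicate in isolation (the paths are hypothetical examples, not taken from this diff):

```haskell
import Data.List (isPrefixOf)

-- Accept envRoot only when it is the cwd itself, or a proper prefix of cwd
-- that ends exactly at a '/' boundary.
validPrefix :: FilePath -> FilePath -> Bool
validPrefix envRoot cwd =
  envRoot `isPrefixOf` cwd
    && (length envRoot == length cwd || cwd !! length envRoot == '/')

main :: IO ()
main = mapM_ print
  [ validPrefix "/home/u/omni" "/home/u/omni",            -- True
    validPrefix "/home/u/omni" "/home/u/omni/Omni/Bild",  -- True
    validPrefix "/home/u/omni" "/home/u/omni-worker-1"    -- False: falls back to findRoot
  ]
```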
diff --git a/Omni/Bild/Audit.py b/Omni/Bild/Audit.py
new file mode 100755
index 0000000..4df6c0b
--- /dev/null
+++ b/Omni/Bild/Audit.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python3
+"""
+Audit codebase builds.
+
+Iterates through every namespace in the project and runs 'bild'.
+For every build failure encountered, it automatically creates a new task.
+"""
+
+# : out bild-audit
+
+import argparse
+import re
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+
+# Extensions supported by bild (from Omni/Bild.hs and Omni/Namespace.hs)
+EXTENSIONS = {".c", ".hs", ".lisp", ".nix", ".py", ".scm", ".rs", ".toml"}
+MAX_TITLE_LENGTH = 50
+
+
+def strip_ansi(text: str) -> str:
+ """Strip ANSI escape codes from text."""
+ ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
+ return ansi_escape.sub("", text)
+
+
+def is_ignored(path: Path) -> bool:
+ """Check if a file is ignored by git."""
+ res = subprocess.run(
+ ["git", "check-ignore", str(path)],
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ check=False,
+ )
+ return res.returncode == 0
+
+
+def get_buildable_files(root_dir: str = ".") -> list[str]:
+ """Find all files that bild can build."""
+ targets: list[str] = []
+
+ root = Path(root_dir)
+ if not root.exists():
+ return []
+
+ for path in root.rglob("*"):
+ # Skip directories
+ if path.is_dir():
+ continue
+
+ # Skip hidden files/dirs and '_' dirs
+ parts = path.parts
+ if any(p.startswith(".") or p == "_" for p in parts):
+ continue
+
+ if path.suffix in EXTENSIONS:
+ # Clean up path: keep it relative to cwd if possible
+ try:
+ # We want the path as a string, relative to current directory
+ # if possible
+ p_str = (
+ str(path.relative_to(Path.cwd()))
+ if path.is_absolute()
+ else str(path)
+ )
+ except ValueError:
+ p_str = str(path)
+
+ if not is_ignored(Path(p_str)):
+ targets.append(p_str)
+ return targets
+
+
+def run_bild(target: str) -> subprocess.CompletedProcess[str]:
+ """Run bild on the target."""
+ # --time 0 disables timeout
+ # --loud enables output (which we capture)
+ cmd = ["bild", "--time", "0", "--loud", target]
+ return subprocess.run(cmd, capture_output=True, text=True, check=False)
+
+
+def create_task(
+ target: str,
+ result: subprocess.CompletedProcess[str],
+ parent_id: str | None = None,
+) -> None:
+ """Create a task for a build failure."""
+ # Construct a descriptive title
+ # Try to get the last meaningful line of error output
+ lines = (result.stdout + result.stderr).strip().split("\n")
+ last_line = lines[-1] if lines else "Unknown error"
+ last_line = strip_ansi(last_line).strip()
+
+ if len(last_line) > MAX_TITLE_LENGTH:
+ last_line = last_line[: MAX_TITLE_LENGTH - 3] + "..."
+
+ title = f"Build failed: {target} - {last_line}"
+
+ cmd = ["task", "create", title, "--priority", "2", "--json"]
+
+ if parent_id:
+ cmd.append(f"--discovered-from={parent_id}")
+
+ # Try to infer namespace
+ # e.g. Omni/Bild.hs -> Omni/Bild
+ ns = Path(target).parent
+ if str(ns) != ".":
+ cmd.append(f"--namespace={ns}")
+
+ print(f"Creating task for {target}...") # noqa: T201
+ proc = subprocess.run(cmd, capture_output=True, text=True, check=False)
+
+ if proc.returncode != 0:
+ print(f"Error creating task: {proc.stderr}", file=sys.stderr) # noqa: T201
+ else:
+ # task create --json returns the created task json
+ print(f"Task created: {proc.stdout.strip()}") # noqa: T201
+
+
+def main() -> None:
+ """Run the build audit."""
+ parser = argparse.ArgumentParser(description="Audit codebase builds.")
+ parser.add_argument(
+ "--parent",
+ help="Parent task ID to link discovered tasks to",
+ )
+ parser.add_argument(
+ "paths",
+ nargs="*",
+ default=["."],
+ help="Paths to search for targets",
+ )
+ args = parser.parse_args()
+
+ # Check if bild is available
+ if not shutil.which("bild"):
+ print( # noqa: T201
+ "Warning: 'bild' command not found. Ensure it is in PATH.",
+ file=sys.stderr,
+ )
+
+ print(f"Scanning for targets in {args.paths}...") # noqa: T201
+ targets: list[str] = []
+ for path_str in args.paths:
+ path = Path(path_str)
+ if path.is_file():
+ targets.append(str(path))
+ else:
+ targets.extend(get_buildable_files(path_str))
+
+ # Remove duplicates
+ targets = sorted(set(targets))
+ print(f"Found {len(targets)} targets.") # noqa: T201
+
+ failures = 0
+ for target in targets:
+ res = run_bild(target)
+
+ if res.returncode == 0:
+ print("OK") # noqa: T201
+ else:
+ print("FAIL") # noqa: T201
+ failures += 1
+ create_task(target, res, args.parent)
+
+ print(f"\nAudit complete. {failures} failures found.") # noqa: T201
+ if failures > 0:
+ sys.exit(1)
+ else:
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/Omni/Bild/Deps/Haskell.nix b/Omni/Bild/Deps/Haskell.nix
index 5d6abbb..6930860 100644
--- a/Omni/Bild/Deps/Haskell.nix
+++ b/Omni/Bild/Deps/Haskell.nix
@@ -54,6 +54,7 @@
"tasty"
"tasty-hunit"
"tasty-quickcheck"
+ "temporary"
"text"
"time"
"transformers"
diff --git a/Omni/Namespace.hs b/Omni/Namespace.hs
index 5884507..a0f8a8e 100644
--- a/Omni/Namespace.hs
+++ b/Omni/Namespace.hs
@@ -111,7 +111,10 @@ toHaskellModule :: Namespace -> String
toHaskellModule = toModule
fromHaskellModule :: String -> Namespace
-fromHaskellModule s = Namespace (List.splitOn "." s) Hs
+fromHaskellModule s =
+ let s' = if ".hs" `List.isSuffixOf` s then List.take (length s - 3) s else s
+ s'' = map (\c -> if c == '/' then '.' else c) s'
+ in Namespace (List.splitOn "." s'') Hs
toSchemeModule :: Namespace -> String
toSchemeModule = toModule
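The widened `fromHaskellModule` now accepts file-path spellings as well as dotted module names (this is what lets `task create --namespace=Omni/Task` work in the CLI changes below). A hedged sketch of the normalization it performs, inferred from the change above rather than from any test in this diff:

```haskell
-- Expected normalizations (assumptions, not asserted by a test here):
--   fromHaskellModule "Omni.Task"    == Namespace ["Omni", "Task"] Hs
--   fromHaskellModule "Omni/Task"    == Namespace ["Omni", "Task"] Hs
--   fromHaskellModule "Omni/Task.hs" == Namespace ["Omni", "Task"] Hs
```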
diff --git a/Omni/Task.hs b/Omni/Task.hs
index 24e528b..e1457fb 100644
--- a/Omni/Task.hs
+++ b/Omni/Task.hs
@@ -13,11 +13,13 @@ import qualified Data.Text as T
import qualified Omni.Cli as Cli
import qualified Omni.Namespace as Namespace
import Omni.Task.Core
+import qualified Omni.Task.RaceTest as RaceTest
import qualified Omni.Test as Test
import qualified System.Console.Docopt as Docopt
import System.Directory (doesFileExist, removeFile)
import System.Environment (setEnv)
import System.Process (callCommand)
+import qualified Test.Tasty as Tasty
main :: IO ()
main = Cli.main plan
@@ -42,10 +44,11 @@ Usage:
task list [options]
task ready [--json]
task show <id> [--json]
- task update <id> <status> [--json]
+ task update <id> <status> [options]
task deps <id> [--json]
task tree [<id>] [--json]
- task stats [--json]
+ task progress <id> [--json]
+ task stats [--epic=<id>] [--json]
task export [--flush]
task import -i <file>
task sync
@@ -61,6 +64,7 @@ Commands:
update Update task status
deps Show dependency tree
tree Show task tree (epics with children, or all epics if no ID given)
+ progress Show progress for an epic
stats Show task statistics
export Export and consolidate tasks to JSONL
import Import tasks from JSONL file
@@ -73,10 +77,12 @@ Options:
--parent=<id> Parent epic ID
--priority=<p> Priority: 0-4 (0=critical, 4=backlog, default: 2)
--status=<status> Filter by status: open, in-progress, review, done
+ --epic=<id> Filter stats by epic (recursive)
--deps=<ids> Comma-separated list of dependency IDs
--dep-type=<type> Dependency type: blocks, discovered-from, parent-child, related (default: blocks)
--discovered-from=<id> Shortcut for --deps=<id> --dep-type=discovered-from
--namespace=<ns> Optional namespace (e.g., Omni/Task, Biz/Cloud)
+ --description=<desc> Task description
--flush Force immediate export
--json Output in JSON format (for agent use)
--quiet Non-interactive mode (for agents)
@@ -154,7 +160,12 @@ move args
let validNs = Namespace.fromHaskellModule ns
nsPath = T.pack <| Namespace.toPath validNs
pure <| Just nsPath
- createdTask <- createTask title taskType parent namespace priority deps
+
+ description <- case Cli.getArg args (Cli.longOption "description") of
+ Nothing -> pure Nothing
+ Just d -> pure <| Just (T.pack d)
+
+ createdTask <- createTask title taskType parent namespace priority deps description
if isJsonMode args
then outputJson createdTask
else putStrLn <| "Created task: " <> T.unpack (taskId createdTask)
@@ -194,22 +205,39 @@ move args
| args `Cli.has` Cli.command "show" = do
tid <- getArgText args "id"
tasks <- loadTasks
- case filter (\t -> taskId t == tid) tasks of
- [] -> putText "Task not found"
- (task : _) ->
+ case findTask tid tasks of
+ Nothing -> putText "Task not found"
+ Just task ->
if isJsonMode args
then outputJson task
else showTaskDetailed task
| args `Cli.has` Cli.command "update" = do
tid <- getArgText args "id"
statusStr <- getArgText args "status"
+
+ -- Handle update dependencies
+ deps <- do
+ -- Parse --deps and --dep-type
+ ids <- case Cli.getArg args (Cli.longOption "deps") of
+ Nothing -> pure []
+ Just depStr -> pure <| T.splitOn "," (T.pack depStr)
+ dtype <- case Cli.getArg args (Cli.longOption "dep-type") of
+ Nothing -> pure Blocks
+ Just "blocks" -> pure Blocks
+ Just "discovered-from" -> pure DiscoveredFrom
+ Just "parent-child" -> pure ParentChild
+ Just "related" -> pure Related
+ Just other -> panic <| "Invalid dependency type: " <> T.pack other <> ". Use: blocks, discovered-from, parent-child, or related"
+ pure (map (\d -> Dependency {depId = d, depType = dtype}) ids)
+
let newStatus = case statusStr of
"open" -> Open
"in-progress" -> InProgress
"review" -> Review
"done" -> Done
_ -> panic "Invalid status. Use: open, in-progress, review, or done"
- updateTaskStatus tid newStatus
+
+ updateTaskStatus tid newStatus deps
if isJsonMode args
then outputSuccess <| "Updated task " <> tid
else do
@@ -231,12 +259,22 @@ move args
tree <- getTaskTree maybeId
outputJson tree
else showTaskTree maybeId
+ | args `Cli.has` Cli.command "progress" = do
+ tid <- getArgText args "id"
+ if isJsonMode args
+ then do
+ progress <- getTaskProgress tid
+ outputJson progress
+ else showTaskProgress tid
| args `Cli.has` Cli.command "stats" = do
+ maybeEpic <- case Cli.getArg args (Cli.longOption "epic") of
+ Nothing -> pure Nothing
+ Just e -> pure <| Just (T.pack e)
if isJsonMode args
then do
- stats <- getTaskStats
+ stats <- getTaskStats maybeEpic
outputJson stats
- else showTaskStats
+ else showTaskStats maybeEpic
| args `Cli.has` Cli.command "export" = do
exportTasks
putText "Exported and consolidated tasks to .tasks/tasks.jsonl"
@@ -263,7 +301,13 @@ move args
Just val -> pure (T.pack val)
test :: Test.Tree
-test = Test.group "Omni.Task" [unitTests, cliTests]
+test =
+ Test.group
+ "Omni.Task"
+ [ unitTests,
+ cliTests,
+ Tasty.after Tasty.AllSucceed "Unit tests" RaceTest.test
+ ]
unitTests :: Test.Tree
unitTests =
@@ -280,79 +324,121 @@ unitTests =
initTaskDb
True Test.@?= True,
Test.unit "can create task" <| do
- task <- createTask "Test task" WorkTask Nothing Nothing P2 []
+ task <- createTask "Test task" WorkTask Nothing Nothing P2 [] Nothing
taskTitle task Test.@?= "Test task"
taskType task Test.@?= WorkTask
taskStatus task Test.@?= Open
taskPriority task Test.@?= P2
null (taskDependencies task) Test.@?= True,
+ Test.unit "can create task with description" <| do
+ task <- createTask "Test task" WorkTask Nothing Nothing P2 [] (Just "My description")
+ taskDescription task Test.@?= Just "My description",
Test.unit "can list tasks" <| do
- _ <- createTask "Test task for list" WorkTask Nothing Nothing P2 []
+ _ <- createTask "Test task for list" WorkTask Nothing Nothing P2 [] Nothing
tasks <- listTasks Nothing Nothing Nothing Nothing
not (null tasks) Test.@?= True,
Test.unit "ready tasks exclude blocked ones" <| do
- task1 <- createTask "First task" WorkTask Nothing Nothing P2 []
+ task1 <- createTask "First task" WorkTask Nothing Nothing P2 [] Nothing
let blockingDep = Dependency {depId = taskId task1, depType = Blocks}
- task2 <- createTask "Blocked task" WorkTask Nothing Nothing P2 [blockingDep]
+ task2 <- createTask "Blocked task" WorkTask Nothing Nothing P2 [blockingDep] Nothing
ready <- getReadyTasks
(taskId task1 `elem` map taskId ready) Test.@?= True
(taskId task2 `notElem` map taskId ready) Test.@?= True,
Test.unit "discovered-from dependencies don't block" <| do
- task1 <- createTask "Original task" WorkTask Nothing Nothing P2 []
+ task1 <- createTask "Original task" WorkTask Nothing Nothing P2 [] Nothing
let discDep = Dependency {depId = taskId task1, depType = DiscoveredFrom}
- task2 <- createTask "Discovered work" WorkTask Nothing Nothing P2 [discDep]
+ task2 <- createTask "Discovered work" WorkTask Nothing Nothing P2 [discDep] Nothing
ready <- getReadyTasks
-- Both should be ready since DiscoveredFrom doesn't block
(taskId task1 `elem` map taskId ready) Test.@?= True
(taskId task2 `elem` map taskId ready) Test.@?= True,
Test.unit "related dependencies don't block" <| do
- task1 <- createTask "Task A" WorkTask Nothing Nothing P2 []
+ task1 <- createTask "Task A" WorkTask Nothing Nothing P2 [] Nothing
let relDep = Dependency {depId = taskId task1, depType = Related}
- task2 <- createTask "Task B" WorkTask Nothing Nothing P2 [relDep]
+ task2 <- createTask "Task B" WorkTask Nothing Nothing P2 [relDep] Nothing
ready <- getReadyTasks
-- Both should be ready since Related doesn't block
(taskId task1 `elem` map taskId ready) Test.@?= True
(taskId task2 `elem` map taskId ready) Test.@?= True,
Test.unit "child task gets sequential ID" <| do
- parent <- createTask "Parent" Epic Nothing Nothing P2 []
- child1 <- createTask "Child 1" WorkTask (Just (taskId parent)) Nothing P2 []
- child2 <- createTask "Child 2" WorkTask (Just (taskId parent)) Nothing P2 []
+ parent <- createTask "Parent" Epic Nothing Nothing P2 [] Nothing
+ child1 <- createTask "Child 1" WorkTask (Just (taskId parent)) Nothing P2 [] Nothing
+ child2 <- createTask "Child 2" WorkTask (Just (taskId parent)) Nothing P2 [] Nothing
taskId child1 Test.@?= taskId parent <> ".1"
taskId child2 Test.@?= taskId parent <> ".2",
Test.unit "grandchild task gets sequential ID" <| do
- parent <- createTask "Grandparent" Epic Nothing Nothing P2 []
- child <- createTask "Parent" Epic (Just (taskId parent)) Nothing P2 []
- grandchild <- createTask "Grandchild" WorkTask (Just (taskId child)) Nothing P2 []
+ parent <- createTask "Grandparent" Epic Nothing Nothing P2 [] Nothing
+ child <- createTask "Parent" Epic (Just (taskId parent)) Nothing P2 [] Nothing
+ grandchild <- createTask "Grandchild" WorkTask (Just (taskId child)) Nothing P2 [] Nothing
taskId grandchild Test.@?= taskId parent <> ".1.1",
Test.unit "siblings of grandchild task get sequential ID" <| do
- parent <- createTask "Grandparent" Epic Nothing Nothing P2 []
- child <- createTask "Parent" Epic (Just (taskId parent)) Nothing P2 []
- grandchild1 <- createTask "Grandchild 1" WorkTask (Just (taskId child)) Nothing P2 []
- grandchild2 <- createTask "Grandchild 2" WorkTask (Just (taskId child)) Nothing P2 []
+ parent <- createTask "Grandparent" Epic Nothing Nothing P2 [] Nothing
+ child <- createTask "Parent" Epic (Just (taskId parent)) Nothing P2 [] Nothing
+ grandchild1 <- createTask "Grandchild 1" WorkTask (Just (taskId child)) Nothing P2 [] Nothing
+ grandchild2 <- createTask "Grandchild 2" WorkTask (Just (taskId child)) Nothing P2 [] Nothing
taskId grandchild1 Test.@?= taskId parent <> ".1.1"
taskId grandchild2 Test.@?= taskId parent <> ".1.2",
Test.unit "child ID generation skips gaps" <| do
- parent <- createTask "Parent with gaps" Epic Nothing Nothing P2 []
- child1 <- createTask "Child 1" WorkTask (Just (taskId parent)) Nothing P2 []
+ parent <- createTask "Parent with gaps" Epic Nothing Nothing P2 [] Nothing
+ child1 <- createTask "Child 1" WorkTask (Just (taskId parent)) Nothing P2 [] Nothing
-- Manually create a task with .3 suffix to simulate a gap (or deleted task)
let child3Id = taskId parent <> ".3"
- child3 = Task
- { taskId = child3Id,
- taskTitle = "Child 3",
- taskType = WorkTask,
- taskParent = Just (taskId parent),
- taskNamespace = Nothing,
- taskStatus = Open,
- taskPriority = P2,
- taskDependencies = [],
- taskCreatedAt = taskCreatedAt child1,
- taskUpdatedAt = taskUpdatedAt child1
- }
+ child3 =
+ Task
+ { taskId = child3Id,
+ taskTitle = "Child 3",
+ taskType = WorkTask,
+ taskParent = Just (taskId parent),
+ taskNamespace = Nothing,
+ taskStatus = Open,
+ taskPriority = P2,
+ taskDependencies = [],
+ taskCreatedAt = taskCreatedAt child1,
+ taskUpdatedAt = taskUpdatedAt child1,
+ taskDescription = Nothing
+ }
saveTask child3
-
+
-- Create a new child, it should get .4, not .2
- child4 <- createTask "Child 4" WorkTask (Just (taskId parent)) Nothing P2 []
- taskId child4 Test.@?= taskId parent <> ".4"
+ child4 <- createTask "Child 4" WorkTask (Just (taskId parent)) Nothing P2 [] Nothing
+ taskId child4 Test.@?= taskId parent <> ".4",
+ Test.unit "task lookup is case insensitive" <| do
+ task <- createTask "Case sensitive" WorkTask Nothing Nothing P2 [] Nothing
+ let tid = taskId task
+ upperTid = T.toUpper tid
+ tasks <- loadTasks
+ let found = findTask upperTid tasks
+ case found of
+ Just t -> taskId t Test.@?= tid
+ Nothing -> Test.assertFailure "Could not find task with upper case ID",
+ Test.unit "namespace normalization handles .hs suffix" <| do
+ let ns = "Omni/Task.hs"
+ validNs = Namespace.fromHaskellModule ns
+ Namespace.toPath validNs Test.@?= "Omni/Task.hs",
+ Test.unit "generated IDs are lowercase" <| do
+ task <- createTask "Lowercase check" WorkTask Nothing Nothing P2 [] Nothing
+ let tid = taskId task
+ tid Test.@?= T.toLower tid
+ -- check it matches regex for base36 (t-[0-9a-z]+)
+ let isLowerBase36 = T.all (\c -> c `elem` ['0' .. '9'] ++ ['a' .. 'z'] || c == 't' || c == '-') tid
+ isLowerBase36 Test.@?= True,
+ Test.unit "dependencies are case insensitive" <| do
+ task1 <- createTask "Blocker" WorkTask Nothing Nothing P2 [] Nothing
+ let tid1 = taskId task1
+ -- Use uppercase ID for dependency
+ upperTid1 = T.toUpper tid1
+ dep = Dependency {depId = upperTid1, depType = Blocks}
+ task2 <- createTask "Blocked" WorkTask Nothing Nothing P2 [dep] Nothing
+
+ -- task1 is Open, so task2 should NOT be ready
+ ready <- getReadyTasks
+ (taskId task2 `notElem` map taskId ready) Test.@?= True
+
+ updateTaskStatus tid1 Done []
+
+ -- task2 should now be ready because dependency check normalizes IDs
+ ready2 <- getReadyTasks
+ (taskId task2 `elem` map taskId ready2) Test.@?= True
]
-- | Test CLI argument parsing to ensure docopt string matches actual usage
@@ -516,6 +602,13 @@ cliTests =
Right args -> do
args `Cli.has` Cli.command "stats" Test.@?= True
args `Cli.has` Cli.longOption "json" Test.@?= True,
+ Test.unit "stats with --epic flag" <| do
+ let result = Docopt.parseArgs help ["stats", "--epic=t-abc123"]
+ case result of
+ Left err -> Test.assertFailure <| "Failed to parse 'stats --epic': " <> show err
+ Right args -> do
+ args `Cli.has` Cli.command "stats" Test.@?= True
+ Cli.getArg args (Cli.longOption "epic") Test.@?= Just "t-abc123",
Test.unit "create with flags in different order" <| do
let result = Docopt.parseArgs help ["create", "Test", "--json", "--priority=1", "--namespace=Omni/Task"]
case result of
diff --git a/Omni/Task/Core.hs b/Omni/Task/Core.hs
index 228ab05..b17c2aa 100644
--- a/Omni/Task/Core.hs
+++ b/Omni/Task/Core.hs
@@ -18,6 +18,10 @@ import Data.Time.Calendar (toModifiedJulianDay)
import GHC.Generics ()
import System.Directory (createDirectoryIfMissing, doesFileExist)
import System.Environment (lookupEnv)
+import System.IO (SeekMode (AbsoluteSeek))
+import qualified System.IO as IO
+import System.IO.Unsafe (unsafePerformIO)
+import System.Posix.IO (LockRequest (..), closeFd, handleToFd, waitToSetLock)
-- Core data types
data Task = Task
@@ -29,6 +33,7 @@ data Task = Task
taskStatus :: Status,
taskPriority :: Priority, -- Priority level (0-4)
taskDependencies :: [Dependency], -- List of dependencies with types
+ taskDescription :: Maybe Text, -- Optional detailed description
taskCreatedAt :: UTCTime,
taskUpdatedAt :: UTCTime
}
@@ -57,6 +62,14 @@ data DependencyType
| Related -- Soft relationship, doesn't block
deriving (Show, Eq, Generic)
+data TaskProgress = TaskProgress
+ { progressTaskId :: Text,
+ progressTotal :: Int,
+ progressCompleted :: Int,
+ progressPercentage :: Int
+ }
+ deriving (Show, Eq, Generic)
+
instance ToJSON TaskType
instance FromJSON TaskType
@@ -81,15 +94,44 @@ instance ToJSON Task
instance FromJSON Task
+-- | Case-insensitive ID comparison
+matchesId :: Text -> Text -> Bool
+matchesId id1 id2 = normalizeId id1 == normalizeId id2
+
+-- | Normalize ID to lowercase
+normalizeId :: Text -> Text
+normalizeId = T.toLower
+
+-- | Find a task by ID (case-insensitive)
+findTask :: Text -> [Task] -> Maybe Task
+findTask tid = List.find (\t -> matchesId (taskId t) tid)
+
+-- | Normalize task IDs (self, parent, dependencies) to lowercase
+normalizeTask :: Task -> Task
+normalizeTask t =
+ t
+ { taskId = normalizeId (taskId t),
+ taskParent = fmap normalizeId (taskParent t),
+ taskDependencies = map normalizeDependency (taskDependencies t)
+ }
+
+normalizeDependency :: Dependency -> Dependency
+normalizeDependency d = d {depId = normalizeId (depId d)}
+
+instance ToJSON TaskProgress
+
+instance FromJSON TaskProgress
+
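The ID helpers above (matchesId, normalizeId, findTask) are pure, so their behaviour is easy to pin down next to the existing case-insensitivity unit test; a sketch, not part of this diff, with made-up IDs:

  Test.unit "matchesId and normalizeId ignore case" <| do
    matchesId "T-ABC123.1" "t-abc123.1" Test.@?= True
    normalizeId "T-ABC123" Test.@?= "t-abc123"

normalizeTask applies the same lowering to a task's own ID, its parent, and every depId, which is what lets importTasks accept mixed-case input.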
-- Get the tasks database file path (use test file if TASK_TEST_MODE is set)
getTasksFilePath :: IO FilePath
getTasksFilePath = do
customPath <- lookupEnv "TASK_DB_PATH"
testMode <- lookupEnv "TASK_TEST_MODE"
- pure <| case (customPath, testMode) of
- (Just path, _) -> path
- (_, Just "1") -> ".tasks/tasks-test.jsonl"
- _ -> ".tasks/tasks.jsonl"
+ let path = case (customPath, testMode) of
+ (Just p, _) -> p
+ (_, Just "1") -> ".tasks/tasks-test.jsonl"
+ _ -> ".tasks/tasks.jsonl"
+ pure path
-- Initialize the task database
initTaskDb :: IO ()
@@ -101,6 +143,55 @@ initTaskDb = do
TIO.writeFile tasksFile ""
putText <| "Initialized task database at " <> T.pack tasksFile
+-- Lock for in-process thread safety
+taskLock :: MVar ()
+taskLock = unsafePerformIO (newMVar ())
+{-# NOINLINE taskLock #-}
+
+-- Execute action with write lock (exclusive)
+withTaskWriteLock :: IO a -> IO a
+withTaskWriteLock action =
+ withMVar taskLock <| \_ -> do
+ -- In test mode, we rely on MVar for thread safety to avoid GHC "resource busy" errors
+ -- when mixing openFd (flock) and standard IO in threaded tests.
+ testMode <- lookupEnv "TASK_TEST_MODE"
+ case testMode of
+ Just "1" -> action
+ _ -> do
+ tasksFile <- getTasksFilePath
+ let lockFile = tasksFile <> ".lock"
+ bracket
+ ( do
+ h <- IO.openFile lockFile IO.ReadWriteMode
+ handleToFd h
+ )
+ closeFd
+ ( \fd -> do
+ waitToSetLock fd (WriteLock, AbsoluteSeek, 0, 0)
+ action
+ )
+
+-- Execute action with read lock (shared)
+withTaskReadLock :: IO a -> IO a
+withTaskReadLock action =
+ withMVar taskLock <| \_ -> do
+ testMode <- lookupEnv "TASK_TEST_MODE"
+ case testMode of
+ Just "1" -> action
+ _ -> do
+ tasksFile <- getTasksFilePath
+ let lockFile = tasksFile <> ".lock"
+ bracket
+ ( do
+ h <- IO.openFile lockFile IO.ReadWriteMode
+ handleToFd h
+ )
+ closeFd
+ ( \fd -> do
+ waitToSetLock fd (ReadLock, AbsoluteSeek, 0, 0)
+ action
+ )
+
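The MVar is not re-entrant, so code that already holds the lock must use the *Internal variants; calling loadTasks or saveTask from inside withTaskWriteLock would block on the MVar forever. A minimal sketch of the read-modify-write shape the mutating operations below all follow (bulkTouch is a hypothetical helper, not part of this diff):

  bulkTouch :: IO ()
  bulkTouch =
    withTaskWriteLock <| do
      tasks <- loadTasksInternal -- read while holding the exclusive lock
      now <- getCurrentTime
      let touched = map (\t -> t {taskUpdatedAt = now}) tasks
      tasksFile <- getTasksFilePath
      TIO.writeFile tasksFile "" -- truncate, then rewrite under the same lock
      traverse_ saveTaskInternal touched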
-- Generate a short ID using base62 encoding of timestamp
generateId :: IO Text
generateId = do
@@ -113,14 +204,19 @@ generateId = do
-- Combine MJD and micros to ensure uniqueness across days.
-- Multiplier 10^11 (100,000 seconds) is safe for any day length.
totalMicros = (mjd * 100000000000) + micros
- encoded = toBase62 totalMicros
+ encoded = toBase36 totalMicros
pure <| "t-" <> T.pack encoded
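Base 36 keeps generated IDs stable under case folding at the cost of a slightly longer suffix: a present-day totalMicros of roughly 6 x 10^15 needs 11 base-36 digits versus 9 in base 62. A few fixed points of the encoder, written as a unit-test sketch (not part of this diff; it assumes toBase36 is exported from Omni.Task.Core):

  Test.unit "toBase36 fixed points" <| do
    toBase36 0 Test.@?= "0"
    toBase36 35 Test.@?= "z"
    toBase36 36 Test.@?= "10"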
-- Generate a child ID based on parent ID (e.g. "t-abc.1", "t-abc.1.2")
-- Finds the next available sequential suffix among existing children.
generateChildId :: Text -> IO Text
-generateChildId parentId = do
- tasks <- loadTasks
+generateChildId parentId =
+ withTaskReadLock <| do
+ tasks <- loadTasksInternal
+ pure <| computeNextChildId tasks (normalizeId parentId)
+
+computeNextChildId :: [Task] -> Text -> Text
+computeNextChildId tasks parentId =
-- Find the max suffix among ALL tasks that look like children (to avoid ID collisions)
-- We check all tasks, not just those with taskParent set, because we want to ensure
-- ID uniqueness even if the parent link is missing.
@@ -128,7 +224,7 @@ generateChildId parentId = do
nextSuffix = case suffixes of
[] -> 1
s -> maximum s + 1
- pure <| parentId <> "." <> T.pack (show nextSuffix)
+ in parentId <> "." <> T.pack (show nextSuffix)
getSuffix :: Text -> Text -> Maybe Int
getSuffix parent childId =
@@ -140,15 +236,15 @@ getSuffix parent childId =
else Nothing
else Nothing
--- Convert number to base62 (0-9, a-z, A-Z)
-toBase62 :: Integer -> String
-toBase62 0 = "0"
-toBase62 n = reverse <| go n
+-- Convert number to base36 (0-9, a-z)
+toBase36 :: Integer -> String
+toBase36 0 = "0"
+toBase36 n = reverse <| go n
where
- alphabet = ['0' .. '9'] ++ ['a' .. 'z'] ++ ['A' .. 'Z']
+ alphabet = ['0' .. '9'] ++ ['a' .. 'z']
go 0 = []
go x =
- let (q, r) = x `divMod` 62
+ let (q, r) = x `divMod` 36
idx = fromIntegral r
char = case drop idx alphabet of
(c : _) -> c
@@ -157,7 +253,10 @@ toBase62 n = reverse <| go n
-- Load all tasks from JSONL file (with migration support)
loadTasks :: IO [Task]
-loadTasks = do
+loadTasks = withTaskReadLock loadTasksInternal
+
+loadTasksInternal :: IO [Task]
+loadTasksInternal = do
tasksFile <- getTasksFilePath
exists <- doesFileExist tasksFile
if exists
@@ -173,11 +272,11 @@ loadTasks = do
then Nothing
else case decode (BLC.pack <| T.unpack line) of
Just task -> Just task
- Nothing -> migrateOldTask line
+ Nothing -> migrateTask line
- -- Migrate old task format (with taskProject field or missing priority) to new format
- migrateOldTask :: Text -> Maybe Task
- migrateOldTask line = case Aeson.decode (BLC.pack <| T.unpack line) :: Maybe Aeson.Object of
+ -- Migrate old task formats to new format
+ migrateTask :: Text -> Maybe Task
+ migrateTask line = case Aeson.decode (BLC.pack <| T.unpack line) :: Maybe Aeson.Object of
Nothing -> Nothing
Just obj ->
let taskId' = KM.lookup "taskId" obj +> parseMaybe Aeson.parseJSON
@@ -185,12 +284,22 @@ loadTasks = do
taskStatus' = KM.lookup "taskStatus" obj +> parseMaybe Aeson.parseJSON
taskCreatedAt' = KM.lookup "taskCreatedAt" obj +> parseMaybe Aeson.parseJSON
taskUpdatedAt' = KM.lookup "taskUpdatedAt" obj +> parseMaybe Aeson.parseJSON
- -- Extract old taskDependencies (could be [Text] or [Dependency])
- oldDeps = KM.lookup "taskDependencies" obj +> parseMaybe Aeson.parseJSON :: Maybe [Text]
- newDeps = maybe [] (map (\tid -> Dependency {depId = tid, depType = Blocks})) oldDeps
+
+ -- Extract taskDescription (new field)
+ taskDescription' = KM.lookup "taskDescription" obj +> parseMaybe Aeson.parseJSON
+
+ -- Extract dependencies (handle V1 [Dependency] and V0 [Text])
+ v1Deps = KM.lookup "taskDependencies" obj +> parseMaybe Aeson.parseJSON :: Maybe [Dependency]
+ v0Deps = KM.lookup "taskDependencies" obj +> parseMaybe Aeson.parseJSON :: Maybe [Text]
+ finalDeps = case v1Deps of
+ Just ds -> ds
+ Nothing -> case v0Deps of
+ Just ts -> map (\tid -> Dependency {depId = tid, depType = Blocks}) ts
+ Nothing -> []
+
-- taskProject is ignored in new format (use epics instead)
- taskType' = WorkTask -- Old tasks become WorkTask by default
- taskParent' = Nothing
+ taskType' = fromMaybe WorkTask (KM.lookup "taskType" obj +> parseMaybe Aeson.parseJSON)
+ taskParent' = KM.lookup "taskParent" obj +> parseMaybe Aeson.parseJSON
taskNamespace' = KM.lookup "taskNamespace" obj +> parseMaybe Aeson.parseJSON
-- Default priority to P2 (medium) for old tasks
taskPriority' = fromMaybe P2 (KM.lookup "taskPriority" obj +> parseMaybe Aeson.parseJSON)
@@ -205,7 +314,8 @@ loadTasks = do
taskNamespace = taskNamespace',
taskStatus = status,
taskPriority = taskPriority',
- taskDependencies = newDeps,
+ taskDependencies = finalDeps,
+ taskDescription = taskDescription',
taskCreatedAt = created,
taskUpdatedAt = updated
}
@@ -213,46 +323,59 @@ loadTasks = do
-- Save a single task (append to JSONL)
saveTask :: Task -> IO ()
-saveTask task = do
+saveTask = withTaskWriteLock <. saveTaskInternal
+
+saveTaskInternal :: Task -> IO ()
+saveTaskInternal task = do
tasksFile <- getTasksFilePath
let json = encode task
BLC.appendFile tasksFile (json <> "\n")
-- Create a new task
-createTask :: Text -> TaskType -> Maybe Text -> Maybe Text -> Priority -> [Dependency] -> IO Task
-createTask title taskType parent namespace priority deps = do
- tid <- maybe generateId generateChildId parent
- now <- getCurrentTime
- let task =
- Task
- { taskId = tid,
- taskTitle = title,
- taskType = taskType,
- taskParent = parent,
- taskNamespace = namespace,
- taskStatus = Open,
- taskPriority = priority,
- taskDependencies = deps,
- taskCreatedAt = now,
- taskUpdatedAt = now
- }
- saveTask task
- pure task
+createTask :: Text -> TaskType -> Maybe Text -> Maybe Text -> Priority -> [Dependency] -> Maybe Text -> IO Task
+createTask title taskType parent namespace priority deps description =
+ withTaskWriteLock <| do
+ let parent' = fmap normalizeId parent
+ deps' = map normalizeDependency deps
+
+ tid <- case parent' of
+ Nothing -> generateId
+ Just pid -> do
+ tasks <- loadTasksInternal
+ pure <| computeNextChildId tasks pid
+ now <- getCurrentTime
+ let task =
+ Task
+ { taskId = normalizeId tid,
+ taskTitle = title,
+ taskType = taskType,
+ taskParent = parent',
+ taskNamespace = namespace,
+ taskStatus = Open,
+ taskPriority = priority,
+ taskDependencies = deps',
+ taskDescription = description,
+ taskCreatedAt = now,
+ taskUpdatedAt = now
+ }
+ saveTaskInternal task
+ pure task
-- Update task status
-updateTaskStatus :: Text -> Status -> IO ()
-updateTaskStatus tid newStatus = do
- tasks <- loadTasks
- now <- getCurrentTime
- let updatedTasks = map updateIfMatch tasks
- updateIfMatch t =
- if taskId t == tid
- then t {taskStatus = newStatus, taskUpdatedAt = now}
- else t
- -- Rewrite the entire file (simple approach for MVP)
- tasksFile <- getTasksFilePath
- TIO.writeFile tasksFile ""
- traverse_ saveTask updatedTasks
+updateTaskStatus :: Text -> Status -> [Dependency] -> IO ()
+updateTaskStatus tid newStatus newDeps =
+ withTaskWriteLock <| do
+ tasks <- loadTasksInternal
+ now <- getCurrentTime
+ let updatedTasks = map updateIfMatch tasks
+ updateIfMatch t =
+ if matchesId (taskId t) tid
+ then t {taskStatus = newStatus, taskUpdatedAt = now, taskDependencies = if null newDeps then taskDependencies t else newDeps}
+ else t
+ -- Rewrite the entire file (simple approach for MVP)
+ tasksFile <- getTasksFilePath
+ TIO.writeFile tasksFile ""
+ traverse_ saveTaskInternal updatedTasks
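One behavioural detail worth noting: an empty dependency list means "leave the existing dependencies alone", so a status-only update never clobbers them, and there is no way to clear dependencies through this call. Both call shapes, with illustrative IDs:

  updateTaskStatus "t-abc123" Done [] -- status only; dependencies preserved
  updateTaskStatus "t-abc123" InProgress [Dependency {depId = "t-def456", depType = Blocks}] -- replaces the list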
-- List tasks, optionally filtered by type, parent, status, or namespace
listTasks :: Maybe TaskType -> Maybe Text -> Maybe Status -> Maybe Text -> IO [Task]
@@ -279,7 +402,9 @@ listTasks maybeType maybeParent maybeStatus maybeNamespace = do
getReadyTasks :: IO [Task]
getReadyTasks = do
allTasks <- loadTasks
- let openTasks = filter (\t -> taskStatus t /= Done) allTasks
+ -- Only Open or InProgress tasks are considered ready for work.
+ -- Review tasks are waiting for review, and Done tasks are complete.
+ let openTasks = filter (\t -> taskStatus t == Open || taskStatus t == InProgress) allTasks
doneIds = map taskId <| filter (\t -> taskStatus t == Done) allTasks
-- Find all tasks that act as parents
@@ -297,29 +422,56 @@ getReadyTasks = do
getDependencyTree :: Text -> IO [Task]
getDependencyTree tid = do
tasks <- loadTasks
- case filter (\t -> taskId t == tid) tasks of
- [] -> pure []
- (task : _) -> pure <| collectDeps tasks task
+ case findTask tid tasks of
+ Nothing -> pure []
+ Just task -> pure <| collectDeps tasks task
where
collectDeps :: [Task] -> Task -> [Task]
collectDeps allTasks task =
let depIds = map depId (taskDependencies task)
- deps = filter (\t -> taskId t `elem` depIds) allTasks
+ deps = filter (\t -> any (matchesId (taskId t)) depIds) allTasks
in task : concatMap (collectDeps allTasks) deps
+-- Get task progress
+getTaskProgress :: Text -> IO TaskProgress
+getTaskProgress tidRaw = do
+ let tid = normalizeId tidRaw
+ tasks <- loadTasks
+  -- Verify the task exists before computing progress; panic if it does not
+ case findTask tid tasks of
+ Nothing -> panic "Task not found"
+ Just _ -> do
+      let children = filter (maybe False (`matchesId` tid) <. taskParent) tasks
+ total = length children
+ completed = length <| filter (\child -> taskStatus child == Done) children
+ percentage = if total == 0 then 0 else (completed * 100) `div` total
+ pure
+ TaskProgress
+ { progressTaskId = tid,
+ progressTotal = total,
+ progressCompleted = completed,
+ progressPercentage = percentage
+ }
+
+-- Show task progress
+showTaskProgress :: Text -> IO ()
+showTaskProgress tid = do
+ progress <- getTaskProgress tid
+ putText <| "Progress for " <> tid <> ": " <> T.pack (show (progressCompleted progress)) <> "/" <> T.pack (show (progressTotal progress)) <> " (" <> T.pack (show (progressPercentage progress)) <> "%)"
+
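Progress counts only direct children (grandchildren are not included) and the percentage rounds down via integer division. A unit-test sketch in the style of the existing ones (not part of this diff):

  Test.unit "epic progress counts only direct children" <| do
    parent <- createTask "Progress epic" Epic Nothing Nothing P2 [] Nothing
    c1 <- createTask "Child A" WorkTask (Just (taskId parent)) Nothing P2 [] Nothing
    _ <- createTask "Child B" WorkTask (Just (taskId parent)) Nothing P2 [] Nothing
    _ <- createTask "Child C" WorkTask (Just (taskId parent)) Nothing P2 [] Nothing
    updateTaskStatus (taskId c1) Done []
    progress <- getTaskProgress (taskId parent)
    progressCompleted progress Test.@?= 1
    progressTotal progress Test.@?= 3
    progressPercentage progress Test.@?= 33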
-- Show dependency tree for a task
showDependencyTree :: Text -> IO ()
showDependencyTree tid = do
tasks <- loadTasks
- case filter (\t -> taskId t == tid) tasks of
- [] -> putText "Task not found"
- (task : _) -> printTree tasks task 0
+ case findTask tid tasks of
+ Nothing -> putText "Task not found"
+ Just task -> printTree tasks task 0
where
printTree :: [Task] -> Task -> Int -> IO ()
printTree allTasks task indent = do
putText <| T.pack (replicate (indent * 2) ' ') <> taskId task <> ": " <> taskTitle task
let depIds = map depId (taskDependencies task)
- deps = filter (\t -> taskId t `elem` depIds) allTasks
+ deps = filter (\t -> any (matchesId (taskId t)) depIds) allTasks
traverse_ (\dep -> printTree allTasks dep (indent + 1)) deps
-- Get task tree (returns tasks hierarchically)
@@ -333,13 +485,13 @@ getTaskTree maybeId = do
in pure <| concatMap (collectChildren tasks) epics
Just tid -> do
-- Return specific task/epic with its children
- case filter (\t -> taskId t == tid) tasks of
- [] -> pure []
- (task : _) -> pure <| collectChildren tasks task
+ case findTask tid tasks of
+ Nothing -> pure []
+ Just task -> pure <| collectChildren tasks task
where
collectChildren :: [Task] -> Task -> [Task]
collectChildren allTasks task =
- let children = filter (\t -> taskParent t == Just (taskId task)) allTasks
+ let children = filter (maybe False (`matchesId` taskId task) <. taskParent) allTasks
in task : concatMap (collectChildren allTasks) children
-- Show task tree (epic with children, or all epics if no ID given)
@@ -355,9 +507,9 @@ showTaskTree maybeId = do
else traverse_ (printEpicTree tasks) epics
Just tid -> do
-- Show specific task/epic with its children
- case filter (\t -> taskId t == tid) tasks of
- [] -> putText "Task not found"
- (task : _) -> printEpicTree tasks task
+ case findTask tid tasks of
+ Nothing -> putText "Task not found"
+ Just task -> printEpicTree tasks task
where
printEpicTree :: [Task] -> Task -> IO ()
printEpicTree allTasks task = printTreeNode allTasks task 0
@@ -367,7 +519,7 @@ showTaskTree maybeId = do
printTreeNode' :: [Task] -> Task -> Int -> [Bool] -> IO ()
printTreeNode' allTasks task indent ancestry = do
- let children = filter (\t -> taskParent t == Just (taskId task)) allTasks
+ let children = filter (maybe False (`matchesId` taskId task) <. taskParent) allTasks
-- Build tree prefix using box-drawing characters
prefix =
if indent == 0
@@ -387,9 +539,23 @@ showTaskTree maybeId = do
InProgress -> "[~]"
Review -> "[?]"
Done -> "[✓]"
+
+ coloredStatusStr = case taskType task of
+ Epic -> magenta statusStr
+ WorkTask -> case taskStatus task of
+ Open -> bold statusStr
+ InProgress -> yellow statusStr
+ Review -> magenta statusStr
+ Done -> green statusStr
+
nsStr = case taskNamespace task of
Nothing -> ""
Just ns -> "[" <> ns <> "] "
+
+ coloredNsStr = case taskNamespace task of
+ Nothing -> ""
+ Just _ -> gray nsStr
+
-- Calculate available width for title (80 cols - prefix - id - labels)
usedWidth = T.length prefix + T.length (taskId task) + T.length statusStr + T.length nsStr + 2
availableWidth = max 20 (80 - usedWidth)
@@ -397,7 +563,10 @@ showTaskTree maybeId = do
if T.length (taskTitle task) > availableWidth
then T.take (availableWidth - 3) (taskTitle task) <> "..."
else taskTitle task
- putText <| prefix <> taskId task <> " " <> statusStr <> " " <> nsStr <> truncatedTitle
+
+ coloredTitle = if taskType task == Epic then bold truncatedTitle else truncatedTitle
+
+ putText <| prefix <> cyan (taskId task) <> " " <> coloredStatusStr <> " " <> coloredNsStr <> coloredTitle
-- Print children with updated ancestry
let indexedChildren = zip [1 ..] children
@@ -416,29 +585,51 @@ printTask t = do
let progressInfo =
if taskType t == Epic
then
- let children = filter (\child -> taskParent child == Just (taskId t)) tasks
+ let children = filter (maybe False (`matchesId` taskId t) <. taskParent) tasks
total = length children
completed = length <| filter (\child -> taskStatus child == Done) children
in " [" <> T.pack (show completed) <> "/" <> T.pack (show total) <> "]"
else ""
+
parentInfo = case taskParent t of
Nothing -> ""
Just p -> " (parent: " <> p <> ")"
+
namespaceInfo = case taskNamespace t of
Nothing -> ""
Just ns -> " [" <> ns <> "]"
+
+ coloredStatus =
+ let s = "[" <> T.pack (show (taskStatus t)) <> "]"
+ in case taskStatus t of
+ Open -> bold s
+ InProgress -> yellow s
+ Review -> magenta s
+ Done -> green s
+
+ coloredTitle = if taskType t == Epic then bold (taskTitle t) else taskTitle t
+
+ coloredProgress = if taskType t == Epic then magenta progressInfo else progressInfo
+
+ coloredNamespace = case taskNamespace t of
+ Nothing -> ""
+ Just _ -> gray namespaceInfo
+
+ coloredParent = case taskParent t of
+ Nothing -> ""
+ Just _ -> gray parentInfo
+
putText
- <| taskId t
+ <| cyan (taskId t)
<> " ["
<> T.pack (show (taskType t))
- <> "] ["
- <> T.pack (show (taskStatus t))
- <> "]"
- <> progressInfo
+ <> "] "
+ <> coloredStatus
+ <> coloredProgress
<> " "
- <> taskTitle t
- <> parentInfo
- <> namespaceInfo
+ <> coloredTitle
+ <> coloredParent
+ <> coloredNamespace
-- Show detailed task information (human-readable)
showTaskDetailed :: Task -> IO ()
@@ -454,7 +645,7 @@ showTaskDetailed t = do
-- Show epic progress if this is an epic
when (taskType t == Epic) <| do
- let children = filter (\child -> taskParent child == Just (taskId t)) tasks
+ let children = filter (maybe False (`matchesId` taskId t) <. taskParent) tasks
total = length children
completed = length <| filter (\child -> taskStatus child == Done) children
percentage = if total == 0 then 0 else (completed * 100) `div` total
@@ -475,6 +666,16 @@ showTaskDetailed t = do
putText "Dependencies:"
traverse_ printDependency (taskDependencies t)
+ -- Show description
+ case taskDescription t of
+ Nothing -> pure ()
+ Just desc -> do
+ putText ""
+ putText "Description:"
+ -- Indent description for better readability
+ let indented = T.unlines <| map (" " <>) (T.lines desc)
+ putText indented
+
putText ""
where
priorityDesc = case taskPriority t of
@@ -487,14 +688,26 @@ showTaskDetailed t = do
printDependency dep =
putText <| " - " <> depId dep <> " [" <> T.pack (show (depType dep)) <> "]"
+-- ANSI Colors
+red, green, yellow, blue, magenta, cyan, gray, bold :: Text -> Text
+red t = "\ESC[31m" <> t <> "\ESC[0m"
+green t = "\ESC[32m" <> t <> "\ESC[0m"
+yellow t = "\ESC[33m" <> t <> "\ESC[0m"
+blue t = "\ESC[34m" <> t <> "\ESC[0m"
+magenta t = "\ESC[35m" <> t <> "\ESC[0m"
+cyan t = "\ESC[36m" <> t <> "\ESC[0m"
+gray t = "\ESC[90m" <> t <> "\ESC[0m"
+bold t = "\ESC[1m" <> t <> "\ESC[0m"
+
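Each helper ends with a full reset (ESC[0m), which is why the rendering code above colors fragments side by side rather than nesting them; anything following a nested fragment would lose the outer color:

  -- cyan ("id " <> bold "t-abc" <> " done")
  --   == "\ESC[36mid \ESC[1mt-abc\ESC[0m done\ESC[0m"
  -- "id t-abc" renders cyan and bold, but " done" is plain because the inner
  -- reset also cleared the outer cyan.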
-- Export tasks: Consolidate JSONL file (remove duplicates, keep latest version)
exportTasks :: IO ()
-exportTasks = do
- tasks <- loadTasks
- -- Rewrite the entire file with deduplicated tasks
- tasksFile <- getTasksFilePath
- TIO.writeFile tasksFile ""
- traverse_ saveTask tasks
+exportTasks =
+ withTaskWriteLock <| do
+ tasks <- loadTasksInternal
+ -- Rewrite the entire file with deduplicated tasks
+ tasksFile <- getTasksFilePath
+ TIO.writeFile tasksFile ""
+ traverse_ saveTaskInternal tasks
-- Task statistics
data TaskStats = TaskStats
@@ -516,18 +729,31 @@ instance ToJSON TaskStats
instance FromJSON TaskStats
-- Get task statistics
-getTaskStats :: IO TaskStats
-getTaskStats = do
- tasks <- loadTasks
- ready <- getReadyTasks
- let total = length tasks
+getTaskStats :: Maybe Text -> IO TaskStats
+getTaskStats maybeEpicId = do
+ allTasks <- loadTasks
+
+ targetTasks <- case maybeEpicId of
+ Nothing -> pure allTasks
+ Just epicId ->
+ case findTask epicId allTasks of
+ Nothing -> panic "Epic not found"
+ Just task -> pure <| getAllDescendants allTasks (taskId task)
+
+ globalReady <- getReadyTasks
+ let readyIds = map taskId globalReady
+ -- Filter ready tasks to only include those in our target set
+ readyCount = length <| filter (\t -> taskId t `elem` readyIds) targetTasks
+
+ tasks = targetTasks
+ total = length tasks
open = length <| filter (\t -> taskStatus t == Open) tasks
inProg = length <| filter (\t -> taskStatus t == InProgress) tasks
review = length <| filter (\t -> taskStatus t == Review) tasks
done = length <| filter (\t -> taskStatus t == Done) tasks
epics = length <| filter (\t -> taskType t == Epic) tasks
- readyCount = length ready
- blockedCount = total - readyCount - done
+ readyCount' = readyCount
+ blockedCount = total - readyCount' - done
-- Count tasks by priority
byPriority =
[ (P0, length <| filter (\t -> taskPriority t == P0) tasks),
@@ -548,18 +774,26 @@ getTaskStats = do
reviewTasks = review,
doneTasks = done,
totalEpics = epics,
- readyTasks = readyCount,
+ readyTasks = readyCount',
blockedTasks = blockedCount,
tasksByPriority = byPriority,
tasksByNamespace = byNamespace
}
+-- Helper to get all descendants of a task (recursive)
+getAllDescendants :: [Task] -> Text -> [Task]
+getAllDescendants allTasks parentId =
+ let children = filter (maybe False (`matchesId` parentId) <. taskParent) allTasks
+ in children ++ concatMap (getAllDescendants allTasks <. taskId) children
+
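Epic-scoped stats therefore cover the epic's whole subtree but not the epic itself. A test sketch that would lock that in (not part of this diff):

  Test.unit "epic-scoped stats count descendants recursively" <| do
    epic <- createTask "Stats epic" Epic Nothing Nothing P2 [] Nothing
    child <- createTask "Child" WorkTask (Just (taskId epic)) Nothing P2 [] Nothing
    _ <- createTask "Grandchild" WorkTask (Just (taskId child)) Nothing P2 [] Nothing
    stats <- getTaskStats (Just (taskId epic))
    totalTasks stats Test.@?= 2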
-- Show task statistics (human-readable)
-showTaskStats :: IO ()
-showTaskStats = do
- stats <- getTaskStats
+showTaskStats :: Maybe Text -> IO ()
+showTaskStats maybeEpicId = do
+ stats <- getTaskStats maybeEpicId
putText ""
- putText "Task Statistics"
+ case maybeEpicId of
+ Nothing -> putText "Task Statistics"
+ Just epicId -> putText <| "Task Statistics for Epic " <> epicId
putText ""
putText <| "Total tasks: " <> T.pack (show (totalTasks stats))
putText <| " Open: " <> T.pack (show (openTasks stats))
@@ -593,31 +827,32 @@ showTaskStats = do
-- Import tasks: Read from another JSONL file and merge with existing tasks
importTasks :: FilePath -> IO ()
-importTasks filePath = do
- exists <- doesFileExist filePath
- unless exists <| panic (T.pack filePath <> " does not exist")
-
- -- Load tasks from import file
- content <- TIO.readFile filePath
- let importLines = T.lines content
- importedTasks = mapMaybe decodeTask importLines
-
- -- Load existing tasks
- existingTasks <- loadTasks
-
- -- Create a map of existing task IDs for quick lookup
- let existingIds = map taskId existingTasks
- -- Filter to only new tasks (not already in our database)
- newTasks = filter (\t -> taskId t `notElem` existingIds) importedTasks
- -- For tasks that exist, update them with imported data
- updatedTasks = map (updateWithImported importedTasks) existingTasks
- -- Combine: updated existing tasks + new tasks
- allTasks = updatedTasks ++ newTasks
-
- -- Rewrite tasks.jsonl with merged data
- tasksFile <- getTasksFilePath
- TIO.writeFile tasksFile ""
- traverse_ saveTask allTasks
+importTasks filePath =
+ withTaskWriteLock <| do
+ exists <- doesFileExist filePath
+ unless exists <| panic (T.pack filePath <> " does not exist")
+
+ -- Load tasks from import file
+ content <- TIO.readFile filePath
+ let importLines = T.lines content
+ importedTasks = map normalizeTask (mapMaybe decodeTask importLines)
+
+ -- Load existing tasks
+ existingTasks <- loadTasksInternal
+
+ -- Create a map of existing task IDs for quick lookup
+ let existingIds = map taskId existingTasks
+ -- Filter to only new tasks (not already in our database)
+ newTasks = filter (\t -> not (any (`matchesId` taskId t) existingIds)) importedTasks
+ -- For tasks that exist, update them with imported data
+ updatedTasks = map (updateWithImported importedTasks) existingTasks
+ -- Combine: updated existing tasks + new tasks
+ allTasks = updatedTasks ++ newTasks
+
+ -- Rewrite tasks.jsonl with merged data
+ tasksFile <- getTasksFilePath
+ TIO.writeFile tasksFile ""
+ traverse_ saveTaskInternal allTasks
where
decodeTask :: Text -> Maybe Task
decodeTask line =
@@ -628,9 +863,9 @@ importTasks filePath = do
-- Update an existing task if there's a newer version in imported tasks
updateWithImported :: [Task] -> Task -> Task
updateWithImported imported existing =
- case filter (\t -> taskId t == taskId existing) imported of
- [] -> existing -- No imported version, keep existing
- (importedTask : _) ->
+ case findTask (taskId existing) imported of
+ Nothing -> existing -- No imported version, keep existing
+ Just importedTask ->
-- Use imported version if it's newer (based on updatedAt)
if taskUpdatedAt importedTask > taskUpdatedAt existing
then importedTask
diff --git a/Omni/Task/RaceTest.hs b/Omni/Task/RaceTest.hs
new file mode 100644
index 0000000..cfadaca
--- /dev/null
+++ b/Omni/Task/RaceTest.hs
@@ -0,0 +1,56 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+module Omni.Task.RaceTest where
+
+import Alpha
+import Control.Concurrent.Async (mapConcurrently)
+import Data.List (nub)
+import qualified Data.Text as T
+import Omni.Task.Core
+import qualified Omni.Test as Test
+import System.Directory (doesFileExist, removeFile)
+import System.Environment (setEnv)
+
+test :: Test.Tree
+test = Test.group "Omni.Task.Race" [raceTest]
+
+raceTest :: Test.Tree
+raceTest =
+ Test.unit "concurrent child creation (race condition)" <| do
+ -- Set up test mode
+ setEnv "TASK_TEST_MODE" "1"
+ setEnv "TASK_DB_PATH" ".tasks/race-test.jsonl"
+
+ -- Clean up test database
+ let testFile = ".tasks/race-test.jsonl"
+ exists <- doesFileExist testFile
+ when exists <| removeFile testFile
+ initTaskDb
+
+ -- Create a parent epic
+ parent <- createTask "Parent Epic" Epic Nothing Nothing P2 [] Nothing
+ let parentId = taskId parent
+
+ -- Create multiple children concurrently
+ -- We'll create 10 children in parallel
+ let childCount = 10
+ indices = [1 .. childCount]
+
+ -- Run concurrent creations
+ children <-
+ mapConcurrently
+ (\i -> createTask ("Child " <> tshow i) WorkTask (Just parentId) Nothing P2 [] Nothing)
+ indices
+
+ -- Check for duplicates in generated IDs
+ let ids = map taskId children
+ uniqueIds = nub ids
+
+ -- If there was a race condition, we'd have fewer unique IDs than children
+ length uniqueIds Test.@?= length children
+ length uniqueIds Test.@?= childCount
+
+ -- Verify IDs follow the pattern parentId.N
+ for_ ids <| \tid -> do
+    ((parentId <> ".") `T.isPrefixOf` tid) Test.@?= True
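A stricter closing assertion is possible because the write lock serialises the ten creations, so the suffixes should come out as exactly 1 through 10. A sketch (not part of this diff; it assumes sort is added to the Data.List import):

    sort ids Test.@?= sort (map (\i -> parentId <> "." <> tshow i) indices)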