Review Task

ID:t-817
Title:agentd persistent JSONL events can lag due to stdout buffering
Status:Review

Commit: 70daf843

commit 70daf843e2896896b95f29fc6a63dbfdd597e650
Author: Coder Agent <coder@agents.omni>
Date:   Mon Apr 20 15:23:42 2026

    fix(agentd): realtime persistent logs and trailing-line parse
    
    Flush JSON event/result output in the agent so persistent session
    logs are written promptly under systemd/tee. Also teach the
    session JSONL splitter to keep a trailing line when it is valid
    JSON (even without a final newline), and add regression tests.
    
    Task-Id: t-816
    
    Task-Id: t-817

diff --git a/Omni/Agent.hs b/Omni/Agent.hs
index 817dacd0..efc911c9 100755
--- a/Omni/Agent.hs
+++ b/Omni/Agent.hs
@@ -393,6 +393,7 @@ emitRunResult opts exitOnError result =
       now <- Time.getCurrentTime
       BL.putStr <| Aeson.encode (jsonResultEvent now result)
       TextIO.putStrLn "" -- newline to make it valid JSONL
+      IO.hFlush IO.stdout
     else case result of
       AgentSuccess response -> TextIO.putStrLn response
       AgentError err -> do
@@ -903,6 +904,7 @@ runAgentWithState signalState opts initialState = do
         when (verbosity >= 2 || optJson opts) <| do
           BL.putStr (Aeson.encode event)
           TextIO.putStrLn ""
+          IO.hFlush IO.stdout
       emitErrorWithState :: OpAgent.AgentState -> Text -> IO (AgentResult, OpAgent.AgentState)
       emitErrorWithState agentState err = do
         when (verbosity >= 2 || optJson opts) <| do
diff --git a/Omni/Agentd/Daemon.hs b/Omni/Agentd/Daemon.hs
index 9785819b..de92cc2d 100644
--- a/Omni/Agentd/Daemon.hs
+++ b/Omni/Agentd/Daemon.hs
@@ -2814,6 +2814,17 @@ test =
             (lines', consumed) = SessionLog.splitCompleteJsonlLines chunk
         length lines' Test.@=? 2
         consumed Test.@=? expectedConsumed,
+      Test.unit "session log splitter keeps trailing valid JSON line without newline" <| do
+        let chunk = TextEncoding.encodeUtf8 "{\"type\":\"a\"}\n{\"type\":\"b\"}"
+            expectedConsumed = fromIntegral (BS.length chunk)
+            (lines', consumed) = SessionLog.splitCompleteJsonlLines chunk
+        map SessionLog.decodeJsonlLine lines' Test.@=? ["{\"type\":\"a\"}", "{\"type\":\"b\"}"]
+        consumed Test.@=? expectedConsumed,
+      Test.unit "tail reader keeps trailing valid JSON line without newline" <| do
+        let content = TextEncoding.encodeUtf8 "{\"type\":\"alpha\"}\n{\"type\":\"beta\"}"
+        withTempJsonlFile content <| \path -> do
+          (lines', _nextOffset) <- SessionLog.readTailJsonlLines path 4096
+          map SessionLog.decodeJsonlLine lines' Test.@=? ["{\"type\":\"alpha\"}", "{\"type\":\"beta\"}"],
       Test.unit "tail reader drops leading partial line from truncated chunk" <| do
         let content = TextEncoding.encodeUtf8 "1234567890\nabc\nxyz\n"
         withTempJsonlFile content <| \path -> do
diff --git a/Omni/Agentd/SessionLog.hs b/Omni/Agentd/SessionLog.hs
index 556961aa..995348d5 100644
--- a/Omni/Agentd/SessionLog.hs
+++ b/Omni/Agentd/SessionLog.hs
@@ -10,6 +10,7 @@ module Omni.Agentd.SessionLog
 where
 
 import Alpha
+import qualified Data.Aeson as Aeson
 import qualified Data.ByteString as BS
 import qualified Data.Text.Encoding as TextEncoding
 import qualified Data.Text.Encoding.Error as TextEncodingError
@@ -62,12 +63,22 @@ splitCompleteJsonlLines chunk
        in (lines', fromIntegral (BS.length chunk))
   | otherwise =
       let segments = BS.split 10 chunk
-          complete = case reverse segments of
-            [] -> []
-            (_partial : restRev) -> reverse restRev
-          lines' = filter (not <. BS.null) complete
-          consumed = sum (map ((+ 1) <. BS.length) complete)
-       in (lines', fromIntegral consumed)
+          (complete, trailing) = case reverse segments of
+            [] -> ([], Nothing)
+            (partial : restRev) -> (reverse restRev, Just partial)
+          trailingComplete = case trailing of
+            Just partial | not (BS.null partial) && isValidJsonLine partial -> [partial]
+            _ -> []
+          lines' = filter (not <. BS.null) complete <> trailingComplete
+          consumedComplete = sum (map ((+ 1) <. BS.length) complete)
+          consumedTrailing = case trailingComplete of
+            [] -> 0
+            [line] -> BS.length line
+            _ -> 0
+       in (lines', fromIntegral (consumedComplete + consumedTrailing))
+
+isValidJsonLine :: BS.ByteString -> Bool
+isValidJsonLine line = isJust (Aeson.decodeStrict' line :: Maybe Aeson.Value)
 
 -- | Decode a JSONL line with lenient UTF-8 handling.
 decodeJsonlLine :: BS.ByteString -> Text