commit 6837d57fb5b106b25298a28c40b0b751e4df84d1
Author: Ben Sima <ben@bensima.com>
Date: Thu Jan 1 15:43:04 2026

Add agent CLI - Unix-native agentic task runner (t-319)

A simple, composable CLI for agentic tasks that replaces pi/amp usage
with a Haskell-native tool integrated with Omni infrastructure.

Features:
- Multi-provider support: anthropic, openrouter, ollama (auto-detect)
- Direct Anthropic API support (new provider in Provider.hs)
- Skill loading from file path (--skill=PATH)
- Stdin piping with prompt combination
- Verbose mode shows tool calls (--verbose)
- JSON output mode (--json)
- Dry-run mode (--dry-run)
- Cost and iteration limits (--max-cost, --max-iter)

Usage:
agent "what's my public IP"
echo data | agent "analyze this"
agent --skill=path/to/SKILL.md "do something"
agent --provider=ollama --model=llama3 "hello"
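
JSON output follows the ToJSON instance added below (response text varies):
  agent --json "say hi"
  {"success": true, "response": "..."}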

Task-Id: t-319

diff --git a/Omni/Agent.hs b/Omni/Agent.hs
index 98fc7667..a7eb1bf2 100644
--- a/Omni/Agent.hs
+++ b/Omni/Agent.hs
@@ -1,48 +1,329 @@
+#!/usr/bin/env run.sh
{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
--- | Agent system entry point and combined test runner.
+-- | Agent CLI - Unix-native agentic task runner.
--
--- This module provides the main entry point for the agent system
--- and re-exports core types from sub-modules.
+-- Usage:
+-- agent [OPTIONS] [PROMPT]
+-- agent --skill=PATH "do something"
+-- echo "data" | agent "analyze this"
--
--- : out omni-agent
+-- A simple, composable CLI for agentic tasks. Replaces pi/amp usage
+-- with a Haskell-native tool that integrates with Omni infrastructure.
+--
+-- : out agent
-- : dep aeson
+-- : dep docopt
+-- : dep http-conduit
+-- : run sudo
module Omni.Agent
- ( -- * Engine
- module Omni.Agent.Engine,
-
- -- * Tools
- module Omni.Agent.Tools,
-
- -- * Test
- main,
+ ( main,
test,
+ runAgent,
+ AgentOptions (..),
+ AgentResult (..),
)
where
import Alpha
-import Omni.Agent.Engine hiding (main, test)
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.Encode.Pretty as AesonPretty
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as Text
+import qualified Data.Text.IO as TextIO
import qualified Omni.Agent.Engine as Engine
-import Omni.Agent.Tools hiding (ToolResult, main, test)
+import qualified Omni.Agent.Provider as Provider
import qualified Omni.Agent.Tools as Tools
+import qualified Omni.Cli as Cli
import qualified Omni.Test as Test
+import System.Directory (doesFileExist)
+import qualified System.Console.Docopt as Docopt
+import qualified System.Environment as Environment
+import qualified System.Exit as Exit
+import qualified System.IO as IO
main :: IO ()
-main = Test.run test
+main = Cli.main plan
+
+plan :: Cli.Plan ()
+plan =
+ Cli.Plan
+ { Cli.help = help,
+ Cli.test = test,
+ Cli.move = run,
+ Cli.tidy = \_ -> pure ()
+ }
+
+help :: Cli.Docopt
+help =
+ [Cli.docopt|
+agent - Unix-native agentic task runner
+
+Usage:
+ agent test
+ agent (-h | --help)
+ agent (-v | --version)
+ agent [options] [<prompt>]
+
+Options:
+ --provider=NAME LLM provider: anthropic, openrouter, ollama [default: auto]
+ --model=MODEL Model override
+ --skill=PATH Load skill from file path
+ --max-cost=CENTS Cost limit in cents [default: 50]
+ --max-iter=N Iteration limit [default: 20]
+ --verbose Show tool calls to stderr
+ --json Output structured JSON
+ --dry-run Show config, don't run
+ -h --help Show this help
+ -v --version Show version
+
+Input:
+ <prompt> From argument; piped stdin is used alone or appended as input data
+
+Output:
+ Final response to stdout
+ Tool calls to stderr (if --verbose)
+ Exit 0 on success, 1 on failure
+
+Examples:
+ agent "what's my public IP"
+ cat file.py | agent "review this code"
+ agent --skill=path/to/SKILL.md "do something"
+ agent --provider=ollama --model=llama3 "hello"
+
+Environment:
+ ANTHROPIC_API_KEY - for anthropic provider
+ OPENROUTER_API_KEY - for openrouter provider
+ KAGI_API_KEY - for web search tool
+|]
test :: Test.Tree
test =
Test.group
"Omni.Agent"
- [ Engine.test,
- Tools.test,
- Test.unit "Engine and Tools integrate correctly" <| do
- let tools = Tools.allTools
- length tools Test.@=? 5
- let config =
- Engine.defaultAgentConfig
- { Engine.agentTools = tools
- }
- Engine.agentMaxIterations config Test.@=? 10
+ [ Test.unit "defaultOptions has sensible defaults" <| do
+ let opts = defaultOptions
+ optMaxCost opts Test.@=? 50
+ optMaxIter opts Test.@=? 20
+ optProvider opts Test.@=? "auto"
]
+
+run :: Docopt.Arguments -> IO ()
+run args = do
+ -- Parse options
+ opts <- parseOptions args
+
+ -- Check for dry-run
+ when (optDryRun opts) <| do
+ TextIO.hPutStrLn IO.stderr <| "Dry run - would use options:"
+ TextIO.hPutStrLn IO.stderr <| " Provider: " <> optProvider opts
+ TextIO.hPutStrLn IO.stderr <| " Model: " <> fromMaybe "default" (optModel opts)
+ TextIO.hPutStrLn IO.stderr <| " Skill: " <> fromMaybe "none" (optSkill opts)
+ TextIO.hPutStrLn IO.stderr <| " Max cost: " <> tshow (optMaxCost opts) <> " cents"
+ TextIO.hPutStrLn IO.stderr <| " Max iter: " <> tshow (optMaxIter opts)
+ TextIO.hPutStrLn IO.stderr <| " Prompt: " <> Text.take 100 (optPrompt opts)
+ Exit.exitSuccess
+
+ -- Run agent
+ result <- runAgent opts
+
+ -- Output result
+ if optJson opts
+ then BL.putStr <| AesonPretty.encodePretty result
+ else case result of
+ AgentSuccess response -> TextIO.putStrLn response
+ AgentError err -> do
+ TextIO.hPutStrLn IO.stderr <| "Error: " <> err
+ Exit.exitWith (Exit.ExitFailure 1)
+
+-- | Agent CLI options
+data AgentOptions = AgentOptions
+ { optProvider :: Text,
+ optModel :: Maybe Text,
+ optSkill :: Maybe Text,
+ optMaxCost :: Int,
+ optMaxIter :: Int,
+ optVerbose :: Bool,
+ optJson :: Bool,
+ optDryRun :: Bool,
+ optPrompt :: Text
+ }
+ deriving (Show, Eq)
+
+defaultOptions :: AgentOptions
+defaultOptions =
+ AgentOptions
+ { optProvider = "auto",
+ optModel = Nothing,
+ optSkill = Nothing,
+ optMaxCost = 50,
+ optMaxIter = 20,
+ optVerbose = False,
+ optJson = False,
+ optDryRun = False,
+ optPrompt = ""
+ }
+
+-- | Agent execution result
+data AgentResult
+ = AgentSuccess Text
+ | AgentError Text
+ deriving (Show, Eq)
+
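+-- Per this instance, @AgentSuccess "hi"@ encodes as
+-- {"success":true,"response":"hi"} and @AgentError e@ as
+-- {"success":false,"error":...}.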
+instance Aeson.ToJSON AgentResult where
+ toJSON (AgentSuccess response) =
+ Aeson.object
+ [ "success" Aeson..= True,
+ "response" Aeson..= response
+ ]
+ toJSON (AgentError err) =
+ Aeson.object
+ [ "success" Aeson..= False,
+ "error" Aeson..= err
+ ]
+
+-- | Parse CLI options from docopt arguments
+parseOptions :: Docopt.Arguments -> IO AgentOptions
+parseOptions args = do
+ -- Get prompt from arg or stdin (docopt returns String, convert to Text)
+ let argPrompt = maybe "" Text.pack <| Docopt.getArg args (Docopt.argument "prompt")
+ -- Always read stdin if not a terminal (allows piping data to prompts)
+ stdinContent <- do
+ isTerminal <- IO.hIsTerminalDevice IO.stdin
+ if isTerminal
+ then pure ""
+ else TextIO.getContents
+ -- Combine: if we have stdin, append it to prompt; if no prompt, use stdin
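+ -- e.g. prompt "review this" + stdin "x = 1" becomes
+ -- "review this\n\n--- Input Data ---\nx = 1"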
+ let prompt =
+ if Text.null stdinContent
+ then argPrompt
+ else if Text.null argPrompt
+ then stdinContent
+ else argPrompt <> "\n\n--- Input Data ---\n" <> stdinContent
+
+ pure
+ AgentOptions
+ { optProvider = maybe "auto" Text.pack <| Docopt.getArg args (Docopt.longOption "provider"),
+ optModel = Text.pack </ Docopt.getArg args (Docopt.longOption "model"),
+ optSkill = Text.pack </ Docopt.getArg args (Docopt.longOption "skill"),
+ optMaxCost = fromMaybe 50 <| (Docopt.getArg args (Docopt.longOption "max-cost") +> readMaybe),
+ optMaxIter = fromMaybe 20 <| (Docopt.getArg args (Docopt.longOption "max-iter") +> readMaybe),
+ optVerbose = args `Cli.has` Docopt.longOption "verbose",
+ optJson = args `Cli.has` Docopt.longOption "json",
+ optDryRun = args `Cli.has` Docopt.longOption "dry-run",
+ optPrompt = Text.strip prompt
+ }
+
+-- | Run the agent with given options
+runAgent :: AgentOptions -> IO AgentResult
+runAgent opts = do
+ -- Validate prompt
+ when (Text.null (optPrompt opts)) <| do
+ TextIO.hPutStrLn IO.stderr "Error: No prompt provided"
+ Exit.exitWith (Exit.ExitFailure 1)
+
+ -- Resolve provider
+ provider <- resolveProvider (optProvider opts) (optModel opts)
+ case provider of
+ Left err -> pure <| AgentError err
+ Right prov -> do
+ -- Load skill if specified (read file directly since path is provided)
+ skillPrompt <- case optSkill opts of
+ Nothing -> pure ""
+ Just path -> do
+ exists <- doesFileExist (Text.unpack path)
+ if exists
+ then TextIO.readFile (Text.unpack path)
+ else do
+ TextIO.hPutStrLn IO.stderr <| "Warning: Skill file not found: " <> path
+ pure ""
+
+ -- Build system prompt
+ let systemPrompt =
+ Text.unlines
+ [ "You are a command-line agent. You help with tasks by reading files, running commands, and making edits.",
+ "",
+ "Be concise. Output only what's needed.",
+ "",
+ if Text.null skillPrompt
+ then ""
+ else "## Skill Instructions\n\n" <> skillPrompt,
+ "",
+ "When you're done, provide a clear final answer."
+ ]
+
+ -- Build configs
+ let agentConfig =
+ Engine.defaultAgentConfig
+ { Engine.agentSystemPrompt = systemPrompt,
+ Engine.agentTools = Tools.allTools,
+ Engine.agentMaxIterations = optMaxIter opts
+ }
+ engineConfig =
+ Engine.defaultEngineConfig
+ { Engine.engineOnActivity =
+ if optVerbose opts
+ then \msg -> TextIO.hPutStrLn IO.stderr <| "[agent] " <> msg
+ else \_ -> pure (),
+ Engine.engineOnToolCall =
+ if optVerbose opts
+ then \name args -> TextIO.hPutStrLn IO.stderr <| "[tool] " <> name <> " " <> Text.take 100 args
+ else \_ _ -> pure (),
+ Engine.engineOnToolResult =
+ if optVerbose opts
+ then \name ok result -> TextIO.hPutStrLn IO.stderr <| "[result] " <> name <> " " <> (if ok then "ok" else "err") <> " " <> Text.take 100 result
+ else \_ _ _ -> pure ()
+ }
+
+ -- Run agent
+ result <- Engine.runAgentWithProvider engineConfig prov agentConfig (optPrompt opts)
+ case result of
+ Left err -> pure <| AgentError err
+ Right response -> pure <| AgentSuccess (Engine.resultFinalMessage response)
+
+-- | Resolve provider from name and optional model
+resolveProvider :: Text -> Maybe Text -> IO (Either Text Provider.Provider)
+resolveProvider providerName mModel = do
+ case providerName of
+ "auto" -> resolveAutoProvider mModel
+ "anthropic" -> resolveAnthropicProvider mModel
+ "openrouter" -> resolveOpenRouterProvider mModel
+ "ollama" -> pure <| Right <| Provider.ollamaProvider (fromMaybe "llama3" mModel)
+ other -> pure <| Left <| "Unknown provider: " <> other
+
+-- | Auto-detect provider from available API keys
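+-- Precedence: ANTHROPIC_API_KEY, then OPENROUTER_API_KEY, then local Ollama.
+-- The "[agent] Using ..." notices go to stderr so stdout stays pipeable.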
+resolveAutoProvider :: Maybe Text -> IO (Either Text Provider.Provider)
+resolveAutoProvider mModel = do
+ -- Try providers in order of preference
+ anthropicKey <- Environment.lookupEnv "ANTHROPIC_API_KEY"
+ openrouterKey <- Environment.lookupEnv "OPENROUTER_API_KEY"
+
+ case (anthropicKey, openrouterKey) of
+ (Just key, _) -> do
+ TextIO.hPutStrLn IO.stderr "[agent] Using Anthropic provider"
+ pure <| Right <| Provider.anthropicProvider (Text.pack key) (fromMaybe "claude-sonnet-4-20250514" mModel)
+ (_, Just key) -> do
+ TextIO.hPutStrLn IO.stderr "[agent] Using OpenRouter provider"
+ pure <| Right <| Provider.openRouterProvider (Text.pack key) (fromMaybe "anthropic/claude-sonnet-4" mModel)
+ _ -> do
+ -- Try ollama as fallback
+ TextIO.hPutStrLn IO.stderr "[agent] No API keys found, trying Ollama"
+ pure <| Right <| Provider.ollamaProvider (fromMaybe "llama3" mModel)
+
+-- | Resolve Anthropic provider
+resolveAnthropicProvider :: Maybe Text -> IO (Either Text Provider.Provider)
+resolveAnthropicProvider mModel = do
+ mKey <- Environment.lookupEnv "ANTHROPIC_API_KEY"
+ case mKey of
+ Nothing -> pure <| Left "ANTHROPIC_API_KEY not set"
+ Just key -> pure <| Right <| Provider.anthropicProvider (Text.pack key) (fromMaybe "claude-sonnet-4-20250514" mModel)
+
+-- | Resolve OpenRouter provider
+resolveOpenRouterProvider :: Maybe Text -> IO (Either Text Provider.Provider)
+resolveOpenRouterProvider mModel = do
+ mKey <- Environment.lookupEnv "OPENROUTER_API_KEY"
+ case mKey of
+ Nothing -> pure <| Left "OPENROUTER_API_KEY not set"
+ Just key -> pure <| Right <| Provider.openRouterProvider (Text.pack key) (fromMaybe "anthropic/claude-sonnet-4" mModel)
diff --git a/Omni/Agent/Engine.hs b/Omni/Agent/Engine.hs
index 9f930fd6..611fb101 100644
--- a/Omni/Agent/Engine.hs
+++ b/Omni/Agent/Engine.hs
@@ -1065,6 +1065,7 @@ runAgentWithProvider engineCfg provider agentCfg userPrompt = do
getProviderModel :: Provider.Provider -> Text
getProviderModel (Provider.OpenRouter cfg) = Provider.providerModel cfg
getProviderModel (Provider.Ollama cfg) = Provider.providerModel cfg
+ getProviderModel (Provider.Anthropic cfg) = Provider.providerModel cfg
getProviderModel (Provider.AmpCLI _) = "amp"
updateProviderToolCallCounts :: Map.Map Text Int -> [Provider.ToolCall] -> Map.Map Text Int
@@ -1272,6 +1273,7 @@ runAgentWithProviderStreaming engineCfg provider agentCfg userPrompt onStreamChu
getProviderModelStreaming :: Provider.Provider -> Text
getProviderModelStreaming (Provider.OpenRouter cfg) = Provider.providerModel cfg
getProviderModelStreaming (Provider.Ollama cfg) = Provider.providerModel cfg
+ getProviderModelStreaming (Provider.Anthropic cfg) = Provider.providerModel cfg
getProviderModelStreaming (Provider.AmpCLI _) = "amp"
updateToolCallCountsStreaming :: Map.Map Text Int -> [Provider.ToolCall] -> Map.Map Text Int
diff --git a/Omni/Agent/Provider.hs b/Omni/Agent/Provider.hs
index db30e5fa..19a0b783 100644
--- a/Omni/Agent/Provider.hs
+++ b/Omni/Agent/Provider.hs
@@ -26,8 +26,14 @@ module Omni.Agent.Provider
Usage (..),
ToolApi (..),
StreamChunk (..),
+ -- * Provider constructors
+ openRouterProvider,
+ ollamaProvider,
+ anthropicProvider,
+ -- * Legacy defaults
defaultOpenRouter,
defaultOllama,
+ -- * Chat functions
chat,
chatWithUsage,
chatStream,
@@ -115,9 +121,43 @@ retryWithBackoff retriesLeft backoff action
data Provider
= OpenRouter ProviderConfig
| Ollama ProviderConfig
+ | Anthropic ProviderConfig
| AmpCLI FilePath
deriving (Show, Eq, Generic)
+-- | Create an OpenRouter provider
+openRouterProvider :: Text -> Text -> Provider
+openRouterProvider apiKey model =
+ OpenRouter
+ ProviderConfig
+ { providerBaseUrl = "https://openrouter.ai/api/v1",
+ providerApiKey = apiKey,
+ providerModel = model,
+ providerExtraHeaders = []
+ }
+
+-- | Create an Ollama provider
+ollamaProvider :: Text -> Provider
+ollamaProvider model =
+ Ollama
+ ProviderConfig
+ { providerBaseUrl = "http://localhost:11434",
+ providerApiKey = "",
+ providerModel = model,
+ providerExtraHeaders = []
+ }
+
+-- | Create an Anthropic provider (direct API)
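+-- Example (hypothetical key): anthropicProvider "sk-ant-..." "claude-sonnet-4-20250514"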
+anthropicProvider :: Text -> Text -> Provider
+anthropicProvider apiKey model =
+ Anthropic
+ ProviderConfig
+ { providerBaseUrl = "https://api.anthropic.com",
+ providerApiKey = apiKey,
+ providerModel = model,
+ providerExtraHeaders = []
+ }
+
data ProviderConfig = ProviderConfig
{ providerBaseUrl :: Text,
providerApiKey :: Text,
@@ -346,6 +386,7 @@ chat provider tools messages = do
chatWithUsage :: Provider -> [ToolApi] -> [Message] -> IO (Either Text ChatResult)
chatWithUsage (OpenRouter cfg) tools messages = chatOpenAI cfg tools messages
chatWithUsage (Ollama cfg) tools messages = chatOllama cfg tools messages
+chatWithUsage (Anthropic cfg) tools messages = chatAnthropic cfg tools messages
chatWithUsage (AmpCLI _promptFile) _tools _messages = do
pure (Left "Amp CLI provider not yet implemented")
@@ -410,6 +451,141 @@ chatOllama cfg tools messages = do
Nothing -> pure (Left ("Failed to parse Ollama response: " <> TE.decodeUtf8 (BL.toStrict (HTTP.getResponseBody response))))
else pure (Left ("HTTP error: " <> tshow status <> " - " <> TE.decodeUtf8 (BL.toStrict (HTTP.getResponseBody response))))
+-- | Chat via the Anthropic Messages API.
+-- Unlike the OpenAI-style providers, Anthropic takes the system prompt as a
+-- separate top-level parameter and returns content blocks, not a message string.
+chatAnthropic :: ProviderConfig -> [ToolApi] -> [Message] -> IO (Either Text ChatResult)
+chatAnthropic cfg tools messages = do
+ let url = Text.unpack (providerBaseUrl cfg) <> "/v1/messages"
+ req0 <- HTTP.parseRequest url
+
+ -- Anthropic wants system message separate, not in messages array
+ let (systemMsg, userMsgs) = extractSystemMessage messages
+ -- Convert tool API to Anthropic format
+ anthropicTools = map convertToolToAnthropic tools
+ body =
+ Aeson.object <|
+ [ "model" .= providerModel cfg,
+ "max_tokens" .= (8192 :: Int),
+ "messages" .= map convertMessageToAnthropic userMsgs
+ ]
+ ++ ["system" .= systemMsg | not (Text.null systemMsg)]
+ ++ ["tools" .= anthropicTools | not (null tools)]
+ req =
+ HTTP.setRequestMethod "POST"
+ <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+ <| HTTP.setRequestHeader "x-api-key" [TE.encodeUtf8 (providerApiKey cfg)]
+ <| HTTP.setRequestHeader "anthropic-version" ["2023-06-01"]
+ <| HTTP.setRequestBodyLBS (Aeson.encode body)
+ <| req0
+
+ retryWithBackoff maxRetries initialBackoffMicros <| do
+ response <- HTTP.httpLBS req
+ let status = HTTP.getResponseStatusCode response
+ respBody = HTTP.getResponseBody response
+ if status >= 200 && status < 300
+ then parseAnthropicResponse respBody
+ else pure (Left ("HTTP error: " <> tshow status <> " - " <> TE.decodeUtf8 (BL.toStrict respBody)))
+
+-- | Extract system message from message list
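+-- All System messages are concatenated (joined by blank lines) and the
+-- remaining messages pass through in order, e.g. two system messages with
+-- contents "a" and "b" yield ("a\n\nb", rest).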
+extractSystemMessage :: [Message] -> (Text, [Message])
+extractSystemMessage msgs =
+ let systemMsgs = [msgContent m | m <- msgs, msgRole m == System]
+ otherMsgs = [m | m <- msgs, msgRole m /= System]
+ in (Text.intercalate "\n\n" systemMsgs, otherMsgs)
+
+-- | Convert our Message to Anthropic format
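+-- Note: the Messages API has no tool role; tool results are sent as a
+-- user message containing a tool_result content block, as below.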
+convertMessageToAnthropic :: Message -> Aeson.Value
+convertMessageToAnthropic msg =
+ case msgRole msg of
+ User -> Aeson.object ["role" .= ("user" :: Text), "content" .= msgContent msg]
+ Assistant ->
+ case msgToolCalls msg of
+ Just tcs -> Aeson.object
+ [ "role" .= ("assistant" :: Text),
+ "content" .= map toolCallToAnthropicContent tcs
+ ]
+ Nothing -> Aeson.object ["role" .= ("assistant" :: Text), "content" .= msgContent msg]
+ ToolRole ->
+ Aeson.object
+ [ "role" .= ("user" :: Text),
+ "content" .= [Aeson.object
+ [ "type" .= ("tool_result" :: Text),
+ "tool_use_id" .= fromMaybe "" (msgToolCallId msg),
+ "content" .= msgContent msg
+ ]]
+ ]
+ System -> Aeson.object ["role" .= ("user" :: Text), "content" .= msgContent msg] -- shouldn't happen
+
+-- | Convert tool call to Anthropic content block
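+-- fcArguments carries the arguments as a JSON string, so it is re-decoded
+-- into a Value for "input"; if decoding fails, "input" encodes as null.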
+toolCallToAnthropicContent :: ToolCall -> Aeson.Value
+toolCallToAnthropicContent tc =
+ Aeson.object
+ [ "type" .= ("tool_use" :: Text),
+ "id" .= tcId tc,
+ "name" .= fcName (tcFunction tc),
+ "input" .= (Aeson.decode (BL.fromStrict (TE.encodeUtf8 (fcArguments (tcFunction tc)))) :: Maybe Aeson.Value)
+ ]
+
+-- | Convert our ToolApi to Anthropic format
+convertToolToAnthropic :: ToolApi -> Aeson.Value
+convertToolToAnthropic tool =
+ Aeson.object
+ [ "name" .= toolApiName tool,
+ "description" .= toolApiDescription tool,
+ "input_schema" .= toolApiParameters tool
+ ]
+
+-- | Parse Anthropic response
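+-- Expected response shape (abridged):
+-- {"content": [{"type":"text","text":"..."},
+-- {"type":"tool_use","id":"...","name":"...","input":{...}}],
+-- "usage": {"input_tokens":N,"output_tokens":M}}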
+parseAnthropicResponse :: BL.ByteString -> IO (Either Text ChatResult)
+parseAnthropicResponse body =
+ case Aeson.decode body of
+ Nothing -> pure (Left ("Failed to parse Anthropic response: " <> TE.decodeUtf8 (BL.toStrict body)))
+ Just (Aeson.Object obj) -> do
+ -- Extract content blocks
+ let contentBlocks = case KeyMap.lookup "content" obj of
+ Just (Aeson.Array arr) -> toList arr
+ _ -> []
+ -- Extract text content
+ textContent = Text.concat [t | Aeson.Object b <- contentBlocks,
+ KeyMap.lookup "type" b == Just (Aeson.String "text"),
+ Just (Aeson.String t) <- [KeyMap.lookup "text" b]]
+ -- Extract tool use blocks
+ toolCalls = [ToolCall
+ { tcId = fromMaybe "" (getString "id" b),
+ tcType = "function",
+ tcFunction = FunctionCall
+ { fcName = fromMaybe "" (getString "name" b),
+ fcArguments = maybe "" (TE.decodeUtf8 <. BL.toStrict <. Aeson.encode) (KeyMap.lookup "input" b)
+ }
+ }
+ | Aeson.Object b <- contentBlocks,
+ KeyMap.lookup "type" b == Just (Aeson.String "tool_use")]
+ -- Extract usage
+ usage = case KeyMap.lookup "usage" obj of
+ Just (Aeson.Object u) ->
+ Just Usage
+ { usagePromptTokens = getInt "input_tokens" u,
+ usageCompletionTokens = getInt "output_tokens" u,
+ usageTotalTokens = getInt "input_tokens" u + getInt "output_tokens" u,
+ usageCost = Nothing
+ }
+ _ -> Nothing
+ msg = Message
+ { msgRole = Assistant,
+ msgContent = textContent,
+ msgToolCalls = if null toolCalls then Nothing else Just toolCalls,
+ msgToolCallId = Nothing
+ }
+ pure (Right (ChatResult msg usage))
+ Just _ -> pure (Left "Expected object response from Anthropic")
+ where
+ getString key obj = case KeyMap.lookup key obj of
+ Just (Aeson.String s) -> Just s
+ _ -> Nothing
+ getInt key obj = case KeyMap.lookup key obj of
+ Just (Aeson.Number n) -> round n
+ _ -> 0
+
parseOllamaResponse :: Aeson.Value -> IO (Either Text ChatResult)
parseOllamaResponse val =
case val of
@@ -458,6 +634,7 @@ data ToolCallDelta = ToolCallDelta
chatStream :: Provider -> [ToolApi] -> [Message] -> (StreamChunk -> IO ()) -> IO (Either Text ChatResult)
chatStream (OpenRouter cfg) tools messages onChunk = chatStreamOpenAI cfg tools messages onChunk
chatStream (Ollama _cfg) _tools _messages _onChunk = pure (Left "Streaming not implemented for Ollama")
+chatStream (Anthropic _cfg) _tools _messages _onChunk = pure (Left "Streaming not implemented for Anthropic")
chatStream (AmpCLI _) _tools _messages _onChunk = pure (Left "Streaming not implemented for AmpCLI")
chatStreamOpenAI :: ProviderConfig -> [ToolApi] -> [Message] -> (StreamChunk -> IO ()) -> IO (Either Text ChatResult)