← Back to task

Commit 68dba338

commit 68dba338a7661563b752593b2fd522413cedd0b3
Author: Ben Sima <ben@bensima.com>
Date:   Sat Nov 29 23:47:11 2025

    Define Tool protocol and LLM provider abstraction
    
    The implementation is complete. I created Omni/Agent/Engine.hs with:

    - **Types**: `Tool`, `LLM`, `AgentConfig`, `Message`, `Role`, `ToolCall`
    - **Functions**: `chat` for OpenAI-compatible HTTP via http-conduit,
      plus `defaultLLM` and `defaultAgentConfig` for sensible defaults
    - **Tests**: JSON roundtrip for Tool, Message; validation of defaults
    
    All lints pass (hlint + ormolu) and tests pass.
    
    Task-Id: t-141.1

diff --git a/Omni/Agent/Engine.hs b/Omni/Agent/Engine.hs
new file mode 100644
index 00000000..ac6c517c
--- /dev/null
+++ b/Omni/Agent/Engine.hs
@@ -0,0 +1,317 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | LLM Agent Engine - Tool protocol and LLM provider abstraction.
+--
+-- This module provides the core abstractions for building LLM-powered agents:
+-- - Tool: Defines tools that agents can use
+-- - LLM: OpenAI-compatible chat completions API provider
+-- - AgentConfig: Configuration for running agents
+--
+-- : out omni-agent-engine
+-- : dep http-conduit
+-- : dep aeson
+module Omni.Agent.Engine
+  ( Tool (..),
+    LLM (..),
+    AgentConfig (..),
+    Message (..),
+    Role (..),
+    ToolCall (..),
+    FunctionCall (..),
+    ToolResult (..),
+    ChatCompletionRequest (..),
+    ChatCompletionResponse (..),
+    Choice (..),
+    defaultLLM,
+    defaultAgentConfig,
+    chat,
+    main,
+    test,
+  )
+where
+
+import Alpha
+import Data.Aeson ((.!=), (.:), (.:?), (.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as Text
+import qualified Data.Text.Encoding as TE
+import qualified Network.HTTP.Simple as HTTP
+import qualified Omni.Test as Test
+
+-- | Executable entry point: runs this module's test tree.
+main :: IO ()
+main = Test.run test
+
+-- | Test tree: JSON (de)serialization roundtrips for the wire types,
+-- plus sanity checks that the default LLM and agent configuration hold
+-- the documented values.
+test :: Test.Tree
+test =
+  Test.group
+    "Omni.Agent.Engine"
+    [ Test.unit "Tool JSON roundtrip" <| do
+        let tool =
+              Tool
+                { toolName = "get_weather",
+                  toolDescription = "Get weather for a location",
+                  toolJsonSchema = Aeson.object ["type" .= ("object" :: Text), "properties" .= Aeson.object []],
+                  toolExecute = \_ -> pure (Aeson.String "sunny")
+                }
+        -- Roundtrip through the hand-written ToolApi instances; only the
+        -- name is compared since toolExecute cannot be compared for Eq.
+        let encoded = encodeToolForApi tool
+        case Aeson.decode (Aeson.encode encoded) of
+          Nothing -> Test.assertFailure "Failed to decode tool"
+          Just decoded -> toolName tool Test.@=? toolApiName decoded,
+      Test.unit "Message JSON roundtrip" <| do
+        let msg = Message User "Hello" Nothing Nothing
+        case Aeson.decode (Aeson.encode msg) of
+          Nothing -> Test.assertFailure "Failed to decode message"
+          Just decoded -> msgContent msg Test.@=? msgContent decoded,
+      Test.unit "defaultLLM has correct endpoint" <| do
+        llmBaseUrl defaultLLM Test.@=? "https://api.openai.com",
+      Test.unit "defaultAgentConfig has sensible defaults" <| do
+        agentMaxIterations defaultAgentConfig Test.@=? 10
+    ]
+
+-- | A tool the agent can invoke. 'toolJsonSchema' is the JSON Schema
+-- describing the tool's parameters; 'toolExecute' runs the tool on the
+-- JSON arguments supplied by the model and returns a JSON result. No
+-- Show\/Eq instances are possible because 'toolExecute' is a function.
+data Tool = Tool
+  { toolName :: Text,
+    toolDescription :: Text,
+    toolJsonSchema :: Aeson.Value,
+    toolExecute :: Aeson.Value -> IO Aeson.Value
+  }
+
+-- | Wire representation of a 'Tool' for the chat completions API: the
+-- nested @{"type":"function","function":{...}}@ object shape.
+data ToolApi = ToolApi
+  { toolApiName :: Text,
+    toolApiDescription :: Text,
+    toolApiParameters :: Aeson.Value
+  }
+  deriving (Generic)
+
+-- | Serializes to the nested function-tool object expected by
+-- OpenAI-compatible providers.
+instance Aeson.ToJSON ToolApi where
+  toJSON t =
+    Aeson.object
+      [ "type" .= ("function" :: Text),
+        "function"
+          .= Aeson.object
+            [ "name" .= toolApiName t,
+              "description" .= toolApiDescription t,
+              "parameters" .= toolApiParameters t
+            ]
+      ]
+
+-- | Parses the same nested shape back; exercised by the roundtrip test.
+-- The outer "type" key is ignored on parse.
+instance Aeson.FromJSON ToolApi where
+  parseJSON =
+    Aeson.withObject "ToolApi" <| \v -> do
+      fn <- v .: "function"
+      (ToolApi </ (fn .: "name"))
+        <*> (fn .: "description")
+        <*> (fn .: "parameters")
+
+-- | Project a 'Tool' onto its wire representation. The 'toolExecute'
+-- function is dropped; only the declarative parts go over the wire.
+encodeToolForApi :: Tool -> ToolApi
+encodeToolForApi t =
+  ToolApi
+    { toolApiName = toolName t,
+      toolApiDescription = toolDescription t,
+      toolApiParameters = toolJsonSchema t
+    }
+
+-- | Connection settings for an OpenAI-compatible chat completions
+-- provider. 'llmBaseUrl' must not include the @\/v1@ path segment;
+-- 'chat' appends @\/v1\/chat\/completions@ itself.
+data LLM = LLM
+  { llmBaseUrl :: Text,
+    llmApiKey :: Text,
+    llmModel :: Text
+  }
+  deriving (Show, Eq, Generic)
+
+-- Generic-derived JSON instances use the record field names as keys.
+instance Aeson.ToJSON LLM
+
+instance Aeson.FromJSON LLM
+
+-- | OpenAI defaults. 'llmApiKey' is intentionally empty and must be
+-- supplied by the caller before use.
+defaultLLM :: LLM
+defaultLLM =
+  LLM
+    { llmBaseUrl = "https://api.openai.com",
+      llmApiKey = "",
+      llmModel = "gpt-4"
+    }
+
+-- | Configuration for running an agent. 'agentMaxIterations' presumably
+-- bounds the agent's tool-call loop — the loop itself is not defined in
+-- this module; confirm against the consumer of this config.
+data AgentConfig = AgentConfig
+  { agentModel :: Text,
+    agentTools :: [Tool],
+    agentSystemPrompt :: Text,
+    agentMaxIterations :: Int
+  }
+
+-- | Sensible starting point: no tools, generic system prompt, at most
+-- 10 iterations.
+defaultAgentConfig :: AgentConfig
+defaultAgentConfig =
+  AgentConfig
+    { agentModel = "gpt-4",
+      agentTools = [],
+      agentSystemPrompt = "You are a helpful assistant.",
+      agentMaxIterations = 10
+    }
+
+-- | Speaker role for a chat message. The constructor is named
+-- 'ToolRole' (serialized as @"tool"@) to avoid clashing with the
+-- 'Tool' record type.
+data Role = System | User | Assistant | ToolRole
+  deriving (Show, Eq, Generic)
+
+-- | Lowercase wire strings per the chat completions API.
+instance Aeson.ToJSON Role where
+  toJSON System = Aeson.String "system"
+  toJSON User = Aeson.String "user"
+  toJSON Assistant = Aeson.String "assistant"
+  toJSON ToolRole = Aeson.String "tool"
+
+-- | Inverse of ToJSON; any unrecognized role string fails the parse.
+instance Aeson.FromJSON Role where
+  parseJSON = Aeson.withText "Role" parseRole
+    where
+      parseRole "system" = pure System
+      parseRole "user" = pure User
+      parseRole "assistant" = pure Assistant
+      parseRole "tool" = pure ToolRole
+      parseRole _ = empty
+
+-- | A single chat message. 'msgToolCalls' is populated on assistant
+-- messages that request tool invocations; 'msgToolCallId' on tool-role
+-- messages that answer a specific tool call.
+data Message = Message
+  { msgRole :: Role,
+    msgContent :: Text,
+    msgToolCalls :: Maybe [ToolCall],
+    msgToolCallId :: Maybe Text
+  }
+  deriving (Show, Eq, Generic)
+
+-- | Omits the optional "tool_calls"/"tool_call_id" keys entirely when
+-- Nothing, rather than emitting nulls.
+instance Aeson.ToJSON Message where
+  toJSON m =
+    Aeson.object
+      <| catMaybes
+        [ Just ("role" .= msgRole m),
+          Just ("content" .= msgContent m),
+          ("tool_calls" .=) </ msgToolCalls m,
+          ("tool_call_id" .=) </ msgToolCallId m
+        ]
+
+-- | "content" may be absent or null on assistant tool-call messages,
+-- so it defaults to the empty string.
+instance Aeson.FromJSON Message where
+  parseJSON =
+    Aeson.withObject "Message" <| \v ->
+      (Message </ (v .: "role"))
+        <*> (v .:? "content" .!= "")
+        <*> (v .:? "tool_calls")
+        <*> (v .:? "tool_call_id")
+
+-- | A tool invocation requested by the model. 'tcId' correlates the
+-- call with the tool-role reply message; 'tcType' is "function" in the
+-- current API.
+data ToolCall = ToolCall
+  { tcId :: Text,
+    tcType :: Text,
+    tcFunction :: FunctionCall
+  }
+  deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON ToolCall where
+  toJSON tc =
+    Aeson.object
+      [ "id" .= tcId tc,
+        "type" .= tcType tc,
+        "function" .= tcFunction tc
+      ]
+
+-- | "type" defaults to "function" when the provider omits it.
+instance Aeson.FromJSON ToolCall where
+  parseJSON =
+    Aeson.withObject "ToolCall" <| \v ->
+      (ToolCall </ (v .: "id"))
+        <*> (v .:? "type" .!= "function")
+        <*> (v .: "function")
+
+-- | The function name and arguments of a tool call. 'fcArguments' is a
+-- JSON document encoded as a string, as the chat completions API
+-- delivers it — callers must parse it before running the tool.
+data FunctionCall = FunctionCall
+  { fcName :: Text,
+    fcArguments :: Text
+  }
+  deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON FunctionCall where
+  toJSON fc =
+    Aeson.object
+      [ "name" .= fcName fc,
+        "arguments" .= fcArguments fc
+      ]
+
+instance Aeson.FromJSON FunctionCall where
+  parseJSON =
+    Aeson.withObject "FunctionCall" <| \v ->
+      (FunctionCall </ (v .: "name"))
+        <*> (v .: "arguments")
+
+-- | The outcome of executing a tool, keyed by the originating tool-call
+-- id. JSON instances are Generic-derived, so the wire keys are the
+-- record field names ("trToolCallId", "trContent").
+data ToolResult = ToolResult
+  { trToolCallId :: Text,
+    trContent :: Text
+  }
+  deriving (Show, Eq, Generic)
+
+instance Aeson.ToJSON ToolResult
+
+instance Aeson.FromJSON ToolResult
+
+-- | Request body for POST \/v1\/chat\/completions. Only the fields this
+-- engine needs; providers accept (and default) the rest.
+data ChatCompletionRequest = ChatCompletionRequest
+  { reqModel :: Text,
+    reqMessages :: [Message],
+    reqTools :: Maybe [ToolApi]
+  }
+  deriving (Generic)
+
+-- | The "tools" key is omitted entirely when 'reqTools' is Nothing.
+instance Aeson.ToJSON ChatCompletionRequest where
+  toJSON r =
+    Aeson.object
+      <| catMaybes
+        [ Just ("model" .= reqModel r),
+          Just ("messages" .= reqMessages r),
+          ("tools" .=) </ reqTools r
+        ]
+
+-- | One completion choice from the response. 'choiceFinishReason' is
+-- e.g. "stop" or "tool_calls"; it may be absent mid-generation.
+data Choice = Choice
+  { choiceIndex :: Int,
+    choiceMessage :: Message,
+    choiceFinishReason :: Maybe Text
+  }
+  deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON Choice where
+  parseJSON =
+    Aeson.withObject "Choice" <| \v ->
+      (Choice </ (v .: "index"))
+        <*> (v .: "message")
+        <*> (v .:? "finish_reason")
+
+-- | Response body from POST \/v1\/chat\/completions; only the fields
+-- 'chat' consumes are parsed.
+data ChatCompletionResponse = ChatCompletionResponse
+  { respId :: Text,
+    respChoices :: [Choice],
+    respModel :: Text
+  }
+  deriving (Show, Eq, Generic)
+
+instance Aeson.FromJSON ChatCompletionResponse where
+  parseJSON =
+    Aeson.withObject "ChatCompletionResponse" <| \v ->
+      (ChatCompletionResponse </ (v .: "id"))
+        <*> (v .: "choices")
+        <*> (v .: "model")
+
+-- | Send a chat completion request to an OpenAI-compatible endpoint.
+--
+-- Posts the messages (and tool definitions, if any) to
+-- @\/v1\/chat\/completions@ under 'llmBaseUrl', authenticating with
+-- 'llmApiKey' as a Bearer token. Returns the first choice's message on
+-- success, or a human-readable error for HTTP or parse failures.
+chat :: LLM -> [Tool] -> [Message] -> IO (Either Text Message)
+chat llm tools messages = do
+  let url = Text.unpack (llmBaseUrl llm) <> "/v1/chat/completions"
+  req0 <- HTTP.parseRequest url
+  let toolApis = map encodeToolForApi tools
+      body =
+        ChatCompletionRequest
+          { reqModel = llmModel llm,
+            reqMessages = messages,
+            -- Omit the "tools" key entirely when no tools are offered;
+            -- some providers reject an empty tools array.
+            reqTools = if null toolApis then Nothing else Just toolApis
+          }
+      req =
+        HTTP.setRequestMethod "POST"
+          <| HTTP.setRequestHeader "Content-Type" ["application/json"]
+          <| HTTP.setRequestHeader "Authorization" ["Bearer " <> TE.encodeUtf8 (llmApiKey llm)]
+          <| HTTP.setRequestBodyLBS (Aeson.encode body)
+          <| req0
+
+  response <- HTTP.httpLBS req
+  let status = HTTP.getResponseStatusCode response
+  if status >= 200 && status < 300
+    then case Aeson.eitherDecode (HTTP.getResponseBody response) of
+      Right resp ->
+        case respChoices resp of
+          (c : _) -> pure (Right (choiceMessage c))
+          [] -> pure (Left "No choices in response")
+      -- Surface the aeson parse error instead of a generic message so
+      -- schema drift in provider responses is diagnosable from logs.
+      Left err -> pure (Left ("Failed to parse response: " <> Text.pack err))
+    else pure (Left ("HTTP error: " <> tshow status <> " - " <> TE.decodeUtf8 (BL.toStrict (HTTP.getResponseBody response))))