Skip to content

Commit

Permalink
Merge pull request #31 from imclerran/update-docs
Browse files Browse the repository at this point in the history
Update documentation
  • Loading branch information
imclerran authored Dec 28, 2024
2 parents 70abcb0 + 206299f commit b319473
Show file tree
Hide file tree
Showing 4 changed files with 80 additions and 26 deletions.
15 changes: 8 additions & 7 deletions package/Chat.roc
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
## The Chat module contains the functions and types needed to use the ChatML formatted chat completion API. It includes the Message type, ChatRequestBody and ChatResponseBody types, and various functions for creating and handling API requests.
module [
ChatRequestBody,
ChatResponseBody,
Expand Down Expand Up @@ -29,9 +30,14 @@ import Shared exposing [
listToOption,
]


Client : Client.Client

## Initialize the OpenRouter API client with the required API key. All parameters besides apiKey are completely optional, and may be set during initialization, assigned later, or left as their defaults.
## ```
## client = Chat.initClient { apiKey: "your_openrouter_api_key" }
## ```
initClient = Client.init

## The OpenAI ChatML standard message used to query the AI model.
Message : {
role : Str,
Expand Down Expand Up @@ -136,10 +142,6 @@ DecodeChatResponseBody : {
},
}

## Initialize the OpenRouter API client with the required API key.
## Other parameters may optionally be set during initialization, or assigned later using the Client module setters.
initClient = Client.init

## Create a request object to be sent with basic-cli's Http.send using ChatML messages
buildHttpRequest : Client, List Message, { toolChoice ? ToolChoice } -> RequestObject
buildHttpRequest = \client, messages, { toolChoice ? Auto } ->
Expand Down Expand Up @@ -254,8 +256,7 @@ encodeRequestBody = \body ->
}
)

## Inject the messages list into the request body, by encoding
## the message to the correct format based on the cached flag.
## Inject the messages list into the request body, by encoding the message to the correct format based on the cached flag.
injectMessages : List U8, List Message -> List U8
injectMessages = \bodyBytes, messages ->
injectAt = List.walkWithIndexUntil bodyBytes 0 \_, _, i ->
Expand Down
48 changes: 37 additions & 11 deletions package/Client.roc
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
## Client for the OpenRouter.ai API. This module contains the Client object, which stores configuration for openrouter.ai API requests, as well as the init function, and functions to set various configuration options.
module [
Client,
init,
Expand Down Expand Up @@ -28,6 +29,29 @@ import Shared exposing [TimeoutConfig]
import InternalTools exposing [Tool]

## The record used to store configuration for the OpenRouter API client.
## ```
## Client : {
## apiKey : Str,
## model : Str,
## url : Str,
## requestTimeout : TimeoutConfig,
## providerOrder : Option (List Str),
## temperature : F32,
## topP : F32,
## topK : U64,
## frequencyPenalty : F32,
## presencePenalty : F32,
## repetitionPenalty : F32,
## minP : F32,
## topA : F32,
## seed : Option U64,
## maxTokens : Option U64,
## responseFormat : { type : Str },
## models : Option (List Str),
## route : Option Str,
## tools: Option (List Tool),
## }
## ```
Client : {
apiKey : Str,
model : Str,
Expand All @@ -50,11 +74,16 @@ Client : {
tools: Option (List Tool),
}

## Default model to use for API requests. This defaults to the openrouter/auto model router.
defaultModel = "openrouter/auto"

## The default URL for the OpenRouter API. Currently the only supported URL is the openrouter.ai API url.
defaultUrl = "https://openrouter.ai/api/v1/chat/completions"

## Initialize the OpenRouter API client with the required API key.
## Other parameters may optionally be set during initialization, or assigned later.
## Initialize the OpenRouter API client with the required API key. All parameters besides apiKey are completely optional, and may be set during initialization, assigned later, or left as their defaults.
## ```
## client = Client.init { apiKey: "your_openrouter_api_key" }
## ```
init :
{
apiKey : Str,
Expand Down Expand Up @@ -112,8 +141,7 @@ init = \{ apiKey, model ? defaultModel, url ? defaultUrl, requestTimeout ? NoTim
setModel : Client, Str -> Client
setModel = \client, model -> { client & model }

## Set the URL to be used for the API requests.
## (Change with care - while the openrouter.ai API is similar to OpenAI's, there may be some unexpected differences.)
## Set the URL to be used for the API requests. (Change with care - while the openrouter.ai API is similar to OpenAI's, there may be some unexpected differences.)
setUrl : Client, Str -> Client
setUrl = \client, url -> { client & url }

Expand Down Expand Up @@ -180,8 +208,7 @@ setMinP = \client, minP -> { client & minP }
setTopA : Client, F32 -> Client
setTopA = \client, topA -> { client & topA }

## Set the seed for the API requests.
## OpenAI models only
## Set the seed for the API requests. (This is for OpenAI models only)
## Default: 0 - random seed
setSeed : Client, U64 -> Client
setSeed = \client, seed ->
Expand Down Expand Up @@ -209,8 +236,7 @@ setResponseFormat = \client, responseFormat ->
responseFormatRecord = { type: responseFormat }
{ client & responseFormat: responseFormatRecord }

## Set the models for the auto router to choose from.
## If not set, the auto router will choose from a small selection of the top performing models.
## Set the models for the auto router to choose from. If not set, the auto router will choose from a small selection of the top performing models.
## https://openrouter.ai/models/openrouter/auto
## Default: []
setModels : Client, List Str -> Client
Expand All @@ -225,9 +251,7 @@ setModels = \client, models ->
Option.some models
{ client & models: modelsOption }

## Set the parameter which determines whether to use a fallback model if the primary model fails.
## OpenRouter will use the models provided in models, or if no models are provided,
## will try a similarly priced model to the primary.
## Set the parameter which determines whether to use a fallback model if the primary model fails. OpenRouter will use the models provided in models, or if no models are provided, will try a similarly priced model to the primary.
## https://openrouter.ai/docs#model-routing
## Default: NoFallback
setRoute : Client, [UseFallback, NoFallback] -> Client
Expand All @@ -238,6 +262,8 @@ setRoute = \client, route ->
UseFallback -> Option.some "fallback"
{ client & route: routeOption }

## Set the list of tools available for models to use to handle requests.
## Default: []
setTools : Client, List Tool -> Client
setTools = \client, tools ->
toolsOption =
Expand Down
4 changes: 4 additions & 0 deletions package/Prompt.roc
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
## The Prompt module contains functions and types needed to interact with the OpenRouter API using basic prompt strings. In addition to functions for creating and handling API requests, the module includes functions for formatting prompts for models with Llama-style fine-tuning.
module [
PromptRequestBody,
PromptResponseBody,
Expand Down Expand Up @@ -142,6 +143,7 @@ llamaSysMessageEndTag = "\n<<SYS>>\n\n"
llamaExchangeStartTag = "<s>"
llamaExchangeEndTag = "</s>\n"

## Format the prompt and system message into a Llama-style prompt string.
## ```
## [INST]
## <<SYS>>
Expand All @@ -166,6 +168,7 @@ formatLLamaPrompt = \{ prompt, sysMessage ? "" } ->
|> Str.concat prompt
|> Str.concat llamaPromptEndTag

## Format the prompt and conversation history into a Llama-style conversation history string.
## ```
## <s>1st exchange</s>
## <s>...</s>
Expand All @@ -183,6 +186,7 @@ formatLLamaPromptWithHistory = \prompt, conversationHistory ->
|> Str.concat llamaExchangeStartTag
|> Str.concat prompt

## Format the most recent prompt and bot reply, and optionally the previous conversation history, into a Llama-style conversation history string.
## ```
## <s>[INST]
## <<SYS>>
Expand Down
39 changes: 31 additions & 8 deletions package/Tools.roc
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,34 @@ import InternalTools
import Chat
import Client exposing [Client]

## A tool that can be called by the AI model.
## ```
## Tool : {
## type : Str,
## function : {
## name : Str,
## description : Str,
## parameters : {
## type : Str,
## properties : Dict Str FunctionParameter,
## },
## required : List Str,
## },
## }
## ```
Tool : InternalTools.Tool

## A call from the model to a tool.
## ```
## ToolCall : {
## id : Str,
## type : Str,
## function : {
## name : Str,
## arguments : Str,
## },
## }
## ```
ToolCall : InternalTools.ToolCall

## The OpenAI ChatML standard message used to query the AI model.
Expand All @@ -27,13 +54,9 @@ HttpResponse : {
body : List U8,
}

## Using the given toolHandlerMap, check the last message for tool calls, call all
## the tools in the tool call list, send the results back to the model, and handle
## any additional tool calls that may have been generated. If or when no more tool
## calls are present, return the updated list of messages.
## Using the given toolHandlerMap, check the last message for tool calls, call all the tools in the tool call list, send the results back to the model, and handle any additional tool calls that may have been generated. If or when no more tool calls are present, return the updated list of messages.
##
## The toolHandlerMap is a dictionary mapping tool function names to functions
## that take the arguments as a JSON string, parse the json, and return the tool's response.
## The Dict maps tool function name strings to Roc functions that take their arguments as a JSON string, parse the JSON, and return the tool's response.
handleToolCalls : List Message, Client, Dict Str (Str -> Task Str _) -> Task (List Message) _
handleToolCalls = \messages, client, toolHandlerMap ->
when List.last messages is
Expand All @@ -51,8 +74,7 @@ handleToolCalls = \messages, client, toolHandlerMap ->

## Dispatch the tool calls to the appropriate tool handler functions and return the list of tool messages.
##
## The toolHandlerMap is a dictionary mapping tool function names to functions
## that take the arguments as a JSON string, parse the json, and return the tool's response.
## The Dict maps tool function name strings to Roc functions that take their arguments as a JSON string, parse the JSON, and return the tool's response.
dispatchToolCalls : List ToolCall, Dict Str (Str -> Task Str _) -> Task (List Message) _
dispatchToolCalls = \toolCallList, toolHandlerMap ->
Task.loop { toolCalls: toolCallList, toolMessages: [] } \{ toolCalls, toolMessages } ->
Expand Down Expand Up @@ -107,4 +129,5 @@ updateMessagesFromResponse = \messages, responseRes ->
# Err (HttpErr _) -> messages

## Build a tool object with the given name, description, and parameters.
buildTool : Str, Str, List { name : Str, type : Str, description : Str, required : Bool } -> Tool
buildTool = InternalTools.buildTool

0 comments on commit b319473

Please sign in to comment.