This repository has been archived by the owner on Dec 16, 2023. It is now read-only.

Commit

Can use image and Fix bugs
Amaototi committed Nov 8, 2023
1 parent 1f36a9e commit ff9a1ab
Showing 7 changed files with 412 additions and 111 deletions.
20 changes: 17 additions & 3 deletions data.yml
@@ -1,6 +1,20 @@
 tokens:
-  gpt_35_turbo:
+  gpt_35_turbo_1106:
     prompt: 0
     completion: 0
-  gpt_4: 0
-  gpt_4_32k: 0
+  gpt_35_turbo_instruct:
+    prompt: 0
+    completion: 0
+  gpt_4:
+    prompt: 0
+    completion: 0
+  gpt_4_32k:
+    prompt: 0
+    completion: 0
+  gpt_4_1106_preview:
+    prompt: 0
+    completion: 0
+  gpt_4_vision_preview:
+    prompt: 0
+    completion: 0
+    detail: 0
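
Note: the reshaped data.yml now tracks prompt and completion token counts per model, plus a detail counter for the vision model. A rough Go mapping is sketched below for orientation; the struct and field names are illustrative assumptions, since the repository's actual config.Tokens definition is not part of this commit.

// Hypothetical YAML mapping for the new data.yml layout. These names are
// assumptions for illustration, not the repository's real config structs.
package config

type ModelUsage struct {
	Prompt     int `yaml:"prompt"`
	Completion int `yaml:"completion"`
	Detail     int `yaml:"detail,omitempty"` // only gpt_4_vision_preview carries this
}

type Tokens struct {
	Gpt35Turbo1106     ModelUsage `yaml:"gpt_35_turbo_1106"`
	Gpt35TurboInstruct ModelUsage `yaml:"gpt_35_turbo_instruct"`
	Gpt4               ModelUsage `yaml:"gpt_4"`
	Gpt432k            ModelUsage `yaml:"gpt_4_32k"`
	Gpt41106Preview    ModelUsage `yaml:"gpt_4_1106_preview"`
	Gpt4VisionPreview  ModelUsage `yaml:"gpt_4_vision_preview"`
}

type Data struct {
	Tokens Tokens `yaml:"tokens"`
}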
29 changes: 24 additions & 5 deletions lib/chat/gpt_request.go
@@ -17,20 +17,39 @@ const openai = "https://api.openai.com/v1/chat/completions"
 
 var timeout *url.Error
 
-func GptRequest(msg *[]Message, data *config.Tokens, guild *config.Guild, topnum float64, tempnum float64, model string) (string, error) {
+func GptRequestImg(img *[]Img, data *config.Tokens, guild *config.Guild, topnum float64, tempnum float64, model string, detcost int) (string, error) {
+
 	apikey := config.CurrentConfig.Chat.ChatToken
-	response, err := getOpenAIResponse(&apikey, msg, data, guild, topnum, tempnum, model)
+
+	requestBody := OpenaiRequestImg{
+		Model:       model,
+		Messages:    *img,
+		Top_p:       topnum,
+		Temperature: tempnum,
+		MaxToken:    3000,
+	}
+
+	response, err := getOpenAIResponse(&apikey, data, model, requestBody, detcost)
 	return response, err
 }
 
-func getOpenAIResponse(apikey *string, messages *[]Message, data *config.Tokens, guild *config.Guild, topnum float64, tempnum float64, model string) (string, error) {
+func GptRequest(msg *[]Message, data *config.Tokens, guild *config.Guild, topnum float64, tempnum float64, model string, detail int) (string, error) {
+
+	apikey := config.CurrentConfig.Chat.ChatToken
+
 	requestBody := OpenaiRequest{
 		Model:       model,
-		Messages:    *messages,
+		Messages:    *msg,
 		Top_p:       topnum,
 		Temperature: tempnum,
 	}
 
+	response, err := getOpenAIResponse(&apikey, data, model, requestBody, detail)
+	return response, err
+}
+
+func getOpenAIResponse(apikey *string, data *config.Tokens, model string, requestBody interface{}, detcost int) (string, error) {
+
 	requestJSON, err := json.Marshal(requestBody)
 	if err != nil {
 		log.Fatal("Marshaling json error: ", err)

@@ -82,7 +101,7 @@ func getOpenAIResponse(apikey *string, messages *[]Message, data *config.Tokens,
 	completionTokens := response.Usages.CompletionTokens
 	totalTokens := response.Usages.TotalTokens
 
-	err = config.SaveData(data, &model, &promptTokens, &completionTokens, &totalTokens)
+	err = config.SaveData(data, &model, &promptTokens, &completionTokens, &totalTokens, &detcost)
 	if err != nil {
 		log.Fatal("Data save failed: ", err)
 	}
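The new GptRequestImg entry point takes a slice of mixed-content Img messages (the vision types added in lib/chat/gpt_structs.go below) plus a detail-cost hint. A minimal caller sketch follows; the package name "chat", the import paths, the helper function, and all literal values are assumptions, not code from this commit.

// Illustrative call site only; names, import paths, and values are assumed.
package bot

import (
	"fmt"
	"log"

	"example.com/bot/lib/chat"   // assumed import path
	"example.com/bot/lib/config" // assumed import path
)

func askAboutImage(tokens *config.Tokens, guild *config.Guild, imageURL string) {
	// Build one user message with a text part and an image part.
	content := []chat.Content{
		chat.TextContent{Type: "text", Text: "What is in this image?"},
		chat.ImageContent{
			Type:     "image_url",
			ImageURL: chat.ImageURL{Url: imageURL, Detail: "low"},
		},
	}
	messages := []chat.Img{{Role: "user", Content: content}}

	// top_p, temperature, model, and detcost are placeholder values.
	reply, err := chat.GptRequestImg(&messages, tokens, guild, 1.0, 0.7, "gpt-4-vision-preview", 0)
	if err != nil {
		log.Println("image request failed:", err)
		return
	}
	fmt.Println(reply)
}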
30 changes: 30 additions & 0 deletions lib/chat/gpt_structs.go
@@ -7,6 +7,14 @@ type OpenaiRequest struct {
 	Temperature float64 `json:"temperature"`
 }
 
+type OpenaiRequestImg struct {
+	Model       string  `json:"model"`
+	Messages    []Img   `json:"messages"`
+	Top_p       float64 `json:"top_p"`
+	Temperature float64 `json:"temperature"`
+	MaxToken    int     `json:"max_tokens"`
+}
+
 type OpenaiResponse struct {
 	ID     string `json:"id"`
 	Object string `json:"object"`

@@ -26,6 +34,28 @@ type Message struct {
 	Content string `json:"content"`
 }
 
+type Content interface{}
+
+type Img struct {
+	Role    string    `json:"role"`
+	Content []Content `json:"content"`
+}
+
+type TextContent struct {
+	Type string `json:"type"`
+	Text string `json:"text"`
+}
+
+type ImageContent struct {
+	Type     string   `json:"type"`
+	ImageURL ImageURL `json:"image_url"`
+}
+
+type ImageURL struct {
+	Url    string `json:"url"`
+	Detail string `json:"detail"`
+}
+
 type Usage struct {
 	PromptTokens     int `json:"prompt_tokens"`
 	CompletionTokens int `json:"completion_tokens"`
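Because Img.Content holds interface values, encoding/json serializes whichever concrete part (TextContent or ImageContent) was stored, producing the content-parts array that OpenAI's vision endpoint expects. A standalone sketch of the resulting payload, with placeholder text and URL:

// Marshal one mixed-content message and print the JSON it produces.
// The struct definitions are copied from this commit for a self-contained demo.
package main

import (
	"encoding/json"
	"fmt"
)

type Content interface{}

type Img struct {
	Role    string    `json:"role"`
	Content []Content `json:"content"`
}

type TextContent struct {
	Type string `json:"type"`
	Text string `json:"text"`
}

type ImageContent struct {
	Type     string   `json:"type"`
	ImageURL ImageURL `json:"image_url"`
}

type ImageURL struct {
	Url    string `json:"url"`
	Detail string `json:"detail"`
}

func main() {
	m := Img{
		Role: "user",
		Content: []Content{
			TextContent{Type: "text", Text: "Describe this picture."},
			ImageContent{Type: "image_url", ImageURL: ImageURL{Url: "https://example.com/photo.jpg", Detail: "high"}},
		},
	}
	b, _ := json.Marshal(m)
	fmt.Println(string(b))
	// Output:
	// {"role":"user","content":[{"type":"text","text":"Describe this picture."},
	//  {"type":"image_url","image_url":{"url":"https://example.com/photo.jpg","detail":"high"}}]}
}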

0 comments on commit ff9a1ab
