update: channel worker, channel sequence and ticker
zmh-program committed Dec 1, 2023
1 parent db7acee commit 8e3a424
Showing 25 changed files with 475 additions and 189 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -2,6 +2,7 @@ node_modules
.vscode
.idea
config.yaml
config.dev.yaml

addition/generation/data/*
!addition/generation/data/.gitkeep
150 changes: 76 additions & 74 deletions adapter/adapter.go
@@ -17,11 +17,9 @@ import (
"chat/adapter/zhipuai"
"chat/globals"
"chat/utils"
"fmt"
)

var defaultMaxRetries = 3
var midjourneyMaxRetries = 10

type RequestProps struct {
MaxRetries *int
Current int
@@ -46,36 +44,21 @@ type ChatProps struct {
Buffer utils.Buffer
}

func createChatRequest(props *ChatProps, hook globals.Hook) error {
if oneapi.IsHit(props.Model) {
return oneapi.NewChatInstanceFromConfig().CreateStreamChatRequest(&oneapi.ChatProps{
Model: props.Model,
Message: props.Message,
Token: utils.Multi(
props.Token == 0,
utils.Multi(globals.IsGPT4Model(props.Model) || props.Plan || props.Infinity, nil, utils.ToPtr(2500)),
&props.Token,
),
PresencePenalty: props.PresencePenalty,
FrequencyPenalty: props.FrequencyPenalty,
Temperature: props.Temperature,
TopP: props.TopP,
Tools: props.Tools,
ToolChoice: props.ToolChoice,
Buffer: props.Buffer,
}, hook)
func createChatRequest(conf globals.ChannelConfig, props *ChatProps, hook globals.Hook) error {
model := conf.GetModelReflect(props.Model)

} else if globals.IsChatGPTModel(props.Model) {
switch conf.GetType() {
case globals.OpenAIChannelType:
instance := chatgpt.NewChatInstanceFromModel(&chatgpt.InstanceProps{
Model: props.Model,
Model: model,
Plan: props.Plan,
})
return instance.CreateStreamChatRequest(&chatgpt.ChatProps{
Model: props.Model,
Model: model,
Message: props.Message,
Token: utils.Multi(
props.Token == 0,
utils.Multi(globals.IsGPT4Model(props.Model) || props.Plan || props.Infinity, nil, utils.ToPtr(2500)),
utils.Multi(globals.IsGPT4Model(model) || props.Plan || props.Infinity, nil, utils.ToPtr(2500)),
&props.Token,
),
PresencePenalty: props.PresencePenalty,
@@ -87,55 +70,55 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
Buffer: props.Buffer,
}, hook)

} else if globals.IsClaudeModel(props.Model) {
case globals.ClaudeChannelType:
return claude.NewChatInstanceFromConfig().CreateStreamChatRequest(&claude.ChatProps{
Model: props.Model,
Model: model,
Message: props.Message,
Token: utils.Multi(props.Token == 0, 50000, props.Token),
TopP: props.TopP,
TopK: props.TopK,
Temperature: props.Temperature,
}, hook)

} else if globals.IsSparkDeskModel(props.Model) {
return sparkdesk.NewChatInstance(props.Model).CreateStreamChatRequest(&sparkdesk.ChatProps{
Model: props.Model,
Message: props.Message,
Token: utils.Multi(props.Token == 0, nil, utils.ToPtr(props.Token)),
Temperature: props.Temperature,
TopK: props.TopK,
Tools: props.Tools,
Buffer: props.Buffer,
case globals.SlackChannelType:
return slack.NewChatInstanceFromConfig().CreateStreamChatRequest(&slack.ChatProps{
Message: props.Message,
}, hook)

} else if globals.IsPalm2Model(props.Model) {
return palm2.NewChatInstanceFromConfig().CreateStreamChatRequest(&palm2.ChatProps{
Model: props.Model,
case globals.BingChannelType:
return bing.NewChatInstanceFromConfig().CreateStreamChatRequest(&bing.ChatProps{
Model: model,
Message: props.Message,
}, hook)

} else if globals.IsSlackModel(props.Model) {
return slack.NewChatInstanceFromConfig().CreateStreamChatRequest(&slack.ChatProps{
case globals.PalmChannelType:
return palm2.NewChatInstanceFromConfig().CreateStreamChatRequest(&palm2.ChatProps{
Model: model,
Message: props.Message,
}, hook)

} else if globals.IsBingModel(props.Model) {
return bing.NewChatInstanceFromConfig().CreateStreamChatRequest(&bing.ChatProps{
Model: props.Model,
Message: props.Message,
case globals.SparkdeskChannelType:
return sparkdesk.NewChatInstance(model).CreateStreamChatRequest(&sparkdesk.ChatProps{
Model: model,
Message: props.Message,
Token: utils.Multi(props.Token == 0, nil, utils.ToPtr(props.Token)),
Temperature: props.Temperature,
TopK: props.TopK,
Tools: props.Tools,
Buffer: props.Buffer,
}, hook)

} else if globals.IsZhiPuModel(props.Model) {
case globals.ChatGLMChannelType:
return zhipuai.NewChatInstanceFromConfig().CreateStreamChatRequest(&zhipuai.ChatProps{
Model: props.Model,
Model: model,
Message: props.Message,
Temperature: props.Temperature,
TopP: props.TopP,
}, hook)

} else if globals.IsQwenModel(props.Model) {
case globals.QwenChannelType:
return dashscope.NewChatInstanceFromConfig().CreateStreamChatRequest(&dashscope.ChatProps{
Model: props.Model,
Model: model,
Message: props.Message,
Token: utils.Multi(props.Infinity || props.Plan, 2048, props.Token),
Temperature: props.Temperature,
@@ -144,43 +127,26 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
RepetitionPenalty: props.RepetitionPenalty,
}, hook)

} else if globals.IsMidjourneyModel(props.Model) {
return midjourney.NewChatInstanceFromConfig().CreateStreamChatRequest(&midjourney.ChatProps{
Model: props.Model,
Messages: props.Message,
}, hook)

} else if globals.IsHunyuanModel(props.Model) {
case globals.HunyuanChannelType:
return hunyuan.NewChatInstanceFromConfig().CreateStreamChatRequest(&hunyuan.ChatProps{
Model: props.Model,
Model: model,
Message: props.Message,
Temperature: props.Temperature,
TopP: props.TopP,
}, hook)

} else if globals.Is360Model(props.Model) {
return zhinao.NewChatInstanceFromConfig().CreateStreamChatRequest(&zhinao.ChatProps{
Model: props.Model,
Message: props.Message,
Token: utils.Multi(props.Infinity || props.Plan, nil, utils.ToPtr(2048)),
TopP: props.TopP,
TopK: props.TopK,
Temperature: props.Temperature,
RepetitionPenalty: props.RepetitionPenalty,
}, hook)

} else if globals.IsBaichuanModel(props.Model) {
case globals.BaichuanChannelType:
return baichuan.NewChatInstanceFromConfig().CreateStreamChatRequest(&baichuan.ChatProps{
Model: props.Model,
Model: model,
Message: props.Message,
TopP: props.TopP,
TopK: props.TopK,
Temperature: props.Temperature,
}, hook)

} else if globals.IsSkylarkModel(props.Model) {
case globals.SkylarkChannelType:
return skylark.NewChatInstanceFromConfig().CreateStreamChatRequest(&skylark.ChatProps{
Model: props.Model,
Model: model,
Message: props.Message,
Token: utils.Multi(props.Token == 0, 4096, props.Token),
TopP: props.TopP,
@@ -191,7 +157,43 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
RepeatPenalty: props.RepetitionPenalty,
Tools: props.Tools,
}, hook)
}

return hook("Sorry, we cannot find the model you are looking for. Please try another model.")
case globals.ZhinaoChannelType:
return zhinao.NewChatInstanceFromConfig().CreateStreamChatRequest(&zhinao.ChatProps{
Model: model,
Message: props.Message,
Token: utils.Multi(props.Infinity || props.Plan, nil, utils.ToPtr(2048)),
TopP: props.TopP,
TopK: props.TopK,
Temperature: props.Temperature,
RepetitionPenalty: props.RepetitionPenalty,
}, hook)

case globals.MidjourneyChannelType:
return midjourney.NewChatInstanceFromConfig().CreateStreamChatRequest(&midjourney.ChatProps{
Model: model,
Messages: props.Message,
}, hook)

case globals.OneAPIChannelType:
return oneapi.NewChatInstanceFromConfig().CreateStreamChatRequest(&oneapi.ChatProps{
Model: model,
Message: props.Message,
Token: utils.Multi(
props.Token == 0,
utils.Multi(globals.IsGPT4Model(model) || props.Plan || props.Infinity, nil, utils.ToPtr(2500)),
&props.Token,
),
PresencePenalty: props.PresencePenalty,
FrequencyPenalty: props.FrequencyPenalty,
Temperature: props.Temperature,
TopP: props.TopP,
Tools: props.Tools,
ToolChoice: props.ToolChoice,
Buffer: props.Buffer,
}, hook)

default:
return fmt.Errorf("unknown channel type %s for model %s", conf.GetType(), props.Model)
}
}
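
Taken together, the call sites in the rewritten createChatRequest (conf.GetType() and conf.GetModelReflect() here, plus conf.GetRetry() and conf.ProcessError() in adapter/request.go below) imply a channel-config contract roughly like the following sketch. It is inferred from this diff, not copied from globals, so the real definition may differ:

package globals

// Sketch of the interface implied by the call sites in this commit;
// an assumption, not the actual globals.ChannelConfig source.
type ChannelConfig interface {
	// GetType returns the channel type switched on above, e.g.
	// globals.OpenAIChannelType or globals.ClaudeChannelType.
	GetType() string
	// GetModelReflect maps the requested model name to the model
	// name the underlying channel should receive.
	GetModelReflect(model string) string
	// GetRetry returns the per-channel retry budget that replaces
	// the deleted getRetries helper in adapter/request.go.
	GetRetry() int
	// ProcessError lets the channel rewrite or suppress the final
	// error before it is returned to the caller.
	ProcessError(err error) error
}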
3 changes: 1 addition & 2 deletions adapter/chatgpt/struct.go
@@ -2,7 +2,6 @@ package chatgpt

import (
"chat/globals"
"chat/utils"
"fmt"
"github.com/spf13/viper"
)
@@ -42,7 +41,7 @@ func NewChatInstance(endpoint, apiKey string) *ChatInstance {
func NewChatInstanceFromConfig(v string) *ChatInstance {
return NewChatInstance(
viper.GetString(fmt.Sprintf("openai.%s.endpoint", v)),
utils.GetRandomKey(viper.GetString(fmt.Sprintf("openai.%s.apikey", v))),
viper.GetString(fmt.Sprintf("openai.%s.apikey", v)),
)
}

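The utils.GetRandomKey removal above repeats in the claude, dashscope, and palm2 adapters below, presumably because key selection now moves into the channel layer introduced by this commit. Judging only by its call sites (a config string in, a single API key out), the deleted helper plausibly looked like this reconstruction; the separator and random selection are assumptions, not the actual utils source:

package utils

import (
	"math/rand"
	"strings"
)

// Hypothetical reconstruction of the removed helper: pick one key at
// random from a delimiter-separated list. The "|" separator is an
// assumption; the real implementation may have differed.
func GetRandomKey(key string) string {
	keys := strings.Split(key, "|")
	return keys[rand.Intn(len(keys))]
}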
3 changes: 1 addition & 2 deletions adapter/claude/struct.go
@@ -1,7 +1,6 @@
package claude

import (
"chat/utils"
"github.com/spf13/viper"
)

@@ -20,7 +19,7 @@ func NewChatInstance(endpoint, apiKey string) *ChatInstance {
func NewChatInstanceFromConfig() *ChatInstance {
return NewChatInstance(
viper.GetString("claude.endpoint"),
utils.GetRandomKey(viper.GetString("claude.apikey")),
viper.GetString("claude.apikey"),
)
}

3 changes: 1 addition & 2 deletions adapter/dashscope/struct.go
@@ -1,7 +1,6 @@
package dashscope

import (
"chat/utils"
"github.com/spf13/viper"
)

@@ -28,6 +27,6 @@ func NewChatInstance(endpoint string, apiKey string) *ChatInstance {
func NewChatInstanceFromConfig() *ChatInstance {
return NewChatInstance(
viper.GetString("dashscope.endpoint"),
utils.GetRandomKey(viper.GetString("dashscope.apikey")),
viper.GetString("dashscope.apikey"),
)
}
26 changes: 0 additions & 26 deletions adapter/oneapi/globals.go

This file was deleted.

5 changes: 0 additions & 5 deletions adapter/oneapi/struct.go
@@ -1,7 +1,6 @@
package oneapi

import (
"chat/utils"
"fmt"
"github.com/spf13/viper"
)
@@ -44,7 +43,3 @@ func NewChatInstanceFromConfig() *ChatInstance {
viper.GetString("oneapi.apikey"),
)
}

func IsHit(model string) bool {
return utils.Contains[string](model, HitModels)
}
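
The deleted adapter/oneapi/globals.go presumably held the HitModels list that IsHit matched against; with requests now routed by conf.GetType() instead of model-name lookups, the hit test goes away along with it.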
3 changes: 1 addition & 2 deletions adapter/palm2/struct.go
@@ -1,7 +1,6 @@
package palm2

import (
"chat/utils"
"github.com/spf13/viper"
)

@@ -28,6 +27,6 @@ func NewChatInstance(endpoint string, apiKey string) *ChatInstance {
func NewChatInstanceFromConfig() *ChatInstance {
return NewChatInstance(
viper.GetString("palm2.endpoint"),
utils.GetRandomKey(viper.GetString("palm2.apikey")),
viper.GetString("palm2.apikey"),
)
}
23 changes: 6 additions & 17 deletions adapter/request.go
@@ -20,21 +20,10 @@ func isQPSOverLimit(model string, err error) bool {
}
}

func getRetries(model string, retries *int) int {
if retries == nil {
if globals.IsMidjourneyModel(model) {
return midjourneyMaxRetries
}
return defaultMaxRetries
}

return *retries
}

func NewChatRequest(props *ChatProps, hook globals.Hook) error {
err := createChatRequest(props, hook)
func NewChatRequest(conf globals.ChannelConfig, props *ChatProps, hook globals.Hook) error {
err := createChatRequest(conf, props, hook)

retries := getRetries(props.Model, props.MaxRetries)
retries := conf.GetRetry()
props.Current++

if IsAvailableError(err) {
Expand All @@ -43,14 +32,14 @@ func NewChatRequest(props *ChatProps, hook globals.Hook) error {

fmt.Println(fmt.Sprintf("qps limit for %s, sleep and retry (times: %d)", props.Model, props.Current))
time.Sleep(500 * time.Millisecond)
return NewChatRequest(props, hook)
return NewChatRequest(conf, props, hook)
}

if props.Current < retries {
fmt.Println(fmt.Sprintf("retrying chat request for %s (attempt %d/%d, error: %s)", props.Model, props.Current+1, retries, err.Error()))
return NewChatRequest(props, hook)
return NewChatRequest(conf, props, hook)
}
}

return err
return conf.ProcessError(err)
}
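
For orientation, a minimal caller under the new signature might look like the sketch below. The wrapper function, message type, and model name are illustrative assumptions; only NewChatRequest, ChatProps, and the hook shape come from this diff:

package main

import (
	"fmt"

	"chat/adapter"
	"chat/globals"
)

// Sketch of a call site: stream a completion through a resolved
// channel config. QPS backoff, per-channel retries (conf.GetRetry()),
// and conf.ProcessError() all happen inside NewChatRequest.
func streamChat(conf globals.ChannelConfig, msgs []globals.Message) error {
	return adapter.NewChatRequest(conf, &adapter.ChatProps{
		Model:   "gpt-3.5-turbo", // illustrative model name
		Message: msgs,
	}, func(data string) error {
		fmt.Print(data) // the hook fires once per streamed chunk
		return nil
	})
}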