support deepseek
Signed-off-by: Patrick Zhao <[email protected]>
PetrusZ committed Feb 12, 2025
1 parent b89d65d commit f375f14
Showing 11 changed files with 93 additions and 15 deletions.
2 changes: 1 addition & 1 deletion go.mod
@@ -77,7 +77,7 @@ require (
 	github.com/rfyiamcool/cronlib v1.2.1
 	github.com/robfig/cron/v3 v3.0.1
 	github.com/samber/lo v1.37.0
-	github.com/sashabaranov/go-openai v1.24.0
+	github.com/sashabaranov/go-openai v1.37.0
 	github.com/segmentio/encoding v0.4.1
 	github.com/shirou/gopsutil v3.21.11+incompatible
 	github.com/shirou/gopsutil/v3 v3.22.8
2 changes: 2 additions & 0 deletions go.sum
@@ -1010,6 +1010,8 @@ github.com/samber/lo v1.37.0 h1:XjVcB8g6tgUp8rsPsJ2CvhClfImrpL04YpQHXeHPhRw=
 github.com/samber/lo v1.37.0/go.mod h1:9vaz2O4o8oOnK23pd2TrXufcbdbJIa3b6cstBWKpopA=
 github.com/sashabaranov/go-openai v1.24.0 h1:4H4Pg8Bl2RH/YSnU8DYumZbuHnnkfioor/dtNlB20D4=
 github.com/sashabaranov/go-openai v1.24.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
+github.com/sashabaranov/go-openai v1.37.0 h1:hQQowgYm4OXJ1Z/wTrE+XZaO20BYsL0R3uRPSpfNZkY=
+github.com/sashabaranov/go-openai v1.37.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
 github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
 github.com/sebdah/goldie/v2 v2.5.3 h1:9ES/mNN+HNUbNWpVAlrzuZ7jE+Nrczbj8uFRjM7624Y=
 github.com/sebdah/goldie/v2 v2.5.3/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvKI/NNtssI=
1 change: 1 addition & 0 deletions pkg/microservice/aslan/core/common/repository/models/llm.go
@@ -27,6 +27,7 @@ type LLMIntegration struct {
 	ProviderName llm.Provider `bson:"provider_name" json:"provider_name"`
 	Token        string       `bson:"token" json:"token"`
 	BaseURL      string       `bson:"base_url" json:"base_url"`
+	Model        string       `bson:"model" json:"model"`
 	EnableProxy  bool         `bson:"enable_proxy" json:"enable_proxy"`
 	IsDefault    bool         `bson:"is_default" json:"is_default"`
 	UpdatedBy    string       `bson:"updated_by" json:"updated_by"`
1 change: 1 addition & 0 deletions pkg/microservice/aslan/core/common/service/llm.go
@@ -33,6 +33,7 @@ func newLLMClient(llmIntegration *models.LLMIntegration) (llm.ILLM, error) {
 		ProviderName: llmIntegration.ProviderName,
 		Token:        llmIntegration.Token,
 		BaseURL:      llmIntegration.BaseURL,
+		Model:        llmIntegration.Model,
 	}
 	if llmIntegration.EnableProxy {
 		llmConfig.Proxy = config.ProxyHTTPSAddr()
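
Taken together with the new Model field on the integration model, wiring a DeepSeek integration through newLLMClient might look like the following — a minimal sketch: the token is a placeholder, and the base URL and model name are typical DeepSeek values assumed here rather than taken from this diff.

	integration := &models.LLMIntegration{
		ProviderName: llm.ProviderDeepSeek,
		Token:        "sk-...",                      // placeholder API key
		BaseURL:      "https://api.deepseek.com/v1", // assumed OpenAI-compatible endpoint
		Model:        "deepseek-chat",               // assumed model name
		IsDefault:    true,
	}
	client, err := newLLMClient(integration) // client.GetModel() then reports "deepseek-chat"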
35 changes: 35 additions & 0 deletions pkg/microservice/aslan/core/common/service/llm_test.go
@@ -0,0 +1,35 @@
+package service
+
+import (
+	"reflect"
+	"testing"
+
+	"github.com/koderover/zadig/v2/pkg/microservice/aslan/core/common/repository/models"
+	"github.com/koderover/zadig/v2/pkg/tool/llm"
+)
+
+func Test_newLLMClient(t *testing.T) {
+	type args struct {
+		llmIntegration *models.LLMIntegration
+	}
+	tests := []struct {
+		name    string
+		args    args
+		want    llm.ILLM
+		wantErr bool
+	}{
+		// TODO: Add test cases.
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := newLLMClient(tt.args.llmIntegration)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("newLLMClient() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("newLLMClient() = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
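
The table above ships empty. One possible entry for the TODO — hypothetical, and assuming newLLMClient returns an error for a provider with no registered client:

	{
		name: "unknown provider returns an error",
		args: args{
			llmIntegration: &models.LLMIntegration{
				ProviderName: llm.Provider("no-such-provider"),
				Token:        "dummy-token",
			},
		},
		want:    nil,
		wantErr: true,
	},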
8 changes: 7 additions & 1 deletion pkg/microservice/aslan/core/log/service/ai/build.go
@@ -28,7 +28,13 @@ func AnalyzeBuildLog(args *BuildLogAnalysisArgs, project, pipeline, job string,
 	log := args.Log
 	prompt := fmt.Sprintf("%s; 构建日志数据: \"\"\"%s\"\"\"", BuildLogAnalysisPrompt, util.RemoveExtraSpaces(splitBuildLogByRowNum(log, 500)))
 
-	answer, err := client.GetCompletion(ctx, prompt, llm.WithModel(openapi.GPT4o))
+	options := []llm.ParamOption{}
+	if client.GetModel() != "" {
+		options = append(options, llm.WithModel(client.GetModel()))
+	} else {
+		options = append(options, llm.WithModel(openapi.GPT4o))
+	}
+	answer, err := client.GetCompletion(ctx, prompt, options...)
 	if err != nil {
 		logger.Errorf("failed to get answer from ai: %v, the error is: %+v", client.GetName(), err)
 		return "", err
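
The configured-model-or-fallback branch above is repeated verbatim at every call site this commit touches (see the ai_analysis.go hunks next). A small helper could express it once — a sketch, assuming only the ILLM interface and llm.ParamOption shown elsewhere in this diff; the name modelOptions is hypothetical:

	// modelOptions prefers the model configured on the client and falls back
	// to the given default, appending to any options already collected.
	func modelOptions(client llm.ILLM, fallback string, extra ...llm.ParamOption) []llm.ParamOption {
		options := append([]llm.ParamOption{}, extra...)
		if m := client.GetModel(); m != "" {
			return append(options, llm.WithModel(m))
		}
		return append(options, llm.WithModel(fallback))
	}

Each call site would then collapse to, e.g., client.GetCompletion(ctx, prompt, modelOptions(client, openapi.GPT4o)...).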
32 changes: 28 additions & 4 deletions pkg/microservice/aslan/core/stat/service/ai/ai_analysis.go
@@ -112,7 +112,13 @@ func AnalyzeProjectStats(args *AiAnalysisReq, logger *zap.SugaredLogger) (*AiAna
 			"分析要求:%s;你的回答需要使用text格式输出,输出内容不要包含\"三重引号分割的项目数据\"这个名称,也不要复述分析要求中的内容,在你的回答中禁止包含 "+
 			"\\\"data_description\\\"\\\"jenkins\\\" 等字段; 项目数据:\"\"\"%s\"\"\"", args.Prompt, overAllInput)
 	}
-	answer, err := client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), llm.WithTemperature(float32(0.2)), llm.WithModel(AnalysisModel))
+	options := []llm.ParamOption{llm.WithTemperature(float32(0.2))}
+	if client.GetModel() != "" {
+		options = append(options, llm.WithModel(client.GetModel()))
+	} else {
+		options = append(options, llm.WithModel(AnalysisModel))
+	}
+	answer, err := client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), options...)
 	if err != nil {
 		logger.Errorf("failed to get answer from ai: %v, the error is: %+v", client.GetName(), err)
 		return nil, err
@@ -137,7 +143,13 @@ func AnalyzeProject(userPrompt string, project *ProjectData, client llm.ILLM, an
 	}
 
 	prompt := fmt.Sprintf("假设你是资深Devops专家,我需要你根据以下分析要求来分析用三重引号分割的项目数据,最后根据你的分析来生成分析报告,分析要求:%s; 项目数据:\"\"\"%s\"\"\";你的回答不能超过400个汉字,同时回答内容要符合text格式,不要存在换行和空行;", util.RemoveExtraSpaces(EveryProjectAnalysisPrompt), string(pData))
-	answer, err := client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), llm.WithTemperature(float32(0.1)), llm.WithModel(AnalysisModel))
+	options := []llm.ParamOption{llm.WithTemperature(float32(0.1))}
+	if client.GetModel() != "" {
+		options = append(options, llm.WithModel(client.GetModel()))
+	} else {
+		options = append(options, llm.WithModel(AnalysisModel))
+	}
+	answer, err := client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), options...)
 	if err != nil {
 		logger.Errorf("failed to get answer from ai: %v, the error is: %+v", client.GetName(), err)
 		return
@@ -189,7 +201,13 @@ func parseUserPrompt(args *AiAnalysisReq, aiClient llm.ILLM, logger *zap.Sugared
 	}
 
 	prompt := fmt.Sprintf("%s;\"\"\"%s\"\"\"", util.RemoveExtraSpaces(ParseUserPromptPrompt), args.Prompt)
-	resp, err := aiClient.GetCompletion(context.TODO(), prompt)
+	options := []llm.ParamOption{}
+	if aiClient.GetModel() != "" {
+		options = append(options, llm.WithModel(aiClient.GetModel()))
+	} else {
+		options = append(options, llm.WithModel(AnalysisModel))
+	}
+	resp, err := aiClient.GetCompletion(context.TODO(), prompt, options...)
 	if err != nil {
 		return input, fmt.Errorf("failed to get completion, error: %v", err)
 	}
@@ -457,7 +475,13 @@ func AnalyzeMonthAttention(start, end int64, data []*service2.MonthAttention, lo
 	retryTime := 0
 	answer := ""
 	for retryTime < 3 {
-		answer, err = client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), llm.WithTemperature(float32(0.2)), llm.WithModel(AnalysisModel))
+		options := []llm.ParamOption{llm.WithTemperature(float32(0.2))}
+		if client.GetModel() != "" {
+			options = append(options, llm.WithModel(client.GetModel()))
+		} else {
+			options = append(options, llm.WithModel(AnalysisModel))
+		}
+		answer, err = client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), options...)
 		if err != nil {
 			retryTime++
 			if strings.Contains(err.Error(), "create chat completion failed") && retryTime < 3 {
2 changes: 2 additions & 0 deletions pkg/microservice/aslan/core/system/handler/llm.go
@@ -33,6 +33,7 @@ type CreateLLMIntegrationRequest struct {
 	ProviderName llm.Provider `json:"provider_name"`
 	Token        string       `json:"token"`
 	BaseURL      string       `json:"base_url"`
+	Model        string       `json:"model"`
 	EnableProxy  bool         `json:"enable_proxy"`
 }
 
@@ -189,6 +190,7 @@ func convertLLMArgToModel(args *CreateLLMIntegrationRequest) *commonmodels.LLMIn
 		Token:       args.Token,
 		BaseURL:     args.BaseURL,
 		EnableProxy: args.EnableProxy,
+		Model:       args.Model,
 		IsDefault:   true,
 	}
 }
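
For illustration, a create-integration request carrying the new model field could be built as follows — a sketch with placeholder values; the DeepSeek endpoint and model name are assumptions, not part of this diff:

	req := CreateLLMIntegrationRequest{
		ProviderName: llm.ProviderDeepSeek,
		Token:        "sk-...",                      // placeholder
		BaseURL:      "https://api.deepseek.com/v1", // assumed
		Model:        "deepseek-chat",               // assumed
		EnableProxy:  false,
	}
	payload, _ := json.Marshal(req)
	// {"provider_name":"deepseek","token":"sk-...","base_url":"https://api.deepseek.com/v1","model":"deepseek-chat","enable_proxy":false}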
3 changes: 2 additions & 1 deletion pkg/tool/analysis/analysis.go
@@ -212,7 +212,8 @@ func (a *Analysis) GetAIResults(anonymize bool) error {
 		texts = append(texts, failure.Text)
 	}
 	prompt := fmt.Sprintf(analysisPrompt, "Chinese", strings.Join(texts, " "))
-	parsedText, err := a.AIClient.Parse(a.Context, prompt, a.Cache, llm.WithTemperature(0.3))
+	options := []llm.ParamOption{llm.WithTemperature(0.3), llm.WithModel(a.AIClient.GetModel())}
+	parsedText, err := a.AIClient.Parse(a.Context, prompt, a.Cache, options...)
 	if err != nil {
 		// Check for exhaustion
 		if strings.Contains(err.Error(), "status code: 429") {
15 changes: 9 additions & 6 deletions pkg/tool/llm/illm.go
@@ -27,16 +27,18 @@ import (
 type Provider string
 
 const (
-	ProviderOpenAI  Provider = "openai"
-	ProviderAzure   Provider = "azure_openai"
-	ProviderAzureAD Provider = "azure_ad_openai"
+	ProviderOpenAI   Provider = "openai"
+	ProviderDeepSeek Provider = "deepseek"
+	ProviderAzure    Provider = "azure_openai"
+	ProviderAzureAD  Provider = "azure_ad_openai"
 )
 
 var (
 	clients = map[Provider]ILLM{
-		ProviderOpenAI:  &OpenAIClient{},
-		ProviderAzure:   &OpenAIClient{},
-		ProviderAzureAD: &OpenAIClient{},
+		ProviderOpenAI:   &OpenAIClient{},
+		ProviderDeepSeek: &OpenAIClient{},
+		ProviderAzure:    &OpenAIClient{},
+		ProviderAzureAD:  &OpenAIClient{},
 	}
 )
 
@@ -45,6 +47,7 @@ type ILLM interface {
 	GetCompletion(ctx context.Context, prompt string, options ...ParamOption) (string, error)
 	Parse(ctx context.Context, prompt string, cache cache.ICache, options ...ParamOption) (string, error)
 	GetName() string
+	GetModel() string
 }
 
 func NewClient(provider Provider) (ILLM, error) {
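
DeepSeek is registered against the existing OpenAIClient, which works because DeepSeek exposes an OpenAI-compatible API. Obtaining a client through the registry could then look like this — a sketch: token and base-URL configuration (see newLLMClient) is elided, and the model name is a placeholder:

	ctx := context.Background()
	client, err := llm.NewClient(llm.ProviderDeepSeek)
	if err != nil {
		// provider not found in the clients map
	}
	// Credentials must be configured before real use; elided in this sketch.
	answer, err := client.GetCompletion(ctx, "summarize this deploy failure", llm.WithModel("deepseek-chat"))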
7 changes: 5 additions & 2 deletions pkg/tool/llm/openai.go
@@ -31,7 +31,7 @@ import (
 )
 
 const (
-	DefaultOpenAIModel           = openai.GPT4o
+	DefaultOpenAIModel           = openai.O1
 	DefaultOpenAIModelTokenLimit = "128000"
 )
 
@@ -180,6 +180,10 @@ func (a *OpenAIClient) GetName() string {
 	return a.name
 }
 
+func (a *OpenAIClient) GetModel() string {
+	return a.model
+}
+
 func NumTokensFromMessages(messages []openai.ChatCompletionMessage, model string) (num_tokens int, err error) {
 	tkm, err := tiktoken.NewEncodingForModel(model)
 	if err != nil {
@@ -198,7 +202,6 @@ func NumTokensFromMessages(messages []openai.ChatCompletionMessage, model string
 	} else {
 		tokens_per_message = 3
 		tokens_per_name = 1
-		log.Warnf("Warning: model not found. Using cl100k_base encoding.")
 	}
 
 	calcTokens := func(message string) (int, error) {
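
As a usage sketch for the token counter above — message content is illustrative, and the estimate follows the tokens-per-message accounting in NumTokensFromMessages:

	msgs := []openai.ChatCompletionMessage{
		{Role: openai.ChatMessageRoleUser, Content: "analyze this build log ..."},
	}
	n, err := NumTokensFromMessages(msgs, DefaultOpenAIModel)
	if err == nil {
		fmt.Printf("prompt uses ~%d tokens\n", n)
	}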
