Skip to content

Commit

Permalink
support deepseek
Browse files Browse the repository at this point in the history
Signed-off-by: Patrick Zhao <[email protected]>
  • Loading branch information
PetrusZ committed Feb 13, 2025
1 parent b89d65d commit 067b2f8
Show file tree
Hide file tree
Showing 12 changed files with 143 additions and 60 deletions.
2 changes: 1 addition & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ require (
github.com/rfyiamcool/cronlib v1.2.1
github.com/robfig/cron/v3 v3.0.1
github.com/samber/lo v1.37.0
github.com/sashabaranov/go-openai v1.24.0
github.com/sashabaranov/go-openai v1.37.0
github.com/segmentio/encoding v0.4.1
github.com/shirou/gopsutil v3.21.11+incompatible
github.com/shirou/gopsutil/v3 v3.22.8
Expand Down
2 changes: 2 additions & 0 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -1010,6 +1010,8 @@ github.com/samber/lo v1.37.0 h1:XjVcB8g6tgUp8rsPsJ2CvhClfImrpL04YpQHXeHPhRw=
github.com/samber/lo v1.37.0/go.mod h1:9vaz2O4o8oOnK23pd2TrXufcbdbJIa3b6cstBWKpopA=
github.com/sashabaranov/go-openai v1.24.0 h1:4H4Pg8Bl2RH/YSnU8DYumZbuHnnkfioor/dtNlB20D4=
github.com/sashabaranov/go-openai v1.24.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/sashabaranov/go-openai v1.37.0 h1:hQQowgYm4OXJ1Z/wTrE+XZaO20BYsL0R3uRPSpfNZkY=
github.com/sashabaranov/go-openai v1.37.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/sebdah/goldie/v2 v2.5.3 h1:9ES/mNN+HNUbNWpVAlrzuZ7jE+Nrczbj8uFRjM7624Y=
github.com/sebdah/goldie/v2 v2.5.3/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvKI/NNtssI=
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ type LLMIntegration struct {
ProviderName llm.Provider `bson:"provider_name" json:"provider_name"`
Token string `bson:"token" json:"token"`
BaseURL string `bson:"base_url" json:"base_url"`
Model string `bson:"model" json:"model"`
EnableProxy bool `bson:"enable_proxy" json:"enable_proxy"`
IsDefault bool `bson:"is_default" json:"is_default"`
UpdatedBy string `bson:"updated_by" json:"updated_by"`
Expand Down
1 change: 1 addition & 0 deletions pkg/microservice/aslan/core/common/service/llm.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ func newLLMClient(llmIntegration *models.LLMIntegration) (llm.ILLM, error) {
ProviderName: llmIntegration.ProviderName,
Token: llmIntegration.Token,
BaseURL: llmIntegration.BaseURL,
Model: llmIntegration.Model,
}
if llmIntegration.EnableProxy {
llmConfig.Proxy = config.ProxyHTTPSAddr()
Expand Down
35 changes: 35 additions & 0 deletions pkg/microservice/aslan/core/common/service/llm_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
package service

import (
"reflect"
"testing"

"github.com/koderover/zadig/v2/pkg/microservice/aslan/core/common/repository/models"
"github.com/koderover/zadig/v2/pkg/tool/llm"
)

// Test_newLLMClient runs a table of LLM integration configurations through
// newLLMClient and checks both the returned client and the error outcome
// against the expected values for each case.
func Test_newLLMClient(t *testing.T) {
	type testCase struct {
		name        string
		integration *models.LLMIntegration
		want        llm.ILLM
		wantErr     bool
	}
	// TODO: Add test cases.
	cases := []testCase{}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := newLLMClient(tc.integration)
			// An error is a mismatch only when its presence disagrees
			// with the case's expectation.
			if gotErr := err != nil; gotErr != tc.wantErr {
				t.Errorf("newLLMClient() error = %v, wantErr %v", err, tc.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tc.want) {
				t.Errorf("newLLMClient() = %v, want %v", got, tc.want)
			}
		})
	}
}
21 changes: 0 additions & 21 deletions pkg/microservice/aslan/core/environment/handler/environment.go
Original file line number Diff line number Diff line change
Expand Up @@ -2211,12 +2211,6 @@ func RunAnalysis(c *gin.Context) {
return
}
}

err = commonutil.CheckZadigProfessionalLicense()
if err != nil {
ctx.RespErr = err
return
}
} else {
if !ctx.Resources.ProjectAuthInfo[projectKey].IsProjectAdmin &&
!ctx.Resources.ProjectAuthInfo[projectKey].Env.View {
Expand Down Expand Up @@ -2278,11 +2272,6 @@ func UpsertEnvAnalysisCron(c *gin.Context) {
return
}
}

if err := commonutil.CheckZadigProfessionalLicense(); err != nil {
ctx.RespErr = err
return
}
} else {
if !ctx.Resources.ProjectAuthInfo[projectKey].IsProjectAdmin &&
!ctx.Resources.ProjectAuthInfo[projectKey].Env.EditConfig {
Expand Down Expand Up @@ -2357,11 +2346,6 @@ func GetEnvAnalysisCron(c *gin.Context) {
return
}
}

if err := commonutil.CheckZadigProfessionalLicense(); err != nil {
ctx.RespErr = err
return
}
} else {
if !ctx.Resources.ProjectAuthInfo[projectKey].IsProjectAdmin &&
!ctx.Resources.ProjectAuthInfo[projectKey].Env.View {
Expand Down Expand Up @@ -2425,11 +2409,6 @@ func GetEnvAnalysisHistory(c *gin.Context) {
return
}
}

if err := commonutil.CheckZadigProfessionalLicense(); err != nil {
ctx.RespErr = err
return
}
} else {
if !ctx.Resources.ProjectAuthInfo[projectKey].IsProjectAdmin &&
!ctx.Resources.ProjectAuthInfo[projectKey].Env.View {
Expand Down
8 changes: 7 additions & 1 deletion pkg/microservice/aslan/core/log/service/ai/build.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,13 @@ func AnalyzeBuildLog(args *BuildLogAnalysisArgs, project, pipeline, job string,
log := args.Log
prompt := fmt.Sprintf("%s; 构建日志数据: \"\"\"%s\"\"\"", BuildLogAnalysisPrompt, util.RemoveExtraSpaces(splitBuildLogByRowNum(log, 500)))

answer, err := client.GetCompletion(ctx, prompt, llm.WithModel(openapi.GPT4o))
options := []llm.ParamOption{}
if client.GetModel() != "" {
options = append(options, llm.WithModel(client.GetModel()))
} else {
options = append(options, llm.WithModel(openapi.GPT4o))
}
answer, err := client.GetCompletion(ctx, prompt, options...)
if err != nil {
logger.Errorf("failed to get answer from ai: %v, the error is: %+v", client.GetName(), err)
return "", err
Expand Down
32 changes: 28 additions & 4 deletions pkg/microservice/aslan/core/stat/service/ai/ai_analysis.go
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,13 @@ func AnalyzeProjectStats(args *AiAnalysisReq, logger *zap.SugaredLogger) (*AiAna
"分析要求:%s;你的回答需要使用text格式输出,输出内容不要包含\"三重引号分割的项目数据\"这个名称,也不要复述分析要求中的内容,在你的回答中禁止包含 "+
"\\\"data_description\\\"\\\"jenkins\\\" 等字段; 项目数据:\"\"\"%s\"\"\"", args.Prompt, overAllInput)
}
answer, err := client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), llm.WithTemperature(float32(0.2)), llm.WithModel(AnalysisModel))
options := []llm.ParamOption{llm.WithTemperature(float32(0.2))}
if client.GetModel() != "" {
options = append(options, llm.WithModel(client.GetModel()))
} else {
options = append(options, llm.WithModel(AnalysisModel))
}
answer, err := client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), options...)
if err != nil {
logger.Errorf("failed to get answer from ai: %v, the error is: %+v", client.GetName(), err)
return nil, err
Expand All @@ -137,7 +143,13 @@ func AnalyzeProject(userPrompt string, project *ProjectData, client llm.ILLM, an
}

prompt := fmt.Sprintf("假设你是资深Devops专家,我需要你根据以下分析要求来分析用三重引号分割的项目数据,最后根据你的分析来生成分析报告,分析要求:%s; 项目数据:\"\"\"%s\"\"\";你的回答不能超过400个汉字,同时回答内容要符合text格式,不要存在换行和空行;", util.RemoveExtraSpaces(EveryProjectAnalysisPrompt), string(pData))
answer, err := client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), llm.WithTemperature(float32(0.1)), llm.WithModel(AnalysisModel))
options := []llm.ParamOption{llm.WithTemperature(float32(0.1))}
if client.GetModel() != "" {
options = append(options, llm.WithModel(client.GetModel()))
} else {
options = append(options, llm.WithModel(AnalysisModel))
}
answer, err := client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), options...)
if err != nil {
logger.Errorf("failed to get answer from ai: %v, the error is: %+v", client.GetName(), err)
return
Expand Down Expand Up @@ -189,7 +201,13 @@ func parseUserPrompt(args *AiAnalysisReq, aiClient llm.ILLM, logger *zap.Sugared
}

prompt := fmt.Sprintf("%s;\"\"\"%s\"\"\"", util.RemoveExtraSpaces(ParseUserPromptPrompt), args.Prompt)
resp, err := aiClient.GetCompletion(context.TODO(), prompt)
options := []llm.ParamOption{}
if aiClient.GetModel() != "" {
options = append(options, llm.WithModel(aiClient.GetModel()))
} else {
options = append(options, llm.WithModel(AnalysisModel))
}
resp, err := aiClient.GetCompletion(context.TODO(), prompt, options...)
if err != nil {
return input, fmt.Errorf("failed to get completion, error: %v", err)
}
Expand Down Expand Up @@ -457,7 +475,13 @@ func AnalyzeMonthAttention(start, end int64, data []*service2.MonthAttention, lo
retryTime := 0
answer := ""
for retryTime < 3 {
answer, err = client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), llm.WithTemperature(float32(0.2)), llm.WithModel(AnalysisModel))
options := []llm.ParamOption{llm.WithTemperature(float32(0.2))}
if client.GetModel() != "" {
options = append(options, llm.WithModel(client.GetModel()))
} else {
options = append(options, llm.WithModel(AnalysisModel))
}
answer, err = client.GetCompletion(context.TODO(), util.RemoveExtraSpaces(prompt), options...)
if err != nil {
retryTime++
if strings.Contains(err.Error(), "create chat completion failed") && retryTime < 3 {
Expand Down
15 changes: 2 additions & 13 deletions pkg/microservice/aslan/core/system/handler/llm.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import (
"github.com/gin-gonic/gin"

commonmodels "github.com/koderover/zadig/v2/pkg/microservice/aslan/core/common/repository/models"
commonutil "github.com/koderover/zadig/v2/pkg/microservice/aslan/core/common/util"
"github.com/koderover/zadig/v2/pkg/microservice/aslan/core/system/service"
internalhandler "github.com/koderover/zadig/v2/pkg/shared/handler"
e "github.com/koderover/zadig/v2/pkg/tool/errors"
Expand All @@ -33,6 +32,7 @@ type CreateLLMIntegrationRequest struct {
ProviderName llm.Provider `json:"provider_name"`
Token string `json:"token"`
BaseURL string `json:"base_url"`
Model string `json:"model"`
EnableProxy bool `json:"enable_proxy"`
}

Expand All @@ -54,12 +54,6 @@ func CreateLLMIntegration(c *gin.Context) {
return
}

err := commonutil.CheckZadigProfessionalLicense()
if err != nil {
ctx.RespErr = err
return
}

llmProvider := convertLLMArgToModel(args)
llmProvider.UpdatedBy = ctx.UserName
ctx.RespErr = service.CreateLLMIntegration(context.TODO(), llmProvider)
Expand Down Expand Up @@ -151,12 +145,6 @@ func UpdateLLMIntegration(c *gin.Context) {
return
}

err := commonutil.CheckZadigProfessionalLicense()
if err != nil {
ctx.RespErr = err
return
}

llmProvider := convertLLMArgToModel(args)
llmProvider.UpdatedBy = ctx.UserName
ctx.RespErr = service.UpdateLLMIntegration(context.TODO(), c.Param("id"), llmProvider)
Expand Down Expand Up @@ -189,6 +177,7 @@ func convertLLMArgToModel(args *CreateLLMIntegrationRequest) *commonmodels.LLMIn
Token: args.Token,
BaseURL: args.BaseURL,
EnableProxy: args.EnableProxy,
Model: args.Model,
IsDefault: true,
}
}
10 changes: 8 additions & 2 deletions pkg/tool/analysis/analysis.go
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,12 @@ const (
StateProblemDetected AnalysisStatus = "ProblemDetected"

analysisPrompt = `Simplify the following Kubernetes error message delimited by triple dashes written in --- %s --- language; --- %s ---.
Provide the most possible solution in a step by step style in no more than 280 characters. Write the output in the following format:
Provide the most possible solution in a step by step style in no more than 280 characters. Write the output in the following format, and do not output thinking process:
错误: {Explain error here}
解决方案: {Step by step solution here}
`
analysisChinesePrompt = `简化以下用 --- %s --- 语言编写的以三重破折号分隔的 Kubernetes 错误消息; --- %s ---。
以不超过 280 个字符的方式,一步一步地提供最可能的解决方案。按以下格式写出输出,不要输出思考过程:
错误: {Explain error here}
解决方案: {Step by step solution here}
`
Expand Down Expand Up @@ -212,7 +217,8 @@ func (a *Analysis) GetAIResults(anonymize bool) error {
texts = append(texts, failure.Text)
}
prompt := fmt.Sprintf(analysisPrompt, "Chinese", strings.Join(texts, " "))
parsedText, err := a.AIClient.Parse(a.Context, prompt, a.Cache, llm.WithTemperature(0.3))
options := []llm.ParamOption{llm.WithTemperature(0.3), llm.WithModel(a.AIClient.GetModel())}
parsedText, err := a.AIClient.Parse(a.Context, prompt, a.Cache, options...)
if err != nil {
// Check for exhaustion
if strings.Contains(err.Error(), "status code: 429") {
Expand Down
15 changes: 9 additions & 6 deletions pkg/tool/llm/illm.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,16 +27,18 @@ import (
type Provider string

const (
ProviderOpenAI Provider = "openai"
ProviderAzure Provider = "azure_openai"
ProviderAzureAD Provider = "azure_ad_openai"
ProviderOpenAI Provider = "openai"
ProviderDeepSeek Provider = "deepseek"
ProviderAzure Provider = "azure_openai"
ProviderAzureAD Provider = "azure_ad_openai"
)

var (
clients = map[Provider]ILLM{
ProviderOpenAI: &OpenAIClient{},
ProviderAzure: &OpenAIClient{},
ProviderAzureAD: &OpenAIClient{},
ProviderOpenAI: &OpenAIClient{},
ProviderDeepSeek: &OpenAIClient{},
ProviderAzure: &OpenAIClient{},
ProviderAzureAD: &OpenAIClient{},
}
)

Expand All @@ -45,6 +47,7 @@ type ILLM interface {
GetCompletion(ctx context.Context, prompt string, options ...ParamOption) (string, error)
Parse(ctx context.Context, prompt string, cache cache.ICache, options ...ParamOption) (string, error)
GetName() string
GetModel() string
}

func NewClient(provider Provider) (ILLM, error) {
Expand Down
Loading

0 comments on commit 067b2f8

Please sign in to comment.