new version for apigo.cc/ai

This commit is contained in:
Star 2024-10-31 15:25:04 +08:00
parent dba6f196df
commit 75708351bb
7 changed files with 222 additions and 205 deletions

20
ai_test.go Normal file
View File

@ -0,0 +1,20 @@
package zhipu_test
import (
"testing"
_ "apigo.cc/ai"
_ "apigo.cc/ai/zhipu"
"apigo.cc/gojs"
_ "apigo.cc/gojs/console"
"github.com/ssgo/u"
)
// TestAI exports the gojs modules for development, executes the
// ai_test.js script, and fails the test when execution returns an
// error; on success the result is printed as colored JSON.
func TestAI(t *testing.T) {
	gojs.ExportForDev()
	result, err := gojs.RunFile("ai_test.js")
	if err != nil {
		t.Fatal(err.Error())
	}
	println(u.Cyan(u.JsonP(result)))
}

6
ai_test.js Normal file
View File

@ -0,0 +1,6 @@
// Smoke-test script for the zhipu provider: sends a single fastAsk
// chat request, streaming answer chunks to console.info as they arrive.
// NOTE(review): prompts are Chinese runtime strings — do not translate.
import co from 'apigo.cc/gojs/console'
import ai from 'apigo.cc/ai'
return ai.zhipu.fastAsk('用一句话介绍一下你的主人', co.info, {
systemPrompt: '你的主人叫张三,是个程序员'
})

164
chat.go
View File

@ -1,123 +1,106 @@
package zhipu package zhipu
import ( import (
"apigo.cc/ai/llm/llm"
"bytes" "bytes"
"context" "context"
"encoding/binary" "encoding/binary"
"fmt"
"github.com/ssgo/u"
"github.com/yankeguo/zhipu"
"strings" "strings"
"time" "time"
"apigo.cc/ai"
"github.com/ssgo/u"
"github.com/yankeguo/zhipu"
) )
func (lm *LLM) FastAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) { func getClient(aiConf *ai.AIConfig) (client *zhipu.Client, err error) {
return lm.Ask(messages, llm.ChatConfig{ opt := []zhipu.ClientOption{zhipu.WithAPIKey(aiConf.ApiKey)}
Model: ModelGLM4Flash, if aiConf.Endpoint != "" {
}, callback) opt = append(opt, zhipu.WithBaseURL(aiConf.Endpoint))
}
return zhipu.NewClient(opt...)
} }
func (lm *LLM) LongAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) { func Chat(aiConf *ai.AIConfig, messages []ai.ChatMessage, callback func(string), conf ai.ChatConfig) (ai.ChatResult, error) {
return lm.Ask(messages, llm.ChatConfig{ c, err := getClient(aiConf)
Model: ModelGLM4Long,
}, callback)
}
func (lm *LLM) BatterAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
return lm.Ask(messages, llm.ChatConfig{
Model: ModelGLM4Plus,
}, callback)
}
func (lm *LLM) BestAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
return lm.Ask(messages, llm.ChatConfig{
Model: ModelGLM40520,
}, callback)
}
func (lm *LLM) MultiAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
return lm.Ask(messages, llm.ChatConfig{
Model: ModelGLM4VPlus,
}, callback)
}
func (lm *LLM) BestMultiAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
return lm.Ask(messages, llm.ChatConfig{
Model: ModelGLM4V,
}, callback)
}
func (lm *LLM) CodeInterpreterAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
return lm.Ask(messages, llm.ChatConfig{
Model: ModelGLM4AllTools,
Tools: map[string]any{llm.ToolCodeInterpreter: nil},
}, callback)
}
func (lm *LLM) WebSearchAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
return lm.Ask(messages, llm.ChatConfig{
Model: ModelGLM4AllTools,
Tools: map[string]any{llm.ToolWebSearch: nil},
}, callback)
}
func (lm *LLM) Ask(messages []llm.ChatMessage, config llm.ChatConfig, callback func(answer string)) (string, llm.Usage, error) {
config.SetDefault(&lm.config.ChatConfig)
c, err := zhipu.NewClient(zhipu.WithAPIKey(lm.config.ApiKey), zhipu.WithBaseURL(lm.config.Endpoint))
if err != nil { if err != nil {
return "", llm.Usage{}, err return ai.ChatResult{}, err
} }
cc := c.ChatCompletion(config.GetModel()) cc := c.ChatCompletion(conf.Model)
if conf.SystemPrompt != "" {
cc.AddMessage(zhipu.ChatCompletionMessage{
Role: zhipu.RoleSystem,
Content: conf.SystemPrompt,
})
}
for _, msg := range messages { for _, msg := range messages {
var contents []zhipu.ChatCompletionMultiContent var contents []zhipu.ChatCompletionMultiContent
if msg.Contents != nil { if msg.Contents != nil {
contents = make([]zhipu.ChatCompletionMultiContent, len(msg.Contents)) contents = make([]zhipu.ChatCompletionMultiContent, len(msg.Contents))
for j, inPart := range msg.Contents { for j, inPart := range msg.Contents {
part := zhipu.ChatCompletionMultiContent{} part := zhipu.ChatCompletionMultiContent{}
part.Type = NameMap[inPart.Type] part.Type = inPart.Type
switch inPart.Type { switch inPart.Type {
case llm.TypeText: case ai.TypeText:
part.Text = inPart.Content part.Text = inPart.Content
case llm.TypeImage: case ai.TypeImage:
part.ImageURL = &zhipu.URLItem{URL: inPart.Content} part.ImageURL = &zhipu.URLItem{URL: inPart.Content}
//case llm.TypeVideo: //case ai.TypeVideo:
// part.VideoURL = &zhipu.URLItem{URL: inPart.Content} // part.VideoURL = &zhipu.URLItem{URL: inPart.Content}
} }
contents[j] = part contents[j] = part
} }
} }
if len(contents) == 1 && contents[0].Type == llm.TypeText { if len(contents) == 1 && contents[0].Type == ai.TypeText {
cc.AddMessage(zhipu.ChatCompletionMessage{ cc.AddMessage(zhipu.ChatCompletionMessage{
Role: NameMap[msg.Role], Role: msg.Role,
Content: contents[0].Text, Content: contents[0].Text,
}) })
} else { } else {
cc.AddMessage(zhipu.ChatCompletionMultiMessage{ cc.AddMessage(zhipu.ChatCompletionMultiMessage{
Role: NameMap[msg.Role], Role: msg.Role,
Content: contents, Content: contents,
}) })
} }
} }
for name := range config.GetTools() { for name, toolConf := range conf.Tools {
switch name { switch name {
case llm.ToolCodeInterpreter: case ai.ToolFunction:
cc.AddTool(zhipu.ChatCompletionToolCodeInterpreter{}) conf := zhipu.ChatCompletionToolFunction{}
case llm.ToolWebSearch: u.Convert(toolConf, &conf)
cc.AddTool(zhipu.ChatCompletionToolWebBrowser{}) cc.AddTool(conf)
case ai.ToolCodeInterpreter:
conf := zhipu.ChatCompletionToolCodeInterpreter{}
u.Convert(toolConf, &conf)
cc.AddTool(conf)
case ai.ToolWebSearch:
conf := zhipu.ChatCompletionToolWebSearch{}
u.Convert(toolConf, &conf)
cc.AddTool(conf)
case ai.ToolWebBrowser:
conf := zhipu.ChatCompletionToolWebBrowser{}
u.Convert(toolConf, &conf)
cc.AddTool(conf)
case ai.ToolDrawingTool:
conf := zhipu.ChatCompletionToolDrawingTool{}
u.Convert(toolConf, &conf)
cc.AddTool(conf)
case ai.ToolRetrieval:
conf := zhipu.ChatCompletionToolRetrieval{}
u.Convert(toolConf, &conf)
cc.AddTool(conf)
} }
} }
if config.GetMaxTokens() != 0 { if conf.MaxTokens != 0 {
cc.SetMaxTokens(config.GetMaxTokens()) cc.SetMaxTokens(conf.MaxTokens)
} }
if config.GetTemperature() != 0 { if conf.Temperature != 0 {
cc.SetTemperature(config.GetTemperature()) cc.SetTemperature(conf.Temperature)
} }
if config.GetTopP() != 0 { if conf.TopP != 0 {
cc.SetTopP(config.GetTopP()) cc.SetTopP(conf.TopP)
} }
if callback != nil { if callback != nil {
cc.SetStreamHandler(func(r2 zhipu.ChatCompletionResponse) error { cc.SetStreamHandler(func(r2 zhipu.ChatCompletionResponse) error {
@ -131,11 +114,6 @@ func (lm *LLM) Ask(messages []llm.ChatMessage, config llm.ChatConfig, callback f
}) })
} }
if lm.config.Debug {
fmt.Println(cc.BatchMethod(), cc.BatchURL())
fmt.Println(u.JsonP(cc.BatchBody()))
}
t1 := time.Now().UnixMilli() t1 := time.Now().UnixMilli()
if r, err := cc.Do(context.Background()); err == nil { if r, err := cc.Do(context.Background()); err == nil {
t2 := time.Now().UnixMilli() - t1 t2 := time.Now().UnixMilli() - t1
@ -145,32 +123,25 @@ func (lm *LLM) Ask(messages []llm.ChatMessage, config llm.ChatConfig, callback f
results = append(results, ch.Message.Content) results = append(results, ch.Message.Content)
} }
} }
return strings.Join(results, ""), llm.Usage{ return ai.ChatResult{
Result: strings.Join(results, ""),
AskTokens: r.Usage.PromptTokens, AskTokens: r.Usage.PromptTokens,
AnswerTokens: r.Usage.CompletionTokens, AnswerTokens: r.Usage.CompletionTokens,
TotalTokens: r.Usage.TotalTokens, TotalTokens: r.Usage.TotalTokens,
UsedTime: t2, UsedTime: t2,
}, nil }, nil
} else { } else {
return "", llm.Usage{}, err return ai.ChatResult{}, err
} }
} }
func (lm *LLM) FastEmbedding(text string) ([]byte, llm.Usage, error) { func Embedding(aiConf *ai.AIConfig, text string, embeddingConf ai.EmbeddingConfig) (ai.EmbeddingResult, error) {
return lm.Embedding(text, ModelEmbedding3) c, err := getClient(aiConf)
}
func (lm *LLM) BestEmbedding(text string) ([]byte, llm.Usage, error) {
return lm.Embedding(text, ModelEmbedding3)
}
func (lm *LLM) Embedding(text, model string) ([]byte, llm.Usage, error) {
c, err := zhipu.NewClient(zhipu.WithAPIKey(lm.config.ApiKey), zhipu.WithBaseURL(lm.config.Endpoint))
if err != nil { if err != nil {
return nil, llm.Usage{}, err return ai.EmbeddingResult{}, err
} }
cc := c.Embedding(model) cc := c.Embedding(embeddingConf.Model)
cc.SetInput(text) cc.SetInput(text)
t1 := time.Now().UnixMilli() t1 := time.Now().UnixMilli()
if r, err := cc.Do(context.Background()); err == nil { if r, err := cc.Do(context.Background()); err == nil {
@ -183,13 +154,14 @@ func (lm *LLM) Embedding(text, model string) ([]byte, llm.Usage, error) {
} }
} }
} }
return buf.Bytes(), llm.Usage{ return ai.EmbeddingResult{
Result: buf.Bytes(),
AskTokens: r.Usage.PromptTokens, AskTokens: r.Usage.PromptTokens,
AnswerTokens: r.Usage.CompletionTokens, AnswerTokens: r.Usage.CompletionTokens,
TotalTokens: r.Usage.TotalTokens, TotalTokens: r.Usage.TotalTokens,
UsedTime: t2, UsedTime: t2,
}, nil }, nil
} else { } else {
return nil, llm.Usage{}, err return ai.EmbeddingResult{}, err
} }
} }

View File

@ -1,60 +1,27 @@
package zhipu package zhipu
import ( import (
"apigo.cc/ai/llm/llm" _ "embed"
"github.com/yankeguo/zhipu"
"apigo.cc/ai"
"github.com/ssgo/u"
) )
type LLM struct { //go:embed default.yml
config llm.Config var defaultYml string
}
var NameMap = map[string]string{
llm.TypeText: zhipu.MultiContentTypeText,
llm.TypeImage: zhipu.MultiContentTypeImageURL,
//llm.TypeVideo: zhipu.MultiContentTypeVideoURL,
llm.RoleSystem: zhipu.RoleSystem,
llm.RoleUser: zhipu.RoleUser,
llm.RoleAssistant: zhipu.RoleAssistant,
llm.RoleTool: zhipu.RoleTool,
}
const (
ModelGLM4Plus = "GLM-4-Plus"
ModelGLM40520 = "GLM-4-0520"
ModelGLM4Long = "GLM-4-Long"
ModelGLM4AirX = "GLM-4-AirX"
ModelGLM4Air = "GLM-4-Air"
ModelGLM4Flash = "GLM-4-Flash"
ModelGLM4AllTools = "GLM-4-AllTools"
ModelGLM4 = "GLM-4"
ModelGLM4VPlus = "GLM-4V-Plus"
ModelGLM4V = "GLM-4V"
ModelCogVideoX = "CogVideoX"
ModelCogView3Plus = "CogView-3-Plus"
ModelCogView3 = "CogView-3"
ModelEmbedding3 = "Embedding-3"
ModelEmbedding2 = "Embedding-2"
ModelCharGLM3 = "CharGLM-3"
ModelEmohaa = "Emohaa"
ModelCodeGeeX4 = "CodeGeeX-4"
)
func (lm *LLM) Support() llm.Support {
return llm.Support{
Ask: true,
AskWithImage: true,
AskWithVideo: false,
AskWithCodeInterpreter: true,
AskWithWebSearch: true,
MakeImage: true,
MakeVideo: true,
Models: []string{ModelGLM4Plus, ModelGLM40520, ModelGLM4Long, ModelGLM4AirX, ModelGLM4Air, ModelGLM4Flash, ModelGLM4AllTools, ModelGLM4, ModelGLM4VPlus, ModelGLM4V, ModelCogVideoX, ModelCogView3Plus, ModelCogView3, ModelEmbedding3, ModelEmbedding2, ModelCharGLM3, ModelEmohaa, ModelCodeGeeX4},
}
}
func init() { func init() {
llm.Register("zhipu", func(config llm.Config) llm.LLM { defaultConf := ai.AILoadConfig{}
return &LLM{config: config} u.Convert(u.UnYamlMap(defaultYml), &defaultConf)
ai.Register("zhipu", &ai.Agent{
ChatConfigs: defaultConf.Chat,
EmbeddingConfigs: defaultConf.Embedding,
ImageConfigs: defaultConf.Image,
VideoConfigs: defaultConf.Video,
Chat: Chat,
Embedding: Embedding,
MakeImage: MakeImage,
MakeVideo: MakeVideo,
GetVideoResult: GetVideoResult,
}) })
} }

36
default.yml Normal file
View File

@ -0,0 +1,36 @@
# Default model presets for the zhipu AI provider.
# Each named entry under chat/embedding/image/video selects the model
# (and, where present, tools or size parameters) used by that preset.
# Indentation restored to valid 2-space YAML block mapping; the flat
# rendering in the diff view had lost all nesting.
chat:
  fastAsk:
    model: GLM-4-Flash
  longAsk:
    model: GLM-4-Long
  plusAsk:
    model: GLM-4-Plus
  plusAskV:
    model: GLM-4V-Plus
  bestAsk:
    model: GLM-4-0520
  bestAskV:
    model: GLM-4V
  codeInterpreter:
    model: GLM-4-AllTools
    tools:
      # empty value: tool enabled with its default (null) configuration
      codeInterpreter:
  webSearch:
    model: GLM-4-AllTools
    tools:
      webSearch:
  codeGeex:
    model: CodeGeeX-4
  emohaa:
    model: Emohaa
embedding:
  embedding:
    model: Embedding-3
image:
  makeImage:
    model: CogView-3-Plus
    width: 1024
    height: 1024
video:
  makeVideo:
    model: CogVideoX

98
gc.go
View File

@ -5,28 +5,17 @@ import (
"errors" "errors"
"time" "time"
"apigo.cc/ai/llm/llm" "apigo.cc/ai"
"github.com/yankeguo/zhipu" "github.com/yankeguo/zhipu"
) )
func (lm *LLM) FastMakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) { func MakeImage(aiConf *ai.AIConfig, conf ai.ImageConfig) (ai.ImageResult, error) {
config.Model = ModelCogView3Plus c, err := getClient(aiConf)
return lm.MakeImage(prompt, config)
}
func (lm *LLM) BestMakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) {
config.Model = ModelCogView3
return lm.MakeImage(prompt, config)
}
func (lm *LLM) MakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) {
c, err := zhipu.NewClient(zhipu.WithAPIKey(lm.config.ApiKey), zhipu.WithBaseURL(lm.config.Endpoint))
if err != nil { if err != nil {
return nil, llm.Usage{}, err return ai.ImageResult{}, err
} }
config.SetDefault(&lm.config.GCConfig) cc := c.ImageGeneration(conf.Model).SetPrompt(conf.SystemPrompt + conf.Prompt)
cc := c.ImageGeneration(config.GetModel()).SetPrompt(prompt)
//cc.SetSize(config.GetSize()) //cc.SetSize(config.GetSize())
t1 := time.Now().UnixMilli() t1 := time.Now().UnixMilli()
@ -36,60 +25,75 @@ func (lm *LLM) MakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usag
for _, item := range r.Data { for _, item := range r.Data {
results = append(results, item.URL) results = append(results, item.URL)
} }
return results, llm.Usage{ if len(results) == 0 {
results = append(results, "")
}
return ai.ImageResult{
Results: results,
UsedTime: t2, UsedTime: t2,
}, nil }, nil
} else { } else {
return nil, llm.Usage{}, err return ai.ImageResult{}, err
} }
} }
func (lm *LLM) FastMakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) { func MakeVideo(aiConf *ai.AIConfig, conf ai.VideoConfig) (string, error) {
config.Model = ModelCogVideoX c, err := getClient(aiConf)
return lm.MakeVideo(prompt, config)
}
func (lm *LLM) BestMakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) {
config.Model = ModelCogVideoX
return lm.MakeVideo(prompt, config)
}
func (lm *LLM) MakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) {
c, err := zhipu.NewClient(zhipu.WithAPIKey(lm.config.ApiKey), zhipu.WithBaseURL(lm.config.Endpoint))
if err != nil { if err != nil {
return nil, nil, llm.Usage{}, err return "", err
} }
config.SetDefault(&lm.config.GCConfig) cc := c.VideoGeneration(conf.Model).SetPrompt(conf.SystemPrompt + conf.Prompt)
cc := c.VideoGeneration(config.GetModel()).SetPrompt(prompt) if len(conf.Ref) > 0 {
cc.SetImageURL(config.GetRef()) cc.SetImageURL(conf.Ref[0])
}
t1 := time.Now().UnixMilli()
if resp, err := cc.Do(context.Background()); err == nil { if resp, err := cc.Do(context.Background()); err == nil {
t2 := time.Now().UnixMilli() - t1 return resp.ID, nil
for i := 0; i < 1200; i++ { } else {
r, err := c.AsyncResult(resp.ID).Do(context.Background()) return "", err
}
}
func GetVideoResult(aiConf *ai.AIConfig, taskId string, waitSeconds int) (ai.VideoResult, error) {
c, err := getClient(aiConf)
if err != nil { if err != nil {
return nil, nil, llm.Usage{}, err return ai.VideoResult{}, err
}
var r zhipu.AsyncResultResponse
for i := 0; i < waitSeconds; i += 3 {
r, err = c.AsyncResult(taskId).Do(context.Background())
if err != nil {
return ai.VideoResult{}, err
} }
if r.TaskStatus == zhipu.VideoGenerationTaskStatusSuccess { if r.TaskStatus == zhipu.VideoGenerationTaskStatusSuccess {
covers := make([]string, 0)
results := make([]string, 0) results := make([]string, 0)
previews := make([]string, 0)
for _, item := range r.VideoResult { for _, item := range r.VideoResult {
results = append(results, item.URL) results = append(results, item.URL)
covers = append(covers, item.CoverImageURL) previews = append(previews, item.CoverImageURL)
} }
return results, covers, llm.Usage{ if len(results) == 0 {
UsedTime: t2, results = append(results, "")
previews = append(previews, "")
}
return ai.VideoResult{
Results: results,
Previews: previews,
UsedTime: 0,
}, nil }, nil
} }
if r.TaskStatus == zhipu.VideoGenerationTaskStatusFail { if r.TaskStatus == zhipu.VideoGenerationTaskStatusFail {
return nil, nil, llm.Usage{}, errors.New("fail on task " + resp.ID) return ai.VideoResult{}, errors.New("fail on video task " + taskId)
} else if r.TaskStatus == zhipu.VideoGenerationTaskStatusProcessing {
if waitSeconds == 0 {
return ai.VideoResult{IsProcessing: true}, nil
} }
time.Sleep(3 * time.Second) time.Sleep(3 * time.Second)
}
return nil, nil, llm.Usage{}, errors.New("timeout on task " + resp.ID)
} else { } else {
return nil, nil, llm.Usage{}, err return ai.VideoResult{}, errors.New("unknow status " + r.TaskStatus + " on video task " + taskId)
} }
} }
return ai.VideoResult{IsProcessing: true}, errors.New("timeout on video task " + taskId)
}

18
go.mod
View File

@ -1,16 +1,28 @@
module apigo.cc/ai/zhipu module apigo.cc/ai/zhipu
go 1.22 go 1.18
require ( require (
apigo.cc/ai/llm v0.0.4 apigo.cc/ai v0.0.1
apigo.cc/gojs v0.0.4
apigo.cc/gojs/console v0.0.1
github.com/ssgo/u v1.7.9 github.com/ssgo/u v1.7.9
github.com/yankeguo/zhipu v0.1.2 github.com/yankeguo/zhipu v0.1.2
) )
require ( require (
github.com/go-resty/resty/v2 v2.14.0 // indirect github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/go-resty/resty/v2 v2.15.3 // indirect
github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
github.com/golang-jwt/jwt/v5 v5.2.1 // indirect github.com/golang-jwt/jwt/v5 v5.2.1 // indirect
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
github.com/ssgo/config v1.7.8 // indirect
github.com/ssgo/log v1.7.7 // indirect
github.com/ssgo/standard v1.7.7 // indirect
github.com/ssgo/tool v0.4.27 // indirect
golang.org/x/net v0.30.0 // indirect golang.org/x/net v0.30.0 // indirect
golang.org/x/sys v0.26.0 // indirect
golang.org/x/text v0.19.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
) )