Compare commits

...

2 Commits

Author SHA1 Message Date
Star
f7fbd20ffa new version for apigo.cc/ai 2024-10-31 15:27:22 +08:00
998a9f4ad5 use new gojs 2024-10-25 16:04:36 +08:00
9 changed files with 242 additions and 261 deletions

6
.gitignore vendored Normal file
View File

@ -0,0 +1,6 @@
.*
!.gitignore
go.sum
env.yml
node_modules
package.json

20
ai_test.go Normal file
View File

@ -0,0 +1,20 @@
package openai_test
import (
"testing"
_ "apigo.cc/ai"
_ "apigo.cc/ai/openai"
"apigo.cc/gojs"
_ "apigo.cc/gojs/console"
"github.com/ssgo/u"
)
// TestAI exports the gojs dev bindings, runs ai_test.js through the gojs
// runtime, and logs the script's result. The test fails when the script
// cannot be executed.
func TestAI(t *testing.T) {
	gojs.ExportForDev()
	r, err := gojs.RunFile("ai_test.js")
	if err != nil {
		// Pass the error value itself; t.Fatal formats it, which is the
		// idiomatic form (err.Error() loses nothing and adds noise).
		t.Fatal(err)
	}
	// t.Log keeps output attached to this test's log instead of the
	// builtin println, which writes to stderr outside the test framework.
	t.Log(u.Cyan(u.JsonP(r)))
}

6
ai_test.js Normal file
View File

@ -0,0 +1,6 @@
// Smoke-test script executed from ai_test.go via gojs.RunFile.
import co from 'apigo.cc/gojs/console'
import ai from 'apigo.cc/ai'

// Top-level return is supported by the gojs runtime: the script's result is
// handed back to the Go caller. co.info receives streamed answer chunks.
return ai.openai.fastAsk('用一句话介绍一下你的主人', co.info, {
    systemPrompt: '你的主人叫张三,是个程序员'
})

95
aigc.go
View File

@ -1,95 +0,0 @@
package openai
import (
"context"
"github.com/sashabaranov/go-openai"
"strings"
)
// FastMakeImage generates images with the standard-quality DALL-E 3 model.
func (ag *Agent) FastMakeImage(prompt, size, refImage string) ([]string, error) {
	return ag.MakeImage(ModelDallE3Std, prompt, size, refImage)
}
// BestMakeImage generates images with the HD-quality DALL-E 3 model alias.
func (ag *Agent) BestMakeImage(prompt, size, refImage string) ([]string, error) {
	return ag.MakeImage(ModelDallE3HD, prompt, size, refImage)
}
// MakeImage generates images via the OpenAI image API and returns their URLs.
// A model name ending in "-hd" is a local alias: the suffix is stripped and
// HD quality is requested instead. The style defaults to vivid and switches
// to natural only when the prompt asks for a natural style ("natural"/"自然的")
// without also asking for a vivid one ("vivid"/"生动的").
// refImage is currently unused by this API.
func (ag *Agent) MakeImage(model, prompt, size, refImage string) ([]string, error) {
	openaiConf := openai.DefaultConfig(ag.config.ApiKey)
	if ag.config.Endpoint != "" {
		openaiConf.BaseURL = ag.config.Endpoint
	}
	c := openai.NewClientWithConfig(openaiConf)

	// Bug fix: the original used `!a || !b`, which is true unless BOTH vivid
	// keywords are present; the intended "neither vivid keyword" check is
	// `!a && !b` (De Morgan).
	style := openai.CreateImageStyleVivid
	if !strings.Contains(prompt, "vivid") && !strings.Contains(prompt, "生动的") &&
		(strings.Contains(prompt, "natural") || strings.Contains(prompt, "自然的")) {
		style = openai.CreateImageStyleNatural
	}

	quality := openai.CreateImageQualityStandard
	if strings.HasSuffix(model, "-hd") {
		quality = openai.CreateImageQualityHD
		model = strings.TrimSuffix(model, "-hd")
	}

	r, err := c.CreateImage(context.Background(), openai.ImageRequest{
		Prompt:         prompt,
		Model:          model,
		Quality:        quality,
		Size:           size,
		Style:          style,
		ResponseFormat: openai.CreateImageResponseFormatURL,
	})
	if err != nil {
		return nil, err
	}
	results := make([]string, 0, len(r.Data))
	for _, item := range r.Data {
		results = append(results, item.URL)
	}
	return results, nil
}
// FastMakeVideo delegates to MakeVideo with an empty model name.
func (ag *Agent) FastMakeVideo(prompt, size, refImage string) ([]string, []string, error) {
	return ag.MakeVideo("", prompt, size, refImage)
}
// BestMakeVideo delegates to MakeVideo with an empty model name.
func (ag *Agent) BestMakeVideo(prompt, size, refImage string) ([]string, []string, error) {
	return ag.MakeVideo("", prompt, size, refImage)
}
// MakeVideo is a placeholder: video generation is not implemented for this
// backend. The commented-out code below sketches a submit-then-poll flow
// against an async video-generation API.
// NOTE(review): it currently returns (nil, nil, nil) — callers receive no
// error even though nothing was generated; consider returning a
// "not supported" error instead.
func (ag *Agent) MakeVideo(model, prompt, size, refImage string) ([]string, []string, error) {
	//c, err := openai.NewClient(openai.WithAPIKey(ag.config.ApiKey), openai.WithBaseURL(ag.config.Endpoint))
	//if err != nil {
	//	return nil, nil, err
	//}
	//
	//cc := c.VideoGeneration(model).SetPrompt(prompt)
	//if refImage != "" {
	//	cc.SetImageURL(refImage)
	//}
	//
	//if resp, err := cc.Do(context.Background()); err == nil {
	//	for i := 0; i < 1200; i++ {
	//		r, err := c.AsyncResult(resp.ID).Do(context.Background())
	//		if err != nil {
	//			return nil, nil, err
	//		}
	//		if r.TaskStatus == openai.VideoGenerationTaskStatusSuccess {
	//			covers := make([]string, 0)
	//			results := make([]string, 0)
	//			for _, item := range r.VideoResult {
	//				results = append(results, item.URL)
	//				covers = append(covers, item.CoverImageURL)
	//			}
	//			return results, covers, nil
	//		}
	//		if r.TaskStatus == openai.VideoGenerationTaskStatusFail {
	//			return nil, nil, errors.New("fail on task " + resp.ID)
	//		}
	//		time.Sleep(3 * time.Second)
	//	}
	//	return nil, nil, errors.New("timeout on task " + resp.ID)
	//} else {
	//	return nil, nil, err
	//}
	return nil, nil, nil
}

186
chat.go
View File

@ -1,126 +1,101 @@
package openai
import (
"apigo.cc/ai/agent"
"bytes"
"context"
"encoding/binary"
"strings"
"time"
"apigo.cc/ai"
"github.com/sashabaranov/go-openai"
"github.com/ssgo/log"
"strings"
"github.com/ssgo/u"
)
// FastAsk chats using the fast, low-cost default model (gpt-4o-mini);
// a non-nil callback receives streamed answer chunks.
func (ag *Agent) FastAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
	return ag.Ask(messages, &agent.ChatModelConfig{
		Model: ModelGPT_4o_mini_2024_07_18,
	}, callback)
}
// LongAsk chats using the long-context model (gpt-4-32k-0613).
func (ag *Agent) LongAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
	return ag.Ask(messages, &agent.ChatModelConfig{
		Model: ModelGPT_4_32k_0613,
	}, callback)
}
// BatterAsk chats using gpt-4-turbo.
// NOTE(review): the name looks like a typo of "BetterAsk", but renaming
// would break callers, so it is kept as-is.
func (ag *Agent) BatterAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
	return ag.Ask(messages, &agent.ChatModelConfig{
		Model: ModelGPT_4_turbo,
	}, callback)
}
// BestAsk chats using the strongest configured model (gpt-4o-2024-08-06).
func (ag *Agent) BestAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
	return ag.Ask(messages, &agent.ChatModelConfig{
		Model: ModelGPT_4o_2024_08_06,
	}, callback)
}
// MultiAsk chats using gpt-4o-mini (same model as FastAsk; kept as a
// separate entry point for multi-content messages).
func (ag *Agent) MultiAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
	return ag.Ask(messages, &agent.ChatModelConfig{
		Model: ModelGPT_4o_mini_2024_07_18,
	}, callback)
}
// BestMultiAsk chats using gpt-4o-2024-08-06 (same model as BestAsk).
func (ag *Agent) BestMultiAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
	return ag.Ask(messages, &agent.ChatModelConfig{
		Model: ModelGPT_4o_2024_08_06,
	}, callback)
}
// CodeInterpreterAsk chats using gpt-4o with the code-interpreter tool enabled.
func (ag *Agent) CodeInterpreterAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
	return ag.Ask(messages, &agent.ChatModelConfig{
		Model: ModelGPT_4o,
		Tools: map[string]any{agent.ToolCodeInterpreter: nil},
	}, callback)
}
// WebSearchAsk chats using gpt-4o-mini with the web-search tool requested.
func (ag *Agent) WebSearchAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
	return ag.Ask(messages, &agent.ChatModelConfig{
		Model: ModelGPT_4o_mini_2024_07_18,
		Tools: map[string]any{agent.ToolWebSearch: nil},
	}, callback)
}
func (ag *Agent) Ask(messages []agent.ChatMessage, config *agent.ChatModelConfig, callback func(answer string)) (string, agent.TokenUsage, error) {
openaiConf := openai.DefaultConfig(ag.config.ApiKey)
if ag.config.Endpoint != "" {
openaiConf.BaseURL = ag.config.Endpoint
// getClient builds an OpenAI API client from the agent configuration,
// overriding the default base URL when a custom endpoint is set.
func getClient(aiConf *ai.AIConfig) *openai.Client {
	conf := openai.DefaultConfig(aiConf.ApiKey)
	if ep := aiConf.Endpoint; ep != "" {
		conf.BaseURL = ep
	}
	return openai.NewClientWithConfig(conf)
}
if config == nil {
config = &agent.ChatModelConfig{}
}
config.SetDefault(&ag.config.DefaultChatModelConfig)
agentMessages := make([]openai.ChatCompletionMessage, len(messages))
// func (lm *LLM) Ask(messages []ai.ChatMessage, config ai.ChatConfig, callback func(answer string)) (string, ai.Usage, error) {
func Chat(aiConf *ai.AIConfig, messages []ai.ChatMessage, callback func(string), conf ai.ChatConfig) (ai.ChatResult, error) {
chatMessages := make([]openai.ChatCompletionMessage, len(messages))
for i, msg := range messages {
var contents []openai.ChatMessagePart
if msg.Contents != nil {
contents = make([]openai.ChatMessagePart, len(msg.Contents))
for j, inPart := range msg.Contents {
part := openai.ChatMessagePart{}
part.Type = TypeMap[inPart.Type]
switch inPart.Type {
case agent.TypeText:
case ai.TypeText:
part.Type = openai.ChatMessagePartTypeText
part.Text = inPart.Content
case agent.TypeImage:
case ai.TypeImage:
part.Type = openai.ChatMessagePartTypeImageURL
part.ImageURL = &openai.ChatMessageImageURL{
URL: inPart.Content,
Detail: openai.ImageURLDetailAuto,
}
default:
part.Type = openai.ChatMessagePartType(inPart.Type)
part.Text = inPart.Content
}
contents[j] = part
}
}
agentMessages[i] = openai.ChatCompletionMessage{
Role: RoleMap[msg.Role],
if len(contents) == 1 && contents[0].Type == ai.TypeText {
chatMessages[i] = openai.ChatCompletionMessage{
Role: msg.Role,
Content: contents[0].Text,
}
} else {
chatMessages[i] = openai.ChatCompletionMessage{
Role: msg.Role,
MultiContent: contents,
}
}
}
if conf.SystemPrompt != "" {
chatMessages = append([]openai.ChatCompletionMessage{{
Role: openai.ChatMessageRoleSystem,
Content: conf.SystemPrompt,
}}, chatMessages...)
}
opt := openai.ChatCompletionRequest{
Model: config.GetModel(),
Messages: agentMessages,
MaxTokens: config.GetMaxTokens(),
Temperature: float32(config.GetTemperature()),
TopP: float32(config.GetTopP()),
Model: conf.Model,
Messages: chatMessages,
MaxTokens: conf.MaxTokens,
Temperature: float32(conf.Temperature),
TopP: float32(conf.TopP),
StreamOptions: &openai.StreamOptions{
IncludeUsage: true,
},
}
for name := range config.GetTools() {
for name, toolConf := range conf.Tools {
switch name {
case agent.ToolCodeInterpreter:
case ai.ToolCodeInterpreter:
opt.Tools = append(opt.Tools, openai.Tool{Type: "code_interpreter"})
case agent.ToolWebSearch:
case ai.ToolFunction:
conf := openai.FunctionDefinition{}
u.Convert(toolConf, &conf)
opt.Tools = append(opt.Tools, openai.Tool{Type: openai.ToolTypeFunction, Function: &conf})
}
}
c := openai.NewClientWithConfig(openaiConf)
c := getClient(aiConf)
if callback != nil {
opt.Stream = true
r, err := c.CreateChatCompletionStream(context.Background(), opt)
if err == nil {
results := make([]string, 0)
usage := agent.TokenUsage{}
out := ai.ChatResult{}
for {
if r2, err := r.Recv(); err == nil {
if r2.Choices != nil {
@ -131,38 +106,73 @@ func (ag *Agent) Ask(messages []agent.ChatMessage, config *agent.ChatModelConfig
}
}
if r2.Usage != nil {
usage.AskTokens += int64(r2.Usage.PromptTokens)
usage.AnswerTokens += int64(r2.Usage.CompletionTokens)
usage.TotalTokens += int64(r2.Usage.TotalTokens)
out.AskTokens += int64(r2.Usage.PromptTokens)
out.AnswerTokens += int64(r2.Usage.CompletionTokens)
out.TotalTokens += int64(r2.Usage.TotalTokens)
}
} else {
break
}
}
_ = r.Close()
return strings.Join(results, ""), usage, nil
out.Result = strings.Join(results, "")
return out, nil
} else {
log.DefaultLogger.Error(err.Error())
return "", agent.TokenUsage{}, err
return ai.ChatResult{}, err
}
} else {
r, err := c.CreateChatCompletion(context.Background(), opt)
if err == nil {
t1 := time.Now().UnixMilli()
if r, err := c.CreateChatCompletion(context.Background(), opt); err == nil {
t2 := time.Now().UnixMilli() - t1
results := make([]string, 0)
if r.Choices != nil {
for _, ch := range r.Choices {
results = append(results, ch.Message.Content)
}
}
return strings.Join(results, ""), agent.TokenUsage{
return ai.ChatResult{
Result: strings.Join(results, ""),
AskTokens: int64(r.Usage.PromptTokens),
AnswerTokens: int64(r.Usage.CompletionTokens),
TotalTokens: int64(r.Usage.TotalTokens),
UsedTime: t2,
}, nil
} else {
//fmt.Println(u.BMagenta(err.Error()), u.BMagenta(u.JsonP(r)))
return "", agent.TokenUsage{}, err
return ai.ChatResult{}, err
}
}
}
// Embedding requests an embedding vector for text using the model named in
// embeddingConf, and returns the vector serialized to little-endian bytes,
// together with token usage and elapsed milliseconds.
func Embedding(aiConf *ai.AIConfig, text string, embeddingConf ai.EmbeddingConfig) (ai.EmbeddingResult, error) {
	c := getClient(aiConf)
	req := openai.EmbeddingRequest{
		Input:          text,
		Model:          openai.EmbeddingModel(embeddingConf.Model),
		User:           "",
		EncodingFormat: "", // zero value: let the server pick its default
		Dimensions:     0,  // zero value: model-default dimensionality
	}
	t1 := time.Now().UnixMilli()
	if r, err := c.CreateEmbeddings(context.Background(), req); err == nil {
		t2 := time.Now().UnixMilli() - t1
		// Serialize every element of every returned embedding, in order,
		// as little-endian into one flat byte buffer.
		buf := new(bytes.Buffer)
		if r.Data != nil {
			for _, ch := range r.Data {
				for _, v := range ch.Embedding {
					// Error ignored deliberately: binary.Write to a
					// bytes.Buffer cannot fail for fixed-size values.
					_ = binary.Write(buf, binary.LittleEndian, v)
				}
			}
		}
		return ai.EmbeddingResult{
			Result:       buf.Bytes(),
			AskTokens:    int64(r.Usage.PromptTokens),
			AnswerTokens: int64(r.Usage.CompletionTokens),
			TotalTokens:  int64(r.Usage.TotalTokens),
			UsedTime:     t2,
		}, nil
	} else {
		return ai.EmbeddingResult{}, err
	}
}

View File

@ -1,81 +1,25 @@
package openai
import (
"apigo.cc/ai/agent"
"github.com/sashabaranov/go-openai"
_ "embed"
"apigo.cc/ai"
"github.com/ssgo/u"
)
// Agent holds the API configuration for one OpenAI agent instance.
type Agent struct {
	config agent.APIConfig
}
// TypeMap translates agent content-part types to go-openai part types.
var TypeMap = map[string]openai.ChatMessagePartType{
	agent.TypeText:  openai.ChatMessagePartTypeText,
	agent.TypeImage: openai.ChatMessagePartTypeImageURL,
	//agent.TypeVideo: "video_url",
}

// RoleMap translates agent roles to go-openai chat roles.
var RoleMap = map[string]string{
	agent.RoleSystem:    openai.ChatMessageRoleSystem,
	agent.RoleUser:      openai.ChatMessageRoleUser,
	agent.RoleAssistant: openai.ChatMessageRoleAssistant,
	agent.RoleTool:      openai.ChatMessageRoleTool,
}
// Model name constants accepted by this agent. The "-hd" image model names
// are local aliases: MakeImage strips the suffix and requests HD quality.
const (
	ModelGPT_4_32k_0613         = "gpt-4-32k-0613"
	ModelGPT_4_32k_0314         = "gpt-4-32k-0314"
	ModelGPT_4_32k              = "gpt-4-32k"
	ModelGPT_4_0613             = "gpt-4-0613"
	ModelGPT_4_0314             = "gpt-4-0314"
	ModelGPT_4o                 = "gpt-4o"
	ModelGPT_4o_2024_05_13      = "gpt-4o-2024-05-13"
	ModelGPT_4o_2024_08_06      = "gpt-4o-2024-08-06"
	ModelGPT_4o_mini            = "gpt-4o-mini"
	ModelGPT_4o_mini_2024_07_18 = "gpt-4o-mini-2024-07-18"
	ModelGPT_4_turbo            = "gpt-4-turbo"
	ModelGPT_4_turbo_2024_04_09 = "gpt-4-turbo-2024-04-09"
	ModelGPT_4_0125_preview     = "gpt-4-0125-preview"
	ModelGPT_4_1106_preview     = "gpt-4-1106-preview"
	ModelGPT_4_turbo_preview    = "gpt-4-turbo-preview"
	ModelGPT_4_vision_preview   = "gpt-4-vision-preview"
	ModelGPT_4                  = "gpt-4"
	ModelGPT_3_5_turbo_0125     = "gpt-3.5-turbo-0125"
	ModelGPT_3_5_turbo_1106     = "gpt-3.5-turbo-1106"
	ModelGPT_3_5_turbo_0613     = "gpt-3.5-turbo-0613"
	ModelGPT_3_5_turbo_0301     = "gpt-3.5-turbo-0301"
	ModelGPT_3_5_turbo_16k      = "gpt-3.5-turbo-16k"
	ModelGPT_3_5_turbo_16k_0613 = "gpt-3.5-turbo-16k-0613"
	ModelGPT_3_5_turbo          = "gpt-3.5-turbo"
	ModelGPT_3_5_turbo_instruct = "gpt-3.5-turbo-instruct"
	ModelDavinci_002            = "davinci-002"
	ModelCurie                  = "curie"
	ModelCurie_002              = "curie-002"
	ModelAda_002                = "ada-002"
	ModelBabbage_002            = "babbage-002"
	ModelCode_davinci_002       = "code-davinci-002"
	ModelCode_cushman_001       = "code-cushman-001"
	ModelCode_davinci_001       = "code-davinci-001"
	ModelDallE2Std              = "dall-e-2"
	ModelDallE2HD               = "dall-e-2-hd"
	ModelDallE3Std              = "dall-e-3"
	ModelDallE3HD               = "dall-e-3-hd"
)
// Support reports which capabilities this OpenAI agent implements and
// which model names it accepts.
func (ag *Agent) Support() agent.Support {
	return agent.Support{
		Ask:                    true,
		AskWithImage:           true,
		AskWithVideo:           false,
		AskWithCodeInterpreter: true,
		AskWithWebSearch:       false,
		MakeImage:              true,
		MakeVideo:              false,
		Models: []string{ModelGPT_4_32k_0613, ModelGPT_4_32k_0314, ModelGPT_4_32k, ModelGPT_4_0613, ModelGPT_4_0314, ModelGPT_4o, ModelGPT_4o_2024_05_13, ModelGPT_4o_2024_08_06, ModelGPT_4o_mini, ModelGPT_4o_mini_2024_07_18, ModelGPT_4_turbo, ModelGPT_4_turbo_2024_04_09, ModelGPT_4_0125_preview, ModelGPT_4_1106_preview, ModelGPT_4_turbo_preview, ModelGPT_4_vision_preview, ModelGPT_4, ModelGPT_3_5_turbo_0125, ModelGPT_3_5_turbo_1106, ModelGPT_3_5_turbo_0613, ModelGPT_3_5_turbo_0301, ModelGPT_3_5_turbo_16k, ModelGPT_3_5_turbo_16k_0613, ModelGPT_3_5_turbo, ModelGPT_3_5_turbo_instruct, ModelDavinci_002, ModelCurie, ModelCurie_002, ModelAda_002, ModelBabbage_002, ModelCode_davinci_002, ModelCode_cushman_001, ModelCode_davinci_001, ModelDallE2Std, ModelDallE2HD, ModelDallE3Std, ModelDallE3HD},
	}
}
//go:embed default.yml
var defaultYml string
func init() {
agent.RegisterAgentMaker("openai", func(config agent.APIConfig) agent.Agent {
return &Agent{config: config}
defaultConf := ai.AILoadConfig{}
u.Convert(u.UnYamlMap(defaultYml), &defaultConf)
ai.Register("openai", &ai.Agent{
ChatConfigs: defaultConf.Chat,
EmbeddingConfigs: defaultConf.Embedding,
ImageConfigs: defaultConf.Image,
VideoConfigs: defaultConf.Video,
Chat: Chat,
Embedding: Embedding,
MakeImage: MakeImage,
})
}

31
default.yml Normal file
View File

@ -0,0 +1,31 @@
# Default model presets for the OpenAI agent (embedded via go:embed and
# registered in init()). Keys under each section name the preset; "model"
# selects the OpenAI model it maps to.
chat:
  # Fast, low-cost default for short questions.
  fastAsk:
    model: gpt-4o-mini-2024-07-18
  # Long-context conversations.
  longAsk:
    model: gpt-4-32k-0613
  turboAsk:
    model: gpt-4-turbo
  # Image-understanding (vision) chat.
  visionAsk:
    model: gpt-4-vision-preview
  bestAsk:
    model: gpt-4o-2024-08-06
  # Chat with the code-interpreter tool enabled (empty value = no tool config).
  codeInterpreter:
    model: gpt-4o-2024-08-06
    tools:
      codeInterpreter:
embedding:
  embedding:
    model: text-embedding-3-small
  embeddingLarge:
    model: text-embedding-3-large
image:
  makeImage:
    model: dall-e-3
    quality: standard
    width: 1024
    height: 1024
  # "dall-e-3-hd" is a local alias; the backend maps it to dall-e-3 with hd quality.
  makeImageHD:
    model: dall-e-3-hd
    quality: hd
    width: 1024
    height: 1024

50
gc.go Normal file
View File

@ -0,0 +1,50 @@
package openai
import (
"context"
"fmt"
"strings"
"time"
"apigo.cc/ai"
"github.com/sashabaranov/go-openai"
)
// MakeImage renders conf.Prompt (prefixed with conf.SystemPrompt) through the
// OpenAI image API and returns the generated image URLs together with the
// request's elapsed time in milliseconds.
func MakeImage(aiConf *ai.AIConfig, conf ai.ImageConfig) (ai.ImageResult, error) {
	client := getClient(aiConf)

	// Style: honor an explicit setting; otherwise infer "natural" from the
	// prompt and fall back to "vivid".
	style := conf.Style
	if style == "" {
		if strings.Contains(conf.Prompt, "natural") || strings.Contains(conf.Prompt, "自然") {
			style = openai.CreateImageStyleNatural
		} else {
			style = openai.CreateImageStyleVivid
		}
	}

	// Quality: default to standard when not configured.
	quality := conf.Quality
	if quality == "" {
		quality = openai.CreateImageQualityStandard
	}

	started := time.Now()
	resp, err := client.CreateImage(context.Background(), openai.ImageRequest{
		Prompt:         conf.SystemPrompt + conf.Prompt,
		Model:          conf.Model,
		Quality:        quality,
		Size:           fmt.Sprintf("%dx%d", conf.Width, conf.Height),
		Style:          style,
		ResponseFormat: openai.CreateImageResponseFormatURL,
	})
	elapsed := time.Since(started).Milliseconds()
	if err != nil {
		return ai.ImageResult{}, err
	}

	urls := make([]string, 0, len(resp.Data))
	for _, item := range resp.Data {
		urls = append(urls, item.URL)
	}
	return ai.ImageResult{
		Results:  urls,
		UsedTime: elapsed,
	}, nil
}

19
go.mod
View File

@ -1,16 +1,25 @@
module apigo.cc/ai/openai
go 1.22
go 1.18
require (
apigo.cc/ai/agent v0.0.1
github.com/sashabaranov/go-openai v1.29.1
apigo.cc/ai v0.0.1
apigo.cc/gojs v0.0.4
apigo.cc/gojs/console v0.0.1
github.com/sashabaranov/go-openai v1.32.5
github.com/ssgo/log v1.7.7
github.com/ssgo/u v1.7.9
)
require (
github.com/ssgo/config v1.7.7 // indirect
github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
github.com/ssgo/config v1.7.8 // indirect
github.com/ssgo/standard v1.7.7 // indirect
github.com/ssgo/u v1.7.7 // indirect
github.com/ssgo/tool v0.4.27 // indirect
golang.org/x/sys v0.26.0 // indirect
golang.org/x/text v0.19.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)