use new gojs

commit 998a9f4ad5 (parent 471f13a27d)
.gitignore (new file: 6 lines)
@@ -0,0 +1,6 @@
+.*
+!.gitignore
+go.sum
+env.yml
+node_modules
+package.json
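Note on the patterns above: `.*` ignores every dotfile in the repository, and the `!.gitignore` negation on the next line re-includes this file itself; the remaining entries keep the lockfile (go.sum), local config (env.yml), and Node artifacts out of version control.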
aigc.go (deleted: 95 lines)
@@ -1,95 +0,0 @@
-package openai
-
-import (
-    "context"
-    "github.com/sashabaranov/go-openai"
-    "strings"
-)
-
-func (ag *Agent) FastMakeImage(prompt, size, refImage string) ([]string, error) {
-    return ag.MakeImage(ModelDallE3Std, prompt, size, refImage)
-}
-
-func (ag *Agent) BestMakeImage(prompt, size, refImage string) ([]string, error) {
-    return ag.MakeImage(ModelDallE3HD, prompt, size, refImage)
-}
-
-func (ag *Agent) MakeImage(model, prompt, size, refImage string) ([]string, error) {
-    openaiConf := openai.DefaultConfig(ag.config.ApiKey)
-    if ag.config.Endpoint != "" {
-        openaiConf.BaseURL = ag.config.Endpoint
-    }
-    c := openai.NewClientWithConfig(openaiConf)
-    style := openai.CreateImageStyleVivid
-    if (!strings.Contains(prompt, "vivid") || !strings.Contains(prompt, "生动的")) && (strings.Contains(prompt, "natural") || strings.Contains(prompt, "自然的")) {
-        style = openai.CreateImageStyleNatural
-    }
-    quality := openai.CreateImageQualityStandard
-    if strings.HasSuffix(model, "-hd") {
-        quality = openai.CreateImageQualityHD
-        model = model[0 : len(model)-3]
-    }
-    r, err := c.CreateImage(context.Background(), openai.ImageRequest{
-        Prompt:         prompt,
-        Model:          model,
-        Quality:        quality,
-        Size:           size,
-        Style:          style,
-        ResponseFormat: openai.CreateImageResponseFormatURL,
-    })
-    if err == nil {
-        results := make([]string, 0)
-        for _, item := range r.Data {
-            results = append(results, item.URL)
-        }
-        return results, nil
-    } else {
-        return nil, err
-    }
-}
-
-func (ag *Agent) FastMakeVideo(prompt, size, refImage string) ([]string, []string, error) {
-    return ag.MakeVideo("", prompt, size, refImage)
-}
-
-func (ag *Agent) BestMakeVideo(prompt, size, refImage string) ([]string, []string, error) {
-    return ag.MakeVideo("", prompt, size, refImage)
-}
-
-func (ag *Agent) MakeVideo(model, prompt, size, refImage string) ([]string, []string, error) {
-    //c, err := openai.NewClient(openai.WithAPIKey(ag.config.ApiKey), openai.WithBaseURL(ag.config.Endpoint))
-    //if err != nil {
-    //	return nil, nil, err
-    //}
-    //
-    //cc := c.VideoGeneration(model).SetPrompt(prompt)
-    //if refImage != "" {
-    //	cc.SetImageURL(refImage)
-    //}
-    //
-    //if resp, err := cc.Do(context.Background()); err == nil {
-    //	for i := 0; i < 1200; i++ {
-    //		r, err := c.AsyncResult(resp.ID).Do(context.Background())
-    //		if err != nil {
-    //			return nil, nil, err
-    //		}
-    //		if r.TaskStatus == openai.VideoGenerationTaskStatusSuccess {
-    //			covers := make([]string, 0)
-    //			results := make([]string, 0)
-    //			for _, item := range r.VideoResult {
-    //				results = append(results, item.URL)
-    //				covers = append(covers, item.CoverImageURL)
-    //			}
-    //			return results, covers, nil
-    //		}
-    //		if r.TaskStatus == openai.VideoGenerationTaskStatusFail {
-    //			return nil, nil, errors.New("fail on task " + resp.ID)
-    //		}
-    //		time.Sleep(3 * time.Second)
-    //	}
-    //	return nil, nil, errors.New("timeout on task " + resp.ID)
-    //} else {
-    //	return nil, nil, err
-    //}
-    return nil, nil, nil
-}
chat.go (144 lines changed)
@@ -1,73 +1,75 @@
 package openai
 
 import (
-    "apigo.cc/ai/agent"
+    "apigo.cc/ai/llm/llm"
+    "bytes"
     "context"
+    "encoding/binary"
+    "fmt"
     "github.com/sashabaranov/go-openai"
     "github.com/ssgo/log"
+    "github.com/ssgo/u"
     "strings"
+    "time"
 )
 
-func (ag *Agent) FastAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
-    return ag.Ask(messages, &agent.ChatModelConfig{
+func (lm *LLM) FastAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
+    return lm.Ask(messages, llm.ChatConfig{
         Model: ModelGPT_4o_mini_2024_07_18,
     }, callback)
 }
 
-func (ag *Agent) LongAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
-    return ag.Ask(messages, &agent.ChatModelConfig{
+func (lm *LLM) LongAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
+    return lm.Ask(messages, llm.ChatConfig{
         Model: ModelGPT_4_32k_0613,
     }, callback)
 }
 
-func (ag *Agent) BatterAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
-    return ag.Ask(messages, &agent.ChatModelConfig{
+func (lm *LLM) BatterAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
+    return lm.Ask(messages, llm.ChatConfig{
         Model: ModelGPT_4_turbo,
     }, callback)
 }
 
-func (ag *Agent) BestAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
-    return ag.Ask(messages, &agent.ChatModelConfig{
+func (lm *LLM) BestAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
+    return lm.Ask(messages, llm.ChatConfig{
         Model: ModelGPT_4o_2024_08_06,
     }, callback)
 }
 
-func (ag *Agent) MultiAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
-    return ag.Ask(messages, &agent.ChatModelConfig{
+func (lm *LLM) MultiAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
+    return lm.Ask(messages, llm.ChatConfig{
         Model: ModelGPT_4o_mini_2024_07_18,
     }, callback)
 }
 
-func (ag *Agent) BestMultiAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
-    return ag.Ask(messages, &agent.ChatModelConfig{
+func (lm *LLM) BestMultiAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
+    return lm.Ask(messages, llm.ChatConfig{
         Model: ModelGPT_4o_2024_08_06,
     }, callback)
 }
 
-func (ag *Agent) CodeInterpreterAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
-    return ag.Ask(messages, &agent.ChatModelConfig{
+func (lm *LLM) CodeInterpreterAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
+    return lm.Ask(messages, llm.ChatConfig{
         Model: ModelGPT_4o,
-        Tools: map[string]any{agent.ToolCodeInterpreter: nil},
+        Tools: map[string]any{llm.ToolCodeInterpreter: nil},
     }, callback)
 }
 
-func (ag *Agent) WebSearchAsk(messages []agent.ChatMessage, callback func(answer string)) (string, agent.TokenUsage, error) {
-    return ag.Ask(messages, &agent.ChatModelConfig{
+func (lm *LLM) WebSearchAsk(messages []llm.ChatMessage, callback func(answer string)) (string, llm.Usage, error) {
+    return lm.Ask(messages, llm.ChatConfig{
         Model: ModelGPT_4o_mini_2024_07_18,
-        Tools: map[string]any{agent.ToolWebSearch: nil},
+        Tools: map[string]any{llm.ToolWebSearch: nil},
     }, callback)
 }
 
-func (ag *Agent) Ask(messages []agent.ChatMessage, config *agent.ChatModelConfig, callback func(answer string)) (string, agent.TokenUsage, error) {
-    openaiConf := openai.DefaultConfig(ag.config.ApiKey)
-    if ag.config.Endpoint != "" {
-        openaiConf.BaseURL = ag.config.Endpoint
+func (lm *LLM) Ask(messages []llm.ChatMessage, config llm.ChatConfig, callback func(answer string)) (string, llm.Usage, error) {
+    openaiConf := openai.DefaultConfig(lm.config.ApiKey)
+    if lm.config.Endpoint != "" {
+        openaiConf.BaseURL = lm.config.Endpoint
     }
 
-    if config == nil {
-        config = &agent.ChatModelConfig{}
-    }
-    config.SetDefault(&ag.config.DefaultChatModelConfig)
+    config.SetDefault(&lm.config.ChatConfig)
 
     agentMessages := make([]openai.ChatCompletionMessage, len(messages))
     for i, msg := range messages {
@@ -78,9 +80,9 @@ func (ag *Agent) Ask(messages []agent.ChatMessage, config *agent.ChatModelConfig
             part := openai.ChatMessagePart{}
             part.Type = TypeMap[inPart.Type]
             switch inPart.Type {
-            case agent.TypeText:
+            case llm.TypeText:
                 part.Text = inPart.Content
-            case agent.TypeImage:
+            case llm.TypeImage:
                 part.ImageURL = &openai.ChatMessageImageURL{
                     URL:    inPart.Content,
                     Detail: openai.ImageURLDetailAuto,
@@ -89,9 +91,16 @@ func (ag *Agent) Ask(messages []agent.ChatMessage, config *agent.ChatModelConfig
                 contents[j] = part
             }
         }
-        agentMessages[i] = openai.ChatCompletionMessage{
-            Role:         RoleMap[msg.Role],
-            MultiContent: contents,
+        if len(contents) == 1 && contents[0].Type == llm.TypeText {
+            agentMessages[i] = openai.ChatCompletionMessage{
+                Role:    RoleMap[msg.Role],
+                Content: contents[0].Text,
+            }
+        } else {
+            agentMessages[i] = openai.ChatCompletionMessage{
+                Role:         RoleMap[msg.Role],
+                MultiContent: contents,
+            }
         }
     }
 
@@ -108,9 +117,9 @@ func (ag *Agent) Ask(messages []agent.ChatMessage, config *agent.ChatModelConfig
 
     for name := range config.GetTools() {
         switch name {
-        case agent.ToolCodeInterpreter:
+        case llm.ToolCodeInterpreter:
             opt.Tools = append(opt.Tools, openai.Tool{Type: "code_interpreter"})
-        case agent.ToolWebSearch:
+        case llm.ToolWebSearch:
         }
     }
 
@@ -120,7 +129,7 @@ func (ag *Agent) Ask(messages []agent.ChatMessage, config *agent.ChatModelConfig
     r, err := c.CreateChatCompletionStream(context.Background(), opt)
     if err == nil {
         results := make([]string, 0)
-        usage := agent.TokenUsage{}
+        usage := llm.Usage{}
         for {
             if r2, err := r.Recv(); err == nil {
                 if r2.Choices != nil {
@@ -143,26 +152,79 @@ func (ag *Agent) Ask(messages []agent.ChatMessage, config *agent.ChatModelConfig
             return strings.Join(results, ""), usage, nil
         } else {
             log.DefaultLogger.Error(err.Error())
-            return "", agent.TokenUsage{}, err
+            return "", llm.Usage{}, err
         }
     } else {
-        r, err := c.CreateChatCompletion(context.Background(), opt)
-        if err == nil {
+        t1 := time.Now().UnixMilli()
+        if r, err := c.CreateChatCompletion(context.Background(), opt); err == nil {
+            t2 := time.Now().UnixMilli() - t1
             results := make([]string, 0)
             if r.Choices != nil {
                 for _, ch := range r.Choices {
                     results = append(results, ch.Message.Content)
                 }
             }
-            return strings.Join(results, ""), agent.TokenUsage{
+            return strings.Join(results, ""), llm.Usage{
                 AskTokens:    int64(r.Usage.PromptTokens),
                 AnswerTokens: int64(r.Usage.CompletionTokens),
                 TotalTokens:  int64(r.Usage.TotalTokens),
+                UsedTime:     t2,
             }, nil
         } else {
             //fmt.Println(u.BMagenta(err.Error()), u.BMagenta(u.JsonP(r)))
-            return "", agent.TokenUsage{}, err
+            return "", llm.Usage{}, err
         }
     }
 }
+
+func (lm *LLM) FastEmbedding(text string) ([]byte, llm.Usage, error) {
+    return lm.Embedding(text, string(openai.AdaEmbeddingV2))
+}
+
+func (lm *LLM) BestEmbedding(text string) ([]byte, llm.Usage, error) {
+    return lm.Embedding(text, string(openai.LargeEmbedding3))
+}
+
+func (lm *LLM) Embedding(text, model string) ([]byte, llm.Usage, error) {
+    fmt.Println(111, model, text)
+    openaiConf := openai.DefaultConfig(lm.config.ApiKey)
+    if lm.config.Endpoint != "" {
+        openaiConf.BaseURL = lm.config.Endpoint
+    }
+
+    c := openai.NewClientWithConfig(openaiConf)
+    req := openai.EmbeddingRequest{
+        Input:          text,
+        Model:          openai.EmbeddingModel(model),
+        User:           "",
+        EncodingFormat: "",
+        Dimensions:     0,
+    }
+
+    if lm.config.Debug {
+        fmt.Println(u.JsonP(req))
+    }
+
+    t1 := time.Now().UnixMilli()
+    if r, err := c.CreateEmbeddings(context.Background(), req); err == nil {
+        t2 := time.Now().UnixMilli() - t1
+        buf := new(bytes.Buffer)
+        if r.Data != nil {
+            for _, ch := range r.Data {
+                for _, v := range ch.Embedding {
+                    _ = binary.Write(buf, binary.LittleEndian, v)
+                }
+            }
+        }
+        fmt.Println(len(buf.Bytes()))
+        return buf.Bytes(), llm.Usage{
+            AskTokens:    int64(r.Usage.PromptTokens),
+            AnswerTokens: int64(r.Usage.CompletionTokens),
+            TotalTokens:  int64(r.Usage.TotalTokens),
+            UsedTime:     t2,
+        }, nil
+    } else {
+        fmt.Println(err.Error())
+        return nil, llm.Usage{}, err
+    }
+}
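The new Embedding helpers flatten every returned vector into a single little-endian byte blob. For context, a minimal sketch of how a caller could decode that blob back into floats, written as if inside this package (the helper name is hypothetical; the element type matches go-openai's Embedding field, which is []float32):

    // decodeEmbedding is a hypothetical helper: it reverses the binary.Write
    // loop in Embedding, reading little-endian float32 values out of the blob.
    func decodeEmbedding(b []byte) ([]float32, error) {
        out := make([]float32, len(b)/4) // 4 bytes per little-endian float32
        err := binary.Read(bytes.NewReader(b), binary.LittleEndian, out)
        return out, err
    }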
config.go (28 lines changed)
@@ -1,24 +1,24 @@
 package openai
 
 import (
-    "apigo.cc/ai/agent"
+    "apigo.cc/ai/llm/llm"
     "github.com/sashabaranov/go-openai"
 )
 
-type Agent struct {
-    config agent.APIConfig
+type LLM struct {
+    config llm.Config
 }
 
 var TypeMap = map[string]openai.ChatMessagePartType{
-    agent.TypeText:  openai.ChatMessagePartTypeText,
-    agent.TypeImage: openai.ChatMessagePartTypeImageURL,
-    //agent.TypeVideo: "video_url",
+    llm.TypeText:  openai.ChatMessagePartTypeText,
+    llm.TypeImage: openai.ChatMessagePartTypeImageURL,
+    //llm.TypeVideo: "video_url",
 }
 var RoleMap = map[string]string{
-    agent.RoleSystem:    openai.ChatMessageRoleSystem,
-    agent.RoleUser:      openai.ChatMessageRoleUser,
-    agent.RoleAssistant: openai.ChatMessageRoleAssistant,
-    agent.RoleTool:      openai.ChatMessageRoleTool,
+    llm.RoleSystem:    openai.ChatMessageRoleSystem,
+    llm.RoleUser:      openai.ChatMessageRoleUser,
+    llm.RoleAssistant: openai.ChatMessageRoleAssistant,
+    llm.RoleTool:      openai.ChatMessageRoleTool,
 }
 
 const (
@@ -61,8 +61,8 @@ const (
     ModelDallE3HD = "dall-e-3-hd"
 )
 
-func (ag *Agent) Support() agent.Support {
-    return agent.Support{
+func (ag *LLM) Support() llm.Support {
+    return llm.Support{
         Ask:          true,
         AskWithImage: true,
         AskWithVideo: false,
@@ -75,7 +75,7 @@ func (ag *Agent) Support() agent.Support {
 }
 
 func init() {
-    agent.RegisterAgentMaker("openai", func(config agent.APIConfig) agent.Agent {
-        return &Agent{config: config}
+    llm.Register("openai", func(config llm.Config) llm.LLM {
+        return &LLM{config: config}
     })
 }
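Because init() now registers the provider through llm.Register, a consumer only needs a blank import to make the "openai" backend available; a minimal sketch under that assumption (the lookup side of the llm package is not shown in this diff):

    package main

    import (
        _ "apigo.cc/ai/openai" // blank import: runs init(), which registers the "openai" maker via llm.Register
    )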
gc.go (new file: 83 lines)
@@ -0,0 +1,83 @@
+package openai
+
+import (
+    "apigo.cc/ai/llm/llm"
+    "context"
+    "github.com/sashabaranov/go-openai"
+    "strings"
+    "time"
+)
+
+// func (lm *LLM) FastMakeImage(prompt, size, refImage string) ([]string, llm.Usage, error) {
+//	return lm.MakeImage(ModelDallE3Std, prompt, size, refImage)
+// }
+//
+// func (lm *LLM) BestMakeImage(prompt, size, refImage string) ([]string, llm.Usage, error) {
+//	return lm.MakeImage(ModelDallE3HD, prompt, size, refImage)
+// }
+//
+// func (lm *LLM) MakeImage(model, prompt, size, refImage string) ([]string, llm.Usage, error) {
+func (lm *LLM) FastMakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) {
+    config.Model = ModelDallE3Std
+    return lm.MakeImage(prompt, config)
+}
+
+func (lm *LLM) BestMakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) {
+    config.Model = ModelDallE3HD
+    return lm.MakeImage(prompt, config)
+}
+
+func (lm *LLM) MakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) {
+    openaiConf := openai.DefaultConfig(lm.config.ApiKey)
+    if lm.config.Endpoint != "" {
+        openaiConf.BaseURL = lm.config.Endpoint
+    }
+    config.SetDefault(&lm.config.GCConfig)
+    c := openai.NewClientWithConfig(openaiConf)
+    style := openai.CreateImageStyleVivid
+    if (!strings.Contains(prompt, "vivid") || !strings.Contains(prompt, "生动的")) && (strings.Contains(prompt, "natural") || strings.Contains(prompt, "自然的")) {
+        style = openai.CreateImageStyleNatural
+    }
+    quality := openai.CreateImageQualityStandard
+    model := config.GetModel()
+    if strings.HasSuffix(model, "-hd") {
+        quality = openai.CreateImageQualityHD
+        model = model[0 : len(model)-3]
+    }
+    t1 := time.Now().UnixMilli()
+    r, err := c.CreateImage(context.Background(), openai.ImageRequest{
+        Prompt:         prompt,
+        Model:          model,
+        Quality:        quality,
+        Size:           config.GetSize(),
+        Style:          style,
+        ResponseFormat: openai.CreateImageResponseFormatURL,
+    })
+    t2 := time.Now().UnixMilli() - t1
+    if err == nil {
+        results := make([]string, 0)
+        for _, item := range r.Data {
+            results = append(results, item.URL)
+        }
+        return results, llm.Usage{
+            AskTokens:    0,
+            AnswerTokens: 0,
+            TotalTokens:  0,
+            UsedTime:     t2,
+        }, nil
+    } else {
+        return nil, llm.Usage{}, err
+    }
+}
+
+func (lm *LLM) FastMakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) {
+    return lm.MakeVideo(prompt, config)
+}
+
+func (lm *LLM) BestMakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) {
+    return lm.MakeVideo(prompt, config)
+}
+
+func (lm *LLM) MakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) {
+    return nil, nil, llm.Usage{}, nil
+}
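For context, a hedged usage sketch of the new image API, written as if inside this package: FastMakeImage overwrites config.Model itself, and MakeImage fills remaining defaults from the provider config via SetDefault, so a zero-value llm.GCConfig should suffice (fields of llm.GCConfig beyond Model are not visible in this diff):

    // Hypothetical example; in real use the *LLM comes from the factory
    // that init() registers under "openai".
    func ExampleLLM_FastMakeImage() {
        lm := &LLM{} // assumes a populated llm.Config in real use
        urls, usage, err := lm.FastMakeImage("a quiet harbor at dawn", llm.GCConfig{})
        if err == nil {
            fmt.Println(urls, usage.UsedTime) // image URLs and elapsed milliseconds
        }
    }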
go.mod (6 lines changed)
@@ -3,14 +3,14 @@ module apigo.cc/ai/openai
 go 1.22
 
 require (
-    apigo.cc/ai/agent v0.0.1
+    apigo.cc/ai/llm v0.0.4
     github.com/sashabaranov/go-openai v1.29.1
     github.com/ssgo/log v1.7.7
+    github.com/ssgo/u v1.7.9
 )
 
 require (
-    github.com/ssgo/config v1.7.7 // indirect
+    github.com/ssgo/config v1.7.8 // indirect
     github.com/ssgo/standard v1.7.7 // indirect
-    github.com/ssgo/u v1.7.7 // indirect
     gopkg.in/yaml.v3 v3.0.1 // indirect
 )
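To reproduce this dependency swap locally, the standard module commands apply (version taken from the require block above):

    go get apigo.cc/ai/llm@v0.0.4
    go mod tidy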