// ai_old/llm/openai/gc.go
package openai

import (
	"context"
	"strings"
	"time"

	"apigo.cc/ai/ai/interface/llm"
	"github.com/sashabaranov/go-openai"
)
// Previous API shape, kept for reference:
//
// func (lm *LLM) FastMakeImage(prompt, size, refImage string) ([]string, llm.Usage, error) {
// 	return lm.MakeImage(ModelDallE3Std, prompt, size, refImage)
// }
//
// func (lm *LLM) BestMakeImage(prompt, size, refImage string) ([]string, llm.Usage, error) {
// 	return lm.MakeImage(ModelDallE3HD, prompt, size, refImage)
// }
//
// func (lm *LLM) MakeImage(model, prompt, size, refImage string) ([]string, llm.Usage, error) {
func (lm *LLM) FastMakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) {
2024-09-17 18:44:21 +08:00
config.Model = ModelDallE3Std
return lm.MakeImage(prompt, config)
}
func (lm *LLM) BestMakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) {
2024-09-17 18:44:21 +08:00
config.Model = ModelDallE3HD
return lm.MakeImage(prompt, config)
}
func (lm *LLM) MakeImage(prompt string, config llm.GCConfig) ([]string, llm.Usage, error) {
2024-09-17 18:44:21 +08:00
openaiConf := openai.DefaultConfig(lm.config.ApiKey)
if lm.config.Endpoint != "" {
openaiConf.BaseURL = lm.config.Endpoint
}
2024-09-18 18:29:21 +08:00
config.SetDefault(&lm.config.GCConfig)
2024-09-17 18:44:21 +08:00
c := openai.NewClientWithConfig(openaiConf)
style := openai.CreateImageStyleVivid
if (!strings.Contains(prompt, "vivid") || !strings.Contains(prompt, "生动的")) && (strings.Contains(prompt, "natural") || strings.Contains(prompt, "自然的")) {
style = openai.CreateImageStyleNatural
}
quality := openai.CreateImageQualityStandard
2024-09-18 18:29:21 +08:00
model := config.GetModel()
if strings.HasSuffix(model, "-hd") {
2024-09-17 18:44:21 +08:00
quality = openai.CreateImageQualityHD
2024-09-18 18:29:21 +08:00
model = model[0 : len(model)-3]
2024-09-17 18:44:21 +08:00
}
t1 := time.Now().UnixMilli()
2024-09-17 18:44:21 +08:00
r, err := c.CreateImage(context.Background(), openai.ImageRequest{
Prompt: prompt,
2024-09-18 18:29:21 +08:00
Model: model,
2024-09-17 18:44:21 +08:00
Quality: quality,
2024-09-18 18:29:21 +08:00
Size: config.GetSize(),
2024-09-17 18:44:21 +08:00
Style: style,
ResponseFormat: openai.CreateImageResponseFormatURL,
})
t2 := time.Now().UnixMilli() - t1
2024-09-17 18:44:21 +08:00
if err == nil {
results := make([]string, 0)
for _, item := range r.Data {
results = append(results, item.URL)
}
return results, llm.Usage{
AskTokens: 0,
AnswerTokens: 0,
TotalTokens: 0,
UsedTime: t2,
}, nil
2024-09-17 18:44:21 +08:00
} else {
return nil, llm.Usage{}, err
2024-09-17 18:44:21 +08:00
}
}
func (lm *LLM) FastMakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) {
2024-09-17 18:44:21 +08:00
return lm.MakeVideo(prompt, config)
}
func (lm *LLM) BestMakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) {
2024-09-17 18:44:21 +08:00
return lm.MakeVideo(prompt, config)
}
func (lm *LLM) MakeVideo(prompt string, config llm.GCConfig) ([]string, []string, llm.Usage, error) {
return nil, nil, llm.Usage{}, nil
2024-09-17 18:44:21 +08:00
}