// Package llm defines a provider-agnostic interface for large language
// models and a small registry for creating and looking up named instances.
package llm

import "sync"

const (
    // Message content types.
    TypeText  = "text"
    TypeImage = "image"
    TypeVideo = "video"

    // Chat message roles.
    RoleSystem    = "system"
    RoleUser      = "user"
    RoleAssistant = "assistant"
    RoleTool      = "tool"

    // Built-in tool identifiers.
    ToolCodeInterpreter = "codeInterpreter"
    ToolWebSearch       = "webSearch"
)

// Support describes which capabilities a provider implementation offers and
// which models it exposes.
type Support struct {
    Ask                    bool
    AskWithImage           bool
    AskWithVideo           bool
    AskWithCodeInterpreter bool
    AskWithWebSearch       bool
    MakeImage              bool
    MakeVideo              bool
    Models                 []string
}

// Config holds the settings used to construct a provider instance.
type Config struct {
    Endpoint   string
    ApiKey     string
    ChatConfig ChatConfig
    GCConfig   GCConfig
    Debug      bool
}

// LLM is the interface implemented by each model provider. Call Support to
// check which of these capabilities a given provider actually offers.
type LLM interface {
    Support() Support

    Ask(messages []ChatMessage, config ChatConfig, callback func(answer string)) (string, Usage, error)
    FastAsk(messages []ChatMessage, callback func(answer string)) (string, Usage, error)
    LongAsk(messages []ChatMessage, callback func(answer string)) (string, Usage, error)
    BatterAsk(messages []ChatMessage, callback func(answer string)) (string, Usage, error)
    BestAsk(messages []ChatMessage, callback func(answer string)) (string, Usage, error)
    MultiAsk(messages []ChatMessage, callback func(answer string)) (string, Usage, error)
    BestMultiAsk(messages []ChatMessage, callback func(answer string)) (string, Usage, error)
    CodeInterpreterAsk(messages []ChatMessage, callback func(answer string)) (string, Usage, error)
    WebSearchAsk(messages []ChatMessage, callback func(answer string)) (string, Usage, error)

    MakeImage(prompt string, config GCConfig) ([]string, Usage, error)
    FastMakeImage(prompt string, config GCConfig) ([]string, Usage, error)
    BestMakeImage(prompt string, config GCConfig) ([]string, Usage, error)

    MakeVideo(prompt string, config GCConfig) ([]string, []string, Usage, error)
    FastMakeVideo(prompt string, config GCConfig) ([]string, []string, Usage, error)
    BestMakeVideo(prompt string, config GCConfig) ([]string, []string, Usage, error)

    Embedding(text string, model string) ([]byte, Usage, error)
    FastEmbedding(text string) ([]byte, Usage, error)
    BestEmbedding(text string) ([]byte, Usage, error)
}
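
// A minimal calling sketch (illustrative only: the instance name "chat" is an
// assumption, and ChatMessage, Usage and the callback's exact streaming
// behaviour are defined elsewhere in this package / by the concrete provider):
//
//    var messages []ChatMessage // build the conversation elsewhere
//    answer, usage, err := Get("chat").FastAsk(messages, func(answer string) {
//        fmt.Print(answer) // output delivered through the callback
//    })
//    // answer holds the final reply, usage the reported consumption, err any failure.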

// llmMakers holds registered provider factories keyed by llmId;
// llms holds instances created by Create, keyed by name.
var llmMakers = map[string]func(Config) LLM{}
var llmMakersLock = sync.RWMutex{}

var llms = map[string]LLM{}
var llmsLock = sync.RWMutex{}

// Register makes a provider factory available under the given llmId so that
// Create can build instances from it.
func Register(llmId string, maker func(Config) LLM) {
    llmMakersLock.Lock()
    llmMakers[llmId] = maker
    llmMakersLock.Unlock()
}
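
// A minimal registration sketch (illustrative only: the id "myprovider" and
// the myLLM type are assumptions, not part of this package). A provider
// package would typically register its factory from init:
//
//    func init() {
//        llm.Register("myprovider", func(config llm.Config) llm.LLM {
//            return &myLLM{config: config}
//        })
//    }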

// Create builds a new LLM using the factory registered under llmId, stores it
// under name, and returns it. It returns nil if no factory is registered for llmId.
func Create(name, llmId string, config Config) LLM {
    llmMakersLock.RLock()
    maker := llmMakers[llmId]
    llmMakersLock.RUnlock()

    if maker != nil {
        llm := maker(config)
        llmsLock.Lock()
        llms[name] = llm
        llmsLock.Unlock()
        return llm
    }
    return nil
}

// Get returns the instance previously created under name, or nil if none exists.
func Get(name string) LLM {
    llmsLock.RLock()
    llm := llms[name]
    llmsLock.RUnlock()
    return llm
}

// List returns a copy of all created instances, keyed by name.
func List() map[string]LLM {
    list := map[string]LLM{}
    llmsLock.RLock()
    for name, llm := range llms {
        list[name] = llm
    }
    llmsLock.RUnlock()
    return list
}
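
// A minimal wiring sketch (illustrative only: the id "myprovider" and the
// Endpoint/ApiKey values are placeholders; a real id must first have been
// registered by a provider package via Register):
//
//    Create("chat", "myprovider", Config{
//        Endpoint: "https://example.com/v1",
//        ApiKey:   "sk-...",
//    })
//    if model := Get("chat"); model != nil && model.Support().Ask {
//        // model is ready for Ask / FastAsk / ... calls
//    }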