Low-code AI Capability Toolkit
Command-Line Tool
Install
go install apigo.cc/ai/ai/ai@latest
Usage
ai -h | --help      show usage
ai -e | --export    export an ai.ts file to ./lib for development
ai test | test.js   run test.js; if no script is specified, run ./ai.js
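A typical flow, using the commands above, is to export the ai.ts definitions once so editors can resolve the './lib/ai' imports used in the samples below, then run a script:
ai -e
ai test "你好"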
Sample
test.js
import {glm} from './lib/ai'
import console from './lib/console'

function main(...args) {
    if (!args[0]) throw new Error('no ask')
    // fastAsk streams the answer; the callback receives each chunk as it arrives
    let r = glm.fastAsk(args[0], chunk => {
        console.print(chunk)
    })
    console.println()
    // the complete answer is also returned once streaming finishes
    return r
}
run sample
ai test "你好"
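fastAsk can also be called without the streaming callback, as mod/glm.js does below; in that blocking form the answer is only available in the return value. A minimal sketch:
import {glm} from './lib/ai'
import console from './lib/console'

function main(...args) {
    if (!args[0]) throw new Error('no ask')
    // no callback: wait for the complete answer instead of streaming chunks
    let r = glm.fastAsk(args[0])
    console.println(r)
    return r
}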
Module
mod/glm.js
import {glm} from './lib/ai'

// chat returns the complete answer as text (no streaming callback)
function chat(...args) {
    if (!args[0]) throw new Error('no prompt')
    return glm.fastAsk(args[0])
}

// draw generates an image for the prompt
function draw(...args) {
    if (!args[0]) throw new Error('no ask')
    return glm.makeImage(args[0], {size: '1024x1024'})
}

module.exports = {chat, draw}
test.js
import glm from './mod/glm'
import console from './lib/console'

function main(...args) {
    console.println(glm.chat(args[0]))
}
run sample
ai test "你好"
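The module's draw function can be called the same way; the sketch below assumes glm.makeImage returns a printable result such as an image URL:
import glm from './mod/glm'
import console from './lib/console'

function main(...args) {
    // draw calls glm.makeImage; the exact return shape depends on the provider
    console.println(glm.draw(args[0]))
}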
Configure
llm.yml
openai:
  apiKey: ...
zhipu:
  apiKey: ...
or use env.yml
env.yml
llm:
  openai:
    apiKey: ...
  zhipu:
    apiKey: ...
encrypt apiKey
install sskey
go install github.com/ssgo/tool/sskey@latest
sskey -e 'your apiKey'
copy the URL-safe base64 form of the encrypted apiKey into llm.yml or env.yml
configure a custom endpoint
llm:
  openai:
    apiKey: ...
    endpoint: https://api.openai.com/v1
configure multiple API instances
llm:
  glm:
    apiKey: ...
    llm: zhipu
  glm2:
    apiKey: ...
    endpoint: https://......
    llm: zhipu
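Each instance is addressed by its config key (as llm.Get("zhipu") does in the Go example below). Assuming the JavaScript client exported from './lib/ai' is named after that key, a script might use the second instance like this (the glm2 export name is an assumption, not confirmed by the source):
import {glm2} from './lib/ai'   // export name assumed to mirror the config key above
import console from './lib/console'

function main(...args) {
    // glm2 points at the second configured instance with its own endpoint
    console.println(glm2.fastAsk(args[0]))
}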
Call the JavaScript API from Go
Install
go get apigo.cc/ai/ai
Usage
package main

import (
	"apigo.cc/ai/ai/js"
	"fmt"
)

func main() {
	// run a JavaScript snippet; the trailing arguments are exposed to the script as args
	result, err := js.Run(`return ai.glm.fastAsk(args[0])`, "", "你好")
	// js.RunFile
	// js.StartFromFile
	// js.StartFromCode
	if err != nil {
		fmt.Println(err.Error())
	} else if result != nil {
		fmt.Println(result)
	}
}
Call the Go API
Install
go get apigo.cc/ai/ai
Usage
package main

import (
	"apigo.cc/ai/ai"
	"apigo.cc/ai/ai/llm"
	"fmt"
)

func main() {
	// load the llm.yml / env.yml configuration
	ai.Init()
	// get the client configured under the "zhipu" key
	glm := llm.Get("zhipu")
	// FastAsk streams the answer through the callback and returns the full result and token usage
	r, usage, err := glm.FastAsk(llm.Messages().User().Text("你是什么模型").Make(), func(text string) {
		fmt.Print(text)
	})
	if err != nil {
		fmt.Println(err)
	} else {
		fmt.Println()
		fmt.Println("result:", r)
		fmt.Println("usage:", usage)
	}
}