diff --git a/code/.gitignore b/code/.gitignore
new file mode 100644
index 00000000..d742ec84
--- /dev/null
+++ b/code/.gitignore
@@ -0,0 +1,2 @@
+/apikey_usage.json
+*.pem
diff --git a/code/calc/calc.go b/code/calc/calc.go
deleted file mode 100644
index c6139ba5..00000000
--- a/code/calc/calc.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package calc
-
-import (
- "fmt"
-
- "gopkg.in/Knetic/govaluate.v2"
-)
-
-func CalcStr(str string) (float64, error) {
- fmt.Println(str)
-
- expression, _ := govaluate.NewEvaluableExpression(str)
- out, _ := expression.Evaluate(nil)
- fmt.Println(out)
- return out.(float64), nil
-}
-
-func FormatMathOut(out float64) string {
- //if is int
- if out == float64(int(out)) {
- return fmt.Sprintf("%d", int(out))
- }
- return fmt.Sprintf("%f", out)
-}
diff --git a/code/calc/calc_test.go b/code/calc/calc_test.go
deleted file mode 100644
index 77cb88e3..00000000
--- a/code/calc/calc_test.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package calc
-
-import (
- "testing"
-)
-
-func TestCalc(t *testing.T) {
-
- out, err := CalcStr("1+1")
- if err != nil {
- t.Error(err)
- }
-
- if out != 2 {
- t.Error("1+1 should be 2")
- }
-}
-
-func TestCalc2(t *testing.T) {
-
- out, err := CalcStr("1+2")
- if err != nil {
- t.Error(err)
- }
-
- if out != 3 {
- t.Error("1+2 should be 3")
- }
-}
-
-func TestCalc3(t *testing.T) {
- //22*32
- out, err := CalcStr("22*32")
- if err != nil {
- t.Error(err)
- }
-
- if out != 704 {
- t.Error("22*32 should be 704")
- }
-}
diff --git a/code/config.example.yaml b/code/config.example.yaml
index 46c6b20a..d93a1abe 100644
--- a/code/config.example.yaml
+++ b/code/config.example.yaml
@@ -1,5 +1,37 @@
+# 飞书
+BASE_URL: https://open.feishu.cn
APP_ID: cli_axxx
APP_SECRET: xxx
-APP_ENCRYPT_KEY: xxxx
+APP_ENCRYPT_KEY: xxx
APP_VERIFICATION_TOKEN: xxx
-OPENAI_KEY: XXX
+# 请确保和飞书应用管理平台中的设置一致
+BOT_NAME: chatGpt
+# openAI key 支持负载均衡 可以填写多个key 用逗号分隔
+OPENAI_KEY: sk-xxx,sk-xxx,sk-xxx
+# openAI model 指定模型,默认为 gpt-3.5-turbo
+# 可选参数有:"gpt-4-1106-preview", "gpt-4-32K","gpt-4","gpt-3.5-turbo-16k", "gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-1106" 等
+# 如果使用gpt-4,请确认自己是否有接口调用白名单
+OPENAI_MODEL: gpt-3.5-turbo
+# openAI 最大token数 默认为2000
+OPENAI_MAX_TOKENS: 2000
+# 响应超时时间,单位为毫秒,默认为550毫秒
+OPENAI_HTTP_CLIENT_TIMEOUT: 550
+# 服务器配置
+HTTP_PORT: 9000
+HTTPS_PORT: 9001
+USE_HTTPS: false
+CERT_FILE: cert.pem
+KEY_FILE: key.pem
+# openai 地址, 一般不需要修改, 除非你有自己的反向代理
+API_URL: https://api.openai.com
+# 代理设置, 例如 "http://127.0.0.1:7890", ""代表不使用代理
+HTTP_PROXY: ""
+# 是否开启流式接口返回
+STREAM_MODE: false # set true to use stream mode
+# AZURE OPENAI
+AZURE_ON: false # set true to use Azure rather than OpenAI
+AZURE_API_VERSION: 2023-03-15-preview # 2023-03-15-preview or 2022-12-01 refer https://learn.microsoft.com/en-us/azure/cognitive-services/openai/reference#completions
+AZURE_RESOURCE_NAME: xxxx # you can find in endpoint url. Usually looks like https://{RESOURCE_NAME}.openai.azure.com
+AZURE_DEPLOYMENT_NAME: xxxx # usually looks like ...openai.azure.com/openai/deployments/{DEPLOYMENT_NAME}/chat/completions.
+AZURE_OPENAI_TOKEN: xxxx # Authentication key. We can use Azure Active Directory Authentication(TBD).
+
diff --git a/code/go.mod b/code/go.mod
index fa7420bb..59b92c77 100644
--- a/code/go.mod
+++ b/code/go.mod
@@ -5,20 +5,32 @@ go 1.18
require github.com/larksuite/oapi-sdk-go/v3 v3.0.14
require (
+ github.com/duke-git/lancet/v2 v2.1.17
github.com/gin-gonic/gin v1.8.2
+ github.com/google/uuid v1.3.0
github.com/larksuite/oapi-sdk-gin v1.0.0
+ github.com/pandodao/tokenizer-go v0.2.0
github.com/patrickmn/go-cache v2.1.0+incompatible
+ github.com/pion/opus v0.0.0-20230123082803-1052c3e89e58
+ github.com/sashabaranov/go-openai v1.13.0
+ github.com/sirupsen/logrus v1.9.0
+ github.com/spf13/pflag v1.0.5
github.com/spf13/viper v1.14.0
- gopkg.in/Knetic/govaluate.v2 v2.3.0
+ gopkg.in/yaml.v2 v2.4.0
)
require (
+ github.com/dlclark/regexp2 v1.8.1 // indirect
+ github.com/dop251/goja v0.0.0-20230304130813-e2f543bf4b4c // indirect
+ github.com/dop251/goja_nodejs v0.0.0-20230226152057-060fa99b809f // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.0 // indirect
github.com/go-playground/validator/v10 v10.11.1 // indirect
+ github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
github.com/goccy/go-json v0.10.0 // indirect
+ github.com/google/pprof v0.0.0-20230309165930-d61513b1440d // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/leodido/go-urn v1.2.1 // indirect
@@ -32,15 +44,16 @@ require (
github.com/spf13/afero v1.9.3 // indirect
github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
- github.com/spf13/pflag v1.0.5 // indirect
github.com/subosito/gotenv v1.4.1 // indirect
github.com/ugorji/go/codec v1.2.8 // indirect
golang.org/x/crypto v0.5.0 // indirect
+ golang.org/x/exp v0.0.0-20221208152030-732eee02a75a // indirect
golang.org/x/net v0.5.0 // indirect
- golang.org/x/sys v0.4.0 // indirect
- golang.org/x/text v0.6.0 // indirect
+ golang.org/x/sys v0.5.0 // indirect
+ golang.org/x/text v0.8.0 // indirect
google.golang.org/protobuf v1.28.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
- gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
+
+//replace github.com/sashabaranov/go-openai v1.13.0 => github.com/Leizhenpeng/go-openai v0.0.3
diff --git a/code/go.sum b/code/go.sum
index 677e3062..accd0903 100644
--- a/code/go.sum
+++ b/code/go.sum
@@ -40,8 +40,11 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
+github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
+github.com/chzyer/readline v1.5.0/go.mod h1:x22KAscuvRqlLoK9CsoYsmxoXZMMFVyOl86cAH8qUic=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
+github.com/chzyer/test v0.0.0-20210722231415-061457976a23/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
@@ -50,6 +53,20 @@ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ3
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
+github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
+github.com/dlclark/regexp2 v1.8.1 h1:6Lcdwya6GjPUNsBct8Lg/yRPwMhABj269AAzdGSiR+0=
+github.com/dlclark/regexp2 v1.8.1/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
+github.com/dop251/goja v0.0.0-20211022113120-dc8c55024d06/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk=
+github.com/dop251/goja v0.0.0-20221118162653-d4bf6fde1b86/go.mod h1:yRkwfj0CBpOGre+TwBsqPV0IH0Pk73e4PXJOeNDboGs=
+github.com/dop251/goja v0.0.0-20230304130813-e2f543bf4b4c h1:/utv6nmTctV6OVgfk5+O6lEMEWL+6KJy4h9NZ5fnkQQ=
+github.com/dop251/goja v0.0.0-20230304130813-e2f543bf4b4c/go.mod h1:QMWlm50DNe14hD7t24KEqZuUdC9sOTy8W6XbCU1mlw4=
+github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y=
+github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM=
+github.com/dop251/goja_nodejs v0.0.0-20230226152057-060fa99b809f h1:mmnNidRg3cMfcgyeNtIBSDZgjf/85lA/2pplccwSxYg=
+github.com/dop251/goja_nodejs v0.0.0-20230226152057-060fa99b809f/go.mod h1:0tlktQL7yHfYEtjcRGi/eiOkbDR5XF7gyFFvbC5//E0=
+github.com/duke-git/lancet/v2 v2.1.17 h1:4u9oAGgmTPTt2D7AcjjLp0ubbcaQlova8xeTIuyupDw=
+github.com/duke-git/lancet/v2 v2.1.17/go.mod h1:hNcc06mV7qr+crH/0nP+rlC3TB0Q9g5OrVnO8/TGD4c=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@@ -75,6 +92,8 @@ github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/j
github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ=
github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU=
+github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
+github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/goccy/go-json v0.10.0 h1:mXKd9Qw4NuzShiRlOXKews24ufknHO7gx30lsDyokKA=
github.com/goccy/go-json v0.10.0/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
@@ -130,8 +149,13 @@ github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hf
github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg=
+github.com/google/pprof v0.0.0-20230309165930-d61513b1440d h1:um9/pc7tKMINFfP1eE7Wv6PRGXlcCSJkVajF7KJw3uQ=
+github.com/google/pprof v0.0.0-20230309165930-d61513b1440d/go.mod h1:79YE0hCXdHag9sBkw2o+N/YnZtTkXi0UT9Nnixa5eYk=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
+github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
@@ -141,6 +165,7 @@ github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
+github.com/ianlancetaylor/demangle v0.0.0-20220319035150-800ac71e25c2/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
@@ -172,12 +197,16 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/pandodao/tokenizer-go v0.2.0 h1:NhfI8fGvQkDld2cZCag6NEU3pJ/ugU9zoY1R/zi9YCs=
+github.com/pandodao/tokenizer-go v0.2.0/go.mod h1:t6qFbaleKxbv0KNio2XUN/mfGM5WKv4haPXDQWVDG00=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml/v2 v2.0.6 h1:nrzqCb7j9cDFj2coyLNLaZuJTLjWjlaz6nvTvIwycIU=
github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek=
+github.com/pion/opus v0.0.0-20230123082803-1052c3e89e58 h1:wi5XffRvL9Ghx8nRAdZyAjmLV/ccnn2xJ4w6S6fELgA=
+github.com/pion/opus v0.0.0-20230123082803-1052c3e89e58/go.mod h1:m8ODxkLrcNvLY6BPvOj7yLxK1wMQWA+2jqKcsrZ293U=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg=
@@ -188,6 +217,10 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
+github.com/sashabaranov/go-openai v1.13.0 h1:EAusFfnhaMaaUspUZ2+MbB/ZcVeD4epJmTOlZ+8AcAE=
+github.com/sashabaranov/go-openai v1.13.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
+github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
+github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/spf13/afero v1.9.3 h1:41FoI0fD7OR7mGcKE/aOiLkGreyf8ifIOQmJANWogMk=
github.com/spf13/afero v1.9.3/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y=
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
@@ -209,8 +242,8 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
-github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs=
github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
github.com/ugorji/go/codec v1.2.8 h1:sgBJS6COt0b/P40VouWKdseidkDgHxYGm0SAglUHfP0=
@@ -219,6 +252,7 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
@@ -231,6 +265,7 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE=
@@ -245,6 +280,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
+golang.org/x/exp v0.0.0-20221208152030-732eee02a75a h1:4iLhBPcpqFmylhnkbY3W0ONLUYYkDAW9xMFLfxgsvCw=
+golang.org/x/exp v0.0.0-20221208152030-732eee02a75a/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -268,6 +305,7 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -300,6 +338,8 @@ golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE=
golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw=
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -321,6 +361,7 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -357,11 +398,18 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18=
-golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -370,8 +418,10 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
-golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k=
-golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
+golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
+golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -422,6 +472,7 @@ golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -517,8 +568,6 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
-gopkg.in/Knetic/govaluate.v2 v2.3.0 h1:naJVc9CZlWA8rC8f5mvECJD7jreTrn7FvGXjBthkHJQ=
-gopkg.in/Knetic/govaluate.v2 v2.3.0/go.mod h1:NW0gr10J8s7aNghEg6uhdxiEaBvc0+8VgJjVViHUKp4=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
diff --git a/code/handlers/card_ai_mode_action.go b/code/handlers/card_ai_mode_action.go
new file mode 100644
index 00000000..f7f05b82
--- /dev/null
+++ b/code/handlers/card_ai_mode_action.go
@@ -0,0 +1,38 @@
+package handlers
+
+import (
+ "context"
+
+ "start-feishubot/services"
+ "start-feishubot/services/openai"
+
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
+)
+
+// AIModeChooseKind is the kind of card action for choosing AI mode
+func NewAIModeCardHandler(cardMsg CardMsg,
+ m MessageHandler) CardHandlerFunc {
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+
+ if cardMsg.Kind == AIModeChooseKind {
+ newCard, err, done := CommonProcessAIMode(cardMsg, cardAction,
+ m.sessionCache)
+ if done {
+ return newCard, err
+ }
+ return nil, nil
+ }
+ return nil, ErrNextHandler
+ }
+}
+
+// CommonProcessAIMode is the common process for choosing AI mode
+func CommonProcessAIMode(msg CardMsg, cardAction *larkcard.CardAction,
+ cache services.SessionServiceCacheInterface) (interface{},
+ error, bool) {
+ option := cardAction.Action.Option
+ replyMsg(context.Background(), "已选择发散模式:"+option,
+ &msg.MsgId)
+ cache.SetAIMode(msg.SessionId, openai.AIModeMap[option])
+ return nil, nil, true
+}
diff --git a/code/handlers/card_clear_action.go b/code/handlers/card_clear_action.go
new file mode 100644
index 00000000..c1c1ab26
--- /dev/null
+++ b/code/handlers/card_clear_action.go
@@ -0,0 +1,45 @@
+package handlers
+
+import (
+ "context"
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
+ "start-feishubot/logger"
+ "start-feishubot/services"
+)
+
+func NewClearCardHandler(cardMsg CardMsg, m MessageHandler) CardHandlerFunc {
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+ if cardMsg.Kind == ClearCardKind {
+ newCard, err, done := CommonProcessClearCache(cardMsg, m.sessionCache)
+ if done {
+ return newCard, err
+ }
+ return nil, nil
+ }
+ return nil, ErrNextHandler
+ }
+}
+
+func CommonProcessClearCache(cardMsg CardMsg, session services.SessionServiceCacheInterface) (
+ interface{}, error, bool) {
+ logger.Debugf("card msg value %v", cardMsg.Value)
+ if cardMsg.Value == "1" {
+ session.Clear(cardMsg.SessionId)
+ newCard, _ := newSendCard(
+ withHeader("️🆑 机器人提醒", larkcard.TemplateGrey),
+ withMainMd("已删除此话题的上下文信息"),
+ withNote("我们可以开始一个全新的话题,继续找我聊天吧"),
+ )
+ logger.Debugf("session %v", newCard)
+ return newCard, nil, true
+ }
+ if cardMsg.Value == "0" {
+ newCard, _ := newSendCard(
+ withHeader("️🆑 机器人提醒", larkcard.TemplateGreen),
+ withMainMd("依旧保留此话题的上下文信息"),
+ withNote("我们可以继续探讨这个话题,期待和您聊天。如果您有其他问题或者想要讨论的话题,请告诉我哦"),
+ )
+ return newCard, nil, true
+ }
+ return nil, nil, false
+}
diff --git a/code/handlers/card_common_action.go b/code/handlers/card_common_action.go
new file mode 100644
index 00000000..1f1a7ac8
--- /dev/null
+++ b/code/handlers/card_common_action.go
@@ -0,0 +1,49 @@
+package handlers
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
+)
+
+type CardHandlerMeta func(cardMsg CardMsg, m MessageHandler) CardHandlerFunc
+
+type CardHandlerFunc func(ctx context.Context, cardAction *larkcard.CardAction) (
+ interface{}, error)
+
+var ErrNextHandler = fmt.Errorf("next handler")
+
+func NewCardHandler(m MessageHandler) CardHandlerFunc {
+ handlers := []CardHandlerMeta{
+ NewClearCardHandler,
+ NewPicResolutionHandler,
+ NewVisionResolutionHandler,
+ NewPicTextMoreHandler,
+ NewPicModeChangeHandler,
+ NewRoleTagCardHandler,
+ NewRoleCardHandler,
+ NewAIModeCardHandler,
+ NewVisionModeChangeHandler,
+ }
+
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+ var cardMsg CardMsg
+ actionValue := cardAction.Action.Value
+ actionValueJson, _ := json.Marshal(actionValue)
+ if err := json.Unmarshal(actionValueJson, &cardMsg); err != nil {
+ return nil, err
+ }
+ //pp.Println(cardMsg)
+ //logger.Debug("cardMsg ", cardMsg)
+ for _, handler := range handlers {
+ h := handler(cardMsg, m)
+ i, err := h(ctx, cardAction)
+ if err == ErrNextHandler {
+ continue
+ }
+ return i, err
+ }
+ return nil, nil
+ }
+}
diff --git a/code/handlers/card_pic_action.go b/code/handlers/card_pic_action.go
new file mode 100644
index 00000000..a077a53f
--- /dev/null
+++ b/code/handlers/card_pic_action.go
@@ -0,0 +1,115 @@
+package handlers
+
+import (
+ "context"
+ "fmt"
+ larkcore "github.com/larksuite/oapi-sdk-go/v3/core"
+ "start-feishubot/logger"
+
+ "start-feishubot/services"
+
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
+)
+
+func NewPicResolutionHandler(cardMsg CardMsg, m MessageHandler) CardHandlerFunc {
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+ if cardMsg.Kind == PicResolutionKind {
+ CommonProcessPicResolution(cardMsg, cardAction, m.sessionCache)
+ return nil, nil
+ }
+ if cardMsg.Kind == PicStyleKind {
+ CommonProcessPicStyle(cardMsg, cardAction, m.sessionCache)
+ return nil, nil
+ }
+ return nil, ErrNextHandler
+ }
+}
+
+func NewPicModeChangeHandler(cardMsg CardMsg, m MessageHandler) CardHandlerFunc {
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+ if cardMsg.Kind == PicModeChangeKind {
+ newCard, err, done := CommonProcessPicModeChange(cardMsg, m.sessionCache)
+ if done {
+ return newCard, err
+ }
+ return nil, nil
+ }
+ return nil, ErrNextHandler
+ }
+}
+
+func NewPicTextMoreHandler(cardMsg CardMsg, m MessageHandler) CardHandlerFunc {
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+ if cardMsg.Kind == PicTextMoreKind {
+ go func() {
+ m.CommonProcessPicMore(cardMsg)
+ }()
+ return nil, nil
+ }
+ return nil, ErrNextHandler
+ }
+}
+
+func CommonProcessPicResolution(msg CardMsg,
+ cardAction *larkcard.CardAction,
+ cache services.SessionServiceCacheInterface) {
+ option := cardAction.Action.Option
+ fmt.Println(larkcore.Prettify(msg))
+ cache.SetPicResolution(msg.SessionId, services.Resolution(option))
+ //send text
+ replyMsg(context.Background(), "已更新图片分辨率为"+option,
+ &msg.MsgId)
+}
+
+func CommonProcessPicStyle(msg CardMsg,
+ cardAction *larkcard.CardAction,
+ cache services.SessionServiceCacheInterface) {
+ option := cardAction.Action.Option
+ fmt.Println(larkcore.Prettify(msg))
+ cache.SetPicStyle(msg.SessionId, services.PicStyle(option))
+ //send text
+ replyMsg(context.Background(), "已更新图片风格为"+option,
+ &msg.MsgId)
+}
+
+func (m MessageHandler) CommonProcessPicMore(msg CardMsg) {
+ resolution := m.sessionCache.GetPicResolution(msg.SessionId)
+ style := m.sessionCache.GetPicStyle(msg.SessionId)
+
+ logger.Debugf("resolution: %v", resolution)
+	logger.Debugf("msg: %v", msg)
+ question := msg.Value.(string)
+ bs64, _ := m.gpt.GenerateOneImage(question, resolution, style)
+ replayImageCardByBase64(context.Background(), bs64, &msg.MsgId,
+ &msg.SessionId, question)
+}
+
+func CommonProcessPicModeChange(cardMsg CardMsg,
+ session services.SessionServiceCacheInterface) (
+ interface{}, error, bool) {
+ if cardMsg.Value == "1" {
+
+ sessionId := cardMsg.SessionId
+ session.Clear(sessionId)
+ session.SetMode(sessionId,
+ services.ModePicCreate)
+ session.SetPicResolution(sessionId,
+ services.Resolution256)
+
+ newCard, _ :=
+ newSendCard(
+ withHeader("🖼️ 已进入图片创作模式", larkcard.TemplateBlue),
+ withPicResolutionBtn(&sessionId),
+ withNote("提醒:回复文本或图片,让AI生成相关的图片。"))
+ return newCard, nil, true
+ }
+ if cardMsg.Value == "0" {
+ newCard, _ := newSendCard(
+ withHeader("️🎒 机器人提醒", larkcard.TemplateGreen),
+ withMainMd("依旧保留此话题的上下文信息"),
+ withNote("我们可以继续探讨这个话题,期待和您聊天。如果您有其他问题或者想要讨论的话题,请告诉我哦"),
+ )
+ return newCard, nil, true
+ }
+ return nil, nil, false
+}
diff --git a/code/handlers/card_role_action.go b/code/handlers/card_role_action.go
new file mode 100644
index 00000000..6a10150c
--- /dev/null
+++ b/code/handlers/card_role_action.go
@@ -0,0 +1,77 @@
+package handlers
+
+import (
+ "context"
+
+ "start-feishubot/initialization"
+ "start-feishubot/services"
+ "start-feishubot/services/openai"
+
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
+)
+
+func NewRoleTagCardHandler(cardMsg CardMsg,
+ m MessageHandler) CardHandlerFunc {
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+
+ if cardMsg.Kind == RoleTagsChooseKind {
+ newCard, err, done := CommonProcessRoleTag(cardMsg, cardAction,
+ m.sessionCache)
+ if done {
+ return newCard, err
+ }
+ return nil, nil
+ }
+ return nil, ErrNextHandler
+ }
+}
+
+func NewRoleCardHandler(cardMsg CardMsg,
+ m MessageHandler) CardHandlerFunc {
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+
+ if cardMsg.Kind == RoleChooseKind {
+ newCard, err, done := CommonProcessRole(cardMsg, cardAction,
+ m.sessionCache)
+ if done {
+ return newCard, err
+ }
+ return nil, nil
+ }
+ return nil, ErrNextHandler
+ }
+}
+
// CommonProcessRoleTag handles a tag chosen on the role-tags card: it looks
// up every role title registered under that tag and sends a follow-up card
// listing them. The cache parameter is currently unused but kept for
// signature parity with the other card processors.
func CommonProcessRoleTag(msg CardMsg, cardAction *larkcard.CardAction,
	cache services.SessionServiceCacheInterface) (interface{},
	error, bool) {
	option := cardAction.Action.Option
	roles := initialization.GetTitleListByTag(option)
	SendRoleListCard(context.Background(), &msg.SessionId,
		&msg.MsgId, option, *roles)
	// Always handled; no card payload is returned because the reply card
	// is sent directly above.
	return nil, nil, true
}
+
+func CommonProcessRole(msg CardMsg, cardAction *larkcard.CardAction,
+ cache services.SessionServiceCacheInterface) (interface{},
+ error, bool) {
+ option := cardAction.Action.Option
+ contentByTitle, error := initialization.GetFirstRoleContentByTitle(option)
+ if error != nil {
+ return nil, error, true
+ }
+ cache.Clear(msg.SessionId)
+ systemMsg := append([]openai.Messages{}, openai.Messages{
+ Role: "system", Content: contentByTitle,
+ })
+ cache.SetMsg(msg.SessionId, systemMsg)
+ //pp.Println("systemMsg: ", systemMsg)
+ sendSystemInstructionCard(context.Background(), &msg.SessionId,
+ &msg.MsgId, contentByTitle)
+ //replyMsg(context.Background(), "已选择角色:"+contentByTitle,
+ // &msg.MsgId)
+ return nil, nil, true
+}
diff --git a/code/handlers/card_vision_action.go b/code/handlers/card_vision_action.go
new file mode 100644
index 00000000..9e056492
--- /dev/null
+++ b/code/handlers/card_vision_action.go
@@ -0,0 +1,74 @@
+package handlers
+
+import (
+ "context"
+ "fmt"
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
+ larkcore "github.com/larksuite/oapi-sdk-go/v3/core"
+ "start-feishubot/services"
+)
+
// NewVisionResolutionHandler builds the card callback for the vision
// detail-level card; other card kinds fall through via ErrNextHandler.
func NewVisionResolutionHandler(cardMsg CardMsg,
	m MessageHandler) CardHandlerFunc {
	return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
		if cardMsg.Kind == VisionStyleKind {
			CommonProcessVisionStyle(cardMsg, cardAction, m.sessionCache)
			return nil, nil
		}
		return nil, ErrNextHandler
	}
}
// NewVisionModeChangeHandler builds the card callback for the "enter vision
// mode?" confirmation card; other card kinds fall through via
// ErrNextHandler.
func NewVisionModeChangeHandler(cardMsg CardMsg,
	m MessageHandler) CardHandlerFunc {
	return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
		if cardMsg.Kind == VisionModeChangeKind {
			newCard, err, done := CommonProcessVisionModeChange(cardMsg, m.sessionCache)
			if done {
				return newCard, err
			}
			return nil, nil
		}
		return nil, ErrNextHandler
	}
}
+
// CommonProcessVisionStyle stores the vision detail level chosen on the
// card into the session cache and confirms via a text reply.
func CommonProcessVisionStyle(msg CardMsg,
	cardAction *larkcard.CardAction,
	cache services.SessionServiceCacheInterface) {
	option := cardAction.Action.Option
	// Debug dump of the incoming card message.
	fmt.Println(larkcore.Prettify(msg))
	cache.SetVisionDetail(msg.SessionId, services.VisionDetail(option))
	// Confirm the new detail level with a plain-text reply.
	replyMsg(context.Background(), "图片解析度调整为:"+option,
		&msg.MsgId)
}
+
// CommonProcessVisionModeChange reacts to the "enter vision mode?"
// confirmation card. Value "1" resets the session and switches it into
// vision mode (default detail: low); value "0" keeps the current topic.
// done is false for any other value, letting the caller fall through.
func CommonProcessVisionModeChange(cardMsg CardMsg,
	session services.SessionServiceCacheInterface) (
	interface{}, error, bool) {
	if cardMsg.Value == "1" {

		sessionId := cardMsg.SessionId
		session.Clear(sessionId)
		session.SetMode(sessionId,
			services.ModeVision)
		session.SetVisionDetail(sessionId,
			services.VisionDetailLow)

		newCard, _ :=
			newSendCard(
				withHeader("🕵️️ 已进入图片推理模式", larkcard.TemplateBlue),
				withVisionDetailLevelBtn(&sessionId),
				withNote("提醒:回复图片,让LLM和你一起推理图片的内容。"))
		return newCard, nil, true
	}
	if cardMsg.Value == "0" {
		newCard, _ := newSendCard(
			withHeader("️🎒 机器人提醒", larkcard.TemplateGreen),
			withMainMd("依旧保留此话题的上下文信息"),
			withNote("我们可以继续探讨这个话题,期待和您聊天。如果您有其他问题或者想要讨论的话题,请告诉我哦"),
		)
		return newCard, nil, true
	}
	return nil, nil, false
}
diff --git a/code/handlers/common.go b/code/handlers/common.go
index 9845597b..ffe4912a 100644
--- a/code/handlers/common.go
+++ b/code/handlers/common.go
@@ -1,72 +1,93 @@
package handlers
import (
- "context"
"encoding/json"
"fmt"
- larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
"regexp"
- "start-feishubot/initialization"
+ "strconv"
"strings"
)
-func sendMsg(ctx context.Context, msg string, chatId *string) error {
- //msg = strings.Trim(msg, " ")
- //msg = strings.Trim(msg, "\n")
- //msg = strings.Trim(msg, "\r")
- //msg = strings.Trim(msg, "\t")
- //// 去除空行 以及空行前的空格
- //regex := regexp.MustCompile(`\n[\s| ]*\r`)
- //msg = regex.ReplaceAllString(msg, "\n")
- ////换行符转义
- //msg = strings.ReplaceAll(msg, "\n", "\\n")
- fmt.Println("sendMsg", msg, chatId)
- msg, i := processMessage(msg)
- if i != nil {
- return i
- }
- client := initialization.GetLarkClient()
- content := larkim.NewTextMsgBuilder().
- Text(msg).
- Build()
- fmt.Println("content", content)
-
- resp, err := client.Im.Message.Create(ctx, larkim.NewCreateMessageReqBuilder().
- ReceiveIdType(larkim.ReceiveIdTypeChatId).
- Body(larkim.NewCreateMessageReqBodyBuilder().
- MsgType(larkim.MsgTypeText).
- ReceiveId(*chatId).
- Content(content).
- Build()).
- Build())
-
- // 处理错误
+// func sendCard
// msgFilter strips @-mentions from msg: every "@" together with the
// following run of non-space characters is removed.
func msgFilter(msg string) string {
	mention := regexp.MustCompile(`@[^ ]*`)
	return mention.ReplaceAllString(msg, "")
}
+
+// Parse rich text json to text
+func parsePostContent(content string) string {
+ var contentMap map[string]interface{}
+ err := json.Unmarshal([]byte(content), &contentMap)
+
if err != nil {
fmt.Println(err)
- return err
}
- // 服务端错误处理
- if !resp.Success() {
- fmt.Println(resp.Code, resp.Msg, resp.RequestId())
- return err
+ if contentMap["content"] == nil {
+ return ""
+ }
+ var text string
+ // deal with title
+ if contentMap["title"] != nil && contentMap["title"] != "" {
+ text += contentMap["title"].(string) + "\n"
}
- return nil
+ // deal with content
+ contentList := contentMap["content"].([]interface{})
+ for _, v := range contentList {
+ for _, v1 := range v.([]interface{}) {
+ if v1.(map[string]interface{})["tag"] == "text" {
+ text += v1.(map[string]interface{})["text"].(string)
+ }
+ }
+ // add new line
+ text += "\n"
+ }
+ return msgFilter(text)
}
-func msgFilter(msg string) string {
- //replace @到下一个非空的字段 为 ''
- regex := regexp.MustCompile(`@[^ ]*`)
- return regex.ReplaceAllString(msg, "")
// parsePostImageKeys collects the image_key of every "img" element in a
// Feishu rich-text ("post") message payload. It returns nil when the
// payload cannot be parsed and an empty slice when there is no content.
func parsePostImageKeys(content string) []string {
	var contentMap map[string]interface{}
	if err := json.Unmarshal([]byte(content), &contentMap); err != nil {
		fmt.Println(err)
		return nil
	}

	var imageKeys []string
	rawList, ok := contentMap["content"].([]interface{})
	if !ok {
		// No content (or unexpected shape): nothing to collect.
		return imageKeys
	}
	// Comma-ok assertions replace the previous bare assertions so a
	// malformed paragraph cannot panic the event handler.
	for _, paragraph := range rawList {
		elems, ok := paragraph.([]interface{})
		if !ok {
			continue
		}
		for _, e := range elems {
			elem, ok := e.(map[string]interface{})
			if !ok {
				continue
			}
			if elem["tag"] == "img" {
				if key, ok := elem["image_key"].(string); ok {
					imageKeys = append(imageKeys, key)
				}
			}
		}
	}
	return imageKeys
}
-func parseContent(content string) string {
+
// parseContent extracts the user-visible text from a Feishu message payload.
// Rich-text ("post") messages are delegated to parsePostContent; all other
// types are expected to be a JSON object with a "text" field, e.g.
// {"text":"@_user_1 hahaha"}, returned with mentions stripped.
func parseContent(content, msgType string) string {
	if msgType == "post" {
		return parsePostContent(content)
	}

	var contentMap map[string]interface{}
	err := json.Unmarshal([]byte(content), &contentMap)
	if err != nil {
		// Best effort: log and fall through; a nil map yields "" below.
		fmt.Println(err)
	}
	if contentMap["text"] == nil {
		return ""
	}
	text := contentMap["text"].(string)
	return msgFilter(text)
}
@@ -83,6 +104,60 @@ func processMessage(msg interface{}) (string, error) {
if len(msgStr) >= 2 {
msgStr = msgStr[1 : len(msgStr)-1]
}
-
return msgStr, nil
}
+
// processNewLine turns literal `\n` escape sequences into real newlines.
func processNewLine(msg string) string {
	return strings.ReplaceAll(msg, `\n`, "\n")
}
+
// processQuote turns escaped quotes (`\"`) into plain double quotes.
func processQuote(msg string) string {
	return strings.ReplaceAll(msg, `\"`, `"`)
}
+
+// 将字符中 \u003c 替换为 < 等等
+func processUnicode(msg string) string {
+ regex := regexp.MustCompile(`\\u[0-9a-fA-F]{4}`)
+ return regex.ReplaceAllStringFunc(msg, func(s string) string {
+ r, _ := regexp.Compile(`\\u`)
+ s = r.ReplaceAllString(s, "")
+ i, _ := strconv.ParseInt(s, 16, 32)
+ return string(rune(i))
+ })
+}
+
// cleanTextBlock normalizes an escaped text block for display: literal
// `\n` sequences become real newlines, \uXXXX escapes are decoded, and
// escaped quotes are unescaped.
func cleanTextBlock(msg string) string {
	msg = processNewLine(msg)
	msg = processUnicode(msg)
	msg = processQuote(msg)
	return msg
}
+
// parseFileKey pulls the "file_key" field out of a JSON message payload.
// Malformed JSON or a missing field yields "".
func parseFileKey(content string) string {
	var payload map[string]interface{}
	if err := json.Unmarshal([]byte(content), &payload); err != nil {
		fmt.Println(err)
		return ""
	}
	if payload["file_key"] == nil {
		return ""
	}
	return payload["file_key"].(string)
}
+
// parseImageKey pulls the "image_key" field out of a JSON message payload.
// Malformed JSON or a missing field yields "".
func parseImageKey(content string) string {
	var payload map[string]interface{}
	if err := json.Unmarshal([]byte(content), &payload); err != nil {
		fmt.Println(err)
		return ""
	}
	if payload["image_key"] == nil {
		return ""
	}
	return payload["image_key"].(string)
}
diff --git a/code/handlers/event_audio_action.go b/code/handlers/event_audio_action.go
new file mode 100644
index 00000000..e01bce22
--- /dev/null
+++ b/code/handlers/event_audio_action.go
@@ -0,0 +1,69 @@
+package handlers
+
+import (
+ "context"
+ "fmt"
+ "os"
+
+ "start-feishubot/initialization"
+ "start-feishubot/utils/audio"
+
+ larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
+)
+
type AudioAction struct { /* voice-message transcription */
}

// Execute transcribes private-chat voice messages: it downloads the audio
// resource from Feishu, converts it, runs speech-to-text via the GPT
// client, echoes the transcript back, and stores it in a.info.qParsed so
// later actions treat it as the user's question. Returns true to continue
// the chain, false only when transcription failed (reply already sent).
func (*AudioAction) Execute(a *ActionInfo) bool {
	check := AzureModeCheck(a)
	if !check {
		return true
	}

	// Voice messages are only parsed in private chats.
	if a.info.handlerType != UserHandler {
		return true
	}

	// Only audio messages are handled; everything else passes through.
	if a.info.msgType == "audio" {
		fileKey := a.info.fileKey
		msgId := a.info.msgId
		req := larkim.NewGetMessageResourceReqBuilder().MessageId(
			*msgId).FileKey(fileKey).Type("file").Build()
		resp, err := initialization.GetLarkClient().Im.MessageResource.Get(context.Background(), req)
		if err != nil {
			fmt.Println(err)
			return true
		}
		// Persist the raw audio; NOTE(review): WriteFile's error is
		// ignored — a failed write only surfaces later in AudioToText.
		f := fmt.Sprintf("%s.ogg", fileKey)
		resp.WriteFile(f)
		defer os.Remove(f)

		// Convert for transcription. Despite the helper's name
		// (OggToWavByPath) the output here is named .mp3 — presumably
		// whatever container AudioToText accepts; verify in utils/audio.
		output := fmt.Sprintf("%s.mp3", fileKey)
		audio.OggToWavByPath(f, output)
		defer os.Remove(output)

		text, err := a.handler.gpt.AudioToText(output)
		if err != nil {
			fmt.Println(err)

			sendMsg(*a.ctx, fmt.Sprintf("🤖️:语音转换失败,请稍后再试~\n错误信息: %v", err), a.info.msgId)
			return false
		}

		// Echo the transcript, then hand it to the rest of the chain
		// as the parsed question text.
		replyMsg(*a.ctx, fmt.Sprintf("🤖️:%s", text), a.info.msgId)
		a.info.qParsed = text
		return true
	}

	return true

}
diff --git a/code/handlers/event_common_action.go b/code/handlers/event_common_action.go
new file mode 100644
index 00000000..190d66bd
--- /dev/null
+++ b/code/handlers/event_common_action.go
@@ -0,0 +1,169 @@
+package handlers
+
+import (
+ "context"
+ "fmt"
+
+ "start-feishubot/initialization"
+ "start-feishubot/services/openai"
+ "start-feishubot/utils"
+
+ larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
+)
+
// MsgInfo is the normalized view of one incoming Feishu message, extracted
// once in msgReceivedHandler and passed down the action chain.
type MsgInfo struct {
	handlerType HandlerType // private (UserHandler) or group (GroupHandler) chat
	msgType     string      // "text", "image", "audio" or "post"
	msgId       *string
	chatId      *string
	qParsed     string   // plain-text question with mentions stripped
	fileKey     string   // file/audio attachment key, if any
	imageKey    string   // single-image attachment key, if any
	imageKeys   []string // image keys inside a rich-text (post) message
	sessionId   *string  // conversation id: thread root id, or msgId for new threads
	mention     []*larkim.MentionEvent
}

// ActionInfo bundles everything an Action needs: the owning handler, the
// request context and the parsed message.
type ActionInfo struct {
	handler *MessageHandler
	ctx     *context.Context
	info    *MsgInfo
}

// Action is one link of the message-processing chain; Execute returns
// false to stop the chain (message handled or dropped), true to pass on.
type Action interface {
	Execute(a *ActionInfo) bool
}
+
type ProcessedUniqueAction struct { // message de-duplication
}

// Execute drops messages whose msgId has already been seen (Feishu may
// redeliver events); first-time messages are tagged and continue.
func (*ProcessedUniqueAction) Execute(a *ActionInfo) bool {
	if a.handler.msgCache.IfProcessed(*a.info.msgId) {
		return false
	}
	a.handler.msgCache.TagProcessed(*a.info.msgId)
	return true
}
+
+type ProcessMentionAction struct { //是否机器人应该处理
+}
+
+func (*ProcessMentionAction) Execute(a *ActionInfo) bool {
+ // 私聊直接过
+ if a.info.handlerType == UserHandler {
+ return true
+ }
+ // 群聊判断是否提到机器人
+ if a.info.handlerType == GroupHandler {
+ if a.handler.judgeIfMentionMe(a.info.mention) {
+ return true
+ }
+ return false
+ }
+ return false
+}
+
type EmptyAction struct { /* empty-input guard */
}

// Execute short-circuits empty questions with a friendly prompt instead of
// calling the model.
func (*EmptyAction) Execute(a *ActionInfo) bool {
	if len(a.info.qParsed) == 0 {
		sendMsg(*a.ctx, "🤖️:你想知道什么呢~", a.info.chatId)
		fmt.Println("msgId", *a.info.msgId,
			"message.text is empty")

		return false
	}
	return true
}
+
type ClearAction struct { /* context clearing */
}

// Execute intercepts "/clear" (or "清除") and replies with a confirmation
// card; the actual clearing happens in the card callback.
func (*ClearAction) Execute(a *ActionInfo) bool {
	if _, foundClear := utils.EitherTrimEqual(a.info.qParsed,
		"/clear", "清除"); foundClear {
		sendClearCacheCheckCard(*a.ctx, a.info.sessionId,
			a.info.msgId)
		return false
	}
	return true
}
+
type RolePlayAction struct { /* custom system prompt */
}

// Execute intercepts "/system <prompt>" (or "角色扮演 <prompt>"): it resets
// the session, installs the given prompt as the system message and sends a
// confirmation card.
func (*RolePlayAction) Execute(a *ActionInfo) bool {
	if system, foundSystem := utils.EitherCutPrefix(a.info.qParsed,
		"/system ", "角色扮演 "); foundSystem {
		a.handler.sessionCache.Clear(*a.info.sessionId)
		systemMsg := append([]openai.Messages{}, openai.Messages{
			Role: "system", Content: system,
		})
		a.handler.sessionCache.SetMsg(*a.info.sessionId, systemMsg)
		sendSystemInstructionCard(*a.ctx, a.info.sessionId,
			a.info.msgId, system)
		return false
	}
	return true
}
+
type HelpAction struct { /* help */
}

// Execute intercepts "/help" (or "帮助") and replies with the help card.
func (*HelpAction) Execute(a *ActionInfo) bool {
	if _, foundHelp := utils.EitherTrimEqual(a.info.qParsed, "/help",
		"帮助"); foundHelp {
		sendHelpCard(*a.ctx, a.info.sessionId, a.info.msgId)
		return false
	}
	return true
}
+
type BalanceAction struct { /* account balance */
}

// Execute intercepts "/balance" (or "余额"), queries the OpenAI account
// balance and replies with a balance card (or an error text on failure).
func (*BalanceAction) Execute(a *ActionInfo) bool {
	if _, foundBalance := utils.EitherTrimEqual(a.info.qParsed,
		"/balance", "余额"); foundBalance {
		balanceResp, err := a.handler.gpt.GetBalance()
		if err != nil {
			replyMsg(*a.ctx, "查询余额失败,请稍后再试", a.info.msgId)
			return false
		}
		sendBalanceCard(*a.ctx, a.info.sessionId, *balanceResp)
		return false
	}
	return true
}
+
type RoleListAction struct { /* built-in role list */
}

// Execute intercepts "/roles" (or "角色列表") and replies with a card of
// all unique role tags; picking a tag is handled by the card callbacks.
// (A dead commented-out session-reset block copied from RolePlayAction
// has been removed — listing roles must not touch the session.)
func (*RoleListAction) Execute(a *ActionInfo) bool {
	if _, foundSystem := utils.EitherTrimEqual(a.info.qParsed,
		"/roles", "角色列表"); foundSystem {
		tags := initialization.GetAllUniqueTags()
		SendRoleTagsCard(*a.ctx, a.info.sessionId, a.info.msgId, *tags)
		return false
	}
	return true
}
+
type AIModeAction struct { /* creativity ("AI mode") switching */
}

// Execute intercepts "/ai_mode" (or "发散模式") and replies with the list
// of available AI modes to choose from.
func (*AIModeAction) Execute(a *ActionInfo) bool {
	if _, foundMode := utils.EitherCutPrefix(a.info.qParsed,
		"/ai_mode", "发散模式"); foundMode {
		SendAIModeListsCard(*a.ctx, a.info.sessionId, a.info.msgId, openai.AIModeStrs)
		return false
	}
	return true
}
diff --git a/code/handlers/event_msg_action.go b/code/handlers/event_msg_action.go
new file mode 100644
index 00000000..f9e10f71
--- /dev/null
+++ b/code/handlers/event_msg_action.go
@@ -0,0 +1,222 @@
+package handlers
+
import (
	"encoding/json"
	"fmt"
	"log"
	"strings"
	"sync"
	"time"

	"start-feishubot/services/openai"
)
+
+func setDefaultPrompt(msg []openai.Messages) []openai.Messages {
+ if !hasSystemRole(msg) {
+ msg = append(msg, openai.Messages{
+ Role: "system", Content: "You are ChatGPT, " +
+ "a large language model trained by OpenAI. " +
+ "Answer in user's language as concisely as" +
+ " possible. Knowledge cutoff: 20230601 " +
+ "Current date" + time.Now().Format("20060102"),
+ })
+ }
+ return msg
+}
+
+//func setDefaultVisionPrompt(msg []openai.VisionMessages) []openai.VisionMessages {
+// if !hasSystemRole(msg) {
+// msg = append(msg, openai.VisionMessages{
+// Role: "system", Content: []openai.ContentType{
+// {Type: "text", Text: "You are ChatGPT4V, " +
+// "You are ChatGPT4V, " +
+// "a large language and picture model trained by" +
+// " OpenAI. " +
+// "Answer in user's language as concisely as" +
+// " possible. Knowledge cutoff: 20230601 " +
+// "Current date" + time.Now().Format("20060102"),
+// }},
+// })
+// }
+// return msg
+//}
+
+type MessageAction struct { /*消息*/
+}
+
+func (*MessageAction) Execute(a *ActionInfo) bool {
+ if a.handler.config.StreamMode {
+ return true
+ }
+ msg := a.handler.sessionCache.GetMsg(*a.info.sessionId)
+ // 如果没有提示词,默认模拟ChatGPT
+ msg = setDefaultPrompt(msg)
+ msg = append(msg, openai.Messages{
+ Role: "user", Content: a.info.qParsed,
+ })
+
+ // get ai mode as temperature
+ aiMode := a.handler.sessionCache.GetAIMode(*a.info.sessionId)
+ fmt.Println("msg: ", msg)
+ fmt.Println("aiMode: ", aiMode)
+ completions, err := a.handler.gpt.Completions(msg, aiMode)
+ if err != nil {
+ replyMsg(*a.ctx, fmt.Sprintf(
+ "🤖️:消息机器人摆烂了,请稍后再试~\n错误信息: %v", err), a.info.msgId)
+ return false
+ }
+ msg = append(msg, completions)
+ a.handler.sessionCache.SetMsg(*a.info.sessionId, msg)
+ //if new topic
+ if len(msg) == 3 {
+ //fmt.Println("new topic", msg[1].Content)
+ sendNewTopicCard(*a.ctx, a.info.sessionId, a.info.msgId,
+ completions.Content)
+ return false
+ }
+ if len(msg) != 3 {
+ sendOldTopicCard(*a.ctx, a.info.sessionId, a.info.msgId,
+ completions.Content)
+ return false
+ }
+ err = replyMsg(*a.ctx, completions.Content, a.info.msgId)
+ if err != nil {
+ replyMsg(*a.ctx, fmt.Sprintf(
+ "🤖️:消息机器人摆烂了,请稍后再试~\n错误信息: %v", err), a.info.msgId)
+ return false
+ }
+ return true
+}
+
+//判断msg中的是否包含system role
+func hasSystemRole(msg []openai.Messages) bool {
+ for _, m := range msg {
+ if m.Role == "system" {
+ return true
+ }
+ }
+ return false
+}
+
+type StreamMessageAction struct { /*消息*/
+}
+
+func (m *StreamMessageAction) Execute(a *ActionInfo) bool {
+ if !a.handler.config.StreamMode {
+ return true
+ }
+ msg := a.handler.sessionCache.GetMsg(*a.info.sessionId)
+ // 如果没有提示词,默认模拟ChatGPT
+ msg = setDefaultPrompt(msg)
+ msg = append(msg, openai.Messages{
+ Role: "user", Content: a.info.qParsed,
+ })
+ //if new topic
+ var ifNewTopic bool
+ if len(msg) <= 3 {
+ ifNewTopic = true
+ } else {
+ ifNewTopic = false
+ }
+
+ cardId, err2 := sendOnProcess(a, ifNewTopic)
+ if err2 != nil {
+ return false
+ }
+
+ answer := ""
+ chatResponseStream := make(chan string)
+ done := make(chan struct{}) // 添加 done 信号,保证 goroutine 正确退出
+ noContentTimeout := time.AfterFunc(10*time.Second, func() {
+ log.Println("no content timeout")
+ close(done)
+ err := updateFinalCard(*a.ctx, "请求超时", cardId, ifNewTopic)
+ if err != nil {
+ return
+ }
+ return
+ })
+ defer noContentTimeout.Stop()
+
+ go func() {
+ defer func() {
+ if err := recover(); err != nil {
+ err := updateFinalCard(*a.ctx, "聊天失败", cardId, ifNewTopic)
+ if err != nil {
+ return
+ }
+ }
+ }()
+
+ //log.Printf("UserId: %s , Request: %s", a.info.userId, msg)
+ aiMode := a.handler.sessionCache.GetAIMode(*a.info.sessionId)
+ //fmt.Println("msg: ", msg)
+ //fmt.Println("aiMode: ", aiMode)
+ if err := a.handler.gpt.StreamChat(*a.ctx, msg, aiMode,
+ chatResponseStream); err != nil {
+ err := updateFinalCard(*a.ctx, "聊天失败", cardId, ifNewTopic)
+ if err != nil {
+ return
+ }
+ close(done) // 关闭 done 信号
+ }
+
+ close(done) // 关闭 done 信号
+ }()
+ ticker := time.NewTicker(700 * time.Millisecond)
+ defer ticker.Stop() // 注意在函数结束时停止 ticker
+ go func() {
+ for {
+ select {
+ case <-done:
+ return
+ case <-ticker.C:
+ err := updateTextCard(*a.ctx, answer, cardId, ifNewTopic)
+ if err != nil {
+ return
+ }
+ }
+ }
+ }()
+ for {
+ select {
+ case res, ok := <-chatResponseStream:
+ if !ok {
+ return false
+ }
+ noContentTimeout.Stop()
+ answer += res
+ //pp.Println("answer", answer)
+ case <-done: // 添加 done 信号的处理
+ err := updateFinalCard(*a.ctx, answer, cardId, ifNewTopic)
+ if err != nil {
+ return false
+ }
+ ticker.Stop()
+ msg := append(msg, openai.Messages{
+ Role: "assistant", Content: answer,
+ })
+ a.handler.sessionCache.SetMsg(*a.info.sessionId, msg)
+ close(chatResponseStream)
+ log.Printf("\n\n\n")
+ jsonByteArray, err := json.Marshal(msg)
+ if err != nil {
+ log.Println(err)
+ }
+ jsonStr := strings.ReplaceAll(string(jsonByteArray), "\\n", "")
+ jsonStr = strings.ReplaceAll(jsonStr, "\n", "")
+ log.Printf("\n\n\n")
+ return false
+ }
+ }
+}
+
+func sendOnProcess(a *ActionInfo, ifNewTopic bool) (*string, error) {
+ // send 正在处理中
+ cardId, err := sendOnProcessCard(*a.ctx, a.info.sessionId,
+ a.info.msgId, ifNewTopic)
+ if err != nil {
+ return nil, err
+ }
+ return cardId, nil
+
+}
diff --git a/code/handlers/event_pic_action.go b/code/handlers/event_pic_action.go
new file mode 100644
index 00000000..ca08d216
--- /dev/null
+++ b/code/handlers/event_pic_action.go
@@ -0,0 +1,110 @@
+package handlers
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "start-feishubot/logger"
+
+ "start-feishubot/initialization"
+ "start-feishubot/services"
+ "start-feishubot/services/openai"
+ "start-feishubot/utils"
+
+ larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
+)
+
+type PicAction struct { /*图片*/
+}
+
+func (*PicAction) Execute(a *ActionInfo) bool {
+ check := AzureModeCheck(a)
+ if !check {
+ return true
+ }
+ // 开启图片创作模式
+ if _, foundPic := utils.EitherTrimEqual(a.info.qParsed,
+ "/picture", "图片创作"); foundPic {
+ a.handler.sessionCache.Clear(*a.info.sessionId)
+ a.handler.sessionCache.SetMode(*a.info.sessionId,
+ services.ModePicCreate)
+ a.handler.sessionCache.SetPicResolution(*a.info.sessionId,
+ services.Resolution1024)
+ sendPicCreateInstructionCard(*a.ctx, a.info.sessionId,
+ a.info.msgId)
+ return false
+ }
+
+ mode := a.handler.sessionCache.GetMode(*a.info.sessionId)
+ //fmt.Println("mode: ", mode)
+ logger.Debug("MODE:", mode)
+ // 收到一张图片,且不在图片创作模式下, 提醒是否切换到图片创作模式
+ if a.info.msgType == "image" && mode != services.ModePicCreate {
+ sendPicModeCheckCard(*a.ctx, a.info.sessionId, a.info.msgId)
+ return false
+ }
+
+ if a.info.msgType == "image" && mode == services.ModePicCreate {
+ //保存图片
+ imageKey := a.info.imageKey
+ //fmt.Printf("fileKey: %s \n", imageKey)
+ msgId := a.info.msgId
+ //fmt.Println("msgId: ", *msgId)
+ req := larkim.NewGetMessageResourceReqBuilder().MessageId(
+ *msgId).FileKey(imageKey).Type("image").Build()
+ resp, err := initialization.GetLarkClient().Im.MessageResource.Get(context.Background(), req)
+ //fmt.Println(resp, err)
+ if err != nil {
+ //fmt.Println(err)
+ replyMsg(*a.ctx, fmt.Sprintf("🤖️:图片下载失败,请稍后再试~\n 错误信息: %v", err),
+ a.info.msgId)
+ return false
+ }
+
+ f := fmt.Sprintf("%s.png", imageKey)
+ resp.WriteFile(f)
+ defer os.Remove(f)
+ resolution := a.handler.sessionCache.GetPicResolution(*a.
+ info.sessionId)
+
+ openai.ConvertJpegToPNG(f)
+ openai.ConvertToRGBA(f, f)
+
+ //图片校验
+ err = openai.VerifyPngs([]string{f})
+ if err != nil {
+ replyMsg(*a.ctx, fmt.Sprintf("🤖️:无法解析图片,请发送原图并尝试重新操作~"),
+ a.info.msgId)
+ return false
+ }
+ bs64, err := a.handler.gpt.GenerateOneImageVariation(f, resolution)
+ if err != nil {
+ replyMsg(*a.ctx, fmt.Sprintf(
+ "🤖️:图片生成失败,请稍后再试~\n错误信息: %v", err), a.info.msgId)
+ return false
+ }
+ replayImagePlainByBase64(*a.ctx, bs64, a.info.msgId)
+ return false
+
+ }
+
+ // 生成图片
+ if mode == services.ModePicCreate {
+ resolution := a.handler.sessionCache.GetPicResolution(*a.
+ info.sessionId)
+ style := a.handler.sessionCache.GetPicStyle(*a.
+ info.sessionId)
+ bs64, err := a.handler.gpt.GenerateOneImage(a.info.qParsed,
+ resolution, style)
+ if err != nil {
+ replyMsg(*a.ctx, fmt.Sprintf(
+ "🤖️:图片生成失败,请稍后再试~\n错误信息: %v", err), a.info.msgId)
+ return false
+ }
+ replayImageCardByBase64(*a.ctx, bs64, a.info.msgId, a.info.sessionId,
+ a.info.qParsed)
+ return false
+ }
+
+ return true
+}
diff --git a/code/handlers/event_vision_action.go b/code/handlers/event_vision_action.go
new file mode 100644
index 00000000..ae67873b
--- /dev/null
+++ b/code/handlers/event_vision_action.go
@@ -0,0 +1,160 @@
+package handlers
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "start-feishubot/initialization"
+ "start-feishubot/services"
+ "start-feishubot/services/openai"
+ "start-feishubot/utils"
+
+ larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
+)
+
type VisionAction struct { /* image-reasoning (vision) mode */
}

// Execute routes vision-mode traffic: the "/vision" command enters the
// mode; an image outside the mode triggers a "switch mode?" card; images
// and rich-text posts inside the mode are sent to the vision model.
// Returns false whenever the message was handled here.
func (va *VisionAction) Execute(a *ActionInfo) bool {
	if !AzureModeCheck(a) {
		return true
	}

	if isVisionCommand(a) {
		initializeVisionMode(a)
		sendVisionInstructionCard(*a.ctx, a.info.sessionId, a.info.msgId)
		return false
	}

	mode := a.handler.sessionCache.GetMode(*a.info.sessionId)

	if a.info.msgType == "image" {
		if mode != services.ModeVision {
			// Ask whether to switch into vision mode first.
			sendVisionModeCheckCard(*a.ctx, a.info.sessionId, a.info.msgId)
			return false
		}

		return va.handleVisionImage(a)
	}

	if a.info.msgType == "post" && mode == services.ModeVision {
		return va.handleVisionPost(a)
	}

	return true
}
+
// isVisionCommand reports whether the parsed question is the vision-mode
// trigger ("/vision" or "图片推理").
func isVisionCommand(a *ActionInfo) bool {
	_, foundPic := utils.EitherTrimEqual(a.info.qParsed, "/vision", "图片推理")
	return foundPic
}
+
// initializeVisionMode resets the session and switches it into vision mode
// with the default "high" detail level.
func initializeVisionMode(a *ActionInfo) {
	a.handler.sessionCache.Clear(*a.info.sessionId)
	a.handler.sessionCache.SetMode(*a.info.sessionId, services.ModeVision)
	a.handler.sessionCache.SetVisionDetail(*a.info.sessionId, services.VisionDetailHigh)
}
+
// handleVisionImage runs the vision model over a single image message,
// using the session's stored detail level. Always returns false (handled).
func (va *VisionAction) handleVisionImage(a *ActionInfo) bool {
	detail := a.handler.sessionCache.GetVisionDetail(*a.info.sessionId)
	base64, err := downloadAndEncodeImage(a.info.imageKey, a.info.msgId)
	if err != nil {
		replyWithErrorMsg(*a.ctx, err, a.info.msgId)
		return false
	}

	return va.processImageAndReply(a, base64, detail)
}
+
// handleVisionPost runs the vision model over all images embedded in a
// rich-text (post) message, using the post's text as the question. A post
// without images gets a "please send an image" reply. Always returns false.
func (va *VisionAction) handleVisionPost(a *ActionInfo) bool {
	detail := a.handler.sessionCache.GetVisionDetail(*a.info.sessionId)
	var base64s []string

	for _, imageKey := range a.info.imageKeys {
		if imageKey == "" {
			continue
		}
		base64, err := downloadAndEncodeImage(imageKey, a.info.msgId)
		if err != nil {
			replyWithErrorMsg(*a.ctx, err, a.info.msgId)
			return false
		}
		base64s = append(base64s, base64)
	}

	if len(base64s) == 0 {
		replyMsg(*a.ctx, "🤖️:请发送一张图片", a.info.msgId)
		return false
	}

	return va.processMultipleImagesAndReply(a, base64s, detail)
}
+
+func downloadAndEncodeImage(imageKey string, msgId *string) (string, error) {
+ f := fmt.Sprintf("%s.png", imageKey)
+ defer os.Remove(f)
+
+ req := larkim.NewGetMessageResourceReqBuilder().MessageId(*msgId).FileKey(imageKey).Type("image").Build()
+ resp, err := initialization.GetLarkClient().Im.MessageResource.Get(context.Background(), req)
+ if err != nil {
+ return "", err
+ }
+
+ resp.WriteFile(f)
+ return openai.GetBase64FromImage(f)
+}
+
// replyWithErrorMsg sends the standard "image download failed" reply
// carrying the underlying error text.
func replyWithErrorMsg(ctx context.Context, err error, msgId *string) {
	replyMsg(ctx, fmt.Sprintf("🤖️:图片下载失败,请稍后再试~\n 错误信息: %v", err), msgId)
}
+
// processImageAndReply asks the vision model to describe one image (with
// the fixed prompt "解释这个图片") and replies with a vision-topic card.
// Always returns false (handled).
func (va *VisionAction) processImageAndReply(a *ActionInfo, base64 string, detail string) bool {
	msg := createVisionMessages("解释这个图片", base64, detail)
	completions, err := a.handler.gpt.GetVisionInfo(msg)
	if err != nil {
		replyWithErrorMsg(*a.ctx, err, a.info.msgId)
		return false
	}
	sendVisionTopicCard(*a.ctx, a.info.sessionId, a.info.msgId, completions.Content)
	return false
}
+
// processMultipleImagesAndReply asks the vision model about several images
// at once, using the post's own text as the question, and replies with a
// vision-topic card. Always returns false (handled).
func (va *VisionAction) processMultipleImagesAndReply(a *ActionInfo, base64s []string, detail string) bool {
	msg := createMultipleVisionMessages(a.info.qParsed, base64s, detail)
	completions, err := a.handler.gpt.GetVisionInfo(msg)
	if err != nil {
		replyWithErrorMsg(*a.ctx, err, a.info.msgId)
		return false
	}
	sendVisionTopicCard(*a.ctx, a.info.sessionId, a.info.msgId, completions.Content)
	return false
}
+
// createVisionMessages builds a single-turn vision request: one user
// message containing the text query plus one inline (data-URL) image with
// the requested detail level.
func createVisionMessages(query, base64Image, detail string) []openai.VisionMessages {
	return []openai.VisionMessages{
		{
			Role: "user",
			Content: []openai.ContentType{
				{Type: "text", Text: query},
				{Type: "image_url", ImageURL: &openai.ImageURL{
					URL:    "data:image/jpeg;base64," + base64Image,
					Detail: detail,
				}},
			},
		},
	}
}
+
+func createMultipleVisionMessages(query string, base64Images []string, detail string) []openai.VisionMessages {
+ content := []openai.ContentType{{Type: "text", Text: query}}
+ for _, base64Image := range base64Images {
+ content = append(content, openai.ContentType{
+ Type: "image_url",
+ ImageURL: &openai.ImageURL{
+ URL: "data:image/jpeg;base64," + base64Image,
+ Detail: detail,
+ },
+ })
+ }
+ return []openai.VisionMessages{{Role: "user", Content: content}}
+}
diff --git a/code/handlers/handler.go b/code/handlers/handler.go
new file mode 100644
index 00000000..d7622f1b
--- /dev/null
+++ b/code/handlers/handler.go
@@ -0,0 +1,140 @@
+package handlers
+
+import (
+ "context"
+ "fmt"
+ larkcore "github.com/larksuite/oapi-sdk-go/v3/core"
+ "start-feishubot/logger"
+ "strings"
+
+ "start-feishubot/initialization"
+ "start-feishubot/services"
+ "start-feishubot/services/openai"
+
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
+ larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
+)
+
// chain runs the responsibility chain: each Action's Execute is invoked in
// order until one returns false (message handled or dropped).
func chain(data *ActionInfo, actions ...Action) bool {
	for _, v := range actions {
		if !v.Execute(data) {
			return false
		}
	}
	return true
}
+
// MessageHandler owns everything needed to serve Feishu events: the session
// and de-duplication caches, the GPT client, and the loaded app config.
type MessageHandler struct {
	sessionCache services.SessionServiceCacheInterface
	msgCache     services.MsgCacheInterface
	gpt          *openai.ChatGPT
	config       initialization.Config
}
+
// cardHandler dispatches card-action callbacks through the card handler
// chain built by NewCardHandler.
func (m MessageHandler) cardHandler(ctx context.Context,
	cardAction *larkcard.CardAction) (interface{}, error) {
	messageHandler := NewCardHandler(m)
	return messageHandler(ctx, cardAction)
}
+
// judgeMsgType whitelists the message types the bot can process; any other
// type is rejected with an error.
func judgeMsgType(event *larkim.P2MessageReceiveV1) (string, error) {
	msgType := event.Event.Message.MessageType

	switch *msgType {
	case "text", "image", "audio", "post":
		return *msgType, nil
	default:
		return "", fmt.Errorf("unknown message type: %v", *msgType)
	}
}
+
// msgReceivedHandler is the entry point for Feishu "message received"
// events: it normalizes the event into a MsgInfo and runs the action chain
// over it. Event-level failures are logged and swallowed (nil is returned)
// so the callback is always acknowledged to Feishu.
func (m MessageHandler) msgReceivedHandler(ctx context.Context, event *larkim.P2MessageReceiveV1) error {
	handlerType := judgeChatType(event)
	logger.Debug("handlerType", handlerType)
	if handlerType == "otherChat" {
		fmt.Println("unknown chat type")
		return nil
	}
	logger.Debug("收到消息:", larkcore.Prettify(event.Event.Message))

	msgType, err := judgeMsgType(event)
	if err != nil {
		fmt.Printf("error getting message type: %v\n", err)
		return nil
	}

	content := event.Event.Message.Content
	msgId := event.Event.Message.MessageId
	rootId := event.Event.Message.RootId
	chatId := event.Event.Message.ChatId
	mention := event.Event.Message.Mentions

	// A threaded reply shares its root message's session; a fresh message
	// starts a new session keyed by its own id.
	sessionId := rootId
	if sessionId == nil || *sessionId == "" {
		sessionId = msgId
	}
	msgInfo := MsgInfo{
		handlerType: handlerType,
		msgType:     msgType,
		msgId:       msgId,
		chatId:      chatId,
		qParsed:     strings.Trim(parseContent(*content, msgType), " "),
		fileKey:     parseFileKey(*content),
		imageKey:    parseImageKey(*content),
		imageKeys:   parsePostImageKeys(*content),
		sessionId:   sessionId,
		mention:     mention,
	}
	data := &ActionInfo{
		ctx:     &ctx,
		handler: &m,
		info:    &msgInfo,
	}
	// Order matters: earlier actions can stop the chain.
	actions := []Action{
		&ProcessedUniqueAction{}, // de-duplicate redelivered events
		&ProcessMentionAction{},  // should the bot react at all?
		&AudioAction{},           // voice transcription
		&ClearAction{},           // context clearing
		&VisionAction{},          // image reasoning
		&PicAction{},             // picture creation
		&AIModeAction{},          // AI mode switching
		&RoleListAction{},        // built-in role list
		&HelpAction{},            // help
		&BalanceAction{},         // balance query
		&RolePlayAction{},        // custom system prompt
		&MessageAction{},         // chat completion (non-streaming)
		&EmptyAction{},           // empty-input guard
		&StreamMessageAction{},   // chat completion (streaming)
	}
	chain(data, actions...)
	return nil
}
+
+var _ MessageHandlerInterface = (*MessageHandler)(nil)
+
// NewMessageHandler wires a MessageHandler with the shared session and
// de-duplication caches, the GPT client and the loaded config.
func NewMessageHandler(gpt *openai.ChatGPT,
	config initialization.Config) MessageHandlerInterface {
	return &MessageHandler{
		sessionCache: services.GetSessionCache(),
		msgCache:     services.GetMsgCache(),
		gpt:          gpt,
		config:       config,
	}
}
+
// judgeIfMentionMe reports whether the message @-mentions this bot (matched
// by the configured bot name).
// NOTE(review): a message mentioning the bot together with other users
// (len(mention) > 1) is treated as "not mentioned" — confirm this is
// intended.
func (m MessageHandler) judgeIfMentionMe(mention []*larkim.
	MentionEvent) bool {
	if len(mention) != 1 {
		return false
	}
	return *mention[0].Name == m.config.FeishuBotName
}
+
// AzureModeCheck gates features that are unavailable on the Azure OpenAI
// backend: it returns false (feature blocked) when AzureOn is set, true
// otherwise. Callers typically skip their action when this returns false.
func AzureModeCheck(a *ActionInfo) bool {
	if a.handler.config.AzureOn {
		//sendMsg(*a.ctx, "Azure Openai 接口下,暂不支持此功能", a.info.chatId)
		return false
	}
	return true
}
diff --git a/code/handlers/init.go b/code/handlers/init.go
index d90316e5..25ab953a 100644
--- a/code/handlers/init.go
+++ b/code/handlers/init.go
@@ -2,11 +2,18 @@ package handlers
import (
"context"
+ "start-feishubot/logger"
+
+ "start-feishubot/initialization"
+ "start-feishubot/services/openai"
+
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
)
type MessageHandlerInterface interface {
- handle(ctx context.Context, event *larkim.P2MessageReceiveV1) error
+ msgReceivedHandler(ctx context.Context, event *larkim.P2MessageReceiveV1) error
+ cardHandler(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error)
}
type HandlerType string
@@ -17,14 +24,52 @@ const (
)
// handlers 所有消息类型类型的处理器
-var handlers map[HandlerType]MessageHandlerInterface
+var handlers MessageHandlerInterface
-func init() {
- handlers = make(map[HandlerType]MessageHandlerInterface)
- //handlers[GroupHandler] = NewGroupMessageHandler()
- handlers[UserHandler] = NewPersonalMessageHandler()
+func InitHandlers(gpt *openai.ChatGPT, config initialization.Config) {
+ handlers = NewMessageHandler(gpt, config)
}
func Handler(ctx context.Context, event *larkim.P2MessageReceiveV1) error {
- return handlers[UserHandler].handle(ctx, event)
+ return handlers.msgReceivedHandler(ctx, event)
+}
+
+func ReadHandler(ctx context.Context, event *larkim.P2MessageReadV1) error {
+ readerId := event.Event.Reader.ReaderId.OpenId
+ //fmt.Printf("msg is read by : %v \n", *readerId)
+ logger.Debugf("msg is read by : %v \n", *readerId)
+
+ return nil
+}
+
+func CardHandler() func(ctx context.Context,
+ cardAction *larkcard.CardAction) (interface{}, error) {
+ return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
+ //handlerType := judgeCardType(cardAction)
+ return handlers.cardHandler(ctx, cardAction)
+ }
+}
+
+func judgeCardType(cardAction *larkcard.CardAction) HandlerType {
+ actionValue := cardAction.Action.Value
+ chatType := actionValue["chatType"]
+ //fmt.Printf("chatType: %v", chatType)
+ if chatType == "group" {
+ return GroupHandler
+ }
+ if chatType == "personal" {
+ return UserHandler
+ }
+ return "otherChat"
+}
+
+func judgeChatType(event *larkim.P2MessageReceiveV1) HandlerType {
+ chatType := event.Event.Message.ChatType
+ if *chatType == "group" {
+ return GroupHandler
+ }
+ if *chatType == "p2p" {
+ return UserHandler
+ }
+ return "otherChat"
}
diff --git a/code/handlers/msg.go b/code/handlers/msg.go
new file mode 100644
index 00000000..3b752149
--- /dev/null
+++ b/code/handlers/msg.go
@@ -0,0 +1,1047 @@
+package handlers
+
+import (
+ "bytes"
+ "context"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "start-feishubot/logger"
+
+ "start-feishubot/initialization"
+ "start-feishubot/services"
+ "start-feishubot/services/openai"
+
+ "github.com/google/uuid"
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
+ larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
+)
+
+type CardKind string
+type CardChatType string
+
+var (
+ ClearCardKind = CardKind("clear") // 清空上下文
+ PicModeChangeKind = CardKind("pic_mode_change") // 切换图片创作模式
+	VisionModeChangeKind = CardKind("vision_mode") // 切换图片推理模式
+ PicResolutionKind = CardKind("pic_resolution") // 图片分辨率调整
+ PicStyleKind = CardKind("pic_style") // 图片风格调整
+ VisionStyleKind = CardKind("vision_style") // 图片推理级别调整
+ PicTextMoreKind = CardKind("pic_text_more") // 重新根据文本生成图片
+	PicVarMoreKind = CardKind("pic_var_more") // 生成图片变体(再来一张)
+ RoleTagsChooseKind = CardKind("role_tags_choose") // 内置角色所属标签选择
+ RoleChooseKind = CardKind("role_choose") // 内置角色选择
+ AIModeChooseKind = CardKind("ai_mode_choose") // AI模式选择
+)
+
+var (
+ GroupChatType = CardChatType("group")
+ UserChatType = CardChatType("personal")
+)
+
+type CardMsg struct {
+ Kind CardKind
+ ChatType CardChatType
+ Value interface{}
+ SessionId string
+ MsgId string
+}
+
+type MenuOption struct {
+ value string
+ label string
+}
+
+func replyCard(ctx context.Context,
+ msgId *string,
+ cardContent string,
+) error {
+ client := initialization.GetLarkClient()
+ resp, err := client.Im.Message.Reply(ctx, larkim.NewReplyMessageReqBuilder().
+ MessageId(*msgId).
+ Body(larkim.NewReplyMessageReqBodyBuilder().
+ MsgType(larkim.MsgTypeInteractive).
+ Uuid(uuid.New().String()).
+ Content(cardContent).
+ Build()).
+ Build())
+
+ // 处理错误
+ if err != nil {
+ fmt.Println(err)
+ return err
+ }
+
+ // 服务端错误处理
+ if !resp.Success() {
+ logger.Errorf("服务端错误 resp code[%v], msg [%v] requestId [%v] ", resp.Code, resp.Msg, resp.RequestId())
+ return errors.New(resp.Msg)
+ }
+ return nil
+}
+
+func newSendCard(
+ header *larkcard.MessageCardHeader,
+ elements ...larkcard.MessageCardElement) (string,
+ error) {
+ config := larkcard.NewMessageCardConfig().
+ WideScreenMode(false).
+ EnableForward(true).
+ UpdateMulti(false).
+ Build()
+ var aElementPool []larkcard.MessageCardElement
+ aElementPool = append(aElementPool, elements...)
+ // 卡片消息体
+ cardContent, err := larkcard.NewMessageCard().
+ Config(config).
+ Header(header).
+ Elements(
+ aElementPool,
+ ).
+ String()
+ return cardContent, err
+}
+
+func newSimpleSendCard(
+ elements ...larkcard.MessageCardElement) (string,
+ error) {
+ config := larkcard.NewMessageCardConfig().
+ WideScreenMode(false).
+ EnableForward(true).
+ UpdateMulti(false).
+ Build()
+ var aElementPool []larkcard.MessageCardElement
+ aElementPool = append(aElementPool, elements...)
+ // 卡片消息体
+ cardContent, err := larkcard.NewMessageCard().
+ Config(config).
+ Elements(
+ aElementPool,
+ ).
+ String()
+ return cardContent, err
+}
+
+// withSplitLine 用于生成分割线
+func withSplitLine() larkcard.MessageCardElement {
+ splitLine := larkcard.NewMessageCardHr().
+ Build()
+ return splitLine
+}
+
+// withHeader 用于生成消息头
+func withHeader(title string, color string) *larkcard.
+ MessageCardHeader {
+ if title == "" {
+ title = "🤖️机器人提醒"
+ }
+ header := larkcard.NewMessageCardHeader().
+ Template(color).
+ Title(larkcard.NewMessageCardPlainText().
+ Content(title).
+ Build()).
+ Build()
+ return header
+}
+
+// withNote 用于生成纯文本脚注
+func withNote(note string) larkcard.MessageCardElement {
+ noteElement := larkcard.NewMessageCardNote().
+ Elements([]larkcard.MessageCardNoteElement{larkcard.NewMessageCardPlainText().
+ Content(note).
+ Build()}).
+ Build()
+ return noteElement
+}
+
+// withMainMd 用于生成markdown消息体
+func withMainMd(msg string) larkcard.MessageCardElement {
+ msg, i := processMessage(msg)
+ msg = processNewLine(msg)
+ if i != nil {
+ return nil
+ }
+ mainElement := larkcard.NewMessageCardDiv().
+ Fields([]*larkcard.MessageCardField{larkcard.NewMessageCardField().
+ Text(larkcard.NewMessageCardLarkMd().
+ Content(msg).
+ Build()).
+ IsShort(true).
+ Build()}).
+ Build()
+ return mainElement
+}
+
+// withMainText 用于生成纯文本消息体
+func withMainText(msg string) larkcard.MessageCardElement {
+ msg, i := processMessage(msg)
+ msg = cleanTextBlock(msg)
+ if i != nil {
+ return nil
+ }
+ mainElement := larkcard.NewMessageCardDiv().
+ Fields([]*larkcard.MessageCardField{larkcard.NewMessageCardField().
+ Text(larkcard.NewMessageCardPlainText().
+ Content(msg).
+ Build()).
+ IsShort(false).
+ Build()}).
+ Build()
+ return mainElement
+}
+
+func withImageDiv(imageKey string) larkcard.MessageCardElement {
+ imageElement := larkcard.NewMessageCardImage().
+ ImgKey(imageKey).
+ Alt(larkcard.NewMessageCardPlainText().Content("").
+ Build()).
+ Preview(true).
+ Mode(larkcard.MessageCardImageModelCropCenter).
+ CompactWidth(true).
+ Build()
+ return imageElement
+}
+
+// withMdAndExtraBtn 用于生成带有额外按钮的消息体
+func withMdAndExtraBtn(msg string, btn *larkcard.
+ MessageCardEmbedButton) larkcard.MessageCardElement {
+ msg, i := processMessage(msg)
+ msg = processNewLine(msg)
+ if i != nil {
+ return nil
+ }
+ mainElement := larkcard.NewMessageCardDiv().
+ Fields(
+ []*larkcard.MessageCardField{
+ larkcard.NewMessageCardField().
+ Text(larkcard.NewMessageCardLarkMd().
+ Content(msg).
+ Build()).
+ IsShort(true).
+ Build()}).
+ Extra(btn).
+ Build()
+ return mainElement
+}
+
+func newBtn(content string, value map[string]interface{},
+ typename larkcard.MessageCardButtonType) *larkcard.
+ MessageCardEmbedButton {
+ btn := larkcard.NewMessageCardEmbedButton().
+ Type(typename).
+ Value(value).
+ Text(larkcard.NewMessageCardPlainText().
+ Content(content).
+ Build())
+ return btn
+}
+
+func newMenu(
+ placeHolder string,
+ value map[string]interface{},
+ options ...MenuOption,
+) *larkcard.
+ MessageCardEmbedSelectMenuStatic {
+ var aOptionPool []*larkcard.MessageCardEmbedSelectOption
+ for _, option := range options {
+ aOption := larkcard.NewMessageCardEmbedSelectOption().
+ Value(option.value).
+ Text(larkcard.NewMessageCardPlainText().
+ Content(option.label).
+ Build())
+ aOptionPool = append(aOptionPool, aOption)
+
+ }
+ btn := larkcard.NewMessageCardEmbedSelectMenuStatic().
+ MessageCardEmbedSelectMenuStatic(larkcard.NewMessageCardEmbedSelectMenuBase().
+ Options(aOptionPool).
+ Placeholder(larkcard.NewMessageCardPlainText().
+ Content(placeHolder).
+ Build()).
+ Value(value).
+ Build()).
+ Build()
+ return btn
+}
+
+// 清除卡片按钮
+func withClearDoubleCheckBtn(sessionID *string) larkcard.MessageCardElement {
+ confirmBtn := newBtn("确认清除", map[string]interface{}{
+ "value": "1",
+ "kind": ClearCardKind,
+ "chatType": UserChatType,
+ "sessionId": *sessionID,
+ }, larkcard.MessageCardButtonTypeDanger,
+ )
+ cancelBtn := newBtn("我再想想", map[string]interface{}{
+ "value": "0",
+ "kind": ClearCardKind,
+ "sessionId": *sessionID,
+ "chatType": UserChatType,
+ },
+ larkcard.MessageCardButtonTypeDefault)
+
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{confirmBtn, cancelBtn}).
+ Layout(larkcard.MessageCardActionLayoutBisected.Ptr()).
+ Build()
+
+ return actions
+}
+
+func withPicModeDoubleCheckBtn(sessionID *string) larkcard.
+ MessageCardElement {
+ confirmBtn := newBtn("切换模式", map[string]interface{}{
+ "value": "1",
+ "kind": PicModeChangeKind,
+ "chatType": UserChatType,
+ "sessionId": *sessionID,
+ }, larkcard.MessageCardButtonTypeDanger,
+ )
+ cancelBtn := newBtn("我再想想", map[string]interface{}{
+ "value": "0",
+ "kind": PicModeChangeKind,
+ "sessionId": *sessionID,
+ "chatType": UserChatType,
+ },
+ larkcard.MessageCardButtonTypeDefault)
+
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{confirmBtn, cancelBtn}).
+ Layout(larkcard.MessageCardActionLayoutBisected.Ptr()).
+ Build()
+
+ return actions
+}
+func withVisionModeDoubleCheckBtn(sessionID *string) larkcard.
+ MessageCardElement {
+ confirmBtn := newBtn("切换模式", map[string]interface{}{
+ "value": "1",
+ "kind": VisionModeChangeKind,
+ "chatType": UserChatType,
+ "sessionId": *sessionID,
+ }, larkcard.MessageCardButtonTypeDanger,
+ )
+ cancelBtn := newBtn("我再想想", map[string]interface{}{
+ "value": "0",
+ "kind": VisionModeChangeKind,
+ "sessionId": *sessionID,
+ "chatType": UserChatType,
+ },
+ larkcard.MessageCardButtonTypeDefault)
+
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{confirmBtn, cancelBtn}).
+ Layout(larkcard.MessageCardActionLayoutBisected.Ptr()).
+ Build()
+
+ return actions
+}
+
+func withOneBtn(btn *larkcard.MessageCardEmbedButton) larkcard.
+ MessageCardElement {
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{btn}).
+ Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
+ Build()
+ return actions
+}
+
+//新建对话按钮
+
+func withPicResolutionBtn(sessionID *string) larkcard.
+ MessageCardElement {
+ resolutionMenu := newMenu("默认分辨率",
+ map[string]interface{}{
+ "value": "0",
+ "kind": PicResolutionKind,
+ "sessionId": *sessionID,
+ "msgId": *sessionID,
+ },
+ // dall-e-2 256, 512, 1024
+ //MenuOption{
+ // label: "256x256",
+ // value: string(services.Resolution256),
+ //},
+ //MenuOption{
+ // label: "512x512",
+ // value: string(services.Resolution512),
+ //},
+ // dall-e-3
+ MenuOption{
+ label: "1024x1024",
+ value: string(services.Resolution1024),
+ },
+ MenuOption{
+ label: "1024x1792",
+ value: string(services.Resolution10241792),
+ },
+ MenuOption{
+ label: "1792x1024",
+ value: string(services.Resolution17921024),
+ },
+ )
+
+ styleMenu := newMenu("风格",
+ map[string]interface{}{
+ "value": "0",
+ "kind": PicStyleKind,
+ "sessionId": *sessionID,
+ "msgId": *sessionID,
+ },
+ MenuOption{
+ label: "生动风格",
+ value: string(services.PicStyleVivid),
+ },
+ MenuOption{
+ label: "自然风格",
+ value: string(services.PicStyleNatural),
+ },
+ )
+
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{resolutionMenu, styleMenu}).
+ Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
+ Build()
+ return actions
+}
+
+func withVisionDetailLevelBtn(sessionID *string) larkcard.
+ MessageCardElement {
+ detailMenu := newMenu("选择图片解析度,默认为高",
+ map[string]interface{}{
+ "value": "0",
+ "kind": VisionStyleKind,
+ "sessionId": *sessionID,
+ "msgId": *sessionID,
+ },
+ MenuOption{
+ label: "高",
+ value: string(services.VisionDetailHigh),
+ },
+ MenuOption{
+ label: "低",
+ value: string(services.VisionDetailLow),
+ },
+ )
+
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{detailMenu}).
+ Layout(larkcard.MessageCardActionLayoutBisected.Ptr()).
+ Build()
+
+ return actions
+}
+func withRoleTagsBtn(sessionID *string, tags ...string) larkcard.
+ MessageCardElement {
+ var menuOptions []MenuOption
+
+ for _, tag := range tags {
+ menuOptions = append(menuOptions, MenuOption{
+ label: tag,
+ value: tag,
+ })
+ }
+ cancelMenu := newMenu("选择角色分类",
+ map[string]interface{}{
+ "value": "0",
+ "kind": RoleTagsChooseKind,
+ "sessionId": *sessionID,
+ "msgId": *sessionID,
+ },
+ menuOptions...,
+ )
+
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{cancelMenu}).
+ Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
+ Build()
+ return actions
+}
+
+func withRoleBtn(sessionID *string, titles ...string) larkcard.
+ MessageCardElement {
+ var menuOptions []MenuOption
+
+ for _, tag := range titles {
+ menuOptions = append(menuOptions, MenuOption{
+ label: tag,
+ value: tag,
+ })
+ }
+ cancelMenu := newMenu("查看内置角色",
+ map[string]interface{}{
+ "value": "0",
+ "kind": RoleChooseKind,
+ "sessionId": *sessionID,
+ "msgId": *sessionID,
+ },
+ menuOptions...,
+ )
+
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{cancelMenu}).
+ Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
+ Build()
+ return actions
+}
+
+func withAIModeBtn(sessionID *string, aiModeStrs []string) larkcard.MessageCardElement {
+ var menuOptions []MenuOption
+ for _, label := range aiModeStrs {
+ menuOptions = append(menuOptions, MenuOption{
+ label: label,
+ value: label,
+ })
+ }
+
+ cancelMenu := newMenu("选择模式",
+ map[string]interface{}{
+ "value": "0",
+ "kind": AIModeChooseKind,
+ "sessionId": *sessionID,
+ "msgId": *sessionID,
+ },
+ menuOptions...,
+ )
+
+ actions := larkcard.NewMessageCardAction().
+ Actions([]larkcard.MessageCardActionElement{cancelMenu}).
+ Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
+ Build()
+ return actions
+}
+
+func replyMsg(ctx context.Context, msg string, msgId *string) error {
+ msg, i := processMessage(msg)
+ if i != nil {
+ return i
+ }
+ client := initialization.GetLarkClient()
+ content := larkim.NewTextMsgBuilder().
+ Text(msg).
+ Build()
+
+ resp, err := client.Im.Message.Reply(ctx, larkim.NewReplyMessageReqBuilder().
+ MessageId(*msgId).
+ Body(larkim.NewReplyMessageReqBodyBuilder().
+ MsgType(larkim.MsgTypeText).
+ Uuid(uuid.New().String()).
+ Content(content).
+ Build()).
+ Build())
+
+ // 处理错误
+ if err != nil {
+ fmt.Println(err)
+ return err
+ }
+
+ // 服务端错误处理
+ if !resp.Success() {
+ fmt.Println(resp.Code, resp.Msg, resp.RequestId())
+ return errors.New(resp.Msg)
+ }
+ return nil
+}
+
+func uploadImage(base64Str string) (*string, error) {
+ imageBytes, err := base64.StdEncoding.DecodeString(base64Str)
+ if err != nil {
+ fmt.Println(err)
+ return nil, err
+ }
+ client := initialization.GetLarkClient()
+ resp, err := client.Im.Image.Create(context.Background(),
+ larkim.NewCreateImageReqBuilder().
+ Body(larkim.NewCreateImageReqBodyBuilder().
+ ImageType(larkim.ImageTypeMessage).
+ Image(bytes.NewReader(imageBytes)).
+ Build()).
+ Build())
+
+ // 处理错误
+ if err != nil {
+ fmt.Println(err)
+ return nil, err
+ }
+
+ // 服务端错误处理
+ if !resp.Success() {
+ fmt.Println(resp.Code, resp.Msg, resp.RequestId())
+ return nil, errors.New(resp.Msg)
+ }
+ return resp.Data.ImageKey, nil
+}
+
+func replyImage(ctx context.Context, ImageKey *string,
+ msgId *string) error {
+ //fmt.Println("sendMsg", ImageKey, msgId)
+
+ msgImage := larkim.MessageImage{ImageKey: *ImageKey}
+ content, err := msgImage.String()
+ if err != nil {
+ fmt.Println(err)
+ return err
+ }
+ client := initialization.GetLarkClient()
+
+ resp, err := client.Im.Message.Reply(ctx, larkim.NewReplyMessageReqBuilder().
+ MessageId(*msgId).
+ Body(larkim.NewReplyMessageReqBodyBuilder().
+ MsgType(larkim.MsgTypeImage).
+ Uuid(uuid.New().String()).
+ Content(content).
+ Build()).
+ Build())
+
+ // 处理错误
+ if err != nil {
+ fmt.Println(err)
+ return err
+ }
+
+ // 服务端错误处理
+ if !resp.Success() {
+ fmt.Println(resp.Code, resp.Msg, resp.RequestId())
+ return errors.New(resp.Msg)
+ }
+ return nil
+}
+
+func replayImageCardByBase64(ctx context.Context, base64Str string,
+ msgId *string, sessionId *string, question string) error {
+ imageKey, err := uploadImage(base64Str)
+ if err != nil {
+ return err
+ }
+ //example := "img_v2_041b28e3-5680-48c2-9af2-497ace79333g"
+ //imageKey := &example
+ //fmt.Println("imageKey", *imageKey)
+ err = sendImageCard(ctx, *imageKey, msgId, sessionId, question)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func replayImagePlainByBase64(ctx context.Context, base64Str string,
+ msgId *string) error {
+ imageKey, err := uploadImage(base64Str)
+ if err != nil {
+ return err
+ }
+ //example := "img_v2_041b28e3-5680-48c2-9af2-497ace79333g"
+ //imageKey := &example
+ //fmt.Println("imageKey", *imageKey)
+ err = replyImage(ctx, imageKey, msgId)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func replayVariantImageByBase64(ctx context.Context, base64Str string,
+ msgId *string, sessionId *string) error {
+ imageKey, err := uploadImage(base64Str)
+ if err != nil {
+ return err
+ }
+ //example := "img_v2_041b28e3-5680-48c2-9af2-497ace79333g"
+ //imageKey := &example
+ //fmt.Println("imageKey", *imageKey)
+ err = sendVarImageCard(ctx, *imageKey, msgId, sessionId)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func sendMsg(ctx context.Context, msg string, chatId *string) error {
+ //fmt.Println("sendMsg", msg, chatId)
+ msg, i := processMessage(msg)
+ if i != nil {
+ return i
+ }
+ client := initialization.GetLarkClient()
+ content := larkim.NewTextMsgBuilder().
+ Text(msg).
+ Build()
+
+ //fmt.Println("content", content)
+
+ resp, err := client.Im.Message.Create(ctx, larkim.NewCreateMessageReqBuilder().
+ ReceiveIdType(larkim.ReceiveIdTypeChatId).
+ Body(larkim.NewCreateMessageReqBodyBuilder().
+ MsgType(larkim.MsgTypeText).
+ ReceiveId(*chatId).
+ Content(content).
+ Build()).
+ Build())
+
+ // 处理错误
+ if err != nil {
+ fmt.Println(err)
+ return err
+ }
+
+ // 服务端错误处理
+ if !resp.Success() {
+ fmt.Println(resp.Code, resp.Msg, resp.RequestId())
+ return errors.New(resp.Msg)
+ }
+ return nil
+}
+
+func sendClearCacheCheckCard(ctx context.Context,
+ sessionId *string, msgId *string) {
+ newCard, _ := newSendCard(
+ withHeader("🆑 机器人提醒", larkcard.TemplateBlue),
+ withMainMd("您确定要清除对话上下文吗?"),
+ withNote("请注意,这将开始一个全新的对话,您将无法利用之前话题的历史信息"),
+ withClearDoubleCheckBtn(sessionId))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendSystemInstructionCard(ctx context.Context,
+ sessionId *string, msgId *string, content string) {
+ newCard, _ := newSendCard(
+ withHeader("🥷 已进入角色扮演模式", larkcard.TemplateIndigo),
+ withMainText(content),
+ withNote("请注意,这将开始一个全新的对话,您将无法利用之前话题的历史信息"))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendPicCreateInstructionCard(ctx context.Context,
+ sessionId *string, msgId *string) {
+ newCard, _ := newSendCard(
+ withHeader("🖼️ 已进入图片创作模式", larkcard.TemplateBlue),
+ withPicResolutionBtn(sessionId),
+ withNote("提醒:回复文本或图片,让AI生成相关的图片。"))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendVisionInstructionCard(ctx context.Context,
+ sessionId *string, msgId *string) {
+ newCard, _ := newSendCard(
+ withHeader("🕵️️ 已进入图片推理模式", larkcard.TemplateBlue),
+ withVisionDetailLevelBtn(sessionId),
+ withNote("提醒:回复图片,让LLM和你一起推理图片的内容。"))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendPicModeCheckCard(ctx context.Context,
+ sessionId *string, msgId *string) {
+ newCard, _ := newSendCard(
+ withHeader("🖼️ 机器人提醒", larkcard.TemplateBlue),
+ withMainMd("收到图片,是否进入图片创作模式?"),
+ withNote("请注意,这将开始一个全新的对话,您将无法利用之前话题的历史信息"),
+ withPicModeDoubleCheckBtn(sessionId))
+ replyCard(ctx, msgId, newCard)
+}
+func sendVisionModeCheckCard(ctx context.Context,
+ sessionId *string, msgId *string) {
+ newCard, _ := newSendCard(
+ withHeader("🕵️ 机器人提醒", larkcard.TemplateBlue),
+ withMainMd("检测到图片,是否进入图片推理模式?"),
+ withNote("请注意,这将开始一个全新的对话,您将无法利用之前话题的历史信息"),
+ withVisionModeDoubleCheckBtn(sessionId))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendNewTopicCard(ctx context.Context,
+ sessionId *string, msgId *string, content string) {
+ newCard, _ := newSendCard(
+ withHeader("👻️ 已开启新的话题", larkcard.TemplateBlue),
+ withMainText(content),
+ withNote("提醒:点击对话框参与回复,可保持话题连贯"))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendOldTopicCard(ctx context.Context,
+ sessionId *string, msgId *string, content string) {
+ newCard, _ := newSendCard(
+ withHeader("🔃️ 上下文的话题", larkcard.TemplateBlue),
+ withMainText(content),
+ withNote("提醒:点击对话框参与回复,可保持话题连贯"))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendVisionTopicCard(ctx context.Context,
+ sessionId *string, msgId *string, content string) {
+ newCard, _ := newSendCard(
+ withHeader("🕵️图片推理结果", larkcard.TemplateBlue),
+ withMainText(content),
+ withNote("让LLM和你一起推理图片的内容~"))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendHelpCard(ctx context.Context,
+ sessionId *string, msgId *string) {
+ newCard, _ := newSendCard(
+ withHeader("🎒需要帮助吗?", larkcard.TemplateBlue),
+ withMainMd("**🤠你好呀~ 我来自企联AI,一款基于OpenAI的智能助手!**"),
+ withSplitLine(),
+ withMdAndExtraBtn(
+ "** 🆑 清除话题上下文**\n文本回复 *清除* 或 */clear*",
+ newBtn("立刻清除", map[string]interface{}{
+ "value": "1",
+ "kind": ClearCardKind,
+ "chatType": UserChatType,
+ "sessionId": *sessionId,
+ }, larkcard.MessageCardButtonTypeDanger)),
+ withSplitLine(),
+ withMainMd("🤖 **发散模式选择** \n"+" 文本回复 *发散模式* 或 */ai_mode*"),
+ withSplitLine(),
+ withMainMd("🛖 **内置角色列表** \n"+" 文本回复 *角色列表* 或 */roles*"),
+ withSplitLine(),
+ withMainMd("🥷 **角色扮演模式**\n文本回复*角色扮演* 或 */system*+空格+角色信息"),
+ withSplitLine(),
+ withMainMd("🎤 **AI语音对话**\n私聊模式下直接发送语音"),
+ withSplitLine(),
+ withMainMd("🎨 **图片创作模式**\n回复*图片创作* 或 */picture*"),
+ withSplitLine(),
+ withMainMd("🕵️ **图片推理模式** \n"+" 文本回复 *图片推理* 或 */vision*"),
+ withSplitLine(),
+ withMainMd("🎰 **Token余额查询**\n回复*余额* 或 */balance*"),
+ withSplitLine(),
+ withMainMd("🔃️ **历史话题回档** 🚧\n"+" 进入话题的回复详情页,文本回复 *恢复* 或 */reload*"),
+ withSplitLine(),
+ withMainMd("📤 **话题内容导出** 🚧\n"+" 文本回复 *导出* 或 */export*"),
+ withSplitLine(),
+ withMainMd("🎰 **连续对话与多话题模式**\n"+" 点击对话框参与回复,可保持话题连贯。同时,单独提问即可开启全新新话题"),
+ withSplitLine(),
+ withMainMd("🎒 **需要更多帮助**\n文本回复 *帮助* 或 */help*"),
+ )
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendImageCard(ctx context.Context, imageKey string,
+ msgId *string, sessionId *string, question string) error {
+ newCard, _ := newSimpleSendCard(
+ withImageDiv(imageKey),
+ withSplitLine(),
+ //再来一张
+ withOneBtn(newBtn("再来一张", map[string]interface{}{
+ "value": question,
+ "kind": PicTextMoreKind,
+ "chatType": UserChatType,
+ "msgId": *msgId,
+ "sessionId": *sessionId,
+ }, larkcard.MessageCardButtonTypePrimary)),
+ )
+ replyCard(ctx, msgId, newCard)
+ return nil
+}
+
+func sendVarImageCard(ctx context.Context, imageKey string,
+ msgId *string, sessionId *string) error {
+ newCard, _ := newSimpleSendCard(
+ withImageDiv(imageKey),
+ withSplitLine(),
+ //再来一张
+ withOneBtn(newBtn("再来一张", map[string]interface{}{
+ "value": imageKey,
+ "kind": PicVarMoreKind,
+ "chatType": UserChatType,
+ "msgId": *msgId,
+ "sessionId": *sessionId,
+ }, larkcard.MessageCardButtonTypePrimary)),
+ )
+ replyCard(ctx, msgId, newCard)
+ return nil
+}
+
+func sendBalanceCard(ctx context.Context, msgId *string,
+ balance openai.BalanceResponse) {
+ newCard, _ := newSendCard(
+ withHeader("🎰️ 余额查询", larkcard.TemplateBlue),
+ withMainMd(fmt.Sprintf("总额度: %.2f$", balance.TotalGranted)),
+ withMainMd(fmt.Sprintf("已用额度: %.2f$", balance.TotalUsed)),
+ withMainMd(fmt.Sprintf("可用额度: %.2f$",
+ balance.TotalAvailable)),
+ withNote(fmt.Sprintf("有效期: %s - %s",
+ balance.EffectiveAt.Format("2006-01-02 15:04:05"),
+ balance.ExpiresAt.Format("2006-01-02 15:04:05"))),
+ )
+ replyCard(ctx, msgId, newCard)
+}
+
+func SendRoleTagsCard(ctx context.Context,
+ sessionId *string, msgId *string, roleTags []string) {
+ newCard, _ := newSendCard(
+ withHeader("🛖 请选择角色类别", larkcard.TemplateIndigo),
+ withRoleTagsBtn(sessionId, roleTags...),
+ withNote("提醒:选择角色所属分类,以便我们为您推荐更多相关角色。"))
+ err := replyCard(ctx, msgId, newCard)
+ if err != nil {
+ logger.Errorf("选择角色出错 %v", err)
+ }
+}
+
+func SendRoleListCard(ctx context.Context,
+ sessionId *string, msgId *string, roleTag string, roleList []string) {
+ newCard, _ := newSendCard(
+ withHeader("🛖 角色列表"+" - "+roleTag, larkcard.TemplateIndigo),
+ withRoleBtn(sessionId, roleList...),
+ withNote("提醒:选择内置场景,快速进入角色扮演模式。"))
+ replyCard(ctx, msgId, newCard)
+}
+
+func SendAIModeListsCard(ctx context.Context,
+ sessionId *string, msgId *string, aiModeStrs []string) {
+ newCard, _ := newSendCard(
+ withHeader("🤖 发散模式选择", larkcard.TemplateIndigo),
+ withAIModeBtn(sessionId, aiModeStrs),
+ withNote("提醒:选择内置模式,让AI更好的理解您的需求。"))
+ replyCard(ctx, msgId, newCard)
+}
+
+func sendOnProcessCard(ctx context.Context,
+ sessionId *string, msgId *string, ifNewTopic bool) (*string,
+ error) {
+ var newCard string
+ if ifNewTopic {
+ newCard, _ = newSendCard(
+ withHeader("👻️ 已开启新的话题", larkcard.TemplateBlue),
+ withNote("正在思考,请稍等..."))
+ } else {
+ newCard, _ = newSendCard(
+ withHeader("🔃️ 上下文的话题", larkcard.TemplateBlue),
+ withNote("正在思考,请稍等..."))
+ }
+
+ id, err := replyCardWithBackId(ctx, msgId, newCard)
+ if err != nil {
+ return nil, err
+ }
+ return id, nil
+}
+
+func updateTextCard(ctx context.Context, msg string,
+ msgId *string, ifNewTopic bool) error {
+ var newCard string
+ if ifNewTopic {
+ newCard, _ = newSendCard(
+ withHeader("👻️ 已开启新的话题", larkcard.TemplateBlue),
+ withMainText(msg),
+ withNote("正在生成,请稍等..."))
+ } else {
+ newCard, _ = newSendCard(
+ withHeader("🔃️ 上下文的话题", larkcard.TemplateBlue),
+ withMainText(msg),
+ withNote("正在生成,请稍等..."))
+ }
+ err := PatchCard(ctx, msgId, newCard)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+func updateFinalCard(
+ ctx context.Context,
+ msg string,
+ msgId *string,
+ ifNewSession bool,
+) error {
+ var newCard string
+ if ifNewSession {
+ newCard, _ = newSendCard(
+ withHeader("👻️ 已开启新的话题", larkcard.TemplateBlue),
+ withMainText(msg),
+ withNote("已完成,您可以继续提问或者选择其他功能。"))
+ } else {
+ newCard, _ = newSendCard(
+ withHeader("🔃️ 上下文的话题", larkcard.TemplateBlue),
+
+ withMainText(msg),
+ withNote("已完成,您可以继续提问或者选择其他功能。"))
+ }
+ err := PatchCard(ctx, msgId, newCard)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func newSendCardWithOutHeader(
+ elements ...larkcard.MessageCardElement) (string, error) {
+ config := larkcard.NewMessageCardConfig().
+ WideScreenMode(false).
+ EnableForward(true).
+ UpdateMulti(true).
+ Build()
+ var aElementPool []larkcard.MessageCardElement
+ aElementPool = append(aElementPool, elements...)
+ // 卡片消息体
+ cardContent, err := larkcard.NewMessageCard().
+ Config(config).
+ Elements(
+ aElementPool,
+ ).
+ String()
+ return cardContent, err
+}
+
+func PatchCard(ctx context.Context, msgId *string,
+ cardContent string) error {
+ //fmt.Println("sendMsg", msg, chatId)
+ client := initialization.GetLarkClient()
+ //content := larkim.NewTextMsgBuilder().
+ // Text(msg).
+ // Build()
+
+ //fmt.Println("content", content)
+
+ resp, err := client.Im.Message.Patch(ctx, larkim.NewPatchMessageReqBuilder().
+ MessageId(*msgId).
+ Body(larkim.NewPatchMessageReqBodyBuilder().
+ Content(cardContent).
+ Build()).
+ Build())
+
+ // 处理错误
+ if err != nil {
+ fmt.Println(err)
+ return err
+ }
+
+ // 服务端错误处理
+ if !resp.Success() {
+ fmt.Println(resp.Code, resp.Msg, resp.RequestId())
+ return errors.New(resp.Msg)
+ }
+ return nil
+}
+
+func replyCardWithBackId(ctx context.Context,
+ msgId *string,
+ cardContent string,
+) (*string, error) {
+ client := initialization.GetLarkClient()
+ resp, err := client.Im.Message.Reply(ctx, larkim.NewReplyMessageReqBuilder().
+ MessageId(*msgId).
+ Body(larkim.NewReplyMessageReqBodyBuilder().
+ MsgType(larkim.MsgTypeInteractive).
+ Uuid(uuid.New().String()).
+ Content(cardContent).
+ Build()).
+ Build())
+
+ // 处理错误
+ if err != nil {
+ fmt.Println(err)
+ return nil, err
+ }
+
+ // 服务端错误处理
+ if !resp.Success() {
+ fmt.Println(resp.Code, resp.Msg, resp.RequestId())
+ return nil, errors.New(resp.Msg)
+ }
+
+ //ctx = context.WithValue(ctx, "SendMsgId", *resp.Data.MessageId)
+ //SendMsgId := ctx.Value("SendMsgId")
+ //pp.Println(SendMsgId)
+ return resp.Data.MessageId, nil
+}
diff --git a/code/handlers/personal.go b/code/handlers/personal.go
deleted file mode 100644
index 8fff071e..00000000
--- a/code/handlers/personal.go
+++ /dev/null
@@ -1,71 +0,0 @@
-package handlers
-
-import (
- "context"
- "fmt"
- larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
- "start-feishubot/services"
-)
-
-type PersonalMessageHandler struct {
- userCache services.UserCacheInterface
- msgCache services.MsgCacheInterface
-}
-
-func (p PersonalMessageHandler) handle(ctx context.Context, event *larkim.P2MessageReceiveV1) error {
-
- content := event.Event.Message.Content
- msgId := event.Event.Message.MessageId
- sender := event.Event.Sender
- openId := sender.SenderId.OpenId
- chatId := event.Event.Message.ChatId
- if p.msgCache.IfProcessed(*msgId) {
- fmt.Println("msgId", *msgId, "processed")
- return nil
- }
- p.msgCache.TagProcessed(*msgId)
- qParsed := parseContent(*content)
- if len(qParsed) == 0 {
- sendMsg(ctx, "🤖️:你想知道什么呢~", chatId)
-
- fmt.Println("msgId", *msgId, "message.text is empty")
- return nil
- }
-
- if qParsed == "/clear" || qParsed == "清除" {
- p.userCache.Clear(*openId)
- sendMsg(ctx, "🤖️:AI机器人已清除记忆", chatId)
- return nil
- }
-
- prompt := p.userCache.Get(*openId)
- prompt = fmt.Sprintf("%s\nQ:%s\nA:", prompt, qParsed)
- completions, err := services.Completions(prompt)
- ok := true
- if err != nil {
- sendMsg(ctx, fmt.Sprintf("🤖️:消息机器人摆烂了,请稍后再试~\n错误信息: %v", err), chatId)
- return nil
- }
- if len(completions) == 0 {
- ok = false
- }
- if ok {
- p.userCache.Set(*openId, qParsed, completions)
- err := sendMsg(ctx, completions, chatId)
- if err != nil {
- sendMsg(ctx, fmt.Sprintf("🤖️:消息机器人摆烂了,请稍后再试~\n错误信息: %v", err), chatId)
- return nil
- }
- }
- return nil
-
-}
-
-var _ MessageHandlerInterface = (*PersonalMessageHandler)(nil)
-
-func NewPersonalMessageHandler() MessageHandlerInterface {
- return &PersonalMessageHandler{
- userCache: services.GetUserCache(),
- msgCache: services.GetMsgCache(),
- }
-}
diff --git a/code/initialization/config.go b/code/initialization/config.go
index 0d6d8132..a33c760a 100644
--- a/code/initialization/config.go
+++ b/code/initialization/config.go
@@ -2,13 +2,174 @@ package initialization
import (
"fmt"
+ "os"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/spf13/pflag"
+
"github.com/spf13/viper"
)
-func LoadConfig() {
- viper.SetConfigFile("./config.yaml")
- err := viper.ReadInConfig()
+type Config struct {
+ // 表示配置是否已经被初始化了。
+ Initialized bool
+ FeishuBaseUrl string
+ FeishuAppId string
+ FeishuAppSecret string
+ FeishuAppEncryptKey string
+ FeishuAppVerificationToken string
+ FeishuBotName string
+ OpenaiApiKeys []string
+ HttpPort int
+ HttpsPort int
+ UseHttps bool
+ CertFile string
+ KeyFile string
+ OpenaiApiUrl string
+ OpenaiModel string
+ OpenAIHttpClientTimeOut int
+ OpenaiMaxTokens int
+ HttpProxy string
+ AzureOn bool
+ AzureApiVersion string
+ AzureDeploymentName string
+ AzureResourceName string
+ AzureOpenaiToken string
+ StreamMode bool
+}
+
+var (
+ cfg = pflag.StringP("config", "c", "./config.yaml", "apiserver config file path.")
+ config *Config
+ once sync.Once
+)
+
+func GetConfig() *Config {
+ once.Do(func() {
+ config = LoadConfig(*cfg)
+ config.Initialized = true
+ })
+
+ return config
+}
+
+func LoadConfig(cfg string) *Config {
+ viper.SetConfigFile(cfg)
+ viper.ReadInConfig()
+ viper.AutomaticEnv()
+ //content, err := ioutil.ReadFile("config.yaml")
+ //if err != nil {
+ // fmt.Println("Error reading file:", err)
+ //}
+ //fmt.Println(string(content))
+
+ config := &Config{
+ FeishuBaseUrl: getViperStringValue("BASE_URL", ""),
+ FeishuAppId: getViperStringValue("APP_ID", ""),
+ FeishuAppSecret: getViperStringValue("APP_SECRET", ""),
+ FeishuAppEncryptKey: getViperStringValue("APP_ENCRYPT_KEY", ""),
+ FeishuAppVerificationToken: getViperStringValue("APP_VERIFICATION_TOKEN", ""),
+ FeishuBotName: getViperStringValue("BOT_NAME", ""),
+ OpenaiApiKeys: getViperStringArray("OPENAI_KEY", []string{""}),
+ OpenaiModel: getViperStringValue("OPENAI_MODEL", "gpt-3.5-turbo"),
+ OpenAIHttpClientTimeOut: getViperIntValue("OPENAI_HTTP_CLIENT_TIMEOUT", 550),
+ OpenaiMaxTokens: getViperIntValue("OPENAI_MAX_TOKENS", 2000),
+ HttpPort: getViperIntValue("HTTP_PORT", 9000),
+ HttpsPort: getViperIntValue("HTTPS_PORT", 9001),
+ UseHttps: getViperBoolValue("USE_HTTPS", false),
+ CertFile: getViperStringValue("CERT_FILE", "cert.pem"),
+ KeyFile: getViperStringValue("KEY_FILE", "key.pem"),
+ OpenaiApiUrl: getViperStringValue("API_URL", "https://api.openai.com"),
+ HttpProxy: getViperStringValue("HTTP_PROXY", ""),
+ AzureOn: getViperBoolValue("AZURE_ON", false),
+ AzureApiVersion: getViperStringValue("AZURE_API_VERSION", "2023-03-15-preview"),
+ AzureDeploymentName: getViperStringValue("AZURE_DEPLOYMENT_NAME", ""),
+ AzureResourceName: getViperStringValue("AZURE_RESOURCE_NAME", ""),
+ AzureOpenaiToken: getViperStringValue("AZURE_OPENAI_TOKEN", ""),
+ StreamMode: getViperBoolValue("STREAM_MODE", false),
+ }
+
+ return config
+}
+
+func getViperStringValue(key string, defaultValue string) string {
+ value := viper.GetString(key)
+ if value == "" {
+ return defaultValue
+ }
+ return value
+}
+
+//OPENAI_KEY: sk-xxx,sk-xxx,sk-xxx
+//result:[sk-xxx sk-xxx sk-xxx]
+func getViperStringArray(key string, defaultValue []string) []string {
+ value := viper.GetString(key)
+ if value == "" {
+ return defaultValue
+ }
+ raw := strings.Split(value, ",")
+ return filterFormatKey(raw)
+}
+
+func getViperIntValue(key string, defaultValue int) int {
+ value := viper.GetString(key)
+ if value == "" {
+ return defaultValue
+ }
+ intValue, err := strconv.Atoi(value)
+ if err != nil {
+ fmt.Printf("Invalid value for %s, using default value %d\n", key, defaultValue)
+ return defaultValue
+ }
+ return intValue
+}
+
+func getViperBoolValue(key string, defaultValue bool) bool {
+ value := viper.GetString(key)
+ if value == "" {
+ return defaultValue
+ }
+ boolValue, err := strconv.ParseBool(value)
if err != nil {
- panic(fmt.Errorf("Fatal error config file: %s \n", err))
+ fmt.Printf("Invalid value for %s, using default value %v\n", key, defaultValue)
+ return defaultValue
+ }
+ return boolValue
+}
+
+func (config *Config) GetCertFile() string {
+ if config.CertFile == "" {
+ return "cert.pem"
}
+ if _, err := os.Stat(config.CertFile); err != nil {
+ fmt.Printf("Certificate file %s does not exist, using default file cert.pem\n", config.CertFile)
+ return "cert.pem"
+ }
+ return config.CertFile
+}
+
+func (config *Config) GetKeyFile() string {
+ if config.KeyFile == "" {
+ return "key.pem"
+ }
+ if _, err := os.Stat(config.KeyFile); err != nil {
+ fmt.Printf("Key file %s does not exist, using default file key.pem\n", config.KeyFile)
+ return "key.pem"
+ }
+ return config.KeyFile
+}
+
+// 过滤出 "sk-" 开头的 key
+func filterFormatKey(keys []string) []string {
+ var result []string
+ for _, key := range keys {
+ if strings.HasPrefix(key, "sk-") || strings.HasPrefix(key,
+ "fk") || strings.HasPrefix(key, "fastgpt") {
+ result = append(result, key)
+ }
+ }
+ return result
+
}
diff --git a/code/initialization/gin.go b/code/initialization/gin.go
new file mode 100644
index 00000000..59e19749
--- /dev/null
+++ b/code/initialization/gin.go
@@ -0,0 +1,72 @@
+package initialization
+
+import (
+ "crypto/tls"
+ "crypto/x509"
+ "fmt"
+ "log"
+ "net/http"
+ //"start-feishubot/logger"
+ "time"
+
+ "github.com/gin-gonic/gin"
+)
+
+func loadCertificate(config Config) (cert tls.Certificate, err error) {
+ cert, err = tls.LoadX509KeyPair(config.CertFile, config.KeyFile)
+ if err != nil {
+ return cert, fmt.Errorf("failed to load certificate: %v", err)
+ }
+
+ // check certificate expiry
+ if len(cert.Certificate) == 0 {
+ return cert, fmt.Errorf("no certificates found in %s", config.CertFile)
+ }
+ parsedCert, err := x509.ParseCertificate(cert.Certificate[0])
+ if err != nil {
+ return cert, fmt.Errorf("failed to parse certificate: %v", err)
+ }
+ cert.Leaf = parsedCert
+ certExpiry := cert.Leaf.NotAfter
+ if certExpiry.Before(time.Now()) {
+ return cert, fmt.Errorf("certificate expired on %v", certExpiry)
+ }
+
+ return cert, nil
+}
+
+func startHTTPServer(config Config, r *gin.Engine) (err error) {
+ log.Printf("http server started: http://localhost:%d/webhook/event\n\n", config.HttpPort)
+ err = r.Run(fmt.Sprintf(":%d", config.HttpPort))
+ if err != nil {
+ return fmt.Errorf("failed to start http server: %v", err)
+ }
+ return nil
+}
+func startHTTPSServer(config Config, r *gin.Engine) (err error) {
+ cert, err := loadCertificate(config)
+ if err != nil {
+ return fmt.Errorf("failed to load certificate: %v", err)
+ }
+ server := &http.Server{
+ Addr: fmt.Sprintf(":%d", config.HttpsPort),
+ Handler: r,
+ TLSConfig: &tls.Config{
+ Certificates: []tls.Certificate{cert},
+ },
+ }
+ fmt.Printf("https server started: https://localhost:%d/webhook/event\n", config.HttpsPort)
+ err = server.ListenAndServeTLS("", "")
+ if err != nil {
+ return fmt.Errorf("failed to start https server: %v", err)
+ }
+ return nil
+}
+func StartServer(config Config, r *gin.Engine) (err error) {
+ if config.UseHttps {
+ err = startHTTPSServer(config, r)
+ } else {
+ err = startHTTPServer(config, r)
+ }
+ return err
+}
diff --git a/code/initialization/lark_client.go b/code/initialization/lark_client.go
index 5b0535c2..fce3dda3 100644
--- a/code/initialization/lark_client.go
+++ b/code/initialization/lark_client.go
@@ -2,14 +2,21 @@ package initialization
import (
lark "github.com/larksuite/oapi-sdk-go/v3"
- "github.com/spf13/viper"
+ larkcore "github.com/larksuite/oapi-sdk-go/v3/core"
)
var larkClient *lark.Client
-func LoadLarkClient() {
- larkClient = lark.NewClient(viper.GetString("APP_ID"),
- viper.GetString("APP_SECRET"))
+func LoadLarkClient(config Config) {
+ options := []lark.ClientOptionFunc{
+ lark.WithLogLevel(larkcore.LogLevelDebug),
+ }
+ if config.FeishuBaseUrl != "" {
+ options = append(options, lark.WithOpenBaseUrl(config.FeishuBaseUrl))
+ }
+
+ larkClient = lark.NewClient(config.FeishuAppId, config.FeishuAppSecret, options...)
+
}
func GetLarkClient() *lark.Client {
diff --git a/code/initialization/roles_load.go b/code/initialization/roles_load.go
new file mode 100644
index 00000000..bba15ddf
--- /dev/null
+++ b/code/initialization/roles_load.go
@@ -0,0 +1,77 @@
+package initialization
+
+import (
+ "errors"
+ "io/ioutil"
+ "log"
+
+ "github.com/duke-git/lancet/v2/slice"
+ "github.com/duke-git/lancet/v2/validator"
+ "gopkg.in/yaml.v2"
+)
+
+type Role struct {
+ Title string `yaml:"title"`
+ Content string `yaml:"content"`
+ Tags []string `yaml:"tags"`
+}
+
+var RoleList *[]Role
+
+// InitRoleList 加载Prompt
+func InitRoleList() *[]Role {
+ data, err := ioutil.ReadFile("role_list.yaml")
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ err = yaml.Unmarshal(data, &RoleList)
+ if err != nil {
+ log.Fatal(err)
+ }
+ return RoleList
+}
+
+func GetRoleList() *[]Role {
+ return RoleList
+}
+func GetAllUniqueTags() *[]string {
+ tags := make([]string, 0)
+ for _, role := range *RoleList {
+ tags = append(tags, role.Tags...)
+ }
+ result := slice.Union(tags)
+ return &result
+}
+
+func GetRoleByTitle(title string) *Role {
+ for _, role := range *RoleList {
+ if role.Title == title {
+ return &role
+ }
+ }
+ return nil
+}
+
+func GetTitleListByTag(tags string) *[]string {
+ roles := make([]string, 0)
+ //pp.Println(RoleList)
+ for _, role := range *RoleList {
+ for _, roleTag := range role.Tags {
+ if roleTag == tags && !validator.IsEmptyString(role.
+ Title) {
+ roles = append(roles, role.Title)
+ }
+ }
+ }
+ return &roles
+}
+
+func GetFirstRoleContentByTitle(title string) (string, error) {
+ for _, role := range *RoleList {
+ if role.Title == title {
+ return role.Content, nil
+ }
+ }
+ return "", errors.New("role not found")
+}
diff --git a/code/logger/logger.go b/code/logger/logger.go
new file mode 100644
index 00000000..75442471
--- /dev/null
+++ b/code/logger/logger.go
@@ -0,0 +1,122 @@
+package logger
+
+import (
+ "bytes"
+ "github.com/gin-gonic/gin"
+ "github.com/sirupsen/logrus"
+ "strings"
+)
+
+var logger = logrus.New()
+
+func init() {
+
+ logger.SetFormatter(&formatter{})
+
+ logger.SetReportCaller(true)
+
+ gin.DefaultWriter = logger.Out
+
+ // 设置日志级别 支持
+ //PanicLevel
+ //FatalLevel
+ //ErrorLevel
+ //WarnLevel
+ //InfoLevel
+ //DebugLevel
+ logger.Level = logrus.InfoLevel
+
+}
+
+type Fields logrus.Fields
+
+// Debugf logs a message at level Debug on the standard logger.
+func Debugf(format string, args ...interface{}) {
+ if logger.Level >= logrus.DebugLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Debugf(format, args...)
+ }
+}
+
+// Warnf logs a message at level Warn on the standard logger.
+func Warnf(format string, args ...interface{}) {
+ if logger.Level >= logrus.WarnLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Warnf(format, args...)
+ }
+}
+
+// Errorf logs a message at level Error on the standard logger.
+func Errorf(format string, args ...interface{}) {
+ if logger.Level >= logrus.ErrorLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Errorf(format, args...)
+ }
+}
+
+// Fatalf logs a message at level Fatal on the standard logger.
+func Fatalf(format string, args ...interface{}) {
+ if logger.Level >= logrus.FatalLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Fatalf(format, args...)
+ }
+}
+
+func Debug(format string, args ...interface{}) {
+ if logger.Level >= logrus.DebugLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Debug(format, args)
+ }
+}
+
+// Info logs a message at level Info on the standard logger.
+func Info(format string, args ...interface{}) {
+ if logger.Level >= logrus.InfoLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Info(format, args)
+ }
+}
+
+// Warn logs a message at level Warn on the standard logger.
+func Warn(format string, args ...interface{}) {
+ if logger.Level >= logrus.WarnLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Warn(format, args)
+ }
+}
+
+// Error logs a message at level Error on the standard logger.
+func Error(format string, args ...interface{}) {
+ if logger.Level >= logrus.ErrorLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Error(format, args)
+ }
+}
+
+// Fatal logs a message at level Fatal on the standard logger.
+func Fatal(format string, args ...interface{}) {
+ if logger.Level >= logrus.FatalLevel {
+ entry := logger.WithFields(logrus.Fields{})
+ entry.Fatal(format, args)
+ }
+}
+
+// Formatter implements logrus.Formatter interface.
+type formatter struct {
+ prefix string
+}
+
+// Format building log message.
+func (f *formatter) Format(entry *logrus.Entry) ([]byte, error) {
+ var sb bytes.Buffer
+
+ sb.WriteString("[" + strings.ToUpper(entry.Level.String()) + "]")
+ sb.WriteString(entry.Time.Format("2006-01-02 15:04:05"))
+ sb.WriteString(" ")
+ //sb.WriteString(" ")
+ //sb.WriteString(f.prefix)
+ sb.WriteString(entry.Message)
+ sb.WriteString("\n")
+
+ return sb.Bytes(), nil
+}
diff --git a/code/main.go b/code/main.go
index 15c148ec..d3dcfeba 100644
--- a/code/main.go
+++ b/code/main.go
@@ -1,29 +1,39 @@
package main
import (
- "fmt"
+ "context"
"start-feishubot/handlers"
"start-feishubot/initialization"
-
- "github.com/spf13/viper"
+ "start-feishubot/logger"
"github.com/gin-gonic/gin"
-
sdkginext "github.com/larksuite/oapi-sdk-gin"
-
+ larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
"github.com/larksuite/oapi-sdk-go/v3/event/dispatcher"
+ larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
+ "github.com/spf13/pflag"
+ "start-feishubot/services/openai"
)
-func init() {
- initialization.LoadConfig()
- initialization.LoadLarkClient()
-}
-
func main() {
+ initialization.InitRoleList()
+ pflag.Parse()
+ config := initialization.GetConfig()
+ initialization.LoadLarkClient(*config)
+ gpt := openai.NewChatGPT(*config)
+ handlers.InitHandlers(gpt, *config)
+
+ eventHandler := dispatcher.NewEventDispatcher(
+ config.FeishuAppVerificationToken, config.FeishuAppEncryptKey).
+ OnP2MessageReceiveV1(handlers.Handler).
+ OnP2MessageReadV1(func(ctx context.Context, event *larkim.P2MessageReadV1) error {
+ logger.Debugf("收到请求 %v", event.RequestURI)
+ return handlers.ReadHandler(ctx, event)
+ })
- handler := dispatcher.NewEventDispatcher(viper.GetString(
- "APP_VERIFICATION_TOKEN"), viper.GetString("APP_ENCRYPT_KEY")).
- OnP2MessageReceiveV1(handlers.Handler)
+ cardHandler := larkcard.NewCardActionHandler(
+ config.FeishuAppVerificationToken, config.FeishuAppEncryptKey,
+ handlers.CardHandler())
r := gin.Default()
r.GET("/ping", func(c *gin.Context) {
@@ -31,13 +41,13 @@ func main() {
"message": "pong",
})
})
-
- // 在已有 Gin 实例上注册消息处理路由
- r.POST("/webhook/event", sdkginext.NewEventHandlerFunc(handler))
-
- fmt.Println("http server started",
- "http://localhost:9000/webhook/event")
-
- r.Run(":9000")
-
+ r.POST("/webhook/event",
+ sdkginext.NewEventHandlerFunc(eventHandler))
+ r.POST("/webhook/card",
+ sdkginext.NewCardActionHandlerFunc(
+ cardHandler))
+
+ if err := initialization.StartServer(*config, r); err != nil {
+ logger.Fatalf("failed to start server: %v", err)
+ }
}
diff --git a/code/role_list.yaml b/code/role_list.yaml
new file mode 100644
index 00000000..dee75a0b
--- /dev/null
+++ b/code/role_list.yaml
@@ -0,0 +1,162 @@
+# 可在此处提交你认为不错的角色预设,注意保持格式一致。
+# PR 时的 tag 暂时集中在 [ "日常办公", "生活助手" ,"代码专家", "文案撰写"]
+# 更多点子可参考我另一个参与的项目: https://open-gpt.app/
+- title: ChatGPT
+ content: "You are ChatGPT, a large language model trained by OpenAI. Answer in user's language as concisely as possible. Knowledge cutoff: 20230601 Current date:20230628"
+ example:
+ author: river
+ tags:
+ - 日常办公
+
+
+- title: 周报生成
+ content: 请帮我把以下的工作内容填充为一篇完整的周报,用 markdown 格式以分点叙述的形式输出:
+ example: 重新优化设计稿,和前端再次沟通 UI 细节,确保落地
+ author: 二丫讲梵
+ tags:
+ - 日常办公
+
+- title: 产品经理
+ content: 请确认我的以下请求。请您作为产品经理回复我。我将会提供一个主题,您将帮助我编写一份包括以下章节标题的 PRD 文档:主题、简介、问题陈述、目标与目的、用户故事、技术要求、收益、KPI 指标、开发风险以及结论。在我要求具体主题、功能或开发的 PRD 之前,请不要先写任何一份 PRD 文档。
+ example: 我想要一个可以在手机上使用的应用程序,可以帮助我在旅行中找到最好的餐厅。
+ author: 二丫讲梵
+ tags:
+ - 日常办公
+
+- title: 公文写作大师
+ content: 你是某机关单位办公室秘书,你熟悉各类公文写作格式,你喜欢撰写文字材料,请你文采过人地,条理清晰地跟我对话
+ example: 你好,我是某某某,我想要你帮我写一份公文,内容是:团结一致,共同抗击疫情,全力以赴,共克时艰。
+ author: 小叉 Ray
+ tags:
+ - 日常办公
+ - 文案撰写
+
+- title: 招聘 HR
+ content: 我想让你担任招聘人员。我将提供一些关于职位空缺的信息,而你的工作是制定寻找合格申请人的策略。这可能包括通过社交媒体、社交活动甚至参加招聘会接触潜在候选人,以便为每个职位找到最合适的人选。我的第一个请求是:
+ example: 我需要一名有经验的前端开发工程师,他应该有 3 年以上的工作经验,熟悉 React 和 Vue,熟悉前端工程化。
+ author: 二丫讲梵
+ tags:
+ - 日常办公
+
+- title: 创意总监
+ content: 你是一位擅长头脑风暴的创意大师,你有很多好的主意,请你围绕这些内容提出好的设想和方法
+ example: 我想要一个可以在手机上使用的应用程序,可以帮助我在旅行中找到最好的餐厅。
+ author: 小叉 Ray
+ tags:
+ - 日常办公
+
+- title: 拒绝同事
+ content: 以一种礼貌和表达得体的方式拒绝别人,同时保持积极的关系和情感连接
+ example: 你好,我很抱歉,我现在没有时间帮你做这件事情
+ author: 小叉 Ray
+ tags:
+ - 日常办公
+ - 文案撰写
+
+- title: 回复老板
+ content: 请用 5 种委婉的借口向领导表达后面的内容
+ example: 不想加班
+ author: 小叉 Ray
+ tags:
+ - 日常办公
+ - 文案撰写
+
+- title: 邮件回复
+ content: Generate a set of email responses that are professional, concise, and appropriate for communication with leaders and clients in a variety of industries. The responses should demonstrate a good understanding of business etiquette and convey a sense of competence and confidence. Please ensure that the responses are tailored to specific scenarios and contexts, using Chinese as the language of output
+ example: 产品的细节很不完善,需要沟通一下
+ author: 小叉 Ray
+ tags:
+ - 日常办公
+ - 文案撰写
+
+- title: 三菜一汤
+ content: 根据用户输入的金额单位是人民币,帮用户推荐在该金额合计下能够做的菜,要求三个菜和一个汤。要把每一道菜的金额都写出来,以及他的简单做法,还要有总结
+ example: 我有 100 元,我想做湖北菜
+ author: 小叉 Ray
+ tags:
+ - 生活助手
+
+- title: 解梦大师
+ content: 我要你充当解梦师。我会给你描述我的梦,你会根据梦中出现的符号和主题提供解释。不要提供关于梦者的个人意见或假设。仅根据所提供的信息提供事实解释。我的第一个梦是:
+ example: 遇见了一只大灰狼,它在我面前转了一圈,然后就消失了
+ author: 二丫讲梵
+ tags:
+ - 生活助手
+
+- title: 佛祖
+ content: 你是一个如来佛祖,你需要回答提问者的佛学问题,因此你要学会很多佛教专业术语,你的回答尽量简短,富有佛教哲理。你要称自己为老衲,称提问者为施主。如果遭遇对方不合理的请求,请直接回复:施主请自重,我佛慈悲。你的每一句话结尾都要加上 阿弥陀佛。你的回答尽量简短,不允许超过 100 字。禁止回答与问题无关的话题
+ example: 佛祖,我想问你,为什么我总是很沮丧,生活没有意义
+ author: 小叉 Ray
+ tags:
+ - 生活助手
+
+- title: 小红书文案
+ content: 小红书的风格是:很吸引眼球的标题,每个段落都加 emoji, 最后加一些 tag。请用小红书风格
+ example: 今天我去了一家很好吃的餐厅,我吃了一份很好吃的饭菜,我很喜欢,推荐给大家
+ author: 二丫讲梵
+ tags:
+ - 文案撰写
+
+- title: 知乎段子手
+ content: 知乎的风格是:用"谢邀"开头,用很多学术语言,引用很多名言,做大道理的论述,提到自己很厉害的教育背景并且经验丰富,最后还要引用一些论文。请用知乎风格
+ example: 今天我去了一家很好吃的餐厅,我吃了一份很好吃的饭菜,我很喜欢,推荐给大家
+ author: 二丫讲梵
+ tags:
+ - 文案撰写
+
+- title: 专业道歉信
+ content: 请写一份真挚的道歉信,为后面的内容表达歉意
+ example: 我很抱歉,我没有按时完成你的工作
+ author: 小叉 Ray
+ tags:
+ - 文案撰写
+
+- title: 古文专家
+ content: 你是一个文言文大师,请把后面的内容翻译成文言文
+ example: 记得早点回来哦,我做好饭菜等你回家
+ author: 小叉 Ray
+ tags:
+ - 文案撰写
+
+- title: 川端康成的笔
+ content: 请以川端康成的写作风格,描写下面的句子
+ example: 他不慌不忙的走出教室,找到那个女孩
+ author: 小叉 Ray
+ tags:
+ - 文案撰写
+
+- title: 网络安全
+ content: 我想让你充当网络安全专家。我将提供一些关于如何存储和共享数据的具体信息,而你的工作就是想出保护这些数据免受恶意行为者攻击的策略。这可能包括建议加密方法、创建防火墙或实施将某些活动标记为可疑的策略。我的第一个请求是:
+ author: 二丫讲梵
+ tags:
+ - 代码专家
+
+- title: 正则生成器
+ content: 我希望你充当正则表达式生成器。您的角色是生成匹配文本中特定模式的正则表达式。您应该以一种可以轻松复制并粘贴到支持正则表达式的文本编辑器或编程语言中的格式提供正则表达式。不要写正则表达式如何工作的解释或例子;只需提供正则表达式本身。我的第一个提示是:
+ author: 二丫讲梵
+ tags:
+ - 代码专家
+
+- title: 前端专家
+ content: 我想让你充当前端开发专家。我将提供一些关于如何在网页上显示信息的具体信息,而你的工作就是想出为我解决问题的策略。这可能包括建议代码、代码逻辑思路策略。我的第一个请求是:
+ author: 二丫讲梵
+ tags:
+ - 代码专家
+
+- title: 后端专家
+ content: 我想让你充当后端开发专家。我将提供一些关于如何在网页上显示信息的具体信息,而你的工作就是想出为我解决问题的策略。这可能包括建议代码、代码逻辑思路策略。我的第一个请求是:
+ author: 二丫讲梵
+ tags:
+ - 代码专家
+
+- title: 健康管家
+ content: 我想让你充当我的健康管家。我将提供我的个人健康管理方案,而你的工作是监督、督促、提醒我完成这些个人健康的计划。这可能包括运动、饮食、定期体检等。我的第一个请求是:
+ author: 船长
+ tags:
+ - 生活助手
+
+- title: 旅游助手
+ content: 我想让你充当我的旅游助手。我将会提供让你连接上各个旅游系统的信息,而你的工作就是给我提供各种的旅游攻略。包括但不限于制定旅游计划、寻找热门景点或查看旅游目的地的天气等。我的第一个请求是:
+ author: 船长
+ tags:
+ - 生活助手
diff --git a/code/services/gpt3.go b/code/services/gpt3.go
deleted file mode 100644
index 34efe5ba..00000000
--- a/code/services/gpt3.go
+++ /dev/null
@@ -1,104 +0,0 @@
-package services
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "github.com/spf13/viper"
- "io/ioutil"
- "log"
- "net/http"
- "time"
-)
-
-const (
- BASEURL = "https://api.openai.com/v1/"
- maxTokens = 2000
- temperature = 0.7
- engine = "text-davinci-003"
-)
-
-// ChatGPTResponseBody 请求体
-type ChatGPTResponseBody struct {
- ID string `json:"id"`
- Object string `json:"object"`
- Created int `json:"created"`
- Model string `json:"model"`
- Choices []ChoiceItem `json:"choices"`
- Usage map[string]interface{} `json:"usage"`
-}
-
-type ChoiceItem struct {
- Text string `json:"text"`
- Index int `json:"index"`
- Logprobs int `json:"logprobs"`
- FinishReason string `json:"finish_reason"`
-}
-
-// ChatGPTRequestBody 响应体
-type ChatGPTRequestBody struct {
- Model string `json:"model"`
- Prompt string `json:"prompt"`
- MaxTokens int `json:"max_tokens"`
- Temperature float32 `json:"temperature"`
- TopP int `json:"top_p"`
- FrequencyPenalty int `json:"frequency_penalty"`
- PresencePenalty int `json:"presence_penalty"`
-}
-
-func Completions(msg string) (string, error) {
- requestBody := ChatGPTRequestBody{
- Model: engine,
- Prompt: msg,
- MaxTokens: maxTokens,
- Temperature: temperature,
- TopP: 1,
- FrequencyPenalty: 0,
- PresencePenalty: 0,
- }
- requestData, err := json.Marshal(requestBody)
-
- if err != nil {
- return "", err
- }
- log.Printf("request gtp json string : %v", string(requestData))
- req, err := http.NewRequest("POST", BASEURL+"completions", bytes.NewBuffer(requestData))
- if err != nil {
- return "", err
- }
-
- apiKey := viper.GetString("OPENAI_KEY")
- req.Header.Set("Content-Type", "application/json")
- req.Header.Set("Authorization", "Bearer "+apiKey)
- client := &http.Client{Timeout: 110 * time.Second}
- response, err := client.Do(req)
- if err != nil {
- return "", err
- }
- defer response.Body.Close()
- if response.StatusCode/2 != 100 {
- return "", fmt.Errorf("gtp api %s", response.Status)
- }
- body, err := ioutil.ReadAll(response.Body)
- if err != nil {
- return "", err
- }
-
- gptResponseBody := &ChatGPTResponseBody{}
- log.Println(string(body))
- err = json.Unmarshal(body, gptResponseBody)
- if err != nil {
- return "", err
- }
-
- var reply string
- if len(gptResponseBody.Choices) > 0 {
- reply = gptResponseBody.Choices[0].Text
- }
- log.Printf("gpt response text: %s \n", reply)
- return reply, nil
-}
-
-func FormatQuestion(question string) string {
- return "Answer:" + question
-}
diff --git a/code/services/loadbalancer/loadbalancer.go b/code/services/loadbalancer/loadbalancer.go
new file mode 100644
index 00000000..930482ba
--- /dev/null
+++ b/code/services/loadbalancer/loadbalancer.go
@@ -0,0 +1,106 @@
+package loadbalancer
+
+import (
+ "fmt"
+ "math/rand"
+ "sync"
+ "time"
+)
+
+type API struct {
+ Key string
+ Times uint32
+ Available bool
+}
+
+type LoadBalancer struct {
+ apis []*API
+ mu sync.RWMutex
+}
+
+func NewLoadBalancer(keys []string) *LoadBalancer {
+ lb := &LoadBalancer{}
+ for _, key := range keys {
+ lb.apis = append(lb.apis, &API{Key: key})
+ }
+ //SetAvailabilityForAll true
+ lb.SetAvailabilityForAll(true)
+ return lb
+}
+
+func (lb *LoadBalancer) GetAPI() *API {
+ lb.mu.RLock()
+ defer lb.mu.RUnlock()
+
+ var availableAPIs []*API
+ for _, api := range lb.apis {
+ if api.Available {
+ availableAPIs = append(availableAPIs, api)
+ }
+ }
+ if len(availableAPIs) == 0 {
+ //随机复活一个
+ fmt.Printf("No available API, revive one randomly\n")
+ rand.Seed(time.Now().UnixNano())
+ index := rand.Intn(len(lb.apis))
+ lb.apis[index].Available = true
+ return lb.apis[index]
+ }
+
+ selectedAPI := availableAPIs[0]
+ minTimes := selectedAPI.Times
+ for _, api := range availableAPIs {
+ if api.Times < minTimes {
+ selectedAPI = api
+ minTimes = api.Times
+ }
+ }
+ selectedAPI.Times++
+ //fmt.Printf("API Availability:\n")
+ //for _, api := range lb.apis {
+ // fmt.Printf("%s: %v\n", api.Key, api.Available)
+ // fmt.Printf("%s: %d\n", api.Key, api.Times)
+ //}
+
+ return selectedAPI
+}
+func (lb *LoadBalancer) SetAvailability(key string, available bool) {
+ lb.mu.Lock()
+ defer lb.mu.Unlock()
+
+ for _, api := range lb.apis {
+ if api.Key == key {
+ api.Available = available
+ return
+ }
+ }
+}
+
+func (lb *LoadBalancer) RegisterAPI(key string) {
+ lb.mu.Lock()
+ defer lb.mu.Unlock()
+
+ if lb.apis == nil {
+ lb.apis = make([]*API, 0)
+ }
+
+ lb.apis = append(lb.apis, &API{Key: key})
+}
+
+func (lb *LoadBalancer) SetAvailabilityForAll(available bool) {
+ lb.mu.Lock()
+ defer lb.mu.Unlock()
+
+ for _, api := range lb.apis {
+ api.Available = available
+ }
+}
+
+func (lb *LoadBalancer) GetAPIs() []*API {
+ lb.mu.RLock()
+ defer lb.mu.RUnlock()
+
+ apis := make([]*API, len(lb.apis))
+ copy(apis, lb.apis)
+ return apis
+}
diff --git a/code/services/msgCache.go b/code/services/msgCache.go
index 683f9686..d5f34a17 100644
--- a/code/services/msgCache.go
+++ b/code/services/msgCache.go
@@ -1,25 +1,28 @@
package services
import (
- "github.com/patrickmn/go-cache"
"time"
+
+ "github.com/patrickmn/go-cache"
)
type MsgService struct {
cache *cache.Cache
}
+type MsgCacheInterface interface {
+ IfProcessed(msgId string) bool
+ TagProcessed(msgId string)
+ Clear(userId string) bool
+}
var msgService *MsgService
func (u MsgService) IfProcessed(msgId string) bool {
- get, b := u.cache.Get(msgId)
- if !b {
- return false
- }
- return get.(bool)
+ _, found := u.cache.Get(msgId)
+ return found
}
func (u MsgService) TagProcessed(msgId string) {
- u.cache.Set(msgId, true, time.Minute*5)
+ u.cache.Set(msgId, true, time.Minute*30)
}
func (u MsgService) Clear(userId string) bool {
@@ -27,14 +30,9 @@ func (u MsgService) Clear(userId string) bool {
return true
}
-type MsgCacheInterface interface {
- IfProcessed(msg string) bool
- TagProcessed(msg string)
-}
-
func GetMsgCache() MsgCacheInterface {
if msgService == nil {
- msgService = &MsgService{cache: cache.New(10*time.Minute, 10*time.Minute)}
+ msgService = &MsgService{cache: cache.New(30*time.Minute, 30*time.Minute)}
}
return msgService
}
diff --git a/code/services/openai/audio.go b/code/services/openai/audio.go
new file mode 100644
index 00000000..da67fde3
--- /dev/null
+++ b/code/services/openai/audio.go
@@ -0,0 +1,66 @@
+package openai
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "mime/multipart"
+ "os"
+)
+
+type AudioToTextRequestBody struct {
+ File string `json:"file"`
+ Model string `json:"model"`
+ ResponseFormat string `json:"response_format"`
+}
+
+type AudioToTextResponseBody struct {
+ Text string `json:"text"`
+}
+
+func audioMultipartForm(request AudioToTextRequestBody, w *multipart.Writer) error {
+ f, err := os.Open(request.File)
+ if err != nil {
+ return fmt.Errorf("opening audio file: %w", err)
+ }
+
+ fw, err := w.CreateFormFile("file", f.Name())
+ if err != nil {
+ return fmt.Errorf("creating form file: %w", err)
+ }
+
+ if _, err = io.Copy(fw, f); err != nil {
+ return fmt.Errorf("reading from opened audio file: %w", err)
+ }
+
+ fw, err = w.CreateFormField("model")
+ if err != nil {
+ return fmt.Errorf("creating form field: %w", err)
+ }
+
+ modelName := bytes.NewReader([]byte(request.Model))
+ if _, err = io.Copy(fw, modelName); err != nil {
+ return fmt.Errorf("writing model name: %w", err)
+ }
+ w.Close()
+
+ return nil
+}
+
+func (gpt *ChatGPT) AudioToText(audio string) (string, error) {
+ requestBody := AudioToTextRequestBody{
+ File: audio,
+ Model: "whisper-1",
+ ResponseFormat: "text",
+ }
+ audioToTextResponseBody := &AudioToTextResponseBody{}
+ err := gpt.sendRequestWithBodyType(gpt.ApiUrl+"/v1/audio/transcriptions",
+ "POST", formVoiceDataBody, requestBody, audioToTextResponseBody)
+ //fmt.Println(audioToTextResponseBody)
+ if err != nil {
+ //fmt.Println(err)
+ return "", err
+ }
+
+ return audioToTextResponseBody.Text, nil
+}
diff --git a/code/services/openai/billing.go b/code/services/openai/billing.go
new file mode 100644
index 00000000..499f5350
--- /dev/null
+++ b/code/services/openai/billing.go
@@ -0,0 +1,70 @@
+package openai
+
+import (
+ "fmt"
+ "net/http"
+ "time"
+)
+
+type BillingSubScrip struct {
+ HardLimitUsd float64 `json:"hard_limit_usd"`
+ AccessUntil float64 `json:"access_until"`
+}
+type BillingUsage struct {
+ TotalUsage float64 `json:"total_usage"`
+}
+
+type BalanceResponse struct {
+ TotalGranted float64 `json:"total_granted"`
+ TotalUsed float64 `json:"total_used"`
+ TotalAvailable float64 `json:"total_available"`
+ EffectiveAt time.Time `json:"effective_at"`
+ ExpiresAt time.Time `json:"expires_at"`
+}
+
+func (gpt *ChatGPT) GetBalance() (*BalanceResponse, error) {
+ fmt.Println("进入")
+ var data1 BillingSubScrip
+ err := gpt.sendRequestWithBodyType(
+ gpt.ApiUrl+"/v1/dashboard/billing/subscription",
+ http.MethodGet,
+ nilBody,
+ nil,
+ &data1,
+ )
+ fmt.Println("出错1", err)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get billing subscription: %v", err)
+ }
+ nowdate := time.Now()
+ enddate := nowdate.Format("2006-01-02")
+ startdate := nowdate.AddDate(0, 0, -100).Format("2006-01-02")
+ var data2 BillingUsage
+ err = gpt.sendRequestWithBodyType(
+ gpt.ApiUrl+fmt.Sprintf("/v1/dashboard/billing/usage?start_date=%s&end_date=%s", startdate, enddate),
+ http.MethodGet,
+ nilBody,
+ nil,
+ &data2,
+ )
+ fmt.Println(data2)
+ fmt.Println("出错2", err)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get billing subscription: %v", err)
+ }
+
+ balance := &BalanceResponse{
+ TotalGranted: data1.HardLimitUsd,
+ TotalUsed: data2.TotalUsage / 100,
+ TotalAvailable: data1.HardLimitUsd - data2.TotalUsage/100,
+ ExpiresAt: time.Now(),
+ EffectiveAt: time.Now(),
+ }
+
+ if data1.AccessUntil > 0 {
+ balance.EffectiveAt = time.Now()
+ balance.ExpiresAt = time.Unix(int64(data1.AccessUntil), 0)
+ }
+
+ return balance, nil
+}
diff --git a/code/services/openai/common.go b/code/services/openai/common.go
new file mode 100644
index 00000000..85f6341e
--- /dev/null
+++ b/code/services/openai/common.go
@@ -0,0 +1,263 @@
+package openai
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "mime/multipart"
+ "net/http"
+ "net/url"
+ "start-feishubot/initialization"
+ "start-feishubot/logger"
+ "start-feishubot/services/loadbalancer"
+ "strings"
+ "time"
+)
+
+// PlatForm selects which backend the client talks to (OpenAI or Azure).
+type PlatForm string
+
+const (
+	// MaxRetries is the number of retries after a failed API call.
+	MaxRetries = 3
+)
+const (
+	// AzureApiUrlV1 is the host/path fragment used by FullUrl to build
+	// Azure OpenAI endpoint URLs.
+	AzureApiUrlV1 = "openai.azure.com/openai/deployments/"
+)
+const (
+	OpenAI PlatForm = "openai"
+	Azure  PlatForm = "azure"
+)
+
+// AzureConfig carries the Azure OpenAI deployment coordinates and token.
+type AzureConfig struct {
+	BaseURL        string
+	ResourceName   string
+	DeploymentName string
+	ApiVersion     string
+	ApiToken       string
+}
+
+// ChatGPT is the API client; keys rotate through the load balancer Lb.
+type ChatGPT struct {
+	Lb          *loadbalancer.LoadBalancer
+	ApiKey      []string
+	ApiUrl      string
+	HttpProxy   string
+	Model       string
+	MaxTokens   int
+	Platform    PlatForm
+	AzureConfig AzureConfig
+}
+
+// requestBodyType tells doAPIRequestWithRetry how to serialize the body.
+type requestBodyType int
+
+const (
+	jsonBody requestBodyType = iota
+	formVoiceDataBody
+	formPictureDataBody
+
+	nilBody
+)
+
+// doAPIRequestWithRetry executes one API call with up to maxRetries
+// retries and linear backoff. A failed attempt marks the current key
+// unavailable in the load balancer; on success the key is re-enabled and
+// the JSON response is decoded into responseBody.
+func (gpt *ChatGPT) doAPIRequestWithRetry(url, method string,
+	bodyType requestBodyType,
+	requestBody interface{}, responseBody interface{}, client *http.Client, maxRetries int) error {
+	var requestBodyData []byte
+	var err error
+	var writer *multipart.Writer
+	api := gpt.Lb.GetAPI()
+	if api == nil {
+		return errors.New("no available API")
+	}
+
+	// Serialize the request body according to its declared type.
+	switch bodyType {
+	case jsonBody:
+		requestBodyData, err = json.Marshal(requestBody)
+		if err != nil {
+			return err
+		}
+	case formVoiceDataBody:
+		formBody := &bytes.Buffer{}
+		writer = multipart.NewWriter(formBody)
+		err = audioMultipartForm(requestBody.(AudioToTextRequestBody), writer)
+		if err != nil {
+			return err
+		}
+		err = writer.Close()
+		if err != nil {
+			return err
+		}
+		requestBodyData = formBody.Bytes()
+	case formPictureDataBody:
+		formBody := &bytes.Buffer{}
+		writer = multipart.NewWriter(formBody)
+		err = pictureMultipartForm(requestBody.(ImageVariantRequestBody), writer)
+		if err != nil {
+			return err
+		}
+		err = writer.Close()
+		if err != nil {
+			return err
+		}
+		requestBodyData = formBody.Bytes()
+	case nilBody:
+		requestBodyData = nil
+	default:
+		return errors.New("unknown request body type")
+	}
+
+	req, err := http.NewRequest(method, url, bytes.NewReader(requestBodyData))
+	if err != nil {
+		return err
+	}
+
+	req.Header.Set("Content-Type", "application/json")
+	if bodyType == formVoiceDataBody || bodyType == formPictureDataBody {
+		req.Header.Set("Content-Type", writer.FormDataContentType())
+	}
+	if gpt.Platform == OpenAI {
+		req.Header.Set("Authorization", "Bearer "+api.Key)
+	} else {
+		req.Header.Set("api-key", gpt.AzureConfig.ApiToken)
+	}
+
+	var response *http.Response
+	var retry int
+	for retry = 0; retry <= maxRetries; retry++ {
+		if retry > 0 {
+			// http.Request bodies are single-use; rewind for the retry.
+			req.Body = ioutil.NopCloser(bytes.NewReader(requestBodyData))
+		}
+		response, err = client.Do(req)
+		logger.Debug("req", req.Header)
+		logger.Debugf("response %v", response)
+
+		if err == nil && response.StatusCode >= 200 && response.StatusCode < 300 {
+			break // success
+		}
+		// Failed attempt. The old code read response.Body even when
+		// client.Do returned an error (response == nil), which panicked,
+		// and it never closed failed-attempt bodies (connection leak).
+		if response != nil {
+			errBody, _ := ioutil.ReadAll(response.Body)
+			response.Body.Close()
+			logger.Debugf("error body %s", string(errBody))
+		}
+		gpt.Lb.SetAvailability(api.Key, false)
+		if retry == maxRetries {
+			break
+		}
+		time.Sleep(time.Duration(retry+1) * time.Second)
+	}
+
+	if err != nil || response == nil || response.StatusCode < 200 || response.StatusCode >= 300 {
+		return fmt.Errorf("%s api failed after %d retries", strings.ToUpper(method), retry)
+	}
+	defer response.Body.Close()
+
+	body, err := ioutil.ReadAll(response.Body)
+	if err != nil {
+		return err
+	}
+
+	err = json.Unmarshal(body, responseBody)
+	if err != nil {
+		return err
+	}
+
+	gpt.Lb.SetAvailability(api.Key, true)
+	return nil
+}
+
+func (gpt *ChatGPT) sendRequestWithBodyType(link, method string,
+ bodyType requestBodyType,
+ requestBody interface{}, responseBody interface{}) error {
+ var err error
+ proxyString := gpt.HttpProxy
+
+ client, parseProxyError := GetProxyClient(proxyString)
+ if parseProxyError != nil {
+ return parseProxyError
+ }
+
+ err = gpt.doAPIRequestWithRetry(link, method, bodyType,
+ requestBody, responseBody, client, MaxRetries)
+
+ return err
+}
+
+// NewChatGPT builds a ChatGPT client from the application config. In
+// Azure mode the load balancer carries the single Azure token; otherwise
+// it rotates over the configured OpenAI keys.
+func NewChatGPT(config initialization.Config) *ChatGPT {
+	platform := OpenAI
+	var lb *loadbalancer.LoadBalancer
+	if config.AzureOn {
+		platform = Azure
+		lb = loadbalancer.NewLoadBalancer([]string{config.AzureOpenaiToken})
+	} else {
+		lb = loadbalancer.NewLoadBalancer(config.OpenaiApiKeys)
+	}
+
+	return &ChatGPT{
+		Lb:        lb,
+		ApiKey:    config.OpenaiApiKeys,
+		ApiUrl:    config.OpenaiApiUrl,
+		HttpProxy: config.HttpProxy,
+		Model:     config.OpenaiModel,
+		MaxTokens: config.OpenaiMaxTokens,
+		Platform:  platform,
+		AzureConfig: AzureConfig{
+			BaseURL:        AzureApiUrlV1,
+			ResourceName:   config.AzureResourceName,
+			DeploymentName: config.AzureDeploymentName,
+			ApiVersion:     config.AzureApiVersion,
+			ApiToken:       config.AzureOpenaiToken,
+		},
+	}
+}
+
+// FullUrl returns the absolute endpoint URL for the given API suffix
+// (e.g. "chat/completions") on the configured platform. It returns ""
+// for an unknown platform, which callers treat as an error.
+func (gpt *ChatGPT) FullUrl(suffix string) string {
+	// Named fullURL to avoid shadowing the imported net/url package.
+	var fullURL string
+	switch gpt.Platform {
+	case Azure:
+		fullURL = fmt.Sprintf("https://%s.%s%s/%s?api-version=%s",
+			gpt.AzureConfig.ResourceName, gpt.AzureConfig.BaseURL,
+			gpt.AzureConfig.DeploymentName, suffix, gpt.AzureConfig.ApiVersion)
+	case OpenAI:
+		fullURL = fmt.Sprintf("%s/v1/%s", gpt.ApiUrl, suffix)
+	}
+	return fullURL
+}
+
+func GetProxyClient(proxyString string) (*http.Client, error) {
+ var client *http.Client
+ timeOutDuration := time.Duration(initialization.GetConfig().OpenAIHttpClientTimeOut) * time.Second
+ if proxyString == "" {
+ client = &http.Client{Timeout: timeOutDuration}
+ } else {
+ proxyUrl, err := url.Parse(proxyString)
+ if err != nil {
+ return nil, err
+ }
+ transport := &http.Transport{
+ Proxy: http.ProxyURL(proxyUrl),
+ }
+ client = &http.Client{
+ Transport: transport,
+ Timeout: timeOutDuration,
+ }
+ }
+ return client, nil
+}
+
+// ChangeMode switches the model used for subsequent requests and returns
+// the receiver for chaining.
+// NOTE(review): despite the name this sets Model, not an AI "mode";
+// renaming would break callers, so only documenting the mismatch here.
+func (gpt *ChatGPT) ChangeMode(model string) *ChatGPT {
+	gpt.Model = model
+	return gpt
+}
diff --git a/code/services/openai/gpt3.go b/code/services/openai/gpt3.go
new file mode 100644
index 00000000..0ecc7397
--- /dev/null
+++ b/code/services/openai/gpt3.go
@@ -0,0 +1,99 @@
+package openai
+
+import (
+ "errors"
+ "start-feishubot/logger"
+ "strings"
+
+ "github.com/pandodao/tokenizer-go"
+)
+
+// AIMode is the sampling temperature passed to the completions API;
+// lower values are more deterministic.
+type AIMode float64
+
+const (
+	Fresh      AIMode = 0.1
+	Warmth     AIMode = 0.7
+	Balance    AIMode = 1.2
+	Creativity AIMode = 1.7
+)
+
+// AIModeMap maps the user-facing (Chinese) mode labels to temperatures.
+var AIModeMap = map[string]AIMode{
+	"严谨": Fresh,
+	"简洁": Warmth,
+	"标准": Balance,
+	"发散": Creativity,
+}
+
+// AIModeStrs lists the mode labels in menu order.
+var AIModeStrs = []string{
+	"严谨",
+	"简洁",
+	"标准",
+	"发散",
+}
+
+// Messages is a single chat message (role: system/user/assistant).
+type Messages struct {
+	Role    string `json:"role"`
+	Content string `json:"content"`
+}
+
+// ChatGPTResponseBody is the chat-completion RESPONSE payload.
+// (The original comment said 请求体/"request body" — the two labels were
+// swapped with ChatGPTRequestBody below.)
+type ChatGPTResponseBody struct {
+	ID      string                 `json:"id"`
+	Object  string                 `json:"object"`
+	Created int                    `json:"created"`
+	Model   string                 `json:"model"`
+	Choices []ChatGPTChoiceItem    `json:"choices"`
+	Usage   map[string]interface{} `json:"usage"`
+}
+
+// ChatGPTChoiceItem is one completion choice within the response.
+type ChatGPTChoiceItem struct {
+	Message      Messages `json:"message"`
+	Index        int      `json:"index"`
+	FinishReason string   `json:"finish_reason"`
+}
+
+// ChatGPTRequestBody is the chat-completion REQUEST payload.
+type ChatGPTRequestBody struct {
+	Model            string     `json:"model"`
+	Messages         []Messages `json:"messages"`
+	MaxTokens        int        `json:"max_tokens"`
+	Temperature      AIMode     `json:"temperature"`
+	TopP             int        `json:"top_p"`
+	FrequencyPenalty int        `json:"frequency_penalty"`
+	PresencePenalty  int        `json:"presence_penalty"`
+}
+
+// CalculateTokenLength returns the token count of the trimmed message body.
+func (msg *Messages) CalculateTokenLength() int {
+	return tokenizer.MustCalToken(strings.TrimSpace(msg.Content))
+}
+
+// Completions sends a chat-completion request and returns the first
+// choice's message. aiMode is passed through as the sampling temperature.
+func (gpt *ChatGPT) Completions(msg []Messages, aiMode AIMode) (resp Messages,
+	err error) {
+	requestBody := ChatGPTRequestBody{
+		Model:            gpt.Model,
+		Messages:         msg,
+		MaxTokens:        gpt.MaxTokens,
+		Temperature:      aiMode,
+		TopP:             1,
+		FrequencyPenalty: 0,
+		PresencePenalty:  0,
+	}
+	gptResponseBody := &ChatGPTResponseBody{}
+	url := gpt.FullUrl("chat/completions")
+	logger.Debug(url)
+	logger.Debug("request body ", requestBody)
+	if url == "" {
+		return resp, errors.New("无法获取openai请求地址")
+	}
+	err = gpt.sendRequestWithBodyType(url, "POST", jsonBody, requestBody, gptResponseBody)
+	if err != nil {
+		logger.Errorf("ERROR %v", err)
+		// Keep the underlying cause instead of swallowing it entirely.
+		return Messages{}, errors.New("openai 请求失败: " + err.Error())
+	}
+	// A 2xx response with no choices is a distinct failure mode.
+	if len(gptResponseBody.Choices) == 0 {
+		logger.Errorf("ERROR empty choices in response %v", gptResponseBody)
+		return Messages{}, errors.New("openai 请求失败")
+	}
+	return gptResponseBody.Choices[0].Message, nil
+}
diff --git a/code/services/openai/gpt3_test.go b/code/services/openai/gpt3_test.go
new file mode 100644
index 00000000..e13c4347
--- /dev/null
+++ b/code/services/openai/gpt3_test.go
@@ -0,0 +1,193 @@
+package openai
+
+import (
+ "context"
+ "fmt"
+ "testing"
+ "time"
+
+ "start-feishubot/initialization"
+)
+
+// TestCompletions performs a real chat-completion round trip using the
+// local config.yaml; it requires network access and a valid API key.
+func TestCompletions(t *testing.T) {
+	config := initialization.LoadConfig("../../config.yaml")
+	msgs := []Messages{
+		{Role: "system", Content: "你是一个专业的翻译官,负责中英文翻译。"},
+		{Role: "user", Content: "翻译这段话: The assistant messages help store prior responses. They can also be written by a developer to help give examples of desired behavior."},
+	}
+	gpt := NewChatGPT(*config)
+	resp, err := gpt.Completions(msgs, Balance)
+	if err != nil {
+		t.Errorf("TestCompletions failed with error: %v", err)
+	}
+	fmt.Println(resp.Content, resp.Role)
+}
+
+// TestVisionOnePic sends a single image URL through the vision endpoint.
+// Requires network access and a key with gpt-4-vision access.
+func TestVisionOnePic(t *testing.T) {
+	config := initialization.LoadConfig("../../config.yaml")
+	content := []ContentType{
+		{Type: "text", Text: "What’s in this image?", ImageURL: nil},
+		{Type: "image_url", ImageURL: &ImageURL{
+			URL: "https://resource.liaobots." +
+				"com/1849d492904448a0ac17f975f0b7ca8b.jpg",
+			Detail: "high",
+		}},
+	}
+
+	msgs := []VisionMessages{
+		{Role: "assistant", Content: content},
+	}
+	gpt := NewChatGPT(*config)
+	resp, err := gpt.GetVisionInfo(msgs)
+	if err != nil {
+		t.Errorf("TestCompletions failed with error: %v", err)
+	}
+	fmt.Println(resp.Content, resp.Role)
+}
+
+// TestGenerateOneImage requests a single 256x256 image; requires network
+// access and a valid key.
+func TestGenerateOneImage(t *testing.T) {
+	config := initialization.LoadConfig("../../config.yaml")
+	gpt := NewChatGPT(*config)
+	prompt := "a red apple"
+	size := "256x256"
+	imageURL, err := gpt.GenerateOneImage(prompt, size, "")
+	if err != nil {
+		t.Errorf("TestGenerateOneImage failed with error: %v", err)
+	}
+	if imageURL == "" {
+		t.Errorf("TestGenerateOneImage returned empty imageURL")
+	}
+}
+
+// TestAudioToText transcribes the bundled test.wav fixture; requires
+// network access and a valid key.
+func TestAudioToText(t *testing.T) {
+	config := initialization.LoadConfig("../../config.yaml")
+	gpt := NewChatGPT(*config)
+	audio := "./test_file/test.wav"
+	text, err := gpt.AudioToText(audio)
+	if err != nil {
+		t.Errorf("TestAudioToText failed with error: %v", err)
+	}
+	fmt.Printf("TestAudioToText returned text: %s \n", text)
+	if text == "" {
+		t.Errorf("TestAudioToText returned empty text")
+	}
+
+}
+
+// TestVariateOneImage normalises the PNG fixture to RGBA (overwriting it
+// in place) and requests a single variation; requires network access.
+func TestVariateOneImage(t *testing.T) {
+	config := initialization.LoadConfig("../../config.yaml")
+	gpt := NewChatGPT(*config)
+	image := "./test_file/img.png"
+	size := "256x256"
+	//compressionType, err := GetImageCompressionType(image)
+	//if err != nil {
+	//	return
+	//}
+	//fmt.Println("compressionType: ", compressionType)
+	ConvertToRGBA(image, image)
+	err := VerifyPngs([]string{image})
+	if err != nil {
+		t.Errorf("TestVariateOneImage failed with error: %v", err)
+		return
+	}
+
+	imageBs64, err := gpt.GenerateOneImageVariation(image, size)
+	if err != nil {
+		t.Errorf("TestVariateOneImage failed with error: %v", err)
+	}
+	//fmt.Printf("TestVariateOneImage returned imageBs64: %s \n", imageBs64)
+	if imageBs64 == "" {
+		t.Errorf("TestVariateOneImage returned empty imageURL")
+	}
+}
+
+// TestVariateOneImageWithJpg runs the variation flow against a JPEG
+// fixture converted to RGBA first; requires network access.
+// NOTE(review): the converted file keeps its .jpg name even though the
+// content becomes PNG — intentional for the test, but confusing.
+func TestVariateOneImageWithJpg(t *testing.T) {
+	config := initialization.LoadConfig("../../config.yaml")
+	gpt := NewChatGPT(*config)
+	image := "./test_file/test.jpg"
+	size := "256x256"
+	compressionType, err := GetImageCompressionType(image)
+	if err != nil {
+		return
+	}
+	fmt.Println("compressionType: ", compressionType)
+	//ConvertJPGtoPNG(image)
+	ConvertToRGBA(image, image)
+	err = VerifyPngs([]string{image})
+	if err != nil {
+		t.Errorf("TestVariateOneImage failed with error: %v", err)
+		return
+	}
+
+	imageBs64, err := gpt.GenerateOneImageVariation(image, size)
+	if err != nil {
+		t.Errorf("TestVariateOneImage failed with error: %v", err)
+	}
+	fmt.Printf("TestVariateOneImage returned imageBs64: %s \n", imageBs64)
+	if imageBs64 == "" {
+		t.Errorf("TestVariateOneImage returned empty imageURL")
+	}
+}
+
+// The balance endpoint has been deprecated by OpenAI; this test is kept
+// for reference and will likely fail against current accounts.
+func TestChatGPT_GetBalance(t *testing.T) {
+	config := initialization.LoadConfig("../../config.yaml")
+	gpt := NewChatGPT(*config)
+	balance, err := gpt.GetBalance()
+	if err != nil {
+		t.Errorf("TestChatGPT_GetBalance failed with error: %v", err)
+	}
+	fmt.Println("balance: ", balance)
+}
+
+// TestChatGPT_streamChat starts a streaming completion in a goroutine and
+// waits up to 5s for the first delta; requires network access.
+// NOTE(review): calling t.Errorf from the goroutine can fire after the
+// test returns; consider collecting the error via a channel instead.
+func TestChatGPT_streamChat(t *testing.T) {
+	// Load the configuration.
+	config := initialization.LoadConfig("../../config.yaml")
+
+	// Prepare test cases.
+	testCases := []struct {
+		msg        []Messages
+		wantOutput string
+		wantErr    bool
+	}{
+		{
+			msg: []Messages{
+				{
+					Role:    "system",
+					Content: "从现在起你要化身职场语言大师,你需要用婉转的方式回复老板想你提出的问题,或像领导提出请求。",
+				},
+				{
+					Role:    "user",
+					Content: "领导,我想请假一天",
+				},
+			},
+			wantOutput: "",
+			wantErr:    false,
+		},
+	}
+
+	// Run the test cases.
+	for _, tc := range testCases {
+		// Prepare input and output.
+		responseStream := make(chan string)
+		ctx := context.Background()
+		c := NewChatGPT(*config)
+
+		// Start a goroutine to drive the streaming chat.
+		go func() {
+			err := c.StreamChat(ctx, tc.msg, Balance, responseStream)
+			if err != nil {
+				t.Errorf("streamChat() error = %v, wantErr %v", err, tc.wantErr)
+			}
+		}()
+
+		// Wait for output and check it arrives in time.
+		select {
+		case gotOutput := <-responseStream:
+			fmt.Printf("gotOutput: %v\n", gotOutput)
+
+		case <-time.After(5 * time.Second):
+			t.Errorf("streamChat() timeout, expected output not received")
+		}
+	}
+}
diff --git a/code/services/openai/picture.go b/code/services/openai/picture.go
new file mode 100644
index 00000000..1c0ace43
--- /dev/null
+++ b/code/services/openai/picture.go
@@ -0,0 +1,323 @@
+package openai
+
+import (
+ "bufio"
+ "encoding/base64"
+ "fmt"
+ "image"
+ "image/jpeg"
+ "image/png"
+ "io"
+ "io/ioutil"
+ "mime/multipart"
+ "os"
+)
+
+// ImageGenerationRequestBody is the /v1/images/generations request payload.
+type ImageGenerationRequestBody struct {
+	Prompt         string `json:"prompt"`
+	N              int    `json:"n"`
+	Size           string `json:"size"`
+	ResponseFormat string `json:"response_format"`
+	Model          string `json:"model,omitempty"`
+	Style          string `json:"style,omitempty"`
+}
+
+// ImageResponseBody is shared by the generation and variation endpoints;
+// images arrive base64-encoded because response_format is "b64_json".
+type ImageResponseBody struct {
+	Created int64 `json:"created"`
+	Data    []struct {
+		Base64Json string `json:"b64_json"`
+	} `json:"data"`
+}
+
+// ImageVariantRequestBody describes a variation request; Image is a local
+// file path uploaded via multipart form (see pictureMultipartForm).
+type ImageVariantRequestBody struct {
+	Image          string `json:"image"`
+	N              int    `json:"n"`
+	Size           string `json:"size"`
+	ResponseFormat string `json:"response_format"`
+}
+
+// GenerateImage asks DALL·E 3 for n images of the given size and style,
+// returned as base64-encoded strings.
+func (gpt *ChatGPT) GenerateImage(prompt string, size string,
+	n int, style string) ([]string, error) {
+	body := ImageGenerationRequestBody{
+		Prompt:         prompt,
+		N:              n,
+		Size:           size,
+		ResponseFormat: "b64_json",
+		Model:          "dall-e-3",
+		Style:          style,
+	}
+
+	result := &ImageResponseBody{}
+	if err := gpt.sendRequestWithBodyType(gpt.ApiUrl+"/v1/images/generations",
+		"POST", jsonBody, body, result); err != nil {
+		return nil, err
+	}
+
+	var b64Pool []string
+	for _, item := range result.Data {
+		b64Pool = append(b64Pool, item.Base64Json)
+	}
+	return b64Pool, nil
+}
+
+// GenerateOneImage generates a single image and returns its base64 payload.
+func (gpt *ChatGPT) GenerateOneImage(prompt string,
+	size string, style string) (string, error) {
+	b64s, err := gpt.GenerateImage(prompt, size, 1, style)
+	if err != nil {
+		return "", err
+	}
+	// Guard against an empty data array so we don't panic on b64s[0].
+	if len(b64s) == 0 {
+		return "", fmt.Errorf("no image returned by the API")
+	}
+	return b64s[0], nil
+}
+
+// GenerateOneImageWithDefaultSize generates one 1024x1024 image with the
+// default style; that size is accepted by both DALL·E 2 and 3.
+func (gpt *ChatGPT) GenerateOneImageWithDefaultSize(
+	prompt string) (string, error) {
+	return gpt.GenerateOneImage(prompt, "1024x1024", "")
+}
+
+// GenerateImageVariation uploads an image file and requests n variations
+// of the given size, returned base64-encoded.
+func (gpt *ChatGPT) GenerateImageVariation(images string,
+	size string, n int) ([]string, error) {
+	body := ImageVariantRequestBody{
+		Image:          images,
+		N:              n,
+		Size:           size,
+		ResponseFormat: "b64_json",
+	}
+
+	result := &ImageResponseBody{}
+	if err := gpt.sendRequestWithBodyType(gpt.ApiUrl+"/v1/images/variations",
+		"POST", formPictureDataBody, body, result); err != nil {
+		return nil, err
+	}
+
+	var b64Pool []string
+	for _, item := range result.Data {
+		b64Pool = append(b64Pool, item.Base64Json)
+	}
+	return b64Pool, nil
+}
+
+// GenerateOneImageVariation returns a single variation of the given image.
+func (gpt *ChatGPT) GenerateOneImageVariation(images string,
+	size string) (string, error) {
+	b64s, err := gpt.GenerateImageVariation(images, size, 1)
+	if err != nil {
+		return "", err
+	}
+	// Guard against an empty data array so we don't panic on b64s[0].
+	if len(b64s) == 0 {
+		return "", fmt.Errorf("no image variation returned by the API")
+	}
+	return b64s[0], nil
+}
+
+// pictureMultipartForm writes the image-variation request as a multipart
+// form: the image file plus the size / n / response_format fields. The
+// caller (doAPIRequestWithRetry) is responsible for closing w.
+func pictureMultipartForm(request ImageVariantRequestBody,
+	w *multipart.Writer) error {
+	f, err := os.Open(request.Image)
+	if err != nil {
+		return fmt.Errorf("opening image file: %w", err)
+	}
+	defer f.Close() // previously the file handle was leaked
+
+	fw, err := w.CreateFormFile("image", f.Name())
+	if err != nil {
+		return fmt.Errorf("creating form file: %w", err)
+	}
+	if _, err = io.Copy(fw, f); err != nil {
+		return fmt.Errorf("reading from opened image file: %w", err)
+	}
+
+	if err = w.WriteField("size", request.Size); err != nil {
+		return fmt.Errorf("writing size: %w", err)
+	}
+	if err = w.WriteField("n", fmt.Sprintf("%d", request.N)); err != nil {
+		return fmt.Errorf("writing n: %w", err)
+	}
+	if err = w.WriteField("response_format", request.ResponseFormat); err != nil {
+		return fmt.Errorf("writing response_format: %w", err)
+	}
+
+	// Do NOT close w here: the caller also closes it, and closing a
+	// multipart.Writer twice writes the trailing boundary twice,
+	// corrupting the request body.
+	return nil
+}
+
+// verifyOnePng validates one candidate image: it must be a PNG, square,
+// and at most 4 MB. It returns the decoded width and height.
+func verifyOnePng(pngPath string) (int, int, error) {
+	f, err := os.Open(pngPath)
+	if err != nil {
+		return 0, 0, fmt.Errorf("os.Open: %v", err)
+	}
+	defer f.Close() // previously the handle was leaked on every path
+
+	fi, err := f.Stat()
+	if err != nil {
+		return 0, 0, fmt.Errorf("f.Stat: %v", err)
+	}
+	if fi.Size() > 4*1024*1024 {
+		return 0, 0, fmt.Errorf("image size too large, "+
+			"must be under %d MB", 4)
+	}
+
+	img, err := png.Decode(f)
+	if err != nil {
+		return 0, 0, fmt.Errorf("image must be valid png, got error: %v", err)
+	}
+	width := img.Bounds().Dx()
+	height := img.Bounds().Dy()
+	if width != height {
+		return 0, 0, fmt.Errorf("found non-square image with dimensions %dx%d", width, height)
+	}
+	return width, height, nil
+}
+
+// VerifyPngs checks that every path is a square PNG under 4 MB and that
+// all images share the same dimensions (OpenAI variation requirements).
+func VerifyPngs(pngPaths []string) error {
+	foundPng := false
+	var expectedWidth, expectedHeight int
+
+	for _, pngPath := range pngPaths {
+		width, height, err := verifyOnePng(pngPath)
+		if err != nil {
+			return err
+		}
+		if !foundPng {
+			foundPng = true
+			expectedWidth = width
+			expectedHeight = height
+		} else if width != expectedWidth || height != expectedHeight {
+			return fmt.Errorf("dimensions of all images must match, got both (%dx%d) and (%dx%d)", width, height, expectedWidth, expectedHeight)
+		}
+	}
+
+	return nil
+}
+
+// ConvertToRGBA decodes the image at inputFilePath, redraws it pixel by
+// pixel into an RGBA canvas, and writes the result to outputFilePath as
+// PNG. Input and output paths may be the same file.
+func ConvertToRGBA(inputFilePath string, outputFilePath string) error {
+	src, err := os.Open(inputFilePath)
+	if err != nil {
+		return fmt.Errorf("打开文件时出错:%w", err)
+	}
+	defer src.Close()
+
+	decoded, _, err := image.Decode(src)
+	if err != nil {
+		return fmt.Errorf("解码图像时出错:%w", err)
+	}
+
+	// Copy every pixel into an explicit RGBA image.
+	bounds := decoded.Bounds()
+	canvas := image.NewRGBA(bounds)
+	for y := 0; y < bounds.Max.Y; y++ {
+		for x := 0; x < bounds.Max.X; x++ {
+			canvas.Set(x, y, decoded.At(x, y))
+		}
+	}
+
+	dst, err := os.Create(outputFilePath)
+	if err != nil {
+		return fmt.Errorf("创建输出文件时出错:%w", err)
+	}
+	defer dst.Close()
+
+	if err := png.Encode(dst, canvas); err != nil {
+		return fmt.Errorf("编码图像时出错:%w", err)
+	}
+	return nil
+}
+
+// ConvertJpegToPNG converts the JPEG at jpgPath into a sibling .png file
+// (same name, .png extension). It fails if the file is not a valid JPEG.
+func ConvertJpegToPNG(jpgPath string) error {
+	f, err := os.Open(jpgPath)
+	if err != nil {
+		return err
+	}
+	defer f.Close()
+
+	// Decode once; the old code decoded the JPEG twice (validate, seek
+	// back, decode again) which doubled the work for no benefit.
+	img, err := jpeg.Decode(f)
+	if err != nil {
+		return fmt.Errorf("file %s is not a JPEG image", jpgPath)
+	}
+
+	// Replace the .jpg extension with .png (assumes a 3-char extension).
+	pngPath := jpgPath[:len(jpgPath)-4] + ".png"
+	out, err := os.Create(pngPath)
+	if err != nil {
+		return err
+	}
+	defer out.Close()
+
+	return png.Encode(out, img)
+}
+
+// GetImageCompressionType reports the registered image format name
+// ("png", "jpeg", ...) of the file at path.
+func GetImageCompressionType(path string) (string, error) {
+	file, err := os.Open(path)
+	if err != nil {
+		return "", err
+	}
+	defer file.Close()
+
+	// DecodeConfig reads only the header, so this is cheap. The stray
+	// debug fmt.Println calls were removed.
+	_, format, err := image.DecodeConfig(bufio.NewReader(file))
+	if err != nil {
+		return "", err
+	}
+	return format, nil
+}
+
+// GetBase64FromImage reads the file at imagePath and returns its contents
+// base64-encoded (standard encoding).
+func GetBase64FromImage(imagePath string) (string, error) {
+	// ReadFile replaces the manual Open + ReadAll + Close dance.
+	imageData, err := ioutil.ReadFile(imagePath)
+	if err != nil {
+		return "", err
+	}
+	return base64.StdEncoding.EncodeToString(imageData), nil
+}
diff --git a/code/services/openai/stream.go b/code/services/openai/stream.go
new file mode 100644
index 00000000..c16a8749
--- /dev/null
+++ b/code/services/openai/stream.go
@@ -0,0 +1,87 @@
+package openai
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ go_openai "github.com/sashabaranov/go-openai"
+ "io"
+)
+
+// StreamChat converts internal Messages into go-openai messages and
+// delegates to StreamChatWithHistory with a 2000-token cap.
+func (c *ChatGPT) StreamChat(ctx context.Context,
+	msg []Messages, mode AIMode,
+	responseStream chan string) error {
+	converted := make([]go_openai.ChatCompletionMessage, 0, len(msg))
+	for _, m := range msg {
+		converted = append(converted, go_openai.ChatCompletionMessage{
+			Role:    m.Role,
+			Content: m.Content,
+		})
+	}
+	return c.StreamChatWithHistory(ctx, converted, 2000, mode,
+		responseStream)
+}
+
+func (c *ChatGPT) StreamChatWithHistory(ctx context.Context,
+ msg []go_openai.ChatCompletionMessage, maxTokens int,
+ aiMode AIMode,
+ responseStream chan string,
+) error {
+
+ config := go_openai.DefaultConfig(c.ApiKey[0])
+ config.BaseURL = c.ApiUrl + "/v1"
+ if c.Platform != OpenAI {
+ baseUrl := fmt.Sprintf("https://%s.%s",
+ c.AzureConfig.ResourceName, "openai.azure.com")
+ config = go_openai.DefaultAzureConfig(c.AzureConfig.
+ ApiToken, baseUrl)
+ config.AzureModelMapperFunc = func(model string) string {
+ return c.AzureConfig.DeploymentName
+
+ }
+ }
+
+ proxyClient, parseProxyError := GetProxyClient(c.HttpProxy)
+ if parseProxyError != nil {
+ return parseProxyError
+ }
+ config.HTTPClient = proxyClient
+
+ client := go_openai.NewClientWithConfig(config)
+ //pp.Printf("client: %v", client)
+ //turn aimode to float64()
+ var temperature float32
+ temperature = float32(aiMode)
+ req := go_openai.ChatCompletionRequest{
+ Model: c.Model,
+ Messages: msg,
+ N: 1,
+ Temperature: temperature,
+ MaxTokens: maxTokens,
+ //TopP: 1,
+ //Moderation: true,
+ //ModerationStop: true,
+ }
+ stream, err := client.CreateChatCompletionStream(ctx, req)
+ if err != nil {
+ fmt.Errorf("CreateCompletionStream returned error: %v", err)
+ }
+
+ defer stream.Close()
+ for {
+ response, err := stream.Recv()
+ fmt.Println("response: ", response)
+ if errors.Is(err, io.EOF) {
+ //fmt.Println("Stream finished")
+ return nil
+ }
+ if err != nil {
+ fmt.Printf("Stream error: %v\n", err)
+ return err
+ }
+ responseStream <- response.Choices[0].Delta.Content
+ }
+ return nil
+
+}
diff --git a/code/services/openai/test_file/img.png b/code/services/openai/test_file/img.png
new file mode 100644
index 00000000..3071bd71
Binary files /dev/null and b/code/services/openai/test_file/img.png differ
diff --git a/code/services/openai/test_file/test.jpg b/code/services/openai/test_file/test.jpg
new file mode 100644
index 00000000..8a862b1a
Binary files /dev/null and b/code/services/openai/test_file/test.jpg differ
diff --git a/code/services/openai/test_file/test.wav b/code/services/openai/test_file/test.wav
new file mode 100644
index 00000000..ea490d90
Binary files /dev/null and b/code/services/openai/test_file/test.wav differ
diff --git a/code/services/openai/vision.go b/code/services/openai/vision.go
new file mode 100644
index 00000000..8523ab3f
--- /dev/null
+++ b/code/services/openai/vision.go
@@ -0,0 +1,53 @@
+package openai
+
+import (
+ "errors"
+ "start-feishubot/logger"
+)
+
+// ImageURL points the vision model at an image; Detail is "high" or "low".
+type ImageURL struct {
+	URL    string `json:"url,omitempty"`
+	Detail string `json:"detail,omitempty"`
+}
+
+// ContentType is one element of a vision message: either text or an image.
+type ContentType struct {
+	Type     string    `json:"type"`
+	Text     string    `json:"text,omitempty"`
+	ImageURL *ImageURL `json:"image_url,omitempty"`
+}
+
+// VisionMessages is a chat message whose content may mix text and images.
+type VisionMessages struct {
+	Role    string      `json:"role"`
+	Content interface{} `json:"content"`
+}
+
+// VisionRequestBody is the chat-completion request for the vision model.
+type VisionRequestBody struct {
+	Model     string           `json:"model"`
+	Messages  []VisionMessages `json:"messages"`
+	MaxTokens int              `json:"max_tokens"`
+}
+
+// GetVisionInfo sends a gpt-4-vision-preview chat completion and returns
+// the first choice's message. The model is fixed regardless of gpt.Model.
+func (gpt *ChatGPT) GetVisionInfo(msg []VisionMessages) (
+	resp Messages, err error) {
+	requestBody := VisionRequestBody{
+		Model:     "gpt-4-vision-preview",
+		Messages:  msg,
+		MaxTokens: gpt.MaxTokens,
+	}
+	gptResponseBody := &ChatGPTResponseBody{}
+	url := gpt.FullUrl("chat/completions")
+	logger.Debug("request body ", requestBody)
+	if url == "" {
+		return resp, errors.New("无法获取openai请求地址")
+	}
+	err = gpt.sendRequestWithBodyType(url, "POST", jsonBody, requestBody, gptResponseBody)
+	if err != nil {
+		logger.Errorf("ERROR %v", err)
+		// Keep the underlying cause instead of swallowing it entirely.
+		return Messages{}, errors.New("openai 请求失败: " + err.Error())
+	}
+	// A 2xx response with no choices is a distinct failure mode.
+	if len(gptResponseBody.Choices) == 0 {
+		logger.Errorf("ERROR empty choices in response %v", gptResponseBody)
+		return Messages{}, errors.New("openai 请求失败")
+	}
+	return gptResponseBody.Choices[0].Message, nil
+}
diff --git a/code/services/sessionCache.go b/code/services/sessionCache.go
new file mode 100644
index 00000000..21c12ce8
--- /dev/null
+++ b/code/services/sessionCache.go
@@ -0,0 +1,266 @@
+package services
+
+import (
+ "start-feishubot/services/openai"
+ "time"
+
+ "github.com/patrickmn/go-cache"
+)
+
+// SessionMode is the per-session interaction mode (chat/vision/pictures).
+type SessionMode string
+
+// VisionDetail is the image detail level passed to the vision model.
+type VisionDetail string
+
+// SessionService stores per-session state in an in-memory TTL cache.
+type SessionService struct {
+	cache *cache.Cache
+}
+
+// PicSetting holds the image-generation preferences for a session.
+type PicSetting struct {
+	resolution Resolution
+	style      PicStyle
+}
+
+// Resolution is an image size string accepted by the images API.
+type Resolution string
+
+// PicStyle is a DALL·E 3 style ("vivid" or "natural").
+type PicStyle string
+
+// SessionMeta is everything cached for one session.
+type SessionMeta struct {
+	Mode         SessionMode       `json:"mode"`
+	Msg          []openai.Messages `json:"msg,omitempty"`
+	PicSetting   PicSetting        `json:"pic_setting,omitempty"`
+	AIMode       openai.AIMode     `json:"ai_mode,omitempty"`
+	VisionDetail VisionDetail      `json:"vision_detail,omitempty"`
+}
+
+const (
+	Resolution256      Resolution = "256x256"
+	Resolution512      Resolution = "512x512"
+	Resolution1024     Resolution = "1024x1024"
+	Resolution10241792 Resolution = "1024x1792"
+	Resolution17921024 Resolution = "1792x1024"
+)
+const (
+	PicStyleVivid   PicStyle = "vivid"
+	PicStyleNatural PicStyle = "natural"
+)
+const (
+	VisionDetailHigh VisionDetail = "high"
+	VisionDetailLow  VisionDetail = "low"
+)
+const (
+	ModePicCreate SessionMode = "pic_create"
+	ModePicVary   SessionMode = "pic_vary"
+	ModeGPT       SessionMode = "gpt"
+	ModeVision    SessionMode = "vision"
+)
+
+// SessionServiceCacheInterface is the session-cache API used by handlers.
+type SessionServiceCacheInterface interface {
+	Get(sessionId string) *SessionMeta
+	Set(sessionId string, sessionMeta *SessionMeta)
+	GetMsg(sessionId string) []openai.Messages
+	SetMsg(sessionId string, msg []openai.Messages)
+	SetMode(sessionId string, mode SessionMode)
+	GetMode(sessionId string) SessionMode
+	GetAIMode(sessionId string) openai.AIMode
+	SetAIMode(sessionId string, aiMode openai.AIMode)
+	SetPicResolution(sessionId string, resolution Resolution)
+	GetPicResolution(sessionId string) string
+	SetPicStyle(sessionId string, resolution PicStyle)
+	GetPicStyle(sessionId string) string
+	SetVisionDetail(sessionId string, visionDetail VisionDetail)
+	GetVisionDetail(sessionId string) string
+	Clear(sessionId string)
+}
+
+// sessionServices is the lazily-created singleton (see GetSessionCache).
+var sessionServices *SessionService
+
+// Get returns the cached SessionMeta for sessionId, or nil when absent.
+func (s *SessionService) Get(sessionId string) *SessionMeta {
+	if v, ok := s.cache.Get(sessionId); ok {
+		return v.(*SessionMeta)
+	}
+	return nil
+}
+
+// Set stores sessionMeta under sessionId with a 12-hour TTL.
+func (s *SessionService) Set(sessionId string, sessionMeta *SessionMeta) {
+	s.cache.Set(sessionId, sessionMeta, time.Hour*12)
+}
+
+// GetMode returns the session's interaction mode, defaulting to ModeGPT.
+func (s *SessionService) GetMode(sessionId string) SessionMode {
+	meta := s.Get(sessionId)
+	if meta == nil {
+		return ModeGPT
+	}
+	return meta.Mode
+}
+
+func (s *SessionService) SetMode(sessionId string, mode SessionMode) {
+ maxCacheTime := time.Hour * 12
+ sessionContext, ok := s.cache.Get(sessionId)
+ if !ok {
+ sessionMeta := &SessionMeta{Mode: mode}
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+ return
+ }
+ sessionMeta := sessionContext.(*SessionMeta)
+ sessionMeta.Mode = mode
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+}
+
+// GetAIMode returns the session's temperature preset, defaulting to Balance.
+func (s *SessionService) GetAIMode(sessionId string) openai.AIMode {
+	meta := s.Get(sessionId)
+	if meta == nil {
+		return openai.Balance
+	}
+	return meta.AIMode
+}
+
+// SetAIMode set the ai mode for the session.
+func (s *SessionService) SetAIMode(sessionId string, aiMode openai.AIMode) {
+ maxCacheTime := time.Hour * 12
+ sessionContext, ok := s.cache.Get(sessionId)
+ if !ok {
+ sessionMeta := &SessionMeta{AIMode: aiMode}
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+ return
+ }
+ sessionMeta := sessionContext.(*SessionMeta)
+ sessionMeta.AIMode = aiMode
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+}
+
+// GetMsg returns the stored conversation history, or nil when absent.
+func (s *SessionService) GetMsg(sessionId string) (msg []openai.Messages) {
+	meta := s.Get(sessionId)
+	if meta == nil {
+		return nil
+	}
+	return meta.Msg
+}
+
+// SetMsg stores the conversation history for a session, trimming the
+// oldest non-system messages until the total token count fits maxLength.
+func (s *SessionService) SetMsg(sessionId string, msg []openai.Messages) {
+	maxLength := 4096
+	maxCacheTime := time.Hour * 12
+
+	// Drop the oldest non-system message (index 1) while over budget.
+	// The len(msg) > 2 guard fixes a panic in the old code: with two or
+	// fewer messages still over budget, msg[2:] sliced past the end.
+	for len(msg) > 2 && getStrPoolTotalLength(msg) > maxLength {
+		msg = append(msg[:1], msg[2:]...)
+	}
+
+	sessionContext, ok := s.cache.Get(sessionId)
+	if !ok {
+		sessionMeta := &SessionMeta{Msg: msg}
+		s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+		return
+	}
+	sessionMeta := sessionContext.(*SessionMeta)
+	sessionMeta.Msg = msg
+	s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+}
+
+func (s *SessionService) SetPicStyle(sessionId string, style PicStyle) {
+ maxCacheTime := time.Hour * 12
+
+ switch style {
+ case PicStyleVivid, PicStyleNatural:
+ default:
+ style = PicStyleVivid
+ }
+
+ sessionContext, ok := s.cache.Get(sessionId)
+ if !ok {
+ sessionMeta := &SessionMeta{PicSetting: PicSetting{style: style}}
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+ return
+ }
+ sessionMeta := sessionContext.(*SessionMeta)
+ sessionMeta.PicSetting.style = style
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+}
+
+// GetPicStyle returns the stored picture style, defaulting to vivid.
+func (s *SessionService) GetPicStyle(sessionId string) string {
+	meta := s.Get(sessionId)
+	if meta == nil {
+		return string(PicStyleVivid)
+	}
+	return string(meta.PicSetting.style)
+}
+
+func (s *SessionService) SetPicResolution(sessionId string,
+ resolution Resolution) {
+ maxCacheTime := time.Hour * 12
+
+ //if not in [Resolution256, Resolution512, Resolution1024] then set
+ //to Resolution256
+ switch resolution {
+ case Resolution256, Resolution512, Resolution1024, Resolution10241792, Resolution17921024:
+ default:
+ resolution = Resolution1024
+ }
+
+ sessionContext, ok := s.cache.Get(sessionId)
+ if !ok {
+ sessionMeta := &SessionMeta{PicSetting: PicSetting{resolution: resolution}}
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+ return
+ }
+ sessionMeta := sessionContext.(*SessionMeta)
+ sessionMeta.PicSetting.resolution = resolution
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+}
+
+// GetPicResolution returns the stored resolution.
+// NOTE(review): the getter default (256x256) differs from the setter's
+// normalisation default (1024x1024) — confirm the asymmetry is intended.
+func (s *SessionService) GetPicResolution(sessionId string) string {
+	meta := s.Get(sessionId)
+	if meta == nil {
+		return string(Resolution256)
+	}
+	return string(meta.PicSetting.resolution)
+}
+
+// Clear removes the whole session (mode, history, settings) from the cache.
+func (s *SessionService) Clear(sessionId string) {
+	// Delete the session context from the cache.
+	s.cache.Delete(sessionId)
+}
+
+// GetVisionDetail returns the stored vision detail level ("" when unset).
+func (s *SessionService) GetVisionDetail(sessionId string) string {
+	meta := s.Get(sessionId)
+	if meta == nil {
+		return ""
+	}
+	return string(meta.VisionDetail)
+}
+
+func (s *SessionService) SetVisionDetail(sessionId string,
+ visionDetail VisionDetail) {
+ maxCacheTime := time.Hour * 12
+ sessionContext, ok := s.cache.Get(sessionId)
+ if !ok {
+ sessionMeta := &SessionMeta{VisionDetail: visionDetail}
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+ return
+ }
+ sessionMeta := sessionContext.(*SessionMeta)
+ sessionMeta.VisionDetail = visionDetail
+ s.cache.Set(sessionId, sessionMeta, maxCacheTime)
+}
+
+// GetSessionCache returns the process-wide session cache singleton,
+// creating it on first use (12h default TTL, hourly cleanup).
+// NOTE(review): this lazy init is not goroutine-safe; two goroutines
+// racing here could each build a cache — confirm callers initialise from
+// a single goroutine, or guard with sync.Once.
+func GetSessionCache() SessionServiceCacheInterface {
+	if sessionServices == nil {
+		sessionServices = &SessionService{cache: cache.New(time.Hour*12, time.Hour*1)}
+	}
+	return sessionServices
+}
+
+func getStrPoolTotalLength(strPool []openai.Messages) int {
+ var total int
+ for _, v := range strPool {
+ total += v.CalculateTokenLength()
+ }
+ return total
+}
diff --git a/code/services/userCache.go b/code/services/userCache.go
deleted file mode 100644
index 3ce444dc..00000000
--- a/code/services/userCache.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package services
-
-import (
- "fmt"
- "github.com/patrickmn/go-cache"
- "time"
-)
-
-type UserService struct {
- cache *cache.Cache
-}
-
-var userServices *UserService
-
-func (u UserService) Get(userId string) string {
- // 获取用户的会话上下文
- sessionContext, ok := u.cache.Get(userId)
- if !ok {
- return ""
- }
- //list to string
- list := sessionContext.([]string)
- var result string
- for _, v := range list {
- result += v
- }
- return result
-}
-
-func (u UserService) Set(userId string, question, reply string) {
- // 列表,最多保存8个
- //如果满了,删除最早的一个
- //如果没有满,直接添加
- maxCache := 8
- maxLength := 2048
- maxCacheTime := time.Minute * 30
- listOut := make([]string, maxCache)
- value := fmt.Sprintf("Q:%s\nA:%s\n\n", question, reply)
- raw, ok := u.cache.Get(userId)
- if ok {
- listOut = raw.([]string)
- if len(listOut) == maxCache {
- listOut = listOut[1:]
- }
- listOut = append(listOut, value)
- } else {
- listOut = append(listOut, value)
- }
-
- //限制对话上下文长度
- if getStrPoolTotalLength(listOut) > maxLength {
- listOut = listOut[1:]
- }
- u.cache.Set(userId, listOut, maxCacheTime)
-}
-
-func (u UserService) Clear(userId string) bool {
- u.cache.Delete(userId)
- return true
-}
-
-type UserCacheInterface interface {
- Get(userId string) string
- Set(userId string, question, reply string)
- Clear(userId string) bool
-}
-
-func GetUserCache() UserCacheInterface {
- if userServices == nil {
- userServices = &UserService{cache: cache.New(10*time.Minute, 10*time.Minute)}
- }
- return userServices
-}
-
-func getStrPoolTotalLength(strPool []string) int {
- var total int
- for _, v := range strPool {
- total += len(v)
- }
- return total
-}
diff --git a/code/utils/audio/ogg.go b/code/utils/audio/ogg.go
new file mode 100644
index 00000000..fe65db59
--- /dev/null
+++ b/code/utils/audio/ogg.go
@@ -0,0 +1,61 @@
+package audio
+
+import (
+ "bytes"
+ "errors"
+ "io"
+ "os"
+
+ "github.com/pion/opus"
+ "github.com/pion/opus/pkg/oggreader"
+)
+
+func OggToWavByPath(ogg string, wav string) error {
+ input, err := os.Open(ogg)
+ if err != nil {
+ return err
+ }
+ defer input.Close()
+
+ output, err := os.Create(wav)
+ if err != nil {
+ return err
+ }
+
+ defer output.Close()
+ return OggToWav(input, output)
+}
+
+func OggToWav(input io.Reader, output io.WriteSeeker) error {
+ ogg, _, err := oggreader.NewWith(input)
+ if err != nil {
+ return err
+ }
+
+ out := make([]byte, 1920)
+
+ decoder := opus.NewDecoder()
+ encoder := NewEncoder(output, 44100, 16)
+
+ for {
+ segments, _, err := ogg.ParseNextPage()
+ if errors.Is(err, io.EOF) {
+ break
+ } else if bytes.HasPrefix(segments[0], []byte("OpusTags")) {
+ continue
+ }
+
+ if err != nil {
+ panic(err)
+ }
+
+ for i := range segments {
+ if _, _, err = decoder.Decode(segments[i], out); err != nil {
+ panic(err)
+ }
+ encoder.Write(out)
+ }
+ }
+ encoder.Close()
+ return nil
+}
diff --git a/code/utils/audio/wav.go b/code/utils/audio/wav.go
new file mode 100644
index 00000000..4308f604
--- /dev/null
+++ b/code/utils/audio/wav.go
@@ -0,0 +1,107 @@
+package audio
+
+import (
+ "encoding/binary"
+ "io"
+)
+
+type Encoder struct {
+ Output io.WriteSeeker
+ SampleRate int
+ BitDepth int
+ totalBytes uint32
+ isHeaderWritten bool
+}
+
+func (e *Encoder) WriteHeader() error {
+ if err := writeLe(e.Output, []byte("RIFF")); err != nil {
+ return err
+ }
+
+ if err := writeLe(e.Output, uint32(0)); err != nil { // Placeholder for file size
+ return err
+ }
+
+ if err := writeLe(e.Output, []byte("WAVE")); err != nil {
+ return err
+ }
+
+ if err := writeLe(e.Output, []byte("fmt ")); err != nil {
+ return err
+ }
+ if err := writeLe(e.Output, uint32(16)); err != nil {
+ return err
+ }
+
+ if err := writeLe(e.Output, uint16(1)); err != nil { // Audio format: PCM
+ return err
+ }
+ if err := writeLe(e.Output, uint16(1)); err != nil { // Number of channels: 1 (mono)
+ return err
+ }
+ if err := writeLe(e.Output, uint32(e.SampleRate)); err != nil {
+ return err
+ }
+
+ if err := writeLe(e.Output, uint32(e.SampleRate*e.BitDepth/8)); err != nil {
+ return err
+ }
+
+ if err := writeLe(e.Output, uint16(e.BitDepth/8)); err != nil {
+ return err
+ }
+ if err := writeLe(e.Output, uint16(e.BitDepth)); err != nil {
+ return err
+ }
+
+ if err := writeLe(e.Output, []byte("data")); err != nil {
+ return err
+ }
+
+ if err := writeLe(e.Output, uint32(0)); err != nil { //Placeholder for data size
+ return err
+ }
+ e.isHeaderWritten = true
+ return nil
+}
+
+func writeLe[T []byte | uint32 | uint16 | uint8](w io.Writer, data T) error {
+ return binary.Write(w, binary.LittleEndian, data)
+}
+
+func (e *Encoder) Write(data []byte) error {
+ if !e.isHeaderWritten {
+ e.WriteHeader()
+ }
+ n, err := e.Output.Write(data)
+ if err != nil {
+ return err
+ }
+ e.totalBytes += uint32(n)
+ return nil
+}
+
+func (e *Encoder) Close() error {
+ if _, err := e.Output.Seek(4, io.SeekStart); err != nil {
+ return err
+ }
+ if err := binary.Write(e.Output, binary.LittleEndian, uint32(36+e.totalBytes)); err != nil {
+ return err
+ }
+ if _, err := e.Output.Seek(40, io.SeekStart); err != nil {
+ return err
+ }
+ if err := binary.Write(e.Output, binary.LittleEndian, e.totalBytes); err != nil {
+ return err
+ }
+ return nil
+}
+
+func NewEncoder(w io.WriteSeeker, sampleRate int, bitDepth int) *Encoder {
+ return &Encoder{
+ SampleRate: sampleRate,
+ Output: w,
+ BitDepth: bitDepth,
+ isHeaderWritten: false,
+ }
+}
diff --git a/code/utils/strings.go b/code/utils/strings.go
new file mode 100644
index 00000000..bd623ee2
--- /dev/null
+++ b/code/utils/strings.go
@@ -0,0 +1,38 @@
+package utils
+
+import "strings"
+
+func CutPrefix(s, prefix string) (string, bool) {
+ if strings.HasPrefix(s, prefix) {
+ return strings.TrimPrefix(s, prefix), true
+ }
+ return s, false
+}
+
+func EitherCutPrefix(s string, prefix ...string) (string, bool) {
+ // 任一前缀匹配则返回剩余部分
+ for _, p := range prefix {
+ if strings.HasPrefix(s, p) {
+ return strings.TrimPrefix(s, p), true
+ }
+ }
+ return s, false
+}
+
+// trim space and equal
+func TrimEqual(s, prefix string) (string, bool) {
+ if strings.TrimSpace(s) == prefix {
+ return "", true
+ }
+ return s, false
+}
+
+func EitherTrimEqual(s string, prefix ...string) (string, bool) {
+ // 任一前缀匹配则返回剩余部分
+ for _, p := range prefix {
+ if strings.TrimSpace(s) == p {
+ return "", true
+ }
+ }
+ return s, false
+}
diff --git a/code/utils/strings_test.go b/code/utils/strings_test.go
new file mode 100644
index 00000000..757b16c1
--- /dev/null
+++ b/code/utils/strings_test.go
@@ -0,0 +1,109 @@
+package utils
+
+import "testing"
+
+func TestEitherCutPrefix(t *testing.T) {
+ type args struct {
+ s string
+ prefix []string
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ want1 bool
+ }{
+ {
+ name: "Prefix match",
+ args: args{
+ s: "/system bar",
+ prefix: []string{"/system "},
+ },
+ want: "bar",
+ want1: true,
+ },
+
+ {
+ name: "Prefix match",
+ args: args{
+ s: "扮演 bar",
+ prefix: []string{"扮演 "},
+ },
+ want: "bar",
+ want1: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, got1 := EitherCutPrefix(tt.args.s, tt.args.prefix...)
+ if got != tt.want {
+ t.Errorf("EitherCutPrefix() got = %v, want %v", got, tt.want)
+ }
+ if got1 != tt.want1 {
+ t.Errorf("EitherCutPrefix() got1 = %v, want %v", got1, tt.want1)
+ }
+ })
+ }
+}
+
+func TestEitherTrimEqual(t *testing.T) {
+ type args struct {
+ s string
+ prefix []string
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ want1 bool
+ }{
+ {
+ name: "Prefix match",
+ args: args{
+ s: "清除",
+ prefix: []string{"清除"},
+ },
+ want: "",
+ want1: true,
+ },
+ {
+ name: "Prefix match",
+ args: args{
+ s: " /clear ",
+ prefix: []string{"清除", "/clear"},
+ },
+ want: "",
+ want1: true,
+ },
+ {
+ name: "Prefix match",
+ args: args{
+ s: " 清除 ",
+ prefix: []string{"清除", "/clear"},
+ },
+ want: "",
+ want1: true,
+ },
+ {
+ name: "Prefix match",
+ args: args{
+ s: " clear ",
+ prefix: []string{"清除", "/clear"},
+ },
+ want: " clear ",
+ want1: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, got1 := EitherTrimEqual(tt.args.s, tt.args.prefix...)
+ if got != tt.want {
+ t.Errorf("EitherTrimEqual() got = %v, want %v", got, tt.want)
+ }
+ if got1 != tt.want1 {
+ t.Errorf("EitherTrimEqual() got1 = %v, want %v", got1, tt.want1)
+ }
+ })
+ }
+}
diff --git a/docker-compose.yaml b/docker-compose.yaml
new file mode 100644
index 00000000..67817a2c
--- /dev/null
+++ b/docker-compose.yaml
@@ -0,0 +1,30 @@
+version: '3.3'
+services:
+ feishu-chatgpt:
+ container_name: feishu-chatgpt
+ build:
+ context: .
+ dockerfile: Dockerfile
+ ports:
+ - "9000:9000/tcp"
+ # volumes:
+ # - ./code/config.yaml:/app/config.yaml:ro
+ environment:
+ - APP_ID=cli_axxx
+ - APP_SECRET=xxx
+ - APP_ENCRYPT_KEY=xxx
+ - APP_VERIFICATION_TOKEN=xxx
+ # 请确保和飞书应用管理平台中的设置一致
+ - BOT_NAME=chatGpt
+ # OpenAI API Key 支持负载均衡, 可以填写多个 Key 用逗号分隔
+ - OPENAI_KEY=sk-xxx,sk-xxx,sk-xxx
+ # 服务器配置
+ - HTTP_PORT=9000
+ - HTTPS_PORT=9001
+ - USE_HTTPS=false
+ - CERT_FILE=cert.pem
+ - KEY_FILE=key.pem
+ # OpenAI 地址, 一般不需要修改, 除非你有自己的反向代理
+ - API_URL=https://api.openai.com
+ # 代理设置, 例如 - HTTP_PROXY=http://127.0.0.1:7890, 默认代表不使用代理
+ - HTTP_PROXY
diff --git a/docs/help.png b/docs/help.png
new file mode 100644
index 00000000..5a714b55
Binary files /dev/null and b/docs/help.png differ
diff --git a/docs/image.png b/docs/image.png
deleted file mode 100644
index 96932aef..00000000
Binary files a/docs/image.png and /dev/null differ
diff --git a/docs/img.png b/docs/img.png
new file mode 100644
index 00000000..24c2d392
Binary files /dev/null and b/docs/img.png differ
diff --git a/docs/img3.png b/docs/img3.png
new file mode 100644
index 00000000..3d214698
Binary files /dev/null and b/docs/img3.png differ
diff --git a/docs/talk.png b/docs/talk.png
new file mode 100644
index 00000000..9d18f548
Binary files /dev/null and b/docs/talk.png differ
diff --git a/go.mod b/go.mod
deleted file mode 100644
index 364980db..00000000
--- a/go.mod
+++ /dev/null
@@ -1,3 +0,0 @@
-module leizhenpeng/feishu-chatgpt
-
-go 1.18
diff --git a/readme.md b/readme.md
index b46af805..eb359135 100644
--- a/readme.md
+++ b/readme.md
@@ -1,64 +1,132 @@
+
+
+
+
+
+
+ 📷 点击展开完整功能
+
+
+
+
+
+
+
+
+
+
+
-
-
-
+
+
+ 飞书 ×(GPT-4 + DALL·E + Whisper)
+
+ 🚀 Feishu OpenAI 🚀
+
+
- 在飞书与ChatGPT随时对话,智慧随身。
-
- Feishu ChatGpt
+ www.connectai-e.com
+
+## 商业支持
+
+升级至商业共创版,满足您公司的需求:
+1. 全面支持:集成OpenAI、Midjourney等主流AI模型,充足资源满足小团队全年需求。
+2. 超值优惠:后续补充AI资源价格仅为官方价的60%,为您节省成本。
+3. 持续更新:企联AI已获「奇绩创坛」数百万天使投资,确保商业版持续优化和质量提升。
+4. 管理便捷:实用Admin Panel助您轻松管理AI资源、查询日志、避免风险词汇和权限控制。
+5. 高度安全:提供在线SaaS版与企业级私有部署,确保数据安全和隐私保护。
+
+查看更多内容: https://www.connectai-e.com
+
+企业客户咨询: 15623677720
+
+
+
+
+
+
## 👻 机器人功能
-- [x] 群聊和私聊
-- [x] 引入持续对话功能
-- [x] 超时自动结束对话
-- [x] 限制对话上下文长度
-- [x] 主动退出对话
+🗣 语音交流:私人直接与机器人畅所欲言「Whisper」
+🕵️ 图片推理: 借助大模型互动式对话图片「GPT4V」
+💬 多话题对话:支持私人和群聊多话题讨论,高效连贯
+🖼 文本成图:支持文本成图和以图搜图 「DALLE-3」
-## 🌟 项目特点
-- 🍏 基于 OpenAi-[gpt3](https://platform.openai.com/account/api-keys) 接口
-- 🍎 通过 lark,将 ChatGPT 接入[飞书](https://open.feishu.cn/app)
-- 🥒 支持[Serverless](https://github.com/serverless-devs/serverless-devs)、[本地环境](https://dashboard.cpolar.com/login)、[Docker](https://www.docker.com/) 多渠道部署
-- 🍐 基于[责任链](https://refactoringguru.cn/design-patterns/chain-of-responsibility/go/example)的消息处理器,轻松自定义扩展命令
+🛖 场景预设:内置丰富场景列表,一键切换AI角色
-[//]: # (- 🍊 [zap](https://github.com/uber-go/zap)日志记录)
+🎭 角色扮演:支持场景模式,增添讨论乐趣和创意
-[//]: # (- )
-- 🍋 基于[goCache](https://github.com/patrickmn/go-cache)内存键值对缓存
+🤖 AI模式:内置4种AI模式,感受AI的智慧与创意
+🔄 上下文保留:回复对话框即可继续同一话题讨论
-## 项目部署
+⏰ 自动结束:超时自动结束对话,支持清除讨论历史
+📝 富文本卡片:支持富文本卡片回复,信息更丰富多彩
-###### 有关飞书具体的配置文件说明,**[➡︎ 点击查看](#详细配置步骤)**
+👍 交互式反馈:即时获取机器人处理结果
+🎰 余额查询:即时获取token消耗情况
-``` bash
-git clone git@github.com:Leizhenpeng/feishu-chatGpt.git
-cd feishu-chatGpt/code
+🔙 历史回档:轻松回档历史对话,继续话题讨论 🚧
+
+🔒 管理员模式:内置管理员模式,使用更安全可靠 🚧
+
+🌐 多token负载均衡:优化生产级别的高频调用场景
+
+↩️ 支持反向代理:为不同地区的用户提供更快、更稳定的访问体验
+
+📚 与飞书文档互动:成为企业员工的超级助手 🚧
+
+🎥 话题内容秒转PPT:让你的汇报从此变得更加简单 🚧
+
+📊 表格分析:轻松导入飞书表格,提升数据分析效率 🚧
+
+🍊 私有数据训练:利用公司产品信息对GPT二次训练,更好地满足客户个性化需求 🚧
+
+
+
+## 🌟 项目特点
+
+- 🍏 支持 OpenAI (https://platform.openai.com/account/api-keys) 主要Chat接口:GPT4、DALL·E-3、Whisper、GPT-4V
+- 🍎 将 ChatGPT 接入[飞书](https://open.feishu.cn/app)和[飞书国际版](https://www.larksuite.com/)
+- 🥒
+ 支持[Serverless 云函数](https://github.com/serverless-devs/serverless-devs)、[本地环境](https://dashboard.cpolar.com/login)、[Docker](https://www.docker.com/)、[二进制安装包](https://github.com/Leizhenpeng/feishu-chatgpt/releases/)
+ 等多种渠道部署
+- 🍋 基于[goCache](https://github.com/patrickmn/go-cache)内存键值对缓存
+
+## 项目部署
+
+###### 有关飞书的配置文件说明,**[➡︎ 点击查看](#详细配置步骤)**
-# 配置config.yaml
-mv config.example.yaml config.yaml
-```
本地部署
+```bash
+git clone git@github.com:Leizhenpeng/feishu-chatgpt.git
+cd feishu-chatgpt/code
+```
+
如果你的服务器没有公网 IP,可以使用反向代理的方式
-飞书的服务器在国内对ngrok的访问速度很慢,所以推荐使用一些国内的反向代理服务商
+飞书的服务器在国内对 ngrok 的访问速度很慢,所以推荐使用一些国内的反向代理服务商
+
- [cpolar](https://dashboard.cpolar.com/)
- [natapp](https://natapp.cn/)
-
```bash
+# 配置config.yaml
+mv config.example.yaml config.yaml
+
//测试部署
go run main.go
cpolar http 9000
@@ -74,67 +142,319 @@ ps -ef | grep cpolar
kill -9 PID
```
-更多详细介绍,参考[飞书上的小计算器: Go机器人来啦](https://www.bilibili.com/video/BV1nW4y1378T/)
+更多详细介绍,参考[飞书上的小计算器: Go 机器人来啦](https://www.bilibili.com/video/BV1nW4y1378T/)
-
- serverless部署
+ serverless云函数(阿里云等)部署
-``` bash
+```bash
+git clone git@github.com:Leizhenpeng/feishu-chatgpt.git
+cd feishu-chatgpt/code
+```
+
+安装[serverless](https://docs.serverless-devs.com/serverless-devs/quick_start)工具
+
+```bash
+# 配置config.yaml
+mv config.example.yaml config.yaml
+# 安装serverless cli
+npm install @serverless-devs/s -g
+```
+
+安装完成后,请根据您的本地环境,按照下面教程部署`serverless`
+
+- 本地 `linux`/`mac os` 环境
+
+1. 修改`s.yaml`中的部署地区和部署秘钥
+
+```
+edition: 1.0.0
+name: feishuBot-chatGpt
+access: "aliyun" # 修改自定义的秘钥别称
+
+vars: # 全局变量
+region: "cn-hongkong" # 修改云函数想要部署地区
+
+```
+
+2. 一键部署
+
+```bash
+cd ..
+s deploy
+```
+
+- 本地`windows`
+
+1. 首先打开本地`cmd`命令提示符工具,运行`go env`检查你电脑上 go 环境变量设置, 确认以下变量和值
+
+```cmd
+set GO111MODULE=on
+set GOARCH=amd64
+set GOOS=linux
+set CGO_ENABLED=0
+```
+
+如果值不正确,比如您电脑上为`set GOOS=windows`, 请运行以下命令设置`GOOS`变量值
+
+```cmd
+go env -w GOOS=linux
+```
+
+2. 修改`s.yaml`中的部署地区和部署秘钥
+
+```
+edition: 1.0.0
+name: feishuBot-chatGpt
+access: "aliyun" # 修改自定义的秘钥别称
+
+vars: # 全局变量
+ region: "cn-hongkong" # 修改云函数想要部署地区
+
+```
+
+3. 修改`s.yaml`中的`pre-deploy`, 去除第二步`run`前面的环境变量设置部分
+
+```
+ pre-deploy:
+ - run: go mod tidy
+ path: ./code
+ - run: go build -o
+ target/main main.go # 删除GO111MODULE=on GOOS=linux GOARCH=amd64 CGO_ENABLED=0
+ path: ./code
+
+```
+
+4. 一键部署
+
+```bash
cd ..
s deploy
```
-更多详细介绍,参考[仅需1min,用Serverless部署基于 gin 的飞书机器人](https://www.bilibili.com/video/BV1nW4y1378T/)
+更多详细介绍,参考[仅需 1min,用 Serverless 部署基于 gin 的飞书机器人](https://www.bilibili.com/video/BV1nW4y1378T/)
+
+
+
+ 使用 Railway 平台一键部署
+
+
+Railway 是一家国外的 Serverless 平台,支持多种语言,可以一键将 GitHub 上的代码仓库部署到 Railway 平台,然后在 Railway
+平台上配置环境变量即可。部署本项目的流程如下:
+
+#### 1. 生成 Railway 项目
+
+点击下方按钮即可创建一个对应的 Railway 项目,其会自动 Fork 本项目到你的 GitHub 账号下。
+
+[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/10D-TF?referralCode=oMcVS2)
+
+#### 2. 配置环境变量
+
+在打开的页面中,配置环境变量,每个变量的说明如下图所示:
+
+
+
+
+#### 3. 部署项目
+
+填写完环境变量后,点击 Deploy 就完成了项目的部署。部署完成后还需获取对应的域名用于飞书机器人访问,如下图所示:
+
+
+
+如果不确定自己部署是否成功,可以通过访问上述获取到的域名 (https://xxxxxxxx.railway.app/ping) 来查看是否返回了`pong`
+,如果返回了`pong`,说明部署成功。
- docker部署
+ Repl.it部署
-待补充
-
+The fastest way to deploy feishu-openai to `repl.it` is to click the `run on repl.it` button below.
+
+
+
+
+
+Remember to switch to the `secrets` tab, then edit `System environment variables`. You can also edit the raw JSON:
+
+```json
+{
+ "APP_ID": "",
+ "APP_SECRET": "",
+ "APP_ENCRYPT_KEY": "",
+ "APP_VERIFICATION_TOKEN": "",
+ "BOT_NAME": "ChatGPT",
+ "OPENAI_KEY": "sk-",
+ "OPENAI_MODEL": "gpt-3.5-turbo"
+}
+```
+
+ docker部署
+
-## 功能解释
+```bash
+docker build -t feishu-chatgpt:latest .
+docker run -d --name feishu-chatgpt -p 9000:9000 \
+--env APP_ID=xxx \
+--env APP_SECRET=xxx \
+--env APP_ENCRYPT_KEY=xxx \
+--env APP_VERIFICATION_TOKEN=xxx \
+--env BOT_NAME=chatGpt \
+--env OPENAI_KEY="sk-xxx1,sk-xxx2,sk-xxx3" \
+--env API_URL="https://api.openai.com" \
+--env HTTP_PROXY="" \
+feishu-chatgpt:latest
+```
-### 责任链-设计模式
+注意:
-划重点@bro
+- `BOT_NAME` 为飞书机器人名称,例如 `chatGpt`
+- `OPENAI_KEY` 为openai key,多个key用逗号分隔,例如 `sk-xxx1,sk-xxx2,sk-xxx3`
+- `HTTP_PROXY` 为宿主机的proxy地址,例如 `http://host.docker.internal:7890`,没有代理的话,可以不用设置
+- `API_URL` 为openai api 接口地址,例如 `https://api.openai.com`, 没有反向代理的话,可以不用设置
-千万不要用if else,这样的代码,不仅可读性差,而且,如果要增加一个处理器,就需要修改代码,违反了开闭原则
+---
-用户发送的文本消息,根据消息内容,匹配到对应的处理器,处理器处理消息,返回结果给用户
+小白简易化 docker 部署
-这种匹配,可以使用责任链模式,将匹配的逻辑抽象成一个个的处理器,然后将这些处理器串联起来,形成一个链条。
+- docker 地址: https://hub.docker.com/r/leizhenpeng/feishu-chatgpt
-用户发送的消息,从链条的头部开始,依次匹配,匹配到后,就不再继续匹配,直接返回结果给用户
+```bash
+docker run -d --restart=always --name feishu-chatgpt2 -p 9000:9000 -v /etc/localtime:/etc/localtime:ro \
+--env APP_ID=xxx \
+--env APP_SECRET=xxx \
+--env APP_ENCRYPT_KEY=xxx \
+--env APP_VERIFICATION_TOKEN=xxx \
+--env BOT_NAME=chatGpt \
+--env OPENAI_KEY="sk-xxx1,sk-xxx2,sk-xxx3" \
+--env API_URL=https://api.openai.com \
+--env HTTP_PROXY="" \
+dockerproxy.com/leizhenpeng/feishu-chatgpt:latest
+```
+事件回调地址: http://IP:9000/webhook/event
+卡片回调地址: http://IP:9000/webhook/card
-!!!切记!!!
+把它填入飞书后台
-责任链模式[参考代码](https://refactoringguru.cn/design-patterns/chain-of-responsibility)
+---
+部署azure版本
+```bash
+docker build -t feishu-chatgpt:latest .
+docker run -d --name feishu-chatgpt -p 9000:9000 \
+--env APP_ID=xxx \
+--env APP_SECRET=xxx \
+--env APP_ENCRYPT_KEY=xxx \
+--env APP_VERIFICATION_TOKEN=xxx \
+--env BOT_NAME=chatGpt \
+--env AZURE_ON=true \
+--env AZURE_API_VERSION=xxx \
+--env AZURE_RESOURCE_NAME=xxx \
+--env AZURE_DEPLOYMENT_NAME=xxx \
+--env AZURE_OPENAI_TOKEN=xxx \
+feishu-chatgpt:latest
+```
+
+注意:
+
+- `BOT_NAME` 为飞书机器人名称,例如 `chatGpt`
+- `AZURE_ON` 为是否使用azure ,请填写 `true`
+- `AZURE_API_VERSION` 为azure api版本 例如 `2023-03-15-preview`
+- `AZURE_RESOURCE_NAME` 为azure 资源名称 类似 `https://{AZURE_RESOURCE_NAME}.openai.azure.com`
+- `AZURE_DEPLOYMENT_NAME` 为azure 部署名称 类似 `https://{AZURE_RESOURCE_NAME}.openai.azure.com/deployments/{AZURE_DEPLOYMENT_NAME}/chat/completions`
+- `AZURE_OPENAI_TOKEN` 为azure openai token
+
+
+
+
+ docker-compose 部署
+
+
+编辑 docker-compose.yaml,通过 environment 配置相应环境变量(或者通过 volumes 挂载相应配置文件),然后运行下面的命令即可
+
+```bash
+# 构建镜像
+docker compose build
+
+# 启动服务
+docker compose up -d
+
+# 停止服务
+docker compose down
+```
+
+事件回调地址: http://IP:9000/webhook/event
+卡片回调地址: http://IP:9000/webhook/card
+
+
+
+
+ 二进制安装包部署
+
+
+1. 进入[release 页面](https://github.com/Leizhenpeng/feishu-chatgpt/releases/) 下载对应的安装包
+2. 解压安装包,修改 config.example.yaml 中配置信息,另存为 config.yaml
+3. 目录下添加文件 `role_list.yaml`,自定义角色,可以从这里获取:[链接](https://github.com/Leizhenpeng/feishu-chatgpt/blob/master/code/role_list.yaml)
+4. 运行程序入口文件 `feishu-chatgpt`
+
+事件回调地址: http://IP:9000/webhook/event
+卡片回调地址: http://IP:9000/webhook/card
+
+
## 详细配置步骤
-- 获取 [OpenAI](https://platform.openai.com/account/api-keys) 的 KEY
-- 创建 [飞书](https://open.feishu.cn/) 机器人
+
+ 📸 点击展开飞书机器人配置的分步截图指导
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+- 获取 [OpenAI](https://platform.openai.com/account/api-keys) 的 KEY( 🙉 下面有免费的 KEY 供大家测试部署 )
+- 创建 [飞书](https://open.feishu.cn/) 机器人
1. 前往[开发者平台](https://open.feishu.cn/app?lang=zh-CN)创建应用,并获取到 APPID 和 Secret
2. 前往`应用功能-机器人`, 创建机器人
- 3. 从cpolar或者serverless获得公网地址,例如`http://xxxx.r6.cpolar.top/webhook/event` ,在飞书机器人的 `事件订阅` 板块填写回调地址。
- 4. 给订阅添加下列回调事件
+ 3. 从 cpolar、serverless 或 Railway 获得公网地址,在飞书机器人后台的 `事件订阅` 板块填写。例如,
+ - `http://xxxx.r6.cpolar.top`为 cpolar 暴露的公网地址
+ - `/webhook/event`为统一的应用路由
+ - 最终的回调地址为 `http://xxxx.r6.cpolar.top/webhook/event`
+ 4. 在飞书机器人后台的 `机器人` 板块,填写消息卡片请求网址。例如,
+ - `http://xxxx.r6.cpolar.top`为 cpolar 暴露的公网地址
+ - `/webhook/card`为统一的应用路由
+ - 最终的消息卡片请求网址为 `http://xxxx.r6.cpolar.top/webhook/card`
+ 5. 在事件订阅板块,搜索三个词`机器人进群`、 `接收消息`、 `消息已读`, 把他们后面所有的权限全部勾选。
+ 进入权限管理界面,搜索`图片`, 勾选`获取与上传图片或文件资源`。
+ 最终会添加下列回调事件
+ - im:resource(获取与上传图片或文件资源)
- im:message
- im:message.group_at_msg(获取群组中所有消息)
- im:message.group_at_msg:readonly(接收群聊中@机器人消息事件)
@@ -143,18 +463,43 @@ s deploy
- im:message:send_as_bot(获取用户在群组中@机器人的消息)
- im:chat:readonly(获取群组信息)
- im:chat(获取与更新群组信息)
+
+
5. 发布版本,等待企业管理员审核通过
-更多介绍,参考[飞书上的小计算器: Go机器人来啦](https://www.bilibili.com/video/BV12M41187rV/)
+更多介绍,参考[飞书上的小计算器: Go 机器人来啦](https://www.bilibili.com/video/BV12M41187rV/)
+
+
+## 一起交流
+
+遇到问题,可以加入飞书群沟通~
+
+
+
+
+## 企联AI
+
+| AI
| SDK | Application |
+| :-------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| 🎒OpenAI | [Go-OpenAI](https://github.com/ConnectAI-E/Go-OpenAI) | [🏅Feishu-OpenAI](https://github.com/ConnectAI-E/Feishu-OpenAI), [🎖Lark-OpenAI](https://github.com/ConnectAI-E/Lark-OpenAI), [Feishu-EX-ChatGPT](https://github.com/ConnectAI-E/Feishu-EX-ChatGPT), [🎖Feishu-OpenAI-Stream-Chatbot](https://github.com/ConnectAI-E/Feishu-OpenAI-Stream-Chatbot), [Feishu-TLDR](https://github.com/ConnectAI-E/Feishu-TLDR),[Feishu-OpenAI-Amazing](https://github.com/ConnectAI-E/Feishu-OpenAI-Amazing), [Feishu-Oral-Friend](https://github.com/ConnectAI-E/Feishu-Oral-Friend), [Feishu-OpenAI-Base-Helper](https://github.com/ConnectAI-E/Feishu-OpenAI-Base-Helper), [Feishu-Vector-Knowledge-Management](https://github.com/ConnectAI-E/Feishu-Vector-Knowledge-Management), [Feishu-OpenAI-PDF-Helper](https://github.com/ConnectAI-E/Feishu-OpenAI-PDF-Helper), [🏅Dingtalk-OpenAI](https://github.com/ConnectAI-E/Dingtalk-OpenAI), [Wework-OpenAI](https://github.com/ConnectAI-E/Wework-OpenAI), [WeWork-OpenAI-Node](https://github.com/ConnectAI-E/WeWork-OpenAI-Node), [llmplugin](https://github.com/ConnectAI-E/llmplugin) |
+| 🤖 AutoGPT | ------ | [🏅AutoGPT-Next-Web](https://github.com/ConnectAI-E/AutoGPT-Next-Web) |
+| 🎭 Stablediffusion | ------ | [🎖Feishu-Stablediffusion](https://github.com/ConnectAI-E/Feishu-Stablediffusion) |
+| 🍎 Midjourney | [Go-Midjourney](https://github.com/ConnectAI-E/Go-Midjourney) | [🏅Feishu-Midjourney](https://github.com/ConnectAI-E/Feishu-Midjourney), [🔥MidJourney-Web](https://github.com/ConnectAI-E/MidJourney-Web), [Dingtalk-Midjourney](https://github.com/ConnectAI-E/Dingtalk-Midjourney) |
+| 🍍 文心一言 | [Go-Wenxin](https://github.com/ConnectAI-E/Go-Wenxin) | [Feishu-Wenxin](https://github.com/ConnectAI-E/Feishu-Wenxin), [Dingtalk-Wenxin](https://github.com/ConnectAI-E/Dingtalk-Wenxin), [Wework-Wenxin](https://github.com/ConnectAI-E/Wework-Wenxin) |
+| 💸 Minimax | [Go-Minimax](https://github.com/ConnectAI-E/Go-Minimax) | [Feishu-Minimax](https://github.com/ConnectAI-E/Feishu-Minimax), [Dingtalk-Minimax](https://github.com/ConnectAI-E/Dingtalk-Minimax), [Wework-Minimax](https://github.com/ConnectAI-E/Wework-Minimax) |
+| ⛳️ CLAUDE | [Go-Claude](https://github.com/ConnectAI-E/Go-Claude) | [Feishu-Claude](https://github.com/ConnectAI-E/Feishu-Claude), [DingTalk-Claude](https://github.com/ConnectAI-E/DingTalk-Claude), [Wework-Claude](https://github.com/ConnectAI-E/Wework-Claude) |
+| 🥁 PaLM | [Go-PaLM](https://github.com/ConnectAI-E/go-PaLM) | [Feishu-PaLM](https://github.com/ConnectAI-E/Feishu-PaLM),[DingTalk-PaLM](https://github.com/ConnectAI-E/DingTalk-PaLM),[Wework-PaLM](https://github.com/ConnectAI-E/Wework-PaLM) |
+| 🎡 Prompt | ------ | [📖 Prompt-Engineering-Tutior](https://github.com/ConnectAI-E/Prompt-Engineering-Tutior) |
+| 🍋 ChatGLM | ------ | [Feishu-ChatGLM](https://github.com/ConnectAI-E/Feishu-ChatGLM) |
+| ⛓ LangChain | ------ | [📖 LangChain-Tutior](https://github.com/ConnectAI-E/LangChain-Tutior) |
+| 🪄 One-click | ------ | [🎖Awesome-One-Click-Deployment](https://github.com/ConnectAI-E/Awesome-One-Click-Deployment) |
+
+
-### 相关阅读
-- [go-cache](https://github.com/patrickmn/go-cache)
+开源社区:https://github.com/ConnectAI-E
-- [在Go语言项目中使用Zap日志库](https://www.liwenzhou.com/posts/Go/zap/)
-- [飞书 User_ID、Open_ID 与 Union_ID 区别](https://www.feishu.cn/hc/zh-CN/articles/794300086214)
-- [飞书重复接受到消息](https://open.feishu.cn/document/uAjLw4CM/ukTMukTMukTM/reference/im-v1/message/events/receive)
diff --git a/s.yaml b/s.yaml
index 279d9432..f54ef3a9 100644
--- a/s.yaml
+++ b/s.yaml
@@ -1,9 +1,9 @@
edition: 1.0.0
-name: hello-world-app
+name: feishuBot-chatGpt
access: "aliyun" # 秘钥别名
vars: # 全局变量
- region: "cn-hangzhou"
+ region: "ap-southeast-1"
services:
helloworld:
@@ -28,7 +28,7 @@ services:
name: "feishu-chatgpt"
description: 'a simple feishubot by serverless devs'
codeUri: './code'
- cAPort: 9000
+ caPort: 9000
customRuntimeConfig:
command:
- ./target/main
@@ -36,7 +36,7 @@ services:
handler: index.handler
instanceConcurrency: 20
instanceType: e1
- memorySize: 128
+ memorySize: 512
runtime: custom
timeout: 120
internetAccess: true