diff --git a/code/config.example.yaml b/code/config.example.yaml
index e81a4748..0b1e42ef 100644
--- a/code/config.example.yaml
+++ b/code/config.example.yaml
@@ -15,4 +15,5 @@ CERT_FILE: cert.pem
 KEY_FILE: key.pem
 # openai API address; usually no need to change this unless you run your own reverse proxy
 API_URL: https://api.openai.com
-
+# Proxy setting, e.g. "http://127.0.0.1:7890"; "" means no proxy is used
+HTTP_PROXY: ""
diff --git a/code/initialization/config.go b/code/initialization/config.go
index bb18876f..a991e288 100644
--- a/code/initialization/config.go
+++ b/code/initialization/config.go
@@ -22,6 +22,7 @@ type Config struct {
     CertFile     string
     KeyFile      string
     OpenaiApiUrl string
+    HttpProxy    string
 }
 
 func LoadConfig(cfg string) *Config {
@@ -47,6 +48,7 @@ func LoadConfig(cfg string) *Config {
         CertFile:     getViperStringValue("CERT_FILE", "cert.pem"),
         KeyFile:      getViperStringValue("KEY_FILE", "key.pem"),
         OpenaiApiUrl: getViperStringValue("API_URL", "https://api.openai.com"),
+        HttpProxy:    getViperStringValue("HTTP_PROXY", ""),
     }
 
     return config
diff --git a/code/main.go b/code/main.go
index bcfcbe34..8c5bdb83 100644
--- a/code/main.go
+++ b/code/main.go
@@ -26,7 +26,7 @@ func main() {
     pflag.Parse()
     config := initialization.LoadConfig(*cfg)
     initialization.LoadLarkClient(*config)
-    gpt := services.NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
+    gpt := services.NewChatGPT(*config)
     handlers.InitHandlers(gpt, *config)
 
     eventHandler := dispatcher.NewEventDispatcher(
diff --git a/code/services/gpt3.go b/code/services/gpt3.go
index 112f7ff5..cccca2ce 100644
--- a/code/services/gpt3.go
+++ b/code/services/gpt3.go
@@ -7,6 +7,8 @@ import (
     "fmt"
     "io/ioutil"
     "net/http"
+    "net/url"
+    "start-feishubot/initialization"
     "start-feishubot/services/loadbalancer"
     "strings"
     "time"
@@ -49,9 +51,10 @@ type ChatGPTRequestBody struct {
     PresencePenalty  int        `json:"presence_penalty"`
 }
 type ChatGPT struct {
-    Lb     *loadbalancer.LoadBalancer
-    ApiKey []string
-    ApiUrl string
+    Lb        *loadbalancer.LoadBalancer
+    ApiKey    []string
+    ApiUrl    string
+    HttpProxy string
 }
 
 type ImageGenerationRequestBody struct {
@@ -68,8 +71,9 @@ type ImageGenerationResponseBody struct {
     } `json:"data"`
 }
 
-func (gpt ChatGPT) sendRequest(url, method string,
-    requestBody interface{}, responseBody interface{}) error {
+func (gpt ChatGPT) doRequest(url, method string,
+    requestBody interface{}, responseBody interface{},
+    client *http.Client) error {
     api := gpt.Lb.GetAPI()
     if api == nil {
         return errors.New("no available API")
@@ -87,7 +91,7 @@ func (gpt ChatGPT) sendRequest(url, method string,
 
     req.Header.Set("Content-Type", "application/json")
     req.Header.Set("Authorization", "Bearer "+api.Key)
-    client := &http.Client{Timeout: 110 * time.Second}
+
     response, err := client.Do(req)
     if err != nil {
         gpt.Lb.SetAvailability(api.Key, false)
@@ -114,6 +118,33 @@ func (gpt ChatGPT) sendRequest(url, method string,
     return nil
 }
 
+func (gpt ChatGPT) sendRequest(link, method string,
+    requestBody interface{}, responseBody interface{}) error {
+    var err error
+    client := &http.Client{Timeout: 110 * time.Second}
+    if gpt.HttpProxy == "" {
+        err = gpt.doRequest(link, method, requestBody, responseBody, client)
+    } else {
+        //fmt.Println("using proxy: " + gpt.HttpProxy)
+        proxyUrl, parseErr := url.Parse(gpt.HttpProxy)
+        if parseErr != nil {
+            return parseErr
+        }
+
+        transport := &http.Transport{
+            Proxy: http.ProxyURL(proxyUrl),
+        }
+        proxyClient := &http.Client{
+            Transport: transport,
+            Timeout:   110 * time.Second,
+        }
+
+        err = gpt.doRequest(link, method, requestBody, responseBody, proxyClient)
+    }
+
+    return err
+}
+
 func (gpt ChatGPT) Completions(msg []Messages) (resp Messages, err error) {
     requestBody := ChatGPTRequestBody{
         Model:            engine,
@@ -128,8 +159,11 @@ func (gpt ChatGPT) Completions(msg []Messages) (resp Messages, err error) {
     err = gpt.sendRequest(gpt.ApiUrl+"/v1/chat/completions", "POST",
         requestBody, gptResponseBody)
 
-    if err == nil {
+    if err == nil && len(gptResponseBody.Choices) > 0 {
         resp = gptResponseBody.Choices[0].Message
+    } else {
+        resp = Messages{}
+        err = errors.New("openai 请求失败")
+    }
     return resp, err
 }
@@ -165,11 +199,15 @@ func (gpt ChatGPT) GenerateOneImage(prompt string, size string) (string, error)
     return b64s[0], nil
 }
 
-func NewChatGPT(apiKeys []string, apiUrl string) *ChatGPT {
+func NewChatGPT(config initialization.Config) *ChatGPT {
+    apiKeys := config.OpenaiApiKeys
+    apiUrl := config.OpenaiApiUrl
+    httpProxy := config.HttpProxy
     lb := loadbalancer.NewLoadBalancer(apiKeys)
     return &ChatGPT{
-        Lb:     lb,
-        ApiKey: apiKeys,
-        ApiUrl: apiUrl,
+        Lb:        lb,
+        ApiKey:    apiKeys,
+        ApiUrl:    apiUrl,
+        HttpProxy: httpProxy,
     }
 }
diff --git a/code/services/gpt3_test.go b/code/services/gpt3_test.go
index 31b9bb02..a909e904 100644
--- a/code/services/gpt3_test.go
+++ b/code/services/gpt3_test.go
@@ -14,7 +14,7 @@ func TestCompletions(t *testing.T) {
         {Role: "user", Content: "翻译这段话: The assistant messages help store prior responses. They can also be written by a developer to help give examples of desired behavior."},
     }
 
-    gpt := NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
+    gpt := NewChatGPT(*config)
     resp, err := gpt.Completions(msgs)
     if err != nil {
@@ -27,7 +27,7 @@ func TestGenerateOneImage(t *testing.T) {
     config := initialization.LoadConfig("../config.yaml")
-    gpt := NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
+    gpt := NewChatGPT(*config)
 
     prompt := "a red apple"
     size := "256x256"
diff --git a/entrypoint.sh b/entrypoint.sh
index 6a912fb0..eb324623 100755
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -17,6 +17,7 @@ USE_HTTPS=${USE_HTTPS:-""}
 CERT_FILE=${CERT_FILE:-""}
 KEY_FILE=${KEY_FILE:-""}
 API_URL=${API_URL:-""}
+HTTP_PROXY=${HTTP_PROXY:-""}
 
 CONFIG_PATH=${CONFIG_PATH:-"config.yaml"}
 
@@ -84,6 +85,10 @@ if [ "$API_URL" != "" ] ; then
     sed -i "17c API_URL: $API_URL" $CONFIG_PATH
 fi
 
+if [ "$HTTP_PROXY" != "" ] ; then
+    sed -i "19c HTTP_PROXY: $HTTP_PROXY" $CONFIG_PATH
+fi
+
 echo -e "\033[32m[Success] Configuration file has been generated!\033[0m"
 
 /dist/feishu_chatgpt
diff --git a/readme.md b/readme.md
index b9885083..97f5342a 100644
--- a/readme.md
+++ b/readme.md
@@ -207,7 +207,8 @@ docker run -d --name feishu-chatgpt -p 9000:9000 \
 --env APP_VERIFICATION_TOKEN=xxx \
 --env BOT_NAME=chatGpt \
 --env OPENAI_KEY="sk-xxx1,sk-xxx2,sk-xxx3" \
---env API_URL=https://api.openai.com \
+--env API_URL="https://api.openai.com" \
+--env HTTP_PROXY="http://host.docker.internal:7890" \
 feishu-chatgpt:latest
 ```
 
@@ -226,6 +227,7 @@ docker run -d --restart=always --name feishu-chatgpt2 -p 9000:9000 -v /etc/local
 --env BOT_NAME=chatGpt \
 --env OPENAI_KEY="sk-xxx1,sk-xxx2,sk-xxx3" \
 --env API_URL=https://api.openai.com \
+--env HTTP_PROXY="" \
 dockerproxy.com/leizhenpeng/feishu-chatgpt:latest
 ```
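
A minimal, self-contained sketch of the proxy-aware `http.Client` construction that the new `sendRequest` wrapper relies on, useful for checking an `HTTP_PROXY` value outside the bot. The helper name `newHTTPClient`, the example proxy address, and the probe URL are illustrative assumptions, not part of this change.

```go
package main

import (
    "fmt"
    "net/http"
    "net/url"
    "time"
)

// newHTTPClient mirrors the proxy handling above: an empty proxy string
// yields a plain client, anything else is parsed and installed as the
// transport's proxy.
func newHTTPClient(httpProxy string) (*http.Client, error) {
    client := &http.Client{Timeout: 110 * time.Second}
    if httpProxy == "" {
        return client, nil
    }
    proxyURL, err := url.Parse(httpProxy)
    if err != nil {
        return nil, err
    }
    client.Transport = &http.Transport{Proxy: http.ProxyURL(proxyURL)}
    return client, nil
}

func main() {
    // Example value only; use your own proxy address or "" for no proxy.
    client, err := newHTTPClient("http://127.0.0.1:7890")
    if err != nil {
        fmt.Println("invalid proxy URL:", err)
        return
    }
    resp, err := client.Get("https://api.openai.com/v1/models")
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    defer resp.Body.Close()
    // Any HTTP status here (even 401 without an API key) shows the proxy path works.
    fmt.Println("status:", resp.Status)
}
```

Requests through the returned client honor the configured proxy; with an empty value the client keeps the default transport, so any system-level proxy environment variables still apply.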