Merge pull request #77 from Leizhenpeng/feat_support_proxy
feat: adding support for custom proxies
Leizhenpeng authored Mar 10, 2023
2 parents e7dfe02 + 1487e21 commit a72b1b4
Showing 7 changed files with 64 additions and 16 deletions.
3 changes: 2 additions & 1 deletion code/config.example.yaml
@@ -15,4 +15,5 @@ CERT_FILE: cert.pem
KEY_FILE: key.pem
# OpenAI API address; normally no need to change this unless you have your own reverse proxy
API_URL: https://api.openai.com

# Proxy settings, e.g. "http://127.0.0.1:7890"; "" means no proxy is used
HTTP_PROXY: ""
2 changes: 2 additions & 0 deletions code/initialization/config.go
@@ -22,6 +22,7 @@ type Config struct {
CertFile string
KeyFile string
OpenaiApiUrl string
HttpProxy string
}

func LoadConfig(cfg string) *Config {
@@ -47,6 +48,7 @@ func LoadConfig(cfg string) *Config {
CertFile: getViperStringValue("CERT_FILE", "cert.pem"),
KeyFile: getViperStringValue("KEY_FILE", "key.pem"),
OpenaiApiUrl: getViperStringValue("API_URL", "https://api.openai.com"),
HttpProxy: getViperStringValue("HTTP_PROXY", ""),
}

return config
2 changes: 1 addition & 1 deletion code/main.go
@@ -26,7 +26,7 @@ func main() {
pflag.Parse()
config := initialization.LoadConfig(*cfg)
initialization.LoadLarkClient(*config)
gpt := services.NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
gpt := services.NewChatGPT(*config)
handlers.InitHandlers(gpt, *config)

eventHandler := dispatcher.NewEventDispatcher(
60 changes: 49 additions & 11 deletions code/services/gpt3.go
@@ -7,6 +7,8 @@ import (
"fmt"
"io/ioutil"
"net/http"
"net/url"
"start-feishubot/initialization"
"start-feishubot/services/loadbalancer"
"strings"
"time"
@@ -49,9 +51,10 @@ type ChatGPTRequestBody struct {
PresencePenalty int `json:"presence_penalty"`
}
type ChatGPT struct {
Lb *loadbalancer.LoadBalancer
ApiKey []string
ApiUrl string
Lb *loadbalancer.LoadBalancer
ApiKey []string
ApiUrl string
HttpProxy string
}

type ImageGenerationRequestBody struct {
@@ -68,8 +71,9 @@ type ImageGenerationResponseBody struct {
} `json:"data"`
}

func (gpt ChatGPT) sendRequest(url, method string,
requestBody interface{}, responseBody interface{}) error {
func (gpt ChatGPT) doRequest(url, method string,
requestBody interface{}, responseBody interface{},
client *http.Client) error {
api := gpt.Lb.GetAPI()
if api == nil {
return errors.New("no available API")
@@ -87,7 +91,7 @@ func (gpt ChatGPT) sendRequest(url, method string,

req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", "Bearer "+api.Key)
client := &http.Client{Timeout: 110 * time.Second}

response, err := client.Do(req)
if err != nil {
gpt.Lb.SetAvailability(api.Key, false)
@@ -114,6 +118,33 @@ func (gpt ChatGPT) sendRequest(url, method string,
return nil
}

func (gpt ChatGPT) sendRequest(link, method string,
requestBody interface{}, responseBody interface{}) error {
var err error
client := &http.Client{Timeout: 110 * time.Second}
if gpt.HttpProxy == "" {
err = gpt.doRequest(link, method, requestBody, responseBody, client)
} else {
//fmt.Println("using proxy: " + gpt.HttpProxy)
// use a separate variable so the outer err is not shadowed and the proxy request's error is actually returned
proxyUrl, parseErr := url.Parse(gpt.HttpProxy)
if parseErr != nil {
return parseErr
}

transport := &http.Transport{
Proxy: http.ProxyURL(proxyUrl),
}
proxyClient := &http.Client{
Transport: transport,
Timeout: 110 * time.Second,
}

err = gpt.doRequest(link, method, requestBody, responseBody, proxyClient)
}

return err
}

func (gpt ChatGPT) Completions(msg []Messages) (resp Messages, err error) {
requestBody := ChatGPTRequestBody{
Model: engine,
@@ -128,8 +159,11 @@ func (gpt ChatGPT) Completions(msg []Messages) (resp Messages, err error) {
err = gpt.sendRequest(gpt.ApiUrl+"/v1/chat/completions", "POST",
requestBody, gptResponseBody)

if err == nil {
if err == nil && len(gptResponseBody.Choices) > 0 {
resp = gptResponseBody.Choices[0].Message
} else {
resp = Messages{}
err = errors.New("openai 请求失败")
}
return resp, err
}
@@ -165,11 +199,15 @@ func (gpt ChatGPT) GenerateOneImage(prompt string, size string) (string, error)
return b64s[0], nil
}

func NewChatGPT(apiKeys []string, apiUrl string) *ChatGPT {
func NewChatGPT(config initialization.Config) *ChatGPT {
apiKeys := config.OpenaiApiKeys
apiUrl := config.OpenaiApiUrl
httpProxy := config.HttpProxy
lb := loadbalancer.NewLoadBalancer(apiKeys)
return &ChatGPT{
Lb: lb,
ApiKey: apiKeys,
ApiUrl: apiUrl,
Lb: lb,
ApiKey: apiKeys,
ApiUrl: apiUrl,
HttpProxy: httpProxy,
}
}
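
For reference, the proxy handling above boils down to swapping the default transport for one built with `http.ProxyURL`. A minimal standalone sketch of that pattern, using only the standard library (the `newHTTPClient` helper name is an assumption; the 110-second timeout mirrors the diff):

```go
package services

import (
	"net/http"
	"net/url"
	"time"
)

// newHTTPClient returns a client that routes requests through proxyAddr
// when it is non-empty, and a plain client otherwise. Illustrative helper,
// not part of the repository.
func newHTTPClient(proxyAddr string) (*http.Client, error) {
	client := &http.Client{Timeout: 110 * time.Second}
	if proxyAddr == "" {
		return client, nil
	}
	proxyURL, err := url.Parse(proxyAddr)
	if err != nil {
		return nil, err
	}
	client.Transport = &http.Transport{Proxy: http.ProxyURL(proxyURL)}
	return client, nil
}
```

Either client can then be handed to `doRequest`, which is effectively what `sendRequest` does in its two branches.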
4 changes: 2 additions & 2 deletions code/services/gpt3_test.go
@@ -14,7 +14,7 @@ func TestCompletions(t *testing.T) {
{Role: "user", Content: "翻译这段话: The assistant messages help store prior responses. They can also be written by a developer to help give examples of desired behavior."},
}

gpt := NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
gpt := NewChatGPT(*config)

resp, err := gpt.Completions(msgs)
if err != nil {
@@ -27,7 +27,7 @@ func TestCompletions(t *testing.T) {
func TestGenerateOneImage(t *testing.T) {
config := initialization.LoadConfig("../config.yaml")

gpt := NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
gpt := NewChatGPT(*config)
prompt := "a red apple"
size := "256x256"

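
Because `NewChatGPT` now takes the whole `initialization.Config`, the proxy path can be exercised from a test by overriding `HttpProxy` before constructing the client. A sketch in the shape of the existing tests; the test name and the local proxy address are assumptions:

```go
package services

import (
	"testing"

	"start-feishubot/initialization"
)

func TestCompletionsViaProxy(t *testing.T) {
	config := initialization.LoadConfig("../config.yaml")
	config.HttpProxy = "http://127.0.0.1:7890" // assumed local proxy; leave "" to use a direct connection

	gpt := NewChatGPT(*config)
	resp, err := gpt.Completions([]Messages{
		{Role: "user", Content: "say hello"},
	})
	if err != nil {
		t.Errorf("TestCompletionsViaProxy failed with error: %v", err)
		return
	}
	t.Log(resp.Content)
}
```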
5 changes: 5 additions & 0 deletions entrypoint.sh
@@ -17,6 +17,7 @@ USE_HTTPS=${USE_HTTPS:-""}
CERT_FILE=${CERT_FILE:-""}
KEY_FILE=${KEY_FILE:-""}
API_URL=${API_URL:-""}
HTTP_PROXY=${HTTP_PROXY:-""}
CONFIG_PATH=${CONFIG_PATH:-"config.yaml"}


@@ -84,6 +85,10 @@ if [ "$API_URL" != "" ] ; then
sed -i "17c API_URL: $API_URL" $CONFIG_PATH
fi

if [ "$HTTP_PROXY" != "" ] ; then
sed -i "19c HTTP_PROXY: $HTTP_PROXY" $CONFIG_PATH
fi

echo -e "\033[32m[Success] Configuration file has been generated!\033[0m"

/dist/feishu_chatgpt
4 changes: 3 additions & 1 deletion readme.md
@@ -207,7 +207,8 @@ docker run -d --name feishu-chatgpt -p 9000:9000 \
--env APP_VERIFICATION_TOKEN=xxx \
--env BOT_NAME=chatGpt \
--env OPENAI_KEY="sk-xxx1,sk-xxx2,sk-xxx3" \
--env API_URL=https://api.openai.com \
--env API_URL="https://api.openai.com" \
--env HTTP_PROXY="http://host.docker.internal:7890" \
feishu-chatgpt:latest
```

@@ -226,6 +227,7 @@ docker run -d --restart=always --name feishu-chatgpt2 -p 9000:9000 -v /etc/local
--env BOT_NAME=chatGpt \
--env OPENAI_KEY="sk-xxx1,sk-xxx2,sk-xxx3" \
--env API_URL=https://api.openai.com \
--env HTTP_PROXY="" \
dockerproxy.com/leizhenpeng/feishu-chatgpt:latest
```

