diff --git a/README.md b/README.md
index c250680..eabde7e 100644
--- a/README.md
+++ b/README.md
@@ -123,12 +123,10 @@
 docker run -d \
     -p 3000:3000 \
     -e OPENAI_API_KEY="sk-******" \
     -e BASE_URL="https://api.openai-forward.com" \
-    -e CODE="kunyuan" \
+    -e CODE="******" \
     yidadaa/chatgpt-next-web
 ```
 
-A public instance is deployed here for light use:
-https://chat.beidongjiedeguang.top , access code: `kunyuan`
 
 ### Use in code
 
@@ -185,17 +183,17 @@ curl --location 'https://api.openai-forward.com/v1/images/generations' \
 ## Configuration options
 
 Two ways of setting the configuration are supported:
-one is to pass run arguments when executing `openai-forward run` on the command line (e.g. `--port=8000`);
+one is to pass run arguments when executing `aifd run` on the command line (e.g. `--port=8000`);
 the other is to set them via environment variables.
 
 ### Command-line arguments
 
-These can be viewed with `openai-forward run --help`
+These can be viewed with `aifd run --help`
 Click for more details
 
-**`openai-forward run` parameter options**
+**`aifd run` parameter options**
 
 | Option | Description | Default |
 |-----------------|-------------------|:----------------------:|
@@ -277,20 +275,20 @@ docker run -d \
 Click for more details
 
-Logs are saved to `Log/chat.log` under the current directory.
+Logs are saved to `Log/chat` under the current directory.
 The record format is
 ```text
-{'messages': [{'user': 'hi'}], 'model': 'gpt-3.5-turbo', 'forwarded-for': '', 'uid': '467a17ec-bf39-4b65-9ebd-e722b3bdd5c3'}
-{'assistant': 'Hello! How can I assist you today?', 'uid': '467a17ec-bf39-4b65-9ebd-e722b3bdd5c3'}
-{'messages': [{'user': 'Hello!'}], 'model': 'gpt-3.5-turbo', 'forwarded-for': '', 'uid': 'f844d156-e747-4887-aef8-e40d977b5ee7'}
+{'messages': [{'user': 'hi'}], 'model': 'gpt-3.5-turbo', 'forwarded-for': '', 'uid': '467a17ec-bf39-4b65-9ebd-e722b3bdd5c3', 'datetime': '2023-07-18 14:01:21'}
+{'assistant': 'Hello there! How can I assist you today?', 'uid': '467a17ec-bf39-4b65-9ebd-e722b3bdd5c3'}
+{'messages': [{'user': 'Hello!'}], 'model': 'gpt-3.5-turbo', 'forwarded-for': '', 'uid': 'f844d156-e747-4887-aef8-e40d977b5ee7', 'datetime': '2023-07-18 14:01:23'}
 {'assistant': 'Hi there! How can I assist you today?', 'uid': 'f844d156-e747-4887-aef8-e40d977b5ee7'}
 ```
 
 Convert to `json` format:
 
 ```bash
-openai-forward convert
+aifd convert
 ```
 
 This yields `chat.json`:
@@ -298,6 +296,7 @@ openai-forward convert
 ```json
 [
   {
+    "datetime": "2023-07-18 14:01:21",
     "forwarded-for": "",
     "model": "gpt-3.5-turbo",
     "messages": [
@@ -308,6 +307,7 @@
     "assistant": "Hello there! How can I assist you today?"
   },
   {
+    "datetime": "2023-07-18 14:01:23",
     "forwarded-for": "",
     "model": "gpt-3.5-turbo",
     "messages": [
diff --git a/deploy.md b/deploy.md
index d7d5c4a..3b81ea9 100644
--- a/deploy.md
+++ b/deploy.md
@@ -26,7 +26,7 @@ pip install openai-forward
 **Run the service**
 
 ```bash
-openai_forward run
+openai-forward run # or use the alias: aifd run
 ```
 The service is now set up.
 See [Configuration](README.md#配置选项) for the configuration options.
@@ -54,7 +54,7 @@ http://{ip}:{port}/v1/chat/completions
     proxy_buffering off;
 ```
 
-The service can then be accessed over https.
+The service can then be accessed over https with streaming.
 
 ## Docker deployment
@@ -78,7 +78,7 @@ git clone https://github.com/beidongjiedeguang/openai-forward.git --depth=1
 cd openai-forward
 pip install -e .
 
-openai-forward run
+openai-forward run # or use the alias: aifd run
 ```
 
 Enabling SSL is the same as above.
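
For context on the "Use in code" section the README hunks above touch, here is a minimal sketch (not part of the diff) of routing the official `openai` Python client through a forwarding deployment; it uses the 0.27.x-style API matching the `openai>=0.27.8` pin below, and the base URL and key are the placeholder values from the README examples, not real credentials:

```python
# Minimal sketch: send openai client traffic through an openai-forward
# deployment by overriding the API base URL (values below are placeholders
# taken from the README examples).
import openai

openai.api_base = "https://api.openai-forward.com/v1"
openai.api_key = "sk-******"

resp = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hi"}],
)
print(resp.choices[0].message.content)
```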
diff --git a/openai_forward/tool.py b/openai_forward/tool.py
index de8f40a..f08f417 100644
--- a/openai_forward/tool.py
+++ b/openai_forward/tool.py
@@ -78,17 +78,19 @@ def parse_log_to_list(log_path: str):
 
 
 def convert_chatlog_to_jsonl(log_path: str, target_path: str):
+    """Convert single chatlog to jsonl"""
     message_list, assistant_list = parse_log_to_list(log_path)
     content_list = get_matches(messages=message_list, assistants=assistant_list)
     json_dump(content_list, target_path, indent_2=True)
 
 
-def sort_logname_by_datetime(log_path: str):
+def get_log_files_from_folder(log_path: str):
     return ls(log_path, "*.log", relp=False)
 
 
 def convert_folder_to_jsonl(folder_path: str, target_path: str):
-    log_files = sort_logname_by_datetime(folder_path)
+    """Convert chatlog folder to jsonl"""
+    log_files = get_log_files_from_folder(folder_path)
     messages = []
     assistants = []
     for log_path in log_files:
diff --git a/pyproject.toml b/pyproject.toml
index ff0a5b8..d1a0e57 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,13 +19,13 @@ classifiers = [
 ]
 
 dependencies = [
-    "loguru",
+    "loguru>=0.7.0",
     "sparrow-python>=0.1.5",
-    "fastapi",
-    "uvicorn",
-    "orjson",
+    "fastapi>=0.90.0",
+    "uvicorn>=0.23.1",
+    "orjson>=3.9.2",
     "python-dotenv",
-    "httpx",
+    "httpx>=0.24.1",
     "pytz",
 ]
@@ -39,14 +39,14 @@ Source = "https://github.com/beidongjiedeguang/openai-forward"
 
 [project.optional-dependencies]
 test = [
-    "openai",
+    "openai>=0.27.8",
     "pytest",
 ]
 
 [project.scripts]
 openai_forward = "openai_forward.__main__:main"
 openai-forward = "openai_forward.__main__:main"
-aifwd = "openai_forward.__main__:main"
+aifd = "openai_forward.__main__:main"
 
 [tool.hatch.version]
 path = "openai_forward/__init__.py"
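
The renamed helpers in `openai_forward/tool.py` keep the same call pattern, so the `aifd convert` flow described in the README can in principle also be driven directly from Python. A hedged usage sketch (the `Log/chat.log` and `Log/chat` paths are the locations mentioned in the README and are assumptions here):

```python
# Illustrative usage of the helpers changed in this diff; the log paths are
# assumptions based on the README, not something the diff itself shows.
from openai_forward.tool import convert_chatlog_to_jsonl, convert_folder_to_jsonl

# Convert a single chat log file to JSON.
convert_chatlog_to_jsonl("Log/chat.log", "chat.json")

# Or convert every *.log file found in a log folder.
convert_folder_to_jsonl("Log/chat", "chat.json")
```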