Skip to content

Commit

Permalink
Fix missing lora path argument in convert script (#271)
Browse files — browse the repository at this point in the history
* Fix missing lora path argument

* Format code with latest black
Branch information: authored by li-plus, Mar 6, 2024
1 parent eff7f44 commit b2d89c6
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 6 deletions.
3 changes: 2 additions & 1 deletion chatglm_cpp/convert.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Convert Hugging Face ChatGLM/ChatGLM2 models to GGML format
"""

import argparse
import platform
import struct
Expand Down Expand Up @@ -534,7 +535,7 @@ def main():
args = parser.parse_args()

with open(args.save_path, "wb") as f:
convert(f, args.model_name_or_path, dtype=args.type)
convert(f, args.model_name_or_path, args.lora_model_name_or_path, dtype=args.type)

print(f"GGML model saved to {args.save_path}")

Expand Down
9 changes: 7 additions & 2 deletions chatglm_cpp/openai_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,9 @@ class ChatCompletionRequest(BaseModel):
tools: Optional[List[ChatCompletionTool]] = None

model_config = {
"json_schema_extra": {"examples": [{"model": "default-model", "messages": [{"role": "user", "content": "你好"}]}]}
"json_schema_extra": {
"examples": [{"model": "default-model", "messages": [{"role": "user", "content": "你好"}]}]
}
}


Expand Down Expand Up @@ -108,7 +110,10 @@ class ChatCompletionResponse(BaseModel):
"choices": [
{
"index": 0,
"message": {"role": "assistant", "content": "你好👋!我是人工智能助手 ChatGLM2-6B,很高兴见到你,欢迎问我任何问题。"},
"message": {
"role": "assistant",
"content": "你好👋!我是人工智能助手 ChatGLM2-6B,很高兴见到你,欢迎问我任何问题。",
},
"finish_reason": "stop",
}
],
Expand Down
18 changes: 15 additions & 3 deletions tests/test_chatglm_cpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,17 +37,29 @@ def check_pipeline(model_path, prompt, target, gen_kwargs={}):

@pytest.mark.skipif(not CHATGLM_MODEL_PATH.exists(), reason="model file not found")
def test_chatglm_pipeline():
check_pipeline(model_path=CHATGLM_MODEL_PATH, prompt="你好", target="你好👋!我是人工智能助手 ChatGLM-6B,很高兴见到你,欢迎问我任何问题。")
check_pipeline(
model_path=CHATGLM_MODEL_PATH,
prompt="你好",
target="你好👋!我是人工智能助手 ChatGLM-6B,很高兴见到你,欢迎问我任何问题。",
)


@pytest.mark.skipif(not CHATGLM2_MODEL_PATH.exists(), reason="model file not found")
def test_chatglm2_pipeline():
check_pipeline(model_path=CHATGLM2_MODEL_PATH, prompt="你好", target="你好👋!我是人工智能助手 ChatGLM2-6B,很高兴见到你,欢迎问我任何问题。")
check_pipeline(
model_path=CHATGLM2_MODEL_PATH,
prompt="你好",
target="你好👋!我是人工智能助手 ChatGLM2-6B,很高兴见到你,欢迎问我任何问题。",
)


@pytest.mark.skipif(not CHATGLM3_MODEL_PATH.exists(), reason="model file not found")
def test_chatglm3_pipeline():
check_pipeline(model_path=CHATGLM3_MODEL_PATH, prompt="你好", target="你好👋!我是人工智能助手 ChatGLM3-6B,很高兴见到你,欢迎问我任何问题。")
check_pipeline(
model_path=CHATGLM3_MODEL_PATH,
prompt="你好",
target="你好👋!我是人工智能助手 ChatGLM3-6B,很高兴见到你,欢迎问我任何问题。",
)


@pytest.mark.skipif(not CODEGEEX2_MODEL_PATH.exists(), reason="model file not found")
Expand Down

0 comments on commit b2d89c6

Please sign in to comment.