From 418d07786875a64b275ecbdebf84ee9716a8af3a Mon Sep 17 00:00:00 2001
From: Te993 <3923106166@qq.com>
Date: Tue, 10 Dec 2024 21:33:13 +0800
Subject: [PATCH] Fix lib path, llava_cpp import/library name; disable Bark build

---
 nexa/gguf/lib_utils.py               | 3 +--
 nexa/gguf/llama/llama_chat_format.py | 2 +-
 nexa/gguf/llama/llava_cpp.py         | 2 +-
 pyproject.toml                       | 2 +-
 4 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/nexa/gguf/lib_utils.py b/nexa/gguf/lib_utils.py
index 8397e026..ec030b9d 100644
--- a/nexa/gguf/lib_utils.py
+++ b/nexa/gguf/lib_utils.py
@@ -17,8 +17,7 @@ def is_gpu_available():
 # Load the library
 def load_library(lib_base_name: str):
     # Construct the paths to the possible shared library names
-    # _base_path = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) / "lib"
-    _base_path = pathlib.Path('D:/repo/nexa-ai/llama-cpp-python/llama_cpp/lib')
+    _base_path = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) / "lib"
     # Searching for the library in the current directory under the name "libllama" (default name
     # for llamacpp) and "llama" (default name for this repo)
     _lib_paths: List[pathlib.Path] = []
diff --git a/nexa/gguf/llama/llama_chat_format.py b/nexa/gguf/llama/llama_chat_format.py
index f4e72617..aeee3399 100644
--- a/nexa/gguf/llama/llama_chat_format.py
+++ b/nexa/gguf/llama/llama_chat_format.py
@@ -2667,7 +2667,7 @@ class Llava15ChatHandler:
     )
 
     def __init__(self, clip_model_path: str, verbose: bool = True):
-        import llama_cpp.llava_cpp as llava_cpp
+        import nexa.gguf.llama.llava_cpp as llava_cpp
 
         self.clip_model_path = clip_model_path
         self.verbose = verbose
diff --git a/nexa/gguf/llama/llava_cpp.py b/nexa/gguf/llama/llava_cpp.py
index e6728f1c..945da826 100644
--- a/nexa/gguf/llama/llava_cpp.py
+++ b/nexa/gguf/llama/llava_cpp.py
@@ -35,7 +35,7 @@
 
 
 # Specify the base name of the shared library to load
-_libllava_base_name = "llava"
+_libllava_base_name = "llava_shared"
 
 # Load the library
 _libllava = load_library(_libllava_base_name)
diff --git a/pyproject.toml b/pyproject.toml
index c225a1bf..57443c1c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -131,7 +131,7 @@ cmake.args = [
     "-DCMAKE_BUILD_PARALLEL_LEVEL=16",
     "-DSTABLE_DIFFUSION_BUILD=ON",
     "-DLLAMA_BUILD=ON",
-    "-DBARK_BUILD=ON",
+    "-DBARK_BUILD=OFF",
     "-DBUILD_SHARED_LIBS=ON",
     "-DLLAMA_BUILD_TESTS=OFF"
 ]