Skip to content

Commit

Permalink
Merge pull request #28 from NexaAI/xingyu/unified_lib
Browse files Browse the repository at this point in the history
unified cpu and gpu build
  • Loading branch information
zhiyuan8 authored Aug 22, 2024
2 parents 72d93b2 + 1944e97 commit 50361e5
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 186 deletions.
11 changes: 11 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
cmake_minimum_required(VERSION 3.16)

if (GGML_CUDA OR GGML_METAL)
    # Sentinel file consumed by nexa/gguf/lib_utils.py::is_gpu_available() so the
    # installed Python package can tell at runtime that this was a GPU-enabled
    # (CUDA or Metal) build.
    # NOTE(review): this deliberately writes into the source tree so the file is
    # packaged under nexa/gguf/lib/; generated files normally belong in the
    # binary dir — confirm packaging relies on this location before changing it.
    set(EMPTY_FILE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/nexa/gguf/lib/empty_file.txt")
    add_custom_command(
        OUTPUT "${EMPTY_FILE_PATH}"
        COMMAND ${CMAKE_COMMAND} -E touch "${EMPTY_FILE_PATH}"
        COMMENT "Creating GPU-build sentinel file (GGML_CUDA or GGML_METAL is ON)"
        VERBATIM
    )
    # ALL target so the sentinel is produced by the default build.
    add_custom_target(create_empty_file ALL DEPENDS "${EMPTY_FILE_PATH}")
endif()

# Project: stable_diffusion_cpp
project(stable_diffusion_cpp)

Expand Down Expand Up @@ -174,3 +184,4 @@ if (LLAMA_BUILD)
endif()
endif()
endif()

4 changes: 3 additions & 1 deletion nexa/gguf/lib_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@


def is_gpu_available():
    """Return True if this installation was built with GPU (CUDA or Metal) support.

    Detection works by checking for the sentinel file ``lib/empty_file.txt``
    next to this module; the CMake build creates it only when GGML_CUDA or
    GGML_METAL is enabled.

    Returns:
        bool: True when the sentinel file exists, False otherwise.
    """
    current_dir = os.path.dirname(os.path.abspath(__file__))
    sentinel_path = os.path.join(current_dir, "lib", "empty_file.txt")
    return os.path.exists(sentinel_path)

# Load the library
def load_library(lib_base_name: str):
Expand Down
94 changes: 0 additions & 94 deletions tomls/pyproject_cuda.toml

This file was deleted.

91 changes: 0 additions & 91 deletions tomls/pyproject_metal.toml

This file was deleted.

0 comments on commit 50361e5

Please sign in to comment.