diff --git a/README.md b/README.md index a10d1d64..72588a51 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,10 @@ check if you have GPU acceleration (torch required) ```bash pip install nexaai --index-url https://nexaai.github.io/nexa-sdk/whl/cu124 --extra-index-url https://pypi.org/simple ``` + Optionally, you can install the ONNX-supported version: + ```bash + pip install nexaai[onnx] --index-url https://nexaai.github.io/nexa-sdk/whl/cu124 --extra-index-url https://pypi.org/simple + ```
Apple M Chip: @@ -58,43 +62,74 @@ check if you have GPU acceleration (torch required) ```bash pip install nexaai --index-url https://nexaai.github.io/nexa-sdk/whl/metal --extra-index-url https://pypi.org/simple ``` + Optionally, you can install the ONNX-supported version: + ```bash + pip install nexaai[onnx] --index-url https://nexaai.github.io/nexa-sdk/whl/metal --extra-index-url https://pypi.org/simple + ```
### CPU version
- Mac with Intel chips + Mac with Intel Chips - ``` + To install the `nexaai` package on a Mac with Intel chips, use the following command: + + ```bash CMAKE_ARGS="-DCMAKE_CXX_FLAGS=-fopenmp" pip install nexaai ``` + + **Optional:** To install the version with ONNX support, use: + + ```bash + CMAKE_ARGS="-DCMAKE_CXX_FLAGS=-fopenmp" pip install nexaai[onnx] + ``` +
- Mac with M chips or other Operating systems: + Mac with M Chips or Other Operating Systems - ``` + To install the `nexaai` package on a Mac with M chips or other operating systems, use the following command: + + ```bash pip install nexaai ``` + + **Optional:** To install the version with ONNX support, use: + + ```bash + pip install nexaai[onnx] + ``` + +
+If you prefer to install the pre-built wheel for CPU versions: -Or you prefer to install the pre-built wheel: ```bash pip install nexaai --index-url https://nexaai.github.io/nexa-sdk/whl/cpu --extra-index-url https://pypi.org/simple ``` -### Docker Usage -Note: Docker doesn't support GPU acceleration +To include ONNX support: -`docker pull nexa4ai/sdk:latest` +```bash +pip install nexaai[onnx] --index-url https://nexaai.github.io/nexa-sdk/whl/cpu --extra-index-url https://pypi.org/simple +``` +### Docker Usage +Note: Docker doesn't support GPU acceleration +```bash +docker pull nexa4ai/sdk:latest +``` replace following placeholder with your path and command - -`docker run -v :/model -it nexa4ai/sdk:latest [nexa_command] [your_model_relative_path]` +```bash +docker run -v :/model -it nexa4ai/sdk:latest [nexa_command] [your_model_relative_path] +``` Example: - -`docker run -v /home/ubuntu/.cache/nexa/hub/official:/model -it nexa4ai/sdk:latest nexa gen-text /model/Phi-3-mini-128k-instruct/q4_0.gguf` +```bash +docker run -v /home/ubuntu/.cache/nexa/hub/official:/model -it nexa4ai/sdk:latest nexa gen-text /model/Phi-3-mini-128k-instruct/q4_0.gguf +``` will create an interactive session with text generation diff --git a/pyproject.toml b/pyproject.toml index 39e8d90f..16aea040 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,8 @@ sdist.exclude = [ build.verbose = true cmake.build-type = "Release" cmake.version = ">=3.16" -# cmake.args = ["-DCMAKE_CXX_FLAGS=-fopenmp"] +# cmake.args = ["-DCMAKE_CXX_FLAGS=-fopenmp"] # for macOS with Intel chips +cmake.args = ["-DCMAKE_BUILD_PARALLEL_LEVEL=16"] [tool.scikit-build.metadata.version] provider = "scikit_build_core.metadata.regex"