From 3ce7168c64ff9b9e2861c7ec4434bf21c719fdd3 Mon Sep 17 00:00:00 2001
From: Trevor Royer
Date: Mon, 4 Nov 2024 08:49:04 -0700
Subject: [PATCH] test granite

---
 .github/workflows/build-images.yaml       | 29 ++++++++++++++++-----
 granite-3.0-2b-instruct/Containerfile     | 31 +++++++++++++++++++++++
 granite-3.0-2b-instruct/README.md         | 11 ++++++++
 granite-3.0-2b-instruct/download_model.py | 13 ++++++++++
 granite-3.0-2b-instruct/requirements.txt  |  3 +++
 5 files changed, 81 insertions(+), 6 deletions(-)
 create mode 100644 granite-3.0-2b-instruct/Containerfile
 create mode 100644 granite-3.0-2b-instruct/README.md
 create mode 100755 granite-3.0-2b-instruct/download_model.py
 create mode 100644 granite-3.0-2b-instruct/requirements.txt

diff --git a/.github/workflows/build-images.yaml b/.github/workflows/build-images.yaml
index 5377414..8b99e0a 100644
--- a/.github/workflows/build-images.yaml
+++ b/.github/workflows/build-images.yaml
@@ -13,16 +13,33 @@ on:
       - README.md
       - docs/**
 
+env:
+  REGISTRY: quay.io/redhat-ai-services
+  IMAGE_NAME: modelcar-catalog
+
 jobs:
-  build-mistral-image:
+  build-granite-image:
     uses: ./.github/workflows/workflow-build-image.yaml
     with:
-      registry: quay.io/redhat-ai-services
-      image-name: modelcar-catalog
-      context: "mistral-7b-instruct-v0.3"
-      containerfile: ./mistral-7b-instruct-v0.3/Containerfile
-      tag: "mistral-7b-instruct-v0.3"
+      registry: ${REGISTRY}
+      image-name: ${IMAGE_NAME}
+      context: granite-3.0-2b-instruct
+      containerfile: ./granite-3.0-2b-instruct/Containerfile
+      tag: "granite-3.0-2b-instruct"
     secrets:
       registry-username: ${{ secrets.REGISTRY_USERNAME }}
       registry-password: ${{ secrets.REGISTRY_PASSWORD }}
       hf-token: ${{ secrets.HF_TOKEN }}
+
+  # build-mistral-image:
+  #   uses: ./.github/workflows/workflow-build-image.yaml
+  #   with:
+  #     registry: ${REGISTRY}
+  #     image-name: ${IMAGE_NAME}
+  #     context: "mistral-7b-instruct-v0.3"
+  #     containerfile: ./mistral-7b-instruct-v0.3/Containerfile
+  #     tag: "mistral-7b-instruct-v0.3"
+  #   secrets:
+  #     registry-username: ${{ secrets.REGISTRY_USERNAME }}
+  #     registry-password: ${{ secrets.REGISTRY_PASSWORD }}
+  #     hf-token: ${{ secrets.HF_TOKEN }}
diff --git a/granite-3.0-2b-instruct/Containerfile b/granite-3.0-2b-instruct/Containerfile
new file mode 100644
index 0000000..f9cd4d3
--- /dev/null
+++ b/granite-3.0-2b-instruct/Containerfile
@@ -0,0 +1,31 @@
+# Base image for the modelcar Granite image
+FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4 as base
+
+# Set the HF_TOKEN with --build-arg HF_TOKEN="hf_..." at build time
+ARG HF_TOKEN
+
+# The model repo to download
+ENV MODEL_REPO="ibm-granite/granite-3.0-2b-instruct"
+
+# Install necessary Python dependencies
+RUN microdnf -y install git git-lfs python3-pip && \
+    microdnf clean all
+
+COPY requirements.txt .
+
+# Install Hugging Face libraries
+RUN pip3 install -r requirements.txt
+
+COPY download_model.py .
+
+# Download the necessary model files (config.json, tokenizer.json, and safetensors)
+RUN python3 download_model.py
+
+# Final image containing only the essential model files
+FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4
+
+# Copy only the necessary model files from the base image
+COPY --from=base /models /models
+
+# Set the user to 1001
+USER 1001
diff --git a/granite-3.0-2b-instruct/README.md b/granite-3.0-2b-instruct/README.md
new file mode 100644
index 0000000..c71204a
--- /dev/null
+++ b/granite-3.0-2b-instruct/README.md
@@ -0,0 +1,11 @@
+# Granite-3.0-2b-instruct
+
+https://huggingface.co/ibm-granite/granite-3.0-2b-instruct
+
+## Building Image
+
+Once your Hugging Face token has been created, be sure to accept the terms and conditions for this model on the model's home page.
+
+```
+podman build -t redhat-ai-services/modelcar-catalog:granite-3.0-2b-instruct . --build-arg HF_TOKEN="hf_..."
+```
diff --git a/granite-3.0-2b-instruct/download_model.py b/granite-3.0-2b-instruct/download_model.py
new file mode 100755
index 0000000..0c60ff8
--- /dev/null
+++ b/granite-3.0-2b-instruct/download_model.py
@@ -0,0 +1,13 @@
+import os
+
+from huggingface_hub import snapshot_download
+
+model_repo = os.getenv("MODEL_REPO")
+
+print(f"Attempting to download the following model from huggingface: {model_repo}")
+
+snapshot_download(
+    repo_id=model_repo,
+    local_dir="/models",
+    allow_patterns=["*.safetensors", "*.json", "*.txt"],
+)
diff --git a/granite-3.0-2b-instruct/requirements.txt b/granite-3.0-2b-instruct/requirements.txt
new file mode 100644
index 0000000..37859e7
--- /dev/null
+++ b/granite-3.0-2b-instruct/requirements.txt
@@ -0,0 +1,3 @@
+torch
+transformers
+huggingface-hub
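A quick way to sanity-check this change locally is to build and inspect the image by hand. This is only a sketch: it assumes podman is available, that the commands are run from the granite-3.0-2b-instruct directory, and it reuses the image tag from the README above; the final UBI-minimal stage provides a shell and `ls`, so the copied model files can be listed directly.

```
# Build the modelcar image, passing a Hugging Face token as a build arg
podman build -t redhat-ai-services/modelcar-catalog:granite-3.0-2b-instruct . --build-arg HF_TOKEN="hf_..."

# Confirm that only the model files (safetensors, json, txt) ended up in /models of the final image
podman run --rm redhat-ai-services/modelcar-catalog:granite-3.0-2b-instruct ls -lh /models
```

Note that download_model.py never reads HF_TOKEN explicitly: the ARG is exposed as an environment variable to RUN steps in the base stage, and huggingface_hub should pick up an HF_TOKEN environment variable on its own when the model repository requires authentication.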