Skip to content

Commit

Permalink
test granite
Browse files Browse the repository at this point in the history
  • Loading branch information
strangiato committed Nov 4, 2024
1 parent 7329821 commit 3ce7168
Show file tree
Hide file tree
Showing 5 changed files with 81 additions and 6 deletions.
29 changes: 23 additions & 6 deletions .github/workflows/build-images.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,33 @@ on:
- README.md
- docs/**

env:
  REGISTRY: quay.io/redhat-ai-services
  IMAGE_NAME: modelcar-catalog

jobs:
  build-granite-image:
    uses: ./.github/workflows/workflow-build-image.yaml
    with:
      # NOTE(review): `${REGISTRY}` is shell syntax that GitHub Actions never
      # expands — the literal string "${REGISTRY}" would be passed to the
      # reusable workflow. The `env` context is also not available in `with:`
      # of a reusable-workflow call (`${{ env.REGISTRY }}` would be empty
      # here), so the values are inlined.
      registry: quay.io/redhat-ai-services
      image-name: modelcar-catalog
      context: granite-3.0-2b-instruct
      containerfile: ./granite-3.0-2b-instruct/Containerfile
      tag: "granite-3.0-2b-instruct"
    secrets:
      registry-username: ${{ secrets.REGISTRY_USERNAME }}
      registry-password: ${{ secrets.REGISTRY_PASSWORD }}
      hf-token: ${{ secrets.HF_TOKEN }}

  # Temporarily disabled while the granite image is being tested; same
  # inlined-values fix applied so it is correct when re-enabled.
  # build-mistral-image:
  #   uses: ./.github/workflows/workflow-build-image.yaml
  #   with:
  #     registry: quay.io/redhat-ai-services
  #     image-name: modelcar-catalog
  #     context: "mistral-7b-instruct-v0.3"
  #     containerfile: ./mistral-7b-instruct-v0.3/Containerfile
  #     tag: "mistral-7b-instruct-v0.3"
  #   secrets:
  #     registry-username: ${{ secrets.REGISTRY_USERNAME }}
  #     registry-password: ${{ secrets.REGISTRY_PASSWORD }}
  #     hf-token: ${{ secrets.HF_TOKEN }}
31 changes: 31 additions & 0 deletions granite-3.0-2b-instruct/Containerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# Base image for the modelcar Granite image
FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4 AS base

# Set the HF_TOKEN with --build-arg HF_TOKEN="hf_..." at build time.
# NOTE(review): build args are recorded in the image's build history; a
# BuildKit secret mount (RUN --mount=type=secret,...) would keep the token out
# of it. Kept as ARG because the README documents the --build-arg invocation.
ARG HF_TOKEN

# The model repo to download.
# Fixed: previously pointed at mistralai/Mistral-7B-Instruct-v0.3, a
# copy-paste leftover from the mistral Containerfile — this image is the
# Granite one (see this directory's README).
ENV MODEL_REPO="ibm-granite/granite-3.0-2b-instruct"

# Tooling needed to fetch the model; clean metadata in the same layer so the
# package cache never lands in an image layer.
RUN microdnf -y install git git-lfs python3-pip && \
    microdnf clean all

COPY requirements.txt .

# Install Hugging Face libraries; --no-cache-dir keeps the pip cache out of
# this (build-only) stage's layers.
RUN pip3 install --no-cache-dir -r requirements.txt

COPY download_model.py .

# Download the necessary model files (config.json, tokenizer.json, and safetensors)
RUN python3 download_model.py

# Final image containing only the essential model files
FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4

# Copy only the necessary model files from the base image
COPY --from=base /models /models

# Run as a non-root user (arbitrary-UID friendly for OpenShift)
USER 1001
11 changes: 11 additions & 0 deletions granite-3.0-2b-instruct/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Granite-3.0-2b-instruct

https://huggingface.co/ibm-granite/granite-3.0-2b-instruct

## Building Image

Before building, create a Hugging Face access token and accept the terms and conditions for this model on its Hugging Face model page.

```
podman build -t redhat-ai-services/modelcar-catalog:granite-3.0-2b-instruct . --build-arg HF_TOKEN="hf_..."
```
13 changes: 13 additions & 0 deletions granite-3.0-2b-instruct/download_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import os

from huggingface_hub import snapshot_download

# The Containerfile injects the model repo via `ENV MODEL_REPO=...`.
model_repo = os.getenv("MODEL_REPO")
if not model_repo:
    # Fail fast with a clear message instead of letting snapshot_download
    # blow up on repo_id=None deep inside huggingface_hub.
    raise SystemExit("MODEL_REPO environment variable is not set; cannot download model.")

print(f"Attempting to download the following model from huggingface: {model_repo}")

# Fetch only the files the runtime needs (safetensors weights plus
# config/tokenizer json and txt files) into /models, which the final image
# stage copies out.
snapshot_download(
    repo_id=model_repo,
    local_dir="/models",
    allow_patterns=["*.safetensors", "*.json", "*.txt"],
)
3 changes: 3 additions & 0 deletions granite-3.0-2b-instruct/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
torch
transformers
huggingface-hub

0 comments on commit 3ce7168

Please sign in to comment.