Merge branch 'runtipi:master' into master
rodrigomescua authored Nov 28, 2024
2 parents 9dafdd5 + a60a808 commit ff5791e
Showing 8 changed files with 16 additions and 16 deletions.
6 changes: 3 additions & 3 deletions apps/ollama-amd/config.json
@@ -5,8 +5,8 @@
   "exposable": true,
   "port": 11434,
   "id": "ollama-amd",
-  "tipi_version": 42,
-  "version": "0.4.5-rocm",
+  "tipi_version": 43,
+  "version": "0.4.6-rocm",
   "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
@@ -16,5 +16,5 @@
   "form_fields": [],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732597295000
+  "updated_at": 1732757185000
 }
2 changes: 1 addition & 1 deletion apps/ollama-amd/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3.7'

 services:
   ollama-amd:
-    image: ollama/ollama:0.4.5-rocm
+    image: ollama/ollama:0.4.6-rocm
     restart: unless-stopped
     container_name: ollama-amd
     environment:
6 changes: 3 additions & 3 deletions apps/ollama-cpu/config.json
@@ -5,8 +5,8 @@
   "exposable": true,
   "port": 11436,
   "id": "ollama-cpu",
-  "tipi_version": 42,
-  "version": "0.4.5",
+  "tipi_version": 43,
+  "version": "0.4.6",
   "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
@@ -16,5 +16,5 @@
   "form_fields": [],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732597314000
+  "updated_at": 1732757189000
 }
2 changes: 1 addition & 1 deletion apps/ollama-cpu/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3.7'

 services:
   ollama-cpu:
-    image: ollama/ollama:0.4.5
+    image: ollama/ollama:0.4.6
     restart: unless-stopped
     container_name: ollama-cpu
     ports:
6 changes: 3 additions & 3 deletions apps/ollama-nvidia/config.json
@@ -5,8 +5,8 @@
   "exposable": true,
   "port": 11435,
   "id": "ollama-nvidia",
-  "tipi_version": 42,
-  "version": "0.4.5",
+  "tipi_version": 43,
+  "version": "0.4.6",
   "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
@@ -16,5 +16,5 @@
   "form_fields": [],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732597317000
+  "updated_at": 1732757191000
 }
2 changes: 1 addition & 1 deletion apps/ollama-nvidia/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3.7'

 services:
   ollama-nvidia:
-    image: ollama/ollama:0.4.5
+    image: ollama/ollama:0.4.6
     restart: unless-stopped
     container_name: ollama-nvidia
     ports:
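
All three Ollama variants above bump the image to 0.4.6 and advertise an OpenAI-compatible inference API on their configured ports (11434 for ollama-amd, 11435 for ollama-nvidia, 11436 for ollama-cpu). A minimal Python sketch for confirming the updated container is actually serving the new version, assuming it is reachable on localhost and that Ollama's GET /api/version endpoint is available:

import json
import urllib.request

# Port 11434 matches the ollama-amd config above; use 11435 for ollama-nvidia
# or 11436 for ollama-cpu. The /api/version endpoint is assumed reachable.
OLLAMA_URL = "http://localhost:11434/api/version"

with urllib.request.urlopen(OLLAMA_URL, timeout=5) as resp:
    info = json.load(resp)

# After this update the reported version should be 0.4.6 (the rocm build for ollama-amd).
print("Ollama server version:", info.get("version"))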
6 changes: 3 additions & 3 deletions apps/pinchflat/config.json
@@ -5,8 +5,8 @@
   "available": true,
   "exposable": true,
   "id": "pinchflat",
-  "tipi_version": 14,
-  "version": "2024.10.30",
+  "tipi_version": 15,
+  "version": "2024.11.27",
   "categories": ["media"],
   "description": "Your next YouTube media manager",
   "short_desc": "Your next YouTube media manager",
@@ -34,5 +34,5 @@
   ],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1730916717000
+  "updated_at": 1732757066000
 }
2 changes: 1 addition & 1 deletion apps/pinchflat/docker-compose.yml
@@ -1,6 +1,6 @@
 services:
   pinchflat:
-    image: keglin/pinchflat:v2024.10.30
+    image: keglin/pinchflat:v2024.11.27
     container_name: pinchflat
     environment:
       - BASIC_AUTH_USERNAME=${PINCHFLAT_BASIC_AUTH_USERNAME}
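
The Pinchflat service reads its basic-auth credentials from the PINCHFLAT_BASIC_AUTH_* variables shown in the compose file above. A minimal sketch for checking that the updated container answers behind that auth, assuming Pinchflat's default web port of 8945 and that the same variables are set in the local environment:

import base64
import os
import urllib.request

# Assumptions: credentials mirror the PINCHFLAT_BASIC_AUTH_* compose variables,
# and 8945 is Pinchflat's web port on this host.
user = os.environ.get("PINCHFLAT_BASIC_AUTH_USERNAME", "")
password = os.environ.get("PINCHFLAT_BASIC_AUTH_PASSWORD", "")
token = base64.b64encode(f"{user}:{password}".encode()).decode()

req = urllib.request.Request(
    "http://localhost:8945/",
    headers={"Authorization": f"Basic {token}"},
)
with urllib.request.urlopen(req, timeout=5) as resp:
    # A 200 response means the v2024.11.27 image came up and accepted the credentials.
    print("Pinchflat responded with HTTP", resp.status)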
