
Commit

Merge branch 'runtipi:master' into master
rodrigomescua authored Dec 2, 2024
2 parents 1ea54f4 + 8da142d commit 0fd052f
Showing 19 changed files with 37 additions and 37 deletions.
6 changes: 3 additions & 3 deletions apps/autobrr/config.json
@@ -5,8 +5,8 @@
   "available": true,
   "exposable": true,
   "id": "autobrr",
-  "tipi_version": 38,
-  "version": "1.51.1",
+  "tipi_version": 39,
+  "version": "1.52.0",
   "categories": ["media"],
   "description": "autobrr is the modern download automation tool for torrents. With inspiration and ideas from tools like trackarr, autodl-irssi and flexget we built one tool that can do it all, and then some.",
   "short_desc": "Automation for downloads.",
@@ -16,5 +16,5 @@
   "form_fields": [],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732544746000
+  "updated_at": 1733088131000
 }
2 changes: 1 addition & 1 deletion apps/autobrr/docker-compose.yml
@@ -3,7 +3,7 @@ version: "3"
 services:
   autobrr:
     container_name: autobrr
-    image: ghcr.io/autobrr/autobrr:v1.51.1
+    image: ghcr.io/autobrr/autobrr:v1.52.0
     restart: unless-stopped
     ports:
       - ${APP_PORT}:7474
6 changes: 3 additions & 3 deletions apps/lobe-chat/config.json
@@ -5,8 +5,8 @@
   "exposable": true,
   "id": "lobe-chat",
   "description": "LobeChat is an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible (Function Call) plugin system.",
-  "tipi_version": 377,
-  "version": "1.34.5",
+  "tipi_version": 380,
+  "version": "1.35.3",
   "categories": ["ai"],
   "short_desc": "LLM chatbot framework",
   "author": "https://github.com/lobehub",
@@ -41,5 +41,5 @@
   ],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732780557000
+  "updated_at": 1733082087000
 }
2 changes: 1 addition & 1 deletion apps/lobe-chat/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3.9'
 services:
   lobe-chat:
     container_name: lobe-chat
-    image: lobehub/lobe-chat:v1.34.5
+    image: lobehub/lobe-chat:v1.35.3
     environment:
       - OPENAI_API_KEY=${OPENAI_API_KEY}
       - OPENAI_PROXY_URL=${OPEANAI_PROXY_URL}
6 changes: 3 additions & 3 deletions apps/ollama-amd/config.json
@@ -5,8 +5,8 @@
   "exposable": true,
   "port": 11434,
   "id": "ollama-amd",
-  "tipi_version": 43,
-  "version": "0.4.6-rocm",
+  "tipi_version": 44,
+  "version": "0.4.7-rocm",
   "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
@@ -16,5 +16,5 @@
   "form_fields": [],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732757185000
+  "updated_at": 1733027614000
 }
2 changes: 1 addition & 1 deletion apps/ollama-amd/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3.7'
 
 services:
   ollama-amd:
-    image: ollama/ollama:0.4.6-rocm
+    image: ollama/ollama:0.4.7-rocm
     restart: unless-stopped
     container_name: ollama-amd
     environment:
6 changes: 3 additions & 3 deletions apps/ollama-cpu/config.json
@@ -5,8 +5,8 @@
   "exposable": true,
   "port": 11436,
   "id": "ollama-cpu",
-  "tipi_version": 43,
-  "version": "0.4.6",
+  "tipi_version": 44,
+  "version": "0.4.7",
   "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
@@ -16,5 +16,5 @@
   "form_fields": [],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732757189000
+  "updated_at": 1733027618000
 }
2 changes: 1 addition & 1 deletion apps/ollama-cpu/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3.7'
 
 services:
   ollama-cpu:
-    image: ollama/ollama:0.4.6
+    image: ollama/ollama:0.4.7
     restart: unless-stopped
     container_name: ollama-cpu
     ports:
6 changes: 3 additions & 3 deletions apps/ollama-nvidia/config.json
@@ -5,8 +5,8 @@
   "exposable": true,
   "port": 11435,
   "id": "ollama-nvidia",
-  "tipi_version": 43,
-  "version": "0.4.6",
+  "tipi_version": 44,
+  "version": "0.4.7",
   "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
@@ -16,5 +16,5 @@
   "form_fields": [],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732757191000
+  "updated_at": 1733027620000
 }
2 changes: 1 addition & 1 deletion apps/ollama-nvidia/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3.7'
 
 services:
   ollama-nvidia:
-    image: ollama/ollama:0.4.6
+    image: ollama/ollama:0.4.7
     restart: unless-stopped
     container_name: ollama-nvidia
     ports:
6 changes: 3 additions & 3 deletions apps/onedev/config.json
@@ -5,8 +5,8 @@
   "available": true,
   "exposable": true,
   "id": "onedev",
-  "tipi_version": 127,
-  "version": "11.6.0",
+  "tipi_version": 128,
+  "version": "11.6.1",
   "categories": ["development"],
   "description": "Self-hosted Git Server with Kanban and CI/CD",
   "short_desc": "Self-hosted Git Server with Kanban and CI/CD",
@@ -46,5 +46,5 @@
   ],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732948964000
+  "updated_at": 1733035256000
 }
2 changes: 1 addition & 1 deletion apps/onedev/docker-compose.yml
@@ -2,7 +2,7 @@ version: "3.7"
 
 services:
   onedev:
-    image: 1dev/server:11.6.0
+    image: 1dev/server:11.6.1
     container_name: onedev
     environment:
       - hibernate_dialect=io.onedev.server.persistence.PostgreSQLDialect
6 changes: 3 additions & 3 deletions apps/open-webui/config.json
@@ -5,8 +5,8 @@
   "exposable": true,
   "port": 8536,
   "id": "open-webui",
-  "tipi_version": 33,
-  "version": "0.4.6",
+  "tipi_version": 34,
+  "version": "0.4.7",
   "categories": ["ai"],
   "description": "Open WebUI is an extensible, feature-rich, and user-friendly self-hosted WebUI designed to operate entirely offline.",
   "short_desc": "User-friendly WebUI for LLMs",
@@ -31,5 +31,5 @@
   "dynamic_config": true,
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1732689393000
+  "updated_at": 1733047736000
 }
2 changes: 1 addition & 1 deletion apps/open-webui/docker-compose.json
@@ -1,7 +1,7 @@
 {
   "services": [
     {
-      "image": "ghcr.io/open-webui/open-webui:0.4.6",
+      "image": "ghcr.io/open-webui/open-webui:v0.4.7",
       "name": "open-webui",
       "internalPort": 8080,
       "isMain": true,
2 changes: 1 addition & 1 deletion apps/open-webui/docker-compose.yml
@@ -1,7 +1,7 @@
 version: '3.8'
 services:
   open-webui:
-    image: ghcr.io/open-webui/open-webui:0.4.6
+    image: ghcr.io/open-webui/open-webui:v0.4.7
     container_name: open-webui
     volumes:
       - ${APP_DATA_DIR}/data:/app/backend/data
6 changes: 3 additions & 3 deletions apps/spoolman/config.json
@@ -5,13 +5,13 @@
   "exposable": true,
   "id": "spoolman",
   "description": "Spoolman is a web service that helps you keep track of your filament spools and how they are being used. It acts as a database, where other printer software such as Octoprint and Moonraker can interact with to have a centralized place for spool information. For example, if used together with Moonraker, your spool weight will automatically be reduced as your print is progressing.",
-  "tipi_version": 8,
-  "version": "0.20.0",
+  "tipi_version": 9,
+  "version": "0.21.0",
   "categories": ["utilities", "automation"],
   "short_desc": "Keep track of your inventory of 3D-printer filament spools",
   "author": "Donkie",
   "source": "https://github.com/Donkie/Spoolman",
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1725844884000
+  "updated_at": 1733068838000
 }
2 changes: 1 addition & 1 deletion apps/spoolman/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3.8'
 services:
   spoolman:
     container_name: spoolman
-    image: ghcr.io/donkie/spoolman:0.20.0
+    image: ghcr.io/donkie/spoolman:0.21.0
     restart: unless-stopped
     volumes:
       - ${APP_DATA_DIR}/data:/home/app/.local/share/spoolman
6 changes: 3 additions & 3 deletions apps/zigbee2mqtt/config.json
@@ -5,8 +5,8 @@
   "available": true,
   "exposable": true,
   "id": "zigbee2mqtt",
-  "tipi_version": 12,
-  "version": "1.41.0",
+  "tipi_version": 13,
+  "version": "1.42.0",
   "categories": ["utilities", "automation"],
   "description": "Zigbee to MQTT bridge, get rid of your proprietary Zigbee bridges",
   "short_desc": "Zigbee to MQTT bridge",
@@ -24,5 +24,5 @@
   ],
   "supported_architectures": ["arm64", "amd64"],
   "created_at": 1691943801422,
-  "updated_at": 1730917939000
+  "updated_at": 1733068839000
 }
2 changes: 1 addition & 1 deletion apps/zigbee2mqtt/docker-compose.yml
@@ -3,7 +3,7 @@ version: '3.7'
 services:
   zigbee2mqtt:
     container_name: zigbee2mqtt
-    image: koenkk/zigbee2mqtt:1.41.0
+    image: koenkk/zigbee2mqtt:1.42.0
     environment:
       - TZ=${TZ}
     volumes:
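
Every file in this commit follows the same two-step pattern: bump "tipi_version", "version", and "updated_at" in the app's config.json, and point the compose file at the matching image tag. As a rough illustration only (not part of this commit, and assuming PyYAML plus the apps/<id>/ layout shown above), a small script along these lines could check that each updated config.json version still agrees with the tag referenced in the corresponding docker-compose.yml:

import json
from pathlib import Path

import yaml  # PyYAML, assumed to be installed

# Apps touched by this commit (taken from the file list above).
APPS = [
    "autobrr", "lobe-chat", "ollama-amd", "ollama-cpu", "ollama-nvidia",
    "onedev", "open-webui", "spoolman", "zigbee2mqtt",
]

for app in APPS:
    app_dir = Path("apps") / app
    config = json.loads((app_dir / "config.json").read_text())
    compose = yaml.safe_load((app_dir / "docker-compose.yml").read_text())

    version = config["version"]                # e.g. "1.52.0" or "0.4.7-rocm"
    image = compose["services"][app]["image"]  # e.g. "ghcr.io/autobrr/autobrr:v1.52.0"
    tag = image.rsplit(":", 1)[-1]

    # Some tags carry a leading "v" (e.g. autobrr, open-webui) and some do not
    # (e.g. ollama, spoolman), so strip it before comparing.
    if tag.lstrip("v") != version:
        print(f"{app}: config.json version {version} != image tag {tag}")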
