Commit

Merge branch 'main' into enchance-safe-entrypoint-load
soamicharan authored Nov 7, 2024
2 parents 8842ba6 + 575a1f6 commit 8fc7d81
Showing 8 changed files with 331 additions and 26 deletions.
41 changes: 35 additions & 6 deletions .github/workflows/integration-tests.yaml
@@ -1,3 +1,6 @@
# TODO: Replace `wait-for-server` with dedicated command
# https://github.com/PrefectHQ/prefect/issues/6990

name: Integration tests
on:
pull_request:
@@ -85,9 +88,6 @@ jobs:
./scripts/wait-for-server.py
# TODO: Replace `wait-for-server` with dedicated command
# https://github.com/PrefectHQ/prefect/issues/6990
- name: Start server
if: ${{ matrix.server-version.version == 'main' }}
env:
@@ -98,9 +98,6 @@ jobs:
./scripts/wait-for-server.py
# TODO: Replace `wait-for-server` with dedicated command
# https://github.com/PrefectHQ/prefect/issues/6990
- name: Run integration flows
env:
PREFECT_API_URL: http://127.0.0.1:4200/api
@@ -113,3 +110,35 @@ jobs:
run: |
cat server.log || echo "No logs available"
docker logs prefect-server || echo "No logs available"
sqlite-3-24-0:
name: Test SQLite 3.24.0 Compatibility
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Test with SQLite 3.24.0
run: >
docker build -t prefect-server-old-sqlite \
--build-arg SQLITE_VERSION=3240000 \
--build-arg SQLITE_YEAR=2018 \
-f old-sqlite.Dockerfile . &&
docker run prefect-server-old-sqlite sh -c "prefect server database downgrade --yes -r base && prefect server database upgrade --yes"
sqlite-3-31-1:
name: Test SQLite 3.31.1 Compatibility
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Test with SQLite 3.31.1
run: >
docker build -t prefect-server-new-sqlite \
--build-arg SQLITE_VERSION=3310100 \
--build-arg SQLITE_YEAR=2020 \
-f old-sqlite.Dockerfile . &&
docker run prefect-server-new-sqlite sh -c "prefect server database downgrade --yes -r base && prefect server database upgrade --yes"
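
Both jobs exercise the same check: build the image with a pinned SQLite, then run a full migration roundtrip inside it. A rough local equivalent (a hedged sketch, not part of the workflow; it assumes a working prefect installation and will reset the local Prefect database schema) is:

import sqlite3
import subprocess

# Show which SQLite library the local interpreter links against.
print(f"SQLite in use: {sqlite3.sqlite_version}")

# Same roundtrip the jobs above run inside the container: wipe the schema
# down to the base revision, then migrate back up to head.
for cmd in (
    ["prefect", "server", "database", "downgrade", "--yes", "-r", "base"],
    ["prefect", "server", "database", "upgrade", "--yes"],
):
    subprocess.run(cmd, check=True)
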
59 changes: 59 additions & 0 deletions old-sqlite.Dockerfile
@@ -0,0 +1,59 @@
# Build the Python distributable
FROM python:3.9-slim AS python-builder

WORKDIR /opt/prefect

# Install git for version calculation
RUN apt-get update && \
apt-get install --no-install-recommends -y \
git \
&& apt-get clean && rm -rf /var/lib/apt/lists/*

# Copy the repository for version calculation
COPY . .

# Create source distribution
RUN python setup.py sdist && \
mv "dist/$(python setup.py --fullname).tar.gz" "dist/prefect.tar.gz"

# Final image
FROM python:3.9-slim

# Accept SQLite version as build argument
ARG SQLITE_VERSION="3310100"
ARG SQLITE_YEAR="2020"

# Install build dependencies
RUN apt-get update && apt-get install -y \
build-essential \
wget

# Download and compile SQLite
RUN wget https://www.sqlite.org/${SQLITE_YEAR}/sqlite-autoconf-${SQLITE_VERSION}.tar.gz \
&& tar xvfz sqlite-autoconf-${SQLITE_VERSION}.tar.gz \
&& cd sqlite-autoconf-${SQLITE_VERSION} \
&& ./configure \
&& make \
&& make install \
&& ldconfig \
&& cd .. \
&& rm -rf sqlite-autoconf-${SQLITE_VERSION}*

# Install uv for faster pip operations
COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv
ENV UV_SYSTEM_PYTHON=1

# Set library path to use our compiled SQLite
ENV LD_LIBRARY_PATH=/usr/local/lib

WORKDIR /app

# Copy the built distributable
COPY --from=python-builder /opt/prefect/dist/prefect.tar.gz ./dist/

# Install requirements and Prefect
COPY requirements*.txt ./
RUN uv pip install -r requirements.txt
RUN uv pip install ./dist/prefect.tar.gz
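
Because LD_LIBRARY_PATH points at /usr/local/lib, the stdlib sqlite3 module in the final image loads the freshly compiled library instead of the distribution's copy. A quick check to run inside a container built from this file (a hedged aside, not part of the Dockerfile):

# Prints the SQLite library version actually loaded at runtime,
# e.g. "3.24.0" for a SQLITE_VERSION=3240000 build.
import sqlite3

print(sqlite3.sqlite_version)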


@@ -16,22 +16,30 @@


def upgrade():
# First drop the foreign key constraints
with op.batch_alter_table("block", schema=None) as batch_op:
batch_op.drop_constraint("fk_block__block_spec_id__block_spec")

# Then rename the tables
op.rename_table("block_spec", "block_schema")
op.rename_table("block", "block_document")

# Handle indexes and column renames for block_document
with op.batch_alter_table("block_document", schema=None) as batch_op:
# Drop indexes first
batch_op.drop_index("ix_block__is_default_storage_block")
batch_op.drop_index("ix_block__name")
batch_op.drop_index("ix_block__updated")
batch_op.drop_index("uq_block__spec_id_name")

# Rename columns
batch_op.alter_column("block_spec_id", new_column_name="block_schema_id")
batch_op.alter_column(
"is_default_storage_block",
new_column_name="is_default_storage_block_document",
)
batch_op.drop_constraint("fk_block__block_spec_id__block_spec")
batch_op.drop_constraint("pk_block_data")

# Create new indexes
with op.batch_alter_table("block_document", schema=None) as batch_op:
batch_op.create_index(
batch_op.f("ix_block_document__is_default_storage_block_document"),
@@ -48,6 +56,15 @@ def upgrade():
"uq_block__schema_id_name", ["block_schema_id", "name"], unique=True
)

# Re-create foreign key at the end
batch_op.create_foreign_key(
batch_op.f("fk_block__block_schema_id__block_schema"),
"block_schema",
["block_schema_id"],
["id"],
ondelete="cascade",
)

with op.batch_alter_table("block_schema", schema=None) as batch_op:
batch_op.drop_index("ix_block_spec__type")
batch_op.drop_index("ix_block_spec__updated")
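
The reordering above follows the usual SQLite-compatible pattern: Alembic's batch mode rebuilds the table, so a foreign key that references a table being renamed must be dropped before the rename and re-created against the new name afterwards. In outline (a hedged sketch with illustrative table names, not code from this migration):

from alembic import op


def upgrade():
    # 1. Drop the FK while it still points at the old table name.
    with op.batch_alter_table("child") as batch_op:
        batch_op.drop_constraint("fk_child__parent_id__parent")

    # 2. Rename the referenced table.
    op.rename_table("parent", "parent_renamed")

    # 3. Re-create the FK against the new table name.
    with op.batch_alter_table("child") as batch_op:
        batch_op.create_foreign_key(
            "fk_child__parent_id__parent_renamed",
            "parent_renamed",
            ["parent_id"],
            ["id"],
            ondelete="cascade",
        )
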
@@ -5,6 +5,7 @@
Create Date: 2024-09-16 16:27:19.451150
"""

import sqlalchemy as sa
from alembic import op

@@ -37,16 +38,18 @@ def upgrade():
# migrate existing data
sql = sa.text(
"""
WITH deployment_limit_mapping AS (
SELECT d.id AS deployment_id, l.id AS limit_id
FROM deployment d
JOIN concurrency_limit_v2 l ON l.name = 'deployment:' || d.id
)
UPDATE deployment
SET concurrency_limit_id = dlm.limit_id
FROM deployment_limit_mapping dlm
WHERE deployment.id = dlm.deployment_id;
"""
UPDATE deployment
SET concurrency_limit_id = (
SELECT l.id
FROM concurrency_limit_v2 l
WHERE l.name = 'deployment:' || deployment.id
)
WHERE EXISTS (
SELECT 1
FROM concurrency_limit_v2 l
WHERE l.name = 'deployment:' || deployment.id
);
"""
)
op.execute(sql)
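
The rewritten statement trades the CTE plus UPDATE ... FROM (supported by SQLite only from 3.33.0) for a correlated subquery that the older SQLite builds tested above can execute. A self-contained illustration (a hedged sketch with trimmed-down columns, not code from the migration):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE deployment (id TEXT PRIMARY KEY, concurrency_limit_id TEXT);
    CREATE TABLE concurrency_limit_v2 (id TEXT PRIMARY KEY, name TEXT);
    INSERT INTO deployment VALUES ('d1', NULL);
    INSERT INTO concurrency_limit_v2 VALUES ('l1', 'deployment:d1');
    """
)
conn.execute(
    """
    UPDATE deployment
    SET concurrency_limit_id = (
        SELECT l.id FROM concurrency_limit_v2 l
        WHERE l.name = 'deployment:' || deployment.id
    )
    WHERE EXISTS (
        SELECT 1
        FROM concurrency_limit_v2 l
        WHERE l.name = 'deployment:' || deployment.id
    );
    """
)
# Prints ('l1',): the deployment picked up its matching concurrency limit.
print(conn.execute("SELECT concurrency_limit_id FROM deployment").fetchone())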

21 changes: 21 additions & 0 deletions src/prefect/settings/sources.py
@@ -4,6 +4,7 @@
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Type

import dotenv
import toml
from pydantic import AliasChoices
from pydantic.fields import FieldInfo
@@ -15,6 +16,7 @@
from pydantic_settings.sources import ConfigFileSourceMixin

from prefect.settings.constants import DEFAULT_PREFECT_HOME, DEFAULT_PROFILES_PATH
from prefect.utilities.collections import get_from_dict


class EnvFilterSettingsSource(EnvSettingsSource):
@@ -230,6 +232,25 @@ def _get_profiles_path() -> Path:
return DEFAULT_PROFILES_PATH
if env_path := os.getenv("PREFECT_PROFILES_PATH"):
return Path(env_path)
if dotenv_path := dotenv.dotenv_values(".env").get("PREFECT_PROFILES_PATH"):
return Path(dotenv_path)
if toml_path := _get_profiles_path_from_toml("prefect.toml", ["profiles_path"]):
return Path(toml_path)
if pyproject_path := _get_profiles_path_from_toml(
"pyproject.toml", ["tool", "prefect", "profiles_path"]
):
return Path(pyproject_path)
if not (DEFAULT_PREFECT_HOME / "profiles.toml").exists():
return DEFAULT_PROFILES_PATH
return DEFAULT_PREFECT_HOME / "profiles.toml"


def _get_profiles_path_from_toml(path: str, keys: List[str]) -> Optional[str]:
"""Helper to get the profiles path from a toml file."""

try:
toml_data = toml.load(path)
except FileNotFoundError:
return None

return get_from_dict(toml_data, keys)
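
Together with the changes to _get_profiles_path above, the lookup order for the profiles path becomes: the PREFECT_PROFILES_PATH environment variable, then .env, then prefect.toml, then pyproject.toml, then the default locations. A small sketch of the TOML lookup itself (hedged; the file content is illustrative and the chained dict lookups stand in for get_from_dict):

import tempfile
from pathlib import Path

import toml

with tempfile.TemporaryDirectory() as tmp:
    pyproject = Path(tmp) / "pyproject.toml"
    pyproject.write_text('[tool.prefect]\nprofiles_path = "/workspace/profiles.toml"\n')

    data = toml.load(pyproject)
    # Equivalent of get_from_dict(data, ["tool", "prefect", "profiles_path"])
    value = data.get("tool", {}).get("prefect", {}).get("profiles_path")
    print(Path(value))  # /workspace/profiles.toml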