Merge branch 'nebari-dev:develop' into eks-private-cluster
joneszc authored Aug 9, 2024
2 parents 2d1548d + 847a260 commit 3ab1bf3
Showing 25 changed files with 242 additions and 62 deletions.
24 changes: 22 additions & 2 deletions .github/workflows/test_aws_integration.yaml
@@ -32,12 +32,32 @@ env:
AWS_DEFAULT_REGION: "us-west-2"
NEBARI_GH_BRANCH: ${{ github.event.inputs.branch || 'develop' }}
NEBARI_IMAGE_TAG: ${{ github.event.inputs.image-tag || 'main' }}
TF_LOG: ${{ github.event.inputs.tf-log-level || 'info' }}

TF_LOG: ${{ github.event.inputs.tf-log-level || 'info' }}
NO_PROVIDER_CREDENTIALS_aws: false

jobs:
# Used to skip cloud provider checks due to "jobs" not supporting {{ env }} variables contexts.
check-for-credentials_aws:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
outputs:
provider_credentials_aws: ${{ steps.flag-check.outputs.provider_credentials_aws }}
steps:
- name: Check if user wants to run AWS integration based on credentials
id: flag-check
run: |
if [ "${{ env.NO_PROVIDER_CREDENTIALS_aws }}" == "true" ]; then
echo "::set-output name=provider_credentials_aws::0"
else
echo "::set-output name=provider_credentials_aws::1"
fi
test-aws-integration:
runs-on: ubuntu-latest
needs: check-for-credentials_aws
if: ${{ needs.check-for-credentials.outputs.provider_credentials_aws == '1' }}
permissions:
id-token: write
contents: read
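
The same flag-job pattern is repeated below for Azure, Digital Ocean, and GCP: the `env` context is not available in `jobs.<job_id>.if`, so a small job converts the environment flag into a job output that the integration job reads through `needs`. A minimal, hypothetical sketch of the pattern follows (job, flag, and output names are illustrative, not taken from the repository). Two details worth noting: the job id referenced in the `if` expression must match the id listed under `needs` (in the AWS file above, `needs` points at `check-for-credentials_aws` while `if` references `check-for-credentials`), and current runners expect outputs to be written to `$GITHUB_OUTPUT` rather than the deprecated `::set-output` command used in the diff.

name: example-gated-integration
on: workflow_dispatch

env:
  RUN_INTEGRATION: "true"   # illustrative flag; the diffs use NO_PROVIDER_CREDENTIALS_<provider>

jobs:
  check-flag:
    runs-on: ubuntu-latest
    outputs:
      run_integration: ${{ steps.flag-check.outputs.run_integration }}
    steps:
      - name: Convert env flag into a job output
        id: flag-check
        run: |
          if [ "${{ env.RUN_INTEGRATION }}" == "true" ]; then
            echo "run_integration=1" >> "$GITHUB_OUTPUT"
          else
            echo "run_integration=0" >> "$GITHUB_OUTPUT"
          fi

  integration-tests:
    runs-on: ubuntu-latest
    needs: check-flag
    # the job id referenced here must match the id listed under `needs`
    if: ${{ needs.check-flag.outputs.run_integration == '1' }}
    steps:
      - run: echo "flag enabled, running integration tests"
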
26 changes: 25 additions & 1 deletion .github/workflows/test_azure_integration.yaml
@@ -1,4 +1,4 @@
name: Azure Delpoyment
name: Azure Deployment

on:
schedule:
@@ -31,10 +31,34 @@ env:
NEBARI_GH_BRANCH: ${{ github.event.inputs.branch || 'develop' }}
NEBARI_IMAGE_TAG: ${{ github.event.inputs.image-tag || 'main' }}
TF_LOG: ${{ github.event.inputs.tf-log-level || 'info' }}
NO_PROVIDER_CREDENTIALS_azure: false

jobs:
# Used to skip cloud provider checks due to "jobs" not supporting {{ env }} variables contexts
check-for-credentials:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read

outputs:
provider_credentials_azure: ${{ steps.flag-check.outputs.provider_credentials_azure }}

steps:

- name: Check if user wants to run Azure integration based on credentials
id: flag-check
run: |
if [ "${{ env.NO_PROVIDER_CREDENTIALS_azure }}" == "true" ]; then
echo "::set-output name=provider_credentials_azure::0"
else
echo "::set-output name=provider_credentials_azure::1"
fi
test-azure-integration:
runs-on: ubuntu-latest
needs: check-for-credentials
if: ${{ needs.check-for-credentials.outputs.provider_credentials_azure == '1' }}
permissions:
id-token: write
contents: read
22 changes: 22 additions & 0 deletions .github/workflows/test_do_integration.yaml
@@ -31,11 +31,33 @@ env:
NEBARI_GH_BRANCH: ${{ github.event.inputs.branch || 'develop' }}
NEBARI_IMAGE_TAG: ${{ github.event.inputs.image-tag || 'main' }}
TF_LOG: ${{ github.event.inputs.tf-log-level || 'info' }}
NO_PROVIDER_CREDENTIALS_do: false


jobs:
# Used to skip cloud provider checks due to "jobs" not supporting {{ env }} variables contexts
check-for-credentials:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
outputs:
provider_credentials_do: ${{ steps.flag-check.outputs.provider_credentials_do }}
steps:
- name: Check if user wants to run Digital Ocean integration based on credentials
id: flag-check
run: |
if [ "${{ env.NO_PROVIDER_CREDENTIALS_do }}" == "true" ]; then
echo "::set-output name=provider_credentials_do::0"
else
echo "::set-output name=provider_credentials_do::1"
fi
test-do-integration:
runs-on: ubuntu-latest
needs: check-for-credentials
if: ${{ needs.check-for-credentials.outputs.provider_credentials_do == '1' }}
permissions:
id-token: write
contents: read
22 changes: 21 additions & 1 deletion .github/workflows/test_gcp_integration.yaml
@@ -31,11 +31,31 @@ env:
NEBARI_GH_BRANCH: ${{ github.event.inputs.branch || 'develop' }}
NEBARI_IMAGE_TAG: ${{ github.event.inputs.image-tag || 'main' }}
TF_LOG: ${{ github.event.inputs.tf-log-level || 'info' }}

NO_PROVIDER_CREDENTIALS_gcp: false

jobs:
# Used to skip cloud provider checks due to "jobs" not supporting {{ env }} variables contexts
check-for-credentials:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
outputs:
provider_credentials_gcp: ${{ steps.flag-check.outputs.provider_credentials_gcp }}
steps:
- name: Check if user wants to run GCP integration based on credentials
id: flag-check
run: |
if [ "${{ env.NO_PROVIDER_CREDENTIALS_gcp }}" == "true" ]; then
echo "::set-output name=provider_credentials_gcp::0"
else
echo "::set-output name=provider_credentials_gcp::1"
fi
test-gcp-integration:
runs-on: ubuntu-latest
needs: check-for-credentials
if: ${{ needs.check-for-credentials.outputs.provider_credentials_gcp == '1' }}
permissions:
id-token: write
contents: read
2 changes: 1 addition & 1 deletion .github/workflows/test_helm_charts.yaml
@@ -1,5 +1,5 @@
# Right now the trigger is set to run on every Monday at 13:00 UTC,
# or when the workflow file is modified. An aditional manual trigger
# or when the workflow file is modified. An additional manual trigger
# is also available.
name: "Validate Helm Charts downloads"

10 changes: 7 additions & 3 deletions .pre-commit-config.yaml
@@ -36,18 +36,22 @@ repos:
- id: check-executables-have-shebangs
exclude: "^src/_nebari/template/"

- repo: https://github.com/crate-ci/typos
rev: v1.23.2
hooks:
- id: typos

- repo: https://github.com/codespell-project/codespell
rev: v2.3.0
hooks:
- id: codespell
args:
[
"--builtin=rare,clear,informal,names",
"--skip=_build,*/build/*,*/node_modules/*,nebari.egg-info,*.git,*.js,*.json,*.yaml,*.yml",
"--ignore-words-list=AKS,aks",
"--write",
]
language: python
additional_dependencies:
- tomli

# python
- repo: https://github.com/psf/black
35 changes: 34 additions & 1 deletion RELEASE.md
@@ -9,6 +9,39 @@ This file is copied to nebari-dev/nebari-docs using a GitHub Action. -->

---

### Release 2024.7.1 - August 8, 2024

> NOTE: Support for Digital Ocean deployments using CLI commands and related Terraform modules is being deprecated. Although Digital Ocean will no longer be directly supported in future releases, you can still deploy to Digital Ocean infrastructure using the current `existing` deployment option.
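
For context on the `existing` option mentioned in the note, below is a minimal, hypothetical nebari-config.yaml sketch for deploying onto a Kubernetes cluster you already manage; every key other than `provider: existing` is illustrative, so check the Nebari documentation for the exact schema of the existing provider.

project_name: my-nebari            # illustrative
domain: nebari.example.com         # illustrative
provider: existing
existing:
  kube_context: my-existing-cluster   # kubeconfig context of the target cluster; illustrative
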
## What's Changed
* Enable authentication by default in jupyter-server by @krassowski in https://github.com/nebari-dev/nebari/pull/2288
* remove dns sleep by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2550
* Conda-store permissions v2 + load roles from keycloak by @aktech in https://github.com/nebari-dev/nebari/pull/2531
* Restrict public access and add bucket encryption using cmk by @dcmcand in https://github.com/nebari-dev/nebari/pull/2525
* Add overwrite to AWS coredns addon by @dcmcand in https://github.com/nebari-dev/nebari/pull/2538
* Add a default roles at initialisation by @aktech in https://github.com/nebari-dev/nebari/pull/2546
* Hide gallery section if no exhibits are configured by @krassowski in https://github.com/nebari-dev/nebari/pull/2549
* Add note about ~/.bash_profile by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2575
* Expose jupyterlab-gallery branch and depth options by @krassowski in https://github.com/nebari-dev/nebari/pull/2556
* #2566 Upgrade Jupyterhub ssh image by @arjxn-py in https://github.com/nebari-dev/nebari/pull/2576
* Stop copying unnecessary files into user home directory by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2578
* Include deprecation notes for init/deploy subcommands by @viniciusdc in https://github.com/nebari-dev/nebari/pull/2582
* Only download jar if file doesn't exist by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2588
* Remove unnecessary experimental flag by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2606
* Add typos spell checker to pre-commit by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2568
* Enh 2451 skip conditionals by @BrianCashProf in https://github.com/nebari-dev/nebari/pull/2569
* Improve codespell support: adjust and concentrate config to pyproject.toml and fix more typos by @yarikoptic in https://github.com/nebari-dev/nebari/pull/2583
* Move codespell config to pyproject.toml only by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2611
* Add `depends_on` for bucket encryption by @viniciusdc in https://github.com/nebari-dev/nebari/pull/2615

## New Contributors
* @BrianCashProf made their first contribution in https://github.com/nebari-dev/nebari/pull/2569
* @yarikoptic made their first contribution in https://github.com/nebari-dev/nebari/pull/2583


**Full Changelog**: https://github.com/nebari-dev/nebari/compare/2024.6.1...2024.7.1


### Release 2024.6.1 - June 26, 2024

> NOTE: This release includes an upgrade to the `kube-prometheus-stack` Helm chart, resulting in a newer version of Grafana. When upgrading your Nebari cluster, you will be prompted to have Nebari update some CRDs and delete a DaemonSet on your behalf. If you prefer, you can also run the commands yourself, which will be shown to you. If you have any custom dashboards, you'll also need to back them up by [exporting them as JSON](https://grafana.com/docs/grafana/latest/dashboards/share-dashboards-panels/#export-a-dashboard-as-json), so you can [import them](https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/import-dashboards/#import-a-dashboard) after upgrading.
@@ -285,7 +318,7 @@ command and follow the instructions
* paginator for boto3 ec2 instance types by @sblair-metrostar in https://github.com/nebari-dev/nebari/pull/1923
* Update README.md -- fix typo. by @teoliphant in https://github.com/nebari-dev/nebari/pull/1925
* Add more unit tests, add cleanup step for Digital Ocean integration test by @iameskild in https://github.com/nebari-dev/nebari/pull/1910
* Add cleanup step for AWS integration test, ensure diable_prompt is passed through by @iameskild in https://github.com/nebari-dev/nebari/pull/1921
* Add cleanup step for AWS integration test, ensure disable_prompt is passed through by @iameskild in https://github.com/nebari-dev/nebari/pull/1921
* K8s 1.25 + More Improvements by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/1856
* adding lifecycle ignore to eks node group by @sblair-metrostar in https://github.com/nebari-dev/nebari/pull/1905
* nebari init unit tests by @sblair-metrostar in https://github.com/nebari-dev/nebari/pull/1931
13 changes: 13 additions & 0 deletions pyproject.toml
@@ -177,3 +177,16 @@ exclude_also = [
"@(abc\\.)?abstractmethod",
]
ignore_errors = false

[tool.typos]
files.extend-exclude = ["_build", "*/build/*", "*/node_modules/*", "nebari.egg-info", "*.git", "*.js", "*.json", "*.yaml", "*.yml", "pre-commit-config.yaml"]
default.extend-ignore-re = ["(?Rm)^.*(#|//)\\s*typos: ignore$"]
default.extend-ignore-words-re = ["aks", "AKS"]
default.check-filename = false # Turn off initially, enable once https://github.com/nebari-dev/nebari/issues/2598 is addressed

[tool.codespell]
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
skip = '_build,*/build/*,*/node_modules/*,nebari.egg-info,*.git,package-lock.json,*.lock'
check-hidden = true
ignore-regex = '^\s*"image/\S+": ".*'
ignore-words-list = 'aks'
2 changes: 1 addition & 1 deletion pytest.ini
@@ -1,6 +1,6 @@
[pytest]
addopts =
# show tests that (f)ailed, (E)rror, or (X)passed in the summary
# show tests that (f)ailed, (E)rror, or (X)passed in the summary # typos: ignore
-rfEX
# Make tracebacks shorter
--tb=native
4 changes: 2 additions & 2 deletions src/_nebari/constants.py
@@ -1,4 +1,4 @@
CURRENT_RELEASE = "2024.6.1"
CURRENT_RELEASE = "2024.7.1"

# NOTE: Terraform cannot be upgraded further due to Hashicorp licensing changes
# implemented in August 2023.
@@ -15,7 +15,7 @@
DEFAULT_NEBARI_IMAGE_TAG = CURRENT_RELEASE
DEFAULT_NEBARI_WORKFLOW_CONTROLLER_IMAGE_TAG = CURRENT_RELEASE

DEFAULT_CONDA_STORE_IMAGE_TAG = "2024.6.1"
DEFAULT_CONDA_STORE_IMAGE_TAG = "2024.3.1"

LATEST_SUPPORTED_PYTHON_VERSION = "3.10"

16 changes: 8 additions & 8 deletions src/_nebari/stages/infrastructure/__init__.py
@@ -342,10 +342,10 @@ class GoogleCloudPlatformProvider(schema.Base):
@classmethod
def _check_input(cls, data: Any) -> Any:
google_cloud.check_credentials()
avaliable_regions = google_cloud.regions()
if data["region"] not in avaliable_regions:
available_regions = google_cloud.regions()
if data["region"] not in available_regions:
raise ValueError(
f"Google Cloud region={data['region']} is not one of {avaliable_regions}"
f"Google Cloud region={data['region']} is not one of {available_regions}"
)

available_kubernetes_versions = google_cloud.kubernetes_versions(data["region"])
@@ -588,16 +588,16 @@ def check_provider(cls, data: Any) -> Any:
f"'{provider}' is not a valid enumeration member; permitted: local, existing, do, aws, gcp, azure"
)
else:
setted_providers = [
set_providers = [
provider
for provider in provider_name_abbreviation_map.keys()
if provider in data
]
num_providers = len(setted_providers)
num_providers = len(set_providers)
if num_providers > 1:
raise ValueError(f"Multiple providers set: {setted_providers}")
raise ValueError(f"Multiple providers set: {set_providers}")
elif num_providers == 1:
data["provider"] = provider_name_abbreviation_map[setted_providers[0]]
data["provider"] = provider_name_abbreviation_map[set_providers[0]]
elif num_providers == 0:
data["provider"] = schema.ProviderEnum.local.value
return data
@@ -610,7 +610,7 @@ class NodeSelectorKeyValue(schema.Base):

class KubernetesCredentials(schema.Base):
host: str
cluster_ca_certifiate: str
cluster_ca_certifiate: str # ignored for now. More info in https://github.com/nebari-dev/nebari/issues/2597. # typos: ignore
token: Optional[str] = None
username: Optional[str] = None
password: Optional[str] = None
19 changes: 11 additions & 8 deletions src/_nebari/stages/infrastructure/template/aws/modules/s3/main.tf
@@ -17,6 +17,14 @@ resource "aws_s3_bucket" "main" {
}, var.tags)
}

resource "aws_s3_bucket_public_access_block" "main" {
bucket = aws_s3_bucket.main.id
ignore_public_acls = true
block_public_acls = true
block_public_policy = true
restrict_public_buckets = true
}

resource "aws_s3_bucket_server_side_encryption_configuration" "main" {
bucket = aws_s3_bucket.main.id

@@ -26,12 +26,7 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "main" {
sse_algorithm = "aws:kms"
}
}
}

resource "aws_s3_bucket_public_access_block" "main" {
bucket = aws_s3_bucket.main.id
ignore_public_acls = true
block_public_acls = true
block_public_policy = true
restrict_public_buckets = true
// AWS may return HTTP 409 if PutBucketEncryption is called immediately after S3
// bucket creation. Adding dependency avoids concurrent requests.
depends_on = [aws_s3_bucket_public_access_block.main]
}
(changed file, path not shown)
@@ -47,7 +47,7 @@ variable "availability_zones" {
}

variable "vpc_cidr_block" {
description = "VPC cidr block for infastructure"
description = "VPC cidr block for infrastructure"
type = string
}

(changed file, path not shown)
@@ -1,5 +1,5 @@
output "credentials" {
description = "Credentials required for connecting to kubernets cluster"
description = "Credentials required for connecting to kubernetes cluster"
sensitive = true
value = {
endpoint = "https://${google_container_cluster.main.endpoint}"
(changed file, path not shown)
@@ -27,14 +27,18 @@ extraInitContainers: |
- sh
- -c
- |
wget https://github.com/aerogear/keycloak-metrics-spi/releases/download/2.5.3/keycloak-metrics-spi-2.5.3.jar -P /data/ &&
export SHA256SUM=9b3f52f842a66dadf5ff3cc3a729b8e49042d32f84510a5d73d41a2e39f29a96 &&
if ! (echo "$SHA256SUM /data/keycloak-metrics-spi-2.5.3.jar" | sha256sum -c)
then
echo "Error: Checksum not verified" && exit 1
else
chown 1000:1000 /data/keycloak-metrics-spi-2.5.3.jar &&
chmod 777 /data/keycloak-metrics-spi-2.5.3.jar
if [ ! -f /data/keycloak-metrics-spi-2.5.3.jar ]; then
wget https://github.com/aerogear/keycloak-metrics-spi/releases/download/2.5.3/keycloak-metrics-spi-2.5.3.jar -P /data/ &&
export SHA256SUM=9b3f52f842a66dadf5ff3cc3a729b8e49042d32f84510a5d73d41a2e39f29a96 &&
if ! (echo "$SHA256SUM /data/keycloak-metrics-spi-2.5.3.jar" | sha256sum -c)
then
echo "Error: Checksum not verified" && exit 1
else
chown 1000:1000 /data/keycloak-metrics-spi-2.5.3.jar &&
chmod 777 /data/keycloak-metrics-spi-2.5.3.jar
fi
else
echo "File /data/keycloak-metrics-spi-2.5.3.jar already exists. Skipping download."
fi
image: busybox:1.36
name: initialize-spi-metrics-jar
(changed file, path not shown)
@@ -129,7 +129,7 @@ def base_node_group(options):
default_node_group if worker_node_group is None else worker_node_group
)

# check `schduler_extra_pod_config` first
# check `scheduler_extra_pod_config` first
scheduler_node_group = (
config["profiles"][options.profile]
.get("scheduler_extra_pod_config", {})
(remaining changed files not shown)
