Merge pull request #463 from ropable/master
Allow manager override for Azure acct provision, verify password complexity, refactor to sanitise args, bump minor package versions
ropable authored Sep 17, 2024
2 parents f7a75d3 + 70fb285 commit daa67fe
Showing 10 changed files with 418 additions and 247 deletions.
2 changes: 0 additions & 2 deletions .flake8

This file was deleted.

63 changes: 33 additions & 30 deletions Dockerfile
@@ -1,50 +1,53 @@
# syntax=docker/dockerfile:1
# Prepare the base environment.
FROM python:3.12.4-slim AS builder_base_itassets
FROM python:3.12.6-alpine AS builder_base
LABEL [email protected]
LABEL org.opencontainers.image.source=https://github.com/dbca-wa/it-assets

RUN apt-get update -y \
&& apt-get upgrade -y \
&& apt-get install -y libmagic-dev gcc binutils gdal-bin proj-bin python3-dev libpq-dev gzip \
&& rm -rf /var/lib/apt/lists/* \
&& pip install --root-user-action=ignore --upgrade pip

# Temporary additional steps to mitigate CVE-2023-45853 (zlibg).
#WORKDIR /zlib
# Additional requirements to build zlibg
#RUN apt-get update -y \
# && apt-get install -y wget build-essential make libc-dev \
#RUN wget -q https://zlib.net/zlib-1.3.1.tar.gz && tar xvzf zlib-1.3.1.tar.gz
#WORKDIR /zlib/zlib-1.3.1
#RUN ./configure --prefix=/usr/lib --libdir=/usr/lib/x86_64-linux-gnu \
# && make \
# && make install \
# && rm -rf /zlib
# Install system requirements to build Python packages.
RUN apk add --no-cache \
gcc \
libressl-dev \
musl-dev \
libffi-dev
# Create a non-root user to run the application.
ARG UID=10001
ARG GID=10001
RUN addgroup -g ${GID} appuser \
&& adduser -H -D -u ${UID} -G appuser appuser

# Install Python libs using Poetry.
FROM builder_base_itassets AS python_libs_itassets
FROM builder_base AS python_libs_itassets
# Add system dependencies required to use GDAL
# Ref: https://stackoverflow.com/a/59040511/14508
RUN apk add --no-cache \
gdal \
geos \
proj \
binutils \
&& ln -s /usr/lib/libproj.so.25 /usr/lib/libproj.so \
&& ln -s /usr/lib/libgdal.so.35 /usr/lib/libgdal.so \
&& ln -s /usr/lib/libgeos_c.so.1 /usr/lib/libgeos_c.so
WORKDIR /app
ARG POETRY_VERSION=1.8.3
RUN pip install --no-cache-dir --root-user-action=ignore poetry==${POETRY_VERSION}
COPY poetry.lock pyproject.toml ./
RUN poetry config virtualenvs.create false \
ARG POETRY_VERSION=1.8.3
RUN pip install --no-cache-dir --root-user-action=ignore poetry==${POETRY_VERSION} \
&& poetry config virtualenvs.create false \
&& poetry install --no-interaction --no-ansi --only main

# Create a non-root user.
ARG UID=10001
ARG GID=10001
RUN groupadd -g "${GID}" appuser \
&& useradd --no-create-home --no-log-init --uid "${UID}" --gid "${GID}" appuser
# Remove system libraries, no longer required.
RUN apk del \
gcc \
libressl-dev \
musl-dev \
libffi-dev

# Install the project.
FROM python_libs_itassets
FROM python_libs_itassets AS project_itassets
COPY gunicorn.py manage.py ./
COPY itassets ./itassets
COPY registers ./registers
COPY organisation ./organisation
RUN python manage.py collectstatic --noinput

USER ${UID}
EXPOSE 8080
CMD ["gunicorn", "itassets.wsgi", "--config", "gunicorn.py"]
50 changes: 50 additions & 0 deletions Dockerfile.debian
@@ -0,0 +1,50 @@
# syntax=docker/dockerfile:1
# Prepare the base environment.
FROM python:3.12.4-slim AS builder_base_itassets
LABEL [email protected]
LABEL org.opencontainers.image.source=https://github.com/dbca-wa/it-assets

RUN apt-get update -y \
&& apt-get upgrade -y \
&& apt-get install -y libmagic-dev gcc binutils gdal-bin proj-bin python3-dev libpq-dev gzip \
&& rm -rf /var/lib/apt/lists/* \
&& pip install --root-user-action=ignore --upgrade pip

# Temporary additional steps to mitigate CVE-2023-45853 (zlibg).
#WORKDIR /zlib
# Additional requirements to build zlibg
#RUN apt-get update -y \
# && apt-get install -y wget build-essential make libc-dev \
#RUN wget -q https://zlib.net/zlib-1.3.1.tar.gz && tar xvzf zlib-1.3.1.tar.gz
#WORKDIR /zlib/zlib-1.3.1
#RUN ./configure --prefix=/usr/lib --libdir=/usr/lib/x86_64-linux-gnu \
# && make \
# && make install \
# && rm -rf /zlib

# Install Python libs using Poetry.
FROM builder_base_itassets AS python_libs_itassets
WORKDIR /app
ARG POETRY_VERSION=1.8.3
RUN pip install --no-cache-dir --root-user-action=ignore poetry==${POETRY_VERSION}
COPY poetry.lock pyproject.toml ./
RUN poetry config virtualenvs.create false \
&& poetry install --no-interaction --no-ansi --only main

# Create a non-root user.
ARG UID=10001
ARG GID=10001
RUN groupadd -g "${GID}" appuser \
&& useradd --no-create-home --no-log-init --uid "${UID}" --gid "${GID}" appuser

# Install the project.
FROM python_libs_itassets
COPY gunicorn.py manage.py ./
COPY itassets ./itassets
COPY registers ./registers
COPY organisation ./organisation
RUN python manage.py collectstatic --noinput

USER ${UID}
EXPOSE 8080
CMD ["gunicorn", "itassets.wsgi", "--config", "gunicorn.py"]
34 changes: 19 additions & 15 deletions README.md
@@ -4,62 +4,66 @@ This project consists of a Django application used by the Department of
Biodiversity, Conservation and Attractions to record and manage IT assets
and analytics.

# Installation
## Installation

The recommended way to set up this project for development is using
[Poetry](https://python-poetry.org/docs/) to install and manage a virtual Python
environment. With Poetry installed, change into the project directory and run:

poetry install

Activate the virtualenv like so:

poetry shell

Thereafter, run Python commands in the virtualenv like so:

poetry run python manage.py
python manage.py

Manage new or updated project dependencies with Poetry as well, like so:

poetry add newpackage==1.0

# Environment variables
## Environment variables

This project uses confy to set environment variables (in a `.env` file).
The following variables are required for the project to run:

DATABASE_URL="postgis://USER:PASSWORD@HOST:PORT/DATABASE_NAME"
SECRET_KEY="ThisIsASecretKey"

# Running
## Running

Use `runserver` to run a local copy of the application:

poetry run python manage.py runserver 0:8080
python manage.py runserver 0:8080

Run console commands manually:

poetry run python manage.py shell_plus
python manage.py shell_plus

# Unit tests
## Unit tests

Start with `pip install coverage`. Run unit tests and obtain test coverage as follows:

poetry run coverage run --source='.' manage.py test -k
poetry run coverage report -m
coverage run --source='.' manage.py test -k
coverage report -m

# Docker image
## Docker image

To build a new Docker image from the `Dockerfile`:

docker image build -t ghcr.io/dbca-wa/it-assets .

# Pre-commit hooks
## Pre-commit hooks

This project includes the following pre-commit hooks:

- TruffleHog: https://docs.trufflesecurity.com/docs/scanning-git/precommit-hooks/
- TruffleHog (credential scanning): <https://github.com/marketplace/actions/trufflehog-oss>

Pre-commit hooks may have additional system dependencies to run. Optionally
install pre-commit hooks locally like so:
install pre-commit hooks locally like so (with the virtualenv activated first):

poetry run pre-commit install --allow-missing-config
pre-commit install

Reference: https://pre-commit.com/
Reference: <https://pre-commit.com/>
2 changes: 1 addition & 1 deletion kustomize/overlays/prod/kustomization.yaml
@@ -35,4 +35,4 @@ patches:
- path: postgres_fdw_service_patch.yaml
images:
- name: ghcr.io/dbca-wa/it-assets
newTag: 2.4.29
newTag: 2.4.30
77 changes: 51 additions & 26 deletions organisation/ascender.py
@@ -1,22 +1,17 @@
from datetime import date, datetime, timedelta
import logging
import secrets
from datetime import date, datetime

import requests
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.utils import timezone
import logging
from psycopg import connect
import requests
import secrets
from psycopg import connect, sql

from itassets.utils import ms_graph_client_token
from organisation.microsoft_products import MS_PRODUCTS
from organisation.models import (
DepartmentUser,
DepartmentUserLog,
CostCentre,
Location,
AscenderActionLog,
)
from organisation.utils import title_except, ms_graph_subscribed_sku
from organisation.models import AscenderActionLog, CostCentre, DepartmentUser, DepartmentUserLog, Location
from organisation.utils import ms_graph_subscribed_sku, ms_graph_validate_password, title_except

LOGGER = logging.getLogger("organisation")
DATE_MAX = date(2049, 12, 31)
@@ -193,16 +188,31 @@ def ascender_db_fetch(employee_id=None):
"""Returns an iterator which yields all rows from the Ascender database query.
Optionally pass employee_id to filter on a single employee.
"""
if employee_id:
# Validate `employee_id`: this value needs to be castable as an integer, even though we use it as a string.
try:
int(employee_id)
except ValueError:
raise ValueError("Invalid employee ID value")

conn = get_ascender_connection()
cur = conn.cursor()
columns = ", ".join(f[0] if isinstance(f, (list, tuple)) else f for f in FOREIGN_TABLE_FIELDS)
schema = settings.FOREIGN_SCHEMA
table = settings.FOREIGN_TABLE
columns = sql.SQL(",").join(
sql.Identifier(f[0]) if isinstance(f, (list, tuple)) else sql.Identifier(f) for f in FOREIGN_TABLE_FIELDS
)
schema = sql.Identifier(settings.FOREIGN_SCHEMA)
table = sql.Identifier(settings.FOREIGN_TABLE)
employee_no = sql.Identifier("employee_no")

if employee_id:
query = f"SELECT {columns} FROM {schema}.{table} WHERE employee_no = '{employee_id}'"
# query = f"SELECT {columns} FROM {schema}.{table} WHERE employee_no = '{employee_id}'"
query = sql.SQL("SELECT {columns} FROM {schema}.{table} WHERE {employee_no} = %s").format(
columns=columns, schema=schema, table=table, employee_no=employee_no
)
cur.execute(query, (employee_id,))
else:
query = f"SELECT {columns} FROM {schema}.{table}"
cur.execute(query)
query = sql.SQL("SELECT {columns} FROM {schema}.{table}").format(columns=columns, schema=schema, table=table)
cur.execute(query)

while True:
row = cur.fetchone()
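
For reference, a minimal standalone sketch of the psycopg `sql` composition pattern used in the query above (the connection string, schema, table and column names here are placeholders, not the project's settings): identifiers are escaped with `sql.Identifier`, while the employee number travels as a bound parameter instead of being interpolated into the query string.

    from psycopg import connect, sql

    # Placeholder DSN and object names, for illustration only.
    conn = connect("postgresql://user:password@host:5432/dbname")
    columns = sql.SQL(",").join(sql.Identifier(c) for c in ("employee_no", "surname"))
    query = sql.SQL("SELECT {columns} FROM {schema}.{table} WHERE {key} = %s").format(
        columns=columns,
        schema=sql.Identifier("public"),
        table=sql.Identifier("employees"),
        key=sql.Identifier("employee_no"),
    )
    with conn.cursor() as cur:
        cur.execute(query, ("012345",))  # the value is bound as a parameter, never f-stringed
        print(cur.fetchall())
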
@@ -268,7 +278,7 @@ def ascender_employees_fetch_all():
return records


def check_ascender_user_account_rules(job, ignore_job_start_date=False, logging=False):
def check_ascender_user_account_rules(job, ignore_job_start_date=False, manager_override_email=None, logging=False):
"""Given a passed-in Ascender record and any qualifiers, determine
whether a new Azure AD account can be provisioned for that user.
The 'job start date' rule can be optionally bypassed.
@@ -322,7 +332,15 @@ def check_ascender_user_account_rules(job, ignore_job_start_date=False, logging=
licence_type = "Cloud"

# Rule: user must have a manager recorded, and that manager must exist in our database.
if job["manager_emp_no"] and DepartmentUser.objects.filter(employee_id=job["manager_emp_no"]).exists():
# Partial exception: if an override email is specified, we can override the manager recorded in Ascender.
# That specified manager must still exist in our database to proceed.
if manager_override_email and DepartmentUser.objects.filter(email=manager_override_email).exists():
manager = DepartmentUser.objects.get(email=manager_override_email)
elif manager_override_email and not DepartmentUser.objects.filter(email=manager_override_email).exists():
if logging:
LOGGER.warning(f"Manager with email {manager_override_email} not present in IT Assets, aborting")
return False
elif job["manager_emp_no"] and DepartmentUser.objects.filter(employee_id=job["manager_emp_no"]).exists():
manager = DepartmentUser.objects.get(employee_id=job["manager_emp_no"])
elif job["manager_emp_no"] and not DepartmentUser.objects.filter(employee_id=job["manager_emp_no"]).exists():
if logging:
@@ -486,7 +504,7 @@ def ascender_user_import_all():
create_ad_user_account(job, cc, job_start_date, licence_type, manager, location, token)


def ascender_user_import(employee_id, ignore_job_start_date=False):
def ascender_user_import(employee_id, ignore_job_start_date=False, manager_override_email=None):
"""A convenience function to import a single Ascender employee and create an AD account for them.
This is to allow easier manual intervention where a record goes in after the start date, or an
old employee returns to work and needs a new account created.
@@ -499,7 +517,7 @@ def ascender_user_import(employee_id, ignore_job_start_date=False):
return None
job = jobs[0]

rules_passed = check_ascender_user_account_rules(job, ignore_job_start_date, logging=True)
rules_passed = check_ascender_user_account_rules(job, ignore_job_start_date, manager_override_email, logging=True)
if not rules_passed:
LOGGER.warning(f"Ascender employee ID {employee_id} import did not pass all rules")
return None
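
As a usage illustration of the new override (the employee ID and email below are placeholders), a single record can be imported from a `shell_plus` session, supplying a manager explicitly when the Ascender record's own manager is not present in IT Assets:

    from organisation.ascender import ascender_user_import

    # Placeholder values; the override email must belong to an existing
    # DepartmentUser, otherwise the account rule check aborts.
    ascender_user_import(
        "123456",
        ignore_job_start_date=True,
        manager_override_email="manager@example.com",
    )
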
@@ -645,6 +663,15 @@ def create_ad_user_account(job, cc, job_start_date, licence_type, manager, locat

LOGGER.info(f"Creating new Azure AD account: {display_name}, {email}, {licence_type} account")

# Generate an account password and validate its complexity.
# Reference: https://docs.python.org/3/library/secrets.html#secrets.token_urlsafe
password = None
while password is None:
password = secrets.token_urlsafe(20)
resp = ms_graph_validate_password(password)
if not resp.json()["isValid"]:
password = None

# Configuration setting to explicitly allow creation of new AD users.
if not settings.ASCENDER_CREATE_AZURE_AD:
LOGGER.info(f"Skipping creation of new Azure AD account: {ascender_record} (ASCENDER_CREATE_AZURE_AD == False)")
@@ -671,9 +698,7 @@ def create_ad_user_account(job, cc, job_start_date, licence_type, manager, locat
"mailNickname": mail_nickname,
"passwordProfile": {
"forceChangePasswordNextSignIn": True,
# Generated password should always meet our complexity requirements.
# Reference: https://docs.python.org/3/library/secrets.html#secrets.token_urlsafe
"password": secrets.token_urlsafe(16),
"password": password,
},
}
resp = requests.post(url, headers=headers, json=data)
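
The `ms_graph_validate_password` helper referenced above lives in `organisation/utils.py` and is not shown in this diff. A hedged sketch of the overall generate-and-validate loop follows; the beta `users/validatePassword` Graph endpoint and the bearer-token handling are assumptions inferred from the `isValid` key used above, not something this commit confirms.

    import secrets
    import requests

    # Assumed endpoint; the real helper in organisation/utils.py may differ.
    GRAPH_VALIDATE_URL = "https://graph.microsoft.com/beta/users/validatePassword"

    def generate_valid_password(access_token: str) -> str:
        """Generate candidate passwords until Microsoft Graph reports one as valid."""
        headers = {"Authorization": f"Bearer {access_token}"}
        while True:
            candidate = secrets.token_urlsafe(20)
            resp = requests.post(GRAPH_VALIDATE_URL, headers=headers, json={"password": candidate})
            if resp.json().get("isValid"):
                return candidate
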