
Commit

add logging end point
chrstnbwnkl committed Nov 19, 2024
1 parent 3795b3b commit ccb4f50
Showing 19 changed files with 199 additions and 75 deletions.
2 changes: 0 additions & 2 deletions .docker_env
@@ -13,8 +13,6 @@ ADMIN_PASS=admin
# DATA_DIR=/home/nilsnolde/dev/gis-ops/routing-graph-packager/data
VALHALLA_URL="http://app"

VALHALLA_IMAGE=gisops/valhalla:latest

POSTGRES_DB=gis
POSTGRES_USER=admin
POSTGRES_PASS=admin
7 changes: 1 addition & 6 deletions .pre-commit-config.yaml
@@ -1,12 +1,7 @@
repos:
- repo: https://github.com/ambv/black
rev: 23.9.1
hooks:
- id: black
language_version: python3
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.0.289
rev: v0.7.4
hooks:
- id: ruff
args: [--fix]
10 changes: 5 additions & 5 deletions Dockerfile
@@ -1,7 +1,7 @@
#--- BEGIN Usual Python stuff ---

FROM ghcr.io/valhalla/valhalla:latest as builder
LABEL [email protected]
FROM ghcr.io/valhalla/valhalla:latest AS builder
LABEL maintainer="Nils Nolde <[email protected]>"

WORKDIR /app

@@ -15,7 +15,7 @@ RUN apt-get update -y > /dev/null && \
python3-venv \
curl > /dev/null

ENV POETRY_BIN /root/.local/bin/poetry
ENV POETRY_BIN=/root/.local/bin/poetry

RUN curl -sSL https://install.python-poetry.org | python && \
$POETRY_BIN config virtualenvs.create false && \
@@ -46,8 +46,8 @@ RUN cd /usr/local/bin && \
for f in valhalla*; do rm $f; done && \
cd .. && mv $preserve ./bin

FROM ubuntu:24.04 as runner_base
MAINTAINER Nils Nolde <[email protected]>
FROM ubuntu:24.04 AS runner_base
LABEL maintainer="Nils Nolde <[email protected]>"

# install Valhalla stuff
RUN apt-get update > /dev/null && \
5 changes: 5 additions & 0 deletions README.md
@@ -14,6 +14,7 @@ The default road dataset is the [OSM](openstreetmap.org) planet PBF. If availabl
- **data updater**: includes a daily OSM updater
- **asynchronous API**: graph generation is outsourced to a [`ARQ`](https://github.com/samuelcolvin/arq) worker
- **email notifications**: notifies the requesting user if the job succeeded/failed
- **logs API**: read the logs for the worker, the app and the graph builder via the API

## "Quick Start"

@@ -87,3 +88,7 @@ The app is listening on `/api/v1/jobs` for new `POST` requests to generate some
- Zip graph tiles from disk according to the request's bounding box and put the package to `$DATA_DIR/output/<JOB_NAME>`, along with a metadata JSON
- **busy**, the current job will be put in the queue and will be processed once it reaches the queue's head
4. Send an email to the requesting user with success or failure notice (including the error message)

### Logs

The app exposes logs via the route `/api/v1/logs/{log_type}`. Available log types are `worker`, `app` and `builder`. An optional query parameter `?lines={n}` limits the output to the last `n` lines. Authentication is required.
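
For example, a minimal Python client using the `requests` library could look like the sketch below. The base URL and credentials are placeholders for your deployment; the API uses HTTP basic auth, and the default gunicorn config binds to port 5000.

```python
# Sketch: fetch the last 100 lines of the worker log via HTTP basic auth.
# Base URL and credentials are placeholders — substitute your deployment's values
# (the admin user is configured via ADMIN_EMAIL/ADMIN_PASS).
import requests

resp = requests.get(
    "http://localhost:5000/api/v1/logs/worker",
    params={"lines": 100},
    auth=("<admin user>", "<admin password>"),
)
resp.raise_for_status()
print(resp.text)  # the logs are returned as plain text
```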
5 changes: 1 addition & 4 deletions cli.py
@@ -15,17 +15,14 @@
from routing_packager_app import SETTINGS
from routing_packager_app.db import get_db
from routing_packager_app.api_v1.models import Job, User
from routing_packager_app.logger import AppSmtpHandler, get_smtp_details
from routing_packager_app.logger import AppSmtpHandler, get_smtp_details, LOGGER
from routing_packager_app.utils.geom_utils import wkbe_to_geom, wkbe_to_str

JOB_TIMEOUT = 60 * 60 # one hour to compress a single graph

description = "Runs the worker to update the ZIP packages."
parser = ArgumentParser(description=description)

# set up the logger basics
LOGGER = logging.getLogger("packager")


def _sort_jobs(jobs_: List[Job]):
out = list()
8 changes: 4 additions & 4 deletions conf/valhalla.conf
@@ -12,8 +12,8 @@ autorestart=false
redirect_stderr=true
# Either log to file inside the container or
# log to PID 1 (gunicorn in this case) so docker logs will show it
# stdout_logfile=/var/log/build_loop.log
# stdout_logfile_maxbytes=1MB
stdout_logfile=/proc/1/fd/1
stdout_logfile_maxbytes=0
stdout_logfile=%(ENV_TMP_DATA_DIR)s/logs/builder.log
stdout_logfile_maxbytes=1MB
# stdout_logfile=/proc/1/fd/1
# stdout_logfile_maxbytes=0
environment=CONCURRENCY="4",DATA_DIR="/app/data",TMP_DATA_DIR="/app/tmp_data"
3 changes: 3 additions & 0 deletions gunicorn.py
@@ -1,3 +1,6 @@
from routing_packager_app.logger import LOGGING_CONFIG

bind = "0.0.0.0:5000"
workers = 1
worker_class = "uvicorn.workers.UvicornWorker"
logconfig_dict = LOGGING_CONFIG
2 changes: 1 addition & 1 deletion main.py
@@ -24,7 +24,7 @@ async def lifespan(app: FastAPI):
p.mkdir(exist_ok=True)
SETTINGS.get_output_path().mkdir(exist_ok=True)
yield
app.state.redis_pool.shutdown()
await app.state.redis_pool.shutdown()


app: FastAPI = create_app(lifespan=lifespan)
33 changes: 11 additions & 22 deletions pyproject.toml
@@ -44,24 +44,19 @@ coveralls = "^3.3.1"
requires = ["poetry>=1.0.0"]
build-backend = "poetry.masonry.api"

[tool.black]
line-length = 105
exclude = '''
/(
\.git
| \.venv
| dist
| build
)/
'''

[tool.ruff]

extend-exclude = [".venv", "third_party", "build"]
lint.preview = true
format.preview = true

# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
select = ["E", "F"]
ignore = []
lint.select = ["E", "F", "RUF022"]
lint.ignore = []
line-length = 105

# Allow autofix for all enabled rules (when `--fix`) is provided.
fixable = [
lint.fixable = [
"A",
"B",
"C",
@@ -106,12 +101,6 @@ fixable = [
"TRY",
"UP",
"YTT",
"RUF022",
]
unfixable = []

# Exclude a variety of commonly ignored directories.
exclude = [".venv", "__pycache__", ".git"]

# Same as Black.
line-length = 105
target-version = "py312"
lint.unfixable = []
3 changes: 2 additions & 1 deletion routing_packager_app/api_v1/__init__.py
@@ -1,7 +1,8 @@
from fastapi import APIRouter

from .routes import users, jobs
from .routes import jobs, logs, users

api_v1_router = APIRouter()
api_v1_router.include_router(jobs.router, prefix="/jobs", tags=["jobs"])
api_v1_router.include_router(users.router, prefix="/users", tags=["users"])
api_v1_router.include_router(logs.router, prefix="/logs", tags=["logs"])
11 changes: 9 additions & 2 deletions routing_packager_app/api_v1/models.py
@@ -1,12 +1,13 @@
from datetime import datetime
from typing import Optional, List
from enum import Enum
from typing import List, Optional

from fastapi.security import HTTPBasicCredentials
from geoalchemy2 import Geography
from pydantic import EmailStr
from sqlalchemy import Column
from sqlalchemy_utils import PasswordType
from sqlmodel import SQLModel, Field, DateTime, Relationship, Session, select, AutoString
from sqlmodel import AutoString, DateTime, Field, Relationship, Session, SQLModel, select

from ..config import SETTINGS
from ..constants import Providers, Statuses
@@ -108,3 +109,9 @@ def add_admin_user(session: Session):
admin_user = User(email=admin_email, password=admin_pass)
session.add(admin_user)
session.commit()


class LogType(str, Enum):
    WORKER = "worker"
    APP = "app"
    BUILDER = "builder"
48 changes: 48 additions & 0 deletions routing_packager_app/api_v1/routes/logs.py
@@ -0,0 +1,48 @@
from fastapi import APIRouter, Depends, HTTPException
from fastapi.responses import PlainTextResponse
from fastapi.security import HTTPBasicCredentials
from sqlmodel import Session
from starlette.status import (
    HTTP_400_BAD_REQUEST,
    HTTP_401_UNAUTHORIZED,
)

from ...auth.basic_auth import BasicAuth
from ...config import SETTINGS
from ...db import get_db
from ..models import LogType, User

router = APIRouter()


@router.get("/{log_type}", response_class=PlainTextResponse)
def get_logs(
    log_type: LogType,
    lines: int | None = None,
    db: Session = Depends(get_db),
    auth: HTTPBasicCredentials = Depends(BasicAuth),
):
    # first authenticate
    req_user = User.get_user(db, auth)
    if not req_user:
        raise HTTPException(HTTP_401_UNAUTHORIZED, "Not authorized to read logs.")

    # figure out which log file to read
    log_file = SETTINGS.get_logging_dir() / f"{log_type.value}.log"

    try:
        with open(log_file) as fh:
            if lines is None:
                return fh.read()
            # only return the last `lines` lines: count them first, then re-read from the computed offset
            line_count = len([1 for _ in fh.readlines()])
            start_i = line_count - lines if line_count > lines else 0
            response = ""
            fh.seek(0)
            for i, line in enumerate(fh.readlines()):
                if i < start_i:
                    continue
                response += line
            return response
    except:  # noqa
        raise HTTPException(HTTP_400_BAD_REQUEST, f"Unable to open {log_file}.")
20 changes: 16 additions & 4 deletions routing_packager_app/config.py
@@ -18,7 +18,7 @@ class BaseSettings(_BaseSettings):

DESCRIPTION_PATH: Path = BASE_DIR.joinpath("DESCRIPTION.md")

### APP ###
# APP ###
ADMIN_EMAIL: str = "[email protected]"
ADMIN_PASS: str = "admin"
# TODO: clarify if there's a need to restrict origins
@@ -30,15 +30,15 @@

ENABLED_PROVIDERS: list[str] = list(CommaSeparatedStrings("osm"))

### DATABASES ###
# DATABASES ###
POSTGRES_HOST: str = "localhost"
POSTGRES_PORT: int = 5432
POSTGRES_DB: str = "gis"
POSTGRES_USER: str = "admin"
POSTGRES_PASS: str = "admin"
REDIS_URL: str = "redis://localhost"

### SMTP ###
# SMTP ###
SMTP_HOST: str = "localhost"
SMTP_PORT: int = 1025
SMTP_FROM: str = "[email protected]"
@@ -79,6 +79,19 @@ def get_tmp_data_dir(self) -> Path:

return tmp_data_dir

def get_logging_dir(self) -> Path:
    """
    Gets the path where logs are stored for both worker and builder/app
    """
    tmp_data_dir = self.TMP_DATA_DIR
    if os.path.isdir("/app") and not os.getenv("CI", None):  # pragma: no cover
        tmp_data_dir = Path("/app/tmp_data")
    log_dir = tmp_data_dir / "logs"

    log_dir.mkdir(exist_ok=True)

    return log_dir


class ProdSettings(BaseSettings):
model_config = SettingsConfigDict(case_sensitive=True, env_file=ENV_FILE, extra="ignore")
@@ -108,7 +121,6 @@ class TestSettings(BaseSettings):

# decide which settings we'll use
SETTINGS: Optional[BaseSettings] = None
print("LOADING SETTINGS")
env = os.getenv("API_CONFIG", "prod")
if env == "prod": # pragma: no cover
SETTINGS = ProdSettings()
60 changes: 60 additions & 0 deletions routing_packager_app/logger.py
@@ -1,5 +1,6 @@
from logging.handlers import SMTPHandler
import logging
from logging import config
from typing import List # noqa: F401

from .config import SETTINGS
@@ -58,3 +59,62 @@ def get_smtp_details(toaddrs: List[str]):
conf["secure"] = tuple()

return conf


LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "root": {
        "level": "INFO",
        "handlers": ["default"],
    },
    "loggers": {
        "gunicorn.error": {
            "level": "INFO",
            "handlers": ["app"],
            "propagate": True,
            "qualname": "gunicorn.error",
        },
        "gunicorn.access": {
            "level": "INFO",
            "handlers": ["app"],
            "propagate": True,
            "qualname": "gunicorn.access",
        },
        "worker": {
            "level": "INFO",
            "handlers": ["worker"],
            "propagate": True,
            "qualname": "gunicorn.access",
        },
    },
    "handlers": {
        "worker": {
            "class": "logging.FileHandler",
            "formatter": "worker",
            "filename": str(SETTINGS.get_logging_dir() / "worker.log"),
        },
        "app": {
            "class": "logging.FileHandler",
            "formatter": "app",
            "filename": str(SETTINGS.get_logging_dir() / "app.log"),
        },
        "default": {"class": "logging.StreamHandler", "formatter": "std", "stream": "ext://sys.stdout"},
    },
    "formatters": {
        "worker": {
            "format": "worker: %(asctime)s [%(process)d] [%(levelname)s] %(message)s",
            "datefmt": "[%Y-%m-%d %H:%M:%S %z]",
            "class": "logging.Formatter",
        },
        "app": {
            "format": "app: %(asctime)s [%(process)d] [%(levelname)s] %(message)s",
            "datefmt": "[%Y-%m-%d %H:%M:%S %z]",
            "class": "logging.Formatter",
        },
        "std": {"format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s"},
    },
}

config.dictConfig(LOGGING_CONFIG)
LOGGER = logging.getLogger("worker")