Merge pull request ITISFoundation#6650 from sanderegg/update-master-7
⬆️Update upstream/master 7
sanderegg authored Nov 1, 2024
2 parents 2486b38 + add4b61 commit 1fe311d
Showing 50 changed files with 414 additions and 108 deletions.
29 changes: 15 additions & 14 deletions .env-devel
@@ -10,7 +10,7 @@
# unset $(grep -v '^#' .env | sed -E 's/(.*)=.*/\1/' | xargs)
#

-AGENT_LOGLEVEL=WARNING
+AGENT_LOGLEVEL=INFO
AGENT_VOLUMES_CLEANUP_S3_ACCESS_KEY=12345678
AGENT_VOLUMES_CLEANUP_S3_BUCKET=simcore-volume-backups
AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001
@@ -19,7 +19,7 @@ AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1
AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678

API_SERVER_DEV_FEATURES_ENABLED=0
-API_SERVER_LOGLEVEL=WARNING
+API_SERVER_LOGLEVEL=INFO
API_SERVER_PROFILING=1
TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25

@@ -29,7 +29,7 @@ AUTOSCALING_DOCKER_JOIN_DRAINED=True
AUTOSCALING_WAIT_FOR_CLOUD_INIT_BEFORE_WARM_BUFFER_ACTIVATION=False
AUTOSCALING_EC2_ACCESS=null
AUTOSCALING_EC2_INSTANCES=null
-AUTOSCALING_LOGLEVEL=WARNING
+AUTOSCALING_LOGLEVEL=INFO
AUTOSCALING_NODES_MONITORING=null
AUTOSCALING_POLL_INTERVAL="00:00:10"
AUTOSCALING_SSM_ACCESS=null
@@ -39,7 +39,7 @@ AWS_S3_CLI_S3=null
CATALOG_BACKGROUND_TASK_REST_TIME=60
CATALOG_DEV_FEATURES_ENABLED=0
CATALOG_HOST=catalog
-CATALOG_LOGLEVEL=WARNING
+CATALOG_LOGLEVEL=INFO
CATALOG_PORT=8000
CATALOG_PROFILING=1
CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}'
@@ -52,15 +52,15 @@ CLUSTERS_KEEPER_DASK_WORKER_SATURATION=inf
CLUSTERS_KEEPER_EC2_ACCESS=null
CLUSTERS_KEEPER_SSM_ACCESS=null
CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX=""
-CLUSTERS_KEEPER_LOGLEVEL=WARNING
+CLUSTERS_KEEPER_LOGLEVEL=INFO
CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5
CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null
CLUSTERS_KEEPER_TASK_INTERVAL=30
CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null

DASK_SCHEDULER_HOST=dask-scheduler
DASK_SCHEDULER_PORT=8786
-DASK_SIDECAR_LOGLEVEL=WARNING
+DASK_SIDECAR_LOGLEVEL=INFO
DASK_TLS_CA_FILE=/home/scu/.dask/dask-crt.pem
DASK_TLS_CERT=/home/scu/.dask/dask-crt.pem
DASK_TLS_KEY=/home/scu/.dask/dask-key.pem
@@ -91,7 +91,7 @@ DIRECTOR_V2_DYNAMIC_SCHEDULER_CLOSE_SERVICES_VIA_FRONTEND_WHEN_CREDITS_LIMIT_REA
DIRECTOR_V2_DYNAMIC_SIDECAR_SLEEP_AFTER_CONTAINER_REMOVAL=0
DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS='{}'
DIRECTOR_V2_HOST=director-v2
-DIRECTOR_V2_LOGLEVEL=WARNING
+DIRECTOR_V2_LOGLEVEL=INFO
DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH=null
DIRECTOR_V2_PORT=8000
DIRECTOR_V2_PROFILING=1
@@ -115,7 +115,7 @@ FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "[email protected]
# Can use 'docker run -it itisfoundation/invitations:latest simcore-service-invitations generate-dotenv --auto-password'
INVITATIONS_DEFAULT_PRODUCT=osparc
INVITATIONS_HOST=invitations
-INVITATIONS_LOGLEVEL=WARNING
+INVITATIONS_LOGLEVEL=INFO
INVITATIONS_OSPARC_URL=http://127.0.0.1.nip.io:9081
INVITATIONS_PASSWORD=adminadmin
INVITATIONS_PORT=8000
@@ -124,6 +124,7 @@ INVITATIONS_SWAGGER_API_DOC_ENABLED=1
INVITATIONS_USERNAME=admin

LOG_FORMAT_LOCAL_DEV_ENABLED=1
+LOG_FILTER_MAPPING='{}'

PAYMENTS_ACCESS_TOKEN_EXPIRE_MINUTES=30
PAYMENTS_ACCESS_TOKEN_SECRET_KEY=2c0411810565e063309be1457009fb39ce023946f6a354e6935107b57676
@@ -138,7 +139,7 @@ PAYMENTS_FAKE_COMPLETION=0
PAYMENTS_GATEWAY_API_SECRET=adminadmin
PAYMENTS_GATEWAY_URL=http://127.0.0.1:32769
PAYMENTS_HOST=payments
-PAYMENTS_LOGLEVEL=WARNING
+PAYMENTS_LOGLEVEL=INFO
PAYMENTS_PASSWORD=adminadmin
PAYMENTS_PORT=8000
PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret'
@@ -179,7 +180,7 @@ RESOURCE_MANAGER_RESOURCE_TTL_S=900
RESOURCE_USAGE_TRACKER_HOST=resource-usage-tracker
RESOURCE_USAGE_TRACKER_PORT=8000
RESOURCE_USAGE_TRACKER_EXTERNAL_PORT=8000
-RESOURCE_USAGE_TRACKER_LOGLEVEL=WARNING
+RESOURCE_USAGE_TRACKER_LOGLEVEL=INFO
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300
@@ -214,7 +215,7 @@ BF_API_KEY=none
BF_API_SECRET=none
STORAGE_ENDPOINT=storage:8080
STORAGE_HOST=storage
-STORAGE_LOGLEVEL=WARNING
+STORAGE_LOGLEVEL=INFO
STORAGE_PORT=8080
STORAGE_PROFILING=1
# STORAGE ----
@@ -245,7 +246,7 @@ WB_GC_GARBAGE_COLLECTOR='{"GARBAGE_COLLECTOR_INTERVAL_S": 30}'
WB_GC_GROUPS=0
WB_GC_INVITATIONS=null
WB_GC_LOGIN=null
-WB_GC_LOGLEVEL=WARNING
+WB_GC_LOGLEVEL=INFO
WB_GC_META_MODELING=0
WB_GC_NOTIFICATIONS=0
WB_GC_PAYMENTS=null
@@ -278,7 +279,7 @@ WB_DB_EL_GARBAGE_COLLECTOR=null
WB_DB_EL_GROUPS=0
WB_DB_EL_INVITATIONS=null
WB_DB_EL_LOGIN=null
-WB_DB_EL_LOGLEVEL=WARNING
+WB_DB_EL_LOGLEVEL=INFO
WB_DB_EL_META_MODELING=0
WB_DB_EL_NOTIFICATIONS=0
WB_DB_EL_PAYMENTS=null
@@ -348,7 +349,7 @@ WEBSERVER_GROUPS=1
WEBSERVER_GUNICORN_CMD_ARGS=--timeout=180
WEBSERVER_HOST=webserver
WEBSERVER_LOGIN={}
-WEBSERVER_LOGLEVEL=WARNING
+WEBSERVER_LOGLEVEL=INFO
WEBSERVER_META_MODELING=1
WEBSERVER_NOTIFICATIONS=1
WEBSERVER_PAYMENTS={}
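Taken together, the .env-devel changes raise the default verbosity of every service from WARNING to INFO and introduce the new LOG_FILTER_MAPPING variable, whose empty '{}' default disables message filtering. A non-default value — purely illustrative, not part of this commit — would map a logger name to the message substrings to drop, using the same JSON-in-quotes style as the other variables in this file:

LOG_FILTER_MAPPING='{"uvicorn.access": ["\"GET /metrics HTTP/1.1\" 200", "\"GET / HTTP/1.1\" 200"]}'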
24 changes: 21 additions & 3 deletions packages/service-library/src/servicelib/logging_utils.py
@@ -16,6 +16,7 @@
from pathlib import Path
from typing import Any, NotRequired, TypeAlias, TypedDict, TypeVar

+from .logging_utils_filtering import GeneralLogFilter, LoggerName, MessageSubstring
from .utils_secrets import mask_sensitive_data

_logger = logging.getLogger(__name__)
@@ -86,7 +87,11 @@ def format(self, record) -> str:
# log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{DATA:log_source} \| log_msg=%{GREEDYDATA:log_msg}


-def config_all_loggers(*, log_format_local_dev_enabled: bool) -> None:
+def config_all_loggers(
+    *,
+    log_format_local_dev_enabled: bool,
+    logger_filter_mapping: dict[LoggerName, list[MessageSubstring]],
+) -> None:
    """
    Applies common configuration to ALL registered loggers
    """
@@ -102,12 +107,25 @@ def config_all_loggers(*, log_format_local_dev_enabled: bool) -> None:
fmt = LOCAL_FORMATTING

    for logger in loggers:
-        set_logging_handler(
+        _set_logging_handler(
            logger, fmt=fmt, log_format_local_dev_enabled=log_format_local_dev_enabled
        )

+    for logger_name, filtered_routes in logger_filter_mapping.items():
+        logger = logging.getLogger(logger_name)
+        # Check if the logger has any handlers or is in active use
+        if not logger.hasHandlers():
+            _logger.warning(
+                "Logger %s does not have any handlers. Filter will not be added.",
+                logger_name,
+            )
+            continue
+
+        log_filter = GeneralLogFilter(filtered_routes)
+        logger.addFilter(log_filter)


-def set_logging_handler(
+def _set_logging_handler(
    logger: logging.Logger,
    *,
    fmt: str,
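For reference, services now pass the mapping straight into the updated helper. A minimal sketch of the new call — the mapping value below is illustrative, and servicelib must be importable — looks like this:

import logging

from servicelib.logging_utils import config_all_loggers

logging.basicConfig(level=logging.INFO)  # gives the root logger a handler, so hasHandlers() succeeds
config_all_loggers(
    log_format_local_dev_enabled=True,
    logger_filter_mapping={
        "uvicorn.access": ['"GET /metrics HTTP/1.1" 200'],
    },
)

As the code above shows, a filter is only attached when the named logger (or one of its ancestors) already has a handler; otherwise a warning is emitted and that entry is skipped.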
28 changes: 28 additions & 0 deletions packages/service-library/src/servicelib/logging_utils_filtering.py
@@ -0,0 +1,28 @@
"""
This code originates from this article
https://medium.com/swlh/add-log-decorators-to-your-python-project-84094f832181
SEE also https://github.com/Delgan/loguru for a future alternative
"""

import logging
from typing import TypeAlias

_logger = logging.getLogger(__name__)

LoggerName: TypeAlias = str
MessageSubstring: TypeAlias = str


class GeneralLogFilter(logging.Filter):
    def __init__(self, filtered_routes: list[str]) -> None:
        super().__init__()
        self.filtered_routes = filtered_routes

    def filter(self, record: logging.LogRecord) -> bool:
        msg = record.getMessage()

        # Check whether any of the filtered routes exist in the message
        return not any(
            filter_criteria in msg for filter_criteria in self.filtered_routes
        )
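Because GeneralLogFilter is plain stdlib logging machinery, it can also be attached by hand. A small sketch (the logger name and substrings are illustrative): a record is dropped as soon as any configured substring appears in its formatted message, otherwise it passes through untouched.

import logging

from servicelib.logging_utils_filtering import GeneralLogFilter

logging.basicConfig(level=logging.INFO)
access_logger = logging.getLogger("uvicorn.access")
access_logger.addFilter(
    GeneralLogFilter(['"GET / HTTP/1.1" 200', '"GET /metrics HTTP/1.1" 200'])
)

access_logger.info('"GET / HTTP/1.1" 200 OK')          # suppressed: contains a configured substring
access_logger.info('"GET /projects HTTP/1.1" 200 OK')  # emitted: no configured substring matches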
99 changes: 99 additions & 0 deletions packages/service-library/tests/test_logging_utils_filtering.py
@@ -0,0 +1,99 @@
# pylint: disable=redefined-outer-name

import logging
from typing import Generator

import pytest
from servicelib.logging_utils_filtering import GeneralLogFilter


@pytest.fixture
def logger_with_filter() -> Generator[tuple[logging.Logger, list[str]], None, None]:
    # Set up a logger for testing
    logger = logging.getLogger("uvicorn.access")
    logger.setLevel(logging.DEBUG)

    # Create a list to capture log outputs
    log_capture = []

    # Create a handler that appends log messages to the log_capture list
    class ListHandler(logging.Handler):
        def emit(self, record):
            log_capture.append(self.format(record))

    handler = ListHandler()
    logger.addHandler(handler)

    # Set up the filter based on the new logic
    filtered_routes = [
        '"GET / HTTP/1.1" 200',
        '"GET /metrics HTTP/1.1" 200',
    ]

    # Add the GeneralLogFilter to the logger
    log_filter = GeneralLogFilter(filtered_routes)
    logger.addFilter(log_filter)

    # Return logger and the log_capture for testing
    yield logger, log_capture

    # Cleanup: remove handlers and filters after test
    logger.handlers.clear()
    logger.filters.clear()


def test_log_filtered_out(logger_with_filter: tuple[logging.Logger, list[str]]):
    logger, log_capture = logger_with_filter

    # Create a log record that should be filtered out (matches the filter criteria)
    record = logger.makeRecord(
        name="uvicorn.access",
        level=logging.INFO,
        fn="testfile",
        lno=10,
        msg='"GET / HTTP/1.1" 200 OK',
        args=(),
        exc_info=None,
    )
    logger.handle(record)

    # Assert no log messages were captured (filtered out)
    assert len(log_capture) == 0


def test_log_allowed(logger_with_filter):
    logger, log_capture = logger_with_filter

    # Create a log record that should NOT be filtered out (doesn't match any filter criteria)
    record = logger.makeRecord(
        name="uvicorn.access",
        level=logging.INFO,
        fn="testfile",
        lno=10,
        msg='"GET /another HTTP/1.1" 200 OK',
        args=(),
        exc_info=None,
    )
    logger.handle(record)

    # Assert the log message was captured (not filtered out)
    assert len(log_capture) == 1


def test_log_with_different_status(logger_with_filter):
    logger, log_capture = logger_with_filter

    # Create a log record that has the same route but a different status code (should pass through)
    record = logger.makeRecord(
        name="uvicorn.access",
        level=logging.INFO,
        fn="testfile",
        lno=10,
        msg='"GET / HTTP/1.1" 500 Internal Server Error',
        args=(),
        exc_info=None,
    )
    logger.handle(record)

    # Assert the log message was captured (not filtered out due to different status code)
    assert len(log_capture) == 1
3 changes: 2 additions & 1 deletion services/agent/src/simcore_service_agent/core/application.py
@@ -30,7 +30,8 @@ def _setup_logger(settings: ApplicationSettings):
    logging.basicConfig(level=settings.LOGLEVEL.value) # NOSONAR
    logging.root.setLevel(settings.LOGLEVEL.value)
    config_all_loggers(
-        log_format_local_dev_enabled=settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED
+        log_format_local_dev_enabled=settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED,
+        logger_filter_mapping=settings.AGENT_VOLUMES_LOG_FILTER_MAPPING,
    )


6 changes: 6 additions & 0 deletions services/agent/src/simcore_service_agent/core/settings.py
@@ -4,6 +4,7 @@
from common_library.pydantic_validators import validate_numeric_string_as_timedelta
from models_library.basic_types import BootModeEnum, LogLevel
from pydantic import AliasChoices, Field, field_validator
+from servicelib.logging_utils_filtering import LoggerName, MessageSubstring
from settings_library.base import BaseCustomSettings
from settings_library.r_clone import S3Provider
from settings_library.rabbit import RabbitSettings
@@ -32,6 +33,11 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
"disabled if you want to have structured logs!"
),
)
AGENT_VOLUMES_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field(
default_factory=dict,
env=["AGENT_VOLUMES_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"],
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
)
AGENT_VOLUMES_CLEANUP_TARGET_SWARM_STACK_NAME: str = Field(
..., description="Exactly the same as director-v2's `SWARM_STACK_NAME` env var"
)
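The env=[...] list gives the field a service-specific name plus the shared LOG_FILTER_MAPPING fallback that appears in .env-devel. A toy sketch of that fallback idea — written with plain pydantic-settings and AliasChoices rather than the repository's BaseCustomSettings, so everything below is illustrative — could look like:

import os

from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings


class _DemoSettings(BaseSettings):
    # Either the service-specific variable or the shared LOG_FILTER_MAPPING populates this field.
    AGENT_VOLUMES_LOG_FILTER_MAPPING: dict[str, list[str]] = Field(
        default_factory=dict,
        validation_alias=AliasChoices(
            "AGENT_VOLUMES_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"
        ),
    )


os.environ["LOG_FILTER_MAPPING"] = '{"uvicorn.access": ["/metrics"]}'
print(_DemoSettings().AGENT_VOLUMES_LOG_FILTER_MAPPING)  # {'uvicorn.access': ['/metrics']}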
(File name not rendered in this view; the following hunk updates the api-server application bootstrap.)
@@ -52,7 +52,8 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI:
    logging.basicConfig(level=settings.log_level)
    logging.root.setLevel(settings.log_level)
    config_all_loggers(
-        log_format_local_dev_enabled=settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED
+        log_format_local_dev_enabled=settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED,
+        logger_filter_mapping=settings.API_SERVER_LOG_FILTER_MAPPING,
    )
    _logger.debug("App settings:\n%s", settings.model_dump_json(indent=2))

(File name not rendered in this view; the following hunks update the api-server settings.)
@@ -9,6 +9,7 @@
    SecretStr,
    field_validator,
)
+from servicelib.logging_utils_filtering import LoggerName, MessageSubstring
from settings_library.base import BaseCustomSettings
from settings_library.catalog import CatalogSettings
from settings_library.director_v2 import DirectorV2Settings
@@ -67,6 +68,11 @@ class BasicSettings(BaseCustomSettings, MixinLoggingSettings):
        ),
        description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
    )
+    API_SERVER_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field(
+        default_factory=dict,
+        env=["API_SERVER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"],
+        description="Dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
+    )

    @field_validator("LOG_LEVEL", mode="before")
    @classmethod
(File name not rendered in this view; the following hunks update the autoscaling service settings.)
@@ -24,6 +24,7 @@
    model_validator,
)
from pydantic_settings import SettingsConfigDict
+from servicelib.logging_utils_filtering import LoggerName, MessageSubstring
from settings_library.base import BaseCustomSettings
from settings_library.docker_registry import RegistrySettings
from settings_library.ec2 import EC2Settings
@@ -245,6 +246,11 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
        ),
        description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
    )
+    AUTOSCALING_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field(
+        default_factory=dict,
+        env=["AUTOSCALING_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"],
+        description="Dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
+    )

    AUTOSCALING_EC2_ACCESS: AutoscalingEC2Settings | None = Field(
        json_schema_extra={"auto_default_from_env": True}
(The remaining changed files in this commit are not rendered here.)
