Skip to content

Commit

Permalink
Merge branch 'main' into fix/update-packages
Browse files Browse the repository at this point in the history
  • Loading branch information
dekkers authored Nov 29, 2024
2 parents 4b892f8 + 0b41aaf commit 53bdc29
Show file tree
Hide file tree
Showing 11 changed files with 118 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,13 @@
"impact": "System administrator ports should only be reachable from safe and known locations to reduce attack surface.",
"recommendation": "Determine if this port should be reachable from the identified location. Limit access to reduce the attack surface if necessary."
},
"KAT-REMOTE-DESKTOP-PORT": {
"description": "An open Microsoft Remote Desktop Protocol (RDP) port was detected.",
"source": "https://www.cloudflare.com/en-gb/learning/access-management/rdp-security-risks/",
"risk": "medium",
"impact": "Remote desktop ports are often the root cause in ransomware attacks, due to weak password usage, outdated software or insecure configurations.",
"recommendation": "Disable the Microsoft RDP service on port 3389 if this is publicly reachable. Add additional security layers, such as VPN access if these ports do require to be enabled to limit the attack surface."
},
"KAT-OPEN-DATABASE-PORT": {
"description": "A database port is open.",
"source": "https://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers",
Expand Down
3 changes: 1 addition & 2 deletions mula/MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
include scheduler/alembic.ini
include scheduler/alembic/script.py.mako
include scheduler/storage/migrations/alembic.ini
2 changes: 1 addition & 1 deletion mula/packaging/deb/data/usr/bin/update-mula-db
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@ set -ae
source /usr/lib/kat/mula.defaults
source /etc/kat/mula.conf
cd /opt/venvs/kat-mula/lib/python*/site-packages
/opt/venvs/kat-mula/bin/python -m alembic --config scheduler/alembic.ini upgrade head
/opt/venvs/kat-mula/bin/python -m alembic --config scheduler/storage/migrations/alembic.ini upgrade head
71 changes: 70 additions & 1 deletion mula/scheduler/schedulers/schedulers/report.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from opentelemetry import trace

from scheduler import context, storage
from scheduler.models import Organisation, ReportTask, Task
from scheduler.models import Organisation, ReportTask, Task, TaskStatus
from scheduler.schedulers import Scheduler
from scheduler.schedulers.queue import PriorityQueue, QueueFullError
from scheduler.storage import filters
Expand Down Expand Up @@ -87,6 +87,40 @@ def push_tasks_for_rescheduling(self):
) as executor:
for schedule in schedules:
report_task = ReportTask.model_validate(schedule.data)

# When the schedule has no cron expression set, but a task has
# already been executed for this schedule, we should not run the
# task again
if schedule.schedule is None:
try:
_, count = self.ctx.datastores.task_store.get_tasks(
scheduler_id=self.scheduler_id,
task_type=report_task.type,
filters=filters.FilterRequest(
filters=[
filters.Filter(column="hash", operator="eq", value=report_task.hash),
filters.Filter(column="schedule_id", operator="eq", value=str(schedule.id)),
]
),
)
if count > 0:
self.logger.debug(
"Schedule has no schedule, but task already executed",
schedule_id=schedule.id,
scheduler_id=self.scheduler_id,
organisation_id=self.organisation.id,
)
continue
except storage.errors.StorageError as exc_db:
self.logger.error(
"Could not get latest task by hash %s",
report_task.hash,
scheduler_id=self.scheduler_id,
organisation_id=self.organisation.id,
exc_info=exc_db,
)
continue

executor.submit(self.push_report_task, report_task, self.push_tasks_for_rescheduling.__name__)

def push_report_task(self, report_task: ReportTask, caller: str = "") -> None:
Expand All @@ -98,6 +132,16 @@ def push_report_task(self, report_task: ReportTask, caller: str = "") -> None:
caller=caller,
)

if self.has_report_task_started_running(report_task):
self.logger.debug(
"Report task already running",
task_hash=report_task.hash,
organisation_id=self.organisation.id,
scheduler_id=self.scheduler_id,
caller=caller,
)
return

if self.is_item_on_queue_by_hash(report_task.hash):
self.logger.debug(
"Report task already on queue",
Expand Down Expand Up @@ -139,3 +183,28 @@ def push_report_task(self, report_task: ReportTask, caller: str = "") -> None:
scheduler_id=self.scheduler_id,
caller=caller,
)

def has_report_task_started_running(self, task: ReportTask) -> bool:
    """Check whether a task with the same hash is still in-flight.

    A task counts as running when the datastore holds a latest task for
    ``task.hash`` whose status is neither FAILED nor COMPLETED.

    Args:
        task: The report task whose hash is used for the datastore lookup.

    Returns:
        True when the latest task for this hash has not finished yet,
        False when there is no such task or it has reached a terminal state.

    Raises:
        storage.errors.StorageError: When the datastore lookup fails.
    """
    try:
        task_db = self.ctx.datastores.task_store.get_latest_task_by_hash(task.hash)
    except storage.errors.StorageError as exc_db:
        self.logger.error(
            "Could not get latest task by hash %s",
            task.hash,
            organisation_id=self.organisation.id,
            scheduler_id=self.scheduler_id,
            exc_info=exc_db,
        )
        # Bare raise keeps the original traceback intact for the caller.
        raise

    # FAILED and COMPLETED are the terminal states; any other status
    # (e.g. queued or dispatched) means the task is still in-flight.
    if task_db is not None and task_db.status not in (TaskStatus.FAILED, TaskStatus.COMPLETED):
        self.logger.debug(
            "Task is still running, according to the datastore",
            task_id=task_db.id,
            organisation_id=self.organisation.id,
            scheduler_id=self.scheduler_id,
        )
        return True

    return False
6 changes: 6 additions & 0 deletions octopoes/bits/ask_port_specification/question_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,12 @@
"pattern": "^(\\s*(,*)[0-9]+,?\\s*)*$",
"default": "1433,1434,3050,3306,5432"
},
"microsoft_rdp_ports": {
"description": "Comma separated list of (Microsoft) RDP ports",
"type": "string",
"pattern": "^(\\s*(,*)[0-9]+,?\\s*)*$",
"default": "3389"
},
"aggregate_findings": {
"description": "Do you want to aggregate findings into one finding of the IP? Answer with true or false.",
"type": "string",
Expand Down
20 changes: 18 additions & 2 deletions octopoes/bits/port_classification_ip/port_classification_ip.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
21, # FTP
22, # SSH
23, # Telnet
3389, # Remote Desktop
5900, # VNC
]
DB_TCP_PORTS = [
Expand All @@ -36,6 +35,9 @@
3306, # MySQL
5432, # PostgreSQL
]
# Default port used by the Microsoft Remote Desktop Protocol (RDP).
MICROSOFT_RDP_PORTS = [3389]


def get_ports_from_config(config, config_key, default):
Expand All @@ -53,6 +55,7 @@ def run(input_ooi: IPPort, additional_oois: list, config: dict[str, Any]) -> Ite
common_udp_ports = get_ports_from_config(config, "common_udp_ports", COMMON_UDP_PORTS)
sa_tcp_ports = get_ports_from_config(config, "sa_tcp_ports", SA_TCP_PORTS)
db_tcp_ports = get_ports_from_config(config, "db_tcp_ports", DB_TCP_PORTS)
microsoft_rdp_ports = get_ports_from_config(config, "microsoft_rdp_ports", MICROSOFT_RDP_PORTS)

for ip_port in additional_oois:
port = ip_port.port
Expand All @@ -66,7 +69,8 @@ def run(input_ooi: IPPort, additional_oois: list, config: dict[str, Any]) -> Ite
yield Finding(
finding_type=open_sa_port.reference,
ooi=ip_port.reference,
description=f"Port {port}/{protocol.value} is a system administrator port and should not be open.",
description=f"Port {port}/{protocol.value} is a system administrator port and "
f"should possibly not be open.",
)
elif protocol == Protocol.TCP and port in db_tcp_ports:
ft = KATFindingType(id="KAT-OPEN-DATABASE-PORT")
Expand All @@ -79,6 +83,18 @@ def run(input_ooi: IPPort, additional_oois: list, config: dict[str, Any]) -> Ite
ooi=ip_port.reference,
description=f"Port {port}/{protocol.value} is a database port and should not be open.",
)
elif port in microsoft_rdp_ports:
open_rdp_port = KATFindingType(id="KAT-REMOTE-DESKTOP-PORT")
if aggregate_findings:
open_ports.append(ip_port.port)
else:
yield open_rdp_port
yield Finding(
finding_type=open_rdp_port.reference,
ooi=ip_port.reference,
description=f"Port {port}/{protocol.value} is a Microsoft Remote Desktop port and "
f"should possibly not be open.",
)
elif (protocol == Protocol.TCP and port not in common_tcp_ports) or (
protocol == Protocol.UDP and port not in common_udp_ports
):
Expand Down
2 changes: 1 addition & 1 deletion octopoes/tests/test_bit_ports.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def test_port_classification_tcp_22():
assert len(results) == 2
finding = results[-1]
assert isinstance(finding, Finding)
assert finding.description == "Port 22/tcp is a system administrator port and should not be open."
assert finding.description == "Port 22/tcp is a system administrator port and should possibly not be open."


def test_port_classification_tcp_5432():
Expand Down
1 change: 1 addition & 0 deletions rocky/reports/views/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,6 +302,7 @@ def create_report_recipe(
ooi=report_recipe,
observed_at=datetime.now(timezone.utc),
)
logger.info("ReportRecipe created", event_code=800091, report_recipe=report_recipe)
return report_recipe

def get_input_data(self) -> dict[str, Any]:
Expand Down
7 changes: 7 additions & 0 deletions rocky/reports/views/mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from typing import Any
from uuid import uuid4

import structlog
from django.contrib import messages
from django.utils.translation import gettext_lazy as _
from tools.ooi_helpers import create_ooi
Expand All @@ -18,6 +19,8 @@
from reports.report_types.multi_organization_report.report import MultiOrganizationReport, collect_report_data
from reports.views.base import BaseReportView, ReportDataDict

logger = structlog.get_logger(__name__)


def collect_reports(observed_at: datetime, octopoes_connector: OctopoesAPIConnector, ooi_pks: list[str], report_types):
error_reports = []
Expand Down Expand Up @@ -194,6 +197,8 @@ def save_report_data(
)

create_ooi(octopoes_api_connector, bytes_client, parent_report_ooi, observed_at)

logger.info("Report created", event_code=800071, report=parent_report_ooi)
return parent_report_ooi


Expand Down Expand Up @@ -271,6 +276,7 @@ def save_aggregate_report_data(

create_ooi(octopoes_api_connector, bytes_client, aggregate_sub_report_ooi, observed_at)

logger.info("Report created", event_code=800071, report=report_ooi)
return report_ooi


Expand Down Expand Up @@ -376,5 +382,6 @@ def save_report(self, report_names: list) -> Report:
)

create_ooi(self.octopoes_api_connector, self.bytes_client, report_ooi, observed_at)
logger.info("Report created", event_code=800071, report=report_ooi)

return report_ooi
3 changes: 3 additions & 0 deletions rocky/reports/views/report_overview.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,7 @@ def run_bulk_actions(self) -> None:

def delete_reports(self, report_references: list[Reference]) -> None:
self.octopoes_api_connector.delete_many(report_references, datetime.now(timezone.utc))
logger.info("Reports deleted", event_code=800073, reports=report_references)
messages.success(self.request, _("Deletion successful."))

def rerun_reports(self, report_references: list[str]) -> None:
Expand Down Expand Up @@ -247,6 +248,7 @@ def recreate_report(
)

create_ooi(self.octopoes_api_connector, self.bytes_client, new_report_ooi, observed_at)
logger.info("Report created", event_code=800071, report=new_report_ooi)

return new_report_ooi

Expand Down Expand Up @@ -349,6 +351,7 @@ def rename_reports(self, report_references: list[str]) -> None:
error_reports.append(f'"{report_ooi.name}"')

if not error_reports:
logger.info("Reports created", event_code=800071, reports=report_references)
return messages.success(self.request, _("Reports successfully renamed."))

return messages.error(self.request, _("Report {} could not be renamed.").format(", ".join(error_reports)))
Expand Down
3 changes: 3 additions & 0 deletions rocky/rocky/scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,13 +299,15 @@ def patch_schedule(self, schedule_id: str, params: dict[str, Any]) -> None:
try:
response = self._client.patch(f"/schedules/{schedule_id}", json=params)
response.raise_for_status()
logger.info("Schedule updated", event_code=800082, schedule_id=schedule_id, params=params)
except (HTTPStatusError, ConnectError):
raise SchedulerHTTPError()

def post_schedule(self, schedule: ScheduleRequest) -> ScheduleResponse:
    """Create a new schedule in the scheduler service.

    Args:
        schedule: The schedule to create; fields set to None are omitted
            from the request payload.

    Returns:
        The schedule as created by the scheduler service.

    Raises:
        SchedulerValidationError: When the HTTP request fails, the
            connection cannot be made, or the response does not validate.
    """
    try:
        res = self._client.post("/schedules", json=schedule.model_dump(exclude_none=True))
        res.raise_for_status()
        logger.info("Schedule created", event_code=800081, schedule=schedule)
        return ScheduleResponse.model_validate_json(res.content)
    except (ValidationError, HTTPStatusError, ConnectError) as exc:
        # Chain the underlying error so the root cause stays visible in logs.
        raise SchedulerValidationError(extra_message="Report schedule failed: ") from exc
Expand All @@ -314,6 +316,7 @@ def delete_schedule(self, schedule_id: str) -> None:
try:
response = self._client.delete(f"/schedules/{schedule_id}")
response.raise_for_status()
logger.info("Schedule deleted", event_code=800083, schedule_id=schedule_id)
except (HTTPStatusError, ConnectError):
raise SchedulerHTTPError()

Expand Down

0 comments on commit 53bdc29

Please sign in to comment.