Fixed Kafka integration. #214

Merged · 1 commit · Jul 12, 2024
1 change: 0 additions & 1 deletion fastapi_template/input_model.py
@@ -1,5 +1,4 @@
import abc
import enum
from collections import UserDict
from typing import Any, Callable, List, Optional

@@ -122,7 +122,8 @@
"tests/test_echo.py",
"tests/test_dummy.py",
"tests/test_redis.py",
"tests/test_rabbit.py"
"tests/test_rabbit.py",
"tests/test_kafka.py"
]
},
"Users model": {
@@ -222,35 +222,28 @@ services:

{%- if cookiecutter.enable_kafka == "True" %}

zookeeper:
image: "bitnami/zookeeper:3.7.1"
hostname: "{{cookiecutter.project_name}}-zookeeper"
environment:
ALLOW_ANONYMOUS_LOGIN: "yes"
ZOO_LOG_LEVEL: "ERROR"
healthcheck:
test: zkServer.sh status
interval: 1s
timeout: 3s
retries: 30

kafka:
image: "bitnami/kafka:3.2.0"
image: bitnami/kafka:3.7.1-debian-12-r0
hostname: "{{cookiecutter.project_name}}-kafka"
environment:
KAFKA_BROKER_ID: "1"
ALLOW_PLAINTEXT_LISTENER: "yes"
KAFKA_CFG_LISTENERS: "PLAINTEXT://0.0.0.0:9092"
KAFKA_CFG_ADVERTISED_LISTENERS: "PLAINTEXT://{{cookiecutter.project_name}}-kafka:9092"
KAFKA_CFG_ZOOKEEPER_CONNECT: "{{cookiecutter.project_name}}-zookeeper:2181"
KAFKA_CFG_NODE_ID: "0"
KAFKA_CFG_PROCESS_ROLES: "controller,broker"
KAFKA_CFG_LISTENERS: "PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094"
KAFKA_CFG_ADVERTISED_LISTENERS: "PLAINTEXT://kafka:9092,EXTERNAL://localhost:9094"
KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT"
KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: "0@{{cookiecutter.project_name}}-kafka:9093"
KAFKA_CFG_CONTROLLER_LISTENER_NAMES: "CONTROLLER"
KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: "true"
KAFKA_CFG_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
# Uncomment it to connect from localhost.
# ports:
# - 9094:9094
healthcheck:
test: kafka-topics.sh --list --bootstrap-server localhost:9092
interval: 1s
timeout: 3s
retries: 30
depends_on:
zookeeper:
condition: service_healthy

{%- endif %}

{% if cookiecutter.db_info.name != 'none' %}
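
The compose change above drops the separate Zookeeper service and runs the Bitnami Kafka image in single-node KRaft mode: the broker acts as its own controller on port 9093, serves in-network clients over PLAINTEXT on 9092, and exposes an optional EXTERNAL listener on 9094 for localhost. For context, a minimal sketch of publishing to that broker from another service on the same compose network, assuming the aiokafka client; the topic name is illustrative:

# Sketch only: assumes aiokafka is installed and the code runs on the same
# compose network as the broker, so "kafka:9092" matches the advertised
# PLAINTEXT listener above. The topic name is illustrative.
import asyncio

from aiokafka import AIOKafkaProducer


async def send_ping() -> None:
    producer = AIOKafkaProducer(bootstrap_servers="kafka:9092")
    await producer.start()
    try:
        # send_and_wait returns once the broker acknowledges the record.
        await producer.send_and_wait("demo-topic", b"ping")
    finally:
        await producer.stop()


if __name__ == "__main__":
    asyncio.run(send_ping())
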
@@ -523,5 +523,5 @@ async def client(
:param fastapi_app: the application.
:yield: client for the app.
"""
async with AsyncClient(app=fastapi_app, base_url="http://test") as ac:
async with AsyncClient(app=fastapi_app, base_url="http://test", timeout=2.0) as ac:
yield ac
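
The added timeout=2.0 gives every request made through this fixture a two-second budget for each of httpx's connect, read, write and pool phases, presumably so a test fails fast instead of hanging while a service is still starting. A small sketch of the same behaviour outside the fixture, assuming httpx; the base URL and path are illustrative:

# Sketch only: shows httpx timeout handling; the URL is illustrative.
import asyncio

import httpx


async def fetch_health() -> None:
    # A bare float applies the same limit to connect, read, write and pool phases.
    async with httpx.AsyncClient(base_url="http://test.local", timeout=2.0) as client:
        # A per-request override is possible for endpoints known to be slower.
        response = await client.get("/health", timeout=5.0)
        response.raise_for_status()


asyncio.run(fetch_health())
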
2 changes: 1 addition & 1 deletion fastapi_template/tests/conftest.py
@@ -44,7 +44,7 @@ def generator_start_dir() -> Generator[str, None, None]:


@pytest.fixture()
def default_context(project_name: str) -> None:
def default_context(project_name: str) -> BuilderContext:
"""
Default builder context without features.

10 changes: 5 additions & 5 deletions fastapi_template/tests/test_generator.py
@@ -225,8 +225,8 @@ def test_gunicorn(default_context: BuilderContext, worker_id: str):
run_default_check(default_context, worker_id, without_pytest=True)


# @pytest.mark.parametrize("api", ["rest", "graphql"])
# def test_kafka(default_context: BuilderContext, api: str):
# default_context.enable_kafka = True
# default_context.api_type = api
# run_default_check(default_context)
@pytest.mark.parametrize("api", ["rest", "graphql"])
def test_kafka(default_context: BuilderContext, api: str, worker_id: str):
default_context.enable_kafka = True
default_context.api_type = api
run_default_check(default_context, worker_id)
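
The re-enabled test also takes the worker_id fixture, which pytest-xdist provides: it is "master" when the suite runs without -n and "gw0", "gw1", ... under parallel runs, so run_default_check can keep per-worker state apart. A sketch of the same pattern with a hypothetical fixture name:

# Sketch only: worker_id comes from pytest-xdist; the fixture below is
# hypothetical and just shows how the id can namespace per-worker resources.
import pytest


@pytest.fixture()
def worker_scoped_dir(tmp_path_factory: pytest.TempPathFactory, worker_id: str) -> str:
    # worker_id is "master" without xdist, or "gw0"/"gw1"/... with -n <N>.
    return str(tmp_path_factory.mktemp(f"generated_{worker_id}"))
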