diff --git a/mypy.ini b/mypy.ini index b6f89c5bb..38fe8c48a 100644 --- a/mypy.ini +++ b/mypy.ini @@ -81,9 +81,6 @@ ignore_errors = True [mypy-aiokafka.*] ignore_missing_imports = True -[mypy-confluent_kafka.*] -ignore_missing_imports = True - [mypy-kafka.*] ignore_missing_imports = True diff --git a/stubs/confluent_kafka/__init__.pyi b/stubs/confluent_kafka/__init__.pyi new file mode 100644 index 000000000..94c2c13ff --- /dev/null +++ b/stubs/confluent_kafka/__init__.pyi @@ -0,0 +1,4 @@ +from ._model import IsolationLevel +from .cimpl import TopicPartition + +__all__ = ("IsolationLevel", "TopicPartition") diff --git a/stubs/confluent_kafka/_model.pyi b/stubs/confluent_kafka/_model.pyi new file mode 100644 index 000000000..6f8938ea8 --- /dev/null +++ b/stubs/confluent_kafka/_model.pyi @@ -0,0 +1 @@ +class IsolationLevel: ... diff --git a/stubs/confluent_kafka/admin/__init__.pyi b/stubs/confluent_kafka/admin/__init__.pyi new file mode 100644 index 000000000..ad4d09af8 --- /dev/null +++ b/stubs/confluent_kafka/admin/__init__.pyi @@ -0,0 +1,53 @@ +from ..cimpl import NewTopic +from ._config import ConfigEntry, ConfigResource, ConfigSource +from ._listoffsets import ListOffsetsResultInfo, OffsetSpec +from ._metadata import BrokerMetadata, ClusterMetadata, TopicMetadata +from ._resource import ResourceType +from concurrent.futures import Future +from confluent_kafka import IsolationLevel, TopicPartition +from typing import Callable + +__all__ = ( + "AdminClient", + "BrokerMetadata", + "ClusterMetadata", + "ConfigResource", + "ConfigSource", + "NewTopic", + "OffsetSpec", + "ResourceType", + "TopicMetadata", +) + +class AdminClient: + def __init__(self, config: dict[str, str | int | Callable]) -> None: ... + def poll(self, timeout: float) -> int: ... + def list_topics(self, topic: str | None = None, timeout: float = -1) -> ClusterMetadata: ... 
+ def create_topics( + self, + new_topics: list[NewTopic], + operation_timeout: float = 0, + request_timeout: float = -1, + validate_only: bool = False, + ) -> dict[str, Future[None]]: ... + def alter_configs( + self, + resources: list[ConfigResource], + request_timeout: float = -1, + validate_only: bool = False, + ) -> dict[str, Future[ConfigResource]]: ... + def delete_topics( + self, + topics: list[str], + operation_timeout: float = 0, + request_timeout: float = -1, + ) -> dict[str, Future[None]]: ... + def list_offsets( + self, + topic_partition_offsets: dict[TopicPartition, OffsetSpec], + isolation_level: IsolationLevel | None = None, + request_timeout: float = -1, + ) -> dict[TopicPartition, Future[ListOffsetsResultInfo]]: ... + def describe_configs( + self, resources: list[ConfigResource], request_timeout: float = -1 + ) -> dict[ConfigResource, Future[dict[str, ConfigEntry]]]: ... diff --git a/stubs/confluent_kafka/admin/_config.pyi b/stubs/confluent_kafka/admin/_config.pyi new file mode 100644 index 000000000..76c18ed5c --- /dev/null +++ b/stubs/confluent_kafka/admin/_config.pyi @@ -0,0 +1,18 @@ +from ._resource import ResourceType +from enum import Enum + +class ConfigResource: + Type = ResourceType + + def __init__( + self, + restype: ResourceType, + name: str, + set_config: dict[str, str] | None = None, + ) -> None: ... + +class ConfigSource(Enum): + UNKNOWN_CONFIG: int + DYNAMIC_TOPIC_CONFIG: int + +class ConfigEntry: ... diff --git a/stubs/confluent_kafka/admin/_listoffsets.pyi b/stubs/confluent_kafka/admin/_listoffsets.pyi new file mode 100644 index 000000000..ee2f5ebce --- /dev/null +++ b/stubs/confluent_kafka/admin/_listoffsets.pyi @@ -0,0 +1,7 @@ +class OffsetSpec: + @classmethod + def earliest(cls) -> "OffsetSpec": ... + @classmethod + def latest(cls) -> "OffsetSpec": ... + +class ListOffsetsResultInfo: ... 
diff --git a/stubs/confluent_kafka/admin/_metadata.pyi b/stubs/confluent_kafka/admin/_metadata.pyi new file mode 100644 index 000000000..878ed85bd --- /dev/null +++ b/stubs/confluent_kafka/admin/_metadata.pyi @@ -0,0 +1,22 @@ +class ClusterMetadata: + def __init__(self) -> None: + self.cluster_id: str | None + self.controller_id: int + self.brokers: dict[int, BrokerMetadata] + self.topics: dict[str, TopicMetadata] + self.orig_broker_id: int + self.orig_broker_name: str | None + +class BrokerMetadata: ... + +class TopicMetadata: + def __init__(self) -> None: + self.topic: str + self.partitions: dict[int, PartitionMetadata] + +class PartitionMetadata: + def __init__(self) -> None: + self.id: int + self.leader: int + self.replicas: list[int] + self.isrs: list[int] diff --git a/stubs/confluent_kafka/admin/_resource.pyi b/stubs/confluent_kafka/admin/_resource.pyi new file mode 100644 index 000000000..db1cda471 --- /dev/null +++ b/stubs/confluent_kafka/admin/_resource.pyi @@ -0,0 +1,4 @@ +from enum import Enum + +class ResourceType(Enum): + TOPIC: int diff --git a/stubs/confluent_kafka/cimpl.pyi b/stubs/confluent_kafka/cimpl.pyi new file mode 100644 index 000000000..f32df8e0d --- /dev/null +++ b/stubs/confluent_kafka/cimpl.pyi @@ -0,0 +1,32 @@ +from typing import Any + +class KafkaError: + _NOENT: int + _AUTHENTICATION: int + + def code(self) -> int: ... + +class KafkaException(Exception): + def __init__(self, *args: Any, **kwargs: Any) -> None: + self.args: tuple[KafkaError] + +class NewTopic: + def __init__( + self, + topic: str, + num_partitions: int = -1, + replication_factor: int = -1, + replica_assignment: list | None = None, + config: dict[str, str] | None = None, + ) -> None: + self.topic: str + +class TopicPartition: + def __init__( + self, + topic: str, + partition: int = -1, + offset: int = -1001, + metadata: str | None = None, + leader_epoch: int | None = None, + ) -> None: ... 
diff --git a/stubs/confluent_kafka/error.pyi b/stubs/confluent_kafka/error.pyi new file mode 100644 index 000000000..85ceee6da --- /dev/null +++ b/stubs/confluent_kafka/error.pyi @@ -0,0 +1,3 @@ +from confluent_kafka.cimpl import KafkaError, KafkaException + +__all__ = ("KafkaError", "KafkaException") diff --git a/stubs/confluent_kafka/py.typed b/stubs/confluent_kafka/py.typed new file mode 100644 index 000000000..e69de29bb