diff --git a/requirements-all.txt b/requirements-all.txt
index 34444039b3..61b2882586 100644
--- a/requirements-all.txt
+++ b/requirements-all.txt
@@ -1,27 +1,27 @@
-aiodns==3.0.0
+aiodns==3.1.1
 aiofiles==23.2.1
-aiohttp[speedups]==3.8.5
+aiohttp[speedups]==3.8.6
 aiohttp-jinja2==1.5.1
 aiohttp-swagger3==0.7.4
 aiosignal==1.3.1
 aiostream==0.4.5
-annotated-types==0.5.0
+annotated-types==0.6.0
 apscheduler==3.10.4
 asn1crypto==1.5.1
-astroid==3.0.0
+astroid==3.0.1
 async-timeout==4.0.3
 attrs==23.1.0
 autocommand==2.2.2
 azure-common==1.1.28
 azure-core==1.29.4
-azure-identity==1.14.0
+azure-identity==1.14.1
 azure-mgmt-core==1.4.0
 azure-mgmt-resource==23.0.1
 backoff==2.2.1
 bcrypt==4.0.1
-black==23.9.1
-boto3==1.28.60
-botocore==1.31.60
+black==23.10.0
+boto3==1.28.65
+botocore==1.31.65
 brotli==1.1.0
 build==1.0.3
 cachetools==5.3.1
@@ -37,7 +37,7 @@ click==8.1.7
 colorama==0.4.6
 coverage[toml]==7.3.2
 cryptography==41.0.4
-deepdiff==6.6.0
+deepdiff==6.6.1
 defusedxml==0.7.1
 deprecated==1.2.14
 dill==0.3.7
@@ -48,16 +48,16 @@ flake8==6.1.0
 frozendict==2.3.8
 frozenlist==1.4.0
 google-api-core==2.12.0
-google-api-python-client==2.102.0
-google-auth==2.23.2
+google-api-python-client==2.104.0
+google-auth==2.23.3
 google-auth-httplib2==0.1.1
 google-cloud-core==2.3.3
-google-cloud-storage==2.11.0
+google-cloud-storage==2.12.0
 google-crc32c==1.5.0
 google-resumable-media==2.6.0
-googleapis-common-protos==1.60.0
+googleapis-common-protos==1.61.0
 httplib2==0.22.0
-hypothesis==6.87.1
+hypothesis==6.88.1
 idna==3.4
 importlib-metadata==6.8.0
 inflect==7.0.0
@@ -82,10 +82,10 @@ more-itertools==10.1.0
 msal==1.24.1
 msal-extensions==1.0.0
 multidict==6.0.4
-mypy==1.5.1
+mypy==1.6.1
 mypy-extensions==1.0.0
 networkx==3.1
-numpy==1.26.0
+numpy==1.26.1
 oauth2client==4.1.3
 oauthlib==3.2.2
 onelogin==2.0.4
@@ -97,7 +97,7 @@ parsy==2.1
 pathspec==0.11.2
 pep8-naming==0.13.3
 pint==0.22
-pip==23.2.1
+pip==23.3
 pip-tools==7.3.0
 plantuml==0.3.0
 platformdirs==3.11.0
@@ -108,13 +108,13 @@ posthog==3.0.2
 prometheus-client==0.17.1
 prompt-toolkit==3.0.39
 protobuf==4.24.4
-psutil==5.9.5
+psutil==5.9.6
 psycopg2-binary==2.9.9
 pyarrow==13.0.0
 pyasn1==0.5.0
 pyasn1-modules==0.3.0
-pycares==4.3.0
-pycodestyle==2.11.0
+pycares==4.4.0
+pycodestyle==2.11.1
 pycparser==2.21
 pycryptodomex==3.19.0
 pydantic==2.4.2
@@ -123,7 +123,7 @@ pyflakes==3.1.0
 pygithub==2.1.1
 pygments==2.16.1
 pyjwt[crypto]==2.8.0
-pylint==3.0.0
+pylint==3.0.1
 pymysql==1.1.0
 pynacl==1.5.0
 pyopenssl==23.2.0
@@ -154,7 +154,7 @@ s3transfer==0.7.0
 setuptools==68.2.2
 six==1.16.0
 slack-sdk==3.23.0
-snowflake-connector-python==3.2.1
+snowflake-connector-python==3.3.0
 snowflake-sqlalchemy==1.5.0
 sortedcontainers==2.4.0
 sqlalchemy==1.4.49
@@ -179,11 +179,11 @@ typish==1.9.3
 tzdata==2023.3
 tzlocal==5.1
 uritemplate==4.1.1
-urllib3==1.26.17
+urllib3==1.26.18
 ustache==0.1.5
 virtualenv==20.24.5
 wcwidth==0.2.8
-websocket-client==1.6.3
+websocket-client==1.6.4
 wheel==0.41.2
 wrapt==1.15.0
 yarl==1.9.2
diff --git a/requirements-extra.txt b/requirements-extra.txt
index c3e8953fdd..6200552ada 100644
--- a/requirements-extra.txt
+++ b/requirements-extra.txt
@@ -1,11 +1,11 @@
-aiodns==3.0.0
+aiodns==3.1.1
 aiofiles==23.2.1
-aiohttp[speedups]==3.8.5
+aiohttp[speedups]==3.8.6
 aiohttp-jinja2==1.5.1
 aiohttp-swagger3==0.7.4
 aiosignal==1.3.1
 aiostream==0.4.5
-annotated-types==0.5.0
+annotated-types==0.6.0
 apscheduler==3.10.4
 asn1crypto==1.5.1
 async-timeout==4.0.3
@@ -13,13 +13,13 @@ attrs==23.1.0
 autocommand==2.2.2
 azure-common==1.1.28
 azure-core==1.29.4
-azure-identity==1.14.0
+azure-identity==1.14.1
 azure-mgmt-core==1.4.0
 azure-mgmt-resource==23.0.1
 backoff==2.2.1
 bcrypt==4.0.1
-boto3==1.28.60
-botocore==1.31.60
+boto3==1.28.65
+botocore==1.31.65
 brotli==1.1.0
 cachetools==5.3.1
 cattrs==23.1.2
@@ -30,7 +30,7 @@ charset-normalizer==3.3.0
 cheroot==10.0.0
 cherrypy==18.8.0
 cryptography==41.0.4
-deepdiff==6.6.0
+deepdiff==6.6.1
 defusedxml==0.7.1
 deprecated==1.2.14
 fastjsonschema==2.16.3
@@ -38,14 +38,14 @@ filelock==3.12.4
 frozendict==2.3.8
 frozenlist==1.4.0
 google-api-core==2.12.0
-google-api-python-client==2.102.0
-google-auth==2.23.2
+google-api-python-client==2.104.0
+google-auth==2.23.3
 google-auth-httplib2==0.1.1
 google-cloud-core==2.3.3
-google-cloud-storage==2.11.0
+google-cloud-storage==2.12.0
 google-crc32c==1.5.0
 google-resumable-media==2.6.0
-googleapis-common-protos==1.60.0
+googleapis-common-protos==1.61.0
 httplib2==0.22.0
 idna==3.4
 importlib-metadata==6.8.0
@@ -69,7 +69,7 @@ msal==1.24.1
 msal-extensions==1.0.0
 multidict==6.0.4
 networkx==3.1
-numpy==1.26.0
+numpy==1.26.1
 oauth2client==4.1.3
 oauthlib==3.2.2
 onelogin==2.0.4
@@ -87,12 +87,12 @@ posthog==3.0.2
 prometheus-client==0.17.1
 prompt-toolkit==3.0.39
 protobuf==4.24.4
-psutil==5.9.5
+psutil==5.9.6
 psycopg2-binary==2.9.9
 pyarrow==13.0.0
 pyasn1==0.5.0
 pyasn1-modules==0.3.0
-pycares==4.3.0
+pycares==4.4.0
 pycparser==2.21
 pycryptodomex==3.19.0
 pydantic==2.4.2
@@ -124,7 +124,7 @@ s3transfer==0.7.0
 setuptools==68.2.2
 six==1.16.0
 slack-sdk==3.23.0
-snowflake-connector-python==3.2.1
+snowflake-connector-python==3.3.0
 snowflake-sqlalchemy==1.5.0
 sortedcontainers==2.4.0
 sqlalchemy==1.4.49
@@ -139,10 +139,10 @@ typish==1.9.3
 tzdata==2023.3
 tzlocal==5.1
 uritemplate==4.1.1
-urllib3==1.26.17
+urllib3==1.26.18
 ustache==0.1.5
 wcwidth==0.2.8
-websocket-client==1.6.3
+websocket-client==1.6.4
 wrapt==1.15.0
 yarl==1.9.2
 zc-lockfile==3.0.post1
diff --git a/requirements-test.txt b/requirements-test.txt
index 578e1857e3..b52baee7e9 100644
--- a/requirements-test.txt
+++ b/requirements-test.txt
@@ -1,26 +1,26 @@
-aiodns==3.0.0
+aiodns==3.1.1
 aiofiles==23.2.1
-aiohttp[speedups]==3.8.5
+aiohttp[speedups]==3.8.6
 aiohttp-jinja2==1.5.1
 aiohttp-swagger3==0.7.4
 aiosignal==1.3.1
 aiostream==0.4.5
-annotated-types==0.5.0
+annotated-types==0.6.0
 apscheduler==3.10.4
-astroid==3.0.0
+astroid==3.0.1
 async-timeout==4.0.3
 attrs==23.1.0
 autocommand==2.2.2
 azure-common==1.1.28
 azure-core==1.29.4
-azure-identity==1.14.0
+azure-identity==1.14.1
 azure-mgmt-core==1.4.0
 azure-mgmt-resource==23.0.1
 backoff==2.2.1
 bcrypt==4.0.1
-black==23.9.1
-boto3==1.28.60
-botocore==1.31.60
+black==23.10.0
+boto3==1.28.65
+botocore==1.31.65
 brotli==1.1.0
 build==1.0.3
 cachetools==5.3.1
@@ -36,24 +36,23 @@ click==8.1.7
 colorama==0.4.6
 coverage[toml]==7.3.2
 cryptography==41.0.4
-deepdiff==6.6.0
+deepdiff==6.6.1
 defusedxml==0.7.1
 deprecated==1.2.14
 dill==0.3.7
 distlib==0.3.7
-docutils==0.20.1
 fastjsonschema==2.16.3
 filelock==3.12.4
 flake8==6.1.0
 frozendict==2.3.8
 frozenlist==1.4.0
 google-api-core==2.12.0
-google-api-python-client==2.102.0
-google-auth==2.23.2
+google-api-python-client==2.104.0
+google-auth==2.23.3
 google-auth-httplib2==0.1.1
-googleapis-common-protos==1.60.0
+googleapis-common-protos==1.61.0
 httplib2==0.22.0
-hypothesis==6.87.1
+hypothesis==6.88.1
 idna==3.4
 importlib-metadata==6.8.0
 inflect==7.0.0
@@ -78,7 +77,7 @@ more-itertools==10.1.0
 msal==1.24.1
 msal-extensions==1.0.0
 multidict==6.0.4
-mypy==1.5.1
+mypy==1.6.1
 mypy-extensions==1.0.0
 networkx==3.1
 oauth2client==4.1.3
@@ -91,7 +90,7 @@ parsy==2.1
 pathspec==0.11.2
 pep8-naming==0.13.3
 pint==0.22
-pip==23.2.1
+pip==23.3
 pip-tools==7.3.0
 plantuml==0.3.0
 platformdirs==3.11.0
@@ -102,11 +101,11 @@ posthog==3.0.2
 prometheus-client==0.17.1
 prompt-toolkit==3.0.39
 protobuf==4.24.4
-psutil==5.9.5
+psutil==5.9.6
 pyasn1==0.5.0
 pyasn1-modules==0.3.0
-pycares==4.3.0
-pycodestyle==2.11.0
+pycares==4.4.0
+pycodestyle==2.11.1
 pycparser==2.21
 pydantic==2.4.2
 pydantic-core==2.10.1
@@ -114,7 +113,7 @@ pyflakes==3.1.0
 pygithub==2.1.1
 pygments==2.16.1
 pyjwt[crypto]==2.8.0
-pylint==3.0.0
+pylint==3.0.1
 pynacl==1.5.0
 pyparsing==3.1.1
 pyproject-api==1.6.1
@@ -165,11 +164,11 @@ typish==1.9.3
 tzdata==2023.3
 tzlocal==5.1
 uritemplate==4.1.1
-urllib3==1.26.17
+urllib3==1.26.18
 ustache==0.1.5
 virtualenv==20.24.5
 wcwidth==0.2.8
-websocket-client==1.6.3
+websocket-client==1.6.4
 wheel==0.41.2
 wrapt==1.15.0
 yarl==1.9.2
diff --git a/requirements.txt b/requirements.txt
index bc878d5262..02cc4fc13c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,24 +1,24 @@
-aiodns==3.0.0
+aiodns==3.1.1
 aiofiles==23.2.1
-aiohttp[speedups]==3.8.5
+aiohttp[speedups]==3.8.6
 aiohttp-jinja2==1.5.1
 aiohttp-swagger3==0.7.4
 aiosignal==1.3.1
 aiostream==0.4.5
-annotated-types==0.5.0
+annotated-types==0.6.0
 apscheduler==3.10.4
 async-timeout==4.0.3
 attrs==23.1.0
 autocommand==2.2.2
 azure-common==1.1.28
 azure-core==1.29.4
-azure-identity==1.14.0
+azure-identity==1.14.1
 azure-mgmt-core==1.4.0
 azure-mgmt-resource==23.0.1
 backoff==2.2.1
 bcrypt==4.0.1
-boto3==1.28.60
-botocore==1.31.60
+boto3==1.28.65
+botocore==1.31.65
 brotli==1.1.0
 cachetools==5.3.1
 cattrs==23.1.2
@@ -29,17 +29,17 @@ charset-normalizer==3.3.0
 cheroot==10.0.0
 cherrypy==18.8.0
 cryptography==41.0.4
-deepdiff==6.6.0
+deepdiff==6.6.1
 defusedxml==0.7.1
 deprecated==1.2.14
 fastjsonschema==2.16.3
 frozendict==2.3.8
 frozenlist==1.4.0
 google-api-core==2.12.0
-google-api-python-client==2.102.0
-google-auth==2.23.2
+google-api-python-client==2.104.0
+google-auth==2.23.3
 google-auth-httplib2==0.1.1
-googleapis-common-protos==1.60.0
+googleapis-common-protos==1.61.0
 httplib2==0.22.0
 idna==3.4
 importlib-metadata==6.8.0
@@ -78,10 +78,10 @@ posthog==3.0.2
 prometheus-client==0.17.1
 prompt-toolkit==3.0.39
 protobuf==4.24.4
-psutil==5.9.5
+psutil==5.9.6
 pyasn1==0.5.0
 pyasn1-modules==0.3.0
-pycares==4.3.0
+pycares==4.4.0
 pycparser==2.21
 pydantic==2.4.2
 pydantic-core==2.10.1
@@ -121,10 +121,10 @@ typish==1.9.3
 tzdata==2023.3
 tzlocal==5.1
 uritemplate==4.1.1
-urllib3==1.26.17
+urllib3==1.26.18
 ustache==0.1.5
 wcwidth==0.2.8
-websocket-client==1.6.3
+websocket-client==1.6.4
 wrapt==1.15.0
 yarl==1.9.2
 zc-lockfile==3.0.post1
diff --git a/resotocore/Makefile b/resotocore/Makefile
index b3c4e4521c..a6d4985c49 100644
--- a/resotocore/Makefile
+++ b/resotocore/Makefile
@@ -59,7 +59,8 @@ lint: ## static code analysis
 	black --line-length 120 --check resotocore tests
 	flake8 resotocore
 	pylint resotocore
-	mypy --python-version 3.9 --strict --install-types --non-interactive resotocore tests
+	# mypy --python-version 3.9 --strict --install-types --non-interactive resotocore tests
+	mypy --python-version 3.9 --strict resotocore tests
 
 test: ## run tests quickly with the default Python
 	pytest
diff --git a/resotocore/resotocore/cli/__init__.py b/resotocore/resotocore/cli/__init__.py
index c223ec802d..5dc43f835c 100644
--- a/resotocore/resotocore/cli/__init__.py
+++ b/resotocore/resotocore/cli/__init__.py
@@ -3,10 +3,14 @@
 from datetime import datetime
 from functools import lru_cache
 from typing import TypeVar, Union, Any, Callable, AsyncIterator, NoReturn, Optional, Awaitable, Tuple, List
+from typing_extensions import TypeAlias
 
 from aiostream.core import Stream
 from parsy import Parser, regex, string
 
+from resotocore.model.graph_access import Section
+from resotocore.types import JsonElement
+from resotocore.util import AnyT, utc, parse_utc
 from resotolib.durations import parse_duration, DurationRe
 from resotolib.parse_util import (
     make_parser,
@@ -24,14 +28,12 @@
     dash_dp,
     equals_p,
 )
 
-from resotocore.model.graph_access import Section
-from resotocore.types import JsonElement
-from resotocore.util import AnyT, utc, parse_utc
 
 T = TypeVar("T")
 # Allow the function to return either a coroutine or the result directly
 Result = Union[T, Awaitable[T]]
-JsGen = Union[Stream, AsyncIterator[JsonElement]]
+JsStream: TypeAlias = Stream
+JsGen = Union[JsStream, AsyncIterator[JsonElement]]
 # A sink function takes a stream and creates a result
 Sink = Callable[[JsGen], Awaitable[T]]
diff --git a/resotocore/resotocore/cli/command.py b/resotocore/resotocore/cli/command.py
index 4780c3d680..032012c853 100644
--- a/resotocore/resotocore/cli/command.py
+++ b/resotocore/resotocore/cli/command.py
@@ -76,6 +76,7 @@
     parse_time_or_delta,
     strip_quotes,
     key_value_parser,
+    JsStream,
 )
 from resotocore.cli.model import (
     CLICommand,
@@ -845,7 +846,9 @@ def args_info(self) -> ArgsInfo:
 
     @staticmethod
     async def aggregate_in(
-        content: Stream, group_props: Optional[List[str]] = None, fn_props: Optional[List[AggregateFunction]] = None
+        content: JsStream,
+        group_props: Optional[List[str]] = None,
+        fn_props: Optional[List[AggregateFunction]] = None,
     ) -> Dict[tuple[Any, ...], _AggregateIntermediateResult]:
         """
         Aggregate the number of elements in the stream, grouped by the provided group properties and
@@ -928,7 +931,7 @@ def group(keys: tuple[Any, ...]) -> Json:
                 result[av.as_name or ".".join(av.all_names())] = value
             return result
 
-        async def aggregate_data(content: Stream) -> AsyncIterator[JsonElement]:
+        async def aggregate_data(content: JsStream) -> AsyncIterator[JsonElement]:
             async with content.stream() as in_stream:
                 for key, value in (await self.aggregate_in(in_stream, var_names, aggregate.group_func)).items():
                     entry: Json = {"group": group(key)}
@@ -1127,7 +1130,7 @@ def inc_identity(_: Any) -> None:
 
         fn = inc_prop if arg else inc_identity
 
-        async def count_in_stream(content: Stream) -> AsyncIterator[JsonElement]:
+        async def count_in_stream(content: JsStream) -> AsyncIterator[JsonElement]:
             async with content.stream() as in_stream:
                 async for element in in_stream:
                     fn(element)
@@ -1624,7 +1627,7 @@ def parse(self, arg: Optional[str] = None, ctx: CLIContext = EmptyContext, **kwa
         def iterable(it: Any) -> bool:
             return False if isinstance(it, str) else isinstance(it, Iterable)
 
-        def iterate(it: Any) -> Stream:
+        def iterate(it: Any) -> JsStream:
             return stream.iterate(it) if is_async_iterable(it) or iterable(it) else stream.just(it)
 
         return CLIFlow(lambda in_stream: stream.flatmap(in_stream, iterate), required_permissions={Permission.read})
@@ -1923,7 +1926,7 @@ def property_defined_in(model: Model, path_: str) -> List[str]:
                 if any(p for p in kind.resolved_properties() if p.path.same_as(path))
             ]
 
-        async def source() -> Tuple[int, Stream]:
+        async def source() -> Tuple[int, JsStream]:
             model = await self.dependencies.model_handler.load_model(graph_name)
 
             def show(k: ComplexKind) -> bool:
@@ -2305,12 +2308,12 @@ def parse(self, arg: Optional[str] = None, ctx: CLIContext = EmptyContext, **kwa
             raise AttributeError("A format renderer can not be combined together with a format string!")
         use = next(iter(format_to_use))
 
-        async def render_single(converter: ConvertFn, iss: Stream) -> JsGen:
+        async def render_single(converter: ConvertFn, iss: JsStream) -> JsGen:
             async with iss.stream() as streamer:
                 async for elem in converter(streamer):
                     yield elem
 
-        async def format_stream(in_stream: Stream) -> JsGen:
+        async def format_stream(in_stream: JsStream) -> JsGen:
             if use:
                 if all_renderer := self.render_all.get(use):
                     return all_renderer(in_stream)
@@ -2634,7 +2637,7 @@ def fmt_json(elem: Json) -> JsonElement:
             else:
                 return elem
 
-        async def csv_stream(in_stream: Stream) -> JsGen:
+        async def csv_stream(in_stream: JsStream) -> JsGen:
             output = io.StringIO()
             dialect = csv.unix_dialect()
             writer = csv.writer(output, dialect=dialect, quoting=csv.QUOTE_NONNUMERIC)
@@ -2658,7 +2661,7 @@ def to_csv_string(lst: List[Any]) -> str:
                     result.append(value)
                 yield to_csv_string(result)
 
-        def markdown_stream(in_stream: Stream) -> JsGen:
+        def markdown_stream(in_stream: JsStream) -> JsGen:
             chunk_size = 500
 
             columns_padding = [len(name) for _, name in props_to_show]
@@ -2988,7 +2991,7 @@ async def activate_deactivate_job(job_id: str, active: bool) -> AsyncIterator[Js
             else:
                 yield f"No job with this id: {job_id}"
 
-        async def running_jobs() -> Tuple[int, Stream]:
+        async def running_jobs() -> Tuple[int, JsStream]:
             tasks = await self.dependencies.task_handler.running_tasks()
             return len(tasks), stream.iterate(
                 {"job": t.descriptor.id, "started_at": to_json(t.task_started_at), "task-id": t.id}
@@ -3035,10 +3038,10 @@ class SendWorkerTaskCommand(CLICommand, ABC):
     # this method expects a stream of Tuple[str, Dict[str, str], Json]
     def send_to_queue_stream(
         self,
-        in_stream: Stream,
+        in_stream: JsStream,
         result_handler: Callable[[WorkerTask, Future[Json]], Awaitable[Json]],
         wait_for_result: bool,
-    ) -> Stream:
+    ) -> JsStream:
         async def send_to_queue(task_name: str, task_args: Dict[str, str], data: Json) -> JsonElement:
             future = asyncio.get_event_loop().create_future()
             task = WorkerTask(TaskId(uuid_str()), task_name, task_args, data, future, self.timeout())
@@ -3058,11 +3061,11 @@ async def send_to_queue(task_name: str, task_args: Dict[str, str], data: Json) -
     def load_by_id_merged(
         self,
         model: Model,
-        in_stream: Stream,
+        in_stream: JsStream,
         variables: Optional[Set[str]],
         expected_kind: Optional[str] = None,
         **env: str,
-    ) -> Stream:
+    ) -> JsStream:
         async def load_element(items: List[JsonElement]) -> AsyncIterator[JsonElement]:
             # collect ids either from json dict or string
            ids: List[str] = [i["id"] if is_node(i) else i for i in items]  # type: ignore
@@ -3226,8 +3229,8 @@ def update_single(item: Json) -> Tuple[str, Dict[str, str], Json]:
         formatter, variables = ctx.formatter_with_variables(args or "")
         fn = call_function(lambda item: {"args": args_parts_unquoted_parser.parse(formatter(item)), "node": item})
 
-        def setup_stream(in_stream: Stream) -> Stream:
-            def with_dependencies(model: Model) -> Stream:
+        def setup_stream(in_stream: JsStream) -> JsStream:
+            def with_dependencies(model: Model) -> JsStream:
                 load = self.load_by_id_merged(model, in_stream, variables, allowed_on_kind, **ctx.env)
                 handler = self.update_node_in_graphdb(model, **ctx.env) if expect_node_result else self.no_update
                 return self.send_to_queue_stream(stream.map(load, fn), handler, True)
@@ -3239,7 +3242,7 @@ async def load_model() -> Model:
             dependencies = stream.call(load_model)
             return stream.flatmap(dependencies, with_dependencies)
 
-        def setup_source() -> Stream:
+        def setup_source() -> JsStream:
             arg = {"args": args_parts_unquoted_parser.parse(formatter({}))}
             return self.send_to_queue_stream(stream.just((command_name, {}, arg)), self.no_update, True)
 
@@ -3354,8 +3357,8 @@ def update_single(item: Json) -> Tuple[str, Dict[str, str], Json]:
         else:
             raise AttributeError("Expect update tag_key tag_value or delete tag_key")
 
-        def setup_stream(in_stream: Stream) -> Stream:
-            def with_dependencies(model: Model) -> Stream:
+        def setup_stream(in_stream: JsStream) -> JsStream:
+            def with_dependencies(model: Model) -> JsStream:
                 load = self.load_by_id_merged(model, in_stream, variables, **ctx.env)
                 result_handler = self.update_node_in_graphdb(model, **ctx.env)
                 return self.send_to_queue_stream(stream.map(load, fn), result_handler, not ns.nowait)
@@ -3372,7 +3375,7 @@ async def load_model() -> Model:
 
 class FileCommand(CLICommand, InternalPart):
     def parse(self, arg: Optional[str] = None, ctx: CLIContext = EmptyContext, **kwargs: Any) -> CLISource:
-        def file_command() -> Stream:
+        def file_command() -> JsStream:
             if not arg:
                 raise AttributeError("file command needs a parameter!")
             elif not os.path.exists(arg):
@@ -3399,7 +3402,7 @@ def parse(self, arg: Optional[str] = None, ctx: CLIContext = EmptyContext, **kwa
             raise AttributeError("upload command needs a parameter!")
         file_id = "file"
 
-        def upload_command() -> Stream:
+        def upload_command() -> JsStream:
             if file_id in ctx.uploaded_files:
                 file = ctx.uploaded_files[file_id]
                 return stream.just(f"Received file {file} of size {os.path.getsize(file)}")
@@ -3700,7 +3703,7 @@ def args_info(self) -> ArgsInfo:
         return [ArgInfo(expects_value=True, value_hint="file", help_text="file to write to")]
 
     @staticmethod
-    async def write_result_to_file(in_stream: Stream, file_name: str) -> AsyncIterator[JsonElement]:
+    async def write_result_to_file(in_stream: JsStream, file_name: str) -> AsyncIterator[JsonElement]:
         async with TemporaryDirectory() as temp_dir:
             path = os.path.join(temp_dir, uuid_str())
             async with aiofiles.open(path, "w") as f:
@@ -3713,7 +3716,7 @@ async def write_result_to_file(in_stream: Stream, file_name: str) -> AsyncIterat
             yield FilePath.user_local(user=file_name, local=path).json()
 
     @staticmethod
-    async def already_file_stream(in_stream: Stream, file_name: str) -> AsyncIterator[JsonElement]:
+    async def already_file_stream(in_stream: JsStream, file_name: str) -> AsyncIterator[JsonElement]:
         async with in_stream.stream() as streamer:
             async for out in streamer:
                 yield evolve(FilePath.from_path(out), user=Path(file_name)).json()
@@ -4014,7 +4017,7 @@ def parse_header_query_params(remaining_args: List[str]) -> Tuple[Dict[str, str]
         else:
             raise AttributeError("No URL provided to connect to.")
 
-    def perform_requests(self, template: HttpRequestTemplate) -> Callable[[Stream], AsyncIterator[JsonElement]]:
+    def perform_requests(self, template: HttpRequestTemplate) -> Callable[[JsStream], AsyncIterator[JsonElement]]:
         retries_left = template.retries
 
         async def perform_request(e: JsonElement) -> int:
@@ -4055,7 +4058,7 @@ async def perform_request(e: JsonElement) -> int:
                 # define exceptions as server error
                 return 500
 
-        async def iterate_stream(in_stream: Stream) -> AsyncIterator[JsonElement]:
+        async def iterate_stream(in_stream: JsStream) -> AsyncIterator[JsonElement]:
             results: Dict[int, int] = defaultdict(lambda: 0)
             async with in_stream.stream() as streamer:
                 async for elem in streamer:
@@ -4264,7 +4267,7 @@ async def run_workflow(wf_id: str) -> AsyncIterator[str]:
             else:
                 yield f"No workflow with this id: {wf_id}"
 
-        async def running_workflows() -> Tuple[int, Stream]:
+        async def running_workflows() -> Tuple[int, JsStream]:
             tasks = await self.dependencies.task_handler.running_tasks()
 
             def info(rt: RunningTask) -> JsonElement:
@@ -4311,11 +4314,11 @@ def running_task_data(rtd: RunningTaskData) -> Json:
                 result["errors"] = len(rtd.info_messages())
             return result
 
-        async def history_aggregation() -> Stream:
+        async def history_aggregation() -> JsStream:
             info = await self.dependencies.db_access.running_task_db.aggregated_history()
             return stream.just(info)
 
-        async def history_of(history_args: List[str]) -> Tuple[int, Stream]:
+        async def history_of(history_args: List[str]) -> Tuple[int, JsStream]:
             parser = NoExitArgumentParser()
             parser.add_argument("workflow")
             parser.add_argument("--started-after", dest="started_after", type=date_parser.parse)
@@ -4531,7 +4534,7 @@ async def update_config(cfg_id: ConfigId) -> AsyncIterator[str]:
             async for file in send_file(message + config):
                 yield file
 
-        async def list_configs() -> Tuple[int, Stream]:
+        async def list_configs() -> Tuple[int, JsStream]:
             ids = [i async for i in self.dependencies.config_handler.list_config_ids()]
             return len(ids), stream.iterate(ids)
 
@@ -5171,7 +5174,7 @@ async def app_run(
             else:
                 raise ValueError(f"Config {config} not found.")
 
-        async def stream_to_iterator(in_stream: Stream) -> AsyncIterator[JsonElement]:
+        async def stream_to_iterator(in_stream: JsStream) -> AsyncIterator[JsonElement]:
             async with in_stream.stream() as streamer:
                 async for item in streamer:
                     yield item
@@ -5816,7 +5819,7 @@ def parse(self, arg: Optional[str] = None, ctx: CLIContext = EmptyContext, **kwa
         in_source_position = kwargs.get("position") == 0
         db_lookup = dict(mysql="mysql+pymysql", mariadb="mariadb+pymysql")
 
-        async def sync_database_result(p: Namespace, maybe_stream: Optional[Stream]) -> AsyncIterator[JsonElement]:
+        async def sync_database_result(p: Namespace, maybe_stream: Optional[JsStream]) -> AsyncIterator[JsonElement]:
            async with TemporaryDirectory() as temp_dir:
                # optional: path of the output file
                file_output: Optional[Path] = Path(temp_dir) / "db" if p.db == "sqlite" else None
@@ -5859,7 +5862,7 @@ async def database_synchronize(
             complete_model: bool,
             drop_existing_tables: bool,
             query: Optional[Query],
-            in_stream: Stream,
+            in_stream: JsStream,
         ) -> None:
             resoto_model = await self.dependencies.model_handler.load_model(ctx.graph_name)
 
diff --git a/resotocore/resotocore/cli/model.py b/resotocore/resotocore/cli/model.py
index 6334677059..9b8497ec8e 100644
--- a/resotocore/resotocore/cli/model.py
+++ b/resotocore/resotocore/cli/model.py
@@ -32,7 +32,7 @@
 from parsy import test_char, string
 from rich.jupyter import JupyterMixin
 
-from resotocore.cli import JsGen, T, Sink
+from resotocore.cli import JsGen, T, Sink, JsStream
 from resotocore.console_renderer import ConsoleRenderer, ConsoleColorSystem
 from resotocore.core_config import AliasTemplateConfig, AliasTemplateParameterConfig
 from resotocore.error import CLIParseError
@@ -226,7 +226,7 @@ def __init__(
         self.required_permissions = required_permissions or set()
 
     @staticmethod
-    def make_stream(in_stream: JsGen) -> Stream:
+    def make_stream(in_stream: JsGen) -> JsStream:
         return in_stream if isinstance(in_stream, Stream) else stream.iterate(in_stream)
 
 
@@ -242,7 +242,7 @@ def __init__(
         super().__init__(produces, requires, envelope, required_permissions)
         self._fn = fn
 
-    async def source(self) -> Tuple[Optional[int], Stream]:
+    async def source(self) -> Tuple[Optional[int], JsStream]:
         res = self._fn()
         count, gen = await res if iscoroutine(res) else res
         return count, self.make_stream(await gen if iscoroutine(gen) else gen)
@@ -300,7 +300,7 @@ def __init__(
         super().__init__(produces, requires, envelope, required_permissions)
         self._fn = fn
 
-    async def flow(self, in_stream: JsGen) -> Stream:
+    async def flow(self, in_stream: JsGen) -> JsStream:
         gen = self._fn(self.make_stream(in_stream))
         return self.make_stream(await gen if iscoroutine(gen) else gen)
 
@@ -695,7 +695,7 @@ def is_allowed_to_execute(self) -> bool:
             return False
         return all(self.ctx.user.has_permission(cmd.action.required_permissions) for cmd in self.executable_commands)
 
-    async def execute(self) -> Tuple[Optional[int], Stream]:
+    async def execute(self) -> Tuple[Optional[int], JsStream]:
         if self.executable_commands:
             source_action = cast(CLISource, self.executable_commands[0].action)
             count, flow = await source_action.source()
diff --git a/resotocore/resotocore/util.py b/resotocore/resotocore/util.py
index 1f10145fa3..c2933695cd 100644
--- a/resotocore/resotocore/util.py
+++ b/resotocore/resotocore/util.py
@@ -126,7 +126,7 @@ def json_hash(js: Json) -> str:
 def pop_keys(d: Dict[AnyT, AnyR], keys: List[AnyT]) -> Dict[AnyT, AnyR]:
     res = dict(d)
     for key in keys:
-        res.pop(key, None)  # type: ignore
+        res.pop(key, None)
     return res
 
 
diff --git a/resotocore/tests/resotocore/hypothesis_extension.py b/resotocore/tests/resotocore/hypothesis_extension.py
index 25725c6619..80bf8647eb 100644
--- a/resotocore/tests/resotocore/hypothesis_extension.py
+++ b/resotocore/tests/resotocore/hypothesis_extension.py
@@ -79,7 +79,7 @@ def node_gen(ud: UD) -> Json:
     }
 
 
-def graph_stream(node_list: List[Json]) -> Stream:
+def graph_stream(node_list: List[Json]) -> Stream[Json]:
     def from_node() -> Generator[Json, Any, None]:
         for node in node_list:
             yield node
diff --git a/resotolib/resotolib/core/model_export.py b/resotolib/resotolib/core/model_export.py
index 585b372b20..6fa82f821b 100644
--- a/resotolib/resotolib/core/model_export.py
+++ b/resotolib/resotolib/core/model_export.py
@@ -84,7 +84,7 @@ def check(to_check: type) -> None:
             if walk_subclasses:
                 for subclass in clazz.__subclasses__():
                     check(subclass)
-            for field in attrs.fields(clazz):
+            for field in attrs.fields(clazz):  # type: ignore
                 check(field.type)
         elif is_enum(clazz):
             all_classes.add(clazz)
@@ -232,7 +232,7 @@ def export_data_class(clazz: type) -> None:
     base_names = [model_name(base) for base in bases]
     base_props: Set[Attribute] = reduce(lambda result, base: result | set(attrs.fields(base)), bases, set())  # type: ignore # noqa: E501
     props = [
-        p for field in attrs.fields(clazz) if field not in base_props and should_export(field) for p in prop(field)
+        p for field in attrs.fields(clazz) if field not in base_props and should_export(field) for p in prop(field)  # type: ignore # noqa: E501
     ]
     root = any(sup == aggregate_root for sup in clazz.mro()) if aggregate_root else True
     kind = model_name(clazz)
diff --git a/resotolib/resotolib/graph/__init__.py b/resotolib/resotolib/graph/__init__.py
index a4cac6d32d..ebbfc58ef2 100644
--- a/resotolib/resotolib/graph/__init__.py
+++ b/resotolib/resotolib/graph/__init__.py
@@ -412,7 +412,7 @@ def resolve_type(clazz: Type[Any]) -> None:
 
 def validate_dataclass(node: BaseResource) -> bool:
     resolve_type(type(node))  # make sure all type annotations are resolved
-    for field in fields(type(node)):
+    for field in fields(type(node)):  # type: ignore
         value = getattr(node, field.name)
         try:
             check_type(value, field.type)
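
Note, not part of the diff above: the resotocore changes replace bare aiostream Stream annotations with a named JsStream alias (JsStream: TypeAlias = Stream). The sketch below illustrates that alias pattern in isolation, using only the aiostream API already visible in the diff (stream.iterate and Stream.stream()). The helper name count_elements and the simplified JsonElement definition are illustrative assumptions, not taken from the repository.

    # Illustrative sketch only -- assumes aiostream==0.4.5, as pinned in the requirements above.
    import asyncio
    from typing import AsyncIterator, Union

    from aiostream import stream
    from aiostream.core import Stream
    from typing_extensions import TypeAlias

    # Named alias: annotations read "JsStream" instead of the bare aiostream Stream type.
    JsStream: TypeAlias = Stream
    # Simplified stand-in for resotocore's JsonElement, for demonstration purposes only.
    JsonElement = Union[None, bool, int, float, str, list, dict]
    JsGen = Union[JsStream, AsyncIterator[JsonElement]]

    async def count_elements(in_stream: JsStream) -> int:
        # aiostream streams are consumed through an async context manager, as in the diff above.
        count = 0
        async with in_stream.stream() as streamer:
            async for _ in streamer:
                count += 1
        return count

    async def main() -> None:
        elements: JsStream = stream.iterate([{"id": "a"}, {"id": "b"}, {"id": "c"}])
        print(await count_elements(elements))  # prints 3

    if __name__ == "__main__":
        asyncio.run(main())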