From 7142248e48184bbd2f70ccf1cff7c1fa2bfe8087 Mon Sep 17 00:00:00 2001 From: PagesCoffy Date: Wed, 18 Sep 2024 16:40:33 +0000 Subject: [PATCH 01/11] [Integration][GitLab] - Improve on GitOps push events (#1028) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Description What - Improved on the way the integration handles GitOps push events by using only files that have been changed in the push event rather than fetching the entire repository tree Why - Some customers were not receiving the push events in their GitLab GitOps How - ## Type of change Please leave one option from the following and delete the rest: - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] New Integration (non-breaking change which adds a new integration) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) - [ ] Non-breaking change (fix of existing functionality that will not change current behavior) - [ ] Documentation (added/updated documentation)

All tests should be run against the port production environment(using a testing org).

### Core testing checklist - [ ] Integration able to create all default resources from scratch - [ ] Resync finishes successfully - [ ] Resync able to create entities - [ ] Resync able to update entities - [ ] Resync able to detect and delete entities - [ ] Scheduled resync able to abort existing resync and start a new one - [ ] Tested with at least 2 integrations from scratch - [ ] Tested with Kafka and Polling event listeners - [ ] Tested deletion of entities that don't pass the selector ### Integration testing checklist - [ ] Integration able to create all default resources from scratch - [ ] Resync able to create entities - [ ] Resync able to update entities - [ ] Resync able to detect and delete entities - [ ] Resync finishes successfully - [ ] If new resource kind is added or updated in the integration, add example raw data, mapping and expected result to the `examples` folder in the integration directory. - [ ] If resource kind is updated, run the integration with the example data and check if the expected result is achieved - [ ] If new resource kind is added or updated, validate that live-events for that resource are working as expected - [ ] Docs PR link [here](#) ### Preflight checklist - [ ] Handled rate limiting - [ ] Handled pagination - [ ] Implemented the code in async - [ ] Support Multi account ## Screenshots showing successful ingestion Screenshot 2024-09-17 at 8 03 09 PM entity diffs operations Screenshot 2024-09-17 at 8 03 26 PM ## API Documentation Provide links to the API documentation used for this integration. 
--- integrations/gitlab/CHANGELOG.md | 8 + .../gitlab_integration/core/entities.py | 8 +- .../events/event_handler.py | 8 +- .../gitlab_integration/events/hooks/base.py | 9 +- .../gitlab_integration/events/hooks/push.py | 149 ++++++++++++++++-- .../gitlab_integration/gitlab_service.py | 25 +-- .../gitlab/gitlab_integration/ocean.py | 4 +- integrations/gitlab/pyproject.toml | 2 +- 8 files changed, 181 insertions(+), 32 deletions(-) diff --git a/integrations/gitlab/CHANGELOG.md b/integrations/gitlab/CHANGELOG.md index d9e29c49e7..27da329bd7 100644 --- a/integrations/gitlab/CHANGELOG.md +++ b/integrations/gitlab/CHANGELOG.md @@ -7,6 +7,14 @@ this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +0.1.121 (2024-09-17) +==================== + +### Improvements + +- Improved on the way the integration handles GitOps push events by using only files that have been changed in the push even rather than fetching the entire repository tree (0.1.121) + + 0.1.120 (2024-09-17) ==================== diff --git a/integrations/gitlab/gitlab_integration/core/entities.py b/integrations/gitlab/gitlab_integration/core/entities.py index 6bfe530416..04fe56b3d6 100644 --- a/integrations/gitlab/gitlab_integration/core/entities.py +++ b/integrations/gitlab/gitlab_integration/core/entities.py @@ -5,19 +5,23 @@ from port_ocean.core.models import Entity +from gitlab_integration.core.async_fetcher import AsyncFetcher + FILE_PROPERTY_PREFIX = "file://" SEARCH_PROPERTY_PREFIX = "search://" JSON_SUFFIX = ".json" -def generate_entity_from_port_yaml( +async def generate_entity_from_port_yaml( raw_entity: Entity, project: Project, ref: str ) -> Entity: properties = {} for key, value in raw_entity.properties.items(): if isinstance(value, str) and value.startswith(FILE_PROPERTY_PREFIX): file_meta = Path(value.replace(FILE_PROPERTY_PREFIX, "")) - gitlab_file = project.files.get(file_path=str(file_meta), ref=ref) + gitlab_file = await AsyncFetcher.fetch_single( + 
project.files.get, str(file_meta), ref + ) if file_meta.suffix == JSON_SUFFIX: properties[key] = json.loads(gitlab_file.decode().decode("utf-8")) diff --git a/integrations/gitlab/gitlab_integration/events/event_handler.py b/integrations/gitlab/gitlab_integration/events/event_handler.py index 60fe2608b0..ce2664f3a4 100644 --- a/integrations/gitlab/gitlab_integration/events/event_handler.py +++ b/integrations/gitlab/gitlab_integration/events/event_handler.py @@ -28,11 +28,16 @@ async def _start_event_processor(self) -> None: logger.info(f"Started {self.__class__.__name__} worker") while True: event_ctx, event_id, body = await self.webhook_tasks_queue.get() + logger.debug(f"Retrieved event: {event_id} from Queue, notifying observers") try: async with event_context( "gitlab_http_event_async_worker", parent_override=event_ctx ): await self._notify(event_id, body) + except Exception as e: + logger.error( + f"Error notifying observers for event: {event_id}, error: {e}" + ) finally: self.webhook_tasks_queue.task_done() @@ -44,6 +49,7 @@ async def _notify(self, event_id: str, body: dict[str, Any]) -> None: pass async def notify(self, event_id: str, body: dict[str, Any]) -> None: + logger.debug(f"Received event: {event_id}, putting it in Queue for processing") await self.webhook_tasks_queue.put( ( deepcopy(current_event_context), @@ -71,7 +77,7 @@ async def _notify(self, event_id: str, body: dict[str, Any]) -> None: ) if not observers: - logger.debug( + logger.info( f"event: {event_id} has no matching handler. 
the handlers available are for events: {self._observers.keys()}" ) diff --git a/integrations/gitlab/gitlab_integration/events/hooks/base.py b/integrations/gitlab/gitlab_integration/events/hooks/base.py index 6ed43a3403..8b2b8a3bcf 100644 --- a/integrations/gitlab/gitlab_integration/events/hooks/base.py +++ b/integrations/gitlab/gitlab_integration/events/hooks/base.py @@ -34,8 +34,13 @@ async def on_hook(self, event: str, body: dict[str, Any]) -> None: logger.info( f"Handling hook {event} for project {project.path_with_namespace}" ) - await self._on_hook(body, project) - logger.info(f"Finished handling {event}") + try: + await self._on_hook(body, project) + logger.info(f"Finished handling {event}") + except Exception as e: + logger.error( + f"Error handling hook {event} for project {project.path_with_namespace}. Error: {e}" + ) else: logger.info( f"Project {body['project']['id']} was filtered for event {event}. Skipping..." diff --git a/integrations/gitlab/gitlab_integration/events/hooks/push.py b/integrations/gitlab/gitlab_integration/events/hooks/push.py index d4a32d7259..3465118526 100644 --- a/integrations/gitlab/gitlab_integration/events/hooks/push.py +++ b/integrations/gitlab/gitlab_integration/events/hooks/push.py @@ -1,10 +1,11 @@ import typing from typing import Any +from enum import StrEnum from loguru import logger from gitlab.v4.objects import Project -from gitlab_integration.core.utils import generate_ref +from gitlab_integration.core.utils import generate_ref, does_pattern_apply from gitlab_integration.events.hooks.base import ProjectHandler from gitlab_integration.git_integration import GitlabPortAppConfig from gitlab_integration.utils import ObjectKind @@ -14,18 +15,45 @@ from port_ocean.context.ocean import ocean +class FileAction(StrEnum): + REMOVED = "removed" + ADDED = "added" + MODIFIED = "modified" + + class PushHook(ProjectHandler): events = ["Push Hook"] system_events = ["push"] async def _on_hook(self, body: dict[str, Any], gitlab_project: 
Project) -> None: - before, after, ref = body.get("before"), body.get("after"), body.get("ref") + commit_before, commit_after, ref = ( + body.get("before"), + body.get("after"), + body.get("ref"), + ) - if before is None or after is None or ref is None: + if commit_before is None or commit_after is None or ref is None: raise ValueError( "Invalid push hook. Missing one or more of the required fields (before, after, ref)" ) + added_files = [ + added_file + for commit in body.get("commits", []) + for added_file in commit.get(FileAction.ADDED, []) + ] + modified_files = [ + modified_file + for commit in body.get("commits", []) + for modified_file in commit.get(FileAction.MODIFIED, []) + ] + + removed_files = [ + removed_file + for commit in body.get("commits", []) + for removed_file in commit.get(FileAction.REMOVED, []) + ] + config: GitlabPortAppConfig = typing.cast( GitlabPortAppConfig, event.port_app_config ) @@ -33,17 +61,38 @@ async def _on_hook(self, body: dict[str, Any], gitlab_project: Project) -> None: branch = config.branch or gitlab_project.default_branch if generate_ref(branch) == ref: - entities_before, entities_after = ( - await self.gitlab_service.get_entities_diff( - gitlab_project, config.spec_path, before, after, branch - ) - ) + spec_path = config.spec_path + if not isinstance(spec_path, list): + spec_path = [spec_path] - # update the entities diff found in the `config.spec_path` file the user configured - await ocean.update_diff( - {"before": entities_before, "after": entities_after}, - UserAgentType.gitops, + await self._process_files( + gitlab_project, + removed_files, + spec_path, + commit_before, + "", + branch, + FileAction.REMOVED, ) + await self._process_files( + gitlab_project, + added_files, + spec_path, + "", + commit_after, + branch, + FileAction.ADDED, + ) + await self._process_files( + gitlab_project, + modified_files, + spec_path, + commit_before, + commit_after, + branch, + FileAction.MODIFIED, + ) + # update information regarding the 
project as well logger.info( f"Updating project information after push hook for project {gitlab_project.path_with_namespace}" @@ -52,8 +101,84 @@ async def _on_hook(self, body: dict[str, Any], gitlab_project: Project) -> None: gitlab_project ) await ocean.register_raw(ObjectKind.PROJECT, [enriched_project]) + else: logger.debug( f"Skipping push hook for project {gitlab_project.path_with_namespace} because the ref {ref} " f"does not match the branch {branch}" ) + + async def _process_files( + self, + gitlab_project: Project, + files: list[str], + spec_path: list[str], + commit_before: str, + commit_after: str, + branch: str, + file_action: FileAction, + ) -> None: + if not files: + return + logger.info( + f"Processing {file_action} files {files} for project {gitlab_project.path_with_namespace}" + ) + matching_files = [file for file in files if does_pattern_apply(spec_path, file)] + + if not matching_files: + logger.info("No matching files found for mapping") + logger.debug(f"Files {files} didn't match {spec_path} patten") + return + else: + logger.info( + f"While processing {file_action} Found {len(matching_files)} that matches {spec_path}, matching files: {matching_files}" + ) + + for file in matching_files: + try: + match file_action: + case FileAction.REMOVED: + entities_before = ( + await self.gitlab_service._get_entities_by_commit( + gitlab_project, file, commit_before, branch + ) + ) + await ocean.update_diff( + {"before": entities_before, "after": []}, + UserAgentType.gitops, + ) + + case FileAction.ADDED: + entities_after = ( + await self.gitlab_service._get_entities_by_commit( + gitlab_project, file, commit_after, branch + ) + ) + await ocean.update_diff( + {"before": [], "after": entities_after}, + UserAgentType.gitops, + ) + + case FileAction.MODIFIED: + entities_before = ( + await self.gitlab_service._get_entities_by_commit( + gitlab_project, file, commit_before, branch + ) + ) + entities_after = ( + await self.gitlab_service._get_entities_by_commit( + 
gitlab_project, file, commit_after, branch + ) + ) + await ocean.update_diff( + {"before": entities_before, "after": entities_after}, + UserAgentType.gitops, + ) + except Exception as e: + logger.error( + f"Error processing file {file} in action {file_action}: {str(e)}" + ) + skipped_files = set(files) - set(matching_files) + logger.debug( + f"Skipped {len(skipped_files)} files as they didn't match {spec_path} Skipped files: {skipped_files}" + ) diff --git a/integrations/gitlab/gitlab_integration/gitlab_service.py b/integrations/gitlab/gitlab_integration/gitlab_service.py index 2b94897662..db43ef9310 100644 --- a/integrations/gitlab/gitlab_integration/gitlab_service.py +++ b/integrations/gitlab/gitlab_integration/gitlab_service.py @@ -166,11 +166,14 @@ async def search_files_in_project( if files_with_content: yield files_with_content - def _get_entities_from_git( - self, project: Project, file_name: str, sha: str, ref: str + async def _get_entities_from_git( + self, project: Project, file_path: str | List[str], sha: str, ref: str ) -> List[Entity]: try: - file_content = project.files.get(file_path=file_name, ref=sha) + file_content = await AsyncFetcher.fetch_single( + project.files.get, file_path, sha + ) + entities = yaml.safe_load(file_content.decode()) raw_entities = [ Entity(**entity_data) @@ -179,29 +182,27 @@ def _get_entities_from_git( ) ] return [ - generate_entity_from_port_yaml(entity_data, project, ref) + await generate_entity_from_port_yaml(entity_data, project, ref) for entity_data in raw_entities ] except ParserError as exec: logger.error( - f"Failed to parse gitops entities from gitlab project {project.path_with_namespace},z file {file_name}." + f"Failed to parse gitops entities from gitlab project {project.path_with_namespace},z file {file_path}." 
f"\n {exec}" ) except Exception: logger.error( - f"Failed to get gitops entities from gitlab project {project.path_with_namespace}, file {file_name}" + f"Failed to get gitops entities from gitlab project {project.path_with_namespace}, file {file_path}" ) return [] async def _get_entities_by_commit( self, project: Project, spec: str | List["str"], commit: str, ref: str ) -> List[Entity]: - spec_paths = await self.get_all_file_paths(project, spec, commit) - return [ - entity - for path in spec_paths - for entity in self._get_entities_from_git(project, path, commit, ref) - ] + logger.info( + f"Getting entities for project {project.path_with_namespace} in path {spec} at commit {commit} and ref {ref}" + ) + return await self._get_entities_from_git(project, spec, commit, ref) def should_run_for_path(self, path: str) -> bool: return any(does_pattern_apply(mapping, path) for mapping in self.group_mapping) diff --git a/integrations/gitlab/gitlab_integration/ocean.py b/integrations/gitlab/gitlab_integration/ocean.py index e10fb92224..12e5e9949f 100644 --- a/integrations/gitlab/gitlab_integration/ocean.py +++ b/integrations/gitlab/gitlab_integration/ocean.py @@ -32,7 +32,7 @@ async def handle_webhook_request(group_id: str, request: Request) -> dict[str, A event_id = f"{request.headers.get('X-Gitlab-Event')}:{group_id}" with logger.contextualize(event_id=event_id): try: - logger.debug(f"Received webhook event {event_id} from Gitlab") + logger.info(f"Received webhook event {event_id} from Gitlab") body = await request.json() await event_handler.notify(event_id, body) return {"ok": True} @@ -50,7 +50,7 @@ async def handle_system_webhook_request(request: Request) -> dict[str, Any]: # some system hooks have event_type instead of event_name in the body, such as merge_request events event_name: str = str(body.get("event_name") or body.get("event_type")) with logger.contextualize(event_name=event_name): - logger.debug(f"Received system webhook event {event_name} from Gitlab") + 
logger.info(f"Received system webhook event {event_name} from Gitlab") await system_event_handler.notify(event_name, body) return {"ok": True} diff --git a/integrations/gitlab/pyproject.toml b/integrations/gitlab/pyproject.toml index 2af63ba5c9..4efaf60003 100644 --- a/integrations/gitlab/pyproject.toml +++ b/integrations/gitlab/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gitlab" -version = "0.1.120" +version = "0.1.121" description = "Gitlab integration for Port using Port-Ocean Framework" authors = ["Yair Siman-Tov "] From bdc1c4bf7cc706f3813cc622f5705540f1492472 Mon Sep 17 00:00:00 2001 From: PagesCoffy Date: Wed, 18 Sep 2024 17:11:19 +0000 Subject: [PATCH 02/11] [Integration][GitLab] - Enable Existing Webhooks on Restarts (#1014) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Description What - We had a bug where GitLab webhooks were disabled on restart. The disablement could also be used by GitLab when the receiving host does not meet their requirement. I updated the logic for creating webhook such that it can re-create existing webhooks when it has been disabled. Why - How - ## Type of change Please leave one option from the following and delete the rest: - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] New Integration (non-breaking change which adds a new integration) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) - [ ] Non-breaking change (fix of existing functionality that will not change current behavior) - [ ] Documentation (added/updated documentation)

All tests should be run against the port production environment(using a testing org).

### Core testing checklist - [ ] Integration able to create all default resources from scratch - [ ] Resync finishes successfully - [ ] Resync able to create entities - [ ] Resync able to update entities - [ ] Resync able to detect and delete entities - [ ] Scheduled resync able to abort existing resync and start a new one - [ ] Tested with at least 2 integrations from scratch - [ ] Tested with Kafka and Polling event listeners - [ ] Tested deletion of entities that don't pass the selector ### Integration testing checklist - [x] Integration able to create all default resources from scratch - [x] Resync able to create entities - [x] Resync able to update entities - [ ] Resync able to detect and delete entities - [x] Resync finishes successfully - [ ] If new resource kind is added or updated in the integration, add example raw data, mapping and expected result to the `examples` folder in the integration directory. - [x] If resource kind is updated, run the integration with the example data and check if the expected result is achieved - [x] If new resource kind is added or updated, validate that live-events for that resource are working as expected - [ ] Docs PR link [here](#) ### Preflight checklist - [ ] Handled rate limiting - [ ] Handled pagination - [ ] Implemented the code in async - [ ] Support Multi account ## Screenshots Screenshot 2024-09-13 at 5 10 08 PM ## API Documentation [Delete Hook API](https://docs.gitlab.com/ee/api/group_webhooks.html#delete-a-group-hook) [Create Hook API ](https://docs.gitlab.com/ee/api/group_webhooks.html#add-a-group-hook) --------- Co-authored-by: Tom Tankilevitch <59158507+Tankilevitch@users.noreply.github.com> --- integrations/gitlab/CHANGELOG.md | 8 + .../gitlab_integration/gitlab_service.py | 50 ++++-- integrations/gitlab/pyproject.toml | 2 +- .../test_gitlab_service_webhook.py | 150 ++++++++++++++++++ 4 files changed, 194 insertions(+), 16 deletions(-) create mode 100644 
integrations/gitlab/tests/gitlab_integration/test_gitlab_service_webhook.py diff --git a/integrations/gitlab/CHANGELOG.md b/integrations/gitlab/CHANGELOG.md index 27da329bd7..0b95cee875 100644 --- a/integrations/gitlab/CHANGELOG.md +++ b/integrations/gitlab/CHANGELOG.md @@ -7,6 +7,14 @@ this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +0.1.122 (2024-09-17) +==================== + +### Improvements + +- Updated the webhook creation logic to recreate hooks for urls that are disabled by GitLab (0.1.122) + + 0.1.121 (2024-09-17) ==================== diff --git a/integrations/gitlab/gitlab_integration/gitlab_service.py b/integrations/gitlab/gitlab_integration/gitlab_service.py index db43ef9310..0da6542ba4 100644 --- a/integrations/gitlab/gitlab_integration/gitlab_service.py +++ b/integrations/gitlab/gitlab_integration/gitlab_service.py @@ -63,11 +63,20 @@ def __init__( GITLAB_SEARCH_RATE_LIMIT * 0.95, 60 ) - def _does_webhook_exist_for_group(self, group: RESTObject) -> bool: + def _get_webhook_for_group(self, group: RESTObject) -> RESTObject | None: + webhook_url = f"{self.app_host}/integration/hook/{group.get_id()}" for hook in group.hooks.list(iterator=True): - if hook.url == f"{self.app_host}/integration/hook/{group.get_id()}": - return True - return False + if hook.url == webhook_url: + return hook + return None + + def _delete_group_webhook(self, group: RESTObject, hook_id: int) -> None: + logger.info(f"Deleting webhook with id {hook_id} in group {group.get_id()}") + try: + group.hooks.delete(hook_id) + logger.info(f"Deleted webhook for {group.get_id()}") + except Exception as e: + logger.error(f"Failed to delete webhook for {group.get_id()} error={e}") def _create_group_webhook( self, group: RESTObject, events: list[str] | None @@ -80,16 +89,18 @@ def _create_group_webhook( logger.info( f"Creating webhook for {group.get_id()} with events: {[event for event in webhook_events if webhook_events[event]]}" ) - - resp = 
group.hooks.create( - { - "url": f"{self.app_host}/integration/hook/{group.get_id()}", - **webhook_events, - } - ) - logger.info( - f"Created webhook for {group.get_id()}, id={resp.id}, url={resp.url}" - ) + try: + resp = group.hooks.create( + { + "url": f"{self.app_host}/integration/hook/{group.get_id()}", + **webhook_events, + } + ) + logger.info( + f"Created webhook for {group.get_id()}, id={resp.id}, url={resp.url}" + ) + except Exception as e: + logger.error(f"Failed to create webhook for {group.get_id()} error={e}") def _get_changed_files_between_commits( self, project_id: int, head: str @@ -314,8 +325,17 @@ def create_webhook(self, group: Group, events: list[str] | None) -> str | None: if group_id is None: logger.info(f"Group {group.attributes['full_path']} has no id. skipping...") else: - if self._does_webhook_exist_for_group(group): + hook = self._get_webhook_for_group(group) + if hook: logger.info(f"Webhook already exists for group {group.get_id()}") + + if hook.alert_status == "disabled": + logger.info( + f"Webhook exists for group {group.get_id()} but is disabled, deleting and re-creating..." 
+ ) + self._delete_group_webhook(group, hook.id) + self._create_group_webhook(group, events) + logger.info(f"Webhook re-created for group {group.get_id()}") else: self._create_group_webhook(group, events) webhook_id = str(group_id) diff --git a/integrations/gitlab/pyproject.toml b/integrations/gitlab/pyproject.toml index 4efaf60003..5b5c494cb8 100644 --- a/integrations/gitlab/pyproject.toml +++ b/integrations/gitlab/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gitlab" -version = "0.1.121" +version = "0.1.122" description = "Gitlab integration for Port using Port-Ocean Framework" authors = ["Yair Siman-Tov "] diff --git a/integrations/gitlab/tests/gitlab_integration/test_gitlab_service_webhook.py b/integrations/gitlab/tests/gitlab_integration/test_gitlab_service_webhook.py new file mode 100644 index 0000000000..354caadfbb --- /dev/null +++ b/integrations/gitlab/tests/gitlab_integration/test_gitlab_service_webhook.py @@ -0,0 +1,150 @@ +from unittest.mock import MagicMock +from typing import Any +from gitlab_integration.gitlab_service import GitlabService + + +def test_get_webhook_for_group_found(mocked_gitlab_service: GitlabService) -> None: + # Arrange + mock_group = MagicMock() + mock_group.get_id.return_value = 456 + mock_webhook_url = "http://example.com/integration/hook/456" + mock_hook = MagicMock() + mock_hook.url = mock_webhook_url + mock_hook.id = 984 + mock_group.hooks.list.return_value = [mock_hook] + + # Act + result = mocked_gitlab_service._get_webhook_for_group(mock_group) + + # Assert + assert result == mock_hook + mock_group.hooks.list.assert_called_once_with(iterator=True) + + +def test_get_webhook_for_group_not_found(mocked_gitlab_service: GitlabService) -> None: + # Arrange + mock_group = MagicMock() + mock_group.get_id.return_value = 789 + mock_hook = MagicMock() + mock_hook.url = "http://example.com/other/hook" + mock_group.hooks.list.return_value = [mock_hook] + + # Act + result = mocked_gitlab_service._get_webhook_for_group(mock_group) 
+ + # Assert + assert result is None + mock_group.hooks.list.assert_called_once_with(iterator=True) + + +def test_create_webhook_when_webhook_exists_but_disabled( + mocked_gitlab_service: GitlabService, monkeypatch: Any +): + # Arrange + mock_group = MagicMock() + mock_group.get_id.return_value = 456 + mock_group.attributes = {"full_path": "group2"} + + # Mock the group hooks.list method to return an existing disabled webhook + mock_hook = MagicMock() + mock_hook.url = "http://example.com/integration/hook/456" # Updated URL for clarity + mock_hook.alert_status = "disabled" + mock_hook.id = 456 + mock_group.hooks.list.return_value = [mock_hook] + + # Mock the methods for deleting and creating webhooks + mock_delete_webhook = MagicMock() + monkeypatch.setattr( + mocked_gitlab_service, "_delete_group_webhook", mock_delete_webhook + ) + mock_create_webhook = MagicMock() + monkeypatch.setattr( + mocked_gitlab_service, "_create_group_webhook", mock_create_webhook + ) + + # Act + webhook_id = mocked_gitlab_service.create_webhook( + mock_group, events=["push", "merge_request"] + ) + + # Assert + assert webhook_id == "456" + mock_delete_webhook.assert_called_once_with( + mock_group, mock_hook.id + ) # Ensure delete method is called + mock_create_webhook.assert_called_once_with( + mock_group, ["push", "merge_request"] + ) # Ensure create method is called with correct arguments + + +def test_create_webhook_when_webhook_exists_and_enabled( + mocked_gitlab_service: GitlabService, monkeypatch: Any +): + # Arrange + mock_group = MagicMock() + mock_group.get_id.return_value = 789 + mock_group.attributes = {"full_path": "group3"} + + # Mock the group hooks.list method to return an existing enabled webhook + mock_hook = MagicMock() + mock_hook.url = "http://example.com/integration/hook/789" + mock_hook.alert_status = "executable" + mock_hook.id = 789 + mock_group.hooks.list.return_value = [mock_hook] + + # Mock the method for creating webhooks + mock_create_webhook = MagicMock() + 
monkeypatch.setattr( + mocked_gitlab_service, "_create_group_webhook", mock_create_webhook + ) + + # Act + webhook_id = mocked_gitlab_service.create_webhook( + mock_group, events=["push", "merge_request"] + ) + + # Assert + assert webhook_id == "789" + mock_create_webhook.assert_not_called() # Ensure no new webhook is created + + +def test_create_webhook_when_no_webhook_exists( + mocked_gitlab_service: GitlabService, monkeypatch: Any +): + # Arrange + mock_group = MagicMock() + mock_group.get_id.return_value = 123 + mock_group.attributes = {"full_path": "group1"} + + # Mock the group hooks.list method to return no webhook + mock_group.hooks.list.return_value = [] + + # Act + webhook_id = mocked_gitlab_service.create_webhook( + mock_group, events=["push", "merge_request"] + ) + + # Assert + assert webhook_id == "123" + mock_group.hooks.create.assert_called_once() # A new webhook should be created + + +def test_delete_webhook(mocked_gitlab_service: GitlabService, monkeypatch: Any): + # Arrange + mock_group = MagicMock() + mock_group.get_id.return_value = 456 + mock_group.attributes = {"full_path": "group2"} + + # Mock the group hooks.list method to return a webhook + mock_hook = MagicMock() + mock_hook.url = "http://example.com/integration/hook/456" + mock_hook.id = 17 + mock_group.hooks.list.return_value = [mock_hook] + + # Act + mocked_gitlab_service._delete_group_webhook(mock_group, mock_hook.id) + + # Assert + mock_group.hooks.delete.assert_called_once_with( + mock_hook.id + ) # Ensure the webhook is deleted From 339d2849aafeb243b6cec2886681ee558a2cb08b Mon Sep 17 00:00:00 2001 From: PagesCoffy Date: Thu, 19 Sep 2024 16:30:26 +0000 Subject: [PATCH 03/11] [Integration][SonarQube] - Handle 4xx errors gracefully (#1027) --- integrations/sonarqube/CHANGELOG.md | 8 ++++++++ integrations/sonarqube/client.py | 16 ++++++++++++++-- integrations/sonarqube/pyproject.toml | 2 +- 3 files changed, 23 insertions(+), 3 deletions(-) diff --git 
a/integrations/sonarqube/CHANGELOG.md b/integrations/sonarqube/CHANGELOG.md index b7639f226c..481983c707 100644 --- a/integrations/sonarqube/CHANGELOG.md +++ b/integrations/sonarqube/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.95 (2024-09-19) + + +### Bug Fixes + +- Added handling for 400 and 404 HTTP errors to allow the integration to continue processing other requests instead of crashing (0.1.95) + + ## 0.1.94 (2024-09-17) diff --git a/integrations/sonarqube/client.py b/integrations/sonarqube/client.py index c6ee098380..3c61fe184a 100644 --- a/integrations/sonarqube/client.py +++ b/integrations/sonarqube/client.py @@ -102,14 +102,13 @@ async def send_paginated_api_request( query_params = query_params or {} query_params["ps"] = PAGE_SIZE + all_resources = [] # List to hold all fetched resources try: logger.debug( f"Sending API request to {method} {endpoint} with query params: {query_params}" ) - all_resources = [] # List to hold all fetched resources - while True: response = await self.http_client.request( method=method, @@ -135,6 +134,19 @@ async def send_paginated_api_request( logger.error( f"HTTP error with status code: {e.response.status_code} and response text: {e.response.text}" ) + if ( + e.response.status_code == 400 + and query_params.get("ps", 0) > PAGE_SIZE + and endpoint in [Endpoints.ONPREM_ISSUES, Endpoints.SAAS_ISSUES] + ): + logger.error( + "The request exceeded the maximum number of issues that can be returned (10,000) from SonarQube API. Consider using apiFilters in the config mapping to narrow the scope of your search. Returning accumulated issues and skipping further results." 
+ ) + return all_resources + + if e.response.status_code == 404: + logger.error(f"Resource not found: {e.response.text}") + return all_resources raise except httpx.HTTPError as e: logger.error(f"HTTP occurred while fetching paginated data: {e}") diff --git a/integrations/sonarqube/pyproject.toml b/integrations/sonarqube/pyproject.toml index bba2f23fee..e7b1b932d6 100644 --- a/integrations/sonarqube/pyproject.toml +++ b/integrations/sonarqube/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "sonarqube" -version = "0.1.94" +version = "0.1.95" description = "SonarQube projects and code quality analysis integration" authors = ["Port Team "] From 236ae72a920c2370175f08f9f06b46cabcb40127 Mon Sep 17 00:00:00 2001 From: Tom Tankilevitch <59158507+Tankilevitch@users.noreply.github.com> Date: Sun, 22 Sep 2024 14:22:02 +0300 Subject: [PATCH 04/11] [Core] Fix overriding of next resync status when cancelled due to scheduled resync (#1035) --- CHANGELOG.md | 6 ++++++ port_ocean/core/integrations/mixins/sync_raw.py | 4 +++- port_ocean/ocean.py | 4 ++++ pyproject.toml | 2 +- 4 files changed, 14 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 02edc1e2cc..9afe4e146c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,12 @@ this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +## 0.10.12 (2024-09-19) + +### Bug Fixes + +- Fixed updating state of resync when the resync is being cancelled by a new resync event + ## 0.10.11 (2024-09-17) ### Improvements diff --git a/port_ocean/core/integrations/mixins/sync_raw.py b/port_ocean/core/integrations/mixins/sync_raw.py index 0b306ff6ab..87fd2a8faf 100644 --- a/port_ocean/core/integrations/mixins/sync_raw.py +++ b/port_ocean/core/integrations/mixins/sync_raw.py @@ -456,7 +456,8 @@ async def sync_raw_all( creation_results.append(await task) except asyncio.CancelledError as e: - logger.warning("Resync aborted successfully") + logger.warning("Resync aborted successfully, skipping 
delete phase. This leads to an incomplete state") + raise else: if not did_fetched_current_state: logger.warning( @@ -489,3 +490,4 @@ async def sync_raw_all( {"before": entities_at_port, "after": flat_created_entities}, user_agent_type, ) + logger.info("Resync finished successfully") diff --git a/port_ocean/ocean.py b/port_ocean/ocean.py index 16b8b7ccda..caf983546f 100644 --- a/port_ocean/ocean.py +++ b/port_ocean/ocean.py @@ -82,6 +82,10 @@ async def execute_resync_all() -> None: try: await self.integration.sync_raw_all() await self.resync_state_updater.update_after_resync() + except asyncio.CancelledError: + logger.warning( + "resync was cancelled by the scheduled resync, skipping state update" + ) except Exception as e: await self.resync_state_updater.update_after_resync( IntegrationStateStatus.Failed diff --git a/pyproject.toml b/pyproject.toml index 7f43fe1200..89bbad686f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
readme = "README.md" homepage = "https://app.getport.io" From 6bd1ceed096458e373bea124ba47d7320e4db228 Mon Sep 17 00:00:00 2001 From: Port Bot <110599342+portmachineuser@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:46:02 +0300 Subject: [PATCH 05/11] Apply Ocean version 0.10.12 to all integrations (#1045) --- .github/workflows/apply-release.yml | 2 +- integrations/argocd/CHANGELOG.md | 8 + integrations/argocd/poetry.lock | 8 +- integrations/argocd/pyproject.toml | 4 +- integrations/aws/CHANGELOG.md | 8 + integrations/aws/poetry.lock | 8 +- integrations/aws/pyproject.toml | 4 +- integrations/azure-devops/CHANGELOG.md | 8 + integrations/azure-devops/poetry.lock | 8 +- integrations/azure-devops/pyproject.toml | 4 +- integrations/azure/CHANGELOG.md | 7 + integrations/azure/poetry.lock | 8 +- integrations/azure/pyproject.toml | 4 +- integrations/datadog/CHANGELOG.md | 8 + integrations/datadog/poetry.lock | 8 +- integrations/datadog/pyproject.toml | 4 +- integrations/dynatrace/CHANGELOG.md | 8 + integrations/dynatrace/poetry.lock | 8 +- integrations/dynatrace/pyproject.toml | 4 +- integrations/fake-integration/CHANGELOG.md | 8 + integrations/fake-integration/poetry.lock | 8 +- integrations/fake-integration/pyproject.toml | 4 +- integrations/firehydrant/CHANGELOG.md | 8 + integrations/firehydrant/poetry.lock | 8 +- integrations/firehydrant/pyproject.toml | 4 +- integrations/gcp/CHANGELOG.md | 8 + integrations/gcp/poetry.lock | 8 +- integrations/gcp/pyproject.toml | 4 +- integrations/gitlab/CHANGELOG.md | 8 + integrations/gitlab/poetry.lock | 8 +- integrations/gitlab/pyproject.toml | 4 +- integrations/jenkins/CHANGELOG.md | 8 + integrations/jenkins/poetry.lock | 8 +- integrations/jenkins/pyproject.toml | 4 +- integrations/jira/CHANGELOG.md | 8 + integrations/jira/poetry.lock | 8 +- integrations/jira/pyproject.toml | 4 +- integrations/kafka/CHANGELOG.md | 8 + integrations/kafka/poetry.lock | 8 +- integrations/kafka/pyproject.toml | 4 +- 
integrations/kubecost/CHANGELOG.md | 8 + integrations/kubecost/poetry.lock | 8 +- integrations/kubecost/pyproject.toml | 4 +- integrations/launchdarkly/CHANGELOG.md | 8 + integrations/launchdarkly/poetry.lock | 8 +- integrations/launchdarkly/pyproject.toml | 4 +- integrations/linear/CHANGELOG.md | 8 + integrations/linear/poetry.lock | 8 +- integrations/linear/pyproject.toml | 4 +- integrations/newrelic/CHANGELOG.md | 8 + integrations/newrelic/poetry.lock | 8 +- integrations/newrelic/pyproject.toml | 4 +- integrations/octopus/CHANGELOG.md | 29 +- integrations/octopus/poetry.lock | 394 +++++++++---------- integrations/octopus/pyproject.toml | 4 +- integrations/opencost/CHANGELOG.md | 8 + integrations/opencost/poetry.lock | 8 +- integrations/opencost/pyproject.toml | 4 +- integrations/opsgenie/CHANGELOG.md | 8 + integrations/opsgenie/poetry.lock | 8 +- integrations/opsgenie/pyproject.toml | 4 +- integrations/pagerduty/CHANGELOG.md | 8 + integrations/pagerduty/poetry.lock | 8 +- integrations/pagerduty/pyproject.toml | 4 +- integrations/sentry/CHANGELOG.md | 8 + integrations/sentry/poetry.lock | 8 +- integrations/sentry/pyproject.toml | 4 +- integrations/servicenow/CHANGELOG.md | 8 + integrations/servicenow/poetry.lock | 8 +- integrations/servicenow/pyproject.toml | 4 +- integrations/snyk/CHANGELOG.md | 8 + integrations/snyk/poetry.lock | 8 +- integrations/snyk/pyproject.toml | 4 +- integrations/sonarqube/CHANGELOG.md | 8 + integrations/sonarqube/poetry.lock | 8 +- integrations/sonarqube/pyproject.toml | 4 +- integrations/statuspage/CHANGELOG.md | 8 + integrations/statuspage/poetry.lock | 8 +- integrations/statuspage/pyproject.toml | 4 +- integrations/terraform-cloud/CHANGELOG.md | 8 + integrations/terraform-cloud/poetry.lock | 8 +- integrations/terraform-cloud/pyproject.toml | 4 +- integrations/wiz/CHANGELOG.md | 8 + integrations/wiz/poetry.lock | 8 +- integrations/wiz/pyproject.toml | 4 +- scripts/bump-all.sh | 2 +- 86 files changed, 595 insertions(+), 375 
deletions(-) diff --git a/.github/workflows/apply-release.yml b/.github/workflows/apply-release.yml index 830cce2300..acee3aff11 100644 --- a/.github/workflows/apply-release.yml +++ b/.github/workflows/apply-release.yml @@ -41,7 +41,7 @@ jobs: git config --local user.email "action@github.com" git config --local user.name "GitHub Action" - ./scripts/bump-all.sh ^${{ steps.version.outputs.version }} + ./scripts/bump-all.sh ${{ steps.version.outputs.version }} - name: Open pull request diff --git a/integrations/argocd/CHANGELOG.md b/integrations/argocd/CHANGELOG.md index 66eb96daf3..c87aa974df 100644 --- a/integrations/argocd/CHANGELOG.md +++ b/integrations/argocd/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.88 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.87 (2024-09-17) diff --git a/integrations/argocd/poetry.lock b/integrations/argocd/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/argocd/poetry.lock +++ b/integrations/argocd/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/argocd/pyproject.toml b/integrations/argocd/pyproject.toml index 82b2038a82..ae74e69cc1 100644 --- a/integrations/argocd/pyproject.toml +++ b/integrations/argocd/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "argocd" -version = "0.1.87" +version = "0.1.88" description = "Argo CD integration powered by Ocean" authors = ["Isaac Coffie "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # Uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/aws/CHANGELOG.md b/integrations/aws/CHANGELOG.md index 9f8e914816..0a6acdce3d 100644 --- a/integrations/aws/CHANGELOG.md +++ b/integrations/aws/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.2.41 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.2.40 (2024-09-17) diff --git a/integrations/aws/poetry.lock b/integrations/aws/poetry.lock 
index 4a036ca24b..469f07a476 100644 --- a/integrations/aws/poetry.lock +++ b/integrations/aws/poetry.lock @@ -2179,13 +2179,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -4126,4 +4126,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "0c3b5e7c4b1c01552258c7a018ea361bb0f441a4290804b205e1deacafa42b4b" +content-hash = "44881173d07ef617d939812527552b66a59b67fe3f28b32a6f70f26ff3a25e40" diff --git a/integrations/aws/pyproject.toml b/integrations/aws/pyproject.toml index a5dae7c048..92c43837a3 100644 --- a/integrations/aws/pyproject.toml +++ b/integrations/aws/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "aws" -version = "0.2.40" +version = "0.2.41" description = "This integration will map all your resources in all the available accounts to your Port entities" authors = ["Shalev Avhar ", "Erik Zaadi "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} python-dotenv = "^1.0.1" aioboto3 = "^12.4.0" boto3-stubs = {version = "1.34.76", extras = ["acm", "apigateway", "appconfig", "athena", "cloudcontrol", "cloudformation", "cloudwatch", "dynamodb", "ec2", "ec2-instance-connect", "ecr", 
"ecs", "elasticache", "elb", "elbv2", "events", "iam", "lambda", "logs", "organizations", "rds", "route53", "s3", "sagemaker", "secretsmanager", "sns", "sqs", "ssm", "sts"]} diff --git a/integrations/azure-devops/CHANGELOG.md b/integrations/azure-devops/CHANGELOG.md index 7b771b84cc..a496ad9799 100644 --- a/integrations/azure-devops/CHANGELOG.md +++ b/integrations/azure-devops/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.69 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.68 (2024-09-17) diff --git a/integrations/azure-devops/poetry.lock b/integrations/azure-devops/poetry.lock index 81bc8bcf03..2cc5b1a21a 100644 --- a/integrations/azure-devops/poetry.lock +++ b/integrations/azure-devops/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1801,4 +1801,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "539870d1b8cc6e86ae7bd9435b2d65c821fa4b207e2636c01332f99605a5db52" +content-hash = "5c89c845d8de06c02e08450647d68645be60fe49d0c4a7ba4e643d702328bc9e" diff --git a/integrations/azure-devops/pyproject.toml b/integrations/azure-devops/pyproject.toml index 8779f90d27..d6112f8cee 100644 --- a/integrations/azure-devops/pyproject.toml +++ b/integrations/azure-devops/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "azure-devops" -version = "0.1.68" +version = "0.1.69" description = "An Azure Devops Ocean integration" authors = ["Matan Geva "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # Uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/azure/CHANGELOG.md b/integrations/azure/CHANGELOG.md index 4042719c24..b200533688 100644 --- a/integrations/azure/CHANGELOG.md +++ b/integrations/azure/CHANGELOG.md @@ -1,3 +1,10 @@ +0.1.92 (2024-09-22) + +### Improvements + +- Bumped ocean version to ^0.10.12 + + 0.1.91 (2024-09-17) ### Improvements diff --git a/integrations/azure/poetry.lock b/integrations/azure/poetry.lock index 9fdf09da3e..19524b889f 100644 
--- a/integrations/azure/poetry.lock +++ b/integrations/azure/poetry.lock @@ -1560,13 +1560,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -2659,4 +2659,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "3635163931295200570fae198d8d391e585c14e949bf7d0fe482850d2b173c15" +content-hash = "5f5c803ed9e6fe50e6306c2464d85271ad541db9493f24917845a7abe124ff23" diff --git a/integrations/azure/pyproject.toml b/integrations/azure/pyproject.toml index 320ec87239..281db7af2f 100644 --- a/integrations/azure/pyproject.toml +++ b/integrations/azure/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "azure" -version = "0.1.91" +version = "0.1.92" description = "Azure integration" authors = ["Tom Tankilevitch "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} # due to patching the azure-mgmt-resource package, we need to use a specific version azure-mgmt-resource = "23.0.1" azure-identity = "^1.13.0" diff --git a/integrations/datadog/CHANGELOG.md b/integrations/datadog/CHANGELOG.md index 1fe4cb23c8..dcd170155c 100644 --- a/integrations/datadog/CHANGELOG.md +++ b/integrations/datadog/CHANGELOG.md @@ 
-7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.42 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.41 (2024-09-17) diff --git a/integrations/datadog/poetry.lock b/integrations/datadog/poetry.lock index 7cbe327e13..f7de5d7048 100644 --- a/integrations/datadog/poetry.lock +++ b/integrations/datadog/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "56d32aea32a3d0c94a3a107bac52e704bc5d70bc5179974dcfc69c4ff7397a92" +content-hash = "741032f6998e04bd816f31bb352eddb51393af1b0c4266984948a842f6083362" diff --git a/integrations/datadog/pyproject.toml b/integrations/datadog/pyproject.toml index f6917f9bbd..882e7b8a93 100644 --- a/integrations/datadog/pyproject.toml +++ b/integrations/datadog/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "datadog" -version = "0.1.41" +version = "0.1.42" description = "Datadog Ocean Integration" authors = ["Albert Luganga "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = 
"^0.10.12", extras = ["cli"]} loguru = "^0.7.2" [tool.poetry.group.dev.dependencies] diff --git a/integrations/dynatrace/CHANGELOG.md b/integrations/dynatrace/CHANGELOG.md index 5f8032ee27..d711ea6680 100644 --- a/integrations/dynatrace/CHANGELOG.md +++ b/integrations/dynatrace/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.54 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.53 (2024-09-17) diff --git a/integrations/dynatrace/poetry.lock b/integrations/dynatrace/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/dynatrace/poetry.lock +++ b/integrations/dynatrace/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/dynatrace/pyproject.toml b/integrations/dynatrace/pyproject.toml index 5b1e851075..0b6d5135f9 100644 --- a/integrations/dynatrace/pyproject.toml +++ 
b/integrations/dynatrace/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "dynatrace" -version = "0.1.53" +version = "0.1.54" description = "An integration used to import Dynatrace resources into Port" authors = ["Ayodeji Adeoti <>"] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/fake-integration/CHANGELOG.md b/integrations/fake-integration/CHANGELOG.md index 9c1be6dd45..f268194627 100644 --- a/integrations/fake-integration/CHANGELOG.md +++ b/integrations/fake-integration/CHANGELOG.md @@ -5,6 +5,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.4-dev (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.3-dev (2024-09-17) diff --git a/integrations/fake-integration/poetry.lock b/integrations/fake-integration/poetry.lock index 201c6c8eed..60fcad6e8f 100644 --- a/integrations/fake-integration/poetry.lock +++ b/integrations/fake-integration/poetry.lock @@ -1012,13 +1012,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -2030,4 +2030,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "18c541b2a4e37deca52b99da212867c9436e84be051deb2a42745a0251fd301c" +content-hash = "de1e6d4f85358f0bdfa7fd54b08df1dad6abc01297f439344de9c302aa1846ba" diff --git a/integrations/fake-integration/pyproject.toml b/integrations/fake-integration/pyproject.toml index 1bc7b97568..85cda4ee1c 100644 --- a/integrations/fake-integration/pyproject.toml +++ b/integrations/fake-integration/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "fake-integration" -version = "0.1.3-dev" +version = "0.1.4-dev" description = "A useless fake integration that helps us test the Ocean Core" authors = ["Erik Zaadi "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} faker = "^28.0.0" [tool.poetry.group.dev.dependencies] diff --git a/integrations/firehydrant/CHANGELOG.md b/integrations/firehydrant/CHANGELOG.md index 0b6e8b7ede..4a7503b01b 100644 --- a/integrations/firehydrant/CHANGELOG.md +++ b/integrations/firehydrant/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.77 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.76 (2024-09-17) diff --git a/integrations/firehydrant/poetry.lock 
b/integrations/firehydrant/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/firehydrant/poetry.lock +++ b/integrations/firehydrant/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/firehydrant/pyproject.toml b/integrations/firehydrant/pyproject.toml index 61249ce937..5ef0c43307 100644 --- a/integrations/firehydrant/pyproject.toml +++ b/integrations/firehydrant/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "firehydrant" -version = "0.1.76" +version = "0.1.77" description = "FireHydrant Integration Powered by Ocean" authors = ["Isaac Coffie "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/gcp/CHANGELOG.md 
b/integrations/gcp/CHANGELOG.md index ecaa4326ef..06a465f4b5 100644 --- a/integrations/gcp/CHANGELOG.md +++ b/integrations/gcp/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.58 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.57 (2024-09-17) diff --git a/integrations/gcp/poetry.lock b/integrations/gcp/poetry.lock index 472c1a3736..a3b20dadca 100644 --- a/integrations/gcp/poetry.lock +++ b/integrations/gcp/poetry.lock @@ -1191,13 +1191,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -2186,4 +2186,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "a8b4e3c8c3e6c76751e537260924f9ac23cdb199c9cd1eac8363ac8f54ba0ba4" +content-hash = "baea8440515772d7a32d64482c41add4ecd2f47db056192227611b23ebf29f11" diff --git a/integrations/gcp/pyproject.toml b/integrations/gcp/pyproject.toml index 15b3e4367e..b80d805b2f 100644 --- a/integrations/gcp/pyproject.toml +++ b/integrations/gcp/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "gcp" -version = "0.1.57" +version = "0.1.58" description = "A GCP ocean integration" authors = ["Matan Geva "] [tool.poetry.dependencies] 
python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} google-cloud-asset = "^3.25.1" google-cloud-pubsub = "^2.21.1" google-cloud-resource-manager = "^1.12.3" diff --git a/integrations/gitlab/CHANGELOG.md b/integrations/gitlab/CHANGELOG.md index 0b95cee875..5222f3fd8b 100644 --- a/integrations/gitlab/CHANGELOG.md +++ b/integrations/gitlab/CHANGELOG.md @@ -7,6 +7,14 @@ this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +0.1.123 (2024-09-22) +==================== + +### Improvements + +- Bumped ocean version to ^0.10.12 + + 0.1.122 (2024-09-17) ==================== diff --git a/integrations/gitlab/poetry.lock b/integrations/gitlab/poetry.lock index a45ef4566d..0df3de7080 100644 --- a/integrations/gitlab/poetry.lock +++ b/integrations/gitlab/poetry.lock @@ -972,13 +972,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -2048,4 +2048,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "01ced5b4ec812e55cf600a4036496d893931762d703c452603e35353be2d7343" +content-hash = "27fe821c6ee26b7b9dabe9f2f17c08e33de51fae3f8ce8f0cc0d5fb732bbca65" diff --git a/integrations/gitlab/pyproject.toml b/integrations/gitlab/pyproject.toml index 5b5c494cb8..d919a377dc 100644 --- a/integrations/gitlab/pyproject.toml +++ b/integrations/gitlab/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gitlab" -version = "0.1.122" +version = "0.1.123" description = "Gitlab integration for Port using Port-Ocean Framework" authors = ["Yair Siman-Tov "] @@ -11,7 +11,7 @@ aiolimiter = "^1.1.0" python-gitlab = "^3.14.0" pathlib = "^1.0.1" jsonschema = "^4.17.3" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/jenkins/CHANGELOG.md b/integrations/jenkins/CHANGELOG.md index 99a28b79e3..9b7cd59772 100644 --- a/integrations/jenkins/CHANGELOG.md +++ b/integrations/jenkins/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.59 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + 
+ ## 0.1.58 (2024-09-17) diff --git a/integrations/jenkins/poetry.lock b/integrations/jenkins/poetry.lock index 007749c517..ff799eaad1 100644 --- a/integrations/jenkins/poetry.lock +++ b/integrations/jenkins/poetry.lock @@ -896,13 +896,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1801,4 +1801,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "dae8f401cb6c33a49e1622fb1e0a8596ba595a9601889e8b18f8c6859479806f" +content-hash = "5e8175ccf44c3e2d1b0a02899aae6c4d9e1712bbd494b074d0fa378dc0d7aeef" diff --git a/integrations/jenkins/pyproject.toml b/integrations/jenkins/pyproject.toml index 4124bbb5ff..d16cdbd677 100644 --- a/integrations/jenkins/pyproject.toml +++ b/integrations/jenkins/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "jenkins" -version = "0.1.58" +version = "0.1.59" description = "Jenkins Integration to Port Ocean" authors = ["Albert Luganga "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} pip = "^23.3.1" python-dotenv = "^1.0.0" loguru = "^0.7.2" diff --git a/integrations/jira/CHANGELOG.md b/integrations/jira/CHANGELOG.md index 
7b64ba3a24..9e8acb9a6a 100644 --- a/integrations/jira/CHANGELOG.md +++ b/integrations/jira/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.88 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.87 (2024-09-17) diff --git a/integrations/jira/poetry.lock b/integrations/jira/poetry.lock index af83a9d08d..5fceea55db 100644 --- a/integrations/jira/poetry.lock +++ b/integrations/jira/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "6fd97423bb47a8540b3a5473b01a734a226f5dcbb468411868de2e08f83c32df" +content-hash = "8f280a0291e9f75e4ee41b1703b68344239f84f73a802d72f65deee61f356856" diff --git a/integrations/jira/pyproject.toml b/integrations/jira/pyproject.toml index 2ec3666409..1e8bd0b382 100644 --- a/integrations/jira/pyproject.toml +++ b/integrations/jira/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "jira" -version = "0.1.87" +version = "0.1.88" description = "Integration to bring information from Jira into Port" authors = ["Mor Paz "] [tool.poetry.dependencies] python 
= "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} httpx = "^0.27.0" [tool.poetry.group.dev.dependencies] diff --git a/integrations/kafka/CHANGELOG.md b/integrations/kafka/CHANGELOG.md index bf2e62f9dc..d059ae8169 100644 --- a/integrations/kafka/CHANGELOG.md +++ b/integrations/kafka/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.76 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.75 (2024-09-17) diff --git a/integrations/kafka/poetry.lock b/integrations/kafka/poetry.lock index 685b95ac44..c1379e93a3 100644 --- a/integrations/kafka/poetry.lock +++ b/integrations/kafka/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "e97f7ae7976aa71c90d7ec5d50920777ab2d12b301866036fcfbf3703a8b982a" +content-hash = "e68b5d64edd3a63accdfb18d3c8c3c1996ee5220b07b6cace40a5b4aa4d7909c" diff --git a/integrations/kafka/pyproject.toml b/integrations/kafka/pyproject.toml index 80f0dfe209..c68a02bebb 100644 --- 
a/integrations/kafka/pyproject.toml +++ b/integrations/kafka/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "kafka" -version = "0.1.75" +version = "0.1.76" description = "Integration to import information from a Kafka cluster into Port. The integration supports importing metadata regarding the Kafka cluster, brokers and topics." authors = ["Tal Sabag "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} confluent-kafka = "^2.2.0" [tool.poetry.group.dev.dependencies] diff --git a/integrations/kubecost/CHANGELOG.md b/integrations/kubecost/CHANGELOG.md index 635e19f7e8..7fe55cd623 100644 --- a/integrations/kubecost/CHANGELOG.md +++ b/integrations/kubecost/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.81 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.80 (2024-09-17) diff --git a/integrations/kubecost/poetry.lock b/integrations/kubecost/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/kubecost/poetry.lock +++ b/integrations/kubecost/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/kubecost/pyproject.toml b/integrations/kubecost/pyproject.toml index 759fe46121..3f1e877a16 100644 --- a/integrations/kubecost/pyproject.toml +++ b/integrations/kubecost/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "kubecost" -version = "0.1.80" +version = "0.1.81" description = "Kubecost integration powered by Ocean" authors = ["Isaac Coffie "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/launchdarkly/CHANGELOG.md b/integrations/launchdarkly/CHANGELOG.md index fc111793bf..a278522e76 100644 --- a/integrations/launchdarkly/CHANGELOG.md +++ b/integrations/launchdarkly/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.53 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.52 (2024-09-17) diff --git 
a/integrations/launchdarkly/poetry.lock b/integrations/launchdarkly/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/launchdarkly/poetry.lock +++ b/integrations/launchdarkly/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/launchdarkly/pyproject.toml b/integrations/launchdarkly/pyproject.toml index 4503434f1c..1dc809b786 100644 --- a/integrations/launchdarkly/pyproject.toml +++ b/integrations/launchdarkly/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "launchdarkly" -version = "0.1.52" +version = "0.1.53" description = "Launchdarkly integration for Port" authors = ["Michael Armah "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git 
a/integrations/linear/CHANGELOG.md b/integrations/linear/CHANGELOG.md index aeb2462b80..f27e9ea1d2 100644 --- a/integrations/linear/CHANGELOG.md +++ b/integrations/linear/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.39 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.38 (2024-09-17) diff --git a/integrations/linear/poetry.lock b/integrations/linear/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/linear/poetry.lock +++ b/integrations/linear/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/linear/pyproject.toml b/integrations/linear/pyproject.toml index f1ceab6065..a042e1e8ef 100644 --- a/integrations/linear/pyproject.toml +++ b/integrations/linear/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "linear" -version = "0.1.38" +version = "0.1.39" description = "Integration 
to bring information from Linear into Port" authors = ["Mor Paz "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/newrelic/CHANGELOG.md b/integrations/newrelic/CHANGELOG.md index f5f8e3c12b..ae92370a90 100644 --- a/integrations/newrelic/CHANGELOG.md +++ b/integrations/newrelic/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.83 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.82 (2024-09-17) diff --git a/integrations/newrelic/poetry.lock b/integrations/newrelic/poetry.lock index af83a9d08d..5fceea55db 100644 --- a/integrations/newrelic/poetry.lock +++ b/integrations/newrelic/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "6fd97423bb47a8540b3a5473b01a734a226f5dcbb468411868de2e08f83c32df" +content-hash = "8f280a0291e9f75e4ee41b1703b68344239f84f73a802d72f65deee61f356856" diff --git a/integrations/newrelic/pyproject.toml b/integrations/newrelic/pyproject.toml index fa23c05fc5..c2d26fb463 100644 --- a/integrations/newrelic/pyproject.toml +++ b/integrations/newrelic/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "newrelic" -version = "0.1.82" +version = "0.1.83" description = "New Relic Integration" authors = ["Tom Tankilevitch "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} httpx = "^0.27.0" [tool.poetry.group.dev.dependencies] diff --git a/integrations/octopus/CHANGELOG.md b/integrations/octopus/CHANGELOG.md index 978be6bbab..0aa5ec3f56 100644 --- a/integrations/octopus/CHANGELOG.md +++ b/integrations/octopus/CHANGELOG.md @@ -7,70 +7,77 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 -# Port_Ocean 0.1.10 (2024-09-17) +## 0.1.11-beta (2024-09-22) + +### Improvements + +- Bumped ocean version to ^0.10.12 + + +## 0.1.10 (2024-09-17) ### Improvements - Bumped ocean version to ^0.10.11 -# Port_Ocean 0.1.9 (2024-09-12) 
+## 0.1.9 (2024-09-12) ### Improvements - Bumped ocean version to ^0.10.10 (#1) -# Port_Ocean 0.1.8 (2024-09-05) +## 0.1.8 (2024-09-05) ### Improvements - Bumped ocean version to ^0.10.9 (#1) -# Port_Ocean 0.1.7 (2024-09-04) +## 0.1.7 (2024-09-04) ### Improvements - Bumped ocean version to ^0.10.8 (#1) -# Port_Ocean 0.1.6 (2024-09-01) +## 0.1.6 (2024-09-01) ### Improvements - Bumped ocean version to ^0.10.7 (#1) -# Port_Ocean 0.1.5 (2024-08-30) +## 0.1.5 (2024-08-30) ### Improvements - Bumped ocean version to ^0.10.5 (#1) -# Port_Ocean 0.1.4 (2024-08-28) +## 0.1.4 (2024-08-28) ### Improvements - Bumped ocean version to ^0.10.4 (#1) -# Port_Ocean 0.1.3 (2024-08-28) +## 0.1.3 (2024-08-28) ### Improvements - Bumped ocean version to ^0.10.3 (#1) -# Port_Ocean 0.1.2 (2024-08-26) +## 0.1.2 (2024-08-26) ### Improvements - Bumped ocean version to ^0.10.2 (#1) -# Port_Ocean 0.1.1 (2024-08-26) +## 0.1.1 (2024-08-26) ### Improvements @@ -82,5 +89,3 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Features - Added Octopus integration with support for Space, Project, Release, Deployment and Machine (PORT-9398) - - diff --git a/integrations/octopus/poetry.lock b/integrations/octopus/poetry.lock index f7f7586f32..1001ee4084 100644 --- a/integrations/octopus/poetry.lock +++ b/integrations/octopus/poetry.lock @@ -19,13 +19,13 @@ dev = ["pytest", "pytest-asyncio", "pytest-cov"] [[package]] name = "anyio" -version = "4.4.0" +version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.6.0-py3-none-any.whl", hash = 
"sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, ] [package.dependencies] @@ -33,9 +33,9 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "arrow" @@ -58,13 +58,13 @@ test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock [[package]] name = "astroid" -version = "3.2.4" +version = "3.3.3" description = "An abstract syntax tree for Python with inference support." 
optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, - {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, + {file = "astroid-3.3.3-py3-none-any.whl", hash = "sha256:2d79acfd3c594b6a2d4141fea98a1d62ab4a52e54332b1f1ddcf07b652cc5c0f"}, + {file = "astroid-3.3.3.tar.gz", hash = "sha256:63f8c5370d9bad8294163c87b2d440a7fdf546be6c72bbeac0549c93244dbd72"}, ] [[package]] @@ -273,45 +273,45 @@ files = [ [[package]] name = "confluent-kafka" -version = "2.5.0" +version = "2.5.3" description = "Confluent's Python client for Apache Kafka" optional = false python-versions = "*" files = [ - {file = "confluent-kafka-2.5.0.tar.gz", hash = "sha256:551cabaade717bb56ec13eb860ce439bedbcf1c97f4a4aa26957572ed1bfa74f"}, - {file = "confluent_kafka-2.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5509a219128fb177fa4186a8669071cc52acd52eba436f339edb9063aabb486d"}, - {file = "confluent_kafka-2.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff98d8fbe7d3671cac3e1b692c13f160cf508b525c110a89906ffabd1cc140fe"}, - {file = "confluent_kafka-2.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:72ffae4387d283cb5657b6381a893c7231c26a9b4248557e7f030de76156290a"}, - {file = "confluent_kafka-2.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:75873087fd1bd753e082f74ab97f68cc3a0765d6b600c2ac3d3a0beffbdc569d"}, - {file = "confluent_kafka-2.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:2af917f93ac3a0aa88e6bee9b2056c1c176621e4a9c8f7051cc8646b81f91327"}, - {file = "confluent_kafka-2.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efc8c48d5dbbcd1b56afe737df8156a74e62b50481ccffe581b9926eaa16c014"}, - {file = "confluent_kafka-2.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7b9f867c7e955a48ed60dee0da9c157b0f84e67724f7e42591bbcf6867e3865f"}, - 
{file = "confluent_kafka-2.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b064baf6a93ab58199e63bddf73d9f2c855b89cc376d5313c2f89c633aa3254a"}, - {file = "confluent_kafka-2.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a55f3d761c8463c504012ad9b06be33ef07f301f246e61d656cc927d35763f82"}, - {file = "confluent_kafka-2.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:c05a677b1dbdcf2a4532e2cf41e78d2e2ffb3a6829347caf2825f472cda59e69"}, - {file = "confluent_kafka-2.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:db987d8953d0d58a28a455e43a1da74a0e9dec7a12a74f5abd85a7cb308aefd4"}, - {file = "confluent_kafka-2.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d828ebb45db153cd462e72c575f8683c2c56ddba62b282aa36d9c365847e212"}, - {file = "confluent_kafka-2.5.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fb863f76605e9bbbb1d7f02abf05899cf1435421aa721a5be212c600bd054aa3"}, - {file = "confluent_kafka-2.5.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:92efb98908e29f597c77ab97faa064f670b681f4540c3eabc415b8a6e58df9bf"}, - {file = "confluent_kafka-2.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:d668b5c426af595271bf6fce2917a6c3a15453656077a59db85f440958b5ccc2"}, - {file = "confluent_kafka-2.5.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:797250f1a66024dd8b1c94764cc75e1d7bd1b7224a0b982678eafbb39714874e"}, - {file = "confluent_kafka-2.5.0-cp36-cp36m-manylinux_2_28_aarch64.whl", hash = "sha256:e81dc0a2980e597848b73983ce6e0b4ae7d129c01370cd9f31599c15c5d02a5d"}, - {file = "confluent_kafka-2.5.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:ffda33c86f5fee6ae678cca039915a0c4c1863bbc592b6f2f82abfddc173b0d3"}, - {file = "confluent_kafka-2.5.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7410bd5b0d6f54df5fa3313c75801a6ebcfab7cbfb947c3f56149e38b0fe924c"}, - {file = "confluent_kafka-2.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9a29dc4b7d4a754037d7d8e3ad1873a27b16e7de8c0a06755456b20803a70b16"}, - {file = 
"confluent_kafka-2.5.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:089b68a43c3b911356a4ff08fa862245f1333387b79221ac7f60d99e5b4e24d6"}, - {file = "confluent_kafka-2.5.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c3a17ebdd97c803cf369c8615a474ca0bea39b5f5944e51f1c320aee8d6d5da9"}, - {file = "confluent_kafka-2.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:151656afaeb623b46c042a752091d8b17fd05ff7d309be6d8b4953b8dc0783bc"}, - {file = "confluent_kafka-2.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:570fc091cdcf9d1baf90c5f4965322cea8185ba8698d0f02cd1c8bd38bf6664a"}, - {file = "confluent_kafka-2.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bd57edf51434d6ec289339a0c9b627ca1f1e7c1130e348c0b411407183db53c6"}, - {file = "confluent_kafka-2.5.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:8975fea2ccd6927aad188e198e1688ef16589dc36b42f7a33ad07b1ca1341901"}, - {file = "confluent_kafka-2.5.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7c0b1a7774905c9e3c24d09d9b8463d771685e4105150c2503185537a6a590f9"}, - {file = "confluent_kafka-2.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:eaf01cd79b4d2cdbdf1e7b6ace9c846ae9ad9f4cf573617bbb5735a5c48cbd20"}, - {file = "confluent_kafka-2.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa789332fd40a9e99b9388f87f28db8fc7dd8ca54a1d24d0bcd0ad33f50f3528"}, - {file = "confluent_kafka-2.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e917db155dc3a64e1496b293a3ceb0a8edf23e0bd6f93d43576c40f0c59d3067"}, - {file = "confluent_kafka-2.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:a8bb3af6d1f109aaac5514c65a46cac933d78b3935f6fea52fe1f2ea6a9951bf"}, - {file = "confluent_kafka-2.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a1fb72461dcf7aa7e1834133eb733f824331aafda87ef48ec917d9b09c805a99"}, - {file = "confluent_kafka-2.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:4bda1b5fa87cb993bcd964d271a76cc11cafa2455de02ab5eff6efd9e688d55e"}, + {file = "confluent-kafka-2.5.3.tar.gz", hash = 
"sha256:eca625b0a8742d864a954bbe6493d453c07bacedf9e10d71a54dd1047f775778"}, + {file = "confluent_kafka-2.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8a1a2a8756b2c1cd2654ea83d1e819a6e2c0a4337eacec50bfd2ab1f0c24a29c"}, + {file = "confluent_kafka-2.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c284eefed1b27133d90afc0fa2fd735864db8501190f3c2e0c8d8b1a20b07759"}, + {file = "confluent_kafka-2.5.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:46c6063726fcdae835902961bb6c0e4c148499b87fdd513e6b2a6b406922ae3e"}, + {file = "confluent_kafka-2.5.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:505078b402dde98dc06dc66b6356acd19984742ef6b82dd52fb860f2a14b5a57"}, + {file = "confluent_kafka-2.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:db30418bb36723a02ba51e058312056d0403c5f245beb379bff66e4b0c14337b"}, + {file = "confluent_kafka-2.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4dd5fa74231fc21c3a26eeda1999a27f84768a6291a8b04c3cd61ac1deea4ace"}, + {file = "confluent_kafka-2.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac8b5fe45ee9c11ce7a516dc7c41441ebb17d9ff63c8646a59b8e52bd791b154"}, + {file = "confluent_kafka-2.5.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:7125c3f86a76136b25aa21c94303b33709e2dd15f777395ea81fbd6872d9147b"}, + {file = "confluent_kafka-2.5.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8ec7a407bcb2eb122ff159d602cedc41d858f4c66a436c778f5d2f9f15fbec4e"}, + {file = "confluent_kafka-2.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:4cfb18d69e6912fe90cbbcc9c7d805988122c51ab3041e1424ace64bc31b736f"}, + {file = "confluent_kafka-2.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8d86de3e2c7bb59fb16faea468e833712912106f32a3a3ec345088c366042734"}, + {file = "confluent_kafka-2.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9ffb298b3ea3477afdaa5da6033d35dc0be01b10537d9b63994411e79b41477"}, + {file = "confluent_kafka-2.5.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:61a92637bea8fca454ec711f46e7753647deb7da56132995103acb5eb5041a29"}, + {file = "confluent_kafka-2.5.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3daad72791ae06dec257c9105278e89ae0924e86ef107a1acb443106f002f361"}, + {file = "confluent_kafka-2.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:f626494cd6ad18fa2ed83f80d687bc0194cff6f61b3d4f2aa183efa23ede2e02"}, + {file = "confluent_kafka-2.5.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f034f13c08ba238154d818294ceabb2257e8df8fb6489f891ec7600c7c541553"}, + {file = "confluent_kafka-2.5.3-cp36-cp36m-manylinux_2_28_aarch64.whl", hash = "sha256:806c43fd1524034a9b6c958b4f9395ff5f56ef697218a336eac1da5006184f66"}, + {file = "confluent_kafka-2.5.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:0cdb150c12d5ac6e33572cbf16243284c65a178e3719baa610a48d672e9d92bf"}, + {file = "confluent_kafka-2.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:a2ed265bf3420811efd802fd8ebf5ec0f20a82e9baeff5299a67f6a84dde1b06"}, + {file = "confluent_kafka-2.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:27d048429b138667c51541adc04bb398afa61a37a7be89f16ff9a318019d02c6"}, + {file = "confluent_kafka-2.5.3-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:eb80c22a7ca17839f229f299bafca1450c9fe4d5ca222e60e52428df91d42b56"}, + {file = "confluent_kafka-2.5.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:5122b8e9f94b6160d47e8f0020857376caa21f715b95c4b13c68683b47260c8f"}, + {file = "confluent_kafka-2.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b69c3120e0cac9ca463ca603ddc9d4e811409ef4ec69d2b6bb8bd94d6fce95e"}, + {file = "confluent_kafka-2.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a6f152e704b01c6a726233d081921454b7de106a5e4036994d1d5f4b34e7e46f"}, + {file = "confluent_kafka-2.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b8eef8c2f963ca6f5fcc79a0d6edef4e25fba83dfc0ef3f0401e1644f60ff11"}, + {file = "confluent_kafka-2.5.3-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:0751302b0fd8090cbca92d7d34d237768923107b30de2611f3db93c2118cf2a8"}, + {file = "confluent_kafka-2.5.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:0e0cb3b18a59d1c6fcae60297ee25b5c65d5c39c8ad8033a8fa1392498a71c9e"}, + {file = "confluent_kafka-2.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:0b14928cddba963ea7d1c66aa268b6d37976bc91b4cf2100b5b7336d848ced22"}, + {file = "confluent_kafka-2.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dae80e9e1e4417462fe61f64da0ab111395719e35c9f7f3eac7c671ff5e868fe"}, + {file = "confluent_kafka-2.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75e1da68b199ef2472e47785d9a5c2dc75d307ed78827ad929bb733728b18567"}, + {file = "confluent_kafka-2.5.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:fa2318eaa9b2d5f3ebc2022b71e4ebf6242c13963b4faccf46eea49fea0ad91f"}, + {file = "confluent_kafka-2.5.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:490836e9fc3b4489721327e3df987c8667916a97d178e2296913c8d5de6623a9"}, + {file = "confluent_kafka-2.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfabe291cda68fdc3136f2f367dd4e5a6c3750113785f0c1936ba9cae09f4e9d"}, ] [package.extras] @@ -557,15 +557,18 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.8" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "incremental" version 
= "24.7.2" @@ -854,19 +857,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -885,13 +888,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1031,17 +1034,17 @@ files = [ [[package]] name = "pylint" -version = "3.2.6" +version = "3.3.0" description = "python code static checker" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "pylint-3.2.6-py3-none-any.whl", hash = "sha256:03c8e3baa1d9fb995b12c1dbe00aa6c4bcef210c2a2634374aedeb22fb4a8f8f"}, - {file = "pylint-3.2.6.tar.gz", hash = "sha256:a5d01678349454806cff6d886fb072294f56a58c4761278c97fb557d708e1eb3"}, + {file = "pylint-3.3.0-py3-none-any.whl", hash = "sha256:02dce1845f68974b9b03045894eb3bf05a8b3c7da9fd10af4de3c91e69eb92f1"}, + {file = "pylint-3.3.0.tar.gz", hash = "sha256:c685fe3c061ee5fb0ce7c29436174ab84a2f525fce2a268b1986e921e083fe22"}, ] [package.dependencies] -astroid = ">=3.2.4,<=3.3.0-dev0" +astroid = ">=3.3.3,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, @@ -1058,13 +1061,13 @@ testutils = ["gitpython (>3)"] [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = 
"sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -1096,21 +1099,21 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-httpx" -version = "0.30.0" +version = "0.31.0" description = "Send responses to httpx." optional = false python-versions = ">=3.9" files = [ - {file = "pytest-httpx-0.30.0.tar.gz", hash = "sha256:755b8edca87c974dd4f3605c374fda11db84631de3d163b99c0df5807023a19a"}, - {file = "pytest_httpx-0.30.0-py3-none-any.whl", hash = "sha256:6d47849691faf11d2532565d0c8e0e02b9f4ee730da31687feae315581d7520c"}, + {file = "pytest_httpx-0.31.0-py3-none-any.whl", hash = "sha256:9ca001951fe7a08e7b08674c05904e0f148b4df28849a721f33a9ed404810e7b"}, + {file = "pytest_httpx-0.31.0.tar.gz", hash = "sha256:bfe133be94ec5b46b44233d3644d97f15d33660c91e94ac4b11d12d77795eb54"}, ] [package.dependencies] httpx = "==0.27.*" -pytest = ">=7,<9" +pytest = "==8.*" [package.extras] -testing = ["pytest-asyncio (==0.23.*)", "pytest-cov (==4.*)"] +testing = ["pytest-asyncio (==0.24.*)", "pytest-cov (==5.*)"] [[package]] name = "pytest-xdist" @@ -1162,18 +1165,15 @@ cli = ["click (>=5.0)"] [[package]] name = "python-multipart" -version = "0.0.9" +version = "0.0.10" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.8" files = [ - {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, - {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, + {file = "python_multipart-0.0.10-py3-none-any.whl", hash = "sha256:2b06ad9e8d50c7a8db80e3b56dab590137b323410605af2be20d62a5f1ba1dc8"}, + 
{file = "python_multipart-0.0.10.tar.gz", hash = "sha256:46eb3c6ce6fdda5fb1a03c7e11d490e407c6930a2703fe7aef4da71c374688fa"}, ] -[package.extras] -dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] - [[package]] name = "python-slugify" version = "8.0.4" @@ -1276,13 +1276,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.8.0" +version = "13.8.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc"}, - {file = "rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4"}, + {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, + {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, ] [package.dependencies] @@ -1294,45 +1294,45 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.6.3" +version = "0.6.7" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.3-py3-none-linux_armv6l.whl", hash = "sha256:97f58fda4e309382ad30ede7f30e2791d70dd29ea17f41970119f55bdb7a45c3"}, - {file = "ruff-0.6.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3b061e49b5cf3a297b4d1c27ac5587954ccb4ff601160d3d6b2f70b1622194dc"}, - {file = "ruff-0.6.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:34e2824a13bb8c668c71c1760a6ac7d795ccbd8d38ff4a0d8471fdb15de910b1"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bddfbb8d63c460f4b4128b6a506e7052bad4d6f3ff607ebbb41b0aa19c2770d1"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ced3eeb44df75353e08ab3b6a9e113b5f3f996bea48d4f7c027bc528ba87b672"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47021dff5445d549be954eb275156dfd7c37222acc1e8014311badcb9b4ec8c1"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d7bd20dc07cebd68cc8bc7b3f5ada6d637f42d947c85264f94b0d1cd9d87384"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:500f166d03fc6d0e61c8e40a3ff853fa8a43d938f5d14c183c612df1b0d6c58a"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42844ff678f9b976366b262fa2d1d1a3fe76f6e145bd92c84e27d172e3c34500"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70452a10eb2d66549de8e75f89ae82462159855e983ddff91bc0bce6511d0470"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:65a533235ed55f767d1fc62193a21cbf9e3329cf26d427b800fdeacfb77d296f"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2e2c23cef30dc3cbe9cc5d04f2899e7f5e478c40d2e0a633513ad081f7361b5"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:d8a136aa7d228975a6aee3dd8bea9b28e2b43e9444aa678fb62aeb1956ff2351"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f92fe93bc72e262b7b3f2bba9879897e2d58a989b4714ba6a5a7273e842ad2f8"}, - {file = "ruff-0.6.3-py3-none-win32.whl", hash = "sha256:7a62d3b5b0d7f9143d94893f8ba43aa5a5c51a0ffc4a401aa97a81ed76930521"}, - {file = "ruff-0.6.3-py3-none-win_amd64.whl", hash = "sha256:746af39356fee2b89aada06c7376e1aa274a23493d7016059c3a72e3b296befb"}, - {file = "ruff-0.6.3-py3-none-win_arm64.whl", hash = "sha256:14a9528a8b70ccc7a847637c29e56fd1f9183a9db743bbc5b8e0c4ad60592a82"}, - {file = "ruff-0.6.3.tar.gz", hash = "sha256:183b99e9edd1ef63be34a3b51fee0a9f4ab95add123dbf89a71f7b1f0c991983"}, + {file = "ruff-0.6.7-py3-none-linux_armv6l.whl", hash = "sha256:08277b217534bfdcc2e1377f7f933e1c7957453e8a79764d004e44c40db923f2"}, + {file = "ruff-0.6.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c6707a32e03b791f4448dc0dce24b636cbcdee4dd5607adc24e5ee73fd86c00a"}, + {file = "ruff-0.6.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:533d66b7774ef224e7cf91506a7dafcc9e8ec7c059263ec46629e54e7b1f90ab"}, + {file = "ruff-0.6.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17a86aac6f915932d259f7bec79173e356165518859f94649d8c50b81ff087e9"}, + {file = "ruff-0.6.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3f8822defd260ae2460ea3832b24d37d203c3577f48b055590a426a722d50ef"}, + {file = "ruff-0.6.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ba4efe5c6dbbb58be58dd83feedb83b5e95c00091bf09987b4baf510fee5c99"}, + {file = "ruff-0.6.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:525201b77f94d2b54868f0cbe5edc018e64c22563da6c5c2e5c107a4e85c1c0d"}, + {file = "ruff-0.6.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8854450839f339e1049fdbe15d875384242b8e85d5c6947bb2faad33c651020b"}, + {file = 
"ruff-0.6.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f0b62056246234d59cbf2ea66e84812dc9ec4540518e37553513392c171cb18"}, + {file = "ruff-0.6.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b1462fa56c832dc0cea5b4041cfc9c97813505d11cce74ebc6d1aae068de36b"}, + {file = "ruff-0.6.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:02b083770e4cdb1495ed313f5694c62808e71764ec6ee5db84eedd82fd32d8f5"}, + {file = "ruff-0.6.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c05fd37013de36dfa883a3854fae57b3113aaa8abf5dea79202675991d48624"}, + {file = "ruff-0.6.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f49c9caa28d9bbfac4a637ae10327b3db00f47d038f3fbb2195c4d682e925b14"}, + {file = "ruff-0.6.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a0e1655868164e114ba43a908fd2d64a271a23660195017c17691fb6355d59bb"}, + {file = "ruff-0.6.7-py3-none-win32.whl", hash = "sha256:a939ca435b49f6966a7dd64b765c9df16f1faed0ca3b6f16acdf7731969deb35"}, + {file = "ruff-0.6.7-py3-none-win_amd64.whl", hash = "sha256:590445eec5653f36248584579c06252ad2e110a5d1f32db5420de35fb0e1c977"}, + {file = "ruff-0.6.7-py3-none-win_arm64.whl", hash = "sha256:b28f0d5e2f771c1fe3c7a45d3f53916fc74a480698c4b5731f0bea61e52137c8"}, + {file = "ruff-0.6.7.tar.gz", hash = "sha256:44e52129d82266fa59b587e2cd74def5637b730a69c4542525dfdecfaae38bd5"}, ] [[package]] name = "setuptools" -version = "74.0.0" +version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = 
"setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] @@ -1460,13 +1460,13 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "types-python-dateutil" -version = "2.9.0.20240821" +version = "2.9.0.20240906" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.9.0.20240821.tar.gz", hash = "sha256:9649d1dcb6fef1046fb18bebe9ea2aa0028b160918518c34589a46045f6ebd98"}, - {file = "types_python_dateutil-2.9.0.20240821-py3-none-any.whl", hash = "sha256:f5889fcb4e63ed4aaa379b44f93c32593d50b9a94c9a60a0c854d8cc3511cd57"}, + {file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"}, + {file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"}, ] [[package]] @@ -1482,13 +1482,13 @@ files = [ [[package]] name = "urllib3" 
-version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1663,97 +1663,97 @@ anyio = ">=3.0.0" [[package]] name = "websockets" -version = "13.0.1" +version = "13.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.8" files = [ - {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1841c9082a3ba4a05ea824cf6d99570a6a2d8849ef0db16e9c826acb28089e8f"}, - {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c5870b4a11b77e4caa3937142b650fbbc0914a3e07a0cf3131f35c0587489c1c"}, - {file = "websockets-13.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f1d3d1f2eb79fe7b0fb02e599b2bf76a7619c79300fc55f0b5e2d382881d4f7f"}, - {file = "websockets-13.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15c7d62ee071fa94a2fc52c2b472fed4af258d43f9030479d9c4a2de885fd543"}, - {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6724b554b70d6195ba19650fef5759ef11346f946c07dbbe390e039bcaa7cc3d"}, - {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a952fa2ae57a42ba7951e6b2605e08a24801a4931b5644dfc68939e041bc7f"}, - {file = 
"websockets-13.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17118647c0ea14796364299e942c330d72acc4b248e07e639d34b75067b3cdd8"}, - {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64a11aae1de4c178fa653b07d90f2fb1a2ed31919a5ea2361a38760192e1858b"}, - {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0617fd0b1d14309c7eab6ba5deae8a7179959861846cbc5cb528a7531c249448"}, - {file = "websockets-13.0.1-cp310-cp310-win32.whl", hash = "sha256:11f9976ecbc530248cf162e359a92f37b7b282de88d1d194f2167b5e7ad80ce3"}, - {file = "websockets-13.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c3c493d0e5141ec055a7d6809a28ac2b88d5b878bb22df8c621ebe79a61123d0"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:699ba9dd6a926f82a277063603fc8d586b89f4cb128efc353b749b641fcddda7"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf2fae6d85e5dc384bf846f8243ddaa9197f3a1a70044f59399af001fd1f51d4"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:52aed6ef21a0f1a2a5e310fb5c42d7555e9c5855476bbd7173c3aa3d8a0302f2"}, - {file = "websockets-13.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb2b9a318542153674c6e377eb8cb9ca0fc011c04475110d3477862f15d29f0"}, - {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5df891c86fe68b2c38da55b7aea7095beca105933c697d719f3f45f4220a5e0e"}, - {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2d146ff30d9dd2fcf917e5d147db037a5c573f0446c564f16f1f94cf87462"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8ac5b46fd798bbbf2ac6620e0437c36a202b08e1f827832c4bf050da081b501"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:46af561eba6f9b0848b2c9d2427086cabadf14e0abdd9fde9d72d447df268418"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b5a06d7f60bc2fc378a333978470dfc4e1415ee52f5f0fce4f7853eb10c1e9df"}, - {file = "websockets-13.0.1-cp311-cp311-win32.whl", hash = "sha256:556e70e4f69be1082e6ef26dcb70efcd08d1850f5d6c5f4f2bcb4e397e68f01f"}, - {file = "websockets-13.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:67494e95d6565bf395476e9d040037ff69c8b3fa356a886b21d8422ad86ae075"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f9c9e258e3d5efe199ec23903f5da0eeaad58cf6fccb3547b74fd4750e5ac47a"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6b41a1b3b561f1cba8321fb32987552a024a8f67f0d05f06fcf29f0090a1b956"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f73e676a46b0fe9426612ce8caeca54c9073191a77c3e9d5c94697aef99296af"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f613289f4a94142f914aafad6c6c87903de78eae1e140fa769a7385fb232fdf"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f52504023b1480d458adf496dc1c9e9811df4ba4752f0bc1f89ae92f4f07d0c"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:139add0f98206cb74109faf3611b7783ceafc928529c62b389917a037d4cfdf4"}, - {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47236c13be337ef36546004ce8c5580f4b1150d9538b27bf8a5ad8edf23ccfab"}, - {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c44ca9ade59b2e376612df34e837013e2b273e6c92d7ed6636d0556b6f4db93d"}, - {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9bbc525f4be3e51b89b2a700f5746c2a6907d2e2ef4513a8daafc98198b92237"}, - 
{file = "websockets-13.0.1-cp312-cp312-win32.whl", hash = "sha256:3624fd8664f2577cf8de996db3250662e259bfbc870dd8ebdcf5d7c6ac0b5185"}, - {file = "websockets-13.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0513c727fb8adffa6d9bf4a4463b2bade0186cbd8c3604ae5540fae18a90cb99"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1ee4cc030a4bdab482a37462dbf3ffb7e09334d01dd37d1063be1136a0d825fa"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dbb0b697cc0655719522406c059eae233abaa3243821cfdfab1215d02ac10231"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:acbebec8cb3d4df6e2488fbf34702cbc37fc39ac7abf9449392cefb3305562e9"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63848cdb6fcc0bf09d4a155464c46c64ffdb5807ede4fb251da2c2692559ce75"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:872afa52a9f4c414d6955c365b6588bc4401272c629ff8321a55f44e3f62b553"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e70fec7c54aad4d71eae8e8cab50525e899791fc389ec6f77b95312e4e9920"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e82db3756ccb66266504f5a3de05ac6b32f287faacff72462612120074103329"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4e85f46ce287f5c52438bb3703d86162263afccf034a5ef13dbe4318e98d86e7"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3fea72e4e6edb983908f0db373ae0732b275628901d909c382aae3b592589f2"}, - {file = "websockets-13.0.1-cp313-cp313-win32.whl", hash = "sha256:254ecf35572fca01a9f789a1d0f543898e222f7b69ecd7d5381d8d8047627bdb"}, - {file = "websockets-13.0.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:ca48914cdd9f2ccd94deab5bcb5ac98025a5ddce98881e5cce762854a5de330b"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b74593e9acf18ea5469c3edaa6b27fa7ecf97b30e9dabd5a94c4c940637ab96e"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:132511bfd42e77d152c919147078460c88a795af16b50e42a0bd14f0ad71ddd2"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:165bedf13556f985a2aa064309baa01462aa79bf6112fbd068ae38993a0e1f1b"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e801ca2f448850685417d723ec70298feff3ce4ff687c6f20922c7474b4746ae"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30d3a1f041360f029765d8704eae606781e673e8918e6b2c792e0775de51352f"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67648f5e50231b5a7f6d83b32f9c525e319f0ddc841be0de64f24928cd75a603"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4f0426d51c8f0926a4879390f53c7f5a855e42d68df95fff6032c82c888b5f36"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ef48e4137e8799998a343706531e656fdec6797b80efd029117edacb74b0a10a"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:249aab278810bee585cd0d4de2f08cfd67eed4fc75bde623be163798ed4db2eb"}, - {file = "websockets-13.0.1-cp38-cp38-win32.whl", hash = "sha256:06c0a667e466fcb56a0886d924b5f29a7f0886199102f0a0e1c60a02a3751cb4"}, - {file = "websockets-13.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1f3cf6d6ec1142412d4535adabc6bd72a63f5f148c43fe559f06298bc21953c9"}, - {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1fa082ea38d5de51dd409434edc27c0dcbd5fed2b09b9be982deb6f0508d25bc"}, - {file = 
"websockets-13.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a365bcb7be554e6e1f9f3ed64016e67e2fa03d7b027a33e436aecf194febb63"}, - {file = "websockets-13.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10a0dc7242215d794fb1918f69c6bb235f1f627aaf19e77f05336d147fce7c37"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59197afd478545b1f73367620407b0083303569c5f2d043afe5363676f2697c9"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d20516990d8ad557b5abeb48127b8b779b0b7e6771a265fa3e91767596d7d97"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1a2e272d067030048e1fe41aa1ec8cfbbaabce733b3d634304fa2b19e5c897f"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ad327ac80ba7ee61da85383ca8822ff808ab5ada0e4a030d66703cc025b021c4"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:518f90e6dd089d34eaade01101fd8a990921c3ba18ebbe9b0165b46ebff947f0"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68264802399aed6fe9652e89761031acc734fc4c653137a5911c2bfa995d6d6d"}, - {file = "websockets-13.0.1-cp39-cp39-win32.whl", hash = "sha256:a5dc0c42ded1557cc7c3f0240b24129aefbad88af4f09346164349391dea8e58"}, - {file = "websockets-13.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b448a0690ef43db5ef31b3a0d9aea79043882b4632cfc3eaab20105edecf6097"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:faef9ec6354fe4f9a2c0bbb52fb1ff852effc897e2a4501e25eb3a47cb0a4f89"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:03d3f9ba172e0a53e37fa4e636b86cc60c3ab2cfee4935e66ed1d7acaa4625ad"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:d450f5a7a35662a9b91a64aefa852f0c0308ee256122f5218a42f1d13577d71e"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f55b36d17ac50aa8a171b771e15fbe1561217510c8768af3d546f56c7576cdc"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14b9c006cac63772b31abbcd3e3abb6228233eec966bf062e89e7fa7ae0b7333"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b79915a1179a91f6c5f04ece1e592e2e8a6bd245a0e45d12fd56b2b59e559a32"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f40de079779acbcdbb6ed4c65af9f018f8b77c5ec4e17a4b737c05c2db554491"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e4ba642fc87fa532bac07e5ed7e19d56940b6af6a8c61d4429be48718a380f"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a02b0161c43cc9e0232711eff846569fad6ec836a7acab16b3cf97b2344c060"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aa74a45d4cdc028561a7d6ab3272c8b3018e23723100b12e58be9dfa5a24491"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00fd961943b6c10ee6f0b1130753e50ac5dcd906130dcd77b0003c3ab797d026"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d93572720d781331fb10d3da9ca1067817d84ad1e7c31466e9f5e59965618096"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:71e6e5a3a3728886caee9ab8752e8113670936a193284be9d6ad2176a137f376"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:c4a6343e3b0714e80da0b0893543bf9a5b5fa71b846ae640e56e9abc6fbc4c83"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a678532018e435396e37422a95e3ab87f75028ac79570ad11f5bf23cd2a7d8c"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6716c087e4aa0b9260c4e579bb82e068f84faddb9bfba9906cb87726fa2e870"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e33505534f3f673270dd67f81e73550b11de5b538c56fe04435d63c02c3f26b5"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acab3539a027a85d568c2573291e864333ec9d912675107d6efceb7e2be5d980"}, - {file = "websockets-13.0.1-py3-none-any.whl", hash = "sha256:b80f0c51681c517604152eb6a572f5a9378f877763231fddb883ba2f968e8817"}, - {file = "websockets-13.0.1.tar.gz", hash = "sha256:4d6ece65099411cfd9a48d13701d7438d9c34f479046b34c50ff60bb8834e43e"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, + {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, + {file = 
"websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, + {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, + {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, + {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, + {file = 
"websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, + {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, + {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, + {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, + {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, + {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, + {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, + {file = 
"websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, + {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, + {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, + {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, + {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, + {file = 
"websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, + {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, + {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, + {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, + {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, + {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, + {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, + {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, + {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, ] [[package]] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/octopus/pyproject.toml b/integrations/octopus/pyproject.toml index e419ac2889..89b08a7154 100644 --- a/integrations/octopus/pyproject.toml +++ b/integrations/octopus/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "octopus" -version = "0.1.10" +version = "0.1.11-beta" description = "This integration ingest data from octopus deploy" authors = ["Adebayo Iyanuoluwa "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = 
"^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # Uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/opencost/CHANGELOG.md b/integrations/opencost/CHANGELOG.md index 5ba67f6f49..acee0c7d8b 100644 --- a/integrations/opencost/CHANGELOG.md +++ b/integrations/opencost/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.79 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.78 (2024-09-17) diff --git a/integrations/opencost/poetry.lock b/integrations/opencost/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/opencost/poetry.lock +++ b/integrations/opencost/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/opencost/pyproject.toml b/integrations/opencost/pyproject.toml index 969ba5e60d..bfb4d4ad4f 100644 --- a/integrations/opencost/pyproject.toml +++ b/integrations/opencost/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "opencost" -version = "0.1.78" +version = "0.1.79" description = "Ocean integration for OpenCost" authors = ["Isaac Coffie "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/opsgenie/CHANGELOG.md b/integrations/opsgenie/CHANGELOG.md index ae83e5658d..52ceae1e59 100644 --- a/integrations/opsgenie/CHANGELOG.md +++ b/integrations/opsgenie/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.2.3 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.2.2 (2024-09-17) diff --git a/integrations/opsgenie/poetry.lock 
b/integrations/opsgenie/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/opsgenie/poetry.lock +++ b/integrations/opsgenie/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/opsgenie/pyproject.toml b/integrations/opsgenie/pyproject.toml index db90f60888..d96831f25a 100644 --- a/integrations/opsgenie/pyproject.toml +++ b/integrations/opsgenie/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "opsgenie" -version = "0.2.2" +version = "0.2.3" description = "Ocean integration for OpsGenie" authors = ["Isaac Coffie "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/pagerduty/CHANGELOG.md b/integrations/pagerduty/CHANGELOG.md index 
adfd550326..dff3e07435 100644 --- a/integrations/pagerduty/CHANGELOG.md +++ b/integrations/pagerduty/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.103 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.102 (2024-09-17) diff --git a/integrations/pagerduty/poetry.lock b/integrations/pagerduty/poetry.lock index af83a9d08d..5fceea55db 100644 --- a/integrations/pagerduty/poetry.lock +++ b/integrations/pagerduty/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "6fd97423bb47a8540b3a5473b01a734a226f5dcbb468411868de2e08f83c32df" +content-hash = "8f280a0291e9f75e4ee41b1703b68344239f84f73a802d72f65deee61f356856" diff --git a/integrations/pagerduty/pyproject.toml b/integrations/pagerduty/pyproject.toml index 700af22131..0d9fe44f6b 100644 --- a/integrations/pagerduty/pyproject.toml +++ b/integrations/pagerduty/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "pagerduty" -version = "0.1.102" +version = "0.1.103" description = "Pagerduty Integration" authors = ["Port Team "] 
[tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} httpx = "^0.27.0" [tool.poetry.group.dev.dependencies] diff --git a/integrations/sentry/CHANGELOG.md b/integrations/sentry/CHANGELOG.md index 7091a8adbb..150c1a49de 100644 --- a/integrations/sentry/CHANGELOG.md +++ b/integrations/sentry/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.79 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.78 (2024-09-17) diff --git a/integrations/sentry/poetry.lock b/integrations/sentry/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/sentry/poetry.lock +++ b/integrations/sentry/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/sentry/pyproject.toml b/integrations/sentry/pyproject.toml index b223b3a8b6..f6efb967c0 100644 --- a/integrations/sentry/pyproject.toml +++ b/integrations/sentry/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "sentry" -version = "0.1.78" +version = "0.1.79" description = "Sentry Integration" authors = ["Dvir Segev ","Matan Geva "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/servicenow/CHANGELOG.md b/integrations/servicenow/CHANGELOG.md index 751e88cc43..81dd6600c7 100644 --- a/integrations/servicenow/CHANGELOG.md +++ b/integrations/servicenow/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.69 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.68 (2024-09-17) diff --git a/integrations/servicenow/poetry.lock 
b/integrations/servicenow/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/servicenow/poetry.lock +++ b/integrations/servicenow/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/servicenow/pyproject.toml b/integrations/servicenow/pyproject.toml index b75d8c6f29..7001e35995 100644 --- a/integrations/servicenow/pyproject.toml +++ b/integrations/servicenow/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "servicenow" -version = "0.1.68" +version = "0.1.69" description = "Service Now Ocean Integration" authors = ["Isaac Coffie "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/snyk/CHANGELOG.md b/integrations/snyk/CHANGELOG.md index 
09e547489b..a90664e5d5 100644 --- a/integrations/snyk/CHANGELOG.md +++ b/integrations/snyk/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.89 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.88 (2024-09-17) diff --git a/integrations/snyk/poetry.lock b/integrations/snyk/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/snyk/poetry.lock +++ b/integrations/snyk/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/snyk/pyproject.toml b/integrations/snyk/pyproject.toml index 9ed5720890..9e86b7c182 100644 --- a/integrations/snyk/pyproject.toml +++ b/integrations/snyk/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "snyk" -version = "0.1.88" +version = "0.1.89" description = "Snyk integration powered by Ocean" authors = ["Isaac Coffie "] [tool.poetry.dependencies] python = "^3.11" 
-port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/sonarqube/CHANGELOG.md b/integrations/sonarqube/CHANGELOG.md index 481983c707..c8aaf3ee6f 100644 --- a/integrations/sonarqube/CHANGELOG.md +++ b/integrations/sonarqube/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.96 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.95 (2024-09-19) diff --git a/integrations/sonarqube/poetry.lock b/integrations/sonarqube/poetry.lock index 125fac666b..6f519d6605 100644 --- a/integrations/sonarqube/poetry.lock +++ b/integrations/sonarqube/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "d3ccf11a06d6e0c3cdfad160f4d3cdd59b34d440c3ba61e4e5b756d00101c988" +content-hash = "206809f0c8cc2acdc790d758e56be78a1dc90a19af08f6b76a167acc511ec456" diff --git a/integrations/sonarqube/pyproject.toml b/integrations/sonarqube/pyproject.toml index e7b1b932d6..c6509ce38b 100644 --- a/integrations/sonarqube/pyproject.toml +++ b/integrations/sonarqube/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "sonarqube" -version = "0.1.95" +version = "0.1.96" description = "SonarQube projects and code quality analysis integration" authors = ["Port Team "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} rich = "^13.5.2" cookiecutter = "^2.3.0" diff --git a/integrations/statuspage/CHANGELOG.md b/integrations/statuspage/CHANGELOG.md index f3a3ddaa57..d491bba474 100644 --- a/integrations/statuspage/CHANGELOG.md +++ b/integrations/statuspage/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.28 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.27 (2024-09-17) diff --git a/integrations/statuspage/poetry.lock b/integrations/statuspage/poetry.lock index 
f7f7586f32..d24f2ec583 100644 --- a/integrations/statuspage/poetry.lock +++ b/integrations/statuspage/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/statuspage/pyproject.toml b/integrations/statuspage/pyproject.toml index 7afe717447..501377c26a 100644 --- a/integrations/statuspage/pyproject.toml +++ b/integrations/statuspage/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "statuspage" -version = "0.1.27" +version = "0.1.28" description = "Connect Statuspage to Ocean and automatically ingest incidents, updates, and impacted components for comprehensive monitoring" authors = ["Albert Luganga "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git 
a/integrations/terraform-cloud/CHANGELOG.md b/integrations/terraform-cloud/CHANGELOG.md index d353e95b16..a6a3e17327 100644 --- a/integrations/terraform-cloud/CHANGELOG.md +++ b/integrations/terraform-cloud/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.67 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.66 (2024-09-17) diff --git a/integrations/terraform-cloud/poetry.lock b/integrations/terraform-cloud/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/terraform-cloud/poetry.lock +++ b/integrations/terraform-cloud/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/terraform-cloud/pyproject.toml b/integrations/terraform-cloud/pyproject.toml index 27d60a8172..bc89ff4eb5 100644 --- a/integrations/terraform-cloud/pyproject.toml +++ b/integrations/terraform-cloud/pyproject.toml @@ -1,12 
+1,12 @@ [tool.poetry] name = "terraform-cloud" -version = "0.1.66" +version = "0.1.67" description = "Terraform Cloud Integration for Port" authors = ["Michael Armah "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/integrations/wiz/CHANGELOG.md b/integrations/wiz/CHANGELOG.md index 4e0fbcdd61..35a87db7d3 100644 --- a/integrations/wiz/CHANGELOG.md +++ b/integrations/wiz/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.60 (2024-09-22) + + +### Improvements + +- Bumped ocean version to ^0.10.12 + + ## 0.1.59 (2024-09-17) diff --git a/integrations/wiz/poetry.lock b/integrations/wiz/poetry.lock index f7f7586f32..d24f2ec583 100644 --- a/integrations/wiz/poetry.lock +++ b/integrations/wiz/poetry.lock @@ -885,13 +885,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "port-ocean" -version = "0.10.11" +version = "0.10.12" description = "Port Ocean is a CLI tool for managing your Port projects." 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "port_ocean-0.10.11-py3-none-any.whl", hash = "sha256:09881fdf7a836d5946b07b0255b757e96e375df497d724e8d12458c23879ef38"}, - {file = "port_ocean-0.10.11.tar.gz", hash = "sha256:9f214f1895e2a35dd50c448616cf7d2e32d3c70205d79a546ab612a7be0f62c3"}, + {file = "port_ocean-0.10.12-py3-none-any.whl", hash = "sha256:1d848c6c95fc6f9af0ef9a41b5664ae41c927c23a3f4448b81e4ef235100ed66"}, + {file = "port_ocean-0.10.12.tar.gz", hash = "sha256:fd3f7ee8c534bc1d24efa13fe47e8091997971d624676625126e3ad938b26f16"}, ] [package.dependencies] @@ -1790,4 +1790,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "12324544c3e97d143b991fa81cfef87ba7ad1ea424732b037a20feba460faf99" +content-hash = "945e51cc5bbe37a397281700f72e7f627761e84a02665eb53fafe489e330a2fa" diff --git a/integrations/wiz/pyproject.toml b/integrations/wiz/pyproject.toml index ad630cfd6c..f0db78707b 100644 --- a/integrations/wiz/pyproject.toml +++ b/integrations/wiz/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "wiz" -version = "0.1.59" +version = "0.1.60" description = "Wiz Port integration in Ocean" authors = ["Albert Luganga "] [tool.poetry.dependencies] python = "^3.11" -port_ocean = {version = "^0.10.11", extras = ["cli"]} +port_ocean = {version = "^0.10.12", extras = ["cli"]} [tool.poetry.group.dev.dependencies] # uncomment this if you want to debug the ocean core together with your integration diff --git a/scripts/bump-all.sh b/scripts/bump-all.sh index 32f2715d2e..180806bcaf 100755 --- a/scripts/bump-all.sh +++ b/scripts/bump-all.sh @@ -3,7 +3,7 @@ SCRIPT_BASE="$(cd -P "$(dirname "$0")" && pwd)" ROOT_DIR="$(cd -P "${SCRIPT_BASE}/../" && pwd)" CURRENT_DIR=$(pwd) -VERSION="^$(cd "${ROOT_DIR}" && source ./.venv/bin/activate && poetry version --short)" +VERSION="^${1:-$(poetry search port-ocean | grep port-ocean | sed 's/.*(\(.*\))/\1/')}" echo "Going to bump ocean core to 
version ${VERSION} for all integrations" From 82eb44d14abbf1cbff8cf3f99b9aa50065224281 Mon Sep 17 00:00:00 2001 From: erikzaadi Date: Sun, 22 Sep 2024 11:24:32 +0300 Subject: [PATCH 06/11] Cleanup fixture for integrations should be more fault tolerant --- port_ocean/tests/helpers/fixtures.py | 30 ++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/port_ocean/tests/helpers/fixtures.py b/port_ocean/tests/helpers/fixtures.py index 712847c389..200c4ea195 100644 --- a/port_ocean/tests/helpers/fixtures.py +++ b/port_ocean/tests/helpers/fixtures.py @@ -2,6 +2,7 @@ from typing import Any, AsyncGenerator, Callable, List, Tuple, Union import pytest_asyncio +from loguru import logger from pydantic import BaseModel from port_ocean.clients.port.client import PortClient @@ -33,15 +34,28 @@ def get_port_client_for_integration( async def cleanup_integration(client: PortClient, blueprints: List[str]) -> None: for blueprint in blueprints: - bp = await client.get_blueprint(blueprint) - if bp is not None: - migration_id = await client.delete_blueprint( - identifier=blueprint, delete_entities=True - ) - if migration_id: - await client.wait_for_migration_to_complete(migration_id=migration_id) + try: + bp = await client.get_blueprint(blueprint) + if bp is not None: + migration_id = await client.delete_blueprint( + identifier=blueprint, delete_entities=True + ) + if migration_id: + await client.wait_for_migration_to_complete( + migration_id=migration_id + ) + except Exception as bp_e: + logger.info(f"Skipping missing blueprint ({blueprint}): {bp_e}") headers = await client.auth.headers() - await client.client.delete(f"{client.auth.api_url}/integrations", headers=headers) + try: + await client.client.delete( + f"{client.auth.api_url}/integrations/{client.integration_identifier}", + headers=headers, + ) + except Exception as int_e: + logger.info( + f"Failed to delete integration ({client.integration_identifier}): {int_e}" + ) class 
SmokeTestDetails(BaseModel): From b741b23aef056545f139e0e157fa9b0765646775 Mon Sep 17 00:00:00 2001 From: erikzaadi Date: Sun, 22 Sep 2024 11:21:38 +0300 Subject: [PATCH 07/11] Return smoke test, cleanup detect-changes --- .github/workflows/core-test.yml | 72 ++++++++++++++++ .github/workflows/detect-changes-matrix.yml | 58 ++++++++----- .github/workflows/integrations-test.yml | 50 +++++++++++ .github/workflows/lint.yml | 4 + .github/workflows/test.yml | 92 --------------------- 5 files changed, 162 insertions(+), 114 deletions(-) create mode 100644 .github/workflows/core-test.yml create mode 100644 .github/workflows/integrations-test.yml delete mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/core-test.yml b/.github/workflows/core-test.yml new file mode 100644 index 0000000000..054f05ccdd --- /dev/null +++ b/.github/workflows/core-test.yml @@ -0,0 +1,72 @@ +name: 🌊 Ocean Core Tests + +on: + pull_request: + workflow_dispatch: + +jobs: + detect-changes: + uses: ./.github/workflows/detect-changes-matrix.yml + test: + name: 🌊 Ocean Core Tests + needs: detect-changes + runs-on: ubuntu-latest + if: ${{ needs.detect-changes.outputs.core == 'true' }} + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install dependencies + run: | + make install + + - name: Build core for smoke test + run: | + make build + + - name: Run fake integration for core test + env: + PORT_CLIENT_ID: ${{ secrets.PORT_CLIENT_ID }} + PORT_CLIENT_SECRET: ${{ secrets.PORT_CLIENT_SECRET }} + PORT_BASE_URL: ${{ secrets.PORT_BASE_URL }} + SMOKE_TEST_SUFFIX: ${{ github.run_id }} + run: | + ./scripts/run-smoke-test.sh + + - name: Unit and Smoke Test Core + env: + PYTEST_ADDOPTS: --junitxml=junit/test-results-ocean/core.xml + PORT_CLIENT_ID: ${{ secrets.PORT_CLIENT_ID }} + PORT_CLIENT_SECRET: ${{ 
secrets.PORT_CLIENT_SECRET }} + PORT_BASE_URL: ${{ secrets.PORT_BASE_URL }} + SMOKE_TEST_SUFFIX: ${{ github.run_id }} + run: | + make test + + - name: Install current core for all integrations + run: | + echo "Installing local core for all integrations" + SCRIPT_TO_RUN='make install/local-core' make execute/all + + - name: Test all integrations with current core + run: | + echo "Testing all integrations with local core" + SCRIPT_TO_RUN="PYTEST_ADDOPTS=--junitxml=${PWD}/junit/test-results-core-change/\`pwd | xargs basename\`.xml make test" make execute/all + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v4 + if: ${{ always() }} + with: + report_paths: '**/junit/test-results-**/*.xml' + include_passed: true + require_tests: true + fail_on_failure: true diff --git a/.github/workflows/detect-changes-matrix.yml b/.github/workflows/detect-changes-matrix.yml index 91380f3d89..42d84b6446 100644 --- a/.github/workflows/detect-changes-matrix.yml +++ b/.github/workflows/detect-changes-matrix.yml @@ -4,39 +4,53 @@ on: outputs: matrix: value: ${{ jobs.detect-changes.outputs.matrix }} + description: "Matrix of changed integrations / Ocean Core per git commit changes" + integrations: description: "Matrix of changed integrations per git commit changes" + value: ${{ jobs.detect-changes.outputs.integrations }} + core: + value: ${{ jobs.detect-changes.outputs.core }} + description: "Determine if any core changes per git commit changes" jobs: detect-changes: name: Detect changes runs-on: ubuntu-latest outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} + matrix: ${{ steps.set-all-matrix.outputs.matrix }} + integrations: ${{ steps.set-all-matrix.outputs.integrations }} + core: ${{ steps.set-all-matrix.outputs.core }} steps: - name: Checkout Repo uses: actions/checkout@v4 - name: Get list of changed files id: changed-files - uses: tj-actions/changed-files@v44.5.7 - - name: Set matrix - id: set-matrix - run: | - 
folders_to_ignore="integrations/*/LICENSE.md|integrations/*/README.md|integrations/*/CONTRIBUTING.md|integrations/*/CHANGELOG.md" - changed_folders=$(echo "${{ steps.changed-files.outputs.all_changed_files }}" \ - | tr ' ' '\n' | grep '^integrations/' | grep -v '^($folders_to_ignore)' \ - | egrep -v '_infra' \ - | cut -d'/' -f2 | sort | uniq) - if [ -z "$changed_folders" ]; then - changed_folders="" - fi - - folders_to_ignore="integrations/|scripts/|assets/|docs/|LICENSE.md|README.md|CONTRIBUTING.md|CHANGELOG.md" - other_changes=$(echo "${{ steps.changed-files.outputs.all_changed_files }}" | tr ' ' '\n' | grep -v '^($folders_to_ignore)' | wc -l) - if [ "$other_changes" -ne 0 ]; then - # Add the root directory to the matrix if there are changes outside the integrations folder - changed_folders=$(echo -e "$changed_folders\n.") - fi + uses: tj-actions/changed-files@v45.0.2 + with: + dir_names: true + json: true + dir_names_max_depth: 2 + escape_json: false + files_yaml: | + core: + - '!integrations/**' + - '!scripts/*' + - '!scripts/*' + - '!./*.md' + integrations: + - 'integrations/**' + - '!integrations/**/*.md' - matrix=$(echo "$changed_folders" | jq -R -s -c 'split("\n") | map(select(length > 0))') - echo "matrix=$matrix" >> $GITHUB_OUTPUT + - name: Set integrations and all matrix + id: set-all-matrix + run: | + INTEGRATIONS=$(node -e 'integrations=${{ steps.changed-files.outputs.integrations_all_changed_files }};console.log(JSON.stringify(integrations.map(integration => integration.split("/")[1])))') + HAS_CORE=${{ steps.changed-files.outputs.core_all_changed_files != '[]' }} + echo "Core changes : ${HAS_CORE}" + MATRIX=$(node -e "integrations=${INTEGRATIONS}; hasCore=${HAS_CORE}; console.log(JSON.stringify(hasCore ? 
integrations.concat(['.']) : integrations))") + echo "Integration changes : ${INTEGRATIONS}" + echo "All changes : ${MATRIX}" + echo "core=${HAS_CORE}" >> $GITHUB_OUTPUT + echo "integrations=${INTEGRATIONS}" >> $GITHUB_OUTPUT + echo "matrix=${MATRIX}" >> $GITHUB_OUTPUT diff --git a/.github/workflows/integrations-test.yml b/.github/workflows/integrations-test.yml new file mode 100644 index 0000000000..f212a41fc9 --- /dev/null +++ b/.github/workflows/integrations-test.yml @@ -0,0 +1,50 @@ +name: Integrations Test + +on: + pull_request: + workflow_dispatch: + +jobs: + detect-changes: + uses: ./.github/workflows/detect-changes-matrix.yml + test: + name: ${{ format('🚢 {0}', matrix.folder) }} + needs: detect-changes + runs-on: ubuntu-latest + if: ${{ needs.detect-changes.outputs.integrations != '[]' }} + strategy: + matrix: + folder: ${{ fromJson(needs.detect-changes.outputs.integrations) }} + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install dependencies + working-directory: ${{ format('integrations/{0}', matrix.folder) }} + run: | + make install + + - name: Test + working-directory: ${{ format('integrations/{0}', matrix.folder) }} + env: + PYTEST_ADDOPTS: --junitxml=junit/test-results-${{ format('integrations/{0}', matrix.folder) }}.xml + run: | + make test + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v4 + if: ${{ always() }} + with: + report_paths: '**/junit/test-results-**/*.xml' + include_passed: true + require_tests: true + fail_on_failure: true diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 5de7346245..a837ce05e4 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -17,17 +17,21 @@ jobs: steps: - name: Checkout Repo uses: actions/checkout@v4 + - name: Install poetry run: pipx install poetry + - 
name: Set up Python 3.11 uses: actions/setup-python@v5 with: python-version: '3.11' cache: 'poetry' + - name: Install dependencies working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} run: | make install + - name: Lint working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} run: | diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 9d8fd11724..0000000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Test - -on: - pull_request: - workflow_dispatch: - -jobs: - detect-changes: - uses: ./.github/workflows/detect-changes-matrix.yml - test: - name: ${{ matrix.folder == '.' && '🌊 Ocean Core' || format('🚢 {0}', matrix.folder) }} - needs: detect-changes - runs-on: ubuntu-latest - strategy: - matrix: - folder: ${{ fromJson(needs.detect-changes.outputs.matrix) }} - steps: - - name: Checkout Repo - uses: actions/checkout@v4 - - name: Install poetry - run: pipx install poetry - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: '3.11' - cache: 'poetry' - - name: Install dependencies - working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} - run: | - make install - - # # Core only actions - # - name: Build core for smoke test - # if: ${{ matrix.folder == '.' }} - # working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} - # run: | - # make build - - # - name: Run fake integration for core test - # working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} - # if: ${{ matrix.folder == '.' 
}} - # env: - # PORT_CLIENT_ID: ${{ secrets.PORT_CLIENT_ID }} - # PORT_CLIENT_SECRET: ${{ secrets.PORT_CLIENT_SECRET }} - # PORT_BASE_URL: ${{ secrets.PORT_BASE_URL }} - # SMOKE_TEST_SUFFIX: ${{ github.run_id }} - # run: | - # ./scripts/run-smoke-test.sh - - # - name: Test Core - # if: ${{ matrix.folder == '.' }} - # working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} - # env: - # PYTEST_ADDOPTS: --junitxml=junit/test-results-${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || 'ocean/core' }}.xml - # PORT_CLIENT_ID: ${{ secrets.PORT_CLIENT_ID }} - # PORT_CLIENT_SECRET: ${{ secrets.PORT_CLIENT_SECRET }} - # PORT_BASE_URL: ${{ secrets.PORT_BASE_URL }} - # SMOKE_TEST_SUFFIX: ${{ github.run_id }} - # run: | - # make test - - - name: Install current core for all integrations - working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} - if: ${{ matrix.folder == '.' }} - run: | - echo "Installing local core for all integrations" - SCRIPT_TO_RUN='make install/local-core' make execute/all - - name: Test all integrations with current core - working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} - if: ${{ matrix.folder == '.' }} - run: | - echo "Testing all integrations with local core" - SCRIPT_TO_RUN="PYTEST_ADDOPTS=--junitxml=${PWD}/junit/test-results-core-change/\`pwd | xargs basename\`.xml make test" make execute/all - - # Integration step - - name: Test - # if: ${{ matrix.folder != '.' }} - working-directory: ${{ matrix.folder != '.' && format('integrations/{0}', matrix.folder) || '.' }} - env: - PYTEST_ADDOPTS: --junitxml=junit/test-results-${{ matrix.folder != '.' 
&& format('integrations/{0}', matrix.folder) || 'ocean/core' }}.xml - run: | - make test - - # Generic - - name: Publish Test Report - uses: mikepenz/action-junit-report@v4 - if: ${{ always() }} - with: - report_paths: '**/junit/test-results-**/*.xml' - include_passed: true - require_tests: true - fail_on_failure: true From 50514c748bfc022f9a3006aaef69009c1ab87f97 Mon Sep 17 00:00:00 2001 From: erikzaadi Date: Sun, 22 Sep 2024 18:57:16 +0300 Subject: [PATCH 08/11] Reduce max parallel matrixes --- .github/workflows/ci.yml | 1 + .github/workflows/create-new-sonarcloud-project.yml | 1 + .github/workflows/integrations-test.yml | 1 + .github/workflows/lint.yml | 1 + .github/workflows/release-integrations.yml | 2 +- .github/workflows/sonarcloud-integrations.yml | 1 + 6 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c57d221781..067c6bba2b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -47,6 +47,7 @@ jobs: contents: read needs: [prepare-matrix] strategy: + max-parallel: 5 matrix: integration: ${{fromJson(needs.prepare-matrix.outputs.matrix)}} steps: diff --git a/.github/workflows/create-new-sonarcloud-project.yml b/.github/workflows/create-new-sonarcloud-project.yml index 8518a61b38..fbf9fda0b6 100644 --- a/.github/workflows/create-new-sonarcloud-project.yml +++ b/.github/workflows/create-new-sonarcloud-project.yml @@ -42,6 +42,7 @@ jobs: runs-on: ubuntu-latest name: Create new project for integrations strategy: + max-parallel: 5 matrix: ${{ fromJson(needs.pre-run.outputs.changed_integrations) }} steps: - name: Create integration variable diff --git a/.github/workflows/integrations-test.yml b/.github/workflows/integrations-test.yml index f212a41fc9..a645eb6828 100644 --- a/.github/workflows/integrations-test.yml +++ b/.github/workflows/integrations-test.yml @@ -13,6 +13,7 @@ jobs: runs-on: ubuntu-latest if: ${{ needs.detect-changes.outputs.integrations != '[]' }} strategy: + 
max-parallel: 5 matrix: folder: ${{ fromJson(needs.detect-changes.outputs.integrations) }} steps: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index a837ce05e4..f73f50c903 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,6 +12,7 @@ jobs: needs: detect-changes runs-on: ubuntu-latest strategy: + max-parallel: 5 matrix: folder: ${{ fromJson(needs.detect-changes.outputs.matrix) }} steps: diff --git a/.github/workflows/release-integrations.yml b/.github/workflows/release-integrations.yml index 87a478a527..f79a312754 100644 --- a/.github/workflows/release-integrations.yml +++ b/.github/workflows/release-integrations.yml @@ -57,7 +57,7 @@ jobs: needs: [prepare-matrix] strategy: # limit the number of parallel jobs to avoid hitting the ghcr.io rate limit - max-parallel: 15 + max-parallel: 5 matrix: integration: ${{fromJson(needs.prepare-matrix.outputs.matrix)}} steps: diff --git a/.github/workflows/sonarcloud-integrations.yml b/.github/workflows/sonarcloud-integrations.yml index 63fe26bf95..62d4224fae 100644 --- a/.github/workflows/sonarcloud-integrations.yml +++ b/.github/workflows/sonarcloud-integrations.yml @@ -39,6 +39,7 @@ jobs: needs: pre-run if: ${{ needs.pre-run.outputs.count_changed_integrations > 0 }} strategy: + max-parallel: 5 matrix: ${{ fromJson(needs.pre-run.outputs.changed_integrations) }} runs-on: ubuntu-latest steps: From a49d0b22162544013e986484e63d08bd2ccfdaf6 Mon Sep 17 00:00:00 2001 From: hpal Date: Mon, 23 Sep 2024 21:50:26 +0100 Subject: [PATCH 09/11] [AWS] Fix: Handle NoneType response in is_access_denied_exception (#1046) --- integrations/aws/CHANGELOG.md | 5 +++++ integrations/aws/pyproject.toml | 2 +- integrations/aws/tests/test_sample.py | 2 -- integrations/aws/tests/test_utils.py | 27 +++++++++++++++++++++++++++ integrations/aws/utils/misc.py | 2 +- 5 files changed, 34 insertions(+), 4 deletions(-) delete mode 100644 integrations/aws/tests/test_sample.py create mode 100644 
integrations/aws/tests/test_utils.py diff --git a/integrations/aws/CHANGELOG.md b/integrations/aws/CHANGELOG.md index 0a6acdce3d..d3d1c5ca9f 100644 --- a/integrations/aws/CHANGELOG.md +++ b/integrations/aws/CHANGELOG.md @@ -6,6 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## 0.2.42 (2024-09-24) + +### Bug Fixes + +- Fixes an issue where `is_access_denied_exception` could raise an `AttributeError` if `e.response` is `None`. ## 0.2.41 (2024-09-22) diff --git a/integrations/aws/pyproject.toml b/integrations/aws/pyproject.toml index 92c43837a3..ded2a4facf 100644 --- a/integrations/aws/pyproject.toml +++ b/integrations/aws/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws" -version = "0.2.41" +version = "0.2.42" description = "This integration will map all your resources in all the available accounts to your Port entities" authors = ["Shalev Avhar ", "Erik Zaadi "] diff --git a/integrations/aws/tests/test_sample.py b/integrations/aws/tests/test_sample.py deleted file mode 100644 index dc80e299c8..0000000000 --- a/integrations/aws/tests/test_sample.py +++ /dev/null @@ -1,2 +0,0 @@ -def test_example() -> None: - assert 1 == 1 diff --git a/integrations/aws/tests/test_utils.py b/integrations/aws/tests/test_utils.py new file mode 100644 index 0000000000..45104a90ad --- /dev/null +++ b/integrations/aws/tests/test_utils.py @@ -0,0 +1,27 @@ +from utils.misc import is_access_denied_exception +from typing import Optional, Dict, Any + + +class MockException(Exception): + def __init__(self, response: Optional[Dict[str, Any]]) -> None: + self.response = response + + +def test_access_denied_exception_with_response() -> None: + e = MockException(response={"Error": {"Code": "AccessDenied"}}) + assert is_access_denied_exception(e) + + +def test_access_denied_exception_without_response() -> None: + e = MockException(response=None) + assert not 
is_access_denied_exception(e) + + +def test_access_denied_exception_with_other_error() -> None: + e = MockException(response={"Error": {"Code": "SomeOtherError"}}) + assert not is_access_denied_exception(e) + + +def test_access_denied_exception_no_response_attribute() -> None: + e = Exception("Test exception") + assert not is_access_denied_exception(e) diff --git a/integrations/aws/utils/misc.py b/integrations/aws/utils/misc.py index fe7dd9541a..bcc3285456 100644 --- a/integrations/aws/utils/misc.py +++ b/integrations/aws/utils/misc.py @@ -25,7 +25,7 @@ def is_access_denied_exception(e: Exception) -> bool: "UnauthorizedOperation", ] - if hasattr(e, "response"): + if hasattr(e, "response") and e.response is not None: error_code = e.response.get("Error", {}).get("Code") return error_code in access_denied_error_codes From c3e2f026e6cd9218104ef90b25e2fd5075d746ae Mon Sep 17 00:00:00 2001 From: PagesCoffy Date: Wed, 25 Sep 2024 13:14:35 +0000 Subject: [PATCH 10/11] [Integration][Gitlab] - Introduce Pagination and Run Code in Async (#1047) --- integrations/gitlab/CHANGELOG.md | 8 + .../events/event_handler.py | 13 +- .../gitlab/gitlab_integration/events/setup.py | 74 ++++---- .../gitlab_integration/gitlab_service.py | 86 ++++++--- .../gitlab/gitlab_integration/ocean.py | 32 +++- integrations/gitlab/pyproject.toml | 2 +- .../test_gitlab_service_webhook.py | 179 +++++++----------- 7 files changed, 204 insertions(+), 190 deletions(-) diff --git a/integrations/gitlab/CHANGELOG.md b/integrations/gitlab/CHANGELOG.md index 5222f3fd8b..2ea4f97483 100644 --- a/integrations/gitlab/CHANGELOG.md +++ b/integrations/gitlab/CHANGELOG.md @@ -7,6 +7,14 @@ this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +0.1.124 (2024-09-24) +==================== + +### Improvements + +- Added more logs and implemented the webhook creation in async (0.1.124) + + 0.1.123 (2024-09-22) ==================== diff --git 
a/integrations/gitlab/gitlab_integration/events/event_handler.py b/integrations/gitlab/gitlab_integration/events/event_handler.py index ce2664f3a4..81689dacd8 100644 --- a/integrations/gitlab/gitlab_integration/events/event_handler.py +++ b/integrations/gitlab/gitlab_integration/events/event_handler.py @@ -69,17 +69,14 @@ def on(self, events: list[str], observer: Observer) -> None: self._observers[event].append(observer) async def _notify(self, event_id: str, body: dict[str, Any]) -> None: - observers = asyncio.gather( - *( - observer(event_id, body) - for observer in self._observers.get(event_id, []) - ) - ) - - if not observers: + observers_list = self._observers.get(event_id, []) + if not observers_list: logger.info( f"event: {event_id} has no matching handler. the handlers available are for events: {self._observers.keys()}" ) + return + + await asyncio.gather(*(observer(event_id, body) for observer in observers_list)) class SystemEventHandler(BaseEventHandler): diff --git a/integrations/gitlab/gitlab_integration/events/setup.py b/integrations/gitlab/gitlab_integration/events/setup.py index 069d36647d..53cae4e792 100644 --- a/integrations/gitlab/gitlab_integration/events/setup.py +++ b/integrations/gitlab/gitlab_integration/events/setup.py @@ -115,7 +115,7 @@ def validate_hooks_override_config( validate_groups_hooks_events(groups_paths) -def setup_listeners(gitlab_service: GitlabService, webhook_id: str) -> None: +def setup_listeners(gitlab_service: GitlabService, group_id: str) -> None: handlers = [ PushHook(gitlab_service), MergeRequest(gitlab_service), @@ -127,9 +127,9 @@ def setup_listeners(gitlab_service: GitlabService, webhook_id: str) -> None: ] for handler in handlers: logger.info( - f"Setting up listeners for webhook {webhook_id} for group mapping {gitlab_service.group_mapping}" + f"Setting up listeners {handler.events} for group {group_id} for group mapping {gitlab_service.group_mapping}" ) - event_ids = [f"{event_name}:{webhook_id}" for event_name in 
handler.events] + event_ids = [f"{event_name}:{group_id}" for event_name in handler.events] event_handler.on(event_ids, handler.on_hook) @@ -144,13 +144,14 @@ def setup_system_listeners(gitlab_clients: list[GitlabService]) -> None: ProjectFiles, ] for handler in handlers: + logger.info(f"Setting up system listeners {handler.system_events}") system_event_handler.on(handler) for gitlab_service in gitlab_clients: system_event_handler.add_client(gitlab_service) -def create_webhooks_by_client( +async def create_webhooks_by_client( gitlab_host: str, app_host: str, token: str, @@ -160,16 +161,16 @@ def create_webhooks_by_client( gitlab_client = Gitlab(gitlab_host, token) gitlab_service = GitlabService(gitlab_client, app_host, group_mapping) - groups_for_webhooks = gitlab_service.get_filtered_groups_for_webhooks( + groups_for_webhooks = await gitlab_service.get_filtered_groups_for_webhooks( list(groups_hooks_events_override.keys()) if groups_hooks_events_override else None ) - webhooks_ids: list[str] = [] + groups_ids_with_webhooks: list[str] = [] for group in groups_for_webhooks: - webhook_id = gitlab_service.create_webhook( + group_id = await gitlab_service.create_webhook( group, ( groups_hooks_events_override.get( @@ -180,13 +181,13 @@ def create_webhooks_by_client( ), ) - if webhook_id: - webhooks_ids.append(webhook_id) + if group_id: + groups_ids_with_webhooks.append(group_id) - return gitlab_service, webhooks_ids + return gitlab_service, groups_ids_with_webhooks -def setup_application( +async def setup_application( token_mapping: dict[str, list[str]], gitlab_host: str, app_host: str, @@ -196,6 +197,7 @@ def setup_application( validate_token_mapping(token_mapping) if use_system_hook: + logger.info("Using system hook") validate_use_system_hook(token_mapping) token, group_mapping = list(token_mapping.items())[0] gitlab_client = Gitlab(gitlab_host, token) @@ -203,38 +205,44 @@ def setup_application( setup_system_listeners([gitlab_service]) else: + logger.info("Using 
group hooks") validate_hooks_override_config( token_mapping, token_group_override_hooks_mapping ) - client_to_webhooks: list[tuple[GitlabService, list[str]]] = [] + client_to_group_ids_with_webhooks: list[tuple[GitlabService, list[str]]] = [] for token, group_mapping in token_mapping.items(): - if not token_group_override_hooks_mapping: - client_to_webhooks.append( - create_webhooks_by_client( - gitlab_host, - app_host, - token, - None, - group_mapping, - ) - ) - else: - groups = token_group_override_hooks_mapping.tokens.get( - token, WebhookTokenConfig(groups=[]) - ).groups - if groups: - client_to_webhooks.append( - create_webhooks_by_client( + try: + if not token_group_override_hooks_mapping: + client_to_group_ids_with_webhooks.append( + await create_webhooks_by_client( gitlab_host, app_host, token, - groups, + None, group_mapping, ) ) + else: + groups = token_group_override_hooks_mapping.tokens.get( + token, WebhookTokenConfig(groups=[]) + ).groups + if groups: + client_to_group_ids_with_webhooks.append( + await create_webhooks_by_client( + gitlab_host, + app_host, + token, + groups, + group_mapping, + ) + ) + except Exception as e: + logger.exception( + f"Failed to create webhooks for group mapping {group_mapping}, error: {e}" + ) - for client, webhook_ids in client_to_webhooks: - for webhook_id in webhook_ids: - setup_listeners(client, webhook_id) + for client, group_ids in client_to_group_ids_with_webhooks: + for group_id in group_ids: + setup_listeners(client, group_id) diff --git a/integrations/gitlab/gitlab_integration/gitlab_service.py b/integrations/gitlab/gitlab_integration/gitlab_service.py index 0da6542ba4..05e0657db1 100644 --- a/integrations/gitlab/gitlab_integration/gitlab_service.py +++ b/integrations/gitlab/gitlab_integration/gitlab_service.py @@ -19,6 +19,7 @@ ProjectFile, ProjectPipeline, ProjectPipelineJob, + Hook, ) from gitlab_integration.core.async_fetcher import AsyncFetcher from gitlab_integration.core.entities import 
generate_entity_from_port_yaml @@ -63,22 +64,34 @@ def __init__( GITLAB_SEARCH_RATE_LIMIT * 0.95, 60 ) - def _get_webhook_for_group(self, group: RESTObject) -> RESTObject | None: + async def get_group_hooks(self, group: RESTObject) -> AsyncIterator[List[Hook]]: + async for hooks_batch in AsyncFetcher.fetch_batch(group.hooks.list): + hooks = typing.cast(List[Hook], hooks_batch) + yield hooks + + async def _get_webhook_for_group(self, group: RESTObject) -> RESTObject | None: webhook_url = f"{self.app_host}/integration/hook/{group.get_id()}" - for hook in group.hooks.list(iterator=True): - if hook.url == webhook_url: - return hook + logger.info( + f"Getting webhook for group {group.get_id()} with url {webhook_url}" + ) + async for hook_batch in self.get_group_hooks(group): + for hook in hook_batch: + if hook.url == webhook_url: + logger.info( + f"Found webhook for group {group.get_id()} with id {hook.id} and url {hook.url}" + ) + return hook return None - def _delete_group_webhook(self, group: RESTObject, hook_id: int) -> None: + async def _delete_group_webhook(self, group: RESTObject, hook_id: int) -> None: logger.info(f"Deleting webhook with id {hook_id} in group {group.get_id()}") try: - group.hooks.delete(hook_id) + await AsyncFetcher.fetch_single(group.hooks.delete, hook_id) logger.info(f"Deleted webhook for {group.get_id()}") except Exception as e: logger.error(f"Failed to delete webhook for {group.get_id()} error={e}") - def _create_group_webhook( + async def _create_group_webhook( self, group: RESTObject, events: list[str] | None ) -> None: webhook_events = { @@ -87,20 +100,23 @@ def _create_group_webhook( } logger.info( - f"Creating webhook for {group.get_id()} with events: {[event for event in webhook_events if webhook_events[event]]}" + f"Creating webhook for group {group.get_id()} with events: {[event for event in webhook_events if webhook_events[event]]}" ) try: - resp = group.hooks.create( + resp = await AsyncFetcher.fetch_single( + group.hooks.create, { 
"url": f"{self.app_host}/integration/hook/{group.get_id()}", **webhook_events, - } + }, ) logger.info( - f"Created webhook for {group.get_id()}, id={resp.id}, url={resp.url}" + f"Created webhook for group {group.get_id()}, webhook id={resp.id}, url={resp.url}" ) except Exception as e: - logger.error(f"Failed to create webhook for {group.get_id()} error={e}") + logger.exception( + f"Failed to create webhook for group {group.get_id()} error={e}" + ) def _get_changed_files_between_commits( self, project_id: int, head: str @@ -253,14 +269,27 @@ def should_process_project( return True return project.name in repos - def get_root_groups(self) -> List[Group]: - groups = self.gitlab_client.groups.list(iterator=True) + async def get_root_groups(self) -> List[Group]: + groups: list[RESTObject] = [] + async for groups_batch in AsyncFetcher.fetch_batch( + self.gitlab_client.groups.list, retry_transient_errors=True + ): + groups_batch = typing.cast(List[RESTObject], groups_batch) + groups.extend(groups_batch) + return typing.cast( List[Group], [group for group in groups if group.parent_id is None] ) - def filter_groups_by_paths(self, groups_full_paths: list[str]) -> List[Group]: - groups = self.gitlab_client.groups.list(get_all=True) + async def filter_groups_by_paths(self, groups_full_paths: list[str]) -> List[Group]: + groups: list[RESTObject] = [] + + async for groups_batch in AsyncFetcher.fetch_batch( + self.gitlab_client.groups.list, retry_transient_errors=True + ): + groups_batch = typing.cast(List[RESTObject], groups_batch) + groups.extend(groups_batch) + return typing.cast( List[Group], [ @@ -270,7 +299,7 @@ def filter_groups_by_paths(self, groups_full_paths: list[str]) -> List[Group]: ], ) - def get_filtered_groups_for_webhooks( + async def get_filtered_groups_for_webhooks( self, groups_hooks_override_list: list[str] | None, ) -> List[Group]: @@ -278,9 +307,9 @@ def get_filtered_groups_for_webhooks( if groups_hooks_override_list is not None: if 
groups_hooks_override_list: logger.info( - "Getting all the specified groups in the mapping for a token to create their webhooks" + f"Getting all the specified groups in the mapping for a token to create their webhooks for: {groups_hooks_override_list}" ) - groups_for_webhooks = self.filter_groups_by_paths( + groups_for_webhooks = await self.filter_groups_by_paths( groups_hooks_override_list ) @@ -302,7 +331,7 @@ def get_filtered_groups_for_webhooks( ) else: logger.info("Getting all the root groups to create their webhooks") - root_groups = self.get_root_groups() + root_groups = await self.get_root_groups() groups_for_webhooks = [ group for group in root_groups @@ -316,16 +345,18 @@ def get_filtered_groups_for_webhooks( return groups_for_webhooks - def create_webhook(self, group: Group, events: list[str] | None) -> str | None: + async def create_webhook( + self, group: Group, events: list[str] | None + ) -> str | None: logger.info(f"Creating webhook for the group: {group.attributes['full_path']}") - webhook_id = None group_id = group.get_id() if group_id is None: logger.info(f"Group {group.attributes['full_path']} has no id. skipping...") + return None else: - hook = self._get_webhook_for_group(group) + hook = await self._get_webhook_for_group(group) if hook: logger.info(f"Webhook already exists for group {group.get_id()}") @@ -333,14 +364,13 @@ def create_webhook(self, group: Group, events: list[str] | None) -> str | None: logger.info( f"Webhook exists for group {group.get_id()} but is disabled, deleting and re-creating..." 
) - self._delete_group_webhook(group, hook.id) - self._create_group_webhook(group, events) + await self._delete_group_webhook(group, hook.id) + await self._create_group_webhook(group, events) logger.info(f"Webhook re-created for group {group.get_id()}") else: - self._create_group_webhook(group, events) - webhook_id = str(group_id) + await self._create_group_webhook(group, events) - return webhook_id + return str(group_id) def create_system_hook(self) -> None: logger.info("Checking if system hook already exists") diff --git a/integrations/gitlab/gitlab_integration/ocean.py b/integrations/gitlab/gitlab_integration/ocean.py index 12e5e9949f..8ed72c113b 100644 --- a/integrations/gitlab/gitlab_integration/ocean.py +++ b/integrations/gitlab/gitlab_integration/ocean.py @@ -27,6 +27,16 @@ PROJECT_RESYNC_BATCH_SIZE = 10 +async def start_processors() -> None: + """Helper function to start the event processors.""" + try: + logger.info("Starting event processors") + await event_handler.start_event_processor() + await system_event_handler.start_event_processor() + except Exception as e: + logger.exception(f"Failed to start event processors: {e}") + + @ocean.router.post("/hook/{group_id}") async def handle_webhook_request(group_id: str, request: Request) -> dict[str, Any]: event_id = f"{request.headers.get('X-Gitlab-Event')}:{group_id}" @@ -79,8 +89,14 @@ async def on_start() -> None: if not integration_config.get("app_host"): logger.warning( - f"No app host provided, skipping webhook creation. {NO_WEBHOOK_WARNING}" + f"No app host provided, skipping webhook creation. {NO_WEBHOOK_WARNING}. Starting the event processors" ) + try: + await start_processors() + except Exception as e: + logger.exception( + f"Failed to start event processors: {e}. 
{NO_WEBHOOK_WARNING}" + ) return token_webhook_mapping: WebhookMappingConfig | None = None @@ -91,21 +107,19 @@ async def on_start() -> None: ) try: - setup_application( + await setup_application( integration_config["token_mapping"], integration_config["gitlab_host"], integration_config["app_host"], integration_config["use_system_hook"], token_webhook_mapping, ) - - await event_handler.start_event_processor() - await system_event_handler.start_event_processor() except Exception as e: - logger.warning( - f"Failed to setup webhook: {e}. {NO_WEBHOOK_WARNING}", - stack_info=True, - ) + logger.exception(f"Failed to setup webhook: {e}. {NO_WEBHOOK_WARNING}") + try: + await start_processors() # Ensure event processors are started regardless of webhook setup + except Exception as e: + logger.exception(f"Failed to start event processors: {e}. {NO_WEBHOOK_WARNING}") @ocean.on_resync(ObjectKind.GROUP) diff --git a/integrations/gitlab/pyproject.toml b/integrations/gitlab/pyproject.toml index d919a377dc..7d0ca910f0 100644 --- a/integrations/gitlab/pyproject.toml +++ b/integrations/gitlab/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gitlab" -version = "0.1.123" +version = "0.1.124" description = "Gitlab integration for Port using Port-Ocean Framework" authors = ["Yair Siman-Tov "] diff --git a/integrations/gitlab/tests/gitlab_integration/test_gitlab_service_webhook.py b/integrations/gitlab/tests/gitlab_integration/test_gitlab_service_webhook.py index 354caadfbb..f3656de006 100644 --- a/integrations/gitlab/tests/gitlab_integration/test_gitlab_service_webhook.py +++ b/integrations/gitlab/tests/gitlab_integration/test_gitlab_service_webhook.py @@ -1,150 +1,107 @@ -from unittest.mock import MagicMock -from typing import Any +from unittest.mock import MagicMock, AsyncMock from gitlab_integration.gitlab_service import GitlabService +import pytest -def test_get_webhook_for_group_found(mocked_gitlab_service: GitlabService) -> None: +@pytest.mark.asyncio +async def 
test_delete_group_webhook_success( + mocked_gitlab_service: GitlabService, +) -> None: # Arrange mock_group = MagicMock() - mock_group.get_id.return_value = 456 - mock_webhook_url = "http://example.com/integration/hook/456" + mock_group.get_id.return_value = 123 + mock_group.hooks.delete = AsyncMock() # Mock successful deletion + + # Mock the group hooks.list method to return a webhook mock_hook = MagicMock() - mock_hook.url = mock_webhook_url - mock_hook.id = 984 - mock_group.hooks.list.return_value = [mock_hook] + mock_hook.url = "http://example.com/integration/hook/456" + mock_hook.id = 17 # Act - result = mocked_gitlab_service._get_webhook_for_group(mock_group) + await mocked_gitlab_service._delete_group_webhook(mock_group, mock_hook.id) # Assert - assert result == mock_hook - mock_group.hooks.list.assert_called_once_with(iterator=True) + mock_group.hooks.delete.assert_called_once_with(mock_hook.id) -def test_get_webhook_for_group_not_found(mocked_gitlab_service: GitlabService) -> None: +@pytest.mark.asyncio +async def test_delete_group_webhook_failure( + mocked_gitlab_service: GitlabService, +) -> None: # Arrange mock_group = MagicMock() - mock_group.get_id.return_value = 789 - mock_hook = MagicMock() - mock_hook.url = "http://example.com/other/hook" - mock_group.hooks.list.return_value = [mock_hook] + mock_group.get_id.return_value = 123 + mock_group.hooks.delete = AsyncMock(side_effect=Exception("Delete failed")) + mock_hook = MagicMock() + mock_hook.url = "http://example.com/integration/hook/456" + mock_hook.id = 17 # Act - result = mocked_gitlab_service._get_webhook_for_group(mock_group) + await mocked_gitlab_service._delete_group_webhook(mock_group, mock_hook.id) # Assert - assert result is None - mock_group.hooks.list.assert_called_once_with(iterator=True) + mock_group.hooks.delete.assert_called_once_with(mock_hook.id) -def test_create_webhook_when_webhook_exists_but_disabled( - mocked_gitlab_service: GitlabService, monkeypatch: Any -): 
+@pytest.mark.asyncio +async def test_create_group_webhook_success( + mocked_gitlab_service: GitlabService, +) -> None: # Arrange mock_group = MagicMock() - mock_group.get_id.return_value = 456 - mock_group.attributes = {"full_path": "group2"} - - # Mock the group hooks.list method to return an existing disabled webhook - mock_hook = MagicMock() - mock_hook.url = "http://example.com/integration/hook/456" # Updated URL for clarity - mock_hook.alert_status = "disabled" - mock_hook.id = 456 - mock_group.hooks.list.return_value = [mock_hook] - - # Mock the methods for deleting and creating webhooks - mock_delete_webhook = MagicMock() - monkeypatch.setattr( - mocked_gitlab_service, "_delete_group_webhook", mock_delete_webhook - ) - mock_create_webhook = MagicMock() - monkeypatch.setattr( - mocked_gitlab_service, "_create_group_webhook", mock_create_webhook + mock_group.get_id.return_value = 123 + mock_group.hooks.create = AsyncMock( + return_value=MagicMock(id=789, url="http://example.com/hook/123") ) # Act - webhook_id = mocked_gitlab_service.create_webhook( - mock_group, events=["push", "merge_request"] + await mocked_gitlab_service._create_group_webhook( + mock_group, ["push_events", "merge_requests_events"] ) # Assert - assert webhook_id == "456" - mock_delete_webhook.assert_called_once_with( - mock_group, mock_hook.id - ) # Ensure delete method is called - mock_create_webhook.assert_called_once_with( - mock_group, ["push", "merge_request"] - ) # Ensure create method is called with correct arguments - - -def test_create_webhook_when_webhook_exists_and_enabled( - mocked_gitlab_service: GitlabService, monkeypatch: Any -): - # Arrange - mock_group = MagicMock() - mock_group.get_id.return_value = 789 - mock_group.attributes = {"full_path": "group3"} - - # Mock the group hooks.list method to return an existing enabled webhook - mock_hook = MagicMock() - mock_hook.url = "http://example.com/integration/hook/789" - mock_hook.alert_status = "executable" - mock_hook.id = 789 
- mock_group.hooks.list.return_value = [mock_hook] - - # Mock the method for creating webhooks - mock_create_webhook = MagicMock() - monkeypatch.setattr( - mocked_gitlab_service, "_create_group_webhook", mock_create_webhook - ) - - # Act - webhook_id = mocked_gitlab_service.create_webhook( - mock_group, events=["push", "merge_request"] + mock_group.hooks.create.assert_called_once_with( + { + "url": "http://example.com/integration/hook/123", + "push_events": True, + "merge_requests_events": True, + "issues_events": False, + "job_events": False, + "pipeline_events": False, + "releases_events": False, + "tag_push_events": False, + "subgroup_events": False, + "confidential_issues_events": False, + } ) - # Assert - assert webhook_id == "789" - mock_create_webhook.assert_not_called() # Ensure no new webhook is created - -def test_create_webhook_when_no_webhook_exists( - mocked_gitlab_service: GitlabService, monkeypatch: Any -): +@pytest.mark.asyncio +async def test_create_group_webhook_failure( + mocked_gitlab_service: GitlabService, +) -> None: # Arrange mock_group = MagicMock() mock_group.get_id.return_value = 123 - mock_group.attributes = {"full_path": "group1"} - - # Mock the group hooks.list method to return no webhook - mock_group.hooks.list.return_value = [] + mock_group.hooks.create = AsyncMock(side_effect=Exception("Create failed")) # Act - webhook_id = mocked_gitlab_service.create_webhook( - mock_group, events=["push", "merge_request"] + await mocked_gitlab_service._create_group_webhook( + mock_group, ["push_events", "merge_requests_events"] ) # Assert - assert webhook_id == "123" - mock_group.hooks.create.assert_called_once() # A new webhook should be created - - -def test_delete_webhook(mocked_gitlab_service: GitlabService, monkeypatch: Any): - # Arrange - mock_group = MagicMock() - mock_group.get_id.return_value = 456 - mock_group.attributes = {"full_path": "group2"} - - # Mock the group hooks.list method to return a webhook - mock_hook = MagicMock() - 
mock_hook.url = "http://example.com/integration/hook/456" - mock_hook.id = 17 - mock_group.hooks.list.return_value = [mock_hook] - - # Act - mocked_gitlab_service._delete_group_webhook(mock_group, mock_hook.id) - - # Assert - mock_group.hooks.delete.assert_called_once_with( - mock_hook.id - ) # Ensure the webhook is deleted + mock_group.hooks.create.assert_called_once_with( + { + "url": "http://example.com/integration/hook/123", + "push_events": True, + "merge_requests_events": True, + "issues_events": False, + "job_events": False, + "pipeline_events": False, + "releases_events": False, + "tag_push_events": False, + "subgroup_events": False, + "confidential_issues_events": False, + } + ) From 174af67ecd8ad5c8c00f8e37e21887ac79513fcc Mon Sep 17 00:00:00 2001 From: Matan <51418643+matan84@users.noreply.github.com> Date: Wed, 25 Sep 2024 16:48:52 +0300 Subject: [PATCH 11/11] [Gitlab] Added verbosity to files (#1032) # Description What - added logs Why - better UX How - ## Type of change Please leave one option from the following and delete the rest: - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] New Integration (non-breaking change which adds a new integration) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) - [ ] Non-breaking change (fix of existing functionality that will not change current behavior) - [ ] Documentation (added/updated documentation)

All tests should be run against the port production environment (using a testing org).

### Core testing checklist - [ ] Integration able to create all default resources from scratch - [ ] Resync finishes successfully - [ ] Resync able to create entities - [ ] Resync able to update entities - [ ] Resync able to detect and delete entities - [ ] Scheduled resync able to abort existing resync and start a new one - [ ] Tested with at least 2 integrations from scratch - [ ] Tested with Kafka and Polling event listeners - [ ] Tested deletion of entities that don't pass the selector ### Integration testing checklist - [ ] Integration able to create all default resources from scratch - [ ] Resync able to create entities - [ ] Resync able to update entities - [ ] Resync able to detect and delete entities - [ ] Resync finishes successfully - [ ] If new resource kind is added or updated in the integration, add example raw data, mapping and expected result to the `examples` folder in the integration directory. - [ ] If resource kind is updated, run the integration with the example data and check if the expected result is achieved - [ ] If new resource kind is added or updated, validate that live-events for that resource are working as expected - [ ] Docs PR link [here](#) ### Preflight checklist - [ ] Handled rate limiting - [ ] Handled pagination - [ ] Implemented the code in async - [ ] Support Multi account ## Screenshots Include screenshots from your environment showing how the resources of the integration will look. ## API Documentation Provide links to the API documentation used for this integration. 
--- integrations/gitlab/CHANGELOG.md | 7 +++++++ integrations/gitlab/gitlab_integration/ocean.py | 4 ++++ integrations/gitlab/pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/integrations/gitlab/CHANGELOG.md b/integrations/gitlab/CHANGELOG.md index 2ea4f97483..092d45fca6 100644 --- a/integrations/gitlab/CHANGELOG.md +++ b/integrations/gitlab/CHANGELOG.md @@ -6,6 +6,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +0.1.125 (2024-09-25) +==================== + +### Improvements + +- Added log for when file kind's project iteration found a relevant project, and for when the batch entirely isn't relevant + 0.1.124 (2024-09-24) ==================== diff --git a/integrations/gitlab/gitlab_integration/ocean.py b/integrations/gitlab/gitlab_integration/ocean.py index 8ed72c113b..45bbde1784 100644 --- a/integrations/gitlab/gitlab_integration/ocean.py +++ b/integrations/gitlab/gitlab_integration/ocean.py @@ -208,9 +208,13 @@ async def resync_files(kind: str) -> ASYNC_GENERATOR_RESYNC_TYPE: for project in projects_batch if service.should_process_project(project, selector.files.repos) ] + if tasks: + logger.info(f"Found {len(tasks)} relevant projects in batch") async for batch in stream_async_iterators_tasks(*tasks): yield batch + else: + logger.info("No relevant projects were found in batch, skipping it") @ocean.on_resync(ObjectKind.MERGE_REQUEST) diff --git a/integrations/gitlab/pyproject.toml b/integrations/gitlab/pyproject.toml index 7d0ca910f0..084f52ffd7 100644 --- a/integrations/gitlab/pyproject.toml +++ b/integrations/gitlab/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gitlab" -version = "0.1.124" +version = "0.1.125" description = "Gitlab integration for Port using Port-Ocean Framework" authors = ["Yair Siman-Tov "]