From d3e12a3ea25c2d1436e74cd33ad5f7628ba35091 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com> Date: Fri, 15 Nov 2024 09:57:25 +0100 Subject: [PATCH 1/7] =?UTF-8?q?=F0=9F=90=9B=20Fix=20healthcheck=20in=20api?= =?UTF-8?q?-server=20(#6662)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/simcore_service_api_server/core/health_checker.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/core/health_checker.py b/services/api-server/src/simcore_service_api_server/core/health_checker.py index 8dcba7422ec..068b2d79f37 100644 --- a/services/api-server/src/simcore_service_api_server/core/health_checker.py +++ b/services/api-server/src/simcore_service_api_server/core/health_checker.py @@ -69,7 +69,9 @@ async def teardown(self): @property def healthy(self) -> bool: - return self._health_check_failure_count <= self._allowed_health_check_failures + return self._rabbit_client.healthy and ( + self._health_check_failure_count <= self._allowed_health_check_failures + ) # https://github.com/ITISFoundation/osparc-simcore/pull/6662 @property def health_check_failure_count(self) -> NonNegativeInt: @@ -82,9 +84,6 @@ async def _background_task_method(self): while self._dummy_queue.qsize() > 0: _ = self._dummy_queue.get_nowait() try: - if not self._rabbit_client.healthy: - self._increment_health_check_failure_count() - return await asyncio.wait_for( self._rabbit_client.publish( self._dummy_message.channel_name, self._dummy_message From 7e1ccbdc910137bd048df4d69dad0a76a78af793 Mon Sep 17 00:00:00 2001 From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com> Date: Fri, 15 Nov 2024 11:32:17 +0100 Subject: [PATCH 2/7] =?UTF-8?q?=E2=9C=A8=20add=20project=20tags=20to=20RUT?= =?UTF-8?q?=20listing/export=20(#6722)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 
--- .../service_runs.py | 1 + ...f83486be7_enhance_projects_tags_for_rut.py | 90 +++++++++++++++++++ .../models/projects_tags.py | 20 ++++- .../utils_tags_sql.py | 12 ++- .../tests/test_utils_tags.py | 2 + .../models/service_runs.py | 1 + .../db/repositories/resource_tracker.py | 33 ++++++- .../services/service_runs.py | 1 + .../tests/unit/with_dbs/conftest.py | 1 + .../simcore_service_webserver/projects/db.py | 5 +- .../test_usage_services__list.py | 1 + 11 files changed, 157 insertions(+), 10 deletions(-) create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/8e1f83486be7_enhance_projects_tags_for_rut.py diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py index 22a56b0da4e..72001f8b550 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/service_runs.py @@ -20,6 +20,7 @@ class ServiceRunGet(BaseModel): user_email: str project_id: ProjectID project_name: str + project_tags: list[str] node_id: NodeID node_name: str root_parent_project_id: ProjectID diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/8e1f83486be7_enhance_projects_tags_for_rut.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8e1f83486be7_enhance_projects_tags_for_rut.py new file mode 100644 index 00000000000..6c0d6608185 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8e1f83486be7_enhance_projects_tags_for_rut.py @@ -0,0 +1,90 @@ +"""enhance projects_tags for RUT + +Revision ID: 8e1f83486be7 +Revises: 8bfe65a5e294 +Create Date: 2024-11-15 09:12:57.789183+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "8e1f83486be7" +down_revision = "8bfe65a5e294" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "projects_tags", sa.Column("project_uuid_for_rut", sa.String(), nullable=True) + ) + + # Migrate + op.execute( + sa.DDL( + """ + UPDATE projects_tags + SET project_uuid_for_rut = projects.uuid + FROM projects + WHERE projects_tags.project_id = projects.id; + """ + ) + ) + + op.alter_column( + "projects_tags", + "project_uuid_for_rut", + existing_type=sa.String(), + nullable=False, + ) + op.alter_column( + "projects_tags", "project_id", existing_type=sa.BIGINT(), nullable=True + ) + op.drop_constraint( + "study_tags_study_id_tag_id_key", "projects_tags", type_="unique" + ) + op.create_unique_constraint( + "project_tags_project_uuid_unique", + "projects_tags", + ["project_uuid_for_rut", "tag_id"], + ) + op.drop_constraint("study_tags_study_id_fkey", "projects_tags", type_="foreignkey") + op.create_foreign_key( + "project_tags_project_id_fkey", + "projects_tags", + "projects", + ["project_id"], + ["id"], + onupdate="CASCADE", + ondelete="SET NULL", + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_constraint( + "project_tags_project_id_fkey", "projects_tags", type_="foreignkey" + ) + op.create_foreign_key( + "study_tags_study_id_fkey", + "projects_tags", + "projects", + ["project_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.drop_constraint( + "project_tags_project_uuid_unique", "projects_tags", type_="unique" + ) + op.create_unique_constraint( + "study_tags_study_id_tag_id_key", "projects_tags", ["project_id", "tag_id"] + ) + op.alter_column( + "projects_tags", "project_id", existing_type=sa.BIGINT(), nullable=False + ) + op.drop_column("projects_tags", "project_uuid_for_rut") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects_tags.py b/packages/postgres-database/src/simcore_postgres_database/models/projects_tags.py index 4ac88510e2d..223271872b7 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/projects_tags.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/projects_tags.py @@ -13,9 +13,14 @@ sa.Column( "project_id", sa.BigInteger, - sa.ForeignKey(projects.c.id, onupdate="CASCADE", ondelete="CASCADE"), - nullable=False, - doc="NOTE that project.c.id != project.c.uuid", + sa.ForeignKey( + projects.c.id, + onupdate="CASCADE", + ondelete="SET NULL", + name="project_tags_project_id_fkey", + ), + nullable=True, # <-- NULL means that project was deleted + doc="NOTE that project.c.id != project.c.uuid. If project is deleted, we do not delete project in this table, we just set this column to NULL. Why? 
Because the `project_uuid_for_rut` is still used by resource usage tracker", ), sa.Column( "tag_id", @@ -23,5 +28,12 @@ sa.ForeignKey(tags.c.id, onupdate="CASCADE", ondelete="CASCADE"), nullable=False, ), - sa.UniqueConstraint("project_id", "tag_id"), + sa.Column( + "project_uuid_for_rut", + sa.String, + nullable=False, + ), + sa.UniqueConstraint( + "project_uuid_for_rut", "tag_id", name="project_tags_project_uuid_unique" + ), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_tags_sql.py b/packages/postgres-database/src/simcore_postgres_database/utils_tags_sql.py index bd727a0dcc3..072a6bd2d67 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_tags_sql.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_tags_sql.py @@ -1,4 +1,5 @@ import functools +from uuid import UUID import sqlalchemy as sa from simcore_postgres_database.models.groups import user_to_groups @@ -60,7 +61,7 @@ def get_tag_stmt( # aggregation ensures MOST PERMISSIVE policy of access-rights sa.func.bool_or(tags_access_rights.c.read).label("read"), sa.func.bool_or(tags_access_rights.c.write).label("write"), - sa.func.bool_or(tags_access_rights.c.delete).label("delete") + sa.func.bool_or(tags_access_rights.c.delete).label("delete"), ) .select_from( _join_user_to_given_tag( @@ -80,7 +81,7 @@ def list_tags_stmt(*, user_id: int): # aggregation ensures MOST PERMISSIVE policy of access-rights sa.func.bool_or(tags_access_rights.c.read).label("read"), sa.func.bool_or(tags_access_rights.c.write).label("write"), - sa.func.bool_or(tags_access_rights.c.delete).label("delete") + sa.func.bool_or(tags_access_rights.c.delete).label("delete"), ) .select_from( _join_user_to_tags( @@ -104,7 +105,7 @@ def count_groups_with_given_access_rights_stmt( tag_id: int, read: bool | None, write: bool | None, - delete: bool | None + delete: bool | None, ): """ How many groups (from this user_id) are given EXACTLY these access permissions @@ -192,12 +193,15 @@ 
def get_tags_for_project_stmt(*, project_index: int): ) -def add_tag_to_project_stmt(*, project_index: int, tag_id: int): +def add_tag_to_project_stmt( + *, project_index: int, tag_id: int, project_uuid_for_rut: UUID +): return ( pg_insert(projects_tags) .values( project_id=project_index, tag_id=tag_id, + project_uuid_for_rut=f"{project_uuid_for_rut}", ) .on_conflict_do_nothing() ) diff --git a/packages/postgres-database/tests/test_utils_tags.py b/packages/postgres-database/tests/test_utils_tags.py index 26f9a301f76..1f7f882da0a 100644 --- a/packages/postgres-database/tests/test_utils_tags.py +++ b/packages/postgres-database/tests/test_utils_tags.py @@ -668,6 +668,7 @@ def _check(func_smt, **kwargs): user_id = 425 # 4 tag_id = 4 project_index = 1 + project_uuid = "106f8b4b-ffb6-459a-a27b-981c779e6d3f" service_key = "simcore/services/comp/isolve" service_version = "2.0.85" @@ -726,6 +727,7 @@ def _check(func_smt, **kwargs): add_tag_to_project_stmt, project_index=project_index, tag_id=tag_id, + project_uuid_for_rut=project_uuid, ) _check( diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/service_runs.py index 6bceaab4f8c..3ff9f66f8b6 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/service_runs.py @@ -101,6 +101,7 @@ class Config: class ServiceRunWithCreditsDB(ServiceRunDB): osparc_credits: Decimal | None transaction_status: CreditTransactionStatus | None + project_tags: list[str] class Config: orm_mode = True diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/repositories/resource_tracker.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/repositories/resource_tracker.py 
index 33a3e58d137..2301bf9e99f 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/repositories/resource_tracker.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/repositories/resource_tracker.py @@ -28,6 +28,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import PositiveInt +from simcore_postgres_database.models.projects_tags import projects_tags from simcore_postgres_database.models.resource_tracker_credit_transactions import ( resource_tracker_credit_transactions, ) @@ -46,6 +47,7 @@ from simcore_postgres_database.models.resource_tracker_service_runs import ( resource_tracker_service_runs, ) +from simcore_postgres_database.models.tags import tags from sqlalchemy.dialects.postgresql import ARRAY, INTEGER from .....exceptions.errors import ( @@ -212,6 +214,15 @@ async def get_service_run_by_id( return None return ServiceRunDB.from_orm(row) + _project_tags_subquery = ( + sa.select( + projects_tags.c.project_uuid_for_rut, + sa.func.array_agg(tags.c.name).label("project_tags"), + ) + .select_from(projects_tags.join(tags, projects_tags.c.tag_id == tags.c.id)) + .group_by(projects_tags.c.project_uuid_for_rut) + ).subquery("project_tags_subquery") + async def list_service_runs_by_product_and_user_and_wallet( self, product_name: ProductName, @@ -260,6 +271,10 @@ async def list_service_runs_by_product_and_user_and_wallet( resource_tracker_service_runs.c.missed_heartbeat_counter, resource_tracker_credit_transactions.c.osparc_credits, resource_tracker_credit_transactions.c.transaction_status, + sa.func.coalesce( + self._project_tags_subquery.c.project_tags, + sa.cast(sa.text("'{}'"), sa.ARRAY(sa.String)), + ).label("project_tags"), ) .select_from( resource_tracker_service_runs.join( @@ -273,6 +288,11 @@ async def list_service_runs_by_product_and_user_and_wallet( == 
resource_tracker_credit_transactions.c.service_run_id ), isouter=True, + ).join( + self._project_tags_subquery, + resource_tracker_service_runs.c.project_id + == self._project_tags_subquery.c.project_uuid_for_rut, + isouter=True, ) ) .where(resource_tracker_service_runs.c.product_name == product_name) @@ -436,7 +456,9 @@ async def export_service_runs_table_to_s3( resource_tracker_service_runs.c.service_run_id, resource_tracker_service_runs.c.wallet_name, resource_tracker_service_runs.c.user_email, - resource_tracker_service_runs.c.project_name, + resource_tracker_service_runs.c.root_parent_project_name.label( + "project_name" + ), resource_tracker_service_runs.c.node_name, resource_tracker_service_runs.c.service_key, resource_tracker_service_runs.c.service_version, @@ -445,6 +467,10 @@ async def export_service_runs_table_to_s3( resource_tracker_service_runs.c.stopped_at, resource_tracker_credit_transactions.c.osparc_credits, resource_tracker_credit_transactions.c.transaction_status, + sa.func.coalesce( + self._project_tags_subquery.c.project_tags, + sa.cast(sa.text("'{}'"), sa.ARRAY(sa.String)), + ).label("project_tags"), ) .select_from( resource_tracker_service_runs.join( @@ -452,6 +478,11 @@ async def export_service_runs_table_to_s3( resource_tracker_service_runs.c.service_run_id == resource_tracker_credit_transactions.c.service_run_id, isouter=True, + ).join( + self._project_tags_subquery, + resource_tracker_service_runs.c.project_id + == self._project_tags_subquery.c.project_uuid_for_rut, + isouter=True, ) ) .where(resource_tracker_service_runs.c.product_name == product_name) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py index 782b084c789..a963b8340df 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py +++ 
b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py @@ -121,6 +121,7 @@ async def list_service_runs( user_email=service.user_email, project_id=service.project_id, project_name=service.project_name, + project_tags=service.project_tags, root_parent_project_id=service.root_parent_project_id, root_parent_project_name=service.root_parent_project_name, node_id=service.node_id, diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py b/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py index 581952e1100..032b64a10fc 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py @@ -51,6 +51,7 @@ def mock_env(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: "SC_BOOT_MODE": "production", "POSTGRES_CLIENT_NAME": "postgres_test_client", "RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED": "0", + "RESOURCE_USAGE_TRACKER_TRACING": "null", } setenvs_from_dict(monkeypatch, env_vars) return env_vars diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/db.py index 2281b807a71..5554bee1d18 100644 --- a/services/web/server/src/simcore_service_webserver/projects/db.py +++ b/services/web/server/src/simcore_service_webserver/projects/db.py @@ -403,7 +403,9 @@ async def list_projects( # pylint: disable=too-many-arguments,too-many-statemen sa.select( projects_tags.c.project_id, sa.func.array_agg(projects_tags.c.tag_id).label("tags"), - ).group_by(projects_tags.c.project_id) + ) + .where(projects_tags.c.project_id.is_not(None)) + .group_by(projects_tags.c.project_id) ).subquery("project_tags_subquery") ### @@ -1218,6 +1220,7 @@ async def add_tag( projects_tags.insert().values( project_id=project["id"], tag_id=tag_id, + project_uuid_for_rut=project["uuid"], ) ) project_tags.append(tag_id) diff --git 
a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py index 33b9d9146f5..9c8a29f2b6c 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py @@ -35,6 +35,7 @@ "user_email": "name@email.testing", "project_id": "5c2110be-441b-11ee-a0e8-02420a000040", "project_name": "osparc", + "project_tags": [], "node_id": "3d2133f4-aba4-4364-9f7a-9377dea1221f", "node_name": "sleeper", "root_parent_project_id": "5c2110be-441b-11ee-a0e8-02420a000040", From e1d940acdff4d80022ba172d6bcfa4737bda0221 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Fri, 15 Nov 2024 12:09:12 +0100 Subject: [PATCH 3/7] =?UTF-8?q?=E2=9C=A8=20[Frontend]=20Feature:=20Announc?= =?UTF-8?q?ement=20generator=20(#6723)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/admin/AdminCenter.js | 10 +- .../class/osparc/admin/Announcements.js | 159 ++++++++++++++++++ .../source/class/osparc/data/Permissions.js | 4 +- 3 files changed, 170 insertions(+), 3 deletions(-) create mode 100644 services/static-webserver/client/source/class/osparc/admin/Announcements.js diff --git a/services/static-webserver/client/source/class/osparc/admin/AdminCenter.js b/services/static-webserver/client/source/class/osparc/admin/AdminCenter.js index ae2d101f753..36329e85b97 100644 --- a/services/static-webserver/client/source/class/osparc/admin/AdminCenter.js +++ b/services/static-webserver/client/source/class/osparc/admin/AdminCenter.js @@ -28,6 +28,7 @@ qx.Class.define("osparc.admin.AdminCenter", { this.__addPricingPlansPage(); this.__addMaintenancePage(); + this.__addAnnouncementsPage(); }, members: { @@ -43,6 +44,13 @@ qx.Class.define("osparc.admin.AdminCenter", { const 
iconSrc = "@FontAwesome5Solid/wrench/22"; const maintenance = new osparc.admin.Maintenance(); this.addTab(title, iconSrc, maintenance); - } + }, + + __addAnnouncementsPage: function() { + const title = this.tr("Announcements"); + const iconSrc = "@FontAwesome5Solid/bullhorn/22"; + const announcements = new osparc.admin.Announcements(); + this.addTab(title, iconSrc, announcements); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/admin/Announcements.js b/services/static-webserver/client/source/class/osparc/admin/Announcements.js new file mode 100644 index 00000000000..84ca9f99079 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/admin/Announcements.js @@ -0,0 +1,159 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2023 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.admin.Announcements", { + extend: osparc.po.BaseView, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "create-announcement": + control = this.__createAnnouncements(); + this._add(control); + break; + case "announcement-container": { + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + this._add(control, { + flex: 1 + }); + break; + } + } + return control || this.base(arguments, id); + }, + + _buildLayout: function() { + this.getChildControl("create-announcement"); + this.getChildControl("announcement-container"); + }, + + __createAnnouncements: function() { + const announcementGroupBox = osparc.po.BaseView.createGroupBox(this.tr("Create announcement")); + + const announcementForm = this.__createAnnouncementForm(); + const form = new qx.ui.form.renderer.Single(announcementForm); + 
announcementGroupBox.add(form); + + return announcementGroupBox; + }, + + __createAnnouncementForm: function() { + const form = new qx.ui.form.Form(); + + const title = new qx.ui.form.TextField().set({ + placeholder: this.tr("title") + }); + form.add(title, this.tr("Title")); + + const description = new qx.ui.form.TextArea().set({ + placeholder: this.tr("description"), + maxHeight: 60 + }); + form.add(description, this.tr("Description")); + + const link = new qx.ui.form.TextField().set({ + placeholder: this.tr("link") + }); + form.add(link, this.tr("Link")); + + const widgetLogin = new qx.ui.form.CheckBox().set({ + value: false + }); + form.add(widgetLogin, this.tr("Login")); + + const widgetRibbon = new qx.ui.form.CheckBox().set({ + value: false + }); + form.add(widgetRibbon, this.tr("Ribbon")); + + const widgetUserMenu = new qx.ui.form.CheckBox().set({ + value: false + }); + form.add(widgetUserMenu, this.tr("User Menu")); + + const dateFormat = new qx.util.format.DateFormat("dd/MM/yyyy"); + const now = new Date(); + + const start = new qx.ui.form.DateField(); + start.setDateFormat(dateFormat); + start.setValue(now); + form.add(start, this.tr("Start")); + + const end = new qx.ui.form.DateField(); + end.setDateFormat(dateFormat); + end.setValue(now); + form.add(end, this.tr("End")); + + const generateAnnouncementBtn = new osparc.ui.form.FetchButton(this.tr("Generate")); + generateAnnouncementBtn.set({appearance: "form-button"}); + generateAnnouncementBtn.addListener("execute", () => { + const products = [osparc.product.Utils.getProductName()]; + const widgets = []; + if (widgetLogin.getValue()) { + widgets.push("login"); + } + if (widgetRibbon.getValue()) { + widgets.push("ribbon"); + } + if (widgetUserMenu.getValue()) { + widgets.push("user-menu"); + } + if (widgets.length === 0) { + const msg = "Select at least one widget"; + osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + } + const announcementData = { + "id": osparc.utils.Utils.uuidV4(), + 
"products": products, + "title": title.getValue() ? title.getValue() : "", + "description": description.getValue() ? description.getValue() : "", + "link": link.getValue() ? link.getValue() : "", + "widgets": widgets, + "start": start.getValue(), + "end": end.getValue(), + }; + this.__populateAnnouncementLayout(announcementData); + }, this); + form.addButton(generateAnnouncementBtn); + + return form; + }, + + __populateAnnouncementLayout: function(announcementData) { + const vBox = this.getChildControl("announcement-container"); + vBox.removeAll(); + + const announcementField = new qx.ui.form.TextArea(JSON.stringify(announcementData)).set({ + readOnly: true + }); + vBox.add(announcementField); + + const copyAnnouncementBtn = new qx.ui.form.Button(this.tr("Copy announcement")).set({ + alignX: "left", + allowGrowX: false, + }); + copyAnnouncementBtn.set({appearance: "form-button"}); + copyAnnouncementBtn.addListener("execute", () => { + if (osparc.utils.Utils.copyTextToClipboard(JSON.stringify(announcementData))) { + copyAnnouncementBtn.setIcon("@FontAwesome5Solid/check/12"); + } + }); + vBox.add(copyAnnouncementBtn); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/data/Permissions.js b/services/static-webserver/client/source/class/osparc/data/Permissions.js index 6f02e870608..44d00471160 100644 --- a/services/static-webserver/client/source/class/osparc/data/Permissions.js +++ b/services/static-webserver/client/source/class/osparc/data/Permissions.js @@ -289,11 +289,11 @@ qx.Class.define("osparc.data.Permissions", { }, isProductOwner: function() { - return ["admin", "product_owner"].includes(this.getRole()); + return this.getRole() === "product_owner"; }, isAdmin: function() { - return ["admin"].includes(this.getRole()); + return this.getRole() === "admin"; }, } }); From fd6a4360e3e3c07c4827480ddab24d64e0c350b1 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Fri, 15 Nov 2024 13:47:19 +0100 
Subject: [PATCH 4/7] =?UTF-8?q?=F0=9F=90=9B=20[e2e-playwright]=20Fix=20tes?= =?UTF-8?q?ts=20(#6731)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/e2e-playwright/tests/conftest.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index 0a90d6fd746..c50cdeba6c2 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -454,6 +454,9 @@ def _( open_button.click() lrt_data = lrt.value.json() lrt_data = lrt_data["data"] + if is_product_billable: + # Open project with default resources + page.get_by_test_id("openWithResources").click() with log_context( logging.INFO, "Copying template data", @@ -489,9 +492,9 @@ def wait_for_done(response): ... else: open_button.click() - if is_product_billable: - # Open project with default resources - page.get_by_test_id("openWithResources").click() + if is_product_billable: + # Open project with default resources + page.get_by_test_id("openWithResources").click() project_data = response_info.value.json() assert project_data project_uuid = project_data["data"]["uuid"] From 9915ebd195043616617467eac75a142ff562f43a Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:27:55 +0100 Subject: [PATCH 5/7] =?UTF-8?q?=F0=9F=90=9B=20[e2e-playwright]=20Fix=20tes?= =?UTF-8?q?ts,=202nd=20attempt=20(#6735)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/e2e-playwright/tests/conftest.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index c50cdeba6c2..a8a1f8d979d 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -447,6 +447,10 @@ def _( if press_open: open_button = 
page.get_by_test_id("openResource") if template_id is not None: + if is_product_billable: + open_button.click() + # Open project with default resources + open_button = page.get_by_test_id("openWithResources") # it returns a Long Running Task with page.expect_response( re.compile(rf"/projects\?from_study\={template_id}") @@ -454,9 +458,6 @@ def _( open_button.click() lrt_data = lrt.value.json() lrt_data = lrt_data["data"] - if is_product_billable: - # Open project with default resources - page.get_by_test_id("openWithResources").click() with log_context( logging.INFO, "Copying template data", From 7ec1d25a5d30c132a5c131e6e4f8787b561568de Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Mon, 18 Nov 2024 07:59:35 +0100 Subject: [PATCH 6/7] =?UTF-8?q?=E2=9C=A8Computational=20backend:=20refacto?= =?UTF-8?q?ring=20of=20dv-2=20computational=20scheduler=20(Part=202)=20(#6?= =?UTF-8?q?711)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/models_library/clusters.py | 2 +- .../modules/comp_scheduler/__init__.py | 5 +- .../modules/comp_scheduler/_base_scheduler.py | 200 ++++++++++-------- .../modules/comp_scheduler/_dask_scheduler.py | 104 ++++++--- .../comp_scheduler/_scheduler_factory.py | 32 +-- ...t_modules_comp_scheduler_dask_scheduler.py | 44 ++-- 6 files changed, 216 insertions(+), 171 deletions(-) diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py index 1856dc5c287..5fa14c2daa1 100644 --- a/packages/models-library/src/models_library/clusters.py +++ b/packages/models-library/src/models_library/clusters.py @@ -154,7 +154,7 @@ class Config: ClusterID: TypeAlias = NonNegativeInt -DEFAULT_CLUSTER_ID: Final[NonNegativeInt] = 0 +DEFAULT_CLUSTER_ID: Final[ClusterID] = 0 class Cluster(BaseCluster): diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py 
b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py index d06c37457b7..2b29acf16c9 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py @@ -16,10 +16,7 @@ async def start_scheduler() -> None: with log_context( _logger, level=logging.INFO, msg="starting computational scheduler" ): - app.state.scheduler = scheduler = await _scheduler_factory.create_from_db( - app - ) - scheduler.recover_scheduling() + app.state.scheduler = await _scheduler_factory.create_from_db(app) return start_scheduler diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py index 097afd95288..e6d8e6da491 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py @@ -17,8 +17,9 @@ import functools import logging from abc import ABC, abstractmethod +from collections.abc import Callable from dataclasses import dataclass, field -from typing import Final +from typing import Final, TypeAlias import arrow import networkx as nx @@ -52,12 +53,13 @@ ) from ...core.settings import ComputationalBackendSettings from ...models.comp_pipelines import CompPipelineAtDB -from ...models.comp_runs import RunMetadataDict +from ...models.comp_runs import CompRunsAtDB, RunMetadataDict from ...models.comp_tasks import CompTaskAtDB from ...utils.comp_scheduler import ( COMPLETED_STATES, PROCESSING_STATES, RUNNING_STATES, + SCHEDULED_STATES, TASK_TO_START_STATES, WAITING_FOR_START_STATES, Iteration, @@ -82,9 +84,12 @@ _Current = CompTaskAtDB _MAX_WAITING_FOR_CLUSTER_TIMEOUT_IN_MIN: Final[int] = 10 _SCHEDULER_INTERVAL: Final[datetime.timedelta] = 
datetime.timedelta(seconds=5) -_TASK_NAME_TEMPLATE: Final[ - str -] = "computational-scheduler-{user_id}:{project_id}:{iteration}" +_TASK_NAME_TEMPLATE: Final[str] = ( + "computational-scheduler-{user_id}:{project_id}:{iteration}" +) + +PipelineSchedulingTask: TypeAlias = asyncio.Task +PipelineSchedulingWakeUpEvent: TypeAlias = asyncio.Event @dataclass(frozen=True, slots=True) @@ -138,13 +143,8 @@ async def _triage_changed_tasks( @dataclass(kw_only=True) class ScheduledPipelineParams: - cluster_id: ClusterID - run_metadata: RunMetadataDict - mark_for_cancellation: datetime.datetime | None - use_on_demand_clusters: bool - - scheduler_task: asyncio.Task | None = None - scheduler_waker: asyncio.Event = field(default_factory=asyncio.Event) + scheduler_task: asyncio.Task + scheduler_waker: asyncio.Event def wake_up(self) -> None: self.scheduler_waker.set() @@ -152,17 +152,41 @@ def wake_up(self) -> None: @dataclass class BaseCompScheduler(ABC): - scheduled_pipelines: dict[ - tuple[UserID, ProjectID, Iteration], ScheduledPipelineParams - ] db_engine: Engine - wake_up_event: asyncio.Event = field(default_factory=asyncio.Event, init=False) rabbitmq_client: RabbitMQClient rabbitmq_rpc_client: RabbitMQRPCClient settings: ComputationalBackendSettings service_runtime_heartbeat_interval: datetime.timedelta redis_client: RedisClientSDK + # NOTE: this is a trick to be able to inheritate from the class + _scheduled_pipelines: dict[ + tuple[UserID, ProjectID, Iteration], ScheduledPipelineParams + ] = field(default_factory=dict, init=False) + + def __post_init__(self) -> None: + self._scheduled_pipelines = {} + + async def restore_scheduling_from_db(self) -> None: + # get currently scheduled runs + comp_runs = await CompRunsRepository.instance(self.db_engine).list( + filter_by_state=SCHEDULED_STATES + ) + + for run in comp_runs: + task, wake_up_event = self._start_scheduling( + run.user_id, run.project_uuid, run.iteration + ) + self._scheduled_pipelines |= { + ( + run.user_id, + 
run.project_uuid, + run.iteration, + ): ScheduledPipelineParams( + scheduler_task=task, scheduler_waker=wake_up_event + ) + } + async def run_new_pipeline( self, user_id: UserID, @@ -192,13 +216,11 @@ async def run_new_pipeline( metadata=run_metadata, use_on_demand_clusters=use_on_demand_clusters, ) - self.scheduled_pipelines[ - (user_id, project_id, new_run.iteration) - ] = pipeline_params = ScheduledPipelineParams( - cluster_id=cluster_id, - run_metadata=new_run.metadata, - use_on_demand_clusters=use_on_demand_clusters, - mark_for_cancellation=None, + task, wake_up_event = self._start_scheduling( + user_id, project_id, new_run.iteration + ) + self._scheduled_pipelines[(user_id, project_id, new_run.iteration)] = ( + ScheduledPipelineParams(scheduler_task=task, scheduler_waker=wake_up_event) ) await publish_project_log( self.rabbitmq_client, @@ -208,8 +230,6 @@ async def run_new_pipeline( log_level=logging.INFO, ) - self._start_scheduling(pipeline_params, user_id, project_id, new_run.iteration) - async def stop_pipeline( self, user_id: UserID, project_id: ProjectID, iteration: int | None = None ) -> None: @@ -217,7 +237,7 @@ async def stop_pipeline( # if no iteration given find the latest one in the list possible_iterations = { it - for u_id, p_id, it in self.scheduled_pipelines + for u_id, p_id, it in self._scheduled_pipelines if u_id == user_id and p_id == project_id } if not possible_iterations: @@ -236,28 +256,17 @@ async def stop_pipeline( ) if updated_comp_run: assert updated_comp_run.cancelled is not None # nosec - self.scheduled_pipelines[ - (user_id, project_id, selected_iteration) - ].mark_for_cancellation = updated_comp_run.cancelled # ensure the scheduler starts right away - self.scheduled_pipelines[ + self._scheduled_pipelines[ (user_id, project_id, selected_iteration) ].wake_up() - def recover_scheduling(self) -> None: - for ( - user_id, - project_id, - iteration, - ), params in self.scheduled_pipelines.items(): - self._start_scheduling(params, 
user_id, project_id, iteration) - async def shutdown(self) -> None: # cancel all current scheduling processes await asyncio.gather( *( stop_periodic_task(p.scheduler_task, timeout=3) - for p in self.scheduled_pipelines.values() + for p in self._scheduled_pipelines.values() if p.scheduler_task ), return_exceptions=True, @@ -267,7 +276,7 @@ def _get_last_iteration(self, user_id: UserID, project_id: ProjectID) -> Iterati # if no iteration given find the latest one in the list possible_iterations = { it - for u_id, p_id, it in self.scheduled_pipelines + for u_id, p_id, it in self._scheduled_pipelines if u_id == user_id and p_id == project_id } if not possible_iterations: @@ -277,40 +286,41 @@ def _get_last_iteration(self, user_id: UserID, project_id: ProjectID) -> Iterati def _start_scheduling( self, - pipeline_params: ScheduledPipelineParams, user_id: UserID, project_id: ProjectID, iteration: Iteration, - ) -> None: + ) -> tuple[PipelineSchedulingTask, PipelineSchedulingWakeUpEvent]: async def _exclusive_safe_schedule_pipeline( *, user_id: UserID, project_id: ProjectID, iteration: Iteration, - pipeline_params: ScheduledPipelineParams, + wake_up_callback: Callable[[], None], ) -> None: with contextlib.suppress(CouldNotAcquireLockError): await self._schedule_pipeline( user_id=user_id, project_id=project_id, iteration=iteration, - pipeline_params=pipeline_params, + wake_up_callback=wake_up_callback, ) - pipeline_params.scheduler_task = start_periodic_task( + pipeline_wake_up_event = asyncio.Event() + pipeline_task = start_periodic_task( functools.partial( _exclusive_safe_schedule_pipeline, user_id=user_id, project_id=project_id, iteration=iteration, - pipeline_params=pipeline_params, + wake_up_callback=pipeline_wake_up_event.set, ), interval=_SCHEDULER_INTERVAL, task_name=_TASK_NAME_TEMPLATE.format( user_id=user_id, project_id=project_id, iteration=iteration ), - early_wake_up_event=pipeline_params.scheduler_waker, + early_wake_up_event=pipeline_wake_up_event, ) + return 
pipeline_task, pipeline_wake_up_event async def _get_pipeline_dag(self, project_id: ProjectID) -> nx.DiGraph: comp_pipeline_repo = CompPipelinesRepository.instance(self.db_engine) @@ -451,10 +461,10 @@ async def _get_changed_tasks_from_backend( self, user_id: UserID, processing_tasks: list[CompTaskAtDB], - pipeline_params: ScheduledPipelineParams, + comp_run: CompRunsAtDB, ) -> list[tuple[_Previous, _Current]]: tasks_backend_status = await self._get_tasks_status( - user_id, processing_tasks, pipeline_params + user_id, processing_tasks, comp_run ) return [ @@ -592,7 +602,7 @@ async def _update_states_from_comp_backend( project_id: ProjectID, iteration: Iteration, pipeline_dag: nx.DiGraph, - pipeline_params: ScheduledPipelineParams, + comp_run: CompRunsAtDB, ) -> None: tasks = await self._get_pipeline_tasks(project_id, pipeline_dag) tasks_inprocess = [t for t in tasks.values() if t.state in PROCESSING_STATES] @@ -601,7 +611,7 @@ async def _update_states_from_comp_backend( # get the tasks which state actually changed since last check tasks_with_changed_states = await self._get_changed_tasks_from_backend( - user_id, tasks_inprocess, pipeline_params + user_id, tasks_inprocess, comp_run ) # NOTE: typical states a task goes through # NOT_STARTED (initial state) -> PUBLISHED (user press run/API call) -> PENDING -> WAITING_FOR_CLUSTER (cluster creation) -> @@ -620,7 +630,7 @@ async def _update_states_from_comp_backend( sorted_tasks.started, user_id=user_id, iteration=iteration, - run_metadata=pipeline_params.run_metadata, + run_metadata=comp_run.metadata, ) if sorted_tasks.completed or sorted_tasks.potentially_lost: @@ -628,7 +638,7 @@ async def _update_states_from_comp_backend( user_id, sorted_tasks.completed + sorted_tasks.potentially_lost, iteration, - pipeline_params=pipeline_params, + comp_run=comp_run, ) if sorted_tasks.waiting: @@ -641,27 +651,19 @@ async def _start_tasks( user_id: UserID, project_id: ProjectID, scheduled_tasks: dict[NodeID, CompTaskAtDB], - 
pipeline_params: ScheduledPipelineParams, - ) -> None: - ... + comp_run: CompRunsAtDB, + wake_up_callback: Callable[[], None], + ) -> None: ... @abstractmethod async def _get_tasks_status( - self, - user_id: UserID, - tasks: list[CompTaskAtDB], - pipeline_params: ScheduledPipelineParams, - ) -> list[RunningState]: - ... + self, user_id: UserID, tasks: list[CompTaskAtDB], comp_run: CompRunsAtDB + ) -> list[RunningState]: ... @abstractmethod async def _stop_tasks( - self, - user_id: UserID, - tasks: list[CompTaskAtDB], - pipeline_params: ScheduledPipelineParams, - ) -> None: - ... + self, user_id: UserID, tasks: list[CompTaskAtDB], comp_run: CompRunsAtDB + ) -> None: ... @abstractmethod async def _process_completed_tasks( @@ -669,9 +671,8 @@ async def _process_completed_tasks( user_id: UserID, tasks: list[CompTaskAtDB], iteration: Iteration, - pipeline_params: ScheduledPipelineParams, - ) -> None: - ... + comp_run: CompRunsAtDB, + ) -> None: ... @staticmethod def _build_exclusive_lock_key(*args, **kwargs) -> str: @@ -695,7 +696,7 @@ async def _schedule_pipeline( user_id: UserID, project_id: ProjectID, iteration: PositiveInt, - pipeline_params: ScheduledPipelineParams, + wake_up_callback: Callable[[], None], ) -> None: with log_context( _logger, @@ -704,19 +705,22 @@ async def _schedule_pipeline( ): dag: nx.DiGraph = nx.DiGraph() try: + comp_run = await CompRunsRepository.instance(self.db_engine).get( + user_id, project_id, iteration + ) dag = await self._get_pipeline_dag(project_id) # 1. Update our list of tasks with data from backend (state, results) await self._update_states_from_comp_backend( - user_id, project_id, iteration, dag, pipeline_params=pipeline_params + user_id, project_id, iteration, dag, comp_run ) # 2. Any task following a FAILED task shall be ABORTED comp_tasks = await self._set_states_following_failed_to_aborted( project_id, dag ) # 3. do we want to stop the pipeline now? 
- if pipeline_params.mark_for_cancellation: + if comp_run.cancelled: await self._schedule_tasks_to_stop( - user_id, project_id, comp_tasks, pipeline_params + user_id, project_id, comp_tasks, comp_run ) else: # let's get the tasks to schedule then @@ -725,7 +729,8 @@ async def _schedule_pipeline( project_id=project_id, comp_tasks=comp_tasks, dag=dag, - pipeline_params=pipeline_params, + comp_run=comp_run, + wake_up_callback=wake_up_callback, ) # 4. timeout if waiting for cluster has been there for more than X minutes comp_tasks = await self._timeout_if_waiting_for_cluster_too_long( @@ -744,14 +749,17 @@ async def _schedule_pipeline( # 7. Are we done scheduling that pipeline? if not dag.nodes() or pipeline_result in COMPLETED_STATES: # there is nothing left, the run is completed, we're done here - self.scheduled_pipelines.pop((user_id, project_id, iteration), None) + self._scheduled_pipelines.pop( + (user_id, project_id, iteration), None + ) _logger.info( "pipeline %s scheduling completed with result %s", f"{project_id=}", f"{pipeline_result=}", ) - assert pipeline_params.scheduler_task is not None # nosec - pipeline_params.scheduler_task.cancel() + current_task = asyncio.current_task() + assert current_task is not None # nosec + current_task.cancel() except PipelineNotFoundError: _logger.warning( "pipeline %s does not exist in comp_pipeline table, it will be removed from scheduler", @@ -760,7 +768,7 @@ async def _schedule_pipeline( await self._set_run_result( user_id, project_id, iteration, RunningState.ABORTED ) - self.scheduled_pipelines.pop((user_id, project_id, iteration), None) + self._scheduled_pipelines.pop((user_id, project_id, iteration), None) except InvalidPipelineError as exc: _logger.warning( "pipeline %s appears to be misconfigured, it will be removed from scheduler. 
Please check pipeline:\n%s", @@ -770,7 +778,7 @@ async def _schedule_pipeline( await self._set_run_result( user_id, project_id, iteration, RunningState.ABORTED ) - self.scheduled_pipelines.pop((user_id, project_id, iteration), None) + self._scheduled_pipelines.pop((user_id, project_id, iteration), None) except (DaskClientAcquisisitonError, ClustersKeeperNotAvailableError): _logger.exception( "Unexpected error while connecting with computational backend, aborting pipeline" @@ -787,7 +795,7 @@ async def _schedule_pipeline( await self._set_run_result( user_id, project_id, iteration, RunningState.FAILED ) - self.scheduled_pipelines.pop((user_id, project_id, iteration), None) + self._scheduled_pipelines.pop((user_id, project_id, iteration), None) except ComputationalBackendNotConnectedError: _logger.exception("Computational backend is not connected!") @@ -796,7 +804,7 @@ async def _schedule_tasks_to_stop( user_id: UserID, project_id: ProjectID, comp_tasks: dict[NodeIDStr, CompTaskAtDB], - pipeline_params: ScheduledPipelineParams, + comp_run: CompRunsAtDB, ) -> None: # get any running task and stop them comp_tasks_repo = CompTasksRepository.instance(self.db_engine) @@ -804,8 +812,10 @@ async def _schedule_tasks_to_stop( project_id ) # stop any remaining running task, these are already submitted - tasks_to_stop = [t for t in comp_tasks.values() if t.state in PROCESSING_STATES] - await self._stop_tasks(user_id, tasks_to_stop, pipeline_params) + if tasks_to_stop := [ + t for t in comp_tasks.values() if t.state in PROCESSING_STATES + ]: + await self._stop_tasks(user_id, tasks_to_stop, comp_run) async def _schedule_tasks_to_start( # noqa: C901 self, @@ -813,7 +823,8 @@ async def _schedule_tasks_to_start( # noqa: C901 project_id: ProjectID, comp_tasks: dict[NodeIDStr, CompTaskAtDB], dag: nx.DiGraph, - pipeline_params: ScheduledPipelineParams, + comp_run: CompRunsAtDB, + wake_up_callback: Callable[[], None], ) -> dict[NodeIDStr, CompTaskAtDB]: # filter out the successfully 
completed tasks dag.remove_nodes_from( @@ -845,7 +856,8 @@ async def _schedule_tasks_to_start( # noqa: C901 user_id=user_id, project_id=project_id, scheduled_tasks=tasks_ready_to_start, - pipeline_params=pipeline_params, + comp_run=comp_run, + wake_up_callback=wake_up_callback, ) except ( ComputationalBackendNotConnectedError, @@ -863,9 +875,9 @@ async def _schedule_tasks_to_start( # noqa: C901 RunningState.WAITING_FOR_CLUSTER, ) for task in tasks_ready_to_start: - comp_tasks[ - NodeIDStr(f"{task}") - ].state = RunningState.WAITING_FOR_CLUSTER + comp_tasks[NodeIDStr(f"{task}")].state = ( + RunningState.WAITING_FOR_CLUSTER + ) except ComputationalBackendOnDemandNotReadyError as exc: _logger.info( @@ -887,9 +899,9 @@ async def _schedule_tasks_to_start( # noqa: C901 RunningState.WAITING_FOR_CLUSTER, ) for task in tasks_ready_to_start: - comp_tasks[ - NodeIDStr(f"{task}") - ].state = RunningState.WAITING_FOR_CLUSTER + comp_tasks[NodeIDStr(f"{task}")].state = ( + RunningState.WAITING_FOR_CLUSTER + ) except ClustersKeeperNotAvailableError: _logger.exception("Unexpected error while starting tasks:") await publish_project_log( @@ -934,7 +946,7 @@ async def _schedule_tasks_to_start( # noqa: C901 "Unexpected error for %s with %s on %s happened when scheduling %s:", f"{user_id=}", f"{project_id=}", - f"{pipeline_params.cluster_id=}", + f"{comp_run.cluster_id=}", f"{tasks_ready_to_start.keys()=}", ) await CompTasksRepository.instance( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py index 512df1b1712..2f0c8c4eab4 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py @@ -4,6 +4,7 @@ from collections.abc import AsyncIterator from contextlib import asynccontextmanager from 
dataclasses import dataclass +from typing import Callable import arrow from dask_task_models_library.container_tasks.errors import TaskCancelledError @@ -12,7 +13,7 @@ TaskProgressEvent, ) from dask_task_models_library.container_tasks.io import TaskOutputData -from models_library.clusters import DEFAULT_CLUSTER_ID, BaseCluster +from models_library.clusters import DEFAULT_CLUSTER_ID, BaseCluster, ClusterID from models_library.errors import ErrorDict from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -27,7 +28,7 @@ ComputationalBackendOnDemandNotReadyError, TaskSchedulingError, ) -from ...models.comp_runs import RunMetadataDict +from ...models.comp_runs import CompRunsAtDB, RunMetadataDict from ...models.comp_tasks import CompTaskAtDB from ...models.dask_subsystem import DaskClientTaskState from ...utils.comp_scheduler import Iteration, get_resource_tracking_run_id @@ -49,7 +50,7 @@ from ..db.repositories.clusters import ClustersRepository from ..db.repositories.comp_runs import CompRunsRepository from ..db.repositories.comp_tasks import CompTasksRepository -from ._base_scheduler import BaseCompScheduler, ScheduledPipelineParams +from ._base_scheduler import BaseCompScheduler _logger = logging.getLogger(__name__) @@ -69,19 +70,22 @@ @asynccontextmanager async def _cluster_dask_client( user_id: UserID, - pipeline_params: ScheduledPipelineParams, scheduler: "DaskScheduler", + *, + use_on_demand_clusters: bool, + cluster_id: ClusterID, + run_metadata: RunMetadataDict, ) -> AsyncIterator[DaskClient]: cluster: BaseCluster = scheduler.settings.default_cluster - if pipeline_params.use_on_demand_clusters: + if use_on_demand_clusters: cluster = await get_or_create_on_demand_cluster( scheduler.rabbitmq_rpc_client, user_id=user_id, - wallet_id=pipeline_params.run_metadata.get("wallet_id"), + wallet_id=run_metadata.get("wallet_id"), ) - if pipeline_params.cluster_id != DEFAULT_CLUSTER_ID: + if cluster_id != DEFAULT_CLUSTER_ID: 
clusters_repo = ClustersRepository.instance(scheduler.db_engine) - cluster = await clusters_repo.get_cluster(user_id, pipeline_params.cluster_id) + cluster = await clusters_repo.get_cluster(user_id, cluster_id) async with scheduler.dask_clients_pool.acquire(cluster) as client: yield client @@ -104,10 +108,21 @@ async def _start_tasks( user_id: UserID, project_id: ProjectID, scheduled_tasks: dict[NodeID, CompTaskAtDB], - pipeline_params: ScheduledPipelineParams, + comp_run: CompRunsAtDB, + wake_up_callback: Callable[[], None], ) -> None: # now transfer the pipeline to the dask scheduler - async with _cluster_dask_client(user_id, pipeline_params, self) as client: + async with _cluster_dask_client( + user_id, + self, + use_on_demand_clusters=comp_run.use_on_demand_clusters, + cluster_id=( + comp_run.cluster_id + if comp_run.cluster_id is not None + else DEFAULT_CLUSTER_ID + ), + run_metadata=comp_run.metadata, + ) as client: # Change the tasks state to PENDING comp_tasks_repo = CompTasksRepository.instance(self.db_engine) await comp_tasks_repo.update_project_tasks_state( @@ -121,11 +136,15 @@ async def _start_tasks( client.send_computation_tasks( user_id=user_id, project_id=project_id, - cluster_id=pipeline_params.cluster_id, + cluster_id=( + comp_run.cluster_id + if comp_run.cluster_id is not None + else DEFAULT_CLUSTER_ID + ), tasks={node_id: task.image}, hardware_info=task.hardware_info, - callback=pipeline_params.wake_up, - metadata=pipeline_params.run_metadata, + callback=wake_up_callback, + metadata=comp_run.metadata, ) for node_id, task in scheduled_tasks.items() ), @@ -146,10 +165,20 @@ async def _get_tasks_status( self, user_id: UserID, tasks: list[CompTaskAtDB], - pipeline_params: ScheduledPipelineParams, + comp_run: CompRunsAtDB, ) -> list[RunningState]: try: - async with _cluster_dask_client(user_id, pipeline_params, self) as client: + async with _cluster_dask_client( + user_id, + self, + use_on_demand_clusters=comp_run.use_on_demand_clusters, + 
cluster_id=( + comp_run.cluster_id + if comp_run.cluster_id is not None + else DEFAULT_CLUSTER_ID + ), + run_metadata=comp_run.metadata, + ) as client: tasks_statuses = await client.get_tasks_status( [f"{t.job_id}" for t in tasks] ) @@ -177,14 +206,21 @@ async def _get_tasks_status( return [RunningState.WAITING_FOR_CLUSTER] * len(tasks) async def _stop_tasks( - self, - user_id: UserID, - tasks: list[CompTaskAtDB], - pipeline_params: ScheduledPipelineParams, + self, user_id: UserID, tasks: list[CompTaskAtDB], comp_run: CompRunsAtDB ) -> None: # NOTE: if this exception raises, it means the backend was anyway not up with contextlib.suppress(ComputationalBackendOnDemandNotReadyError): - async with _cluster_dask_client(user_id, pipeline_params, self) as client: + async with _cluster_dask_client( + user_id, + self, + use_on_demand_clusters=comp_run.use_on_demand_clusters, + cluster_id=( + comp_run.cluster_id + if comp_run.cluster_id is not None + else DEFAULT_CLUSTER_ID + ), + run_metadata=comp_run.metadata, + ) as client: await asyncio.gather( *[ client.abort_computation_task(t.job_id) @@ -209,10 +245,20 @@ async def _process_completed_tasks( user_id: UserID, tasks: list[CompTaskAtDB], iteration: Iteration, - pipeline_params: ScheduledPipelineParams, + comp_run: CompRunsAtDB, ) -> None: try: - async with _cluster_dask_client(user_id, pipeline_params, self) as client: + async with _cluster_dask_client( + user_id, + self, + use_on_demand_clusters=comp_run.use_on_demand_clusters, + cluster_id=( + comp_run.cluster_id + if comp_run.cluster_id is not None + else DEFAULT_CLUSTER_ID + ), + run_metadata=comp_run.metadata, + ) as client: tasks_results = await asyncio.gather( *[client.get_task_result(t.job_id or "undefined") for t in tasks], return_exceptions=True, @@ -220,13 +266,23 @@ async def _process_completed_tasks( await asyncio.gather( *[ self._process_task_result( - task, result, pipeline_params.run_metadata, iteration + task, result, comp_run.metadata, iteration ) for 
task, result in zip(tasks, tasks_results, strict=True) ] ) finally: - async with _cluster_dask_client(user_id, pipeline_params, self) as client: + async with _cluster_dask_client( + user_id, + self, + use_on_demand_clusters=comp_run.use_on_demand_clusters, + cluster_id=( + comp_run.cluster_id + if comp_run.cluster_id is not None + else DEFAULT_CLUSTER_ID + ), + run_metadata=comp_run.metadata, + ) as client: await asyncio.gather( *[client.release_task_result(t.job_id) for t in tasks if t.job_id] ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py index 4f7812816cc..39b432b9492 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py @@ -1,19 +1,15 @@ import logging from fastapi import FastAPI -from models_library.clusters import DEFAULT_CLUSTER_ID from servicelib.logging_utils import log_context from settings_library.redis import RedisDatabase from ...core.errors import ConfigurationError from ...core.settings import AppSettings -from ...models.comp_runs import CompRunsAtDB -from ...utils.comp_scheduler import SCHEDULED_STATES from ..dask_clients_pool import DaskClientsPool -from ..db.repositories.comp_runs import CompRunsRepository from ..rabbitmq import get_rabbitmq_client, get_rabbitmq_rpc_client from ..redis import get_redis_client_manager -from ._base_scheduler import BaseCompScheduler, ScheduledPipelineParams +from ._base_scheduler import BaseCompScheduler from ._dask_scheduler import DaskScheduler _logger = logging.getLogger(__name__) @@ -24,39 +20,19 @@ async def create_from_db(app: FastAPI) -> BaseCompScheduler: msg = "Database connection is missing. Please check application configuration." 
raise ConfigurationError(msg) db_engine = app.state.engine - runs_repository = CompRunsRepository.instance(db_engine) - - # get currently scheduled runs - runs: list[CompRunsAtDB] = await runs_repository.list( - filter_by_state=SCHEDULED_STATES - ) - - _logger.debug( - "Following scheduled comp_runs found still to be scheduled: %s", - runs if runs else "NONE", - ) with log_context( _logger, logging.INFO, msg="Creating Dask-based computational scheduler" ): app_settings: AppSettings = app.state.settings - return DaskScheduler( + scheduler = DaskScheduler( settings=app_settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND, dask_clients_pool=DaskClientsPool.instance(app), rabbitmq_client=get_rabbitmq_client(app), rabbitmq_rpc_client=get_rabbitmq_rpc_client(app), redis_client=get_redis_client_manager(app).client(RedisDatabase.LOCKS), db_engine=db_engine, - scheduled_pipelines={ - (r.user_id, r.project_uuid, r.iteration): ScheduledPipelineParams( - cluster_id=( - r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID - ), - run_metadata=r.metadata, - mark_for_cancellation=r.cancelled, - use_on_demand_clusters=r.use_on_demand_clusters, - ) - for r in runs - }, service_runtime_heartbeat_interval=app_settings.SERVICE_TRACKING_HEARTBEAT, ) + await scheduler.restore_scheduling_from_db() + return scheduler diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index 1df1ae09d39..927476e851d 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -71,9 +71,6 @@ BaseCompScheduler, get_scheduler, ) -from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import ( - ScheduledPipelineParams, -) from simcore_service_director_v2.modules.comp_scheduler._dask_scheduler import ( DaskScheduler, ) @@ 
"""disables the scheduler task, note that it needs to be triggered manually then"""
params in scheduler.scheduled_pipelines.items(): + assert len(scheduler._scheduled_pipelines) == 1 # noqa: SLF001 + for ( + u_id, + p_id, + it, + ) in scheduler._scheduled_pipelines: # noqa: SLF001 assert u_id == user["id"] assert p_id == sleepers_project.uuid assert it > 0 - assert params.mark_for_cancellation is None # check the database was properly updated async with aiopg_engine.acquire() as conn: result = await conn.execute( @@ -416,7 +417,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( # let the scheduler kick in await schedule_all_pipelines(scheduler) # check the scheduled pipelines is again empty since it's misconfigured - assert len(scheduler.scheduled_pipelines) == 0 + assert len(scheduler._scheduled_pipelines) == 0 # noqa: SLF001 # check the database entry is correctly updated async with aiopg_engine.acquire() as conn: result = await conn.execute( @@ -445,13 +446,17 @@ async def _assert_start_pipeline( run_metadata=run_metadata, use_on_demand_clusters=False, ) - assert len(scheduler.scheduled_pipelines) == 1, "the pipeline is not scheduled!" - for (u_id, p_id, it), params in scheduler.scheduled_pipelines.items(): + assert ( + len(scheduler._scheduled_pipelines) == 1 # noqa: SLF001 + ), "the pipeline is not scheduled!" 
+ for ( + u_id, + p_id, + it, + ) in scheduler._scheduled_pipelines: # noqa: SLF001 assert u_id == published_project.project.prj_owner assert p_id == published_project.project.uuid assert it > 0 - assert params.mark_for_cancellation is None - assert params.run_metadata == run_metadata # check the database is correctly updated, the run is published await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PUBLISHED) @@ -1019,7 +1024,7 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta assert isinstance(messages[1], RabbitResourceTrackingStoppedMessage) # the scheduled pipeline shall be removed - assert scheduler.scheduled_pipelines == {} + assert scheduler._scheduled_pipelines == {} # noqa: SLF001 async def test_task_progress_triggers( @@ -1160,7 +1165,6 @@ class RebootState: expected_run_state: RunningState -@pytest.mark.flaky(max_runs=3) @pytest.mark.parametrize( "reboot_state", [ From c83d60c569fd869c4cc86b4fc4f13db4dc5045d0 Mon Sep 17 00:00:00 2001 From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com> Date: Mon, 18 Nov 2024 11:59:16 +0100 Subject: [PATCH 7/7] =?UTF-8?q?=F0=9F=8E=A8=20Improving=20E2E=20tests:=20`?= =?UTF-8?q?expected=5Fservice=5Frunning`=20(#6739)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/pytest_simcore/helpers/playwright.py | 39 ++++++++++++++- .../helpers/playwright_sim4life.py | 5 +- tests/e2e-playwright/requirements/_test.in | 1 + tests/e2e-playwright/requirements/_test.txt | 49 ++++++++++++++----- .../tests/sim4life/test_sim4life.py | 3 ++ .../e2e-playwright/tests/tip/test_ti_plan.py | 5 ++ 6 files changed, 85 insertions(+), 17 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py index 38539f0d7fb..f063b6efd61 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py +++ 
b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py @@ -5,12 +5,15 @@ from collections import defaultdict from collections.abc import Generator, Iterator from dataclasses import dataclass, field +from datetime import UTC, datetime, timedelta from enum import Enum, unique from typing import Any, Final +import httpx from playwright.sync_api import FrameLocator, Page, Request from playwright.sync_api import TimeoutError as PlaywrightTimeoutError from playwright.sync_api import WebSocket +from pydantic import AnyUrl from pytest_simcore.helpers.logging_tools import log_context SECOND: Final[int] = 1000 @@ -196,9 +199,11 @@ def __call__(self, message: str) -> None: class SocketIONodeProgressCompleteWaiter: node_id: str logger: logging.Logger + product_url: AnyUrl _current_progress: dict[NodeProgressType, float] = field( default_factory=defaultdict ) + _last_poll_timestamp: datetime = field(default_factory=lambda: datetime.now(tz=UTC)) def __call__(self, message: str) -> bool: # socket.io encodes messages like so @@ -234,6 +239,27 @@ def __call__(self, message: str) -> bool: round(progress, 1) == 1.0 for progress in self._current_progress.values() ) + + _current_timestamp = datetime.now(UTC) + if _current_timestamp - self._last_poll_timestamp > timedelta(seconds=5): + url = f"https://{self.node_id}.services.{self.get_partial_product_url()}" + response = httpx.get(url, timeout=10) + self.logger.info( + "Querying the service endpoint from the E2E test. 
Url: %s Response: %s", + url, + response, + ) + if response.status_code <= 401: + # NOTE: If the response status is less than 400, it means that the backend is ready (There are some services that respond with a 3XX) + # MD: for now I have included 401 - as this also means that backend is ready + if self.got_expected_node_progress_types(): + self.logger.warning( + "⚠️ Progress bar didn't receive 100 percent but service is already running: %s ⚠️", # https://github.com/ITISFoundation/osparc-simcore/issues/6449 + self.get_current_progress(), + ) + return True + self._last_poll_timestamp = datetime.now(UTC) + return False def got_expected_node_progress_types(self): @@ -245,6 +271,9 @@ def got_expected_node_progress_types(self): def get_current_progress(self): return self._current_progress.values() + def get_partial_product_url(self): + return f"{self.product_url}".split("//")[1] + def wait_for_pipeline_state( current_state: RunningState, @@ -332,9 +361,12 @@ def expected_service_running( websocket: WebSocket, timeout: int, press_start_button: bool, + product_url: AnyUrl, ) -> Generator[ServiceRunning, None, None]: with log_context(logging.INFO, msg="Waiting for node to run") as ctx: - waiter = SocketIONodeProgressCompleteWaiter(node_id=node_id, logger=ctx.logger) + waiter = SocketIONodeProgressCompleteWaiter( + node_id=node_id, logger=ctx.logger, product_url=product_url + ) service_running = ServiceRunning(iframe_locator=None) try: @@ -366,12 +398,15 @@ def wait_for_service_running( websocket: WebSocket, timeout: int, press_start_button: bool, + product_url: AnyUrl, ) -> FrameLocator: """NOTE: if the service was already started this will not work as some of the required websocket events will not be emitted again In which case this will need further adjutment""" with log_context(logging.INFO, msg="Waiting for node to run") as ctx: - waiter = SocketIONodeProgressCompleteWaiter(node_id=node_id, logger=ctx.logger) + waiter = SocketIONodeProgressCompleteWaiter( + 
node_id=node_id, logger=ctx.logger, product_url=product_url + ) with websocket.expect_event("framereceived", waiter, timeout=timeout): if press_start_button: _trigger_service_start(page, node_id) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py index d2349d6fa7c..c59718f4aff 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py @@ -6,8 +6,7 @@ import arrow from playwright.sync_api import FrameLocator, Page, WebSocket, expect -from pydantic import TypeAdapter # pylint: disable=no-name-in-module -from pydantic import ByteSize +from pydantic import AnyUrl, ByteSize, TypeAdapter # pylint: disable=no-name-in-module from .logging_tools import log_context from .playwright import ( @@ -105,6 +104,7 @@ def wait_for_launched_s4l( *, autoscaled: bool, copy_workspace: bool, + product_url: AnyUrl, ) -> WaitForS4LDict: with log_context(logging.INFO, "launch S4L") as ctx: predicate = S4LWaitForWebsocket(logger=ctx.logger) @@ -130,6 +130,7 @@ def wait_for_launched_s4l( ) + (_S4L_COPY_WORKSPACE_TIME if copy_workspace else 0), press_start_button=False, + product_url=product_url, ) s4l_websocket = ws_info.value ctx.logger.info("acquired S4L websocket!") diff --git a/tests/e2e-playwright/requirements/_test.in b/tests/e2e-playwright/requirements/_test.in index a50905c9086..b3fd9442f78 100644 --- a/tests/e2e-playwright/requirements/_test.in +++ b/tests/e2e-playwright/requirements/_test.in @@ -13,3 +13,4 @@ pytest-runner pytest-sugar pyyaml tenacity +httpx diff --git a/tests/e2e-playwright/requirements/_test.txt b/tests/e2e-playwright/requirements/_test.txt index 2934b76a3a8..011cb6fbd7c 100644 --- a/tests/e2e-playwright/requirements/_test.txt +++ b/tests/e2e-playwright/requirements/_test.txt @@ -1,24 +1,43 @@ +# +# This file is autogenerated by pip-compile with 
Python 3.11 +# by the following command: +# +# pip-compile _test.in +# annotated-types==0.7.0 # via pydantic +anyio==4.6.2.post1 + # via httpx arrow==1.3.0 - # via -r requirements/_test.in + # via -r _test.in certifi==2024.8.30 - # via requests + # via + # httpcore + # httpx + # requests charset-normalizer==3.3.2 # via requests dnspython==2.6.1 # via email-validator docker==7.1.0 - # via -r requirements/_test.in + # via -r _test.in email-validator==2.2.0 # via pydantic faker==29.0.0 - # via -r requirements/_test.in + # via -r _test.in greenlet==3.0.3 # via playwright +h11==0.14.0 + # via httpcore +httpcore==1.0.7 + # via httpx +httpx==0.27.2 + # via -r _test.in idna==3.10 # via + # anyio # email-validator + # httpx # requests iniconfig==2.0.0 # via pytest @@ -34,8 +53,8 @@ playwright==1.47.0 # via pytest-playwright pluggy==1.5.0 # via pytest -pydantic==2.9.2 - # via -r requirements/_test.in +pydantic[email]==2.9.2 + # via -r _test.in pydantic-core==2.23.4 # via pydantic pyee==12.0.0 @@ -51,17 +70,17 @@ pytest==8.3.3 pytest-base-url==2.1.0 # via pytest-playwright pytest-html==4.1.1 - # via -r requirements/_test.in + # via -r _test.in pytest-instafail==0.5.0 - # via -r requirements/_test.in + # via -r _test.in pytest-metadata==3.1.1 # via pytest-html pytest-playwright==0.5.2 - # via -r requirements/_test.in + # via -r _test.in pytest-runner==6.0.1 - # via -r requirements/_test.in + # via -r _test.in pytest-sugar==1.0.0 - # via -r requirements/_test.in + # via -r _test.in python-dateutil==2.9.0.post0 # via # arrow @@ -69,15 +88,19 @@ python-dateutil==2.9.0.post0 python-slugify==8.0.4 # via pytest-playwright pyyaml==6.0.2 - # via -r requirements/_test.in + # via -r _test.in requests==2.32.3 # via # docker # pytest-base-url six==1.16.0 # via python-dateutil +sniffio==1.3.1 + # via + # anyio + # httpx tenacity==9.0.0 - # via -r requirements/_test.in + # via -r _test.in termcolor==2.4.0 # via pytest-sugar text-unidecode==1.3 diff --git 
a/tests/e2e-playwright/tests/sim4life/test_sim4life.py b/tests/e2e-playwright/tests/sim4life/test_sim4life.py index 96c361bb546..924e6efa535 100644 --- a/tests/e2e-playwright/tests/sim4life/test_sim4life.py +++ b/tests/e2e-playwright/tests/sim4life/test_sim4life.py @@ -11,6 +11,7 @@ from typing import Any from playwright.sync_api import Page, WebSocket +from pydantic import AnyUrl from pytest_simcore.helpers.playwright import ( ServiceType, web_socket_default_log_handler, @@ -33,6 +34,7 @@ def test_sim4life( use_plus_button: bool, is_autoscaled: bool, check_videostreaming: bool, + product_url: AnyUrl, ): if use_plus_button: project_data = create_project_from_new_button(service_key) @@ -54,6 +56,7 @@ def test_sim4life( log_in_and_out, autoscaled=is_autoscaled, copy_workspace=False, + product_url=product_url, ) s4l_websocket = resp["websocket"] with web_socket_default_log_handler(s4l_websocket): diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py index aa878eb9274..56f028d197d 100644 --- a/tests/e2e-playwright/tests/tip/test_ti_plan.py +++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py @@ -14,6 +14,7 @@ from typing import Any, Final from playwright.sync_api import Page, WebSocket +from pydantic import AnyUrl from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.playwright import ( MINUTE, @@ -92,6 +93,7 @@ def test_classic_ti_plan( # noqa: PLR0915 is_autoscaled: bool, is_product_lite: bool, create_tip_plan_from_dashboard: Callable[[str], dict[str, Any]], + product_url: AnyUrl, ): with log_context(logging.INFO, "Checking 'Access TIP' teaser"): # click to open and expand @@ -141,6 +143,7 @@ def test_classic_ti_plan( # noqa: PLR0915 else _ELECTRODE_SELECTOR_MAX_STARTUP_TIME ), press_start_button=False, + product_url=product_url, ) # NOTE: Sometimes this iframe flicks and shows a white page. 
This wait will avoid it page.wait_for_timeout(_ELECTRODE_SELECTOR_FLICKERING_WAIT_TIME) @@ -200,6 +203,7 @@ def test_classic_ti_plan( # noqa: PLR0915 else _JLAB_MAX_STARTUP_MAX_TIME ), press_start_button=False, + product_url=product_url, ) as service_running: app_mode_trigger_next_app(page) ti_iframe = service_running.iframe_locator @@ -284,6 +288,7 @@ def test_classic_ti_plan( # noqa: PLR0915 else _POST_PRO_MAX_STARTUP_TIME ), press_start_button=False, + product_url=product_url, ) as service_running: app_mode_trigger_next_app(page) s4l_postpro_iframe = service_running.iframe_locator