diff --git a/.ci/jenkins/Jenkinsfile.daily-dev-publish b/.ci/jenkins/Jenkinsfile.daily-dev-publish
index 5571b5267f0..decfe021896 100644
--- a/.ci/jenkins/Jenkinsfile.daily-dev-publish
+++ b/.ci/jenkins/Jenkinsfile.daily-dev-publish
@@ -230,6 +230,8 @@ pipeline {
steps {
dir('kie-tools') {
sh """#!/bin/bash -el
+ git config user.email asf-ci-kie@jenkins.kie.apache.org
+ git config user.name asf-ci-kie
export WEBPACK__minimize=true
export WEBPACK__tsLoaderTranspileOnly=false
export CHROME_EXTENSION__routerTargetOrigin=https://apache.github.io
diff --git a/.rat-excludes b/.rat-excludes
index b652dffe0dc..00dae3453a6 100644
--- a/.rat-excludes
+++ b/.rat-excludes
@@ -628,6 +628,32 @@ goapi.go
greetingAPI.yaml
# packages/kn-plugin-workflow/pkg/specs/testdata/hello.sw.yaml
hello.sw.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/emptyworkflow.sw.yaml
+emptyworkflow.sw.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi-subflow34.expected.yaml
+openapi-subflow34.expected.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi.expected.yaml
+openapi.expected.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi.yaml
+openapi.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi1.expected.yaml
+openapi1.expected.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi1.yaml
+openapi1.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi2.expected.yaml
+openapi2.expected.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi2.yaml
+openapi2.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow1.sw.yaml
+subflow1.sw.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow2.sw.yaml
+subflow2.sw.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow3.sw.yaml
+subflow3.sw.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow4.sw.yaml
+subflow4.sw.yaml
+# packages/kn-plugin-workflow/pkg/specs/testdata/refs/workflow1.sw.yaml
+workflow1.sw.yaml
# packages/kn-plugin-workflow/pkg/specs/testdata/subflow-custom.sw.yaml
subflow-custom.sw.yaml
# packages/kn-plugin-workflow/pkg/specs/testdata/subflow-mySpecsDir.sw.yaml
diff --git a/examples/sonataflow-greeting/src/main/resources/application.properties b/examples/sonataflow-greeting/src/main/resources/application.properties
index ce9b26ac3ce..f0030b12430 100644
--- a/examples/sonataflow-greeting/src/main/resources/application.properties
+++ b/examples/sonataflow-greeting/src/main/resources/application.properties
@@ -28,5 +28,5 @@ quarkus.native.native-image-xmx=8g
%container.quarkus.container-image.registry=dev.local
%container.quarkus.container-image.tag=1.0-SNAPSHOT
%container.quarkus.jib.jvm-entrypoint=/home/kogito/kogito-app-launch.sh
-%container.quarkus.jib.base-jvm-image=registry.access.redhat.com/ubi9/openjdk-17:1.20
+%container.quarkus.jib.base-jvm-image=registry.access.redhat.com/ubi9/openjdk-17:1.21
%container.quarkus.jib.working-directory=/home/kogito/bin
diff --git a/packages/cors-proxy-image/Containerfile b/packages/cors-proxy-image/Containerfile
index f7ca5372f04..a17e9775111 100644
--- a/packages/cors-proxy-image/Containerfile
+++ b/packages/cors-proxy-image/Containerfile
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.5
ARG CORS_PROXY_DEFAULT_PORT=8080
ARG CORS_PROXY_DEFAULT_ORIGIN=*
diff --git a/packages/dashbuilder-viewer-image/Containerfile b/packages/dashbuilder-viewer-image/Containerfile
index fcc4531b36b..990aabc0f0e 100644
--- a/packages/dashbuilder-viewer-image/Containerfile
+++ b/packages/dashbuilder-viewer-image/Containerfile
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.5
RUN microdnf --disableplugin=subscription-manager -y install httpd \
&& microdnf --disableplugin=subscription-manager clean all \
diff --git a/packages/dev-deployment-base-image/README.md b/packages/dev-deployment-base-image/README.md
index 1e1d0b5ea97..bea7e8a78f8 100644
--- a/packages/dev-deployment-base-image/README.md
+++ b/packages/dev-deployment-base-image/README.md
@@ -21,9 +21,9 @@ Docker image with Java and Maven, as well as the dev-deployment-upload-service b
## Build arguments
-- `BUILDER_IMAGE_ARG`: The base image used for building this image (defaults to `registry.access.redhat.com/ubi9/openjdk-17:1.20`).
+- `BUILDER_IMAGE_ARG`: The base image used for building this image (defaults to `registry.access.redhat.com/ubi9/openjdk-17:1.21`).
- Tested with:
- - registry.access.redhat.com/ubi9/openjdk-17:1.20
+ - registry.access.redhat.com/ubi9/openjdk-17:1.21
- icr.io/appcafe/ibm-semeru-runtimes:open-17-jdk-ubi-minimal
## Environment variables
diff --git a/packages/dev-deployment-base-image/env/index.js b/packages/dev-deployment-base-image/env/index.js
index 6970faab87a..3fa07d5b047 100644
--- a/packages/dev-deployment-base-image/env/index.js
+++ b/packages/dev-deployment-base-image/env/index.js
@@ -24,7 +24,7 @@ const rootEnv = require("@kie-tools/root-env/env");
module.exports = composeEnv([rootEnv], {
vars: varsWithName({
DEV_DEPLOYMENT_BASE_IMAGE__builderImage: {
- default: "registry.access.redhat.com/ubi9/openjdk-17:1.20",
+ default: "registry.access.redhat.com/ubi9/openjdk-17:1.21",
description: "The image used in the FROM import.",
},
DEV_DEPLOYMENT_BASE_IMAGE__userId: {
diff --git a/packages/dev-deployment-dmn-form-webapp-image/Containerfile b/packages/dev-deployment-dmn-form-webapp-image/Containerfile
index 55e5c8e0366..3f775fc49c7 100644
--- a/packages/dev-deployment-dmn-form-webapp-image/Containerfile
+++ b/packages/dev-deployment-dmn-form-webapp-image/Containerfile
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.5
ARG DEV_DEPLOYMENT_DMN_FORM_WEBAPP_DEFAULT_PORT=8081
diff --git a/packages/dev-deployment-upload-service/dev/Containerfile.ddus-buildtime-install b/packages/dev-deployment-upload-service/dev/Containerfile.ddus-buildtime-install
index edc61a9a30f..03436d8db05 100644
--- a/packages/dev-deployment-upload-service/dev/Containerfile.ddus-buildtime-install
+++ b/packages/dev-deployment-upload-service/dev/Containerfile.ddus-buildtime-install
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5
ARG DDUS_FILESERVER_IP=""
ARG DDUS_VERSION="0.0.0"
diff --git a/packages/dev-deployment-upload-service/dev/Containerfile.ddus-fileserver b/packages/dev-deployment-upload-service/dev/Containerfile.ddus-fileserver
index 317871ea51c..b7a1b09e7b5 100644
--- a/packages/dev-deployment-upload-service/dev/Containerfile.ddus-fileserver
+++ b/packages/dev-deployment-upload-service/dev/Containerfile.ddus-fileserver
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5
ARG DDUS_VERSION="0.0.0"
diff --git a/packages/dev-deployment-upload-service/dev/Containerfile.ddus-runtime-install b/packages/dev-deployment-upload-service/dev/Containerfile.ddus-runtime-install
index 9a1257528da..38a7768ad57 100644
--- a/packages/dev-deployment-upload-service/dev/Containerfile.ddus-runtime-install
+++ b/packages/dev-deployment-upload-service/dev/Containerfile.ddus-runtime-install
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5
ENV DDUS_FILESERVER_IP=""
ENV DDUS_VERSION="0.0.0"
diff --git a/packages/dmn-editor/src/dataTypes/DataTypeName.tsx b/packages/dmn-editor/src/dataTypes/DataTypeName.tsx
index b816e114925..06262f0227b 100644
--- a/packages/dmn-editor/src/dataTypes/DataTypeName.tsx
+++ b/packages/dmn-editor/src/dataTypes/DataTypeName.tsx
@@ -32,6 +32,7 @@ import { InlineFeelNameInput, OnInlineFeelNameRenamed } from "../feel/InlineFeel
import { useExternalModels } from "../includedModels/DmnEditorDependenciesContext";
import { State } from "../store/Store";
import { DmnBuiltInDataType } from "@kie-tools/boxed-expression-component/dist/api";
+import { isStruct } from "./DataTypeSpec";
export function DataTypeName({
isReadOnly,
@@ -137,7 +138,7 @@ export function DataTypeName({
/>
{!isEditingLabel && (
diff --git a/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-deployment/src/main/java/org/jbpm/quarkus/devui/deployment/DevConsoleProcessor.java b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-deployment/src/main/java/org/jbpm/quarkus/devui/deployment/DevConsoleProcessor.java
index 03bc33317f2..00e6f44c261 100644
--- a/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-deployment/src/main/java/org/jbpm/quarkus/devui/deployment/DevConsoleProcessor.java
+++ b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-deployment/src/main/java/org/jbpm/quarkus/devui/deployment/DevConsoleProcessor.java
@@ -135,21 +135,21 @@ public CardPageBuildItem pages(
.metadata("page", "Processes")
.title("Process Instances")
.icon("font-awesome-solid:diagram-project")
- .dynamicLabelJsonRPCMethodName("queryProcessInstancesCount"));
+ .streamingLabelJsonRPCMethodName("queryProcessInstancesCount"));
cardPageBuildItem.addPage(Page.webComponentPageBuilder()
.componentLink("qwc-jbpm-quarkus-devui.js")
.metadata("page", "Tasks")
.title("Tasks")
.icon("font-awesome-solid:bars-progress")
- .dynamicLabelJsonRPCMethodName("queryTasksCount"));
+ .streamingLabelJsonRPCMethodName("queryTasksCount"));
cardPageBuildItem.addPage(Page.webComponentPageBuilder()
.componentLink("qwc-jbpm-quarkus-devui.js")
.metadata("page", "Jobs")
.title("Jobs")
.icon("font-awesome-solid:clock")
- .dynamicLabelJsonRPCMethodName("queryJobsCount"));
+ .streamingLabelJsonRPCMethodName("queryJobsCount"));
cardPageBuildItem.addPage(Page.webComponentPageBuilder()
.componentLink("qwc-jbpm-quarkus-devui.js")
diff --git a/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/pom.xml b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/pom.xml
index d56dbb280f9..dc2fe91058e 100644
--- a/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/pom.xml
+++ b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/pom.xml
@@ -95,6 +95,11 @@
 <artifactId>vertx-web-client</artifactId>
 </dependency>
+ <dependency>
+ <groupId>org.kie.kogito</groupId>
+ <artifactId>kogito-api</artifactId>
+ </dependency>
+
 <dependency>
 <groupId>org.junit.jupiter</groupId>
 <artifactId>junit-jupiter-engine</artifactId>
diff --git a/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/DataIndexCounter.java b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/DataIndexCounter.java
new file mode 100644
index 00000000000..e378ada468c
--- /dev/null
+++ b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/DataIndexCounter.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.jbpm.quarkus.devui.runtime.rpc;
+
+import io.smallrye.mutiny.Multi;
+import io.smallrye.mutiny.operators.multi.MultiCacheOp;
+import io.smallrye.mutiny.operators.multi.processors.BroadcastProcessor;
+import io.vertx.core.Vertx;
+import io.vertx.core.json.JsonObject;
+import io.vertx.ext.web.client.WebClient;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DataIndexCounter {
+ private static final Logger LOGGER = LoggerFactory.getLogger(DataIndexCounter.class);
+
+ private final Vertx vertx;
+ private final MultiCacheOp multi;
+ private final WebClient dataIndexWebClient;
+ private final String path;
+
+ private final String query;
+ private final String field;
+
+ public DataIndexCounter(String query, String graphField, String path, Vertx vertx, WebClient dataIndexWebClient) {
+ if (dataIndexWebClient == null) {
+ throw new IllegalArgumentException("dataIndexWebClient is null");
+ }
+ this.query = query;
+ this.field = graphField;
+ this.path = path;
+
+ this.vertx = vertx;
+ this.dataIndexWebClient = dataIndexWebClient;
+
+ this.multi = new MultiCacheOp<>(BroadcastProcessor.create());
+
+ refreshCount();
+ }
+
+ public void refresh() {
+ vertx.setTimer(1000, id -> {
+ refreshCount();
+ });
+ }
+
+ public void stop() {
+ multi.onComplete();
+ }
+
+ private void refreshCount() {
+ LOGGER.debug("Refreshing data for query: {}", query);
+
+ this.dataIndexWebClient.post(path + "/graphql")
+ .putHeader("content-type", "application/json")
+ .sendJson(new JsonObject(query))
+ .map(response -> {
+ if (response.statusCode() == 200) {
+ JsonObject responseData = response.bodyAsJsonObject().getJsonObject("data");
+ return String.valueOf(responseData.getJsonArray(field).size());
+ }
+ return "0";
+ })
+ .onComplete(count -> this.multi.onNext(count.result()));
+ }
+
+ public Multi getMulti() {
+ return multi;
+ }
+}
\ No newline at end of file
diff --git a/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/JBPMDevUIEventPublisher.java b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/JBPMDevUIEventPublisher.java
new file mode 100644
index 00000000000..399274f3540
--- /dev/null
+++ b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/JBPMDevUIEventPublisher.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.jbpm.quarkus.devui.runtime.rpc;
+
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.enterprise.inject.Default;
+
+import org.kie.kogito.event.DataEvent;
+import org.kie.kogito.event.EventPublisher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import io.quarkus.arc.profile.IfBuildProfile;
+
+import java.util.Collection;
+import java.util.Objects;
+@ApplicationScoped
+@IfBuildProfile("dev")
+public class JBPMDevUIEventPublisher implements EventPublisher {
+
+ private Runnable onProcessEvent;
+ private Runnable onTaskEvent;
+ private Runnable onJobEvent;
+
+ @Override
+ public void publish(DataEvent<?> event) {
+ switch (event.getType()) {
+ case "ProcessInstanceStateDataEvent":
+ maybeRun(onProcessEvent);
+ break;
+ case "UserTaskInstanceStateDataEvent":
+ maybeRun(onTaskEvent);
+ break;
+ case "JobEvent":
+ maybeRun(onJobEvent);
+ break;
+ }
+ }
+
+ @Override
+ public void publish(Collection<DataEvent<?>> events) {
+ events.forEach(this::publish);
+ }
+
+ private void maybeRun(Runnable runnable) {
+ if (Objects.nonNull(runnable)) {
+ runnable.run();
+ }
+ }
+
+ public void setOnProcessEventListener(Runnable onProcessEvent) {
+ this.onProcessEvent = onProcessEvent;
+ }
+
+ public void setOnTaskEventListener(Runnable onTaskEvent) {
+ this.onTaskEvent = onTaskEvent;
+ }
+
+ public void setOnJobEventListener(Runnable onJobEvent) {
+ this.onJobEvent = onJobEvent;
+ }
+}
\ No newline at end of file
diff --git a/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/JBPMDevuiJsonRPCService.java b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/JBPMDevuiJsonRPCService.java
index 2e62a776691..287e5629549 100644
--- a/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/JBPMDevuiJsonRPCService.java
+++ b/packages/jbpm-quarkus-devui/jbpm-quarkus-devui-runtime/src/main/java/org/jbpm/quarkus/devui/runtime/rpc/JBPMDevuiJsonRPCService.java
@@ -24,11 +24,14 @@
import java.util.Optional;
import io.smallrye.mutiny.Uni;
+import io.quarkus.arc.profile.IfBuildProfile;
+import io.smallrye.mutiny.Multi;
import io.vertx.core.Vertx;
-import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.client.WebClient;
import io.vertx.ext.web.client.WebClientOptions;
import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
+
import org.eclipse.microprofile.config.ConfigProvider;
import org.jbpm.quarkus.devui.runtime.forms.FormsStorage;
@@ -38,6 +41,7 @@
import org.slf4j.LoggerFactory;
@ApplicationScoped
+@IfBuildProfile("dev")
public class JBPMDevuiJsonRPCService {
private static final String DATA_INDEX_URL = "kogito.data-index.url";
@@ -54,11 +58,16 @@ public class JBPMDevuiJsonRPCService {
private WebClient dataIndexWebClient;
private final Vertx vertx;
+ private final JBPMDevUIEventPublisher eventPublisher;
private final FormsStorage formsStorage;
+ private DataIndexCounter processesCounter;
+ private DataIndexCounter tasksCounter;
+ private DataIndexCounter jobsCounter;
@Inject
- public JBPMDevuiJsonRPCService(Vertx vertx, FormsStorage formsStorage) {
+ public JBPMDevuiJsonRPCService(Vertx vertx, JBPMDevUIEventPublisher eventPublisher, FormsStorage formsStorage) {
this.vertx = vertx;
+ this.eventPublisher = eventPublisher;
this.formsStorage = formsStorage;
}
@@ -70,50 +79,50 @@ public void init() {
private void initDataIndexWebClient(String dataIndexURL) {
try {
- this.dataIndexWebClient = WebClient.create(vertx, buildWebClientOptions(dataIndexURL));
+ URL url = new URL(dataIndexURL);
+ this.dataIndexWebClient = WebClient.create(vertx, buildWebClientOptions(url));
+
+ String contextPath = url.getPath();
+ this.processesCounter = new DataIndexCounter(ALL_PROCESS_INSTANCES_IDS_QUERY, PROCESS_INSTANCES,
+ contextPath, vertx, dataIndexWebClient);
+ this.tasksCounter = new DataIndexCounter(ALL_TASKS_IDS_QUERY, USER_TASKS, contextPath, vertx, dataIndexWebClient);
+ this.jobsCounter = new DataIndexCounter(ALL_JOBS_IDS_QUERY, JOBS, contextPath, vertx, dataIndexWebClient);
+
+ this.eventPublisher.setOnProcessEventListener(processesCounter::refresh);
+ this.eventPublisher.setOnTaskEventListener(tasksCounter::refresh);
+ this.eventPublisher.setOnJobEventListener(jobsCounter::refresh);
} catch (Exception ex) {
LOGGER.warn("Cannot configure dataIndexWebClient with 'kogito.data-index.url'='{}':", dataIndexURL, ex);
}
}
- protected WebClientOptions buildWebClientOptions(String dataIndexURL) throws MalformedURLException {
- URL url = new URL(dataIndexURL);
+ protected WebClientOptions buildWebClientOptions(URL dataIndexURL) throws MalformedURLException {
return new WebClientOptions()
- .setDefaultHost(url.getHost())
- .setDefaultPort((url.getPort() != -1 ? url.getPort() : url.getDefaultPort()))
- .setSsl(url.getProtocol().compareToIgnoreCase("https") == 0);
- }
-
- public Uni queryProcessInstancesCount() {
- return doQuery(ALL_PROCESS_INSTANCES_IDS_QUERY, PROCESS_INSTANCES);
+ .setDefaultHost(dataIndexURL.getHost())
+ .setDefaultPort((dataIndexURL.getPort() != -1 ? dataIndexURL.getPort() : dataIndexURL.getDefaultPort()))
+ .setSsl(dataIndexURL.getProtocol().compareToIgnoreCase("https") == 0);
}
- public Uni queryTasksCount() {
- return doQuery(ALL_TASKS_IDS_QUERY, USER_TASKS);
+ public Multi queryProcessInstancesCount() {
+ return processesCounter.getMulti();
}
- public Uni queryJobsCount() {
- return doQuery(ALL_JOBS_IDS_QUERY, JOBS);
+ public Multi queryTasksCount() {
+ return tasksCounter.getMulti();
}
- private Uni doQuery(String query, String graphModelName) {
- if(dataIndexWebClient == null) {
- LOGGER.warn("Cannot perform '{}' query, dataIndexWebClient couldn't be set. Is DataIndex correctly? Please verify '{}' value", graphModelName, DATA_INDEX_URL);
- return Uni.createFrom().item("-");
- }
- return Uni.createFrom().completionStage(this.dataIndexWebClient.post("/graphql")
- .putHeader("content-type", "application/json")
- .sendJson(new JsonObject(query))
- .map(response -> {
- if(response.statusCode() == 200) {
- JsonObject responseData = response.bodyAsJsonObject().getJsonObject("data");
- return String.valueOf(responseData.getJsonArray(graphModelName).size());
- }
- return "-";
- }).toCompletionStage());
+ public Multi queryJobsCount() {
+ return jobsCounter.getMulti();
}
public Uni getFormsCount() {
return Uni.createFrom().item(String.valueOf(this.formsStorage.getFormsCount()));
}
-}
+
+ @PreDestroy
+ public void destroy() {
+ processesCounter.stop();
+ tasksCounter.stop();
+ jobsCounter.stop();
+ }
+}
\ No newline at end of file
diff --git a/packages/kie-editors-dev-vscode-extension/e2e-tests/extension-editors-bpmn.test.ts b/packages/kie-editors-dev-vscode-extension/e2e-tests/extension-editors-bpmn.test.ts
index c4204bbfc7a..d4757a342dc 100644
--- a/packages/kie-editors-dev-vscode-extension/e2e-tests/extension-editors-bpmn.test.ts
+++ b/packages/kie-editors-dev-vscode-extension/e2e-tests/extension-editors-bpmn.test.ts
@@ -269,12 +269,12 @@ describe("KIE Editors End to End Test Suite - BPMN Editor", () => {
await bpmnEditorTester.scrollElementIntoView(onExitActionSection);
const newOnEntryAction = "console.log('On Entry Action test log');";
- const newOnEntryLanguage = "javascript";
+ const newOnEntryLanguage = "mvel";
await propertiesPanel.changeWidgetedProperty("On Entry Action", newOnEntryAction, "textarea");
await propertiesPanel.changeWidgetedProperty("On Entry Action", newOnEntryLanguage, "select");
const newOnExitAction = "console.log('On Exit Action test log');";
- const newOnExitLanguage = "javascript";
+ const newOnExitLanguage = "mvel";
await propertiesPanel.changeWidgetedProperty("On Exit Action", newOnExitAction, "textarea");
await propertiesPanel.changeWidgetedProperty("On Exit Action", newOnExitLanguage, "select");
diff --git a/packages/kie-sandbox-extended-services-image/env/index.js b/packages/kie-sandbox-extended-services-image/env/index.js
index 01b4f7303cc..fa256c27be7 100644
--- a/packages/kie-sandbox-extended-services-image/env/index.js
+++ b/packages/kie-sandbox-extended-services-image/env/index.js
@@ -28,7 +28,7 @@ const {
module.exports = composeEnv([rootEnv], {
vars: varsWithName({
KIE_SANDBOX_EXTENDED_SERVICES__builderImage: {
- default: "registry.access.redhat.com/ubi9/openjdk-17:1.20",
+ default: "registry.access.redhat.com/ubi9/openjdk-17:1.21",
description: "The image used in the FROM import.",
},
KIE_SANDBOX_EXTENDED_SERVICES__imageRegistry: {
diff --git a/packages/kie-sandbox-webapp-image/Containerfile b/packages/kie-sandbox-webapp-image/Containerfile
index b581a0dcfdc..4470454d014 100644
--- a/packages/kie-sandbox-webapp-image/Containerfile
+++ b/packages/kie-sandbox-webapp-image/Containerfile
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.5
ARG KIE_SANDBOX_DEFAULT_PORT=8080
diff --git a/packages/kn-plugin-workflow/e2e-tests/helper_test.go b/packages/kn-plugin-workflow/e2e-tests/helper_test.go
index 392e959ee92..ad7f283e820 100644
--- a/packages/kn-plugin-workflow/e2e-tests/helper_test.go
+++ b/packages/kn-plugin-workflow/e2e-tests/helper_test.go
@@ -22,6 +22,7 @@
package e2e_tests
import (
+ "bufio"
"bytes"
"fmt"
"io"
@@ -32,6 +33,7 @@ import (
"syscall"
"testing"
+ "github.com/apache/incubator-kie-tools/packages/kn-plugin-workflow/pkg/command"
"github.com/apache/incubator-kie-tools/packages/kn-plugin-workflow/pkg/command/quarkus"
"github.com/spf13/cobra"
"github.com/stretchr/testify/require"
@@ -63,6 +65,11 @@ func ExecuteKnWorkflowWithCmd(cmd *exec.Cmd, args ...string) (string, error) {
return executeCommandWithOutput(cmd, args...)
}
+// ExecuteKnWorkflowWithCmdAndStopContainer executes the 'kn-workflow' CLI tool with the given arguments using the provided command and returns the containerID and possible error message.
+func ExecuteKnWorkflowWithCmdAndStopContainer(cmd *exec.Cmd, args ...string) (string, error) {
+ return executeCommandWithOutputAndStopContainer(cmd, args...)
+}
+
// ExecuteKnWorkflowQuarkusWithCmd executes the 'kn-workflow' CLI tool with 'quarkus' command with the given arguments using the provided command and returns the command's output and possible error message.
func ExecuteKnWorkflowQuarkusWithCmd(cmd *exec.Cmd, args ...string) (string, error) {
newArgs := append([]string{"quarkus"}, args...)
@@ -89,6 +96,70 @@ func executeCommandWithOutput(cmd *exec.Cmd, args ...string) (string, error) {
return stdout.String(), nil
}
+func executeCommandWithOutputAndStopContainer(cmd *exec.Cmd, args ...string) (string, error) {
+ cmd.Args = append([]string{cmd.Path}, args...)
+
+ var containerId string
+ var stderr bytes.Buffer
+
+ stdoutPipe, err := cmd.StdoutPipe()
+ if err != nil {
+ return "", fmt.Errorf("failed to create stdout pipe: %w", err)
+ }
+ defer stdoutPipe.Close()
+
+ stdinPipe, err := cmd.StdinPipe()
+ if err != nil {
+ return "", fmt.Errorf("failed to create stdin pipe: %w", err)
+ }
+ defer stdinPipe.Close()
+
+ cmd.Stderr = &stderr
+ errorCh := make(chan error, 1)
+
+ go func() {
+ defer close(errorCh)
+ scanner := bufio.NewScanner(stdoutPipe)
+ for scanner.Scan() {
+ line := scanner.Text()
+
+ if strings.HasPrefix(line, "Created container with ID ") {
+ id, ok := strings.CutPrefix(line, "Created container with ID ")
+ if !ok || id == "" {
+ errorCh <- fmt.Errorf("failed to parse container ID from output: %q", line)
+ return
+ }
+ containerId = id
+ }
+
+ if line == command.StopContainerMsg {
+ _, err := io.WriteString(stdinPipe, "any\n")
+ if err != nil {
+ errorCh <- fmt.Errorf("failed to write to stdin: %w", err)
+ return
+ }
+ }
+ }
+
+ if err := scanner.Err(); err != nil {
+ errorCh <- fmt.Errorf("error reading from stdout: %w", err)
+ return
+ }
+ }()
+
+ err = cmd.Run()
+ if err != nil {
+ return "", fmt.Errorf("command run error: %w (stderr: %s)", err, stderr.String())
+ }
+
+ readErr := <-errorCh
+ if readErr != nil {
+ return "", readErr
+ }
+
+ return containerId, nil
+}
+
// VerifyFileContent verifies that the content of a file matches the expected content.
func VerifyFileContent(t *testing.T, filePath string, expected string) {
actual, err := os.ReadFile(filePath)
diff --git a/packages/kn-plugin-workflow/e2e-tests/run_test.go b/packages/kn-plugin-workflow/e2e-tests/run_test.go
index f3f371b4b21..2cabd4d35e7 100644
--- a/packages/kn-plugin-workflow/e2e-tests/run_test.go
+++ b/packages/kn-plugin-workflow/e2e-tests/run_test.go
@@ -32,6 +32,7 @@ import (
"github.com/apache/incubator-kie-tools/packages/kn-plugin-workflow/pkg/command"
"github.com/apache/incubator-kie-tools/packages/kn-plugin-workflow/pkg/common"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -80,6 +81,7 @@ func TestRunCommand(t *testing.T) {
func RunRunTest(t *testing.T, cfgTestInputPrepareCreate CfgTestInputCreate, test cfgTestInputRun) string {
var err error
+ var containerId string
// Create the project
RunCreateTest(t, cfgTestInputPrepareCreate)
@@ -99,7 +101,8 @@ func RunRunTest(t *testing.T, cfgTestInputPrepareCreate CfgTestInputCreate, test
// Run the `run` command
go func() {
defer wg.Done()
- _, err = ExecuteKnWorkflowWithCmd(cmd, transformRunCmdCfgToArgs(test.input)...)
+ containerId, err = ExecuteKnWorkflowWithCmdAndStopContainer(cmd, transformRunCmdCfgToArgs(test.input)...)
+ assert.NotEmpty(t, containerId, "Container ID is empty")
require.Truef(t, err == nil || IsSignalInterrupt(err), "Expected nil error or signal interrupt, got %v", err)
}()
@@ -120,5 +123,18 @@ func RunRunTest(t *testing.T, cfgTestInputPrepareCreate CfgTestInputCreate, test
wg.Wait()
+ stopped := make(chan bool)
+ t.Logf("Checking if container is stopped")
+ assert.NotEmpty(t, containerId, "Container ID is empty")
+ // Check if the container is stopped within a specified time limit.
+ go common.PollContainerStoppedCheck(containerId, pollInterval, stopped)
+ select {
+ case <-stopped:
+ fmt.Println("Project is stopped")
+ case <-time.After(timeout):
+ cmd.Process.Signal(os.Interrupt)
+ t.Fatalf("Test case timed out after %s. The project was not stopped within the specified time.", timeout)
+ }
+
return projectName
}
diff --git a/packages/kn-plugin-workflow/pkg/command/gen_manifest.go b/packages/kn-plugin-workflow/pkg/command/gen_manifest.go
index 53e996f059e..b7dc0b6a7c5 100644
--- a/packages/kn-plugin-workflow/pkg/command/gen_manifest.go
+++ b/packages/kn-plugin-workflow/pkg/command/gen_manifest.go
@@ -52,7 +52,7 @@ func NewGenManifest() *cobra.Command {
{{.Name}} gen-manifest --skip-namespace
# Persist the generated Operator manifests on a specific custom path
- {{.Name}} gen-manifest --custom-generated-manifest-dir=
+ {{.Name}} gen-manifest --custom-generated-manifests-dir=
# Specify a custom subflows files directory. (default: ./subflows)
{{.Name}} gen-manifest --subflows-dir=
diff --git a/packages/kn-plugin-workflow/pkg/command/run.go b/packages/kn-plugin-workflow/pkg/command/run.go
index 27ed3ae5e1c..7cb275c632c 100644
--- a/packages/kn-plugin-workflow/pkg/command/run.go
+++ b/packages/kn-plugin-workflow/pkg/command/run.go
@@ -20,6 +20,7 @@
package command
import (
+ "bufio"
"fmt"
"os"
"sync"
@@ -34,8 +35,12 @@ import (
type RunCmdConfig struct {
PortMapping string
OpenDevUI bool
+ StopContainerOnUserInput bool
}
+const StopContainerMsg = "Press any key to stop the container"
+
func NewRunCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "run",
@@ -56,9 +61,13 @@ func NewRunCommand() *cobra.Command {
# Disable automatic browser launch of SonataFlow Dev UI
{{.Name}} run --open-dev-ui=false
+
+ # Disable stopping the container when the user presses any key
+ {{.Name}} run --stop-container-on-user-input=false
+
`,
SuggestFor: []string{"rnu", "start"}, //nolint:misspell
- PreRunE: common.BindEnv("port", "open-dev-ui"),
+ PreRunE: common.BindEnv("port", "open-dev-ui", "stop-container-on-user-input"),
}
cmd.RunE = func(cmd *cobra.Command, args []string) error {
@@ -67,6 +76,7 @@ func NewRunCommand() *cobra.Command {
cmd.Flags().StringP("port", "p", "8080", "Maps a different host port to the running container port.")
cmd.Flags().Bool("open-dev-ui", true, "Disable automatic browser launch of SonataFlow Dev UI")
+ cmd.Flags().Bool("stop-container-on-user-input", true, "Stop the container when the user presses any key")
cmd.SetHelpFunc(common.DefaultTemplatedHelp)
return cmd
@@ -92,8 +102,9 @@ func run() error {
func runDevCmdConfig() (cfg RunCmdConfig, err error) {
cfg = RunCmdConfig{
- PortMapping: viper.GetString("port"),
- OpenDevUI: viper.GetBool("open-dev-ui"),
+ PortMapping: viper.GetString("port"),
+ OpenDevUI: viper.GetBool("open-dev-ui"),
+ StopContainerOnUserInput: viper.GetBool("stop-container-on-user-input"),
}
return
}
@@ -137,6 +148,36 @@ func runSWFProjectDevMode(containerTool string, cfg RunCmdConfig) (err error) {
pollInterval := 5 * time.Second
common.ReadyCheck(readyCheckURL, pollInterval, cfg.PortMapping, cfg.OpenDevUI)
+ if cfg.StopContainerOnUserInput {
+ if err := stopContainer(containerTool); err != nil {
+ return err
+ }
+ }
+
wg.Wait()
return err
}
+
+func stopContainer(containerTool string) error {
+ fmt.Println(StopContainerMsg)
+
+ reader := bufio.NewReader(os.Stdin)
+
+ _, err := reader.ReadString('\n')
+ if err != nil {
+ return fmt.Errorf("error reading from stdin: %w", err)
+ }
+
+ fmt.Println("⏳ Stopping the container...")
+
+ containerID, err := common.GetContainerID(containerTool)
+ if err != nil {
+ return err
+ }
+ if err := common.StopContainer(containerTool, containerID); err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/packages/kn-plugin-workflow/pkg/common/containers.go b/packages/kn-plugin-workflow/pkg/common/containers.go
index afb2482b69e..44d9e3ab883 100644
--- a/packages/kn-plugin-workflow/pkg/common/containers.go
+++ b/packages/kn-plugin-workflow/pkg/common/containers.go
@@ -389,3 +389,47 @@ func processOutputDuringContainerExecution(cli *client.Client, ctx context.Conte
return nil
}
+
+func PollContainerStoppedCheck(containerID string, interval time.Duration, ready chan<- bool) {
+ for {
+ running, err := IsContainerRunning(containerID)
+ if err != nil {
+ fmt.Printf("Error checking if container %s is running: %s", containerID, err)
+ ready <- false
+ return
+ }
+ if !running {
+ ready <- true
+ return
+ }
+ time.Sleep(interval)
+ }
+}
+
+func IsContainerRunning(containerID string) (bool, error) {
+ if errDocker := CheckDocker(); errDocker == nil {
+ cli, err := getDockerClient()
+ if err != nil {
+ return false, fmt.Errorf("unable to create docker client: %w", err)
+ }
+ containerJSON, err := cli.ContainerInspect(context.Background(), containerID)
+ if err != nil {
+ if client.IsErrNotFound(err) {
+ return false, nil
+ }
+ return false, fmt.Errorf("unable to inspect container %s with docker: %w", containerID, err)
+ }
+ return containerJSON.State.Running, nil
+
+ } else if errPodman := CheckPodman(); errPodman == nil {
+ cmd := exec.Command("podman", "inspect", containerID, "--format", "{{.State.Running}}")
+ output, err := cmd.Output()
+ if err != nil {
+ return false, fmt.Errorf("unable to inspect container %s with podman: %w", containerID, err)
+ }
+ return strings.TrimSpace(string(output)) == "true", nil
+ }
+
+ return false, fmt.Errorf("there is no docker or podman available")
+}
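For reference, a minimal sketch of how the new PollContainerStoppedCheck helper is consumed, mirroring the select/timeout pattern in run_test.go above; the container ID and the durations are placeholders, not values from this PR:

```go
package main

import (
	"fmt"
	"time"

	"github.com/apache/incubator-kie-tools/packages/kn-plugin-workflow/pkg/common"
)

func main() {
	// Hypothetical ID; in the e2e test it is parsed from the CLI output.
	containerID := "abc123"
	stopped := make(chan bool)

	// Poll every 5s until the container is reported as not running (or an error occurs).
	go common.PollContainerStoppedCheck(containerID, 5*time.Second, stopped)

	select {
	case ok := <-stopped:
		fmt.Println("container stopped cleanly:", ok)
	case <-time.After(2 * time.Minute):
		fmt.Println("timed out waiting for the container to stop")
	}
}
```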
diff --git a/packages/kn-plugin-workflow/pkg/specs/openapi_minifier.go b/packages/kn-plugin-workflow/pkg/specs/openapi_minifier.go
index 4fe7ef0747c..0d3f1a6e0e2 100644
--- a/packages/kn-plugin-workflow/pkg/specs/openapi_minifier.go
+++ b/packages/kn-plugin-workflow/pkg/specs/openapi_minifier.go
@@ -27,6 +27,7 @@ import (
"os"
"path"
"path/filepath"
+ "reflect"
"strings"
"github.com/apache/incubator-kie-tools/packages/kn-plugin-workflow/pkg/common"
@@ -158,22 +159,9 @@ func (m *OpenApiMinifier) minifySpecsFile(specFileName string, operations sets.S
return "", fmt.Errorf("❌ ERROR: failed to read OpenAPI document: %w", err)
}
- doc, err := openapi3.NewLoader().LoadFromData(data)
+ doc, err := m.removeUnusedNodes(data, specFile, operations)
if err != nil {
- return "", fmt.Errorf("❌ ERROR: failed to load OpenAPI document: %w", err)
- }
- if doc.Paths == nil {
- return "", fmt.Errorf("OpenAPI document %s has no paths", specFileName)
- }
- for key, value := range doc.Paths.Map() {
- for method, operation := range value.Operations() {
- if !operations.Has(operation.OperationID) {
- value.SetOperation(method, nil)
- }
- }
- if isPathItemEmpty(value) {
- doc.Paths.Delete(key)
- }
+ return "", err
}
minifiedFile, err := m.writeMinifiedFileToDisk(specFile, doc)
@@ -194,6 +182,66 @@ func (m *OpenApiMinifier) minifySpecsFile(specFileName string, operations sets.S
return minifiedFile, nil
}
+func (m *OpenApiMinifier) removeUnusedNodes(data []byte, specFileName string, operations sets.Set[string]) (*openapi3.T, error) {
+ doc, err := openapi3.NewLoader().LoadFromData(data)
+ if err != nil {
+ return nil, fmt.Errorf("❌ ERROR: failed to load OpenAPI document: %w", err)
+ }
+
+ collector, err := newCollector(specFileName)
+ if err != nil {
+ return nil, err
+ }
+
+ keep, err := collector.collect(operations)
+ if err != nil {
+ return nil, err
+ }
+
+ if doc.Paths == nil {
+ return nil, fmt.Errorf("OpenAPI document %s has no paths", specFileName)
+ }
+ for key, value := range doc.Paths.Map() {
+ for method, operation := range value.Operations() {
+ if !operations.Has(operation.OperationID) {
+ value.SetOperation(method, nil)
+ }
+ }
+ if isPathItemEmpty(value) {
+ doc.Paths.Delete(key)
+ }
+ }
+
+ if doc.Components != nil {
+ // note: we have to skip securitySchemes, because they aren't referenced by operations directly via $ref
+ components := map[string]interface{}{
+ "schemas": doc.Components.Schemas,
+ "headers": doc.Components.Headers,
+ "parameters": doc.Components.Parameters,
+ "responses": doc.Components.Responses,
+ "requestBodies": doc.Components.RequestBodies,
+ "examples": doc.Components.Examples,
+ "links": doc.Components.Links,
+ "callbacks": doc.Components.Callbacks,
+ }
+
+ for key, componentMap := range components {
+ if componentMap == nil {
+ continue
+ }
+
+ componentValue := reflect.ValueOf(componentMap)
+ for _, name := range componentValue.MapKeys() {
+ nameStr := name.String()
+ if !keep["components"][key].Has(nameStr) {
+ componentValue.SetMapIndex(name, reflect.Value{})
+ }
+ }
+ }
+ }
+ return doc, nil
+}
+
func (m *OpenApiMinifier) findWorkflowFile() error {
file, err := common.FindSonataFlowFile(workflowExtensionsType)
if err != nil {
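The component pruning in removeUnusedNodes above leans on one reflect detail: calling SetMapIndex with the zero Value deletes a map entry, which lets a single loop handle the differently-typed component maps (schemas, headers, parameters, ...). A small standalone sketch of that technique, with plain int values standing in for the real *openapi3.SchemaRef types:

```go
package main

import (
	"fmt"
	"reflect"
)

// prune deletes every key of m (any map keyed by string) that is not in keep.
// reflect.Value.SetMapIndex with the zero Value removes the entry.
func prune(m interface{}, keep map[string]bool) {
	v := reflect.ValueOf(m)
	for _, k := range v.MapKeys() {
		if !keep[k.String()] {
			v.SetMapIndex(k, reflect.Value{})
		}
	}
}

func main() {
	// Stand-in for doc.Components.Schemas.
	schemas := map[string]int{"Item": 1, "Order": 2, "Unused": 3}
	prune(schemas, map[string]bool{"Item": true, "Order": true})
	fmt.Println(schemas) // map[Item:1 Order:2]
}
```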
diff --git a/packages/kn-plugin-workflow/pkg/specs/openapi_minifier_test.go b/packages/kn-plugin-workflow/pkg/specs/openapi_minifier_test.go
index b3be4bfc93e..05ba5d7335b 100644
--- a/packages/kn-plugin-workflow/pkg/specs/openapi_minifier_test.go
+++ b/packages/kn-plugin-workflow/pkg/specs/openapi_minifier_test.go
@@ -24,6 +24,7 @@ import (
"io"
"os"
"path"
+ "reflect"
"strings"
"testing"
@@ -36,6 +37,7 @@ import (
type spec struct {
file string
+ expected string
initial int
minified int
}
@@ -51,30 +53,30 @@ type minifyTest struct {
func TestOpenAPIMinify(t *testing.T) {
tests := []minifyTest{
{
- workflowFile: "testdata/workflow.sw.yaml", // 4 functions, 2 of them are ref to the same openapi spec
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 3}}, // 5 operations, 3 must left
+ workflowFile: "testdata/workflow.sw.yaml", // 4 functions, 2 of them are ref to the same openapi spec
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 3}}, // 5 operations, 3 must left
specsDir: "specs",
subflowsDir: "subflows",
},
{
- workflowFile: "testdata/workflow.sw.json", // 4 functions, 2 of them are ref to the same openapi spec
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 3}}, // 5 operations, 3 must left
+ workflowFile: "testdata/workflow.sw.json", // 4 functions, 2 of them are ref to the same openapi spec
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 3}}, // 5 operations, 3 must left
specsDir: "specs",
subflowsDir: "subflows",
},
{
- workflowFile: "testdata/workflow-json-openapi.sw.json", // 4 functions, 2 of them are ref to the same openapi spec
- openapiSpecFiles: []spec{{"testdata/flink-openapi-json.json", 5, 3}}, // 5 operations, 3 must left
+ workflowFile: "testdata/workflow-json-openapi.sw.json", // 4 functions, 2 of them are ref to the same openapi spec
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi-json.json", initial: 5, minified: 3}}, // 5 operations, 3 must left
specsDir: "specs",
subflowsDir: "subflows",
},
{
workflowFile: "testdata/workflow2.sw.yaml", // 4 functions, 1 per openapi spec file
openapiSpecFiles: []spec{
- {"testdata/flink1-openapi.yaml", 3, 1},
- {"testdata/flink2-openapi.yaml", 3, 1},
- {"testdata/flink3-openapi.yaml", 3, 1},
- {"testdata/flink4-openapi.yaml", 3, 1}},
+ {file: "testdata/flink1-openapi.yaml", initial: 3, minified: 1},
+ {file: "testdata/flink2-openapi.yaml", initial: 3, minified: 1},
+ {file: "testdata/flink3-openapi.yaml", initial: 3, minified: 1},
+ {file: "testdata/flink4-openapi.yaml", initial: 3, minified: 1}},
specsDir: "specs",
subflowsDir: "subflows",
},
@@ -86,26 +88,26 @@ func TestOpenAPIMinify(t *testing.T) {
},
{
workflowFile: "testdata/workflow-empty.sw.yaml", // check all operations are removed
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 0}},
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 0}},
specsDir: "specs",
subflowsDir: "subflows",
},
{
workflowFile: "testdata/workflow-mySpecsDir.sw.yaml", // check all operations are removed, with different specs dir
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 3}},
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 3}},
specsDir: "mySpecsDir",
subflowsDir: "subflows",
},
{
workflowFile: "testdata/workflow-mySpecsDir-one-finction.sw.yaml", // check all operations are removed, with different specs dir
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 2}},
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 2}},
specsDir: "mySpecsDir",
subflowsDir: "subflows",
subflows: []string{"testdata/subflow-mySpecsDir.sw.yaml"},
},
{
workflowFile: "testdata/workflow-empty.sw.yaml", // check all operations are removed, with different subflow dir
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 0}},
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 0}},
specsDir: "mySpecsDir",
subflowsDir: "subflows",
},
@@ -117,35 +119,35 @@ func TestOpenAPIMinify(t *testing.T) {
},
{
workflowFile: "testdata/workflow-empty2.sw.yaml", // check functions is on subflow
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 2}},
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 2}},
specsDir: "specs",
subflowsDir: "subflows",
subflows: []string{"testdata/subflow.sw.yaml"},
},
{
workflowFile: "testdata/workflow-empty2.sw.yaml", // check functions is on subflow, with different subflow and specs dirs
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 2}},
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 2}},
specsDir: "mySpecsDir",
subflowsDir: "mySubFlowDir",
subflows: []string{"testdata/subflow-mySpecsDir.sw.yaml"},
},
{
- workflowFile: "testdata/workflow-greeting.sw.yaml", // check we can process subflows with the same file name but different extensions
- openapiSpecFiles: []spec{{"testdata/greetingAPI.yaml", 3, 1}},
+ workflowFile: "testdata/workflow-greeting.sw.yaml", // check we can process subflows with the same file name but different extensions
+ openapiSpecFiles: []spec{{file: "testdata/greetingAPI.yaml", initial: 3, minified: 1}},
specsDir: "specs",
subflowsDir: "custom_subflows",
subflows: []string{"testdata/hello.sw.json", "testdata/hello.sw.yaml"}, // 2 subflows, 1 of them has a function that uses the greetingAPI.yaml
},
{
- workflowFile: "testdata/workflow-greeting.sw.yaml", // check we can process subflows with the same file name but different extensions
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 2}},
+ workflowFile: "testdata/workflow-greeting.sw.yaml", // check we can process subflows with the same file name but different extensions
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 2}},
specsDir: "custom_specs",
subflowsDir: "custom_subflows",
subflows: []string{"testdata/subflow-custom.sw.json", "testdata/subflow-custom.sw.yaml"}, // 2 subflows, each one has a function that uses the flink-openapi.yaml
},
{
- workflowFile: "testdata/workflow-subflow-custom.sw.yaml", // workflow with a function that uses a subflow with a function that uses the flink-openapi.yaml
- openapiSpecFiles: []spec{{"testdata/flink-openapi.yaml", 5, 3}},
+ workflowFile: "testdata/workflow-subflow-custom.sw.yaml", // workflow with a function that uses a subflow with a function that uses the flink-openapi.yaml
+ openapiSpecFiles: []spec{{file: "testdata/flink-openapi.yaml", initial: 5, minified: 3}},
specsDir: "custom_specs",
subflowsDir: "custom_subflows",
subflows: []string{"testdata/subflow-custom.sw.json", "testdata/subflow-custom.sw.yaml"}, // 2 subflows, each one has a function that uses the flink-openapi.yaml
@@ -159,30 +161,8 @@ func TestOpenAPIMinify(t *testing.T) {
for _, test := range tests {
t.Run(test.workflowFile, func(t *testing.T) {
- if err := os.Mkdir(test.specsDir, 0755); err != nil {
- t.Fatalf("Error creating specs directory: %v", err)
- }
- defer os.RemoveAll(test.specsDir)
- if err := copyFile(test.workflowFile, path.Base(test.workflowFile)); err != nil {
- t.Fatalf("Error copying workflow file: %v", err)
- }
- defer os.Remove(path.Base(test.workflowFile))
- if len(test.subflows) > 0 {
- if err := os.Mkdir(test.subflowsDir, 0755); err != nil {
- t.Fatalf("Error creating subflows directory: %v", err)
- }
- defer os.RemoveAll(test.subflowsDir)
- for _, subflow := range test.subflows {
- if err := copyFile(subflow, path.Join(test.subflowsDir, path.Base(subflow))); err != nil {
- t.Fatalf("Error copying subflow file: %v", err)
- }
- }
- }
- for _, openapiSpecFile := range test.openapiSpecFiles {
- if err := copyFile(openapiSpecFile.file, path.Join(test.specsDir, path.Base(openapiSpecFile.file))); err != nil {
- t.Fatalf("Error copying openapi spec file: %v", err)
- }
- }
+ prepareStructure(t, test)
+ defer cleanUp(t, test)
minifiedfiles, err := NewMinifier(&OpenApiMinifierOpts{
SpecsDir: path.Join(current, test.specsDir),
@@ -197,6 +177,160 @@ func TestOpenAPIMinify(t *testing.T) {
}
}
+// These tests contain OpenAPI specs whose operations reference reusable components via internal $ref pointers
+func TestOpenAPIMinifyRefs(t *testing.T) {
+ tests := []minifyTest{
+ {
+ workflowFile: "testdata/refs/workflow.sw.yaml",
+ openapiSpecFiles: []spec{{file: "testdata/refs/openapi.yaml", expected: "testdata/refs/openapi.expected.yaml", initial: 5, minified: 3}},
+ specsDir: "specs",
+ subflowsDir: "subflows",
+ },
+ {
+ workflowFile: "testdata/refs/workflow.sw.yaml",
+ openapiSpecFiles: []spec{{file: "testdata/refs/openapi.yaml", expected: "testdata/refs/openapi.expected.yaml", initial: 5, minified: 3}},
+ specsDir: "my_specs",
+ subflowsDir: "subflows",
+ },
+ {
+ workflowFile: "testdata/refs/emptyworkflow.sw.yaml",
+ openapiSpecFiles: []spec{{file: "testdata/refs/openapi1.yaml", expected: "testdata/refs/openapi1.expected.yaml", initial: 1, minified: 1},
+ {file: "testdata/refs/openapi2.yaml", expected: "testdata/refs/openapi2.expected.yaml", initial: 1, minified: 1}},
+ specsDir: "specs",
+ subflowsDir: "subflows",
+ },
+ {
+ workflowFile: "testdata/refs/emptyworkflow.sw.yaml",
+ openapiSpecFiles: []spec{{file: "testdata/refs/openapi1.yaml", expected: "testdata/refs/openapi1.expected.yaml", initial: 1, minified: 1},
+ {file: "testdata/refs/openapi2.yaml", expected: "testdata/refs/openapi2.expected.yaml", initial: 1, minified: 1}},
+ specsDir: "my_specs",
+ subflowsDir: "subflows",
+ },
+ {
+ workflowFile: "testdata/refs/emptyworkflow.sw.yaml",
+ openapiSpecFiles: []spec{{file: "testdata/refs/openapi1.yaml", expected: "testdata/refs/openapi1.expected.yaml", initial: 1, minified: 1},
+ {file: "testdata/refs/openapi2.yaml", expected: "testdata/refs/openapi2.expected.yaml", initial: 1, minified: 1}},
+ specsDir: "my_specs",
+ subflowsDir: "custom_specs",
+ subflows: []string{"testdata/refs//subflow2.sw.yaml", "testdata/refs/subflow2.sw.yaml"}, // 2 subflows, each one has a function that uses the flink-openapi.yaml
+ },
+ {
+ workflowFile: "testdata/refs/emptyworkflow.sw.yaml",
+ openapiSpecFiles: []spec{{file: "testdata/refs/openapi.yaml", expected: "testdata/refs/openapi-subflow34.expected.yaml", initial: 5, minified: 2}},
+ specsDir: "specs",
+ subflowsDir: "custom_specs",
+ subflows: []string{"testdata/refs//subflow3.sw.yaml", "testdata/refs/subflow4.sw.yaml"}, // 2 subflows, each one has a function that uses the flink-openapi.yaml
+ },
+ }
+
+ current, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("Error getting current directory: %v", err)
+ }
+
+ for _, test := range tests {
+ t.Run(test.workflowFile, func(t *testing.T) {
+ prepareStructure(t, test)
+ defer cleanUp(t, test)
+
+ minifiedfiles, err := NewMinifier(&OpenApiMinifierOpts{
+ SpecsDir: path.Join(current, test.specsDir),
+ SubflowsDir: path.Join(current, test.subflowsDir),
+ }).Minify()
+
+ if err != nil {
+ t.Fatalf("Error minifying openapi specs: %v", err)
+ }
+ testFiles := map[string]spec{}
+
+ for _, spec := range test.openapiSpecFiles {
+ testFiles[path.Base(spec.file)] = spec
+ }
+
+ for k, v := range minifiedfiles {
+ expected := testFiles[k].expected
+ assert.Nil(t, validateOpenAPISpec(v), "Minified file %s is not a valid OpenAPI spec", v)
+ assert.True(t, compareYAMLFiles(t, v, expected), "Minified file %s is not equal to the expected file %s", v, expected)
+ }
+ })
+ }
+}
+
+func validateOpenAPISpec(filePath string) error {
+ loader := openapi3.NewLoader()
+ doc, err := loader.LoadFromFile(filePath)
+ if err != nil {
+ return fmt.Errorf("failed to load OpenAPI spec from file: %v", err)
+ }
+
+ if err := doc.Validate(loader.Context); err != nil {
+ return fmt.Errorf("OpenAPI spec is invalid: %v", err)
+ }
+ return nil
+}
+
+func compareYAMLFiles(t *testing.T, file1Path, file2Path string) bool {
+ data1, err := os.ReadFile(file1Path)
+ if err != nil {
+ t.Fatalf("failed to read file %s: %v", file1Path, err)
+ }
+
+ data2, err := os.ReadFile(file2Path)
+ if err != nil {
+ t.Fatalf("failed to read file %s: %v", file2Path, err)
+ }
+
+ var obj1, obj2 interface{}
+ if err := yaml.Unmarshal(data1, &obj1); err != nil {
+ t.Fatalf("failed to unmarshal file %s: %v", file1Path, err)
+ }
+ if err := yaml.Unmarshal(data2, &obj2); err != nil {
+ t.Fatalf("failed to unmarshal file %s: %v", file2Path, err)
+ }
+
+ return reflect.DeepEqual(obj1, obj2)
+}
+
+func prepareStructure(t *testing.T, test minifyTest) {
+ if err := os.Mkdir(test.specsDir, 0755); err != nil {
+ t.Fatalf("Error creating specs directory: %v", err)
+ }
+ if err := copyFile(test.workflowFile, path.Base(test.workflowFile)); err != nil {
+ t.Fatalf("Error copying workflow file: %v", err)
+ }
+ if len(test.subflows) > 0 {
+ if err := os.Mkdir(test.subflowsDir, 0755); err != nil {
+ t.Fatalf("Error creating subflows directory: %v", err)
+ }
+ for _, subflow := range test.subflows {
+ if err := copyFile(subflow, path.Join(test.subflowsDir, path.Base(subflow))); err != nil {
+ t.Fatalf("Error copying subflow file: %v", err)
+ }
+ }
+ }
+ for _, openapiSpecFile := range test.openapiSpecFiles {
+ if err := copyFile(openapiSpecFile.file, path.Join(test.specsDir, path.Base(openapiSpecFile.file))); err != nil {
+ t.Fatalf("Error copying openapi spec file: %v", err)
+ }
+ }
+}
+
+func cleanUp(t *testing.T, test minifyTest) {
+ err := os.Remove(path.Base(test.workflowFile))
+ if err != nil {
+ t.Fatalf("Error removing workflow file: %v", err)
+ }
+ err = os.RemoveAll(test.specsDir)
+ if err != nil {
+ t.Fatalf("Error removing specs directory: %v", err)
+ }
+ err = os.RemoveAll(test.subflowsDir)
+ if err != nil {
+ t.Fatalf("Error removing subflows directory: %v", err)
+ }
+}
+
// checkInitial checks the initial number of operations in the openapi specs
func checkInitial(t *testing.T, test minifyTest) {
for _, spec := range test.openapiSpecFiles {
diff --git a/packages/kn-plugin-workflow/pkg/specs/ref_collector.go b/packages/kn-plugin-workflow/pkg/specs/ref_collector.go
new file mode 100644
index 00000000000..90011103c2b
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/ref_collector.go
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package specs
+
+import (
+ "fmt"
+ "gopkg.in/yaml.v3"
+ "k8s.io/apimachinery/pkg/util/sets"
+ "os"
+ "strings"
+)
+
+type collector struct {
+ filename string
+ refs sets.Set[string]
+ doc map[string]any
+}
+
+type node struct {
+ section string
+ subsection string
+ object string
+}
+
+func newCollector(file string) (*collector, error) {
+ data, err := os.ReadFile(file)
+ if err != nil {
+ return nil, fmt.Errorf("❌ ERROR: failed to read OpenAPI spec file %s: %w", file, err)
+ }
+
+ m := make(map[string]any)
+ err = yaml.Unmarshal(data, &m)
+ if err != nil {
+ return nil, fmt.Errorf("❌ ERROR: failed to unmarshal OpenAPI spec file %s: %w", file, err)
+ }
+
+ return &collector{filename: file, doc: m, refs: sets.Set[string]{}}, nil
+}
+
+func (c *collector) collect(operations sets.Set[string]) (map[string]map[string]sets.Set[string], error) {
+ for operation := range operations {
+ operationNode, err := c.findByOperationId(operation, c.doc)
+ if err != nil {
+ return nil, err
+ }
+ mapEntry(operationNode.(map[string]interface{}), c.refs)
+ }
+ visited, err := c.collectDependentRefs()
+ if err != nil {
+ return nil, fmt.Errorf("❌ ERROR: failed to collect dependent refs in OpenAPI spec file %s: %w", c.filename, err)
+ }
+
+ preserve := map[string]map[string]sets.Set[string]{}
+ for ref := range visited {
+ node, err := c.parseRef(ref)
+ if err != nil {
+ return nil, fmt.Errorf("❌ ERROR: failed to parse ref at OpenAPI spec file %s: %w", c.filename, err)
+ }
+ if preserve[node.section] == nil {
+ preserve[node.section] = map[string]sets.Set[string]{}
+ }
+ if preserve[node.section][node.subsection] == nil {
+ preserve[node.section][node.subsection] = sets.Set[string]{}
+ }
+ preserve[node.section][node.subsection].Insert(node.object)
+ }
+ return preserve, nil
+}
+
+func (c *collector) collectDependentRefs() (sets.Set[string], error) {
+ var visited = sets.Set[string]{}
+ for c.refs.Len() > 0 {
+ operation, _ := c.refs.PopAny()
+ if !visited.Has(operation) {
+ visited.Insert(operation)
+ var current = sets.Set[string]{}
+ node, err := c.findByRefObject(operation, c.doc)
+ if err != nil {
+ return nil, err
+ }
+ mapEntry(node, current)
+ for current.Len() > 0 {
+ operation, _ := current.PopAny()
+ if !visited.Has(operation) {
+ c.refs.Insert(operation)
+ }
+ }
+ }
+ }
+ return visited, nil
+}
+
+func (c *collector) parseRef(ref string) (node, error) {
+ if !strings.HasPrefix(ref, "#/") {
+ return node{}, fmt.Errorf("invalid $ref: %s, must start with #/ at OpenAPI spec file %s", ref, c.filename)
+ }
+ parts := strings.Split(ref, "/")
+ if len(parts) < 4 {
+ return node{}, fmt.Errorf("invalid $ref %s at OpenAPI spec file %s", ref, c.filename)
+ }
+ return node{section: parts[1], subsection: parts[2], object: parts[3]}, nil
+}
+
+func (c *collector) findByRefObject(ref string, m map[string]interface{}) (map[string]interface{}, error) {
+ parsedRef, err := c.parseRef(ref)
+ if err != nil {
+ return nil, err
+ }
+ section, ok := m[parsedRef.section].(map[string]interface{})
+ if !ok {
+ return nil, fmt.Errorf("OpenAPI spec file %s has no such section: %s", c.filename, ref)
+ }
+ subsection, ok := section[parsedRef.subsection].(map[string]interface{})
+ if !ok {
+ return nil, fmt.Errorf("OpenAPI spec file %s has no such subsection: %s", c.filename, ref)
+ }
+ object, ok := subsection[parsedRef.object].(map[string]interface{})
+ if !ok {
+ return nil, fmt.Errorf("OpenAPI spec file %s has no such object: %s", c.filename, ref)
+ }
+
+ return object, nil
+}
+
+func (c *collector) findByOperationId(operationId string, m map[string]interface{}) (any, error) {
+ paths, ok := m["paths"].(map[string]interface{})
+ if !ok {
+ return nil, fmt.Errorf("OpenAPI spec file %s has no paths", c.filename)
+ }
+ for _, pathItem := range paths {
+ operations, ok := pathItem.(map[string]interface{})
+ if !ok {
+ continue
+ }
+ for _, operationDetails := range operations {
+ operation, ok := operationDetails.(map[string]interface{})
+ if !ok {
+ continue
+ }
+ if operation["operationId"] == operationId {
+ return operation, nil
+ }
+ }
+ }
+ return nil, fmt.Errorf("operationId %s not found at OpenAPI spec file %s", operationId, c.filename)
+}
+
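+// entry, sliceEntry and mapEntry recursively walk an arbitrary YAML node and insert every
+// $ref value they encounter into refs.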
+func entry(e any, refs sets.Set[string]) {
+ switch v := e.(type) {
+ case map[string]interface{}:
+ mapEntry(v, refs)
+ case []interface{}:
+ sliceEntry(v, refs)
+ default:
+ return
+ }
+}
+
+func sliceEntry(s []interface{}, refs sets.Set[string]) {
+ for _, v := range s {
+ entry(v, refs)
+ }
+}
+
+func mapEntry(m map[string]interface{}, refs sets.Set[string]) {
+ for k, v := range m {
+ if k == "$ref" {
+ refs.Insert(v.(string))
+ continue
+ }
+ entry(v, refs)
+ }
+}
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/emptyworkflow.sw.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/emptyworkflow.sw.yaml
new file mode 100644
index 00000000000..bd4c84d6c89
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/emptyworkflow.sw.yaml
@@ -0,0 +1,29 @@
+id: flink-workflow
+version: "1.0"
+specVersion: "0.8"
+name: flink workflow
+description: Create a starter flink job management
+start: Get Flink Jars
+states:
+ - name: Get Flink Jars
+ type: operation
+ actionMode: sequential
+ actions:
+ - name: Get Flink Jars
+ functionRef:
+ refName: getJars
+ transition: Run Flink Job
+ - name: Run Flink Job
+ type: operation
+ actionMode: sequential
+ actions:
+ - actionDataFilter:
+ useResults: true
+ name: Run Flink Job
+ functionRef:
+ refName: runFlinkJob
+ arguments:
+ jarid: 72ecfc25-43ca-4f53-a4ee-1aaf93ac709a_flink-streaming-1.0.jar
+ entry-class: com.demo.flink.streaming.StreamingJob
+ end:
+ terminate: true
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi-subflow34.expected.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi-subflow34.expected.yaml
new file mode 100644
index 00000000000..2e9cf85ce25
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi-subflow34.expected.yaml
@@ -0,0 +1,115 @@
+info:
+ title: Example API
+ version: 1.0.0
+openapi: 3.0.3
+paths:
+ /items:
+ get:
+ operationId: getItems
+ parameters:
+ - in: query
+ name: filter
+ schema:
+ $ref: "#/components/schemas/FilterSchema"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ items:
+ $ref: "#/components/schemas/Item"
+ type: array
+ description: Successful response
+ "201":
+ description: Created
+ links:
+ GetItemById:
+ $ref: "#/components/links/GetItemById"
+ summary: Get a list of items
+ tags:
+ - items
+ /orders:
+ get:
+ operationId: getOrders
+ parameters:
+ - $ref: "#/components/parameters/OrderId"
+ - $ref: "#/components/parameters/Limit"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OrdersList"
+ description: List of orders
+ headers:
+ X-Rate-Limit-Remaining:
+ $ref: "#/components/headers/RateLimitHeader"
+ summary: Get a list of orders
+components:
+ parameters:
+ Limit:
+ description: Record limit
+ in: query
+ name: limit
+ schema:
+ type: integer
+ OrderId:
+ description: Order identifier
+ in: query
+ name: orderId
+ required: true
+ schema:
+ type: string
+ schemas:
+ FilterSchema:
+ properties:
+ category:
+ type: string
+ status:
+ type: string
+ type: object
+ Item:
+ properties:
+ id:
+ type: string
+ name:
+ type: string
+ type: object
+ Order:
+ type: object
+ properties:
+ id:
+ type: string
+ status:
+ type: string
+ OrdersList:
+ type: array
+ items:
+ $ref: "#/components/schemas/Order"
+ links:
+ GetItemById:
+ operationId: getItem
+ parameters:
+ itemId: $response.body#/id
+ headers:
+ RateLimitHeader:
+ description: Remaining request limit
+ schema:
+ type: integer
+ securitySchemes:
+ ApiKeyAuth:
+ type: apiKey
+ in: header
+ name: X-API-Key
+ OAuth2Security:
+ type: oauth2
+ flows:
+ authorizationCode:
+ authorizationUrl: https://example.com/oauth/authorize
+ tokenUrl: https://example.com/oauth/token
+ scopes:
+ read: Read access
+ write: Write access
+tags:
+ - description: Item operations
+ name: items
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi.expected.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi.expected.yaml
new file mode 100644
index 00000000000..83ff5bd3835
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi.expected.yaml
@@ -0,0 +1,186 @@
+info:
+ title: Example API
+ version: 1.0.0
+openapi: 3.0.3
+paths:
+ /items:
+ get:
+ operationId: getItems
+ parameters:
+ - in: query
+ name: filter
+ schema:
+ $ref: "#/components/schemas/FilterSchema"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ items:
+ $ref: "#/components/schemas/Item"
+ type: array
+ description: Successful response
+ "201":
+ description: Created
+ links:
+ GetItemById:
+ $ref: "#/components/links/GetItemById"
+ summary: Get a list of items
+ tags:
+ - items
+ /orders:
+ get:
+ operationId: getOrders
+ parameters:
+ - $ref: "#/components/parameters/OrderId"
+ - $ref: "#/components/parameters/Limit"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OrdersList"
+ description: List of orders
+ headers:
+ X-Rate-Limit-Remaining:
+ $ref: "#/components/headers/RateLimitHeader"
+ summary: Get a list of orders
+ post:
+ operationId: createOrder
+ requestBody:
+ $ref: "#/components/requestBodies/CreateOrderRequest"
+ responses:
+ "201":
+ $ref: "#/components/responses/OrderCreatedResponse"
+ summary: Create a new order
+ /orders/{orderId}:
+ get:
+ operationId: getOrder
+ parameters:
+ - in: path
+ name: orderId
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ $ref: "#/components/responses/OrderResponse"
+ "404":
+ $ref: "#/components/responses/NotFoundResponse"
+ summary: Get order information
+components:
+ examples:
+ OrderExample:
+ summary: Order example
+ value:
+ id: "12345"
+ status: processing
+ headers:
+ RateLimitHeader:
+ description: Remaining request limit
+ schema:
+ type: integer
+ links:
+ GetItemById:
+ operationId: getItem
+ parameters:
+ itemId: $response.body#/id
+ parameters:
+ Limit:
+ description: Record limit
+ in: query
+ name: limit
+ schema:
+ type: integer
+ OrderId:
+ description: Order identifier
+ in: query
+ name: orderId
+ required: true
+ schema:
+ type: string
+ requestBodies:
+ CreateOrderRequest:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CreateOrderSchema"
+ description: Data to create a new order
+ responses:
+ NotFoundResponse:
+ content:
+ application/json:
+ schema:
+ properties:
+ error:
+ type: string
+ type: object
+ description: Resource not found
+ OrderCreatedResponse:
+ content:
+ application/json:
+ schema:
+ properties:
+ id:
+ type: string
+ type: object
+ description: Order created
+ OrderResponse:
+ content:
+ application/json:
+ examples:
+ orderExample:
+ $ref: "#/components/examples/OrderExample"
+ schema:
+ $ref: "#/components/schemas/Order"
+ description: Order information
+ schemas:
+ CreateOrderSchema:
+ properties:
+ productId:
+ type: string
+ quantity:
+ type: integer
+ type: object
+ FilterSchema:
+ properties:
+ category:
+ type: string
+ status:
+ type: string
+ type: object
+ Item:
+ properties:
+ id:
+ type: string
+ name:
+ type: string
+ type: object
+ Order:
+ properties:
+ id:
+ type: string
+ status:
+ type: string
+ type: object
+ OrdersList:
+ items:
+ $ref: "#/components/schemas/Order"
+ type: array
+ securitySchemes:
+ ApiKeyAuth:
+ in: header
+ name: X-API-Key
+ type: apiKey
+ OAuth2Security:
+ flows:
+ authorizationCode:
+ authorizationUrl: https://example.com/oauth/authorize
+ scopes:
+ read: Read access
+ write: Write access
+ tokenUrl: https://example.com/oauth/token
+ type: oauth2
+tags:
+ - description: Item operations
+ name: items
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi.yaml
new file mode 100644
index 00000000000..a8370ccf447
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi.yaml
@@ -0,0 +1,320 @@
+openapi: 3.0.3
+info:
+ title: Example API
+ version: 1.0.0
+
+paths:
+ # 1. $ref within a parameter using a schema
+ /items:
+ get:
+ summary: Get a list of items
+ operationId: getItems
+ tags:
+ - items
+ parameters:
+ - name: filter
+ in: query
+ schema:
+ $ref: "#/components/schemas/FilterSchema"
+ responses:
+ "200":
+ description: Successful response
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/Item"
+ "201":
+ description: Created
+ links:
+ GetItemById:
+ $ref: "#/components/links/GetItemById"
+
+ # 2. $ref within response headers
+ /orders:
+ get:
+ summary: Get a list of orders
+ operationId: getOrders
+ parameters:
+ - $ref: "#/components/parameters/OrderId"
+ - $ref: "#/components/parameters/Limit"
+ responses:
+ "200":
+ description: List of orders
+ headers:
+ X-Rate-Limit-Remaining:
+ $ref: "#/components/headers/RateLimitHeader"
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OrdersList"
+ post:
+ summary: Create a new order
+ operationId: createOrder
+ requestBody:
+ $ref: "#/components/requestBodies/CreateOrderRequest"
+ responses:
+ "201":
+ $ref: "#/components/responses/OrderCreatedResponse"
+
+ # 3. $ref within the request body schema
+ /orders/{orderId}:
+ get:
+ summary: Get order information
+ operationId: getOrder
+ parameters:
+ - name: orderId
+ in: path
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ $ref: "#/components/responses/OrderResponse"
+ "404":
+ $ref: "#/components/responses/NotFoundResponse"
+
+ # 4. $ref for examples in responses
+ put:
+ summary: Update an order
+ operationId: updateOrder
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/UpdateOrder"
+ examples:
+ updateExample:
+ $ref: "#/components/examples/UpdateOrderExample"
+ responses:
+ "200":
+ $ref: "#/components/responses/OrderResponse"
+
+ # 5. $ref within callbacks
+ /webhooks:
+ post:
+ summary: Set a webhook
+ operationId: setWebhook
+ requestBody:
+ $ref: "#/components/requestBodies/WebhookRequest"
+ responses:
+ "201":
+ description: Webhook set
+ callbacks:
+ onEvent:
+ $ref: "#/components/callbacks/EventCallback"
+
+ # 7. Using security schemes
+ /secure-data:
+ get:
+ summary: Get secure data
+ operationId: getSecureData
+ security:
+ - ApiKeyAuth: []
+ - OAuth2Security: ["read", "write"]
+ responses:
+ "200":
+ description: Successful response
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ data:
+ type: string
+
+components:
+ # 1. Schemas used in parameters and request bodies
+ schemas:
+ FilterSchema:
+ type: object
+ properties:
+ status:
+ type: string
+ category:
+ type: string
+ Item:
+ type: object
+ properties:
+ id:
+ type: string
+ name:
+ type: string
+ CreateOrderSchema:
+ type: object
+ properties:
+ productId:
+ type: string
+ quantity:
+ type: integer
+ UpdateOrder:
+ type: object
+ properties:
+ status:
+ type: string
+ Order:
+ type: object
+ properties:
+ id:
+ type: string
+ status:
+ type: string
+ OrdersList:
+ type: array
+ items:
+ $ref: "#/components/schemas/Order"
+ Event:
+ type: object
+ properties:
+ event:
+ type: string
+
+ # 2. Parameters
+ parameters:
+ OrderId:
+ name: orderId
+ in: query
+ description: Order identifier
+ required: true
+ schema:
+ type: string
+ Limit:
+ name: limit
+ in: query
+ description: Record limit
+ required: false
+ schema:
+ type: integer
+
+ # 3. Headers
+ headers:
+ RateLimitHeader:
+ description: Remaining request limit
+ schema:
+ type: integer
+
+ # 4. Request bodies
+ requestBodies:
+ CreateOrderRequest:
+ description: Data to create a new order
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CreateOrderSchema"
+ WebhookRequest:
+ description: Data to set a webhook
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ url:
+ type: string
+
+ # 6. Responses
+ responses:
+ OrderCreatedResponse:
+ description: Order created
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ id:
+ type: string
+ OrderCreatedResponseWithLink:
+ description: Order created
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ id:
+ type: string
+ links:
+ GetOrderById:
+ $ref: "#/components/links/GetOrderById"
+ OrdersResponse:
+ description: List of orders
+ headers:
+ X-Rate-Limit-Remaining:
+ $ref: "#/components/headers/RateLimitHeader"
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OrdersList"
+ OrderResponse:
+ description: Order information
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Order"
+ examples:
+ orderExample:
+ $ref: "#/components/examples/OrderExample"
+ NotFoundResponse:
+ description: Resource not found
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ error:
+ type: string
+
+ # 7. Examples for responses
+ examples:
+ OrderExample:
+ summary: Order example
+ value:
+ id: "12345"
+ status: "processing"
+ UpdateOrderExample:
+ summary: Order update example
+ value:
+ status: "shipped"
+
+ # 8. Callbacks
+ callbacks:
+ EventCallback:
+ "{$request.body#/callbackUrl}":
+ post:
+ summary: Event received
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Event"
+ responses:
+ "200":
+ description: Event processed
+
+ # 9. Links
+ links:
+ GetOrderById:
+ operationId: getOrder
+ parameters:
+ orderId: "$response.body#/id"
+ GetItemById:
+ operationId: getItem
+ parameters:
+ itemId: "$response.body#/id"
+ # 10. Security schemes
+ securitySchemes:
+ ApiKeyAuth:
+ type: apiKey
+ in: header
+ name: X-API-Key
+ OAuth2Security:
+ type: oauth2
+ flows:
+ authorizationCode:
+ authorizationUrl: https://example.com/oauth/authorize
+ tokenUrl: https://example.com/oauth/token
+ scopes:
+ read: Read access
+ write: Write access
+tags:
+ - name: items
+ description: Item operations
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi1.expected.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi1.expected.yaml
new file mode 100644
index 00000000000..7d5905f2cc9
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi1.expected.yaml
@@ -0,0 +1,42 @@
+openapi: 3.0.3
+info:
+ title: Example API
+ version: 1.0.0
+
+paths:
+ /items:
+ get:
+ summary: Get a list of items
+ operationId: getItems
+ tags:
+ - items
+ parameters:
+ - name: filter
+ in: query
+ schema:
+ $ref: "#/components/schemas/FilterSchema"
+ responses:
+ "200":
+ description: Successful response
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/Item"
+components:
+ schemas:
+ Item:
+ type: object
+ properties:
+ id:
+ type: integer
+ name:
+ type: string
+ FilterSchema:
+ type: object
+ properties:
+ type:
+ type: string
+ description:
+ type: string
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi1.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi1.yaml
new file mode 100644
index 00000000000..3f87553a10c
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi1.yaml
@@ -0,0 +1,75 @@
+openapi: 3.0.3
+info:
+ title: Example API
+ version: 1.0.0
+
+paths:
+ /items:
+ get:
+ summary: Get a list of items
+ operationId: getItems
+ tags:
+ - items
+ parameters:
+ - name: filter
+ in: query
+ schema:
+ $ref: "#/components/schemas/FilterSchema"
+ responses:
+ "200":
+ description: Successful response
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/Item"
+components:
+ schemas:
+ Item:
+ type: object
+ properties:
+ id:
+ type: integer
+ name:
+ type: string
+ FilterSchema:
+ type: object
+ properties:
+ type:
+ type: string
+ description:
+ type: string
+ OrdersList:
+ type: object
+ properties:
+ orders:
+ type: array
+ items:
+ $ref: "#/components/schemas/Order"
+ Order:
+ type: object
+ properties:
+ id:
+ type: integer
+
+ Jars:
+ type: object
+ properties:
+ jarid:
+ type: string
+ jarname:
+ type: string
+ jarversion:
+ type: string
+ Message:
+ type: object
+ properties:
+ message:
+ type: string
+ requestBodies:
+ HelloWorld:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Message"
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi2.expected.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi2.expected.yaml
new file mode 100644
index 00000000000..2d2ba09440b
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi2.expected.yaml
@@ -0,0 +1,36 @@
+openapi: 3.0.3
+info:
+ title: Example API
+ version: 1.0.0
+
+paths:
+ /orders:
+ get:
+ summary: Get a list of items
+ operationId: getOrders
+ responses:
+ "200":
+ description: List of orders
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OrdersList"
+components:
+ schemas:
+ OrdersList:
+ type: object
+ properties:
+ orders:
+ type: array
+ items:
+ $ref: "#/components/schemas/Order"
+ Order:
+ type: object
+ properties:
+ id:
+ type: integer
+ securitySchemes:
+ ApiKeyAuth:
+ type: apiKey
+ in: header
+ name: X-API-Key
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi2.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi2.yaml
new file mode 100644
index 00000000000..9365ac5ad22
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/openapi2.yaml
@@ -0,0 +1,64 @@
+openapi: 3.0.3
+info:
+ title: Example API
+ version: 1.0.0
+
+paths:
+ /orders:
+ get:
+ summary: Get a list of items
+ operationId: getOrders
+ responses:
+ "200":
+ description: List of orders
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OrdersList"
+
+components:
+ schemas:
+ OrdersList:
+ type: object
+ properties:
+ orders:
+ type: array
+ items:
+ $ref: "#/components/schemas/Order"
+ Order:
+ type: object
+ properties:
+ id:
+ type: integer
+ parameters:
+ OrderId:
+ name: orderId
+ in: path
+ required: true
+ schema:
+ type: integer
+ Limit:
+ name: limit
+ in: query
+ required: false
+ schema:
+ type: integer
+ links:
+ GetItemById:
+ operationId: getItemById
+ securitySchemes:
+ ApiKeyAuth:
+ type: apiKey
+ in: header
+ name: X-API-Key
+ requestBodies:
+ CreateOrderRequest:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Order"
+ headers:
+ RateLimitHeader:
+ description: Rate limit remaining
+ schema:
+ type: integer
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow1.sw.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow1.sw.yaml
new file mode 100644
index 00000000000..3a034500437
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow1.sw.yaml
@@ -0,0 +1,21 @@
+---
+id: "helloworldyaml1"
+version: "1.0"
+specVersion: "0.8"
+name: "Hello World Workflow"
+description: "JSON based hello world workflow"
+start: "Inject Hello World SubFlow"
+functions:
+ - name: getItems
+ operation: custom_specs/openapi1.yaml#getItems
+states:
+ - name: "Inject Hello World SubFlow"
+ type: "inject"
+ data:
+ greeting-subflow: "Hello World SubFlow"
+ transition: "Inject Mantra SubFlow"
+ - name: "Inject Mantra SubFlow"
+ type: "inject"
+ data:
+ mantra-subflow: "SubFlow Serverless Workflow is awesome!"
+ end: true
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow2.sw.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow2.sw.yaml
new file mode 100644
index 00000000000..4131ae2ea1f
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow2.sw.yaml
@@ -0,0 +1,21 @@
+---
+id: "helloworldyaml1"
+version: "1.0"
+specVersion: "0.8"
+name: "Hello World Workflow"
+description: "JSON based hello world workflow"
+start: "Inject Hello World SubFlow"
+functions:
+ - name: getOrders
+ operation: custom_specs/openapi2.yaml#getOrders
+states:
+ - name: "Inject Hello World SubFlow"
+ type: "inject"
+ data:
+ greeting-subflow: "Hello World SubFlow"
+ transition: "Inject Mantra SubFlow"
+ - name: "Inject Mantra SubFlow"
+ type: "inject"
+ data:
+ mantra-subflow: "SubFlow Serverless Workflow is awesome!"
+ end: true
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow3.sw.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow3.sw.yaml
new file mode 100644
index 00000000000..107c96a3951
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow3.sw.yaml
@@ -0,0 +1,21 @@
+---
+id: "helloworldyaml1"
+version: "1.0"
+specVersion: "0.8"
+name: "Hello World Workflow"
+description: "JSON based hello world workflow"
+start: "Inject Hello World SubFlow"
+functions:
+ - name: getItems
+ operation: specs/openapi.yaml#getItems
+states:
+ - name: "Inject Hello World SubFlow"
+ type: "inject"
+ data:
+ greeting-subflow: "Hello World SubFlow"
+ transition: "Inject Mantra SubFlow"
+ - name: "Inject Mantra SubFlow"
+ type: "inject"
+ data:
+ mantra-subflow: "SubFlow Serverless Workflow is awesome!"
+ end: true
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow4.sw.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow4.sw.yaml
new file mode 100644
index 00000000000..7c854e30afe
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/subflow4.sw.yaml
@@ -0,0 +1,21 @@
+---
+id: "helloworldyaml1"
+version: "1.0"
+specVersion: "0.8"
+name: "Hello World Workflow"
+description: "JSON based hello world workflow"
+start: "Inject Hello World SubFlow"
+functions:
+ - name: getOrders
+ operation: specs/openapi.yaml#getOrders
+states:
+ - name: "Inject Hello World SubFlow"
+ type: "inject"
+ data:
+ greeting-subflow: "Hello World SubFlow"
+ transition: "Inject Mantra SubFlow"
+ - name: "Inject Mantra SubFlow"
+ type: "inject"
+ data:
+ mantra-subflow: "SubFlow Serverless Workflow is awesome!"
+ end: true
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/workflow.sw.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/workflow.sw.yaml
new file mode 100644
index 00000000000..566d18e4fcd
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/workflow.sw.yaml
@@ -0,0 +1,38 @@
+id: flink-workflow
+version: "1.0"
+specVersion: "0.8"
+name: flink workflow
+description: Create a starter flink job management
+functions:
+ - name: getItems
+ operation: specs/openapi.yaml#getItems
+ - name: getOrders
+ operation: specs/openapi.yaml#getOrders
+ - name: createOrder
+ operation: specs/openapi.yaml#createOrder
+ - name: getOrder
+ operation: specs/openapi.yaml#getOrder
+start: Get Flink Jars
+states:
+ - name: Get Flink Jars
+ type: operation
+ actionMode: sequential
+ actions:
+ - name: Get Flink Jars
+ functionRef:
+ refName: getJars
+ transition: Run Flink Job
+ - name: Run Flink Job
+ type: operation
+ actionMode: sequential
+ actions:
+ - actionDataFilter:
+ useResults: true
+ name: Run Flink Job
+ functionRef:
+ refName: runFlinkJob
+ arguments:
+ jarid: 72ecfc25-43ca-4f53-a4ee-1aaf93ac709a_flink-streaming-1.0.jar
+ entry-class: com.demo.flink.streaming.StreamingJob
+ end:
+ terminate: true
diff --git a/packages/kn-plugin-workflow/pkg/specs/testdata/refs/workflow1.sw.yaml b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/workflow1.sw.yaml
new file mode 100644
index 00000000000..6e7f3233236
--- /dev/null
+++ b/packages/kn-plugin-workflow/pkg/specs/testdata/refs/workflow1.sw.yaml
@@ -0,0 +1,34 @@
+id: flink-workflow
+version: "1.0"
+specVersion: "0.8"
+name: flink workflow
+description: Create a starter flink job management
+functions:
+ - name: getItems
+ operation: specs/openapi1.yaml#getItems
+ - name: getOrders
+ operation: specs/openapi2.yaml#getOrders
+start: Get Flink Jars
+states:
+ - name: Get Flink Jars
+ type: operation
+ actionMode: sequential
+ actions:
+ - name: Get Flink Jars
+ functionRef:
+ refName: getJars
+ transition: Run Flink Job
+ - name: Run Flink Job
+ type: operation
+ actionMode: sequential
+ actions:
+ - actionDataFilter:
+ useResults: true
+ name: Run Flink Job
+ functionRef:
+ refName: runFlinkJob
+ arguments:
+ jarid: 72ecfc25-43ca-4f53-a4ee-1aaf93ac709a_flink-streaming-1.0.jar
+ entry-class: com.demo.flink.streaming.StreamingJob
+ end:
+ terminate: true
diff --git a/packages/kogito-base-builder-image/resources/incubator-kie-kogito-base-builder-image.yaml b/packages/kogito-base-builder-image/resources/incubator-kie-kogito-base-builder-image.yaml
index 627f891be91..49b09efd8a8 100644
--- a/packages/kogito-base-builder-image/resources/incubator-kie-kogito-base-builder-image.yaml
+++ b/packages/kogito-base-builder-image/resources/incubator-kie-kogito-base-builder-image.yaml
@@ -20,7 +20,7 @@ schema_version: 1
name: "docker.io/apache/incubator-kie-kogito-base-builder"
version: "main"
-from: "registry.access.redhat.com/ubi8/openjdk-17:1.19"
+from: "registry.access.redhat.com/ubi8/openjdk-17:1.21"
description: "Image with JDK and Maven, used as a base image. It is used by Web Tools !"
labels:
diff --git a/packages/kogito-data-index-ephemeral-image/resources/incubator-kie-kogito-data-index-ephemeral-image.yaml b/packages/kogito-data-index-ephemeral-image/resources/incubator-kie-kogito-data-index-ephemeral-image.yaml
index a84340f07a5..e7b34795a3b 100644
--- a/packages/kogito-data-index-ephemeral-image/resources/incubator-kie-kogito-data-index-ephemeral-image.yaml
+++ b/packages/kogito-data-index-ephemeral-image/resources/incubator-kie-kogito-data-index-ephemeral-image.yaml
@@ -18,7 +18,7 @@
#
name: "docker.io/apache/incubator-kie-kogito-data-index-ephemeral"
version: "main"
-from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.20"
+from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.21"
description: "Runtime image for Kogito Data Index Service for ephemeral PostgreSQL persistence provider"
labels:
diff --git a/packages/kogito-data-index-postgresql-image/resources/incubator-kie-kogito-data-index-postgresql-image.yaml b/packages/kogito-data-index-postgresql-image/resources/incubator-kie-kogito-data-index-postgresql-image.yaml
index cf4c8027420..0c92ea2332f 100644
--- a/packages/kogito-data-index-postgresql-image/resources/incubator-kie-kogito-data-index-postgresql-image.yaml
+++ b/packages/kogito-data-index-postgresql-image/resources/incubator-kie-kogito-data-index-postgresql-image.yaml
@@ -20,7 +20,7 @@ schema_version: 1
name: "docker.io/apache/incubator-kie-kogito-data-index-postgresql"
version: "main"
-from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.20"
+from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.21"
description: "Runtime image for Kogito Data Index Service for PostgreSQL persistence provider"
labels:
diff --git a/packages/kogito-jit-runner-image/resources/incubator-kie-kogito-jit-runner-image.yaml b/packages/kogito-jit-runner-image/resources/incubator-kie-kogito-jit-runner-image.yaml
index 323a6cc1d50..02ae0bf09bd 100644
--- a/packages/kogito-jit-runner-image/resources/incubator-kie-kogito-jit-runner-image.yaml
+++ b/packages/kogito-jit-runner-image/resources/incubator-kie-kogito-jit-runner-image.yaml
@@ -20,7 +20,7 @@ schema_version: 1
name: "docker.io/apache/incubator-kie-kogito-jit-runner"
version: "main"
-from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.20"
+from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.21"
description: "Runtime image for Kogito JIT Runner"
labels:
diff --git a/packages/kogito-jobs-service-allinone-image/resources/incubator-kie-kogito-jobs-service-allinone-image.yaml b/packages/kogito-jobs-service-allinone-image/resources/incubator-kie-kogito-jobs-service-allinone-image.yaml
index 03a41ec9d9d..b0bd0a71fb3 100644
--- a/packages/kogito-jobs-service-allinone-image/resources/incubator-kie-kogito-jobs-service-allinone-image.yaml
+++ b/packages/kogito-jobs-service-allinone-image/resources/incubator-kie-kogito-jobs-service-allinone-image.yaml
@@ -20,7 +20,7 @@ schema_version: 1
name: "docker.io/apache/incubator-kie-kogito-jobs-service-ephemeral"
version: "main"
-from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.20"
+from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.21"
description: "Runtime image for Kogito Jobs Service with all available jdbc providers"
labels:
diff --git a/packages/kogito-jobs-service-ephemeral-image/resources/incubator-kie-kogito-jobs-service-ephemeral-image.yaml b/packages/kogito-jobs-service-ephemeral-image/resources/incubator-kie-kogito-jobs-service-ephemeral-image.yaml
index e9b17647ad7..9adabc4967d 100644
--- a/packages/kogito-jobs-service-ephemeral-image/resources/incubator-kie-kogito-jobs-service-ephemeral-image.yaml
+++ b/packages/kogito-jobs-service-ephemeral-image/resources/incubator-kie-kogito-jobs-service-ephemeral-image.yaml
@@ -20,7 +20,7 @@ schema_version: 1
name: "docker.io/apache/incubator-kie-kogito-jobs-service-ephemeral"
version: "main"
-from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.20"
+from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.21"
description: "Runtime image for Kogito in memory Jobs Service"
labels:
diff --git a/packages/kogito-jobs-service-postgresql-image/resources/incubator-kie-kogito-jobs-service-postgresql-image.yaml b/packages/kogito-jobs-service-postgresql-image/resources/incubator-kie-kogito-jobs-service-postgresql-image.yaml
index 06ac396d1cb..2f7e9844ff7 100644
--- a/packages/kogito-jobs-service-postgresql-image/resources/incubator-kie-kogito-jobs-service-postgresql-image.yaml
+++ b/packages/kogito-jobs-service-postgresql-image/resources/incubator-kie-kogito-jobs-service-postgresql-image.yaml
@@ -20,7 +20,7 @@ schema_version: 1
name: "docker.io/apache/incubator-kie-kogito-jobs-service-postgresql"
version: "main"
-from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.20"
+from: "registry.access.redhat.com/ubi8/openjdk-17-runtime:1.21"
description: "Runtime image for Kogito Jobs Service based on Postgresql"
labels:
diff --git a/packages/kogito-management-console/Containerfile b/packages/kogito-management-console/Containerfile
index e58ae32a702..2c2440d5a0f 100644
--- a/packages/kogito-management-console/Containerfile
+++ b/packages/kogito-management-console/Containerfile
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.5
ARG KOGITO_MANAGEMENT_CONSOLE_PORT=8080
diff --git a/packages/maven-m2-repo-via-http-image/Containerfile b/packages/maven-m2-repo-via-http-image/Containerfile
index 214b9e3164f..88c1053f8c6 100644
--- a/packages/maven-m2-repo-via-http-image/Containerfile
+++ b/packages/maven-m2-repo-via-http-image/Containerfile
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.4
+FROM --platform=linux/amd64 registry.access.redhat.com/ubi9/ubi-minimal:9.5
# Argument for configuring the port
ARG PORT=80
diff --git a/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/definition/StateExecTimeout.java b/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/definition/StateExecTimeout.java
new file mode 100644
index 00000000000..7b007f41e92
--- /dev/null
+++ b/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/definition/StateExecTimeout.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.kie.workbench.common.stunner.sw.definition;
+
+import jsinterop.annotations.JsType;
+import org.kie.j2cl.tools.json.mapper.annotation.JSONMapper;
+import org.kie.j2cl.tools.processors.annotations.GWT3Export;
+import org.kie.j2cl.tools.yaml.mapper.api.annotation.YAMLMapper;
+
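+/**
+ * Object form of the Serverless Workflow {@code stateExecTimeout}, carrying the
+ * {@code single} and {@code total} state execution timeout limits.
+ */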
+@JSONMapper
+@YAMLMapper
+@JsType
+@GWT3Export
+public class StateExecTimeout {
+
+ private String single;
+ private String total;
+
+ public final String getSingle() {
+ return single;
+ }
+
+ public final void setSingle(String single) {
+ this.single = single;
+ }
+
+ public final String getTotal() {
+ return total;
+ }
+
+ public final void setTotal(String total) {
+ this.total = total;
+ }
+}
diff --git a/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/definition/WorkflowTimeouts.java b/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/definition/WorkflowTimeouts.java
index 1f6f5fc806a..b7e71230b5b 100644
--- a/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/definition/WorkflowTimeouts.java
+++ b/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/definition/WorkflowTimeouts.java
@@ -28,7 +28,9 @@
import org.kie.j2cl.tools.yaml.mapper.api.annotation.YAMLMapper;
import org.kie.j2cl.tools.yaml.mapper.api.annotation.YamlTypeDeserializer;
import org.kie.j2cl.tools.yaml.mapper.api.annotation.YamlTypeSerializer;
+import org.kie.workbench.common.stunner.sw.marshall.json.StateExecTimeoutJsonSerializer;
import org.kie.workbench.common.stunner.sw.marshall.json.WorkflowExecTimeoutJsonSerializer;
+import org.kie.workbench.common.stunner.sw.marshall.yaml.StateExecTimeoutYamlSerializer;
import org.kie.workbench.common.stunner.sw.marshall.yaml.WorkflowExecTimeoutYamlSerializer;
@JSONMapper
@@ -42,7 +44,12 @@ public class WorkflowTimeouts {
@YamlTypeSerializer(WorkflowExecTimeoutYamlSerializer.class)
@YamlTypeDeserializer(WorkflowExecTimeoutYamlSerializer.class)
private Object workflowExecTimeout;
- private String stateExecTimeout;
+
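+    // stateExecTimeout may be either a plain duration string or a StateExecTimeout object,
+    // so it is typed as Object and handled by the custom (de)serializers below.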
+ @JsonbTypeSerializer(StateExecTimeoutJsonSerializer.class)
+ @JsonbTypeDeserializer(StateExecTimeoutJsonSerializer.class)
+ @YamlTypeSerializer(StateExecTimeoutYamlSerializer.class)
+ @YamlTypeDeserializer(StateExecTimeoutYamlSerializer.class)
+ private Object stateExecTimeout;
private String actionExecTimeout;
private String branchExecTimeout;
private String eventTimeout;
@@ -55,11 +62,11 @@ public final void setWorkflowExecTimeout(Object workflowExecTimeout) {
this.workflowExecTimeout = workflowExecTimeout;
}
- public final String getStateExecTimeout() {
+ public final Object getStateExecTimeout() {
return stateExecTimeout;
}
- public final void setStateExecTimeout(String stateExecTimeout) {
+ public final void setStateExecTimeout(Object stateExecTimeout) {
this.stateExecTimeout = stateExecTimeout;
}
diff --git a/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/marshall/json/StateExecTimeoutJsonSerializer.java b/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/marshall/json/StateExecTimeoutJsonSerializer.java
new file mode 100644
index 00000000000..490d99d5d26
--- /dev/null
+++ b/packages/serverless-workflow-diagram-editor/sw-editor/sw-editor-api/src/main/java/org/kie/workbench/common/stunner/sw/marshall/json/StateExecTimeoutJsonSerializer.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.kie.workbench.common.stunner.sw.marshall.json;
+
+import java.lang.reflect.Type;
+
+import jakarta.json.JsonValue;
+import jakarta.json.bind.serializer.DeserializationContext;
+import jakarta.json.bind.serializer.JsonbDeserializer;
+import jakarta.json.bind.serializer.JsonbSerializer;
+import jakarta.json.bind.serializer.SerializationContext;
+import jakarta.json.stream.JsonGenerator;
+import jakarta.json.stream.JsonParser;
+import org.kie.j2cl.tools.json.mapper.internal.deserializer.StringJsonDeserializer;
+import org.kie.j2cl.tools.json.mapper.internal.serializer.StringJsonSerializer;
+import org.kie.workbench.common.stunner.sw.definition.StateExecTimeout;
+import org.kie.workbench.common.stunner.sw.definition.StateExecTimeout_JsonDeserializerImpl;
+import org.kie.workbench.common.stunner.sw.definition.StateExecTimeout_JsonSerializerImpl;
+
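+/**
+ * Handles the JSON form of {@code stateExecTimeout}, which may appear either as a plain
+ * string or as a {@link StateExecTimeout} object.
+ */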
+public class StateExecTimeoutJsonSerializer implements JsonbDeserializer