diff --git a/.github/workflows/m2k-func.yaml b/.github/workflows/m2k-func.yaml index 13dce995..26b38d52 100644 --- a/.github/workflows/m2k-func.yaml +++ b/.github/workflows/m2k-func.yaml @@ -11,9 +11,9 @@ on: env: WORKDIR: workflows/move2kube/m2k-func MVN_OPTS: "" - WF_CONFIG_REPO: parodos-dev/serverless-workflows-config + WF_CONFIG_REPO: rhdhorchestrator/serverless-workflows-config REGISTRY_REPO: orchestrator - GH_TOKEN: ${{ secrets.HELM_REPO_TOKEN }} + GH_TOKEN: ${{ secrets.HELM_REPO_TOKEN }} jobs: build-and-push-m2k-func: @@ -69,7 +69,7 @@ jobs: - name: Print image url run: echo "Image pushed to ${{ steps.push-to-quay.outputs.registry-paths }}" - + - uses: actions/github-script@v7 id: get_pr_data with: @@ -85,7 +85,7 @@ jobs: - name: Send PRs to config repo if: ${{ ! inputs.it_mode }} env: - GH_TOKEN: ${{ secrets.HELM_REPO_TOKEN }} + GH_TOKEN: ${{ secrets.HELM_REPO_TOKEN }} run: | # determine pr or commit url if [ ${{ fromJSON(steps.get_pr_data.outputs.result)}} != "" ]; then @@ -99,7 +99,7 @@ jobs: ${PR_OR_COMMIT_URL} \ "yq --inplace '.kfunction.image=\"quay.io/orchestrator/serverless-workflow-m2k-kfunc:${GITHUB_SHA}\"' charts/move2kube/values.yaml" \ scripts/create_automated_m2k_kfunc_pr.sh \ - ${{ github.event.head_commit.author.email || 'parodos@redhat.com' }} \ + ${{ github.event.head_commit.author.email || 'rhdhorchestrator@redhat.com' }} \ ${{ github.event.head_commit.author.name || github.triggering_actor }} \ ${{ env.WORKDIR }} \ ${{ env.GH_TOKEN }} \ diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5a418d50..0186de59 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -18,9 +18,9 @@ on: default: false env: - WF_CONFIG_REPO: parodos-dev/serverless-workflows-config + WF_CONFIG_REPO: rhdhorchestrator/serverless-workflows-config REGISTRY_REPO: orchestrator - GH_TOKEN: ${{ secrets.HELM_REPO_TOKEN }} + GH_TOKEN: ${{ secrets.HELM_REPO_TOKEN }} jobs: build: @@ -75,7 +75,7 @@ jobs: generate-manifests: needs: 
build - runs-on: ubuntu-24.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - name: Generate Manifests @@ -125,7 +125,7 @@ jobs: ${PR_OR_COMMIT_URL} \ "" \ scripts/create_automated_pr.sh \ - ${{ github.event.head_commit.author.email || 'parodos@redhat.com' }} \ + ${{ github.event.head_commit.author.email || 'rhdhorchestrator@redhat.com' }} \ ${{ github.event.head_commit.author.name || github.triggering_actor }} \ $WORKDIR \ ${{ env.GH_TOKEN }} \ diff --git a/.github/workflows/move2kube-e2e.yaml b/.github/workflows/move2kube-e2e.yaml index 86d73e13..8ce32ca7 100644 --- a/.github/workflows/move2kube-e2e.yaml +++ b/.github/workflows/move2kube-e2e.yaml @@ -1,4 +1,4 @@ -name: Move2kube Workflow end to end tests +name: Move2kube Workflow end to end tests on: workflow_dispatch: @@ -80,7 +80,7 @@ jobs: - name: Deploy sonataflow-operator run: | - helm repo add orchestrator https://parodos-dev.github.io/orchestrator-helm-chart + helm repo add orchestrator https://rhdhorchestrator.github.io/orchestrator-helm-chart helm install orchestrator orchestrator/orchestrator-k8s --set platform.eventing.broker.name="" kubectl get sfp -A @@ -95,7 +95,7 @@ jobs: -n knative-serving \ --type merge \ -p '{"data":{"kubernetes.podspec-init-containers": "enabled", "kubernetes.podspec-securitycontext": "enabled"}}' - + yq --inplace '.spec.podTemplate.container |= ( . 
+ {"env": [{"name": "K_SINK", "value": "http://broker-ingress.knative-eventing.svc.cluster.local/sonataflow-infra/default"}]} )' manifests/0?-sonataflow_m2k.yaml # reuse orchestrator psql persistence for e2e tests @@ -125,7 +125,7 @@ jobs: yq --inplace ".data.NOTIFICATIONS_BEARER_TOKEN=\"$BACKEND_SECRET\"" manifests/0?-secret_m2k.yaml # notsecret cat manifests/0?-secret_m2k.yaml - # deploy the manifests created by the ${{ steps.build-image.outputs.image }}" + # deploy the manifests created by the ${{ steps.build-image.outputs.image }}" kubectl apply -f manifests sleep 5 kubectl get deployment m2k -o yaml @@ -134,7 +134,7 @@ jobs: sleep 15 kubectl get pods -o wide kubectl wait --for=condition=Ready=true pods -l "app=m2k" --timeout=1m - + kubectl patch configmap/m2k-props \ --type merge \ -p '{"data": {"application.properties" :"move2kube_url=http://move2kube-instance-svc.default.svc.cluster.local:8080\nquarkus.rest-client.move2kube_yaml.url=http://move2kube-instance-svc.default.svc.cluster.local:8080\nquarkus.rest-client.notifications.url=http://orchestrator-backstage.default.svc.cluster.local:7007/"}}' diff --git a/.github/workflows/mta-v7.x-e2e.yaml b/.github/workflows/mta-v7.x-e2e.yaml index 250f184d..ec7e5e92 100644 --- a/.github/workflows/mta-v7.x-e2e.yaml +++ b/.github/workflows/mta-v7.x-e2e.yaml @@ -1,4 +1,4 @@ -name: MTA v7.x Workflow end to end tests +name: MTA v7.x Workflow end to end tests on: workflow_dispatch: @@ -33,7 +33,7 @@ jobs: run: | kubectl apply -f https://raw.githubusercontent.com/operator-framework/operator-lifecycle-manager/master/deploy/upstream/quickstart/crds.yaml # give the apiserver time - sleep 5s + sleep 5s kubectl apply -f https://raw.githubusercontent.com/operator-framework/operator-lifecycle-manager/master/deploy/upstream/quickstart/olm.yaml - name: Install Konveyor 0.3 (MTA upstream equivalent to 7.0) @@ -44,8 +44,8 @@ jobs: # give the apiserver time echo "sleeping 300 seconds to give time for the operator to pull images and 
start" sleep 300s - kubectl get csv -A - # TODO its a bit smelly that the csv name is coded here. + kubectl get csv -A + # TODO its a bit smelly that the csv name is coded here. kubectl wait --for=jsonpath='{.status.phase}=Succeeded' -n my-konveyor-operator csv/konveyor-operator.v0.3.2 kubectl get pods -A kubectl wait --for=condition=Ready=true pods -l "name=tackle-operator" -n my-konveyor-operator --timeout=240s @@ -73,7 +73,7 @@ jobs: - name: Deploy sonataflow-operator run: | - helm repo add orchestrator https://parodos-dev.github.io/orchestrator-helm-chart + helm repo add orchestrator https://rhdhorchestrator.github.io/orchestrator-helm-chart helm install orchestrator orchestrator/orchestrator-k8s --set platform.eventing.broker.name="" kubectl get sfp -A diff --git a/.github/workflows/workflow-executor.yml b/.github/workflows/workflow-executor.yml index 16e7e2ec..de868447 100644 --- a/.github/workflows/workflow-executor.yml +++ b/.github/workflows/workflow-executor.yml @@ -16,7 +16,7 @@ on: default: true env: - WF_CONFIG_REPO: parodos-dev/serverless-workflows-config + WF_CONFIG_REPO: rhdhorchestrator/serverless-workflows-config REGISTRY_REPO: orchestrator GH_TOKEN: ${{ secrets.HELM_REPO_TOKEN }} @@ -78,7 +78,7 @@ jobs: gen-manifests # TODO Push manifests to repo - + - name: Push manifests to CD repo if: ${{ ! 
inputs.it_mode }} run: | @@ -90,7 +90,7 @@ jobs: fi WORKDIR=${{ runner.temp }}/serverless-workflows - + make WORKFLOW_ID=${{ inputs.workflow_id }} \ WORKDIR=${WORKDIR} \ APPLICATION_ID=${{ inputs.application_id }} \ diff --git a/.tekton/m2k-func-pull-request.yaml b/.tekton/m2k-func-pull-request.yaml index dd77528e..01476208 100644 --- a/.tekton/m2k-func-pull-request.yaml +++ b/.tekton/m2k-func-pull-request.yaml @@ -2,7 +2,7 @@ apiVersion: tekton.dev/v1 kind: PipelineRun metadata: annotations: - build.appstudio.openshift.io/repo: https://github.com/parodos-dev/serverless-workflows?rev={{revision}} + build.appstudio.openshift.io/repo: https://github.com/rhdhorchestrator/serverless-workflows?rev={{revision}} build.appstudio.redhat.com/commit_sha: '{{revision}}' build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' build.appstudio.redhat.com/target_branch: '{{target_branch}}' diff --git a/.tekton/m2k-func-push.yaml b/.tekton/m2k-func-push.yaml index e2604b50..a1c087a0 100644 --- a/.tekton/m2k-func-push.yaml +++ b/.tekton/m2k-func-push.yaml @@ -2,7 +2,7 @@ apiVersion: tekton.dev/v1 kind: PipelineRun metadata: annotations: - build.appstudio.openshift.io/repo: https://github.com/parodos-dev/serverless-workflows?rev={{revision}} + build.appstudio.openshift.io/repo: https://github.com/rhdhorchestrator/serverless-workflows?rev={{revision}} build.appstudio.redhat.com/commit_sha: '{{revision}}' build.appstudio.redhat.com/target_branch: '{{target_branch}}' pipelinesascode.tekton.dev/max-keep-runs: "3" diff --git a/.tekton/move2kube-serverless-workflow-pull-request.yaml b/.tekton/move2kube-serverless-workflow-pull-request.yaml index f7b78678..db8ead50 100644 --- a/.tekton/move2kube-serverless-workflow-pull-request.yaml +++ b/.tekton/move2kube-serverless-workflow-pull-request.yaml @@ -2,7 +2,7 @@ apiVersion: tekton.dev/v1 kind: PipelineRun metadata: annotations: - build.appstudio.openshift.io/repo: 
https://github.com/parodos-dev/serverless-workflows?rev={{revision}} + build.appstudio.openshift.io/repo: https://github.com/rhdhorchestrator/serverless-workflows?rev={{revision}} build.appstudio.redhat.com/commit_sha: '{{revision}}' build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' build.appstudio.redhat.com/target_branch: '{{target_branch}}' diff --git a/.tekton/move2kube-serverless-workflow-push.yaml b/.tekton/move2kube-serverless-workflow-push.yaml index 933bc1fe..2d3b15e1 100644 --- a/.tekton/move2kube-serverless-workflow-push.yaml +++ b/.tekton/move2kube-serverless-workflow-push.yaml @@ -2,7 +2,7 @@ apiVersion: tekton.dev/v1 kind: PipelineRun metadata: annotations: - build.appstudio.openshift.io/repo: https://github.com/parodos-dev/serverless-workflows?rev={{revision}} + build.appstudio.openshift.io/repo: https://github.com/rhdhorchestrator/serverless-workflows?rev={{revision}} build.appstudio.redhat.com/commit_sha: '{{revision}}' build.appstudio.redhat.com/target_branch: '{{target_branch}}' pipelinesascode.tekton.dev/max-keep-runs: "3" diff --git a/.tekton/mta-serverless-workflow-pull-request.yaml b/.tekton/mta-serverless-workflow-pull-request.yaml index f1ef0192..7740ec43 100644 --- a/.tekton/mta-serverless-workflow-pull-request.yaml +++ b/.tekton/mta-serverless-workflow-pull-request.yaml @@ -2,7 +2,7 @@ apiVersion: tekton.dev/v1 kind: PipelineRun metadata: annotations: - build.appstudio.openshift.io/repo: https://github.com/parodos-dev/serverless-workflows?rev={{revision}} + build.appstudio.openshift.io/repo: https://github.com/rhdhorchestrator/serverless-workflows?rev={{revision}} build.appstudio.redhat.com/commit_sha: '{{revision}}' build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' build.appstudio.redhat.com/target_branch: '{{target_branch}}' diff --git a/.tekton/mta-serverless-workflow-push.yaml b/.tekton/mta-serverless-workflow-push.yaml index bc120415..508a1acb 100644 --- 
a/.tekton/mta-serverless-workflow-push.yaml +++ b/.tekton/mta-serverless-workflow-push.yaml @@ -2,7 +2,7 @@ apiVersion: tekton.dev/v1 kind: PipelineRun metadata: annotations: - build.appstudio.openshift.io/repo: https://github.com/parodos-dev/serverless-workflows?rev={{revision}} + build.appstudio.openshift.io/repo: https://github.com/rhdhorchestrator/serverless-workflows?rev={{revision}} build.appstudio.redhat.com/commit_sha: '{{revision}}' build.appstudio.redhat.com/target_branch: '{{target_branch}}' pipelinesascode.tekton.dev/max-keep-runs: "3" diff --git a/README.md b/README.md index 34f51f47..d4c11924 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ Follow these steps to successfully add a new workflow: 6. Create a PR to [serverless-workflows-config repository][3] and make sure its merge. 7. Now the PR from 4 can be merged and an automatic PR will be created with the generated manifests. Review and merge. -See [Continuous Integration with make](https://github.com/parodos-dev/serverless-workflows/blob/main/make.md) for implementation details of the CI pipeline. +See [Continuous Integration with make](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/make.md) for implementation details of the CI pipeline. 
### Builder image @@ -68,5 +68,5 @@ For every PR merged in the workflow directory, a GitHub Action runs an image bui [1]: https://github.com/serverlessworkflow/specification/tree/main?tab=readme-ov-file#documentation [2]: https://github.com/apache/incubator-kie-kogito-serverless-operator/ -[3]: https://github.com/parodos-dev/serverless-workflows-config -[4]: https://github.com/parodos-dev/serverless-workflows/blob/main/best-practices.md +[3]: https://github.com/rhdhorchestrator/serverless-workflows-config +[4]: https://github.com/rhdhorchestrator/serverless-workflows/blob/main/best-practices.md diff --git a/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/EventNotifier.java b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/EventNotifier.java new file mode 100644 index 00000000..b4c1a944 --- /dev/null +++ b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/EventNotifier.java @@ -0,0 +1,18 @@ +package io.rhdhorchestrator.jiralistener; + +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; + +import io.cloudevents.CloudEvent; +import io.cloudevents.jackson.JsonFormat; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; + +@Path("/") +@RegisterRestClient(configKey="ce-emitter") +public interface EventNotifier { + + @POST + @Produces(JsonFormat.CONTENT_TYPE) + void emit(CloudEvent event); +} diff --git a/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/JiraListenerResource.java b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/JiraListenerResource.java new file mode 100644 index 00000000..3d684b33 --- /dev/null +++ b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/JiraListenerResource.java @@ -0,0 +1,66 @@ +package io.rhdhorchestrator.jiralistener; + +import java.io.IOException; +import java.lang.System.Logger; +import java.lang.System.Logger.Level; +import java.util.Map; + 
+import com.fasterxml.jackson.databind.ObjectMapper; + +import io.rhdhorchestrator.jiralistener.JiraListenerService.OnEventResponse; +import io.rhdhorchestrator.jiralistener.model.JiraIssue; +import io.rhdhorchestrator.jiralistener.model.WebhookEvent; +import jakarta.inject.Inject; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; + +@Path("/") +public class JiraListenerResource { + private Logger logger = System.getLogger(JiraListenerResource.class.getName()); + + @Inject + ObjectMapper mapper; + + @Inject + JiraListenerService jiraListenerService; + + // Test endpoint used in dev mode when not specifying a K_SINK variable + @POST + @Consumes(MediaType.APPLICATION_JSON) + @Path("/") + public void test(Object any) { + logger.log(Level.INFO, "RECEIVED " + any); + } + + @POST + @Consumes(MediaType.APPLICATION_JSON) + @Path("/webhook/jira") + public Response onEvent(Map requestBody) { + logger.log(Level.INFO, "Received " + requestBody); + + try { + WebhookEvent webhookEvent = mapper.readValue(mapper.writeValueAsBytes(requestBody), WebhookEvent.class); + logger.log(Level.INFO, "Received " + webhookEvent); + if (webhookEvent.getIssue() == null) { + logger.log(Level.WARNING, "Discarded because of missing field: issue"); + return Response.noContent().build(); + } + JiraIssue jiraIssue = webhookEvent.getIssue(); + + OnEventResponse response = jiraListenerService.onEvent(jiraIssue); + if (response.eventAccepted) { + return Response.ok(response.jiraTicketEventData).build(); + } + return Response.noContent().build(); + } catch (IOException e) { + return Response + .status(Status.BAD_REQUEST.getStatusCode(), + "Not a valid webhook event for a Jira issue: " + e.getMessage()) + .build(); + } + } +} \ No newline at end of file diff --git 
a/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/JiraListenerService.java b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/JiraListenerService.java new file mode 100644 index 00000000..8097ee68 --- /dev/null +++ b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/JiraListenerService.java @@ -0,0 +1,154 @@ +package io.rhdhorchestrator.jiralistener; + +import java.lang.System.Logger; +import java.lang.System.Logger.Level; +import java.net.URI; +import java.util.List; +import java.util.Optional; +import java.util.UUID; + +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.eclipse.microprofile.rest.client.inject.RestClient; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import io.rhdhorchestrator.jiralistener.model.JiraTicketEventData; +import io.rhdhorchestrator.jiralistener.model.JiraIssue; +import io.rhdhorchestrator.jiralistener.model.JiraIssue.StatusCategory; +import io.cloudevents.CloudEvent; +import io.cloudevents.core.builder.CloudEventBuilder; +import io.cloudevents.core.data.PojoCloudEventData; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.ws.rs.core.MediaType; + +@ApplicationScoped +public class JiraListenerService { + @ConfigProperty(name = "cloudevent.type") + String cloudeventType; + @ConfigProperty(name = "cloudevent.source") + String cloudeventSource; + + @ConfigProperty(name = "jira.webhook.label.workflowInstanceId") + String workflowInstanceIdJiraLabel; + @ConfigProperty(name = "jira.webhook.label.workflowName") + String workflowNameJiraLabel; + @ConfigProperty(name = "escalation.workflowName") + String expectedWorkflowName; + + private Logger logger = System.getLogger(JiraListenerService.class.getName()); + + @Inject + @RestClient + EventNotifier eventNotifier; + + @Inject + ObjectMapper mapper; + + static class OnEventResponse { + boolean eventAccepted; + JiraTicketEventData 
jiraTicketEventData; + } + + OnEventResponse onEvent(JiraIssue jiraIssue) { + OnEventResponse response = new OnEventResponse(); + response.eventAccepted = false; + + Optional ticket = validateIsAClosedJiraIssue(jiraIssue); + if (ticket.isPresent()) { + logger.log(Level.INFO, "Created ticket " + ticket.get()); + CloudEvent newCloudEvent = CloudEventBuilder.v1() + .withDataContentType(MediaType.APPLICATION_JSON) + .withExtension("kogitoprocrefid", ticket.get().getWorkFlowInstanceId()) + .withId(UUID.randomUUID().toString()) + .withType(cloudeventType) + .withSource(URI.create(cloudeventSource)) + .withData(PojoCloudEventData.wrap(ticket.get(), + mapper::writeValueAsBytes)) + .build(); + + logger.log(Level.INFO, "Emitting " + newCloudEvent); + eventNotifier.emit(newCloudEvent); + response.eventAccepted = true; + response.jiraTicketEventData = ticket.get(); + } + + return response; + } + + private Optional validateIsAClosedJiraIssue(JiraIssue jiraIssue) { + Optional notaClosedJiraIssue = Optional.empty(); + String issueKey = jiraIssue.getKey(); + if (jiraIssue.getKey() != null) { + if (jiraIssue.getFields() == null) { + logger.log(Level.WARNING, "Discarded because of missing field: issue.fields"); + return notaClosedJiraIssue; + } + + if (jiraIssue.getFields().getLabels() == null) { + logger.log(Level.WARNING, String.format("Discarded because of missing field: issue.fields.labels")); + return notaClosedJiraIssue; + } + List labels = jiraIssue.getFields().getLabels(); + + Optional workflowInstanceIdLabel = labels.stream() + .filter(l -> l.startsWith(workflowInstanceIdJiraLabel + "=")).findFirst(); + if (workflowInstanceIdLabel.isEmpty()) { + logger.log(Level.INFO, + String.format("Discarded because no %s label found", workflowInstanceIdJiraLabel)); + return notaClosedJiraIssue; + } + String workflowInstanceId = workflowInstanceIdLabel.get().split("=")[1]; + + Optional workflowNameLabel = labels.stream() + .filter(l -> l.startsWith(workflowNameJiraLabel + 
"=")).findFirst(); + if (workflowNameLabel.isEmpty()) { + logger.log(Level.INFO, String.format("Discarded because no %s label found", workflowNameJiraLabel)); + return notaClosedJiraIssue; + } + String workflowName = workflowNameLabel.get().split("=")[1]; + if (!workflowName.equals(expectedWorkflowName)) { + logger.log(Level.INFO, + String.format("Discarded because label %s is not matching the expected value %s", + workflowNameLabel.get(), expectedWorkflowName)); + return notaClosedJiraIssue; + } + + if (jiraIssue.getFields().getStatus() == null) { + logger.log(Level.WARNING, String.format("Discarded because of missing field: issue.fields.status")); + return notaClosedJiraIssue; + } + JiraIssue.Status status = jiraIssue.getFields().getStatus(); + + if (status.getStatusCategory() == null) { + logger.log(Level.WARNING, + String.format("Discarded because of missing field: issue.fields.status.statusCategory")); + return notaClosedJiraIssue; + } + StatusCategory statusCategory = status.getStatusCategory(); + + if (statusCategory.getKey() == null) { + logger.log(Level.WARNING, + String.format("Discarded because of missing field: issue.fields.status.statusCategory.key")); + return notaClosedJiraIssue; + } + String statusCategoryKey = statusCategory.getKey(); + + logger.log(Level.INFO, + String.format("Received Jira issue %s with workflowInstanceId %s, workflowName %s and status %s", + issueKey, + workflowInstanceId, workflowName, statusCategoryKey)); + if (!statusCategoryKey.equals("done")) { + logger.log(Level.INFO, "Discarded because not a completed issue but " + statusCategoryKey); + return notaClosedJiraIssue; + } + + return Optional.of(JiraTicketEventData.builder().ticketId(issueKey) + .workFlowInstanceId(workflowInstanceId) + .workflowName(workflowName).status(statusCategoryKey).build()); + } else { + logger.log(Level.INFO, "Discarded because of missing field: key"); + return notaClosedJiraIssue; + } + } +} diff --git 
a/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/JiraIssue.java b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/JiraIssue.java new file mode 100644 index 00000000..6304fef1 --- /dev/null +++ b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/JiraIssue.java @@ -0,0 +1,49 @@ +package io.rhdhorchestrator.jiralistener.model; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Builder; +import lombok.Data; +import lombok.extern.jackson.Jacksonized; + +@Data +@Builder +@Jacksonized +@JsonIgnoreProperties(ignoreUnknown = true) +public class JiraIssue { + @JsonProperty(required = true) + private String key; + @JsonProperty(required = true) + private Fields fields; + + @Data + @Builder + @Jacksonized + @JsonIgnoreProperties(ignoreUnknown = true) + public static class Fields { + private List labels; + private Status status; + + } + + @Data + @Builder + @Jacksonized + @JsonIgnoreProperties(ignoreUnknown = true) + public static class Status { + private StatusCategory statusCategory; + + } + + @Data + @Builder + @Jacksonized + @JsonIgnoreProperties(ignoreUnknown = true) + public static class StatusCategory { + private String key; + + } +} diff --git a/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/JiraTicketEventData.java b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/JiraTicketEventData.java new file mode 100644 index 00000000..2c6b1f4c --- /dev/null +++ b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/JiraTicketEventData.java @@ -0,0 +1,15 @@ +package io.rhdhorchestrator.jiralistener.model; + +import lombok.Builder; +import lombok.Data; +import lombok.extern.jackson.Jacksonized; + +@Builder +@Data +@Jacksonized +public class JiraTicketEventData { + private String ticketId; + private 
String workFlowInstanceId; + private String workflowName; + private String status; +} diff --git a/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/WebhookEvent.java b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/WebhookEvent.java new file mode 100644 index 00000000..fa31b6f9 --- /dev/null +++ b/escalation/jira-listener/src/main/java/io/rhdhorchestrator/jiralistener/model/WebhookEvent.java @@ -0,0 +1,19 @@ +package io.rhdhorchestrator.jiralistener.model; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +import lombok.Builder; +import lombok.Data; +import lombok.extern.jackson.Jacksonized; + +@Data +@Builder +@Jacksonized +@JsonIgnoreProperties(ignoreUnknown = true) +public class WebhookEvent { + private String timestamp; + private String webhookEvent; + private String issue_event_type_name; + + private JiraIssue issue; +} diff --git a/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraConstants.java b/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraConstants.java new file mode 100644 index 00000000..f57d2574 --- /dev/null +++ b/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraConstants.java @@ -0,0 +1,10 @@ +package io.rhdhorchestrator.jiralistener; + +public final class JiraConstants { + static final String ISSUE = "issue"; + static final String KEY = "key"; + static final String FIELDS = "fields"; + static final String LABELS = "labels"; + static final String STATUS = "status"; + static final String STATUS_CATEGORY = "statusCategory"; +} diff --git a/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraListenerResourceIT.java b/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraListenerResourceIT.java new file mode 100644 index 00000000..6c372fb0 --- /dev/null +++ 
b/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraListenerResourceIT.java @@ -0,0 +1,8 @@ +package io.rhdhorchestrator.jiralistener; + +import io.quarkus.test.junit.QuarkusIntegrationTest; + +@QuarkusIntegrationTest +public class JiraListenerResourceIT extends JiraListenerResourceTest { + // Execute the same tests but in packaged mode. +} diff --git a/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraListenerResourceTest.java b/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraListenerResourceTest.java new file mode 100644 index 00000000..bfb532e5 --- /dev/null +++ b/escalation/jira-listener/src/test/java/io/rhdhorchestrator/jiralistener/JiraListenerResourceTest.java @@ -0,0 +1,232 @@ +package io.rhdhorchestrator.jiralistener; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.post; +import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; +import static io.rhdhorchestrator.jiralistener.JiraConstants.FIELDS; +import static io.rhdhorchestrator.jiralistener.JiraConstants.ISSUE; +import static io.rhdhorchestrator.jiralistener.JiraConstants.KEY; +import static io.rhdhorchestrator.jiralistener.JiraConstants.LABELS; +import static io.rhdhorchestrator.jiralistener.JiraConstants.STATUS; +import static io.rhdhorchestrator.jiralistener.JiraConstants.STATUS_CATEGORY; +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; 
+import java.util.Map; + +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import com.fasterxml.jackson.core.exc.StreamReadException; +import com.fasterxml.jackson.databind.DatabindException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.client.WireMock; +import com.github.tomakehurst.wiremock.stubbing.ServeEvent; +import com.google.common.collect.Lists; + +import io.rhdhorchestrator.jiralistener.model.JiraTicketEventData; +import io.quarkus.test.junit.QuarkusTest; +import io.restassured.response.ExtractableResponse; +import io.restassured.response.Response; +import jakarta.inject.Inject; + +@QuarkusTest +public class JiraListenerResourceTest { + private static WireMockServer sink; + + @ConfigProperty(name = "cloudevent.type") + String cloudeventType; + @ConfigProperty(name = "cloudevent.source") + String cloudeventSource; + + @ConfigProperty(name = "jira.webhook.label.workflowInstanceId") + String workflowInstanceIdJiraLabel; + @ConfigProperty(name = "jira.webhook.label.workflowName") + String workflowNameJiraLabel; + @ConfigProperty(name = "escalation.workflowName") + String expectedWorkflowName; + + @Inject + ObjectMapper mapper; + + @BeforeAll + public static void startSink() { + sink = new WireMockServer(options().port(8181)); + sink.start(); + sink.stubFor(post("/").willReturn(aResponse().withBody("ok").withStatus(200))); + } + + @AfterAll + public static void stopSink() { + if (sink != null) { + sink.stop(); + } + } + + @BeforeEach + public void resetSink() { + sink.resetRequests(); + } + + private Map aClosedIssue() { + Map statusCategory = new HashMap(Map.of(KEY, "done")); + Map status = new HashMap(Map.of(STATUS_CATEGORY, statusCategory)); + List labels = new 
ArrayList<>(List.of(workflowInstanceIdJiraLabel + "=500", + workflowNameJiraLabel + "=" + expectedWorkflowName)); + Map fields = new HashMap(Map.of(LABELS, labels, STATUS, status)); + Map issue = new HashMap(Map.of(KEY, "PR-1", FIELDS, fields)); + return new HashMap(Map.of(ISSUE, issue)); + } + + @Test + public void when_jiraIssueIsClosed_onEvent_returnsClosedTicket() + throws StreamReadException, DatabindException, IOException { + Map webhookEvent = aClosedIssue(); + + String workflowInstanceId = "500"; + JiraTicketEventData closedTicket = JiraTicketEventData.builder().ticketId("PR-1") + .workFlowInstanceId(workflowInstanceId) + .workflowName("escalation").status("done").build(); + + ExtractableResponse response = given() + .when().contentType("application/json") + .body(webhookEvent).post("/webhook/jira") + .then() + .statusCode(200) + .extract(); + + assertEquals(response.as(JiraTicketEventData.class), closedTicket, "Returns JiraTicketEventData"); + sink.verify(1, postRequestedFor(urlEqualTo("/")) + .withHeader("ce-source", WireMock.equalTo(cloudeventSource)) + .withHeader("ce-type", WireMock.equalTo(cloudeventType)) + .withHeader("ce-kogitoprocrefid", WireMock.equalTo(workflowInstanceId))); + List allServeEvents = sink.getAllServeEvents(); + allServeEvents = Lists.reverse(allServeEvents); + assertThat(allServeEvents, hasSize(1)); + + ServeEvent event = allServeEvents.get(0); + System.out.println("Received event with headers " + event.getRequest().getAllHeaderKeys()); + JiraTicketEventData eventBody = mapper.readValue(event.getRequest().getBody(), JiraTicketEventData.class); + System.out.println("Received event with eventBody " + eventBody); + assertThat(event.getRequest().header("ce-source").values().get(0), + is(cloudeventSource)); + assertThat(event.getRequest().header("ce-type").values().get(0), + is(cloudeventType)); + assertThat(event.getRequest().header("ce-kogitoprocrefid").values().get(0), + is(workflowInstanceId)); + assertThat("Response body is equal 
to the request body", eventBody, is(closedTicket)); + } + + @Test + public void when_payloadIsInvalid_onEvent_returnsNoContent() + throws StreamReadException, DatabindException, IOException { + Map invalidIssue = Map.of("invalid", "any"); + validateNoContentRequest(invalidIssue); + + } + + @Test + public void when_jiraIssueHasNotAllRequiredFiels_onEvent_returnsNoContent() + throws StreamReadException, DatabindException, IOException { + Map webhookEvent = aClosedIssue(); + Map issue = (Map) webhookEvent.get(ISSUE); + ((List) ((Map) issue.get(FIELDS)).get(LABELS)).remove(0); + validateNoContentRequest(webhookEvent); + + webhookEvent = aClosedIssue(); + issue = (Map) webhookEvent.get(ISSUE); + ((List) ((Map) issue.get(FIELDS)).get(LABELS)).remove(1); + validateNoContentRequest(webhookEvent); + + webhookEvent = aClosedIssue(); + issue = (Map) webhookEvent.get(ISSUE); + ((Map) issue.get(FIELDS)).remove(LABELS); + validateNoContentRequest(webhookEvent); + + webhookEvent = aClosedIssue(); + issue = (Map) webhookEvent.get(ISSUE); + ((Map) ((Map) ((Map) issue.get(FIELDS)).get(STATUS)).get(STATUS_CATEGORY)) + .remove(KEY); + validateNoContentRequest(webhookEvent); + + webhookEvent = aClosedIssue(); + issue = (Map) webhookEvent.get(ISSUE); + ((Map) ((Map) issue.get(FIELDS)).get(STATUS)).remove(STATUS_CATEGORY); + validateNoContentRequest(webhookEvent); + + webhookEvent = aClosedIssue(); + issue = (Map) webhookEvent.get(ISSUE); + ((Map) issue.get(FIELDS)).remove(STATUS); + validateNoContentRequest(webhookEvent); + + webhookEvent = aClosedIssue(); + issue = (Map) webhookEvent.get(ISSUE); + issue.remove(FIELDS); + validateNoContentRequest(webhookEvent); + + webhookEvent = aClosedIssue(); + issue = (Map) webhookEvent.get(ISSUE); + issue.remove(KEY); + validateNoContentRequest(webhookEvent); + } + + @Test + public void when_jiraIssueIsNotClosed_onEvent_returnsNoContent() + throws StreamReadException, DatabindException, IOException { + Map webhookEvent = aClosedIssue(); + Map issue 
= (Map) webhookEvent.get(ISSUE); + ((Map) ((Map) ((Map) issue.get(FIELDS)).get(STATUS)).get(STATUS_CATEGORY)) + .put(KEY, + "undone"); + validateNoContentRequest(webhookEvent); + } + + @Test + public void when_jiraIssueHasWrongWorkflowName_onEvent_returnsNoContent() + throws StreamReadException, DatabindException, IOException { + Map webhookEvent = aClosedIssue(); + Map issue = (Map) webhookEvent.get(ISSUE); + Map fields = ((Map) issue.get(FIELDS)); + fields.put(LABELS, List.of(workflowInstanceIdJiraLabel + "=500", + workflowNameJiraLabel + "=invalidName")); + validateNoContentRequest(webhookEvent); + } + + @Test + public void when_jiraIssueHasWrongLabels_onEvent_returnsNoContent() + throws StreamReadException, DatabindException, IOException { + Map webhookEvent = aClosedIssue(); + Map issue = (Map) webhookEvent.get(ISSUE); + Map fields = ((Map) issue.get(FIELDS)); + fields.put(LABELS, List.of("anotherLabel")); + validateNoContentRequest(webhookEvent); + } + + private void validateNoContentRequest(Map issue) { + ExtractableResponse response = given() + .when().contentType("application/json") + .body(issue).post("/webhook/jira") + .then() + .statusCode(204) + .extract(); + + assertThat("Returns no content", response.asString(), is("")); + sink.verify(0, postRequestedFor(urlEqualTo("/"))); + List allServeEvents = sink.getAllServeEvents(); + allServeEvents = Lists.reverse(allServeEvents); + assertThat(allServeEvents, hasSize(0)); + } +} \ No newline at end of file diff --git a/make.md b/make.md index 2bddfcf7..9eb71793 100644 --- a/make.md +++ b/make.md @@ -17,8 +17,8 @@ on the specific git service provider (e.g. GitHub rather than Bitbucket). * Container registry credentials to publish the generated image The actual implementation is delegated to shell scripts located under the [scripts](./scripts/) folder that run -in a containerized image including all the required dependencies: there's no need to install any other -tool in the local environment. 
+in a containerized image including all the required dependencies: there's no need to install any other +tool in the local environment. ## Configuration variables Variables can be used to configure the behavior of the [Makefile](./Makefile): @@ -44,13 +44,13 @@ Variables can be used to configure the behavior of the [Makefile](./Makefile): | REGISTRY_PASSWORD | Container registry user password | `""` (e.g., no login attempted) | | IMAGE_PREFIX | Automatically added image prefix | `serverless-workflow` | | IMAGE_TAG | Automatically added image tag | 8 chars commit hash of the latest commit | -| DEPLOYMENT_REPO | Git repo of the deployment source code | `parodos-dev/serverless-workflows-config` | +| DEPLOYMENT_REPO | Git repo of the deployment source code | `rhdhorchestrator/serverless-workflows-config` | | DEPLOYMENT_BRANCH | Branch of the deployment git repo | `main` | | ENABLE_PERSISTENCE | Enables the addition of persistence to the generated manifests. Useful for local testing | `false` | Override the default values with: ```bash -make = ... = +make = ... = ``` ### Requirements for WORKFLOW_ID variable @@ -61,16 +61,16 @@ make = ... = * Must match one of the application folders under the given workflow folder, like [jira-listener](./escalation/jira-listener/) * Must contain a valid, Maven Java project * Must be compatible with the selected `JDK_IMAGE` - + ### Requirements for Linux UBI image See the [setup](./setup/README.md) documentation. 
### Requirements for the deployment repo -The procedure assumes that the folder structure of the target deployment repository reflects the one of the [default repository](https://github.com/parodos-dev/serverless-workflows-config), e.g.: - * Manifests are stored in the `base` subfolder - * Image is customized in the `overlays/prod` subfolder -* `helm` projects are located under the `charts/workflows/charts/WORKFLOW_ID` folder - * Manifests are copied under the `templates` subfolder with no Helm-specific manipulation +The procedure assumes that the folder structure of the target deployment repository reflects the one of the [default repository](https://github.com/rhdhorchestrator/serverless-workflows-config), e.g.: + * Manifests are stored in the `base` subfolder + * Image is customized in the `overlays/prod` subfolder + * `helm` projects are located under the `charts/workflows/charts/WORKFLOW_ID` folder + * Manifests are copied under the `templates` subfolder with no Helm-specific manipulation ## Building with make The following examples show how to build a specific workflow like `escalation` in the local repository. diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/CloudEventsCustomizer.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/CloudEventsCustomizer.java new file mode 100644 index 00000000..621ac55e --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/CloudEventsCustomizer.java @@ -0,0 +1,20 @@ +package io.rhdhorchestrator; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import io.cloudevents.jackson.JsonFormat; +import io.quarkus.jackson.ObjectMapperCustomizer; +import jakarta.inject.Singleton; + +/** + * Ensure the registration of the CloudEvent jackson module according to the Quarkus suggested procedure. 
+ */ +@Singleton +public class CloudEventsCustomizer implements ObjectMapperCustomizer { + + @Override + public void customize(ObjectMapper mapper) { + mapper.registerModule(JsonFormat.getCloudEventJacksonModule()); + mapper.registerModule(new JavaTimeModule()); + } +} diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/EventGenerator.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/EventGenerator.java new file mode 100644 index 00000000..30ede3a1 --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/EventGenerator.java @@ -0,0 +1,60 @@ +package io.rhdhorchestrator; + +import io.quarkus.funqy.knative.events.CloudEvent; +import io.quarkus.funqy.knative.events.CloudEventBuilder; +import org.eclipse.microprofile.config.ConfigProvider; +import org.eclipse.microprofile.config.inject.ConfigProperty; + +import java.time.OffsetDateTime; +import java.util.Map; +import java.util.UUID; + +public class EventGenerator { + + public static final String ERROR_EVENT = ConfigProvider.getConfig().getValue("error.event.name",String.class); + + public static final String TRANSFORMATION_SAVED_EVENT = ConfigProvider.getConfig().getValue("transformation-saved.event.name",String.class); + + public static CloudEvent createCloudEvent(String workflowId, EventPOJO data, String eventType, String source) { + return baseCloudEventBuilder(workflowId, eventType, source) + .build(data); + } + + public static CloudEvent createCloudEvent(String workflowId, String eventType, String source) { + return baseCloudEventBuilder(workflowId, eventType, source) + .build(new EventPOJO()); + } + + public static CloudEvent createTransformationSavedEvent(String workflowId, String source) { + return baseCloudEventBuilder(workflowId, TRANSFORMATION_SAVED_EVENT, source) + .build(new EventPOJO()); + } + + public static CloudEvent createErrorEvent(String workflowCallerId, String message, String source) { + return createCloudEvent(workflowCallerId, new 
EventPOJO().setError(message), ERROR_EVENT, source); + } + private static CloudEventBuilder baseCloudEventBuilder(String workflowId, String eventType, String source) { + return CloudEventBuilder.create() + .id(UUID.randomUUID().toString()) + .source(source) + .type(eventType) + .time(OffsetDateTime.now()) + .extensions(Map.of("kogitoprocrefid", workflowId)); + } + + + public static class EventPOJO { + public String error; + public String message; + + public EventPOJO setError(String error) { + this.error = error; + return this; + } + + public EventPOJO setMessage(String message) { + this.message = message; + return this; + } + } +} diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/SaveTransformationFunction.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/SaveTransformationFunction.java new file mode 100644 index 00000000..6f37cd27 --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/SaveTransformationFunction.java @@ -0,0 +1,242 @@ +package io.rhdhorchestrator; + +import io.rhdhorchestrator.move2kube.ApiException; +import io.rhdhorchestrator.service.FolderCreatorService; +import io.rhdhorchestrator.service.GitService; +import io.rhdhorchestrator.service.Move2KubeService; +import io.quarkus.funqy.Funq; +import io.quarkus.funqy.knative.events.CloudEvent; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import org.apache.commons.io.FileUtils; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.util.Date; + +@ApplicationScoped +public class SaveTransformationFunction { + private static final Logger log = 
LoggerFactory.getLogger(SaveTransformationFunction.class); + + public static final String COMMIT_MESSAGE = "Move2Kube transformation"; + + public static final String SOURCE = "save-transformation"; + + public static final String[] MOVE2KUBE_OUTPUT_DIRECTORIES_TO_SAVE = new String[]{"source","deploy","scripts" }; + + @Inject + GitService gitService; + + @Inject + Move2KubeService move2KubeService; + + @Inject + FolderCreatorService folderCreatorService; + + + + @Funq("saveTransformation") + public CloudEvent saveTransformation(FunInput input) { + if (!input.validate()) { + log.error("One or multiple mandatory input field was missing; input: {}", input); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("One or multiple mandatory input field was missing; input: %s", input), + SOURCE); + } + + Path transformationOutputPath; + try { + transformationOutputPath = move2KubeService.getTransformationOutput(input.workspaceId, input.projectId, input.transformId); + } catch (IllegalArgumentException | IOException | ApiException e) { + log.error("Error while retrieving transformation output", e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot get transformation output of transformation %s" + + " in workspace %s for project %s for repo %s; error: %s", + input.transformId, input.workspaceId, input.projectId, input.gitRepo, e), SOURCE); + } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException e) { + log.error("Error while creating httpclient to get transformation output", e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot create client to get transformation output of transformation %s" + + " in workspace %s for project %s for repo %s; error: %s", + input.transformId, input.workspaceId, input.projectId, input.gitRepo, e), SOURCE); + } + + try { + cleanTransformationOutputFolder(transformationOutputPath); + } catch (IOException e) { + log.error("Error 
while cleaning extracted transformation output", e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot clean extracted transformation output of transformation %s" + + " in workspace %s for project %s for repo %s; error: %s", + input.transformId, input.workspaceId, input.projectId, input.gitRepo, e), SOURCE); + } + + return pushTransformationToGitRepo(input, transformationOutputPath); + } + + + private CloudEvent pushTransformationToGitRepo(FunInput input, Path transformationOutputPath) { + try { + Path gitDir = folderCreatorService.createGitRepositoryLocalFolder(input.gitRepo, String.format("%s-%s_%d", input.branch, input.transformId, new Date().getTime())); + + try (Git clonedRepo = gitService.cloneRepo(input.gitRepo, input.branch, gitDir)) { + CloudEvent errorEvent = createBranch(input, clonedRepo); + if (errorEvent != null) return errorEvent; + + try { + moveTransformationOutputToBranchDirectory(transformationOutputPath, gitDir); + } catch (IOException e) { + log.error("Cannot move transformation output to local git repo " + + "(repo {}; transformation: {}; workspace: {}; project: {})", + input.gitRepo, input.transformId, input.workspaceId, input.projectId, e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot move transformation output to local git repo " + + "(repo %s; transformation: %s; workspace: %s; project: %s); error: %s", + input.gitRepo, input.transformId, input.workspaceId, input.projectId, e), SOURCE); + } + + return commitAndPush(input, clonedRepo); + + } catch (GitAPIException e) { + log.error("Cannot clone repo {}", input.gitRepo, e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot clone repo %s; error: %s", input.gitRepo, e), + SOURCE); + } + } catch (IOException e) { + log.error("Cannot create temp dir to clone repo {}", input.gitRepo, e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot create 
temp dir to clone repo %s; error: %s", input.gitRepo, e), + SOURCE); + } + + } + + public void moveTransformationOutputToBranchDirectory(Path transformationOutput, Path gitDirectory) throws IOException { + cleanCurrentGitFolder(gitDirectory); + log.info("Moving extracted files located in {} to git repo folder {}", transformationOutput, gitDirectory); + Path finalPath; + try (var dir = Files.newDirectoryStream(transformationOutput.resolve("output"), Files::isDirectory)) { + finalPath = dir.iterator().next(); + } + log.info("FinalPath is --->{} and GitPath is {}", finalPath, gitDirectory); + for(String directory: MOVE2KUBE_OUTPUT_DIRECTORIES_TO_SAVE) { + FileUtils.copyDirectory(finalPath.resolve(Paths.get(directory)).toFile(), + gitDirectory.resolve(directory).toFile()); + } + FileUtils.copyFile(finalPath.resolve(Paths.get("Readme.md")).toFile(), + gitDirectory.resolve("Readme.md").toFile()); + } + + private static void cleanTransformationOutputFolder(Path transformationOutput) throws IOException { + try (var files = Files.walk(transformationOutput)) { + files.forEach(path -> { + if (path.toAbsolutePath().toString().contains(".git") && path.toFile().isDirectory()) { + try { + FileUtils.deleteDirectory(path.toFile()); + } catch (IOException e) { + log.error("Error while deleting .git directory {} of transformation output", path, e); + } + } + }); + } + } + + private static void cleanCurrentGitFolder(Path gitDirectory) throws IOException { + try (var files = Files.walk(gitDirectory, 1)) { + files.forEach(path -> { + if (!path.equals(gitDirectory) && !path.toAbsolutePath().toString().contains(".git")) { + File f = path.toFile(); + if (f.isDirectory()) { + try { + FileUtils.deleteDirectory(f); + } catch (IOException e) { + log.error("Error while deleting directory {}", path, e); + } + } else { + try { + FileUtils.delete(f); + } catch (IOException e) { + log.error("Error while deleting file {}", path, e); + } + } + } + }); + } + } + + private CloudEvent 
createBranch(FunInput input, Git clonedRepo) { + try { + if (gitService.branchExists(clonedRepo, input.branch)) { + log.error("Branch {} already exists on repo {}", input.branch, input.gitRepo); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Branch '%s' already exists on repo %s", + input.branch, input.gitRepo), SOURCE); + } + gitService.createBranch(clonedRepo, input.branch); + } catch (GitAPIException e) { + log.error("Cannot create branch {} to remote repo {}", input.branch, input.gitRepo, e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot create branch '%s' on repo %s; error: %s", + input.branch, input.gitRepo, e), SOURCE); + } + return null; + } + + private CloudEvent commitAndPush(FunInput input, Git clonedRepo) { + try { + gitService.commit(clonedRepo, COMMIT_MESSAGE, "."); + } catch (GitAPIException e) { + log.error("Cannot commit to local repo {}", input.gitRepo, e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot commit to local repo %s; error: %s", input.gitRepo, e), + SOURCE); + } + log.info("Pushing commit to branch {} of repo {}", input.branch, input.gitRepo); + try { + gitService.push(clonedRepo); + } catch (GitAPIException | IOException e) { + log.error("Cannot push branch {} to remote repo {}", input.branch, input.gitRepo, e); + return EventGenerator.createErrorEvent(input.workflowCallerId, String.format("Cannot push branch %s to remote repo %s; error: %s", + input.branch, input.gitRepo, e), SOURCE); + } + + var event = EventGenerator.createTransformationSavedEvent(input.workflowCallerId, SOURCE); + log.info("Sending cloud event {} to workflow {}", event, input.workflowCallerId); + return event; + } + + + public static class FunInput { + public String gitRepo; + public String branch; + + public String workspaceId; + public String projectId; + public String transformId; + + public String workflowCallerId; + + public boolean validate() { + 
return !((gitRepo == null || gitRepo.isBlank()) || + (branch == null || branch.isBlank()) || + (workspaceId == null || workspaceId.isBlank()) || + (projectId == null || projectId.isBlank()) || + (workflowCallerId == null || workflowCallerId.isBlank()) || + (transformId == null || transformId.isBlank())); + } + + @Override + public String toString() { + return "FunInput{" + + "gitRepo='" + gitRepo + '\'' + + ", branch='" + branch + '\'' + + ", workspaceId='" + workspaceId + '\'' + + ", projectId='" + projectId + '\'' + + ", transformId='" + transformId + '\'' + + ", workflowCallerId='" + workflowCallerId + '\'' + + '}'; + } + } + +} diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/FolderCreatorService.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/FolderCreatorService.java new file mode 100644 index 00000000..f372292f --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/FolderCreatorService.java @@ -0,0 +1,13 @@ +package io.rhdhorchestrator.service; + +import java.io.IOException; +import java.nio.file.Path; + + +public interface FolderCreatorService { + + + Path createGitRepositoryLocalFolder(String gitRepo, String uniqueIdentifier) throws IOException; + + Path createMove2KubeTransformationFolder(String transformationId) throws IOException; +} diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/FolderCreatorServiceImpl.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/FolderCreatorServiceImpl.java new file mode 100644 index 00000000..8883b663 --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/FolderCreatorServiceImpl.java @@ -0,0 +1,30 @@ +package io.rhdhorchestrator.service; + +import jakarta.enterprise.context.ApplicationScoped; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + 
+@ApplicationScoped +public class FolderCreatorServiceImpl implements FolderCreatorService { + static final Logger log = LoggerFactory.getLogger(FolderCreatorServiceImpl.class); + + @Override + public Path createGitRepositoryLocalFolder(String gitRepo, String uniqueIdentifier) throws IOException { + String folder = String.format("local-git-transform-%s-%s", StringUtils.substringAfterLast(gitRepo, "/"), uniqueIdentifier); + log.info("Creating temp folder: {}", folder); + return Files.createTempDirectory(folder); + } + + @Override + public Path createMove2KubeTransformationFolder(String transformationId) throws IOException { + String folder = String.format("move2kube-transform-%s", transformationId); + log.info("Creating temp folder: {}", folder); + return Files.createTempDirectory(folder); + } + +} diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/GitService.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/GitService.java new file mode 100644 index 00000000..93df7dfa --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/GitService.java @@ -0,0 +1,25 @@ +package io.rhdhorchestrator.service; + +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.GitAPIException; + +import java.io.IOException; +import java.nio.file.Path; + +public interface GitService { + + // Clone the git repository locally on the `targetDirectory` folder + Git cloneRepo(String repo, String branch, Path targetDirectory) throws GitAPIException, IOException; + + // Clone then archive the git repository. The archive is saved as `archiveOutputPath`. 
+ // The repository is locally persisted when cloned in the parent directory of `archiveOutputPath` + + void createBranch(Git repo, String branch) throws GitAPIException; + + void commit(Git repo, String commitMessage, String filePattern) throws GitAPIException; + + void push(Git repo) throws GitAPIException, IOException; + + // Check is a branch exists on the repository based on the cloned git repository persisted in the directory `gitDir` + public boolean branchExists(Git repo, String branch) throws GitAPIException; +} diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/GitServiceImpl.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/GitServiceImpl.java new file mode 100644 index 00000000..28719fde --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/GitServiceImpl.java @@ -0,0 +1,114 @@ +package io.rhdhorchestrator.service; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collections; + +import jakarta.enterprise.context.ApplicationScoped; + +import org.eclipse.jgit.api.CloneCommand; +import org.eclipse.jgit.api.CommitCommand; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.ListBranchCommand; +import org.eclipse.jgit.api.PushCommand; +import org.eclipse.jgit.api.TransportConfigCallback; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.jgit.api.errors.InvalidRemoteException; +import org.eclipse.jgit.transport.SshTransport; +import org.eclipse.jgit.transport.Transport; +import org.eclipse.jgit.transport.sshd.SshdSessionFactoryBuilder; +import org.eclipse.microprofile.config.ConfigProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@ApplicationScoped +public class GitServiceImpl implements GitService { + private static final Logger log = LoggerFactory.getLogger(GitServiceImpl.class); + public static final Path SSH_PRIV_KEY_PATH = 
Path.of(ConfigProvider.getConfig().getValue("ssh-priv-key-path", String.class)); + @Override + public Git cloneRepo(String repo, String branch, Path targetDirectory) throws GitAPIException, IOException { + try { + if (repo.startsWith("ssh") && !repo.contains("@")) { + log.info("No user specified in ssh git url, using 'git' user"); + String[] protocolAndHost = repo.split("://"); + String repoWithGitUser = "git@" + protocolAndHost[1]; + repo = protocolAndHost[0] + "://" + repoWithGitUser; + } + CloneCommand cloneCommand = Git.cloneRepository().setURI(repo).setDirectory(targetDirectory.toFile()); + log.info("Cloning repo {} in {} using ssh keys {}", repo, targetDirectory, SSH_PRIV_KEY_PATH); + cloneCommand.setTransportConfigCallback(getTransport(SSH_PRIV_KEY_PATH)); + + return cloneCommand.call(); + } catch (InvalidRemoteException e) { + log.error("remote repository server '{}' is not available", repo, e); + throw e; + } catch (GitAPIException e) { + log.error("Cannot clone repository: {}", repo, e); + throw e; + } catch (IOException e) { + log.error("Cannot set ssh transport: {}", repo, e); + throw e; + } + } + + @Override + public void createBranch(Git repo, String branch) throws GitAPIException { + log.info("Creating branch {} in repo {}", branch, repo.toString()); + repo.branchCreate().setName(branch).call(); + repo.checkout().setName(branch).call(); + } + + @Override + public boolean branchExists(Git repo, String branch) throws GitAPIException { + return repo.branchList() + .setListMode(ListBranchCommand.ListMode.ALL) + .call() + .stream() + .map(ref -> ref.getName()) + .anyMatch(branchName -> branchName.contains(branch)); + } + + @Override + public void commit(Git repo, String commitMessage, String filePattern) throws GitAPIException { + log.info("Committing files matching the pattern '{}' with message '{}' to repo {}", filePattern, commitMessage, repo); + repo.add().setUpdate(true).addFilepattern(filePattern).call(); + 
repo.add().addFilepattern(filePattern).call(); + CommitCommand commit = repo.commit().setMessage(commitMessage); + commit.setSign(Boolean.FALSE); + commit.call(); + } + + @Override + public void push(Git repo) throws GitAPIException, IOException { + log.info("Pushing to repo {}", repo); + PushCommand pushCommand = repo.push().setForce(false).setRemote("origin"); + + log.info("Push using ssh key {}", SSH_PRIV_KEY_PATH); + pushCommand.setTransportConfigCallback(getTransport(SSH_PRIV_KEY_PATH)); + + pushCommand.call(); + } + + public static TransportConfigCallback getTransport(Path sshKeyPath) throws IOException { + if (!sshKeyPath.toFile().exists()) { + throw new IOException("SSH key file at '%s' does not exists".formatted(sshKeyPath.toString())); + } + + var sshSessionFactory = new SshdSessionFactoryBuilder() + .setDefaultIdentities(f -> Collections.singletonList(sshKeyPath)) + .setPreferredAuthentications("publickey") + .setSshDirectory(sshKeyPath.getParent().toFile()) + .setHomeDirectory(sshKeyPath.getParent().toFile()) + .build(null); + + return new TransportConfigCallback() { + @Override + public void configure(Transport transport) { + SshTransport sshTransport = (SshTransport) transport; + sshTransport.setSshSessionFactory(sshSessionFactory); + } + }; + } +} diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/Move2KubeService.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/Move2KubeService.java new file mode 100644 index 00000000..768c68bf --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/Move2KubeService.java @@ -0,0 +1,14 @@ +package io.rhdhorchestrator.service; + +import io.rhdhorchestrator.move2kube.ApiException; + +import java.io.IOException; +import java.nio.file.Path; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; + +public interface Move2KubeService { + + public Path 
getTransformationOutput(String workspaceId, String projectId, String transformationId) throws IllegalArgumentException, IOException, ApiException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; +} diff --git a/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/Move2KubeServiceImpl.java b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/Move2KubeServiceImpl.java new file mode 100644 index 00000000..fd07a3ec --- /dev/null +++ b/move2kube/m2k-func/src/main/java/io/rhdhorchestrator/service/Move2KubeServiceImpl.java @@ -0,0 +1,128 @@ +package io.rhdhorchestrator.service; + +import io.rhdhorchestrator.move2kube.ApiClient; +import io.rhdhorchestrator.move2kube.ApiException; +import io.rhdhorchestrator.move2kube.api.ProjectOutputsApi; +import io.rhdhorchestrator.move2kube.api.ProjectsApi; +import io.rhdhorchestrator.move2kube.client.model.Project; +import io.rhdhorchestrator.move2kube.client.model.ProjectOutputsValue; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipFile; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; +import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManager; +import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder; +import org.apache.hc.client5.http.ssl.NoopHostnameVerifier; +import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactoryBuilder; +import org.apache.hc.core5.ssl.SSLContextBuilder; +import org.apache.hc.core5.ssl.TrustStrategy; +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.Path; +import 
java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.util.Collections; + +@ApplicationScoped +public class Move2KubeServiceImpl implements Move2KubeService { + private static final Logger log = LoggerFactory.getLogger(Move2KubeServiceImpl.class); + + @Inject + FolderCreatorService folderCreatorService; + + @ConfigProperty(name = "move2kube.api") + String move2kubeApi; + + @Override + public Path getTransformationOutput(String workspaceId, String projectId, String transformationId) throws IllegalArgumentException, IOException, ApiException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + Path outputPath = folderCreatorService.createMove2KubeTransformationFolder(String.format("move2kube-transform-%s", transformationId)); + ApiClient client = new ApiClient(createHttpClientAcceptingSelfSignedCerts()); + client.setBasePath(move2kubeApi); + + ProjectOutputsApi output = new ProjectOutputsApi(client); + + waitForTransformationToBeDone(workspaceId, projectId, transformationId, client); + + log.info("Retrieving transformation {} output (Workspace: {}, project: {}", workspaceId, projectId, transformationId); + File file = output.getProjectOutput(workspaceId, projectId, transformationId); + if (file == null) { + log.error("Cannot get output file from transformation {} (Workspace: {}, project: {}", transformationId, workspaceId, projectId); + throw new FileNotFoundException(String.format("Cannot get output file from transformation %s (Workspace: %s, project: %s", transformationId, workspaceId, projectId)); + } + + log.info("Extracting {} to {}", file.getAbsolutePath(), outputPath); + extractZipFile(file, outputPath); + + return outputPath; + } + + private void waitForTransformationToBeDone(String workspaceId, String projectId, String transformationId, ApiClient client) throws ApiException { + log.info("Waiting for transformation {} to be done", transformationId); + 
ProjectsApi project = new ProjectsApi(client); + Project res ; + ProjectOutputsValue o; + do { + res = project.getProject(workspaceId, projectId); + o = res.getOutputs().get(transformationId); + if (o == null) { + log.error("Output is null for transformation {}", transformationId); + throw new IllegalArgumentException(String.format("Cannot get the project (%s) transformation (%s) output from the list", projectId, transformationId)); + } + log.info("Status of transformation Id {} is: {}", transformationId, o.getStatus()); + try { + Thread.sleep(5000L); + } catch (InterruptedException ignored) { + + } + } while (!o.getStatus().equals("done")) ; + } + + public static void extractZipFile(File zipFile, Path extractPath) throws IOException { + try (ZipFile zip = new ZipFile(zipFile)) { + for (ZipArchiveEntry entry : Collections.list(zip.getEntries())) { + // normalizing the path to resolve any potentiall ../../ or leading / + var entryPath = extractPath.resolve(entry.getName()).normalize(); + if (!entryPath.startsWith(extractPath)) { + // this could be a zip slip attack by trying entries name with ../.. 
trying to + // escape the target dir + throw new IOException("Entry path name resolved an invalid path " + entryPath.toString()); + } + File entryFile = entryPath.toFile(); + if (entry.isDirectory()) { + entryFile.mkdirs(); + continue; + } + File parentDir = entryFile.getParentFile(); + if (parentDir != null && !parentDir.exists()) { + parentDir.mkdirs(); + } + try (FileOutputStream fos = new FileOutputStream(entryFile)) { + zip.getInputStream(entry).transferTo(fos); + } + } + } + } + + private static CloseableHttpClient createHttpClientAcceptingSelfSignedCerts() throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException { + PoolingHttpClientConnectionManager connectionManager = PoolingHttpClientConnectionManagerBuilder.create() + .setSSLSocketFactory(SSLConnectionSocketFactoryBuilder.create() + .setSslContext(new SSLContextBuilder().loadTrustMaterial(null, (TrustStrategy) (x509Certificates, s) -> true).build() + ) + .setHostnameVerifier(NoopHostnameVerifier.INSTANCE) + .build()) + .build(); + return HttpClientBuilder + .create() + .setConnectionManager(connectionManager) + .build(); + } +} diff --git a/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/SaveTransformationFunctionIT.java b/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/SaveTransformationFunctionIT.java new file mode 100644 index 00000000..fcc63285 --- /dev/null +++ b/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/SaveTransformationFunctionIT.java @@ -0,0 +1,52 @@ +package io.rhdhorchestrator; + +import io.rhdhorchestrator.move2kube.ApiException; +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.junit.QuarkusTestProfile; +import io.quarkus.test.junit.TestProfile; +import io.restassured.RestAssured; +import io.restassured.http.ContentType; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import 
java.io.IOException; +import java.util.Map; +import java.util.UUID; + +import static org.hamcrest.Matchers.containsString; + +@QuarkusTest +public class SaveTransformationFunctionIT { + @ConfigProperty(name = "transformation-saved.event.name") + private String transformationSavedEventName; + + @Test + @Disabled + // TODO: before each (or all?) create a workspoce and a project in move2kube local instance + public void testSaveTransformationOK() throws GitAPIException, IOException, ApiException { + UUID workflowCallerId = UUID.randomUUID(); + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"https://github.com/gabriel-farache/dotfiles\", " + + "\"branch\": \"m2k-test\"," + + " \"token\": \"\"," + + " \"workspaceId\": \"765a25fe-ab46-4aee-be7b-15d9b82da566\"," + + " \"projectId\": \"dd4a8dc9-bc61-4047-a9b0-88bf43306d55\"," + + " \"transformId\": \"b3350712-cac0-4a5c-a42d-355c5f9d1e5b\"," + + " \"workflowCallerId\": \"" + workflowCallerId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", transformationSavedEventName) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", SaveTransformationFunction.SOURCE) + .body(containsString("\"error\":null")); + } +} diff --git a/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/SaveTransformationFunctionTest.java b/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/SaveTransformationFunctionTest.java new file mode 100644 index 00000000..04948502 --- /dev/null +++ b/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/SaveTransformationFunctionTest.java @@ -0,0 +1,464 @@ +package io.rhdhorchestrator; + +import io.rhdhorchestrator.move2kube.ApiException; +import io.rhdhorchestrator.service.FolderCreatorService; +import 
io.rhdhorchestrator.service.GitService; +import io.rhdhorchestrator.service.Move2KubeService; +import io.rhdhorchestrator.service.Move2KubeServiceImpl; +import io.quarkus.test.InjectMock; +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.junit.QuarkusTestProfile; +import io.quarkus.test.junit.TestProfile; +import io.restassured.RestAssured; +import io.restassured.http.ContentType; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.filefilter.TrueFileFilter; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.jgit.api.errors.InvalidRemoteException; +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.File; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.util.Map; +import java.util.UUID; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@QuarkusTest +public class SaveTransformationFunctionTest { + + @InjectMock + GitService gitServiceMock; + + @InjectMock + Move2KubeService move2KubeServiceMock; + + @InjectMock + FolderCreatorService folderCreatorService; + + @ConfigProperty(name = "transformation-saved.event.name") + private String 
transformationSavedEventName; + + Path transformOutputPath; + Path gitRepoLocalFolder; + + + public static final String TRANSFORMED_ZIP = "SaveTransformationFunctionTest/references/transformation_output.zip"; + + public static final String REFERENCE_FOLDER_ZIP = "SaveTransformationFunctionTest/references/expected_output.zip"; + + public static File REFERENCE_OUTPUT_UNZIP_PATH; + + ClassLoader classLoader = getClass().getClassLoader(); + + private Git git; + + + @BeforeEach + public void setUp() throws GitAPIException, IOException { + File tmpDir = Files.createTempDirectory("gitRepoTest").toFile(); + git = Git.init().setDirectory(tmpDir).call(); + REFERENCE_OUTPUT_UNZIP_PATH = Files.createTempDirectory("refOutput").toFile(); + Move2KubeServiceImpl.extractZipFile(new File(classLoader.getResource(REFERENCE_FOLDER_ZIP).getFile()), REFERENCE_OUTPUT_UNZIP_PATH.toPath()); + } + + @AfterEach + public void tearDown() throws IOException { + git.getRepository().close(); + if (transformOutputPath != null) { + FileUtils.deleteDirectory(transformOutputPath.toFile()); + } + if (gitRepoLocalFolder != null) { + FileUtils.deleteDirectory(gitRepoLocalFolder.toFile()); + } + if (REFERENCE_OUTPUT_UNZIP_PATH != null) { + FileUtils.deleteDirectory(REFERENCE_OUTPUT_UNZIP_PATH); + } + transformOutputPath = null; + gitRepoLocalFolder = null; + REFERENCE_OUTPUT_UNZIP_PATH = null; + } + + @Test + public void testSaveTransformationIsWorking() throws GitAPIException, IOException, ApiException, URISyntaxException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + UUID workflowCallerId = UUID.randomUUID(); + UUID transformId = UUID.randomUUID(); + transformOutputPath = Files.createTempDirectory(String.format("move2kube-transform-TEST-%s", transformId)); + gitRepoLocalFolder = Files.createTempDirectory(String.format("local-git-transform-TEST-%s", transformId)); + + URL transformedZip = classLoader.getResource(TRANSFORMED_ZIP); + Move2KubeServiceImpl.extractZipFile(new 
File(transformedZip.getFile()), transformOutputPath); + + when(folderCreatorService.createGitRepositoryLocalFolder(eq("gitRepo"), anyString())).thenReturn(gitRepoLocalFolder); + when(move2KubeServiceMock.getTransformationOutput(anyString(), anyString(), anyString())).thenReturn(transformOutputPath); + when(gitServiceMock.cloneRepo(anyString(), anyString(), any())).thenReturn(git); + when(gitServiceMock.branchExists(any(), anyString())).thenReturn(false); + doNothing().when(gitServiceMock).createBranch(eq(git), anyString()); + doNothing().when(gitServiceMock).commit(eq(git), anyString(), anyString()); + doNothing().when(gitServiceMock).createBranch(eq(git), anyString()); + doNothing().when(gitServiceMock).push(eq(git)); + + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"gitRepo\", " + + "\"branch\": \"branch\"," + + " \"workspaceId\": \"workspaceId\"," + + " \"projectId\": \"projectId\"," + + " \"workflowCallerId\": \"" + workflowCallerId + "\"," + + " \"transformId\": \"" + transformId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", transformationSavedEventName) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", SaveTransformationFunction.SOURCE) + .body(containsString("\"error\":null")); + + verify(move2KubeServiceMock, times(1)).getTransformationOutput(anyString(), anyString(), anyString()); + verify(gitServiceMock, times(1)).cloneRepo(anyString(), anyString(), any()); + verify(gitServiceMock, times(1)).createBranch(eq(git), anyString()); + verify(gitServiceMock, times(1)).branchExists(any(), anyString()); + verify(gitServiceMock, times(1)).commit(eq(git), anyString(), anyString()); + verify(gitServiceMock, times(1)).push(eq(git)); + + 
AssertFileMovedToGitLocalFolder(REFERENCE_OUTPUT_UNZIP_PATH.toPath()); + } + + @Test + public void testSaveTransformationIsFailingWhenRetrievingTransformationOutput() throws IOException, ApiException, GitAPIException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + UUID workflowCallerId = UUID.randomUUID(); + UUID transformId = UUID.randomUUID(); + transformOutputPath = Files.createTempDirectory(String.format("move2kube-transform-TEST-%s", transformId)); + gitRepoLocalFolder = Files.createTempDirectory(String.format("local-git-transform-TEST-%s", transformId)); + URL transformedZip = classLoader.getResource(TRANSFORMED_ZIP); + Move2KubeServiceImpl.extractZipFile(new File(transformedZip.getFile()), transformOutputPath); + + when(folderCreatorService.createGitRepositoryLocalFolder(eq("gitRepo"), anyString())).thenReturn(gitRepoLocalFolder); + when(move2KubeServiceMock.getTransformationOutput(anyString(), anyString(), anyString())).thenThrow(new IOException("Error while retrieving transformation output")); + + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"gitRepo\", " + + "\"branch\": \"branch\"," + + " \"workspaceId\": \"workspaceId\"," + + " \"projectId\": \"projectId\"," + + " \"workflowCallerId\": \"" + workflowCallerId + "\"," + + " \"transformId\": \"" + transformId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", EventGenerator.ERROR_EVENT) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", SaveTransformationFunction.SOURCE) + .body(not(containsString("\"error\":null"))); + + verify(move2KubeServiceMock, times(1)).getTransformationOutput(anyString(), anyString(), anyString()); + verify(gitServiceMock, times(0)).cloneRepo(anyString(), anyString(), any()); + 
verify(gitServiceMock, times(0)).createBranch(eq(git), anyString()); + verify(gitServiceMock, times(0)).branchExists(any(), anyString()); + verify(gitServiceMock, times(0)).commit(eq(git), anyString(), anyString()); + verify(gitServiceMock, times(0)).push(eq(git)); + } + + @Test + public void testSaveTransformationGitCloneFail() throws GitAPIException, IOException, ApiException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + UUID workflowCallerId = UUID.randomUUID(); + UUID transformId = UUID.randomUUID(); + transformOutputPath = Files.createTempDirectory(String.format("move2kube-transform-TEST-%s", transformId)); + gitRepoLocalFolder = Files.createTempDirectory(String.format("local-git-transform-TEST-%s", transformId)); + URL transformedZip = classLoader.getResource(TRANSFORMED_ZIP); + Move2KubeServiceImpl.extractZipFile(new File(transformedZip.getFile()), transformOutputPath); + + when(folderCreatorService.createGitRepositoryLocalFolder(eq("gitRepo"), anyString())).thenReturn(gitRepoLocalFolder); + when(move2KubeServiceMock.getTransformationOutput(anyString(), anyString(), anyString())).thenReturn(transformOutputPath); + when(gitServiceMock.cloneRepo(anyString(), anyString(), any())).thenThrow(new InvalidRemoteException("Error while cloning repo")); + + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"gitRepo\", " + + "\"branch\": \"branch\"," + + " \"workspaceId\": \"workspaceId\"," + + " \"projectId\": \"projectId\"," + + " \"workflowCallerId\": \"" + workflowCallerId + "\"," + + " \"transformId\": \"" + transformId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", EventGenerator.ERROR_EVENT) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", 
SaveTransformationFunction.SOURCE) + .body(not(containsString("\"error\":null"))); + + verify(move2KubeServiceMock, times(1)).getTransformationOutput(anyString(), anyString(), anyString()); + verify(gitServiceMock, times(1)).cloneRepo(anyString(), anyString(), any()); + verify(gitServiceMock, times(0)).branchExists(any(), anyString()); + verify(gitServiceMock, times(0)).createBranch(eq(git), anyString()); + verify(gitServiceMock, times(0)).commit(eq(git), anyString(), anyString()); + verify(gitServiceMock, times(0)).push(eq(git)); + } + + @Test + public void testSaveTransformationBranchExists() throws GitAPIException, IOException, ApiException, URISyntaxException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + UUID workflowCallerId = UUID.randomUUID(); + UUID transformId = UUID.randomUUID(); + transformOutputPath = Files.createTempDirectory(String.format("move2kube-transform-TEST-%s", transformId)); + gitRepoLocalFolder = Files.createTempDirectory(String.format("local-git-transform-TEST-%s", transformId)); + URL transformedZip = classLoader.getResource(TRANSFORMED_ZIP); + Move2KubeServiceImpl.extractZipFile(new File(transformedZip.getFile()), transformOutputPath); + + when(folderCreatorService.createGitRepositoryLocalFolder(eq("gitRepo"), anyString())).thenReturn(gitRepoLocalFolder); + when(move2KubeServiceMock.getTransformationOutput(anyString(), anyString(), anyString())).thenReturn(transformOutputPath); + when(gitServiceMock.cloneRepo(anyString(), anyString(), any())).thenReturn(git); + when(gitServiceMock.branchExists(any(), anyString())).thenReturn(true); + + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"gitRepo\", " + + "\"branch\": \"branch\"," + + " \"workspaceId\": \"workspaceId\"," + + " \"projectId\": \"projectId\"," + + " 
\"workflowCallerId\": \"" + workflowCallerId + "\"," + + " \"transformId\": \"" + transformId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", EventGenerator.ERROR_EVENT) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", SaveTransformationFunction.SOURCE) + .body(not(containsString("\"error\":null"))); + + verify(move2KubeServiceMock, times(1)).getTransformationOutput(anyString(), anyString(), anyString()); + verify(gitServiceMock, times(1)).cloneRepo(anyString(), anyString(), any()); + verify(gitServiceMock, times(1)).branchExists(any(), anyString()); + verify(gitServiceMock, times(0)).createBranch(eq(git), anyString()); + verify(gitServiceMock, times(0)).commit(eq(git), anyString(), anyString()); + verify(gitServiceMock, times(0)).push(eq(git)); + } + + @Test + public void testSaveTransformationCreateBranchFail() throws GitAPIException, IOException, ApiException, URISyntaxException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + UUID workflowCallerId = UUID.randomUUID(); + UUID transformId = UUID.randomUUID(); + transformOutputPath = Files.createTempDirectory(String.format("move2kube-transform-TEST-%s", transformId)); + gitRepoLocalFolder = Files.createTempDirectory(String.format("local-git-transform-TEST-%s", transformId)); + URL transformedZip = classLoader.getResource(TRANSFORMED_ZIP); + Move2KubeServiceImpl.extractZipFile(new File(transformedZip.getFile()), transformOutputPath); + + when(folderCreatorService.createGitRepositoryLocalFolder(eq("gitRepo"), anyString())).thenReturn(gitRepoLocalFolder); + when(move2KubeServiceMock.getTransformationOutput(anyString(), anyString(), anyString())).thenReturn(transformOutputPath); + when(gitServiceMock.cloneRepo(anyString(), anyString(), any())).thenReturn(git); + when(gitServiceMock.branchExists(any(), anyString())).thenReturn(false); + doThrow(new InvalidRemoteException("Error while creating new 
branch")).when(gitServiceMock).createBranch(eq(git), anyString()); + + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"gitRepo\", " + + "\"branch\": \"branch\"," + + " \"workspaceId\": \"workspaceId\"," + + " \"projectId\": \"projectId\"," + + " \"workflowCallerId\": \"" + workflowCallerId + "\"," + + " \"transformId\": \"" + transformId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", EventGenerator.ERROR_EVENT) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", SaveTransformationFunction.SOURCE) + .body(not(containsString("\"error\":null"))); + + verify(move2KubeServiceMock, times(1)).getTransformationOutput(anyString(), anyString(), anyString()); + verify(gitServiceMock, times(1)).cloneRepo(anyString(), anyString(), any()); + verify(gitServiceMock, times(1)).branchExists(any(), anyString()); + verify(gitServiceMock, times(1)).createBranch(eq(git), anyString()); + verify(gitServiceMock, times(0)).commit(eq(git), anyString(), anyString()); + verify(gitServiceMock, times(0)).push(eq(git)); + } + + @Test + public void testSaveTransformationCommitChangesFail() throws GitAPIException, IOException, ApiException, URISyntaxException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + UUID workflowCallerId = UUID.randomUUID(); + UUID transformId = UUID.randomUUID(); + transformOutputPath = Files.createTempDirectory(String.format("move2kube-transform-TEST-%s", transformId)); + gitRepoLocalFolder = Files.createTempDirectory(String.format("local-git-transform-TEST-%s", transformId)); + URL transformedZip = classLoader.getResource(TRANSFORMED_ZIP); + Move2KubeServiceImpl.extractZipFile(new File(transformedZip.getFile()), transformOutputPath); + + 
when(folderCreatorService.createGitRepositoryLocalFolder(eq("gitRepo"), anyString())).thenReturn(gitRepoLocalFolder); + when(move2KubeServiceMock.getTransformationOutput(anyString(), anyString(), anyString())).thenReturn(transformOutputPath); + when(gitServiceMock.cloneRepo(anyString(), anyString(), any())).thenReturn(git); + when(gitServiceMock.branchExists(any(), anyString())).thenReturn(false); + doNothing().when(gitServiceMock).createBranch(eq(git), anyString()); + doThrow(new InvalidRemoteException("Error while committing changes")).when(gitServiceMock).commit(eq(git), anyString(), anyString()); + + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"gitRepo\", " + + "\"branch\": \"branch\"," + + " \"workspaceId\": \"workspaceId\"," + + " \"projectId\": \"projectId\"," + + " \"workflowCallerId\": \"" + workflowCallerId + "\"," + + " \"transformId\": \"" + transformId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", EventGenerator.ERROR_EVENT) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", SaveTransformationFunction.SOURCE) + .body(not(containsString("\"error\":null"))); + + verify(move2KubeServiceMock, times(1)).getTransformationOutput(anyString(), anyString(), anyString()); + verify(gitServiceMock, times(1)).cloneRepo(anyString(), anyString(), any()); + verify(gitServiceMock, times(1)).branchExists(any(), anyString()); + verify(gitServiceMock, times(1)).createBranch(eq(git), anyString()); + verify(gitServiceMock, times(1)).commit(eq(git), anyString(), anyString()); + verify(gitServiceMock, times(0)).push(eq(git)); + + AssertFileMovedToGitLocalFolder(REFERENCE_OUTPUT_UNZIP_PATH.toPath()); + } + + @Test + public void testSaveTransformationGitPushFails() throws GitAPIException, 
IOException, ApiException, URISyntaxException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + UUID workflowCallerId = UUID.randomUUID(); + UUID transformId = UUID.randomUUID(); + transformOutputPath = Files.createTempDirectory(String.format("move2kube-transform-TEST-%s", transformId)); + gitRepoLocalFolder = Files.createTempDirectory(String.format("local-git-transform-TEST-%s", transformId)); + URL transformedZip = classLoader.getResource(TRANSFORMED_ZIP); + Move2KubeServiceImpl.extractZipFile(new File(transformedZip.getFile()), transformOutputPath); + + when(folderCreatorService.createGitRepositoryLocalFolder(eq("gitRepo"), anyString())).thenReturn(gitRepoLocalFolder); + when(move2KubeServiceMock.getTransformationOutput(anyString(), anyString(), anyString())).thenReturn(transformOutputPath); + when(gitServiceMock.cloneRepo(anyString(), anyString(), any())).thenReturn(git); + doNothing().when(gitServiceMock).commit(eq(git), anyString(), anyString()); + doNothing().when(gitServiceMock).createBranch(eq(git), anyString()); + doThrow(new InvalidRemoteException("Error while pushing to remote")).when(gitServiceMock).push(eq(git)); + + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"gitRepo\", " + + "\"branch\": \"branch\"," + + " \"workspaceId\": \"workspaceId\"," + + " \"projectId\": \"projectId\"," + + " \"workflowCallerId\": \"" + workflowCallerId + "\"," + + " \"transformId\": \"" + transformId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", EventGenerator.ERROR_EVENT) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", SaveTransformationFunction.SOURCE) + .body(not(containsString("\"error\":null"))); + + verify(move2KubeServiceMock, 
times(1)).getTransformationOutput(anyString(), anyString(), anyString()); + verify(gitServiceMock, times(1)).cloneRepo(anyString(), anyString(), any()); + verify(gitServiceMock, times(1)).branchExists(any(), anyString()); + verify(gitServiceMock, times(1)).createBranch(eq(git), anyString()); + verify(gitServiceMock, times(1)).commit(eq(git), anyString(), anyString()); + verify(gitServiceMock, times(1)).push(eq(git)); + + AssertFileMovedToGitLocalFolder(REFERENCE_OUTPUT_UNZIP_PATH.toPath()); + } + + @Test + public void testSaveTransformationMissingInput() throws GitAPIException, IOException, ApiException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + UUID workflowCallerId = UUID.randomUUID(); + RestAssured.given().contentType("application/json") + .header("ce-specversion", "1.0") + .header("ce-id", UUID.randomUUID().toString()) + .header("ce-type", "save-transformation") + .header("ce-source", "test") + .body("{\"gitRepo\": \"gitRepo\", " + + "\"branch\": \"branch\"," + + " \"projectId\": \"projectId\"," + + " \"workflowCallerId\": \"" + workflowCallerId + "\"" + + "}") + .post("/") + .then() + .statusCode(200) + .contentType(ContentType.JSON) + .header("ce-type", EventGenerator.ERROR_EVENT) + .header("ce-kogitoprocrefid", workflowCallerId.toString()) + .header("ce-source", SaveTransformationFunction.SOURCE) + .body(not(containsString("\"error\":null"))); + + verify(move2KubeServiceMock, times(0)).getTransformationOutput(anyString(), anyString(), anyString()); + verify(gitServiceMock, times(0)).cloneRepo(anyString(), anyString(), any()); + verify(gitServiceMock, times(0)).branchExists(any(), anyString()); + verify(gitServiceMock, times(0)).createBranch(eq(git), anyString()); + verify(gitServiceMock, times(0)).commit(eq(git), anyString(), anyString()); + verify(gitServiceMock, times(0)).push(eq(git)); + } + + private void AssertFileMovedToGitLocalFolder(Path localOutputRef) { + Path refDeploy = Path.of(localOutputRef.toString(), "/deploy"); 
+ Path actualDeploy = Path.of(gitRepoLocalFolder.toString(), "/deploy"); + Path refScripts = Path.of(localOutputRef.toString(), "/scripts"); + Path actualScripts = Path.of(gitRepoLocalFolder.toString(), "/scripts"); + Path refSource = Path.of(localOutputRef.toString(), "/source"); + Path actualSource = Path.of(gitRepoLocalFolder.toString(), "/source"); + AssertDirsEqual(refDeploy.toFile(), actualDeploy.toFile()); + AssertDirsEqual(refScripts.toFile(), actualScripts.toFile()); + AssertDirsEqual(refSource.toFile(), actualSource.toFile()); + } + + public static void AssertDirsEqual(File ref, File actual) { + Assertions.assertArrayEquals( + FileUtils.listFilesAndDirs(ref, TrueFileFilter.TRUE, TrueFileFilter.TRUE).stream().map(File::getName).sorted().toArray(), + FileUtils.listFilesAndDirs(actual, TrueFileFilter.TRUE, TrueFileFilter.TRUE).stream().map(File::getName).sorted().toArray()); + } +} diff --git a/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/service/GitServiceImplTest.java b/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/service/GitServiceImplTest.java new file mode 100644 index 00000000..465827bf --- /dev/null +++ b/move2kube/m2k-func/src/test/java/io/rhdhorchestrator/service/GitServiceImplTest.java @@ -0,0 +1,123 @@ +package io.rhdhorchestrator.service; + +import java.io.IOException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.Collections; +import java.util.Comparator; + +import org.apache.sshd.common.keyprovider.ClassLoadableResourceKeyPairProvider; +import org.apache.sshd.git.GitLocationResolver; +import org.apache.sshd.git.pack.GitPackCommandFactory; +import org.apache.sshd.server.SshServer; +import org.apache.sshd.server.auth.password.AcceptAllPasswordAuthenticator; +import org.apache.sshd.server.keyprovider.SimpleGeneratorHostKeyProvider; +import org.apache.sshd.sftp.server.SftpSubsystemFactory; +import 
org.eclipse.jgit.api.AddCommand; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.storage.file.FileRepositoryBuilder; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + + +class GitServiceImplTest { + static SshServer sshd; + static Path serverDir; + static Path cloneDir; + static final String testFile = "test-file"; + static final String testFileContent = "test-content"; + GitServiceImpl underTest = new GitServiceImpl(); + + @Test + void cloneRepo() throws IOException, GitAPIException { + try (Git git = underTest.cloneRepo("ssh://%s:%d%s".formatted(sshd.getHost(), sshd.getPort(), serverDir.toString()), "", cloneDir)) { + Path testfile = cloneDir.resolve(testFile); + Assertions.assertTrue(Files.exists(testfile), "cloned file doesn't exists"); + Assertions.assertEquals(Files.readString(testfile), testFileContent); + } + } + + @Test + void push() throws IOException, GitAPIException { + try (Git git = underTest.cloneRepo("ssh://%s:%d%s".formatted(sshd.getHost(), sshd.getPort(), serverDir.toString()), "", cloneDir)) { + Files.write(cloneDir.resolve("push-test-file"), "foo".getBytes()); + underTest.commit(git, "push test commit", "."); + Assertions.assertDoesNotThrow(() -> underTest.push(git)); + } + } + + @BeforeAll + static void init() throws IOException, GitAPIException { + // create an ssh server + sshd = SshServer.setUpDefaultServer(); + sshd.setHost("localhost"); + sshd.setPort(30022); + URL hostkey = GitServiceImplTest.class.getClassLoader().getResource("m2k-test-ssh-id_ed25519.pub"); + SimpleGeneratorHostKeyProvider keyPairProvider = new SimpleGeneratorHostKeyProvider(Paths.get(hostkey.getPath())); + sshd.setKeyPairProvider(keyPairProvider); + 
keyPairProvider.loadKeys(null); + sshd.setPasswordAuthenticator(AcceptAllPasswordAuthenticator.INSTANCE); + + serverDir = Files.createTempDirectory( + "m2kfunc-test-git-repo-server", + PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxrwx"))); + + sshd.setSubsystemFactories(Collections.singletonList(new SftpSubsystemFactory())); + sshd.setCommandFactory(new GitPackCommandFactory(GitLocationResolver.constantPath(Paths.get("/")))); + sshd.setKeyPairProvider(new ClassLoadableResourceKeyPairProvider("test.key")); + sshd.setPublickeyAuthenticator((username, key, session) -> true); + sshd.start(); + + // create git repo on the ssh server + Repository repo = new FileRepositoryBuilder().setWorkTree(serverDir.toFile()).build(); + repo.create(true); + Files.write(serverDir.resolve(testFile).toAbsolutePath(), testFileContent.getBytes()); + new AddCommand(repo).addFilepattern(".").call(); + Git git = new Git(repo); + git.add().addFilepattern(".").call(); + git.commit().setAuthor("me", "me@me").setMessage("first commit from test init").call(); + } + + @AfterAll + static void afterAll() throws IOException { + try (var walk = Files.walk(serverDir)) { + walk.sorted(Comparator.reverseOrder()).forEach(path -> { + try { + Files.deleteIfExists(path); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + } + + @BeforeEach + void setup() throws IOException { + cloneDir = Files.createTempDirectory( + "m2kfunc-test-git-repo-clone", + PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxrwx"))); + } + + @AfterEach + void teardown() throws IOException { + try (var walk = Files.walk(cloneDir)) { + walk.sorted(Comparator.reverseOrder()).forEach(path -> { + try { + Files.deleteIfExists(path); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + } + +} \ No newline at end of file diff --git a/mta-v7.x/README.md b/mta-v7.x/README.md new file mode 100644 index 00000000..39debbd3 --- /dev/null +++
b/mta-v7.x/README.md @@ -0,0 +1,49 @@ +# MTA - migration analysis workflow + +# Synopsis +This workflow is an assessment workflow type that invokes an application analysis workflow using [MTA][1] +and returns the [move2kube][3] workflow reference, to run next if the analysis is considered to be successful. + +Users are encouraged to use this workflow as a self-service alternative for interacting with the MTA UI. Instead of running +a mass-migration of projects from a managed place, the project stakeholders can use this (or automation) to regularly check +the cloud-readiness compatibility of their code. + +# Inputs +- `repositoryUrl` [mandatory] - the git repo url to examine +- `recipients` [mandatory] - A list of recipients for the notification in the format of `user:<namespace>/<username>` or `group:<namespace>/<groupname>`, i.e. `user:default/jsmith`. + +# Output +1. On completion the workflow returns an [options structure][2] in the exit state of the workflow (also named variables in SonataFlow) +linking to the [move2kube][3] workflow that will generate k8s manifests for container deployment. +1. When the workflow completes there should be a report link on the exit state of the workflow (also named variables in SonataFlow) +Currently this is working with MTA version 6.2.x and in the future 7.x version the report link will be removed or will be made +optional. Instead of an html report the workflow will use a machine friendly json file. + +# Dependencies +- MTA version 6.2.x or Konveyor 0.2.x + + - For OpenShift install MTA using the OperatorHub, search for MTA.
Documentation is [here][1] + - For Kubernetes install Konveyor with olm + ```bash + kubectl create -f https://operatorhub.io/install/konveyor-0.2/konveyor-operator.yaml + ``` +# Runtime configuration + +| key | default | description | +|------------------------------------------------------|----------------------------------------------------------------------------------------------|-------------------------------------------| +| mta.url | http://mta-ui.openshift-mta.svc.cluster.local:8080 | Endpoint (with protocol and port) for MTA | +| quarkus.rest-client.mta_json.url | ${mta.url}/hub | MTA hub api | +| quarkus.rest-client.notifications.url | ${BACKSTAGE_NOTIFICATIONS_URL:http://backstage-backstage.rhdh-operator/api/notifications/} | Backstage notification url | +| quarkus.rest-client.mta_json.auth.basicAuth.username | username | Username for the MTA api | +| quarkus.rest-client.mta_json.auth.basicAuth.password | password | Password for the MTA api | + +All the configuration items are on [./application.properties] + +For running and testing the workflow refer to [mta testing](https://github.com/rhdhorchestrator/serverless-workflows/tree/main/mta#output). 
+ +# Workflow Diagram +![mta workflow diagram](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/mta/mta.svg?raw=true) + +[1]: https://developers.redhat.com/products/mta/download +[2]: https://github.com/rhdhorchestrator/serverless-workflows/blob/main/assessment/schema/workflow-options-output-schema.json +[3]: https://github.com/rhdhorchestrator/serverless-workflows/tree/main/move2kube diff --git a/scripts/build_m2kfunc.sh b/scripts/build_m2kfunc.sh index 6c2b4c88..bc9e6673 100755 --- a/scripts/build_m2kfunc.sh +++ b/scripts/build_m2kfunc.sh @@ -15,10 +15,10 @@ curl https://raw.githubusercontent.com/konveyor/move2kube-api/main/assets/openap rm -rf java-client ${CONTAINER_ENGINE} run --rm -v "${PWD}":/tmp -e GENERATE_PERMISSIONS=true openapitools/openapi-generator-cli \ generate -i /tmp/openapi.json -g java -o /tmp/java-client \ - --invoker-package dev.parodos.move2kube \ - --model-package dev.parodos.move2kube.client.model \ - --api-package dev.parodos.move2kube.api \ - --group-id dev.parodos --artifact-id move2kube --artifact-version v${VERSION} \ + --invoker-package io.rhdhorchestrator.move2kube \ + --model-package io.rhdhorchestrator.move2kube.client.model \ + --api-package io.rhdhorchestrator.move2kube.api \ + --group-id io.rhdhorchestrator --artifact-id move2kube --artifact-version v${VERSION} \ --library apache-httpclient ${CONTAINER_ENGINE} run --rm -v "${WORKDIR}":/workdir -e MVN_OPTS="${MVN_OPTS}" -w /workdir/workflows/"${WORKFLOW_ID}" \ diff --git a/setup/README.md b/setup/README.md index a6aa4b68..5b90fba2 100644 --- a/setup/README.md +++ b/setup/README.md @@ -15,4 +15,4 @@ Customize the `push` command to publish in your own repository: docker push quay.io/$USER/ubi9-pipeline:latest ``` # A workflow for building the image -When the Dockerfile is changed and merged, a [workflow](https://github.com/parodos-dev/serverless-workflows/blob/main/.github/workflows/builder-utility.yaml) is triggered to build and publish the image. 
+When the Dockerfile is changed and merged, a [workflow](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/.github/workflows/builder-utility.yaml) is triggered to build and publish the image. diff --git a/workflows/create-ocp-project/README.md b/workflows/create-ocp-project/README.md index ef6da13e..147feca1 100644 --- a/workflows/create-ocp-project/README.md +++ b/workflows/create-ocp-project/README.md @@ -34,8 +34,8 @@ Application properties can be initialized from environment variables before runn - `OCP project to create` [required] - the OCP project to be created on the OCP cluster. ## Workflow diagram -![Create OpenShift Project diagram](https://github.com/parodos-dev/serverless-workflow-examples/blob/main/create-ocp-project/create-ocp-project.svg?raw=true) +![Create OpenShift Project diagram](https://github.com/rhdhorchestrator/serverless-workflow-examples/blob/main/create-ocp-project/create-ocp-project.svg?raw=true) ## Installation -See [official installation guide](https://github.com/parodos-dev/serverless-workflows-config/blob/main/docs/main/create-ocp-project) +See [official installation guide](https://github.com/rhdhorchestrator/serverless-workflows-config/blob/main/docs/main/create-ocp-project) diff --git a/workflows/create-ocp-project/create-ocp-project.svg b/workflows/create-ocp-project/create-ocp-project.svg index 42476c7b..3424bfca 100644 --- a/workflows/create-ocp-project/create-ocp-project.svg +++ b/workflows/create-ocp-project/create-ocp-project.svg @@ -1 +1 @@ -Start[Audit]: workflow started Open issue on Jira for operations Notify Backstage: Jira ticket created Poll Operation Jira issue Check Operation issue done Create ProjectPost operation: get project resource Post operation: check project resource created su ccessfully Notify project active Notify project not active [Audit]: Close issue on Jira End [Audit]: Close issue on Jira - Project Creation Denied EndResource reque... Resource reque... Resource reque... 
Resource reque... Project Active Project Active \ No newline at end of file +Start[Audit]: workflow started Open issue on Jira for operations Notify Backstage: Jira ticket created Poll Operation Jira issue Check Operation issue done Create ProjectPost operation: get project resource Post operation: check project resource created su ccessfully Notify project active Notify project not active [Audit]: Close issue on Jira End [Audit]: Close issue on Jira - Project Creation Denied EndResource reque... Resource reque... Resource reque... Resource reque... Project Active Project Active \ No newline at end of file diff --git a/workflows/create-ocp-project/create-ocp-project.sw.yaml b/workflows/create-ocp-project/create-ocp-project.sw.yaml index 880e00a6..e570ef66 100644 --- a/workflows/create-ocp-project/create-ocp-project.sw.yaml +++ b/workflows/create-ocp-project/create-ocp-project.sw.yaml @@ -9,7 +9,7 @@ extensions: outputSchema: schemas/workflow-output-schema.json - extensionid: workflow-uri-definitions definitions: - notifications: "https://raw.githubusercontent.com/parodos-dev/serverless-workflows/main/workflows/shared/specs/notifications-openapi.yaml" + notifications: "https://raw.githubusercontent.com/rhdhorchestrator/serverless-workflows/main/workflows/shared/specs/notifications-openapi.yaml" functions: - name: jiraCreateIssue operation: specs/jira-openapi.yaml#createIssue diff --git a/workflows/escalation/README.md b/workflows/escalation/README.md index 6096b9ea..5324d03b 100644 --- a/workflows/escalation/README.md +++ b/workflows/escalation/README.md @@ -6,7 +6,7 @@ An escalation workflow integrated with Atlassian JIRA using [SonataFlow](https:/ * Access to an OpenShift cluster with `admin` Role ## Workflow diagram -![Escalation workflow diagram](https://github.com/parodos-dev/serverless-workflows/blob/main/workflows/escalation/ticketEscalation.svg?raw=true) +![Escalation workflow 
diagram](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/workflows/escalation/ticketEscalation.svg?raw=true) **Note**: The value of the `.jiraIssue.fields.status.statusCategory.key` field is the one to be used to identify when the `done` status is reached, all the other diff --git a/workflows/escalation/jira-listener/README.md b/workflows/escalation/jira-listener/README.md index ff5ad2af..7a9b8177 100644 --- a/workflows/escalation/jira-listener/README.md +++ b/workflows/escalation/jira-listener/README.md @@ -12,7 +12,7 @@ The generated event includes only the relevant data, e.g.: "workFlowInstanceId":"500", "workflowName":"escalation", "status":"done" -} +} ``` No events are generated for discarded webhooks. @@ -24,7 +24,7 @@ The following environment variables can modify the configuration properties: | Variable | Description | Default value | |----------|-------------|---------------| -| CLOUD_EVENT_TYPE | The value of `ce-type` header in the generated `CloudEvent` | `dev.parodos.escalation` | +| CLOUD_EVENT_TYPE | The value of `ce-type` header in the generated `CloudEvent` | `io.rhdhorchestrator.escalation` | | CLOUD_EVENT_SOURCE | The value of `ce-source` header in the generated `CloudEvent` | `ticket.listener` | | WORKFLOW_INSTANCE_ID_LABEL | The name part of the Jira ticket label that contains the ID of the relates SWF instance (e.g. `workflowInstanceId=123`) | `workflowInstanceId` | | WORKFLOW_NAME_LABEL | The name part of the Jira ticket label that contains the name of the SWF (e.g. `workflowName=escalation`) | `workflowName` | @@ -33,7 +33,7 @@ The following environment variables can modify the configuration properties: ### Event modeling Instead of leveraging on the [Jira Java SDK](https://developer.atlassian.com/server/jira/platform/java-apis/), we used a simplified model of the relevant data, -defined in the [WebhookEvent](./src/main/java/dev/parodos/jiralistener/model/WebhookEvent.java) Java class. 
This way we can simplify the dependency stack +defined in the [WebhookEvent](./src/main/java/io/rhdhorchestrator/jiralistener/model/WebhookEvent.java) Java class. This way we can simplify the dependency stack and also limit the risk of parsing failures due to unexpected changes in the payload format. Parsing was derived from the original example in [this Backstage repo](https://github.com/tiagodolphine/backstage/blob/eedfe494dd313a3ad6a484c0596ba12d6199c1a8/plugins/swf-backend/src/service/JiraService.ts#L66C19-L66C40) @@ -90,10 +90,10 @@ apiVersion: route.openshift.io/v1 kind: Route metadata: annotations: - haproxy.router.openshift.io/timeout: 600s + haproxy.router.openshift.io/timeout: 600s kubernetes.io/tls-acme: "true" - name: jira-listener - namespace: knative-serving-ingress + name: jira-listener + namespace: knative-serving-ingress ... ``` @@ -130,7 +130,7 @@ protocol instead of the expected `https`. To overcome this issue, you can define a different name for the `jira-listener` service by setting the property `jiralistener.name` as in: ```bash -helm upgrade -n default escalation-eda helm/escalation-eda --set jiralistener.name=my-jira-listener --debug +helm upgrade -n default escalation-eda helm/escalation-eda --set jiralistener.name=my-jira-listener --debug ``` ### Troubleshooting the SAN short enough to fit in CN issue diff --git a/workflows/escalation/jira-listener/pom.xml b/workflows/escalation/jira-listener/pom.xml index 335d11d4..7f83e8f8 100644 --- a/workflows/escalation/jira-listener/pom.xml +++ b/workflows/escalation/jira-listener/pom.xml @@ -4,7 +4,7 @@ xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> 4.0.0 - dev.parodos + io.rhdhorchestrator jira-listener 1.0.0-SNAPSHOT diff --git a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/EventNotifier.java b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/EventNotifier.java index d5ad81f8..b4c1a944 100644 
--- a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/EventNotifier.java +++ b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/EventNotifier.java @@ -1,4 +1,4 @@ -package dev.parodos.jiralistener; +package io.rhdhorchestrator.jiralistener; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; diff --git a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/JiraListenerResource.java b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/JiraListenerResource.java index 51d39215..3d684b33 100644 --- a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/JiraListenerResource.java +++ b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/JiraListenerResource.java @@ -1,4 +1,4 @@ -package dev.parodos.jiralistener; +package io.rhdhorchestrator.jiralistener; import java.io.IOException; import java.lang.System.Logger; @@ -7,9 +7,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; -import dev.parodos.jiralistener.JiraListenerService.OnEventResponse; -import dev.parodos.jiralistener.model.JiraIssue; -import dev.parodos.jiralistener.model.WebhookEvent; +import io.rhdhorchestrator.jiralistener.JiraListenerService.OnEventResponse; +import io.rhdhorchestrator.jiralistener.model.JiraIssue; +import io.rhdhorchestrator.jiralistener.model.WebhookEvent; import jakarta.inject.Inject; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.POST; diff --git a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/JiraListenerService.java b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/JiraListenerService.java index aaf4ec2d..8097ee68 100644 --- a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/JiraListenerService.java +++ b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/JiraListenerService.java @@ -1,4 +1,4 @@ -package 
dev.parodos.jiralistener; +package io.rhdhorchestrator.jiralistener; import java.lang.System.Logger; import java.lang.System.Logger.Level; @@ -12,9 +12,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; -import dev.parodos.jiralistener.model.JiraTicketEventData; -import dev.parodos.jiralistener.model.JiraIssue; -import dev.parodos.jiralistener.model.JiraIssue.StatusCategory; +import io.rhdhorchestrator.jiralistener.model.JiraTicketEventData; +import io.rhdhorchestrator.jiralistener.model.JiraIssue; +import io.rhdhorchestrator.jiralistener.model.JiraIssue.StatusCategory; import io.cloudevents.CloudEvent; import io.cloudevents.core.builder.CloudEventBuilder; import io.cloudevents.core.data.PojoCloudEventData; diff --git a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/JiraIssue.java b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/JiraIssue.java index 462418c3..6304fef1 100644 --- a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/JiraIssue.java +++ b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/JiraIssue.java @@ -1,4 +1,4 @@ -package dev.parodos.jiralistener.model; +package io.rhdhorchestrator.jiralistener.model; import java.util.List; diff --git a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/JiraTicketEventData.java b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/JiraTicketEventData.java index ce25f978..2c6b1f4c 100644 --- a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/JiraTicketEventData.java +++ b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/JiraTicketEventData.java @@ -1,4 +1,4 @@ -package dev.parodos.jiralistener.model; +package io.rhdhorchestrator.jiralistener.model; import lombok.Builder; import lombok.Data; diff --git 
a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/WebhookEvent.java b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/WebhookEvent.java index 81b5cda4..fa31b6f9 100644 --- a/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/WebhookEvent.java +++ b/workflows/escalation/jira-listener/src/main/java/dev/parodos/jiralistener/model/WebhookEvent.java @@ -1,4 +1,4 @@ -package dev.parodos.jiralistener.model; +package io.rhdhorchestrator.jiralistener.model; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; diff --git a/workflows/escalation/jira-listener/src/main/resources/META-INF/resources/index.html b/workflows/escalation/jira-listener/src/main/resources/META-INF/resources/index.html index 19fca5b7..ec3593d1 100644 --- a/workflows/escalation/jira-listener/src/main/resources/META-INF/resources/index.html +++ b/workflows/escalation/jira-listener/src/main/resources/META-INF/resources/index.html @@ -235,7 +235,7 @@
Application
    -
  • GroupId: dev.parodos
  • +
  • GroupId: io.rhdhorchestrator
  • ArtifactId: jira-listener
  • Version: 1.0.0-SNAPSHOT
  • Quarkus Version: 3.4.1
  • diff --git a/workflows/escalation/jira-listener/src/main/resources/application.properties b/workflows/escalation/jira-listener/src/main/resources/application.properties index 39fc294e..44bcb2be 100644 --- a/workflows/escalation/jira-listener/src/main/resources/application.properties +++ b/workflows/escalation/jira-listener/src/main/resources/application.properties @@ -1,4 +1,4 @@ -cloudevent.type=${CLOUD_EVENT_TYPE:dev.parodos.escalation} +cloudevent.type=${CLOUD_EVENT_TYPE:io.rhdhorchestrator.escalation} cloudevent.source=${CLOUD_EVENT_SOURCE:ticket.listener} jira.webhook.label.workflowInstanceId=${WORKFLOW_INSTANCE_ID_LABEL:workflowInstanceId} jira.webhook.label.workflowName=${WORKFLOW_NAME_LABEL:workflowName} diff --git a/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraConstants.java b/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraConstants.java index c4f581b6..f57d2574 100644 --- a/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraConstants.java +++ b/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraConstants.java @@ -1,4 +1,4 @@ -package dev.parodos.jiralistener; +package io.rhdhorchestrator.jiralistener; public final class JiraConstants { static final String ISSUE = "issue"; diff --git a/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraListenerResourceIT.java b/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraListenerResourceIT.java index 73ec9d6a..6c372fb0 100644 --- a/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraListenerResourceIT.java +++ b/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraListenerResourceIT.java @@ -1,4 +1,4 @@ -package dev.parodos.jiralistener; +package io.rhdhorchestrator.jiralistener; import io.quarkus.test.junit.QuarkusIntegrationTest; diff --git 
a/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraListenerResourceTest.java b/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraListenerResourceTest.java index de69b459..bfb532e5 100644 --- a/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraListenerResourceTest.java +++ b/workflows/escalation/jira-listener/src/test/java/dev/parodos/jiralistener/JiraListenerResourceTest.java @@ -1,16 +1,16 @@ -package dev.parodos.jiralistener; +package io.rhdhorchestrator.jiralistener; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.post; import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; -import static dev.parodos.jiralistener.JiraConstants.FIELDS; -import static dev.parodos.jiralistener.JiraConstants.ISSUE; -import static dev.parodos.jiralistener.JiraConstants.KEY; -import static dev.parodos.jiralistener.JiraConstants.LABELS; -import static dev.parodos.jiralistener.JiraConstants.STATUS; -import static dev.parodos.jiralistener.JiraConstants.STATUS_CATEGORY; +import static io.rhdhorchestrator.jiralistener.JiraConstants.FIELDS; +import static io.rhdhorchestrator.jiralistener.JiraConstants.ISSUE; +import static io.rhdhorchestrator.jiralistener.JiraConstants.KEY; +import static io.rhdhorchestrator.jiralistener.JiraConstants.LABELS; +import static io.rhdhorchestrator.jiralistener.JiraConstants.STATUS; +import static io.rhdhorchestrator.jiralistener.JiraConstants.STATUS_CATEGORY; import static io.restassured.RestAssured.given; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; @@ -37,7 +37,7 @@ import com.github.tomakehurst.wiremock.stubbing.ServeEvent; import 
com.google.common.collect.Lists; -import dev.parodos.jiralistener.model.JiraTicketEventData; +import io.rhdhorchestrator.jiralistener.model.JiraTicketEventData; import io.quarkus.test.junit.QuarkusTest; import io.restassured.response.ExtractableResponse; import io.restassured.response.Response; diff --git a/workflows/escalation/ticketEscalation.svg b/workflows/escalation/ticketEscalation.svg index db48238e..39f7b2be 100644 --- a/workflows/escalation/ticketEscalation.svg +++ b/workflows/escalation/ticketEscalation.svg @@ -1 +1 @@ -StartCreateTicketGetTicketCheckTicketStateWaitForApprovalEv ent EscalateCreateK8sNamespa ce EndtimeoutError timeoutError (.getRespo... (.getRespo... notAvailable notAvailable \ No newline at end of file +StartCreateTicketGetTicketCheckTicketStateWaitForApprovalEv ent EscalateCreateK8sNamespa ce EndtimeoutError timeoutError (.getResponse.s... (.getResponse.s... notAvailable notAvailable \ No newline at end of file diff --git a/workflows/escalation/ticketEscalation.sw.yaml b/workflows/escalation/ticketEscalation.sw.yaml index c6da84a9..2016cb03 100644 --- a/workflows/escalation/ticketEscalation.sw.yaml +++ b/workflows/escalation/ticketEscalation.sw.yaml @@ -11,7 +11,7 @@ start: CreateTicket extensions: - extensionid: workflow-uri-definitions definitions: - notifications: "https://raw.githubusercontent.com/parodos-dev/serverless-workflows/main/workflows/shared/specs/notifications-openapi.yaml" + notifications: "https://raw.githubusercontent.com/rhdhorchestrator/serverless-workflows/main/workflows/shared/specs/notifications-openapi.yaml" dataInputSchema: failOnValidationErrors: true schema: schemas/ticket-escalation-schema.json @@ -31,7 +31,7 @@ functions: events: - name: approvalEvent source: ticket.listener - type: dev.parodos.escalation + type: io.rhdhorchestrator.escalation states: - name: CreateTicket type: operation diff --git a/workflows/greeting/README.md b/workflows/greeting/README.md index c1ab784b..09358692 100644 --- 
a/workflows/greeting/README.md +++ b/workflows/greeting/README.md @@ -5,8 +5,8 @@ The greeting workflow simply greets the user in the language chosen by the user - `language` [optional] - the language ## Workflow diagram -![Greeting workflow diagram](https://github.com/parodos-dev/serverless-workflows/blob/main/greeting/greeting.svg?raw=true) +![Greeting workflow diagram](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/greeting/greeting.svg?raw=true) ## Installation -See [official installation guide](https://github.com/parodos-dev/serverless-workflows-config/blob/main/docs/main/greeting) +See [official installation guide](https://github.com/rhdhorchestrator/serverless-workflows-config/blob/main/docs/main/greeting) diff --git a/workflows/modify-vm-resources/README.md b/workflows/modify-vm-resources/README.md index 187dc753..39f6cc6b 100644 --- a/workflows/modify-vm-resources/README.md +++ b/workflows/modify-vm-resources/README.md @@ -1,6 +1,6 @@ # VM Updater workflow The VM updater workflow is a workflow that demonstrates the following features of the serverless workflow technology: -* Integration with external service, in this case, OCP cluster via its OpenAPI +* Integration with external service, in this case, OCP cluster via its OpenAPI * Conditional branching * Using the Notifications plugin to send notifications to the user @@ -35,8 +35,8 @@ Application properties can be initialized from environment variables before runn - `Recipients` [mandatory] - A list of recipients for the notification in the format of `user:/` or `group:/`, i.e. `user:default/jsmith`. 
## Workflow diagram -![VM Updater diagram](https://github.com/parodos-dev/serverless-workflows/blob/main/modify-vm-resources/modify-vm-resources.svg?raw=true) +![VM Updater diagram](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/modify-vm-resources/modify-vm-resources.svg?raw=true) ## Installation -See [official installation guide](https://github.com/parodos-dev/serverless-workflows-config/blob/main/docs/main/modify-vm-resources) \ No newline at end of file +See [official installation guide](https://github.com/rhdhorchestrator/serverless-workflows-config/blob/main/docs/main/modify-vm-resources) diff --git a/workflows/modify-vm-resources/modify-vm-resources.svg b/workflows/modify-vm-resources/modify-vm-resources.svg index b2d9be4b..77c48483 100644 --- a/workflows/modify-vm-resources/modify-vm-resources.svg +++ b/workflows/modify-vm-resources/modify-vm-resources.svg @@ -1 +1 @@ -StartGet VMFill empty valuesOpen issue on JIRANotify Backstage: JIRA ticket createdPoll issueCheck issue doneallOfNotify VM update authorized Update VMCheck auto restartRestart VMInitVMStatusRunnin gRetry Poll VMIncreaseVMStatusRunningRetriesRetryCheck VM statusallOfNotify VM runningEndallOfNotify VM not running EndallOfNotify VM update denied EndVM update denied VM update denied VM update aut... VM update aut... VM running VM running VM not running VM not running VM autorestart... VM autorestart... \ No newline at end of file +StartGet VMFill empty valuesOpen issue on JIRANotify Backstage: JIRA ticket createdPoll issueCheck issue doneallOfNotify VM update authorized Update VMCheck auto restartRestart VMInitVMStatusRunnin gRetry Poll VMIncreaseVMStatusRunningRetriesRetryCheck VM statusallOfNotify VM runningEndallOfNotify VM not running EndallOfNotify VM update denied EndVM update denied VM update denied VM update aut... VM update aut... VM running VM running VM not running VM not running VM autorestart... VM autorestart... 
\ No newline at end of file diff --git a/workflows/modify-vm-resources/modify-vm-resources.sw.yaml b/workflows/modify-vm-resources/modify-vm-resources.sw.yaml index ed06dc1e..22e5b1a9 100644 --- a/workflows/modify-vm-resources/modify-vm-resources.sw.yaml +++ b/workflows/modify-vm-resources/modify-vm-resources.sw.yaml @@ -9,7 +9,7 @@ extensions: outputSchema: schemas/workflow-output-schema.json - extensionid: workflow-uri-definitions definitions: - notifications: "https://raw.githubusercontent.com/parodos-dev/serverless-workflows/main/workflows/shared/specs/notifications-openapi.yaml" + notifications: "https://raw.githubusercontent.com/rhdhorchestrator/serverless-workflows/main/workflows/shared/specs/notifications-openapi.yaml" functions: - name: readNamespacedVirtualMachine operation: specs/kubevirt-openapi.yaml#readNamespacedVirtualMachine diff --git a/workflows/move2kube/README.md b/workflows/move2kube/README.md index 515af48c..0d1da200 100644 --- a/workflows/move2kube/README.md +++ b/workflows/move2kube/README.md @@ -5,11 +5,11 @@ This workflow is using https://move2kube.konveyor.io/ to migrate the existing co Once the transformation is over, move2kube provides a zip file containing the transformed repo. 
### Design diagram -![sequence_diagram.svg](https://github.com/parodos-dev/serverless-workflows/blob/main/workflows/move2kube/sequence_diagram.jpg?raw=true) -![design.svg](https://github.com/parodos-dev/serverless-workflows/blob/main/workflows/move2kube/design.svg?raw=true) +![sequence_diagram.svg](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/workflows/move2kube/sequence_diagram.jpg?raw=true) +![design.svg](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/workflows/move2kube/design.svg?raw=true) ### Workflow -![m2k.svg](https://github.com/parodos-dev/serverless-workflows/blob/main/workflows/move2kube/m2k.svg?raw=true) +![m2k.svg](https://github.com/rhdhorchestrator/serverless-workflows/blob/main/workflows/move2kube/m2k.svg?raw=true) Note that if an error occurs during the migration planning there is no feedback given by the move2kube instance API. To overcome this, we defined a maximum amount of retries (`move2kube_get_plan_max_retries`) to execute while getting the planning before exiting with an error. By default the value is set to 10 and it can be overridden with the environment variable `MOVE2KUBE_GET_PLAN_MAX_RETRIES`. @@ -65,7 +65,7 @@ See [official installation guide](https://github.com/parodos-dev/serverless-work * for more information, please refer to https://move2kube.konveyor.io/tutorials/ui 2. Go to the backstage instance. -To get it, you can run +To get it, you can run ```bash oc -n rhdh-operator get routes ``` @@ -74,11 +74,11 @@ Sample output: NAME HOST/PORT PATH SERVICES PORT TERMINATION WILDCARD backstage-backstage backstage-backstage-rhdh-operator.apps.cluster-c68jb.dynamic.redhatworkshops.io / backstage-backstage http-backend edge/Redirect None ``` -3. Go to the `Orchestrator` page. +3. Go to the `Orchestrator` page. 4. Click on `Move2Kube workflow` and then click the `run` button on the top right of the page. 5. 
In the `repositoryURL` field, put the URL of your git project - * ie: https://bitbucket.org/parodos/m2k-test + * ie: https://bitbucket.org/rhdhorchestrator/m2k-test 6. In the `sourceBranch` field, put the name of the branch holding the project you want to transform * ie: `main` 7. In the `targetBranch` field, put the name of the branch in which you want the move2kube output to be persisted. If the branch exists, the workflow will fail @@ -87,7 +87,7 @@ backstage-backstage backstage-backstage-rhdh-operator.apps.cluster-c68jb.dynam * ie: `a46b802d-511c-4097-a5cb-76c892b48d71` 9. In the `projectId` field, put the ID of the move2kube instance project under the previous workspace to use for the transformation. Use the ID of the project created at the 1st step. * ie: `9c7f8914-0b63-4985-8696-d46c17ba4ebe` -10. Then click on `nextStep` +10. Then click on `nextStep` 11. Click on `run` to trigger the execution 12. Once a new transformation has started and is waiting for your input, you will receive a notification with a link to the Q&A * For more information about what to expect and how to answer the Q&A, please visit [the official move2kube documentation](https://move2kube.konveyor.io/tutorials/ui) diff --git a/workflows/move2kube/m2k-func/pom.xml b/workflows/move2kube/m2k-func/pom.xml index 461bcc4e..90ac564f 100644 --- a/workflows/move2kube/m2k-func/pom.xml +++ b/workflows/move2kube/m2k-func/pom.xml @@ -5,7 +5,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> 4.0.0 m2k-func - dev.parodos + io.rhdhorchestrator 1.0.0-SNAPSHOT false @@ -131,9 +131,9 @@ 2.14.0 test -