diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index cb7c250a48be..a4ce214dc6a5 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,5 +1,7 @@ FROM mcr.microsoft.com/devcontainers/dotnet:8.0 # Install the xz-utils package -RUN apt-get update && apt-get install -y xz-utils ca-certificates curl gnupg +# RUN apt-get update && apt-get install -y xz-utils ca-certificates curl gnupg -RUN curl -fsSL https://aka.ms/install-azd.sh | bash \ No newline at end of file +# RUN curl -fsSL https://aka.ms/install-azd.sh | bash + +# RUN curl -sL https://aka.ms/DevTunnelCliInstall | bash \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 7a946812793e..530e27a57443 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -8,17 +8,14 @@ "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", "features": { "ghcr.io/devcontainers/features/azure-cli:1": {}, - "ghcr.io/devcontainers/features/common-utils:2": {}, + "ghcr.io/devcontainers/features/common-utils:2": { + "configureZshAsDefaultShell" : true + }, "ghcr.io/devcontainers/features/docker-in-docker:2": {}, "ghcr.io/azure/azure-dev/azd:latest": {}, - "ghcr.io/devcontainers/features/node:1": { - "nodeGypDependencies": true, - "version": "18", - "nvmVersion": "latest" - }, - "ghcr.io/azure/azure-dev/azd:0": { - "version": "stable" - } + "ghcr.io/devcontainers/features/node:1": {}, + "ghcr.io/azure/azure-dev/azd:0": {}, + "ghcr.io/stuartleeks/dev-container-features/dev-tunnels:0": {} }, "postCreateCommand": "bash .devcontainer/startup.sh", "hostRequirements": { diff --git a/.devcontainer/startup.sh b/.devcontainer/startup.sh index 331e5eb6688e..c2c069ec5e98 100644 --- a/.devcontainer/startup.sh +++ b/.devcontainer/startup.sh @@ -1,8 +1,8 @@ #!/bin/bash -curl -k https://localhost:8081/_explorer/emulator.pem > ~/emulatorcert.crt -sudo cp ~/emulatorcert.crt /usr/local/share/ca-certificates/ -sudo 
update-ca-certificates -sleep 10 +# curl -k https://localhost:8081/_explorer/emulator.pem > ~/emulatorcert.crt +# sudo cp ~/emulatorcert.crt /usr/local/share/ca-certificates/ +# sudo update-ca-certificates +# sleep 10 dotnet restore sk-dev-team.sln -dotnet build util/seed-memory/seed-memory.csproj && dotnet util/seed-memory/bin/Debug/net7.0/seed-memory.dll \ No newline at end of file +# dotnet build util/seed-memory/seed-memory.csproj && dotnet util/seed-memory/bin/Debug/net7.0/seed-memory.dll \ No newline at end of file diff --git a/.gitignore b/.gitignore index dc3d5b131222..33926235baf3 100644 --- a/.gitignore +++ b/.gitignore @@ -496,4 +496,5 @@ elsa-core/ sk-azfunc-server/local.settings.json .azure temp - +.mono/** +**/values.xml \ No newline at end of file diff --git a/README.md b/README.md index 376b37719b00..4fdd77d22887 100644 --- a/README.md +++ b/README.md @@ -1,127 +1,43 @@ -# sk-dev-team - -# Build a Virtual AI Dev Team using Semantic Kernel Skills - -# Goal - -From a natural language specification, set out to integrate a team of AI copilot skills into your team’s dev process, either for discrete tasks on an existing repo (unit tests, pipeline expansions, PRs for specific intents), developing a new feature, or even building an application from scratch. Starting from an existing repo and a broad statement of intent, work with multiple AI copilot dev skills, each of which has a different emphasis - from architecture, to task breakdown, to plans for individual tasks, to code output, code review, efficiency, documentation, build, writing tests, setting up pipelines, deployment, integration tests, and then validation. -The system will present a view that facilitates chain-of-thought coordination across multiple trees of reasoning with the dev team skills. - -## Status - -* You can iterate on building a workflow for your semantic kernel ai dev skills using Elsa Workflows designer and run these workflows to see the results. 
The workflows do not yet support adding memory context. -* You can use the CLI project to run the SK dev skills from the command line. The CLI supports using the [Microsoft Azure Well-Architected Frameworl](https://learn.microsoft.com/en-us/azure/well-architected/) as memory context for the skill invocations. - -## Trying it out - -### Elsa.SemanticKernel - -SemanticKernel Activity Provider for Elsa Workflows 3.x - -The project supports running [Microsoft Semantic Kernel](https://github.com/microsoft/semantic-kernel) Skills as workflows using [Elsa Workflows](https://v3.elsaworkflows.io). You can build the workflows as .NET code or in the visual designer. -To run the designer: - -```bash -> cd WorkflowsApp -> cp .env_example .env -# Edit the .env file to choose your AI model, add your API Endpoint, and secrets. -> . ./.env -> dotnet build -> dotnet run -# Open browser to the URI in the console output -``` - -By Default you can use "admin" and "password" to login. Please review [Workflow Security](https://v3.elsaworkflows.io/docs/installation/aspnet-apps-workflow-server) for into on securing the app, using API tokens, and more. - -To [invoke](https://v3.elsaworkflows.io/docs/guides/invoking-workflows) a workflow, first it must be "Published". If your workflow has a trigger activity, you can use that. When your workflow is ready, click the "Publish" button. You can also execute the workflow using the API. Then, find the Workflow Definition ID. From a command line, you can use "curl": - -```bash -> curl --location 'https://localhost:5001/elsa/api/workflow-definitions/{workflow_definition_id}/execute' \ ---header 'Content-Type: application/json' \ ---header 'Authorization: ApiKey {api_key}' \ ---data '{ -}' -``` - -Once you have the app runing locally, you can login (admin/password - see the [Elsa Workflows](https://v3.elsaworkflows.io) for info about securing). Then you can click "new workflow" to begin building your workflow with semantic kernel skills. - -1. 
Drag workflow Activity blocks into the designer, and examine the settings. -2. Connect the Activities to specify an order of operations. -3. You can use Workfflow Variables to pass state between activities. - 1. Create a Workflow Variable, "MyVariable" - 2. Click on the Activity that you want to use to populate the variable. - 3. In the Settings box for the Activity, Click "Output" - 4. Set the "Output" to the variable chosen. - 5. Click the Activity that will use the variable. Click on "Settings". - 6. Find the text box representing the variable that you want to populate, in this case usually "input". - 7. Click the "..." widget above the text box, and select "javascript" - 8. Set the value of the text box to - - ```javascript - `${getMyVariable()}` - ``` - - 9. Run the workflow. - -## Via CLI - -The easiest way to run the project is in Codespaces. Codespaces will start a qdrant instance for you. - -1. Create a new codespace from the *code* button on the main branch. -2. Once the code space setup is finished, from the terminal: - -```bash -> cd cli -cli> cp ../WorkflowsApp/.env_example . -# Edit the .env file to choose your AI model, add your API Endpoint, and secrets. -cli> bash .env -cli> dotnet build -cli> dotnet run --file util/ToDoListSamplePrompt.txt do it -``` - -You will find the output in the *output/* directory. 
- -## Proposed UX - -* Possible UI: Start with an existing repo (GH or ADO), either populated or empty, and API Keys / config for access – once configured / loaded split view between three columns: - * Settings/History/Tasks (allows browsing into each of the chats with a copilot dev team role) | [Central Window Chat interface with Copilot DevTeam] | Repo browsing/editing - * Alternate interface will be via VS Code plugin/other IDE plugins, following the plugin idiom for each IDE - * Settings include teams channel for conversations, repo config and api keys, model config and api keys, and any desired prompt template additions -* CLI: start simple with a CLI that can be passed a file as prompt input and takes optional arguments as to which skills to invoke -* User begins with specifying a repository and then statement of what they want to accomplish, natural language, as simple or as detailed as needed. - * SK DevTeam skill will use dialog to refine the intent as needed, returns a plan, proposes necessary steps - * User approves the plan or gives feedback, requests iteration - * Plan is parceled out to the appropriate further skills - * Eg, for a new app: - * Architecture is passed to DevLead skill gives plan/task breakdown. 
- * DevLead breaks down tasks into smaller tasks, each of these is fed to a skill to decide if it is a single code module or multiple - * Each module is further fed to a dev lead to break down again or specify a prompt for a coder - * Each code module prompt is fed to a coder - * Each module output from a coder is fed to a code reviewer (with context, specific goals) - * Each reviewer proposes changes, which result in a new prompt for the original coder - * Changes are accepted by the coder - * Each module fed to a builder - * If it doesn’t build sent back to review - * (etc) - -## Proposed Architecture - -* SK Kernel Service – ASP.NET Core Service with REST API -* SK Skills: - * PM Skill – generates pot, word docs, describing app, - * Designer Skill – mockups? - * Architect Skill – proposes overall arch - * DevLead Skill – proposes task breakdown - * CoderSkill – builds code modules for each task - * ReviewerSkill – improves code modules - * TestSkill – writes tests - * Etc -* Web app: prompt front end and wizard style editor of app -* Build service sandboxes – using branches and actions/pipelines 1st draft; Alternate – ephemeral build containers -* Logging service streaming back to azure logs analytics, app insights, and teams channel -* Deployment service – actions/pipelines driven -* Azure Dev Skill – lean into azure integrations – crawl the azure estate to inventory a tenant’s existing resources to memory and help inform new code. Eg: you have a large azure sql estate? Ok, most likely you want to wire your new app to one of those dbs, etc…. +# Ai Agents + +Build a Dev Team using event driven agents. +This project is an experiment and is not intended to be used in production. 
+ +# Background - initial idea + +From a natural language specification, set out to integrate a team of AI agents into your team’s dev process, either for discrete tasks on an existing repo (unit tests, pipeline expansions, PRs for specific intents), developing a new feature, or even building an application from scratch. Starting from an existing repo and a broad statement of intent, work with multiple AI agents, each of which has a different emphasis - from architecture, to task breakdown, to plans for individual tasks, to code output, code review, efficiency, documentation, build, writing tests, setting up pipelines, deployment, integration tests, and then validation. +The system will present a view that facilitates chain-of-thought coordination across multiple trees of reasoning with the dev team agents. + +# Emerging framework - AI Agents + +While building the dev team agents, we stumbled upon a few patterns and abstractions that we think are useful for building a variety of agentic systems. +At the moment they reside in `src/libs/Microsoft.AI.DevTeam`, but we plan to move them to a separate repo and nuget package. + +# Github dev agents demo + +https://github.com/microsoft/azure-openai-dev-skills-orchestrator/assets/10728102/cafb1546-69ab-4c27-aaf5-1968313d637f + +## How it works + +* User begins with creating an issue and then stating what they want to accomplish, natural language, as simple or as detailed as needed. +* Product manager agent will respond with a Readme, which can be iterated upon. + * User approves the readme or gives feedback via issue comments. + * Once the readme is approved, the user closes the issue and the Readme is committed to a PR. +* Developer lead agent responds with a decomposed plan for development, which also can be iterated upon. + * User approves the plan or gives feedback via issue comments. + * Once the plan is approved, the user closes the issue and the plan is used to break down the task to different developer agents. 
+* Developer agents respond with code, which can be iterated upon. + * User approves the code or gives feedback via issue comments. + * Once the code is approved, the user closes the issue and the code is committed to a PR. + +# How to run the Github dev agents locally + +Check [the getting started guide](./docs/github-flow-getting-started.md) + +# Other scenarios using the AiAgents + +## TODO # Contributing diff --git a/docs/elsa-workflows.md b/docs/elsa-workflows.md new file mode 100644 index 000000000000..d24e358521ad --- /dev/null +++ b/docs/elsa-workflows.md @@ -0,0 +1,64 @@ +# SemanticKernel Activity Provider for Elsa Workflows 3.x + +The project supports running [Microsoft Semantic Kernel](https://github.com/microsoft/semantic-kernel) Skills as workflows using [Elsa Workflows](https://v3.elsaworkflows.io). You can build the workflows as .NET code or in the visual designer. +To run the designer: + +```bash +> cd WorkflowsApp +> cp .env_example .env +# Edit the .env file to choose your AI model, add your API Endpoint, and secrets. +> . ./.env +> dotnet build +> dotnet run +# Open browser to the URI in the console output +``` + +By Default you can use "admin" and "password" to login. Please review [Workflow Security](https://v3.elsaworkflows.io/docs/installation/aspnet-apps-workflow-server) for info on securing the app, using API tokens, and more. + +To [invoke](https://v3.elsaworkflows.io/docs/guides/invoking-workflows) a workflow, first it must be "Published". If your workflow has a trigger activity, you can use that. When your workflow is ready, click the "Publish" button. You can also execute the workflow using the API. Then, find the Workflow Definition ID. 
From a command line, you can use "curl": + +```bash +> curl --location 'https://localhost:5001/elsa/api/workflow-definitions/{workflow_definition_id}/execute' \ +--header 'Content-Type: application/json' \ +--header 'Authorization: ApiKey {api_key}' \ +--data '{ +}' +``` + +Once you have the app running locally, you can login (admin/password - see the [Elsa Workflows](https://v3.elsaworkflows.io) for info about securing). Then you can click "new workflow" to begin building your workflow with semantic kernel skills. + +1. Drag workflow Activity blocks into the designer, and examine the settings. +2. Connect the Activities to specify an order of operations. +3. You can use Workflow Variables to pass state between activities. + 1. Create a Workflow Variable, "MyVariable" + 2. Click on the Activity that you want to use to populate the variable. + 3. In the Settings box for the Activity, Click "Output" + 4. Set the "Output" to the variable chosen. + 5. Click the Activity that will use the variable. Click on "Settings". + 6. Find the text box representing the variable that you want to populate, in this case usually "input". + 7. Click the "..." widget above the text box, and select "javascript" + 8. Set the value of the text box to + + ```javascript + `${getMyVariable()}` + ``` + + 9. Run the workflow. + +## Run via codespaces + +The easiest way to run the project is in Codespaces. Codespaces will start a qdrant instance for you. + +1. Create a new codespace from the *code* button on the main branch. +2. Once the code space setup is finished, from the terminal: + +```bash +> cd cli +cli> cp ../WorkflowsApp/.env_example . +# Edit the .env file to choose your AI model, add your API Endpoint, and secrets. +cli> bash .env +cli> dotnet build +cli> dotnet run --file util/ToDoListSamplePrompt.txt do it +``` + +You will find the output in the *output/* directory. 
diff --git a/docs/github-flow-architecture.md b/docs/github-flow-architecture.md deleted file mode 100644 index 04be0b78ab67..000000000000 --- a/docs/github-flow-architecture.md +++ /dev/null @@ -1 +0,0 @@ -# Azure components diff --git a/docs/github-flow-getting-started.md b/docs/github-flow-getting-started.md index e2c67c986d63..a60a13761f56 100644 --- a/docs/github-flow-getting-started.md +++ b/docs/github-flow-getting-started.md @@ -3,22 +3,24 @@ - Access to gpt3.5-turbo or preferably gpt4 - [Get access here](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview#how-do-i-get-access-to-azure-openai) - [Setup a Github app](#how-do-i-setup-the-github-app) - [Install the Github app](https://docs.github.com/en/apps/using-github-apps/installing-your-own-github-app) +- [Provision the azure resources](#how-do-I-deploy-the-azure-bits) - [Create labels for the dev team skills](#which-labels-should-i-create) ### How do I setup the Github app? -- [Register a Github app](https://docs.github.com/en/apps/creating-github-apps/registering-a-github-app/registering-a-github-app). 
-- Setup the following permissions - - Repository - - Contents - read and write - - Issues - read and write - - Metadata - read only - - Pull requests - read and write -- Subscribe to the following events: - - Issues - - Issue comment -- Allow this app to be installed by any user or organization -- Add a dummy value for the webhook url, we'll come back to this setting +- [Register a Github app](https://docs.github.com/en/apps/creating-github-apps/registering-a-github-app/registering-a-github-app), with the options listed above + - Setup the following permissions + - Repository + - Contents - read and write + - Issues - read and write + - Metadata - read only + - Pull requests - read and write + - Subscribe to the following events: + - Issues + - Issue comment + - Allow this app to be installed by any user or organization + - Add a dummy value for the webhook url, we'll come back to this setting + - Enter a webhook secret, which you'll need later on when filling in the `WebhookSecret` property in the `appsettings.json` file - After the app is created, generate a private key, we'll use it later for authentication to Github from the app ### Which labels should I create? @@ -27,7 +29,6 @@ In order for us to know which skill and persona we need to talk with, we are usi The default bunch of skills and personnas are as follows: - PM.Readme -- PM.BootstrapProject - Do.It - DevLead.Plan - Developer.Implement @@ -37,20 +38,38 @@ Once you start adding your own skills, just remember to add the corresponding La ## How do I run this locally? Codespaces are preset for this repo. +Start by creating a codespace -Create a codespace and once the codespace is created, make sure to fill in the `local.settings.json` file. +![Alt text](./images/new-codespace.png) -There is a `local.settings.template.json` you can copy and fill in, containing comments on the different config values. +and fill in the `appsettings.json` file, located in the `src\apps\gh-flow` folder. 
+There is a `appsettings.local.template.json` which you can copy and fill in, containing comments on the different config values. -Hit F5 and go to the Ports tab in your codespace, make sure you make the `:7071` port publically visible. [How to share port?](https://docs.github.com/en/codespaces/developing-in-codespaces/forwarding-ports-in-your-codespace?tool=vscode#sharing-a-port-1) +In the Explorer tab in VS Code, find the Solution explorer, right click on the `gh-flow` project and click Debug -> Start new instance -Copy the local address (it will look something like https://foo-bar-7071.preview.app.github.dev) and append `/api/github/webhooks` at the end. Using this value, update the Github App's webhook URL and you are ready to go! +![Alt text](./images/solution-explorer.png) -Before you go and have the best of times, there is one last thing left to do [load the WAF into the vector DB](#load-the-waf-into-qdrant) +We'll need to expose the running application to the GH App webhooks, for example using [DevTunnels](https://learn.microsoft.com/en-us/azure/developer/dev-tunnels/overview), but any tool like ngrok can also work. +The following commands will create a persistent tunnel, so we need to only do this once: +```bash +TUNNEL_NAME=_name_yout_tunnel_here_ +devtunnel user login +devtunnel create -a $TUNNEL_NAME +devtunnel port create -p 5244 $TUNNEL_NAME +``` +and once we have the tunnel created we can just start forwarding with the following command: +```bash +devtunnel host $TUNNEL_NAME +``` + +Copy the local address (it will look something like https://yout_tunnel_name.euw.devtunnels.ms) and append `/api/github/webhooks` at the end. Using this value, update the Github App's webhook URL and you are ready to go! +Before you go and have the best of times, there is one last thing left to do [load the WAF into the vector DB](#load-the-waf-into-qdrant) -## How do I deploy this to Azure? 
+Also, since this project is relying on Orleans for the Agents implementation, there is a [dashboard](https://github.com/OrleansContrib/OrleansDashboard) available at https://yout_tunnel_name.euw.devtunnels.ms/dashboard, with useful metrics and stats related to the running Agents. + +## How do I deploy the azure bits? This repo is setup to use [azd](https://learn.microsoft.com/en-us/azure/developer/azure-developer-cli/overview) to work with the Azure bits. `azd` is installed in the codespace. @@ -59,32 +78,21 @@ Let's start by logging in to Azure using azd auth login ``` -After we've logged in, we need to create a new environment and setup the OpenAI and GithubApp config. +After we've logged in, we need to create a new environment provision the azure bits. ```bash -azd env new dev -azd env set -e dev GH_APP_ID replace_with_gh_app_id -azd env set -e dev GH_APP_INST_ID replace_with_inst_id -azd env set -e dev GH_APP_KEY replace_with_gh_app_key -azd env set -e dev OAI_DEPLOYMENT_ID replace_with_deployment_id -azd env set -e dev OAI_EMBEDDING_ID replace_with_embedding_id -azd env set -e dev OAI_ENDPOINT replace_with_oai_endpoint -azd env set -e dev OAI_KEY replace_with_oai_key -azd env set -e dev OAI_SERVICE_ID replace_with_oai_service_id -azd env set -e dev OAI_SERVICE_TYPE AzureOpenAI +ENVIRONMENT=_name_of_your_env +azd env new $ENVIRONMENT +azd provision -e $ENVIRONMENT ``` +After the provisioning is done, you can inspect the outputs with the following command -Now that we have all that setup, the only thing left to do is run - -``` -azd up -e dev +```bash +azd env get-values -e dev ``` - -and wait for the azure components to be provisioned and the app deployed. - As the last step, we also need to [load the WAF into the vector DB](#load-the-waf-into-qdrant) ### Load the WAF into Qdrant. If you are running the app locally, we have [Qdrant](https://qdrant.tech/) setup in the Codespace and if you are running in Azure, Qdrant is deployed to ACA. 
-The loader is a project in the `util` folder, called `seed-memory`. We need to fill in the `appsettings.json` file in the `config` folder with the OpenAI details and the Qdrant endpoint, then just run the loader with `dotnet run` and you are ready to go. \ No newline at end of file +The loader is a project in the `src\apps` folder, called `seed-memory`. We need to fill in the `appsettings.json` file in the `config` folder with the OpenAI details and the Qdrant endpoint, then just run the loader with `dotnet run` and you are ready to go. \ No newline at end of file diff --git a/docs/github-flow.md b/docs/github-flow.md index d9d7244216c1..b6dd39e344e2 100644 --- a/docs/github-flow.md +++ b/docs/github-flow.md @@ -1,78 +1,34 @@ -![](/docs/images/github-sk-dev-team.png) - # How does the event flow look like? +```mermaid +graph TD; + NEA([NewAsk event]) -->|Hubber| NEA1[Creation of PM issue, DevLead issue, and new branch]; + + RR([ReadmeRequested event]) -->|ProductManager| PM1[Generation of new README]; + NEA1 --> RR; + PM1 --> RG([ReadmeGenerated event]); + RG -->|Hubber| RC[Post the readme as a new comment on the issue]; + RC --> RCC([ReadmeChainClosed event]); + RCC -->|ProductManager| RCR([ReadmeCreated event]); + RCR --> |AzureGenie| RES[Store Readme in blob storage]; + RES --> RES2([ReadmeStored event]); + RES2 --> |Hubber| REC[Readme commited to branch and create new PR]; + DPR([DevPlanRequested event]) -->|DeveloperLead| DPG[Generation of new development plan]; + NEA1 --> DPR; + DPG --> DPGE([DevPlanGenerated event]); + DPGE -->|Hubber| DPGEC[Posting the plan as a new comment on the issue]; + DPGEC --> DPCC([DevPlanChainClosed event]); + DPCC -->|DeveloperLead| DPCE([DevPlanCreated event]); + DPCE --> |Hubber| DPC[Creates a Dev issue for each subtask]; -### Hubber agent handles: - ``` - NewAsk - -> create PM issue - -> create DevLead issue - -> create a branch -``` -``` - ReadmeGenerated - -> post comment -``` -``` - DevPlanGenerated - -> post comment -``` 
-``` - DevPlanFinished - -> for each step, create Dev issue -``` -``` - CodeGenerated - -> post comment -``` -``` - ReadmeFinished - -> commit to branch -``` -``` - SandboxRunFinished - -> commit to branch -``` - -### AzureOps agent handles: -``` - ReadmeChainClosed - -> store - -> ReadmeStored -``` -``` - CodeChainClosed - -> store - -> run in sandbox -``` - -### PM agent handles: -``` - ReadmeRequested - -> ReadmeGenerated -``` -``` - ChainClosed - -> ReadmeFinished -``` - -### DevLead agent handles: -``` - DevPlanRequested - -> DevPlanGenerated -``` -``` - ChainClosed - -> DevPlanFinished -``` - -### Dev handles: -``` - CodeGenerationRequested - -> CodeGenerated -``` + DPC([CodeGenerationRequested event]) -->|Developer| CG[Generation of new code]; + CG --> CGE([CodeGenerated event]); + CGE -->|Hubber| CGC[Posting the code as a new comment on the issue]; + CGC --> CCCE([CodeChainClosed event]); + CCCE -->|Developer| CCE([CodeCreated event]); + CCE --> |AzureGenie| CS[Store code in blob storage and schedule a run in the sandbox]; + CS --> SRC([SandboxRunCreated event]); + SRC --> |Sandbox| SRM[Check every minute if the run finished]; + SRM --> SRF([SandboxRunFinished event]); + SRF --> |Hubber| SRCC[Code files commited to branch]; ``` - ChainClosed - -> CodeFinished -``` \ No newline at end of file diff --git a/docs/images/new-codespace.png b/docs/images/new-codespace.png new file mode 100644 index 000000000000..928756830fe6 Binary files /dev/null and b/docs/images/new-codespace.png differ diff --git a/docs/images/solution-explorer.png b/docs/images/solution-explorer.png new file mode 100644 index 000000000000..48165f8ddb89 Binary files /dev/null and b/docs/images/solution-explorer.png differ diff --git a/infra/main.bicep b/infra/main.bicep index e7af7b64fce4..9dfbdac6c020 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -9,37 +9,18 @@ param environmentName string @description('Primary location for all resources') param location string -@secure() -param 
githubAppKey string -param githubAppId string -param githubAppInstallationId string -param openAIServiceType string -param openAIServiceId string -param openAIDeploymentId string -param openAIEmbeddingId string -param openAIEndpoint string -@secure() -param openAIKey string - -param apiServiceName string = '' param applicationInsightsDashboardName string = '' param applicationInsightsName string = '' -param appServicePlanName string = '' param logAnalyticsName string = '' param resourceGroupName string = '' param storageAccountName string = '' param containerAppsEnvironmentName string = '' param containerRegistryName string = '' -param ghFlowServiceName string = '' -param cosmosAccountName string = '' var aciShare = 'acishare' var qdrantShare = 'qdrantshare' -var metadataTable = 'Metadata' -var containerMetadataTable = 'ContainersMetadata' - var abbrs = loadJsonContent('./abbreviations.json') var resourceToken = toLower(uniqueString(subscription().id, environmentName, location)) var tags = { 'azd-env-name': environmentName } @@ -62,10 +43,6 @@ module storage './core/storage/storage-account.bicep' = { aciShare qdrantShare ] - tables: [ - metadataTable - containerMetadataTable - ] } } @@ -108,106 +85,6 @@ module qdrant './core/database/qdrant/qdrant-aca.bicep' = { } } -// Create an App Service Plan to group applications under the same payment plan and SKU -// module appServicePlan './core/host/appserviceplan.bicep' = { -// name: 'appserviceplan' -// scope: rg -// params: { -// name: !empty(appServicePlanName) ? appServicePlanName : '${abbrs.webServerFarms}${resourceToken}' -// location: location -// tags: tags -// sku: { -// name: 'EP1' -// tier: 'ElasticPremium' -// family: 'EP' -// } -// kind: 'elastic' -// reserved: false -// } -// } - -// var appName = !empty(apiServiceName) ? 
apiServiceName : '${abbrs.webSitesFunctions}api-${resourceToken}' - -// The application backend -// module skfunc './app/sk-func.bicep' = { -// name: 'skfunc' -// scope: rg -// params: { -// name: appName -// location: location -// tags: tags -// applicationInsightsName: monitoring.outputs.applicationInsightsName -// appServicePlanId: appServicePlan.outputs.id -// storageAccountName: storage.outputs.name -// appSettings: { -// SANDBOX_IMAGE: 'mcr.microsoft.com/dotnet/sdk:7.0' -// AzureWebJobsFeatureFlags: 'EnableHttpProxying' -// FUNCTIONS_FQDN: 'https://${appName}.azurewebsites.net' -// 'GithubOptions__AppKey': githubAppKey -// 'GithubOptions__AppId': githubAppId -// 'GithubOptions__InstallationId': githubAppInstallationId -// 'AzureOptions__SubscriptionId': subscription().subscriptionId -// 'AzureOptions__Location': location -// 'AzureOptions__ContainerInstancesResourceGroup': rg.name -// 'AzureOptions__FilesShareName': aciShare -// 'AzureOptions__FilesAccountName': storage.outputs.name -// 'OpenAIOptions__ServiceType': openAIServiceType -// 'OpenAIOptions__ServiceId': openAIServiceId -// 'OpenAIOptions__DeploymentOrModelId': openAIDeploymentId -// 'OpenAIOptions__EmbeddingDeploymentOrModelId': openAIEmbeddingId -// 'OpenAIOptions__Endpoint': openAIEndpoint -// 'OpenAIOptions__ApiKey': openAIKey -// 'QdrantOptions__Endpoint':'https://${qdrant.outputs.fqdn}' -// 'QdrantOptions__VectorSize':'1536' -// } -// } -// } - - -// The application database -module cosmos './app/db.bicep' = { - name: 'cosmos' - scope: rg - params: { - accountName: !empty(cosmosAccountName) ? cosmosAccountName : '${abbrs.documentDBDatabaseAccounts}${resourceToken}' - databaseName: 'devteam' - location: location - tags: tags - } -} - -module ghFlow './app/gh-flow.bicep' = { - name: 'gh-flow' - scope: rg - params: { - name: !empty(ghFlowServiceName) ? 
ghFlowServiceName : '${abbrs.appContainerApps}ghflow-${resourceToken}' - location: location - tags: tags - identityName: '${abbrs.managedIdentityUserAssignedIdentities}ghflow-${resourceToken}' - applicationInsightsName: monitoring.outputs.applicationInsightsName - containerAppsEnvironmentName: containerApps.outputs.environmentName - containerRegistryName:containerApps.outputs.registryName - storageAccountName: storage.outputs.name - aciShare: aciShare - githubAppId: githubAppId - githubAppInstallationId: githubAppInstallationId - githubAppKey: githubAppKey - openAIDeploymentId: openAIDeploymentId - openAIEmbeddingId: openAIEmbeddingId - openAIEndpoint: openAIEndpoint - openAIKey: openAIKey - openAIServiceId: openAIServiceId - openAIServiceType: openAIServiceType - qdrantEndpoint: 'https://${qdrant.outputs.fqdn}' - rgName: rg.name - cosmosAccountName: cosmos.outputs.accountName - } -} - -// Data outputs -output AZURE_COSMOS_ENDPOINT string = cosmos.outputs.endpoint -output AZURE_COSMOS_CONNECTION_STRING_KEY string = cosmos.outputs.connectionStringKey -output AZURE_COSMOS_DATABASE_NAME string = cosmos.outputs.databaseName // App outputs output APPLICATIONINSIGHTS_CONNECTION_STRING string = monitoring.outputs.applicationInsightsConnectionString @@ -215,4 +92,9 @@ output AZURE_CONTAINER_ENVIRONMENT_NAME string = containerApps.outputs.environme output AZURE_CONTAINER_REGISTRY_ENDPOINT string = containerApps.outputs.registryLoginServer output AZURE_CONTAINER_REGISTRY_NAME string = containerApps.outputs.registryName output AZURE_LOCATION string = location -output AZURE_TENANT_ID string = tenant().tenantId +output AZURE_TENANT_ID string = subscription().tenantId +output AZURE_SUBSCRIPTION_ID string = subscription().subscriptionId +output AZURE_RESOURCE_GROUP_NAME string = rg.name +output AZURE_FILESHARE_NAME string = aciShare +output AZURE_FILESHARE_ACCOUNT_NAME string = storage.outputs.name +output QDRANT_ENDPOINT string = 'https://${qdrant.outputs.fqdn}' diff --git 
a/infra/main.parameters.json b/infra/main.parameters.json index 05edbfaaee13..67ad8524c440 100644 --- a/infra/main.parameters.json +++ b/infra/main.parameters.json @@ -8,33 +8,6 @@ "location": { "value": "${AZURE_LOCATION}" }, - "githubAppKey": { - "value": "${GH_APP_KEY}" - }, - "githubAppId": { - "value": "${GH_APP_ID}" - }, - "githubAppInstallationId": { - "value": "${GH_APP_INST_ID}" - }, - "openAIServiceType": { - "value": "${OAI_SERVICE_TYPE}" - }, - "openAIServiceId": { - "value": "${OAI_SERVICE_ID}" - }, - "openAIDeploymentId": { - "value": "${OAI_DEPLOYMENT_ID}" - }, - "openAIEmbeddingId": { - "value": "${OAI_EMBEDDING_ID}" - }, - "openAIEndpoint": { - "value": "${OAI_ENDPOINT}" - }, - "openAIKey": { - "value": "${OAI_KEY}" - }, "principalId": { "value": "${AZURE_PRINCIPAL_ID}" } diff --git a/src/apps/gh-flow/Program.cs b/src/apps/gh-flow/Program.cs index 45c143cec982..f0528ae688cf 100644 --- a/src/apps/gh-flow/Program.cs +++ b/src/apps/gh-flow/Program.cs @@ -91,7 +91,8 @@ app.UseRouting() .UseEndpoints(endpoints => { - endpoints.MapGitHubWebhooks(); + var ghOptions = app.Services.GetService>().Value; + endpoints.MapGitHubWebhooks(secret: ghOptions.WebhookSecret ); }); app.Map("/dashboard", x => x.UseOrleansDashboard()); diff --git a/src/apps/gh-flow/Services/GithubWebHookProcessor.cs b/src/apps/gh-flow/Services/GithubWebHookProcessor.cs index 7a856b463610..b5546c141fa3 100644 --- a/src/apps/gh-flow/Services/GithubWebHookProcessor.cs +++ b/src/apps/gh-flow/Services/GithubWebHookProcessor.cs @@ -27,7 +27,7 @@ protected override async Task ProcessIssuesWebhookAsync(WebhookHeaders headers, try { _logger.LogInformation("Processing issue event"); - var org = issuesEvent.Organization.Login; + var org = issuesEvent.Repository.Owner.Login; var repo = issuesEvent.Repository.Name; var issueNumber = issuesEvent.Issue.Number; var input = issuesEvent.Issue.Body; @@ -52,9 +52,10 @@ protected override async Task ProcessIssuesWebhookAsync(WebhookHeaders headers, await 
HandleClosingIssue(issueNumber, parentNumber,skillName, labels[skillName], suffix, org, repo); } } - catch (System.Exception) + catch (Exception ex) { - _logger.LogError("Processing issue event"); + _logger.LogError(ex, "Processing issue event"); + throw; } } @@ -66,10 +67,10 @@ protected override async Task ProcessIssueCommentWebhookAsync( try { _logger.LogInformation("Processing issue comment event"); - var org = issueCommentEvent.Organization.Login; + var org = issueCommentEvent.Repository.Owner.Login; var repo = issueCommentEvent.Repository.Name; var issueNumber = issueCommentEvent.Issue.Number; - var input = issueCommentEvent.Issue.Body; + var input = issueCommentEvent.Comment.Body; // Assumes the label follows the following convention: Skill.Function example: PM.Readme var labels = issueCommentEvent.Issue.Labels .Select(l => l.Name.Split('.')) @@ -84,9 +85,10 @@ protected override async Task ProcessIssueCommentWebhookAsync( await HandleNewAsk(issueNumber, parentNumber, skillName, labels[skillName], suffix, input, org, repo); } } - catch (System.Exception ex) + catch (Exception ex) { - _logger.LogError("Processing issue comment event"); + _logger.LogError(ex, "Processing issue comment event"); + throw; } } @@ -149,9 +151,10 @@ await stream.OnNextAsync(new Event Data = data }); } - catch (System.Exception) + catch (Exception ex) { - _logger.LogError("Handling new ask"); + _logger.LogError(ex, "Handling new ask"); + throw; } } } diff --git a/src/apps/gh-flow/appsettings.template.json b/src/apps/gh-flow/appsettings.azure.template.json similarity index 72% rename from src/apps/gh-flow/appsettings.template.json rename to src/apps/gh-flow/appsettings.azure.template.json index 8877352b6263..2fc698845a99 100644 --- a/src/apps/gh-flow/appsettings.template.json +++ b/src/apps/gh-flow/appsettings.azure.template.json @@ -2,7 +2,8 @@ "Logging": { "LogLevel": { "Default": "Information", - "Microsoft.AspNetCore": "Warning" + "Microsoft.AspNetCore": "Information", + 
"Orleans.Streams": "Information" } }, "ApplicationInsights": { @@ -13,7 +14,8 @@ "GithubOptions" : { "AppKey": "", "AppId": "", - "InstallationId": "" + "InstallationId": "", + "WebhookSecret": "" }, "AzureOptions" : { "SubscriptionId":"", @@ -22,15 +24,14 @@ "FilesShareName":"", "FilesAccountName":"", "FilesAccountKey":"", - "CosmosConnectionString":"", "SandboxImage" : "mcr.microsoft.com/dotnet/sdk:7.0", "ManagedIdentity": "" }, "OpenAIOptions" : { "ServiceType":"AzureOpenAI", - "ServiceId":"", - "DeploymentOrModelId":"", - "EmbeddingDeploymentOrModelId":"", + "ServiceId":"gpt-4", + "DeploymentOrModelId":"gpt-4", + "EmbeddingDeploymentOrModelId":"text-embedding-ada-002", "Endpoint":"", "ApiKey":"" }, diff --git a/src/apps/gh-flow/appsettings.local.template.json b/src/apps/gh-flow/appsettings.local.template.json new file mode 100644 index 000000000000..2fc698845a99 --- /dev/null +++ b/src/apps/gh-flow/appsettings.local.template.json @@ -0,0 +1,45 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Information", + "Orleans.Streams": "Information" + } + }, + "ApplicationInsights": { + "ConnectionString": "" + }, + "AllowedHosts": "*", + "SANDBOX_IMAGE" : "mcr.microsoft.com/dotnet/sdk:7.0", + "GithubOptions" : { + "AppKey": "", + "AppId": "", + "InstallationId": "", + "WebhookSecret": "" + }, + "AzureOptions" : { + "SubscriptionId":"", + "Location":"", + "ContainerInstancesResourceGroup":"", + "FilesShareName":"", + "FilesAccountName":"", + "FilesAccountKey":"", + "SandboxImage" : "mcr.microsoft.com/dotnet/sdk:7.0", + "ManagedIdentity": "" + }, + "OpenAIOptions" : { + "ServiceType":"AzureOpenAI", + "ServiceId":"gpt-4", + "DeploymentOrModelId":"gpt-4", + "EmbeddingDeploymentOrModelId":"text-embedding-ada-002", + "Endpoint":"", + "ApiKey":"" + }, + "QdrantOptions" : { + "Endpoint" : "http://qdrant:6333", + "VectorSize" : "1536" + }, + "ServiceOptions" : { + "IngesterUrl" : "http://localhost:7071" + } +} diff --git 
a/src/libs/Microsoft.AI.DevTeam.Skills/DevLead.cs b/src/libs/Microsoft.AI.DevTeam.Skills/DevLead.cs index a1c677bc1402..ae53ab071c3f 100644 --- a/src/libs/Microsoft.AI.DevTeam.Skills/DevLead.cs +++ b/src/libs/Microsoft.AI.DevTeam.Skills/DevLead.cs @@ -30,6 +30,7 @@ In each LLM prompt restrict the model from outputting other text that is not in ] } Do not output any other text. + Do not wrap the JSON in any other text, output the JSON format described above. Input: {{$input}} {{$wafContext}} """; diff --git a/src/libs/Microsoft.AI.DevTeam/Agents/AiAgent.cs b/src/libs/Microsoft.AI.DevTeam/Agents/AiAgent.cs index 761751b80f8e..f4e9bf661055 100644 --- a/src/libs/Microsoft.AI.DevTeam/Agents/AiAgent.cs +++ b/src/libs/Microsoft.AI.DevTeam/Agents/AiAgent.cs @@ -39,12 +39,19 @@ protected void AddToHistory(string message, ChatUserType userType) }); } + protected string GetChatHistory() + { + return string.Join("\n",_state.State.History.Select(message=> $"{message.UserType}: {message.Message}")); + } + protected async Task CallFunction(string template, string ask, IKernel kernel, ISemanticTextMemory memory) { var function = kernel.CreateSemanticFunction(template, new OpenAIRequestSettings { MaxTokens = 15000, Temperature = 0.8, TopP = 1 }); - var context = await CreateWafContext(memory, ask); - var result = (await kernel.RunAsync(context, function)).ToString(); AddToHistory(ask, ChatUserType.User); + var history = GetChatHistory(); + var context = await CreateWafContext(memory, history); + var result = (await kernel.RunAsync(context, function)).ToString(); + AddToHistory(result, ChatUserType.Agent); await _state.WriteStateAsync(); return result; diff --git a/src/libs/Microsoft.AI.DevTeam/Options/GithubOptions.cs b/src/libs/Microsoft.AI.DevTeam/Options/GithubOptions.cs index 90ed5cdc04cc..c01d703381c4 100644 --- a/src/libs/Microsoft.AI.DevTeam/Options/GithubOptions.cs +++ b/src/libs/Microsoft.AI.DevTeam/Options/GithubOptions.cs @@ -3,4 +3,5 @@ public class GithubOptions public 
string AppKey { get; set; } public int AppId { get; set; } public long InstallationId { get; set; } -} + public string WebhookSecret { get; set; } +} \ No newline at end of file diff --git a/src/libs/Microsoft.AI.DevTeam/Services/AzureService.cs b/src/libs/Microsoft.AI.DevTeam/Services/AzureService.cs index 987c7b76df9f..1e8d1d3b5ba6 100644 --- a/src/libs/Microsoft.AI.DevTeam/Services/AzureService.cs +++ b/src/libs/Microsoft.AI.DevTeam/Services/AzureService.cs @@ -40,6 +40,7 @@ public async Task DeleteSandbox(string sandboxId) catch (Exception ex) { _logger.LogError(ex, "Error deleting sandbox"); + throw; } } @@ -59,7 +60,7 @@ public async Task IsSandboxCompleted(string sandboxId) catch (Exception ex) { _logger.LogError(ex, "Error checking sandbox status"); - return false; + throw; } } @@ -105,6 +106,7 @@ public async Task RunInSandbox(string org, string repo, long parentIssueNumber, catch (Exception ex) { _logger.LogError(ex, "Error running sandbox"); + throw; } } @@ -146,6 +148,7 @@ await file.UploadRangeAsync( catch (Exception ex) { _logger.LogError(ex, "Error storing output"); + throw; } } } diff --git a/src/libs/Microsoft.AI.DevTeam/Services/GithubAuthService.cs b/src/libs/Microsoft.AI.DevTeam/Services/GithubAuthService.cs index 7a5156a77e63..ae0b1b5f8758 100644 --- a/src/libs/Microsoft.AI.DevTeam/Services/GithubAuthService.cs +++ b/src/libs/Microsoft.AI.DevTeam/Services/GithubAuthService.cs @@ -41,7 +41,7 @@ public async Task GetGitHubClient() catch (Exception ex) { _logger.LogError(ex, "Error getting GitHub client"); - return default; + throw; } } } \ No newline at end of file diff --git a/src/libs/Microsoft.AI.DevTeam/Services/GithubService.cs b/src/libs/Microsoft.AI.DevTeam/Services/GithubService.cs index e50eb83464e2..384c07c4a146 100644 --- a/src/libs/Microsoft.AI.DevTeam/Services/GithubService.cs +++ b/src/libs/Microsoft.AI.DevTeam/Services/GithubService.cs @@ -72,6 +72,7 @@ await _ghClient.Repository.Content.CreateFile( catch (Exception ex) { 
_logger.LogError(ex, "Error committing to branch"); + throw; } } @@ -85,6 +86,7 @@ public async Task CreateBranch(string org, string repo, string branch) catch (Exception ex) { _logger.LogError(ex, "Error creating branch"); + throw; } } @@ -99,7 +101,7 @@ public async Task GetMainLanguage(string org, string repo) catch (Exception ex) { _logger.LogError(ex, "Error getting main language"); - return default; + throw; } } @@ -119,7 +121,7 @@ public async Task CreateIssue(string org, string repo, string input, string catch (Exception ex) { _logger.LogError(ex, "Error creating issue"); - return default; + throw; } } @@ -133,6 +135,7 @@ public async Task CreatePR(string org, string repo, long number, string branch) catch (Exception ex) { _logger.LogError(ex, "Error creating PR"); + throw; } } @@ -147,6 +150,7 @@ public async Task MarkTaskComplete(string org, string repo, int commentId) catch (Exception ex) { _logger.LogError(ex, "Error marking task complete"); + throw; } } @@ -159,6 +163,7 @@ public async Task PostComment(string org, string repo, long issueNumber, string catch (Exception ex) { _logger.LogError(ex, "Error posting comment"); + throw; } } @@ -172,7 +177,7 @@ public async Task> GetFiles(string org, string repo, s catch (Exception ex) { _logger.LogError(ex, "Error getting files"); - return Enumerable.Empty(); + throw; } } @@ -203,7 +208,7 @@ private async Task> CollectFiles(string org, string re catch (Exception ex) { _logger.LogError(ex, "Error collecting files"); - return Enumerable.Empty(); + throw; } } }