diff --git a/src/ui/ManagementPortal/js/api.ts b/src/ui/ManagementPortal/js/api.ts
index 20c15d23f7..d19b8f60b6 100644
--- a/src/ui/ManagementPortal/js/api.ts
+++ b/src/ui/ManagementPortal/js/api.ts
@@ -5,6 +5,7 @@ import type {
 	AppConfigUnion,
 	AgentIndex,
 	AgentGatekeeper,
+	AIModel,
 	FilterRequest,
 	CreateAgentRequest,
 	CheckNameResponse,
@@ -463,59 +464,12 @@ export default {
 		return agentGetResult;
 	},
 
-	// async updateAgent(agentId: string, request: CreateAgentRequest): Promise<any> {
-	// 	return await this.fetch(
-	// 		`/instances/${this.instanceId}/providers/FoundationaLLM.Agent/agents/${agentId}?api-version=${this.apiVersion}`,
-	// 		{
-	// 			method: 'POST',
-	// 			body: request,
-	// 		},
-	// 	);
-	// },
-
 	async upsertAgent(agentId: string, agentData: CreateAgentRequest): Promise<any> {
-		// Deep copy the agent object to prevent modifiying its references
-		const agent = JSON.parse(JSON.stringify(agentData)) as CreateAgentRequest;
-
-		if (agent.orchestration_settings.orchestrator.toLowerCase() === 'langchain' ||
-			agent.orchestration_settings.orchestrator.toLowerCase() === 'semantickernel') {
-			for (const [propertyName, propertyValue] of Object.entries(
-				agent.orchestration_settings.endpoint_configuration,
-			)) {
-				if (!propertyValue) {
-					continue;
-				}
-
-				if (propertyValue.startsWith('FoundationaLLM:') &&
-					propertyName !== 'api_key') {
-					// Get the static value from the app config.
-					const appConfigResult = await this.getAppConfig(propertyValue);
-					// Set the static value to the endpoint configuration.
-					agent.orchestration_settings.endpoint_configuration[propertyName] = appConfigResult.resource.value;
-				}
-			}
-		}
-
-		for (const [propertyName, propertyValue] of Object.entries(
-			agent.orchestration_settings.model_parameters,
-		)) {
-			if (!propertyValue) {
-				continue;
-			}
-
-			if (propertyValue.startsWith('FoundationaLLM:')) {
-				// Get the static value from the app config.
-				const appConfigResult = await this.getAppConfig(propertyValue);
-				// Set the static value to the endpoint configuration.
-				agent.orchestration_settings.model_parameters[propertyName] = appConfigResult.resource.value;
-			}
-		}
-
 		return await this.fetch(
 			`/instances/${this.instanceId}/providers/FoundationaLLM.Agent/agents/${agentId}?api-version=${this.apiVersion}`,
 			{
 				method: 'POST',
-				body: agent,
+				body: agentData,
 			},
 		);
 	},
@@ -606,6 +560,14 @@
 		return data;
 	},
 
+	async getAIModels(): Promise<ResourceProviderGetResult<AIModel>[]> {
+		const data = await this.fetch(
+			`/instances/${this.instanceId}/providers/FoundationaLLM.AIModel/aiModels?api-version=${this.apiVersion}`,
+		) as ResourceProviderGetResult<AIModel>[];
+
+		return data;
+	},
+
 	/*
 		Role Assignments
 	*/
diff --git a/src/ui/ManagementPortal/js/types.ts b/src/ui/ManagementPortal/js/types.ts
index d1635f1ada..e37e86e008 100644
--- a/src/ui/ManagementPortal/js/types.ts
+++ b/src/ui/ManagementPortal/js/types.ts
@@ -30,6 +30,8 @@ export type Agent = ResourceBase & {
 	type: 'knowledge-management' | 'analytics';
 	inline_context: boolean;
 
+	ai_model_object_id: string;
+
 	vectorization: {
 		dedicated_pipeline: boolean;
 		indexing_profile_object_ids: string[];
@@ -44,22 +46,12 @@
 	sessions_enabled: boolean;
 	orchestration_settings: {
 		orchestrator: string;
-		endpoint_configuration: {
-			endpoint: string;
-			api_key: string;
-			api_version: string;
-			operation_type: string;
-		};
-		model_parameters: {
-			temperature: number;
-			deployment_name: string;
-		};
 	};
 	conversation_history: {
 		enabled: boolean;
 		max_history: number;
 	};
-	gatekeeper: {
+	gatekeeper_settings: {
 		use_system_setting: boolean;
 		options: string[];
 	};
@@ -102,6 +94,19 @@ export type ExternalOrchestrationService = ResourceBase & {
 	resolved_api_key: string;
 };
 
+export type AIModel = ResourceBase & {
+	name: string;
+	type: string;
+	// The object id of the APIEndpointConfiguration object providing the configuration for the API endpoint used to interact with the model.
+	endpoint_object_id: string;
+	// The version of the AI model.
+	version?: string | null;
+	// The name of the deployment corresponding to the AI model.
+	deployment_name?: string | null;
+	// Dictionary with default values for the model parameters.
+	model_parameters: { [key: string]: any };
+};
+
 export interface ConfigurationReferenceMetadata {
 	isKeyVaultBacked: boolean;
 }
@@ -296,6 +301,8 @@ export type CreateAgentRequest = ResourceBase & {
 	name: string;
 	inline_context: boolean;
 
+	ai_model_object_id: string;
+
 	language_model: {
 		type: string;
 		provider: string;
@@ -322,22 +329,12 @@
 	sessions_enabled: boolean;
 	orchestration_settings: {
 		orchestrator: string;
-		endpoint_configuration: {
-			endpoint: string;
-			api_key: string;
-			api_version: string;
-			operation_type: string;
-		};
-		model_parameters: {
-			temperature: number;
-			deployment_name: string;
-		};
 	};
 	conversation_history: {
 		enabled: boolean;
 		max_history: number;
 	};
-	gatekeeper: {
+	gatekeeper_settings: {
 		use_system_setting: boolean;
 		options: string[];
 	};
diff --git a/src/ui/ManagementPortal/pages/agents/create.vue b/src/ui/ManagementPortal/pages/agents/create.vue
index b1f44b959e..6bd8383171 100644
--- a/src/ui/ManagementPortal/pages/agents/create.vue
+++ b/src/ui/ManagementPortal/pages/agents/create.vue
@@ -574,6 +574,43 @@
 			/>
 
+
+			Which AI model should the orchestrator use?
+
+
+
+
+
+
+
 
 			Would you like to assign this agent to a cost center?
 
@@ -598,53 +635,6 @@
 			/>
 
-
-
 
 			System Prompt
@@ -694,6 +684,7 @@ import type {
 	Agent,
 	AgentIndex,
 	AgentDataSource,
+	AIModel,
 	DataSource,
 	CreateAgentRequest,
 	ExternalOrchestrationService,
@@ -730,6 +721,9 @@ const getDefaultFormValues = () => {
 		editTextEmbeddingProfile: false as boolean,
 		selectedTextEmbeddingProfile: null as null | TextEmbeddingProfile,
 
+		editAIModel: false as boolean,
+		selectedAIModel: null as null | AIModel,
+
 		chunkSize: 500,
 		overlapSize: 50,
 
@@ -752,34 +746,7 @@ const getDefaultFormValues = () => {
 
 		orchestration_settings: {
 			orchestrator: 'LangChain' as string,
-			endpoint_configuration: {
-				auth_type: 'key' as string,
-				provider: 'microsoft' as string,
-				endpoint: 'FoundationaLLM:AzureOpenAI:API:Endpoint' as string,
-				api_key: 'FoundationaLLM:AzureOpenAI:API:Key' as string,
-				api_version: 'FoundationaLLM:AzureOpenAI:API:Version' as string,
-				//operation_type: 'chat' as string,
-			} as object,
-			model_parameters: {
-				deployment_name: 'FoundationaLLM:AzureOpenAI:API:Completions:DeploymentName' as string,
-				temperature: 0 as number,
-			} as object,
 		},
-
-		api_endpoint: 'FoundationaLLM:AzureOpenAI:API:Endpoint',
-		api_key: 'FoundationaLLM:AzureOpenAI:API:Key',
-		api_version: 'FoundationaLLM:AzureOpenAI:API:Version',
-		version: 'FoundationaLLM:AzureOpenAI:API:Completions:ModelVersion',
-		deployment: 'FoundationaLLM:AzureOpenAI:API:Completions:DeploymentName',
-
-		// resolved_orchestration_settings: {
-		// 	endpoint_configuration: {
-		// 		endpoint: '' as string,
-		// 		api_key: '' as string,
-		// 		api_version: '' as string,
-		// 		operation_type: 'chat' as string,
-		// 	} as object,
-		// },
 	};
 };
 
@@ -814,6 +781,7 @@ export default {
 			indexSources: [] as AgentIndex[],
 			textEmbeddingProfileSources: [] as TextEmbeddingProfile[],
 			externalOrchestratorOptions: [] as ExternalOrchestrationService[],
+			aiModelOptions: [] as AIModel[],
 
 			orchestratorOptions: [
 				{
@@ -917,6 +885,12 @@ export default {
 			const externalOrchestrationServicesResult = await api.getExternalOrchestrationServices();
 			this.externalOrchestratorOptions = externalOrchestrationServicesResult.map(result => result.resource);
 
+			this.loadingStatusText = 'Retrieving AI models...';
+			const aiModelsResult = await api.getAIModels();
+			this.aiModelOptions = aiModelsResult.map(result => result.resource);
+			// Filter the AIModels so we only display the ones where the type is 'completion'.
+			this.aiModelOptions = this.aiModelOptions.filter((model) => model.type === 'completion');
+
 			// Update the orchestratorOptions with the externalOrchestratorOptions.
 			this.orchestratorOptions = this.orchestratorOptions.concat(
 				this.externalOrchestratorOptions.map((service) => ({
@@ -979,27 +953,6 @@ export default {
 			this.orchestration_settings.orchestrator =
 				agent.orchestration_settings?.orchestrator ||
 				this.orchestration_settings.orchestrator;
-			this.orchestration_settings.endpoint_configuration.endpoint =
-				agent.orchestration_settings?.endpoint_configuration?.endpoint ||
-				this.orchestration_settings.endpoint_configuration.endpoint;
-			this.orchestration_settings.endpoint_configuration.api_key =
-				agent.orchestration_settings?.endpoint_configuration?.api_key ||
-				this.orchestration_settings.endpoint_configuration.api_key;
-			this.orchestration_settings.endpoint_configuration.api_version =
-				agent.orchestration_settings?.endpoint_configuration?.api_version ||
-				this.orchestration_settings.endpoint_configuration.api_version;
-			this.orchestration_settings.endpoint_configuration.operation_type =
-				agent.orchestration_settings?.endpoint_configuration?.operation_type ||
-				this.orchestration_settings.endpoint_configuration.operation_type;
-
-			this.orchestration_settings.model_parameters.deployment_name =
-				agent.orchestration_settings?.model_parameters?.deployment_name ||
-				this.orchestration_settings.model_parameters.deployment_name;
-			this.orchestration_settings.model_parameters.temperature =
-				agent.orchestration_settings?.model_parameters?.temperature ||
-				this.orchestration_settings.model_parameters.temperature;
-
-			// this.resolved_orchestration_settings = agent.resolved_orchestration_settings || this.resolved_orchestration_settings;
 
 			if (agent.vectorization) {
 				this.dedicated_pipeline = agent.vectorization.dedicated_pipeline;
@@ -1028,20 +981,25 @@ export default {
 				this.dataSources.find(
 					(dataSource) => dataSource.object_id === agent.vectorization?.data_source_object_id,
 				) || null;
+
+			this.selectedAIModel =
+				this.aiModelOptions.find(
+					(aiModel) => aiModel.object_id === agent.ai_model_object_id,
+				) || null;
 
 			this.conversationHistory = agent.conversation_history?.enabled || this.conversationHistory;
 			this.conversationMaxMessages =
 				agent.conversation_history?.max_history || this.conversationMaxMessages;
 
-			this.gatekeeperEnabled = Boolean(agent.gatekeeper?.use_system_setting);
+			this.gatekeeperEnabled = Boolean(agent.gatekeeper_settings?.use_system_setting);
 
-			if (agent.gatekeeper && agent.gatekeeper.options) {
+			if (agent.gatekeeper_settings && agent.gatekeeper_settings.options) {
 				this.selectedGatekeeperContentSafety =
 					this.gatekeeperContentSafetyOptions.filter((localOption) =>
-						agent.gatekeeper?.options?.includes(localOption.code)
+						agent.gatekeeper_settings?.options?.includes(localOption.code)
 					) || this.selectedGatekeeperContentSafety;
 				this.selectedGatekeeperDataProtection =
 					this.gatekeeperDataProtectionOptions.filter((localOption) =>
-						agent.gatekeeper?.options?.includes(localOption.code)
+						agent.gatekeeper_settings?.options?.includes(localOption.code)
 					) || this.selectedGatekeeperDataProtection;
 			}
 		},
@@ -1115,6 +1073,11 @@ export default {
 			this.editTextEmbeddingProfile = false;
 		},
 
+		handleAIModelSelected(aiModel: AIModel) {
+			this.selectedAIModel = aiModel;
+			this.editAIModel = false;
+		},
+
 		async handleCreateAgent() {
 			const errors = [];
 			if (!this.agentName) {
@@ -1135,6 +1098,14 @@ export default {
 				errors.push('Please provide a system prompt.');
 			}
 
+			if (!this.orchestration_settings.orchestrator) {
+				errors.push('Please select an orchestrator.');
+			}
+
+			if (!this.selectedAIModel) {
+				errors.push('Please select an AI model for the orchestrator.');
+			}
+
 			// if (!this.selectedDataSource) {
 			// 	errors.push('Please select a data source.');
 			// }
@@ -1241,7 +1212,7 @@ export default {
 					max_history: Number(this.conversationMaxMessages),
 				},
 
-				gatekeeper: {
+				gatekeeper_settings: {
 					use_system_setting: this.gatekeeperEnabled,
 					options: [
 						...(this.selectedGatekeeperContentSafety || []).map((option: any) => option.code),
@@ -1253,6 +1224,7 @@ export default {
 				prompt_object_id: promptObjectId,
 
 				orchestration_settings: this.orchestration_settings,
+				ai_model_object_id: this.selectedAIModel.object_id,
 			};
 
 			if (this.editAgent) {
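
For reference, a minimal TypeScript sketch (not part of the diff) of the consumption pattern these changes rely on: list the `FoundationaLLM.AIModel/aiModels` resources, keep only `completion` models, and use the selected model's `object_id` as `ai_model_object_id`. It assumes the `ResourceProviderGetResult<T>` wrapper exposes a `resource` property, as implied by the `result.resource` mapping in create.vue; `fetchJson`, `managementApiUrl`, `instanceId`, and `apiVersion` are hypothetical stand-ins for the portal's `api.fetch` wrapper and its configuration.

```ts
// Sketch only: mirrors the logic added in this PR under stated assumptions.

// Assumed shape of the resource provider wrapper (only `resource` is used here).
interface ResourceProviderGetResult<T> {
	resource: T;
	roles?: string[];
	actions?: string[];
}

// Mirrors the AIModel type introduced in types.ts; object_id is assumed to come from ResourceBase.
interface AIModel {
	object_id: string;
	name: string;
	type: string;
	endpoint_object_id: string;
	version?: string | null;
	deployment_name?: string | null;
	model_parameters: { [key: string]: any };
}

// Hypothetical helper standing in for the portal's `api.fetch` wrapper.
async function fetchJson<T>(url: string): Promise<T> {
	const response = await fetch(url);
	if (!response.ok) {
		throw new Error(`Request failed with status ${response.status}`);
	}
	return (await response.json()) as T;
}

// Retrieve the AI models for an instance and keep only completion models,
// which is what the agent creation page now does before populating its dropdown.
async function getCompletionModels(
	managementApiUrl: string,
	instanceId: string,
	apiVersion: string,
): Promise<AIModel[]> {
	const results = await fetchJson<ResourceProviderGetResult<AIModel>[]>(
		`${managementApiUrl}/instances/${instanceId}/providers/FoundationaLLM.AIModel/aiModels?api-version=${apiVersion}`,
	);
	// Unwrap each result and filter to completion models only.
	return results.map((r) => r.resource).filter((m) => m.type === 'completion');
}
```

The selected model's `object_id` is what the portal now writes to `ai_model_object_id` on the `CreateAgentRequest`, replacing the removed `endpoint_configuration` and `model_parameters` blocks.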