diff --git a/.env.example b/.env.example index e076242971..149ee071bb 100644 --- a/.env.example +++ b/.env.example @@ -86,6 +86,13 @@ EMBEDDING_OPENAI_MODEL= # Default: text-embedding-3-small IMAGE_OPENAI_MODEL= # Default: dall-e-3 USE_OPENAI_EMBEDDING= # Set to TRUE for OpenAI/1536, leave blank for local +# Atoma SDK Configuration +ATOMASDK_BEARER_AUTH= # Atoma SDK Bearer Auth token +ATOMA_API_URL= # Default: https://api.atoma.network/v1 +SMALL_ATOMA_MODEL= # Default: meta-llama/Llama-3.3-70B-Instruct +MEDIUM_ATOMA_MODEL= # Default: meta-llama/Llama-3.3-70B-Instruct +LARGE_ATOMA_MODEL= # Default: meta-llama/Llama-3.3-70B-Instruct + # Eternal AI's Decentralized Inference API ETERNALAI_URL= ETERNALAI_MODEL= # Default: "NousResearch/Hermes-3-Llama-3.1-70B-FP8" diff --git a/agent/src/index.ts b/agent/src/index.ts index 7ba4418bdf..f35aede1f5 100644 --- a/agent/src/index.ts +++ b/agent/src/index.ts @@ -473,6 +473,11 @@ export function getTokenForProvider( character.settings?.secrets?.VENICE_API_KEY || settings.VENICE_API_KEY ); + case ModelProviderName.ATOMA: + return ( + character.settings?.secrets?.ATOMASDK_BEARER_AUTH || + settings.ATOMASDK_BEARER_AUTH + ); case ModelProviderName.AKASH_CHAT_API: return ( character.settings?.secrets?.AKASH_CHAT_API_KEY || diff --git a/packages/core/src/generation.ts b/packages/core/src/generation.ts index 0cd836a5d5..75fd972935 100644 --- a/packages/core/src/generation.ts +++ b/packages/core/src/generation.ts @@ -963,6 +963,36 @@ export async function generateText({ break; } + case ModelProviderName.ATOMA: { + elizaLogger.debug("Initializing Atoma model."); + const atoma = createOpenAI({ + apiKey, + baseURL: endpoint, + fetch: runtime.fetch, + }); + + const { text: atomaResponse } = await aiGenerateText({ + model: atoma.languageModel(model), + prompt: context, + system: + runtime.character.system ?? + settings.SYSTEM_PROMPT ??
+ undefined, + tools: tools, + onStepFinish: onStepFinish, + maxSteps: maxSteps, + temperature: temperature, + maxTokens: max_response_length, + frequencyPenalty: frequency_penalty, + presencePenalty: presence_penalty, + experimental_telemetry: experimental_telemetry, + }); + + response = atomaResponse; + elizaLogger.debug("Received response from Atoma model."); + break; + } + case ModelProviderName.GALADRIEL: { elizaLogger.debug("Initializing Galadriel model."); const headers = {}; @@ -2333,4 +2363,4 @@ export async function generateTweetActions({ await new Promise((resolve) => setTimeout(resolve, retryDelay)); retryDelay *= 2; } -} +} \ No newline at end of file diff --git a/packages/core/src/models.ts b/packages/core/src/models.ts index 903f865724..2c6046d9c5 100644 --- a/packages/core/src/models.ts +++ b/packages/core/src/models.ts @@ -998,6 +998,38 @@ export const models: Models = { }, }, }, + [ModelProviderName.ATOMA]: { + endpoint: settings.ATOMA_API_URL || "https://api.atoma.network/v1", + model: { + [ModelClass.SMALL]: { + name: + settings.SMALL_ATOMA_MODEL || + "meta-llama/Llama-3.3-70B-Instruct", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.7, + }, + [ModelClass.MEDIUM]: { + name: + settings.MEDIUM_ATOMA_MODEL || + "meta-llama/Llama-3.3-70B-Instruct", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.7, + }, + [ModelClass.LARGE]: { + name: + settings.LARGE_ATOMA_MODEL || + "meta-llama/Llama-3.3-70B-Instruct", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.7, + }, + }, + }, }; export function getModelSettings( diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index 621ffe81c6..9c5e9602b7 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -229,6 +229,7 @@ export type Models = { [ModelProviderName.LIVEPEER]: Model; [ModelProviderName.DEEPSEEK]: Model; [ModelProviderName.INFERA]: Model; + [ModelProviderName.ATOMA]: 
Model; }; /** @@ -264,6 +265,7 @@ export enum ModelProviderName { LETZAI = "letzai", DEEPSEEK = "deepseek", INFERA = "infera", + ATOMA = "atoma", } /** @@ -1558,4 +1560,4 @@ export enum TranscriptionProvider { export enum ActionTimelineType { ForYou = "foryou", Following = "following", -} \ No newline at end of file +}