Commit 51a9e67 (1 parent: ad76ee7)
Showing 12 changed files with 218 additions and 0 deletions.
Binary file added (+11.8 KB): api/core/model_runtime/model_providers/leptonai/_assets/icon_s_en.png
29 changes: 29 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/leptonai.py
@@ -0,0 +1,29 @@
import logging

from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.model_provider import ModelProvider

logger = logging.getLogger(__name__)


class LeptonAIProvider(ModelProvider):

    def validate_provider_credentials(self, credentials: dict) -> None:
        """
        Validate provider credentials.
        If validation fails, raise an exception.
        :param credentials: provider credentials; the form is defined in `provider_credential_schema`.
        """
        try:
            model_instance = self.get_model_instance(ModelType.LLM)

            model_instance.validate_credentials(
                model='llama2-7b',
                credentials=credentials
            )
        except CredentialsValidateFailedError as ex:
            raise ex
        except Exception as ex:
            logger.exception(f'{self.get_provider_schema().provider} credentials validate failed')
            raise ex
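For context, here is a minimal usage sketch that is not part of the commit: it assumes the Dify model runtime has already loaded the leptonai provider package (in practice the provider is instantiated by the provider factory), and the API key value is a placeholder.

# Hypothetical usage sketch, not part of this commit.
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.leptonai.leptonai import LeptonAIProvider

provider = LeptonAIProvider()  # normally created by the model runtime's provider factory
try:
    provider.validate_provider_credentials({'api_key': 'your-lepton-api-key'})  # placeholder key
    print('Lepton AI credentials look valid')
except CredentialsValidateFailedError as err:
    print(f'credential check failed: {err}')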
29 changes: 29 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/leptonai.yaml
@@ -0,0 +1,29 @@
provider: leptonai
label:
  zh_Hans: Lepton AI
  en_US: Lepton AI
icon_small:
  en_US: icon_s_en.png
icon_large:
  en_US: icon_l_en.png
background: "#F5F5F4"
help:
  title:
    en_US: Get your API Key from Lepton AI
    zh_Hans: 从 Lepton AI 获取 API Key
  url:
    en_US: https://dashboard.lepton.ai
supported_model_types:
  - llm
configurate_methods:
  - predefined-model
provider_credential_schema:
  credential_form_schemas:
    - variable: api_key
      label:
        en_US: API Key
      type: secret-input
      required: true
      placeholder:
        zh_Hans: 在此输入您的 API Key
        en_US: Enter your API Key
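As an aside (not part of the commit), a manifest like the one above can be read with PyYAML; the sketch below is illustrative only, and the file path is relative to the repository root.

# Illustrative sketch: load the provider manifest and inspect its credential schema.
import yaml

with open('api/core/model_runtime/model_providers/leptonai/leptonai.yaml') as f:
    manifest = yaml.safe_load(f)

print(manifest['provider'])               # leptonai
print(manifest['supported_model_types'])  # ['llm']
for form in manifest['provider_credential_schema']['credential_form_schemas']:
    print(form['variable'], form['type'], form['required'])  # api_key secret-input True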
6 changes: 6 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/llm/_position.yaml
@@ -0,0 +1,6 @@
- gemma-7b
- mistral-7b
- mixtral-8x7b
- llama2-7b
- llama2-13b
- llama3-70b
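If it helps to see the ordering in action, the sketch below (not from the commit) assumes _position.yaml lists model names in display order and sorts model definitions accordingly; the loading code is a simplification, not the runtime's implementation.

# Assumed behaviour: models are displayed in the order given by _position.yaml.
import yaml

with open('api/core/model_runtime/model_providers/leptonai/llm/_position.yaml') as f:
    position = yaml.safe_load(f)  # ['gemma-7b', 'mistral-7b', 'mixtral-8x7b', ...]

models = ['llama2-13b', 'gemma-7b', 'llama3-70b']
ordered = sorted(models, key=position.index)
print(ordered)  # ['gemma-7b', 'llama2-13b', 'llama3-70b']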
20 changes: 20 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/llm/gemma-7b.yaml
@@ -0,0 +1,20 @@
model: gemma-7b
label:
  zh_Hans: gemma-7b
  en_US: gemma-7b
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 8192
parameter_rules:
  - name: temperature
    use_template: temperature
  - name: top_p
    use_template: top_p
  - name: max_tokens
    use_template: max_tokens
    default: 1024
    min: 1
    max: 1024
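To illustrate what parameter rules like these imply at request time, the following sketch (not part of the commit) clamps max_tokens against the rule's bounds; the helper name and logic are assumptions for illustration, not the runtime's actual code. The same rules appear in the remaining model YAML files below.

# Simplified illustration of the max_tokens rule above (default 1024, min 1, max 1024).
MAX_TOKENS_RULE = {'default': 1024, 'min': 1, 'max': 1024}

def resolve_max_tokens(requested: int | None) -> int:
    # Fall back to the default, then clamp into [min, max].
    if requested is None:
        return MAX_TOKENS_RULE['default']
    return max(MAX_TOKENS_RULE['min'], min(MAX_TOKENS_RULE['max'], requested))

print(resolve_max_tokens(None))  # 1024
print(resolve_max_tokens(4096))  # 1024 (clamped)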
20 changes: 20 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/llm/llama2-13b.yaml
@@ -0,0 +1,20 @@
model: llama2-13b
label:
  zh_Hans: llama2-13b
  en_US: llama2-13b
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 4096
parameter_rules:
  - name: temperature
    use_template: temperature
  - name: top_p
    use_template: top_p
  - name: max_tokens
    use_template: max_tokens
    default: 1024
    min: 1
    max: 1024
20 changes: 20 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/llm/llama2-7b.yaml
@@ -0,0 +1,20 @@
model: llama2-7b
label:
  zh_Hans: llama2-7b
  en_US: llama2-7b
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 4096
parameter_rules:
  - name: temperature
    use_template: temperature
  - name: top_p
    use_template: top_p
  - name: max_tokens
    use_template: max_tokens
    default: 1024
    min: 1
    max: 1024
20 changes: 20 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/llm/llama3-70b.yaml
@@ -0,0 +1,20 @@
model: llama3-70b
label:
  zh_Hans: llama3-70b
  en_US: llama3-70b
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 8192
parameter_rules:
  - name: temperature
    use_template: temperature
  - name: top_p
    use_template: top_p
  - name: max_tokens
    use_template: max_tokens
    default: 1024
    min: 1
    max: 1024
34 changes: 34 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/llm/llm.py
@@ -0,0 +1,34 @@
from collections.abc import Generator
from typing import Optional, Union

from core.model_runtime.entities.llm_entities import LLMResult
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool
from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel


class LeptonAILargeLanguageModel(OAIAPICompatLargeLanguageModel):
    # Maps the Dify model name to the Lepton AI deployment prefix used in the endpoint URL.
    MODEL_PREFIX_MAP = {
        'llama2-7b': 'llama2-7b',
        'gemma-7b': 'gemma-7b',
        'mistral-7b': 'mistral-7b',
        'mixtral-8x7b': 'mixtral-8x7b',
        'llama3-70b': 'llama3-70b',
        'llama2-13b': 'llama2-13b',
    }

    def _invoke(self, model: str, credentials: dict,
                prompt_messages: list[PromptMessage], model_parameters: dict,
                tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None,
                stream: bool = True, user: Optional[str] = None) \
            -> Union[LLMResult, Generator]:
        self._add_custom_parameters(credentials, model)
        return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream)

    def validate_credentials(self, model: str, credentials: dict) -> None:
        self._add_custom_parameters(credentials, model)
        super().validate_credentials(model, credentials)

    @classmethod
    def _add_custom_parameters(cls, credentials: dict, model: str) -> None:
        # Lepton AI serves each model from its own OpenAI-compatible endpoint.
        credentials['mode'] = 'chat'
        credentials['endpoint_url'] = f'https://{cls.MODEL_PREFIX_MAP[model]}.lepton.run/api/v1'
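For clarity (not part of the commit), the sketch below shows the endpoint that _add_custom_parameters derives for a given model; the API key value is a placeholder.

# Each Lepton AI model is addressed via its own OpenAI-compatible endpoint.
credentials = {'api_key': 'your-lepton-api-key'}  # placeholder
LeptonAILargeLanguageModel._add_custom_parameters(credentials, 'llama3-70b')
print(credentials['endpoint_url'])  # https://llama3-70b.lepton.run/api/v1
print(credentials['mode'])          # chat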
20 changes: 20 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/llm/mistral-7b.yaml
@@ -0,0 +1,20 @@
model: mistral-7b
label:
  zh_Hans: mistral-7b
  en_US: mistral-7b
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 8192
parameter_rules:
  - name: temperature
    use_template: temperature
  - name: top_p
    use_template: top_p
  - name: max_tokens
    use_template: max_tokens
    default: 1024
    min: 1
    max: 1024
20 changes: 20 additions & 0 deletions
api/core/model_runtime/model_providers/leptonai/llm/mixtral-8x7b.yaml
@@ -0,0 +1,20 @@
model: mixtral-8x7b
label:
  zh_Hans: mixtral-8x7b
  en_US: mixtral-8x7b
model_type: llm
features:
  - agent-thought
model_properties:
  mode: chat
  context_size: 32000
parameter_rules:
  - name: temperature
    use_template: temperature
  - name: top_p
    use_template: top_p
  - name: max_tokens
    use_template: max_tokens
    default: 1024
    min: 1
    max: 1024