diff --git a/src/completion-provider.ts b/src/completion-provider.ts index f036c0e..d8b602b 100644 --- a/src/completion-provider.ts +++ b/src/completion-provider.ts @@ -3,10 +3,10 @@ import { IInlineCompletionContext, IInlineCompletionProvider } from '@jupyterlab/completer'; -import { LLM } from '@langchain/core/language_models/llms'; +import { BaseLanguageModel } from '@langchain/core/language_models/base'; +import { ReadonlyPartialJSONObject } from '@lumino/coreutils'; import { getCompleter, IBaseCompleter, BaseCompleter } from './llm-models'; -import { ReadonlyPartialJSONObject } from '@lumino/coreutils'; /** * The generic completion provider to register to the completion provider manager. @@ -57,7 +57,7 @@ export class CompletionProvider implements IInlineCompletionProvider { /** * Get the LLM completer. */ - get llmCompleter(): LLM | null { + get llmCompleter(): BaseLanguageModel | null { return this._completer?.provider || null; } diff --git a/src/llm-models/anthropic-completer.ts b/src/llm-models/anthropic-completer.ts new file mode 100644 index 0000000..2f10404 --- /dev/null +++ b/src/llm-models/anthropic-completer.ts @@ -0,0 +1,67 @@ +import { + CompletionHandler, + IInlineCompletionContext +} from '@jupyterlab/completer'; +import { ChatAnthropic } from '@langchain/anthropic'; +import { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import { AIMessage, SystemMessage } from '@langchain/core/messages'; + +import { BaseCompleter, IBaseCompleter } from './base-completer'; + +export class AnthropicCompleter implements IBaseCompleter { + constructor(options: BaseCompleter.IOptions) { + this._anthropicProvider = new ChatAnthropic({ ...options.settings }); + } + + get provider(): BaseChatModel { + return this._anthropicProvider; + } + + async fetch( + request: CompletionHandler.IRequest, + context: IInlineCompletionContext + ) { + const { text, offset: cursorOffset } = request; + const prompt = text.slice(0, cursorOffset); + + // Anthropic 
does not allow whitespace at the end of the AIMessage + const trimmedPrompt = prompt.trim(); + + const messages = [ + new SystemMessage( + 'You are a code completion bot which completes the following code from a Jupyter Notebook cell.' + ), + new AIMessage(trimmedPrompt) + ]; + + try { + const response = await this._anthropicProvider.invoke(messages); + const items = []; + + // Anthropic can return string or complex content, a list of string/images/other. + if (typeof response.content === 'string') { + items.push({ + insertText: response.content, + filterText: prompt.substring(trimmedPrompt.length) + }); + } else { + response.content.forEach(content => { + if (content.type !== 'text') { + return; + } + items.push({ + insertText: content.text, + filterText: prompt.substring(trimmedPrompt.length) + }); + }); + } + return { items }; + } catch (error) { + console.error('Error fetching completions', error); + return { items: [] }; + } + } + + private _anthropicProvider: ChatAnthropic; +} diff --git a/src/llm-models/base-completer.ts b/src/llm-models/base-completer.ts index 0828a9c..574e7af 100644 --- a/src/llm-models/base-completer.ts +++ b/src/llm-models/base-completer.ts @@ -2,14 +2,14 @@ import { CompletionHandler, IInlineCompletionContext } from '@jupyterlab/completer'; -import { LLM } from '@langchain/core/language_models/llms'; +import { BaseLanguageModel } from '@langchain/core/language_models/base'; import { ReadonlyPartialJSONObject } from '@lumino/coreutils'; export interface IBaseCompleter { /** * The LLM completer. */ - provider: LLM; + provider: BaseLanguageModel; /** * The function to fetch a new completion. 
diff --git a/src/llm-models/utils.ts b/src/llm-models/utils.ts index 9b88b54..cb12f75 100644 --- a/src/llm-models/utils.ts +++ b/src/llm-models/utils.ts @@ -2,6 +2,7 @@ import { ChatAnthropic } from '@langchain/anthropic'; import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { ChatMistralAI } from '@langchain/mistralai'; import { IBaseCompleter } from './base-completer'; +import { AnthropicCompleter } from './anthropic-completer'; import { CodestralCompleter } from './codestral-completer'; import { ReadonlyPartialJSONObject } from '@lumino/coreutils'; @@ -14,6 +15,8 @@ export function getCompleter( ): IBaseCompleter | null { if (name === 'MistralAI') { return new CodestralCompleter({ settings }); + } else if (name === 'Anthropic') { + return new AnthropicCompleter({ settings }); } return null; }