diff --git a/README.md b/README.md index 40c269f..06ef286 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Mistral Javascript Client -You can use the Mistral Javascript client to interact with the Mistral AI API +You can use the Mistral Javascript client to interact with the Mistral AI API. ## Installing @@ -8,36 +8,9 @@ You can install the library in your project using: `npm install mistralai` -## Usage - -### Chat - -The simplest use case is to chat with Mistral AI models: - -```javascript -const client = require("mistralai"); - -const response = client.chat('le-tiny', [{role: 'user', content: 'What is your favourite French food, and why is it mayonnaise?'}]) - -``` - -You can also use `client.chatStream` for streaming results. - -### Embeddings - -To use our embedding API you can use the following code: - -```javascript -const client = require('mistralai'); - -const response = client.embed('le-embed', 'My favourite place to eat mayonnaise is embed'); -``` - ## Run examples -Examples can be found in the `examples/` directory you can run them using: - +You can run the examples in the `examples/` directory using: ```bash -node [example.js] - +node examples/chat_no_streaming.js ``` diff --git a/examples/chat_no_streaming.js b/examples/chat_no_streaming.js new file mode 100644 index 0000000..e0ee842 --- /dev/null +++ b/examples/chat_no_streaming.js @@ -0,0 +1,12 @@ +import MistralClient from '@mistralai/mistralai'; + +const apiKey = process.env.MISTRAL_API_KEY; + +const client = new MistralClient(apiKey); + +const chatResponse = await client.chat( + 'mistral-tiny', + [{role: 'user', content: 'What is the best French cheese?'}], +); + +console.log('Chat:', chatResponse); diff --git a/examples/chat_with_streaming.js b/examples/chat_with_streaming.js new file mode 100644 index 0000000..01960d2 --- /dev/null +++ b/examples/chat_with_streaming.js @@ -0,0 +1,14 @@ +import MistralClient from '@mistralai/mistralai'; + +const apiKey = process.env.MISTRAL_API_KEY; + 
+const client = new MistralClient(apiKey); + +const chatStreamResponse = await client.chatStream( + 'mistral-tiny', + [{role: 'user', content: 'What is the best French cheese?'}], +); + +for await (const chunk of chatStreamResponse) { + console.log('Chat Stream:', '' + chunk); +} diff --git a/examples/embeddings.js b/examples/embeddings.js new file mode 100644 index 0000000..27cbcd8 --- /dev/null +++ b/examples/embeddings.js @@ -0,0 +1,14 @@ +import MistralClient from '@mistralai/mistralai'; + +const apiKey = process.env.MISTRAL_API_KEY; + +const client = new MistralClient(apiKey); + +const input = []; +for (let i = 0; i < 10; i++) { + input.push('What is the best French cheese?'); +} + +const embeddingsBatchResponse = await client.embeddings('mistral-embed', input); + +console.log('Embeddings Batch:', embeddingsBatchResponse.data); diff --git a/examples/examples.js b/examples/examples.js deleted file mode 100644 index 56f434c..0000000 --- a/examples/examples.js +++ /dev/null @@ -1,51 +0,0 @@ -import MistralClient from '../client.js'; - -const apiKey = process.env.MISTRAL_API_KEY; - -const client = new MistralClient(apiKey); - -// LIST MODELS -const listModelsResponse = await client.listModels(); - -listModelsResponse.data.forEach((model) => { - console.log('Model:', model); -}); - -// CHAT -const chatResponse = await client.chat( - 'le-tiny-v2312', - [{role: 'user', content: 'hello world'}], -); - -console.log('Chat:', chatResponse); - -// CHAT STREAM -const chatStreamResponse = await client.chatStream( - 'le-tiny-v2312', [{role: 'user', content: 'hello world'}], -); - -for await (const chunk of chatStreamResponse) { - console.log('Chat Stream:', '' + chunk); -} - -// chatStreamResponse.data.on('data', (data) => { -// console.log('Chat Stream:', '' + data); -// }); - -// EMBEDDINGS -const embeddingsResponse = await client.embeddings('le-embed', 'hello world'); - -console.log('Embeddings:', embeddingsResponse.data); - - -// EMBEDDINGS BATCH - -// Create 100 strings 
to embed -const input = []; -for (let i = 0; i < 10; i++) { - input.push('hello world'); -} - -const embeddingsBatchResponse = await client.embeddings('le-embed', input); - -console.log('Embeddings Batch:', embeddingsBatchResponse.data); diff --git a/examples/list_models.js b/examples/list_models.js new file mode 100644 index 0000000..7b265da --- /dev/null +++ b/examples/list_models.js @@ -0,0 +1,11 @@ +import MistralClient from '@mistralai/mistralai'; + +const apiKey = process.env.MISTRAL_API_KEY; + +const client = new MistralClient(apiKey); + +const listModelsResponse = await client.listModels(); + +listModelsResponse.data.forEach((model) => { + console.log('Model:', model); +});