From f3e2485e2a2ec55f7bff984d86552622bb03405b Mon Sep 17 00:00:00 2001
From: mjdaoudi
Date: Sat, 6 Jan 2024 19:38:40 +0100
Subject: [PATCH 1/3] Update client.js

---
 src/client.js | 85 ++++++++++++++++++++++++++++-----------------------
 1 file changed, 46 insertions(+), 39 deletions(-)

diff --git a/src/client.js b/src/client.js
index c8239da..d821b55 100644
--- a/src/client.js
+++ b/src/client.js
@@ -1,12 +1,19 @@
 let isNode = false;
-if (typeof window === 'undefined' || typeof globalThis.fetch !== 'undefined') {
-  globalThis.fetch = (await import('node-fetch')).default;
-  isNode = true;
+
+async function initializeFetch() {
+  if (typeof globalThis.fetch === "undefined") {
+    const nodeFetch = await import("node-fetch");
+    fetch = nodeFetch.default;
+    isNode = true;
+  } else {
+    fetch = globalThis.fetch;
+  }
 }
+initializeFetch();
 
 const RETRY_STATUS_CODES = [429, 500, 502, 503, 504];
-const ENDPOINT = 'https://api.mistral.ai';
+const ENDPOINT = "https://api.mistral.ai";
 
 /**
  * MistralAPIError
@@ -20,9 +27,9 @@ class MistralAPIError extends Error {
    */
   constructor(message) {
     super(message);
-    this.name = 'MistralAPIError';
+    this.name = "MistralAPIError";
   }
-};
+}
 
 /**
  * MistralClient
@@ -41,7 +48,7 @@ class MistralClient {
     apiKey = process.env.MISTRAL_API_KEY,
     endpoint = ENDPOINT,
     maxRetries = 5,
-    timeout = 120,
+    timeout = 120
   ) {
     this.endpoint = endpoint;
     this.apiKey = apiKey;
@@ -57,16 +64,16 @@ class MistralClient {
    * @param {*} request
    * @return {Promise<*>}
    */
-  _request = async function(method, path, request) {
+  _request = async function (method, path, request) {
     const url = `${this.endpoint}/${path}`;
     const options = {
       method: method,
       headers: {
-        'Accept': 'application/json',
-        'Content-Type': 'application/json',
-        'Authorization': `Bearer ${this.apiKey}`,
+        Accept: "application/json",
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${this.apiKey}`,
       },
-      body: method !== 'get' ? JSON.stringify(request) : null,
+      body: method !== "get" ? JSON.stringify(request) : null,
       timeout: this.timeout * 1000,
     };
 
@@ -86,11 +93,11 @@ class MistralClient {
             const decoder = new TextDecoder();
             while (true) {
               // Read from the stream
-              const {done, value} = await reader.read();
+              const { done, value } = await reader.read();
               // Exit if we're done
               if (done) return;
               // Else yield the chunk
-              yield decoder.decode(value, {stream: true});
+              yield decoder.decode(value, { stream: true });
             }
           } finally {
             reader.releaseLock();
@@ -105,31 +112,31 @@ class MistralClient {
           console.debug(
             `Retrying request on response status: ${response.status}`,
             `Response: ${await response.text()}`,
-            `Attempt: ${attempts + 1}`,
+            `Attempt: ${attempts + 1}`
          );
           // eslint-disable-next-line max-len
           await new Promise((resolve) =>
-            setTimeout(resolve, Math.pow(2, (attempts + 1)) * 500),
+            setTimeout(resolve, Math.pow(2, attempts + 1) * 500)
          );
        } else {
          throw new MistralAPIError(
            `HTTP error! status: ${response.status} ` +
-              `Response: \n${await response.text()}`,
+              `Response: \n${await response.text()}`
          );
        }
      } catch (error) {
        console.error(`Request failed: ${error.message}`);
-        if (error.name === 'MistralAPIError') {
+        if (error.name === "MistralAPIError") {
          throw error;
        }
        if (attempts === this.maxRetries - 1) throw error;
        // eslint-disable-next-line max-len
        await new Promise((resolve) =>
-          setTimeout(resolve, Math.pow(2, (attempts + 1)) * 500),
+          setTimeout(resolve, Math.pow(2, attempts + 1) * 500)
        );
      }
    }
-    throw new Error('Max retries reached');
+    throw new Error("Max retries reached");
   };
 
   /**
@@ -144,7 +151,7 @@ class MistralClient {
    * @param {*} safeMode
    * @return {Promise}
    */
-  _makeChatCompletionRequest = function(
+  _makeChatCompletionRequest = function (
     model,
     messages,
     temperature,
@@ -152,7 +159,7 @@ class MistralClient {
     topP,
     randomSeed,
     stream,
-    safeMode,
+    safeMode
   ) {
     return {
       model: model,
@@ -170,8 +177,8 @@ class MistralClient {
    * Returns a list of the available models
    * @return {Promise}
    */
-  listModels = async function() {
-    const response = await this._request('get', 'v1/models');
+  listModels = async function () {
+    const response = await this._request("get", "v1/models");
     return response;
   };
 
@@ -187,7 +194,7 @@ class MistralClient {
    * @param {*} safeMode whether to use safe mode, e.g. true
    * @return {Promise}
    */
-  chat = async function({
+  chat = async function ({
     model,
     messages,
     temperature,
@@ -204,12 +211,12 @@ class MistralClient {
       topP,
       randomSeed,
       false,
-      safeMode,
+      safeMode
     );
     const response = await this._request(
-      'post',
-      'v1/chat/completions',
-      request,
+      "post",
+      "v1/chat/completions",
+      request
     );
     return response;
   };
@@ -243,25 +250,25 @@ class MistralClient {
       topP,
       randomSeed,
       true,
-      safeMode,
+      safeMode
     );
     const response = await this._request(
-      'post',
-      'v1/chat/completions',
-      request,
+      "post",
+      "v1/chat/completions",
+      request
     );
 
-    let buffer = '';
+    let buffer = "";
 
     for await (const chunk of response) {
       buffer += chunk;
       let firstNewline;
-      while ((firstNewline = buffer.indexOf('\n')) !== -1) {
+      while ((firstNewline = buffer.indexOf("\n")) !== -1) {
         const chunkLine = buffer.substring(0, firstNewline);
         buffer = buffer.substring(firstNewline + 1);
-        if (chunkLine.startsWith('data:')) {
+        if (chunkLine.startsWith("data:")) {
           const json = chunkLine.substring(6).trim();
-          if (json !== '[DONE]') {
+          if (json !== "[DONE]") {
             yield JSON.parse(json);
           }
         }
@@ -277,12 +284,12 @@ class MistralClient {
    * e.g. ['What is the best French cheese?']
    * @return {Promise}
    */
-  embeddings = async function({model, input}) {
+  embeddings = async function ({ model, input }) {
     const request = {
       model: model,
       input: input,
    };
-    const response = await this._request('post', 'v1/embeddings', request);
+    const response = await this._request("post", "v1/embeddings", request);
     return response;
   };
 }

From 50ae49637427c15acefa2229f4efd3e8061417b7 Mon Sep 17 00:00:00 2001
From: Mehdi Jdaoudi <72312833+mjdaoudi@users.noreply.github.com>
Date: Sat, 6 Jan 2024 19:48:57 +0100
Subject: [PATCH 2/3] Style - Consistent code formatting

Prettier changed the formatting of the file. I changed it back to be
similar to the existing Mistral style.
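
For illustration, the kind of difference being reverted here (lines taken
from the diff below): Prettier's double quotes, spaced destructuring braces,
`function (` with a space, and dropped trailing commas, versus the project's
existing ESLint-enforced style.

    // Prettier output (patch 1)
    const { done, value } = await reader.read();
    const response = await this._request("get", "v1/models");

    // Existing project style (restored in patch 2)
    const {done, value} = await reader.read();
    const response = await this._request('get', 'v1/models');

Keeping the existing single-quote, no-brace-space style avoids a noisy
whole-file reformat in an otherwise small functional change.
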
---
 src/client.js | 80 +++++++++++++++++++++++++++------------------------
 1 file changed, 42 insertions(+), 38 deletions(-)

diff --git a/src/client.js b/src/client.js
index d821b55..8b26b0f 100644
--- a/src/client.js
+++ b/src/client.js
@@ -1,8 +1,12 @@
 let isNode = false;
+if (typeof window === 'undefined' || typeof globalThis.fetch !== 'undefined') {
+  globalThis.fetch = (await import('node-fetch')).default;
+  isNode = true;
+}
 
 async function initializeFetch() {
-  if (typeof globalThis.fetch === "undefined") {
-    const nodeFetch = await import("node-fetch");
+  if (typeof globalThis.fetch === 'undefined') {
+    const nodeFetch = await import('node-fetch');
     fetch = nodeFetch.default;
     isNode = true;
   } else {
@@ -13,7 +17,7 @@ async function initializeFetch() {
 initializeFetch();
 
 const RETRY_STATUS_CODES = [429, 500, 502, 503, 504];
-const ENDPOINT = "https://api.mistral.ai";
+const ENDPOINT = 'https://api.mistral.ai';
 
 /**
  * MistralAPIError
@@ -27,9 +31,9 @@ class MistralAPIError extends Error {
    */
   constructor(message) {
     super(message);
-    this.name = "MistralAPIError";
+    this.name = 'MistralAPIError';
   }
-}
+};
 
 /**
  * MistralClient
@@ -48,7 +52,7 @@ class MistralClient {
     apiKey = process.env.MISTRAL_API_KEY,
     endpoint = ENDPOINT,
     maxRetries = 5,
-    timeout = 120
+    timeout = 120,
   ) {
     this.endpoint = endpoint;
     this.apiKey = apiKey;
@@ -64,16 +68,16 @@ class MistralClient {
    * @param {*} request
    * @return {Promise<*>}
    */
-  _request = async function (method, path, request) {
+  _request = async function(method, path, request) {
     const url = `${this.endpoint}/${path}`;
     const options = {
       method: method,
       headers: {
-        Accept: "application/json",
-        "Content-Type": "application/json",
-        Authorization: `Bearer ${this.apiKey}`,
+        'Accept': 'application/json',
+        'Content-Type': 'application/json',
+        'Authorization': `Bearer ${this.apiKey}`,
       },
-      body: method !== "get" ? JSON.stringify(request) : null,
+      body: method !== 'get' ? JSON.stringify(request) : null,
       timeout: this.timeout * 1000,
     };
 
@@ -93,11 +97,11 @@ class MistralClient {
             const decoder = new TextDecoder();
             while (true) {
               // Read from the stream
-              const { done, value } = await reader.read();
+              const {done, value} = await reader.read();
               // Exit if we're done
               if (done) return;
               // Else yield the chunk
-              yield decoder.decode(value, { stream: true });
+              yield decoder.decode(value, {stream: true});
             }
           } finally {
             reader.releaseLock();
@@ -112,31 +116,31 @@ class MistralClient {
           console.debug(
             `Retrying request on response status: ${response.status}`,
             `Response: ${await response.text()}`,
-            `Attempt: ${attempts + 1}`
+            `Attempt: ${attempts + 1}`,
          );
           // eslint-disable-next-line max-len
           await new Promise((resolve) =>
-            setTimeout(resolve, Math.pow(2, attempts + 1) * 500)
+            setTimeout(resolve, Math.pow(2, (attempts + 1)) * 500),
          );
        } else {
          throw new MistralAPIError(
            `HTTP error! status: ${response.status} ` +
-              `Response: \n${await response.text()}`
+              `Response: \n${await response.text()}`,
          );
        }
      } catch (error) {
        console.error(`Request failed: ${error.message}`);
-        if (error.name === "MistralAPIError") {
+        if (error.name === 'MistralAPIError') {
          throw error;
        }
        if (attempts === this.maxRetries - 1) throw error;
        // eslint-disable-next-line max-len
        await new Promise((resolve) =>
-          setTimeout(resolve, Math.pow(2, attempts + 1) * 500)
+          setTimeout(resolve, Math.pow(2, (attempts + 1)) * 500),
        );
      }
    }
-    throw new Error("Max retries reached");
+    throw new Error('Max retries reached');
   };
 
   /**
@@ -151,7 +155,7 @@ class MistralClient {
    * @param {*} safeMode
    * @return {Promise}
    */
-  _makeChatCompletionRequest = function (
+  _makeChatCompletionRequest = function(
     model,
     messages,
     temperature,
@@ -159,7 +163,7 @@ class MistralClient {
     topP,
     randomSeed,
     stream,
-    safeMode
+    safeMode,
   ) {
     return {
       model: model,
@@ -177,8 +181,8 @@ class MistralClient {
    * Returns a list of the available models
    * @return {Promise}
    */
-  listModels = async function () {
-    const response = await this._request("get", "v1/models");
+  listModels = async function() {
+    const response = await this._request('get', 'v1/models');
     return response;
   };
 
@@ -194,7 +198,7 @@ class MistralClient {
    * @param {*} safeMode whether to use safe mode, e.g. true
    * @return {Promise}
    */
-  chat = async function ({
+  chat = async function({
     model,
     messages,
     temperature,
@@ -211,12 +215,12 @@ class MistralClient {
       topP,
       randomSeed,
       false,
-      safeMode
+      safeMode,
     );
     const response = await this._request(
-      "post",
-      "v1/chat/completions",
-      request
+      'post',
+      'v1/chat/completions',
+      request,
     );
     return response;
   };
@@ -250,25 +254,25 @@ class MistralClient {
       topP,
       randomSeed,
       true,
-      safeMode
+      safeMode,
     );
     const response = await this._request(
-      "post",
-      "v1/chat/completions",
-      request
+      'post',
+      'v1/chat/completions',
+      request,
     );
 
-    let buffer = "";
+    let buffer = '';
 
     for await (const chunk of response) {
       buffer += chunk;
       let firstNewline;
-      while ((firstNewline = buffer.indexOf("\n")) !== -1) {
+      while ((firstNewline = buffer.indexOf('\n')) !== -1) {
         const chunkLine = buffer.substring(0, firstNewline);
         buffer = buffer.substring(firstNewline + 1);
-        if (chunkLine.startsWith("data:")) {
+        if (chunkLine.startsWith('data:')) {
           const json = chunkLine.substring(6).trim();
-          if (json !== "[DONE]") {
+          if (json !== '[DONE]') {
             yield JSON.parse(json);
           }
         }
@@ -284,12 +288,12 @@ class MistralClient {
    * e.g. ['What is the best French cheese?']
    * @return {Promise}
    */
-  embeddings = async function ({ model, input }) {
+  embeddings = async function({model, input}) {
     const request = {
       model: model,
       input: input,
    };
-    const response = await this._request("post", "v1/embeddings", request);
+    const response = await this._request('post', 'v1/embeddings', request);
     return response;
   };
 }

From 984a83ad67c6c7c627f665fc2ee621c9dacfc67c Mon Sep 17 00:00:00 2001
From: Bam4d
Date: Mon, 15 Jan 2024 12:09:52 +0100
Subject: [PATCH 3/3] some eslint and compatibility things

---
 src/client.js | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/src/client.js b/src/client.js
index 8b26b0f..e8e9bd2 100644
--- a/src/client.js
+++ b/src/client.js
@@ -1,11 +1,12 @@
 let isNode = false;
-if (typeof window === 'undefined' || typeof globalThis.fetch !== 'undefined') {
-  globalThis.fetch = (await import('node-fetch')).default;
-  isNode = true;
-}
 
+/**
+ * Initialize fetch
+ * @return {Promise}
+ */
 async function initializeFetch() {
-  if (typeof globalThis.fetch === 'undefined') {
+  if (typeof window === 'undefined' ||
+      typeof globalThis.fetch === 'undefined') {
     const nodeFetch = await import('node-fetch');
     fetch = nodeFetch.default;
     isNode = true;
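
A small usage sketch, not part of the patches, showing how the client would
be exercised after this series. It assumes the file's default export is
MistralClient, that the code runs as an ES module with MISTRAL_API_KEY set in
the environment, and that the API returns the usual chat-completion shape
with a choices array; the import path is illustrative.

    // Usage sketch only -- the import path and response shape are
    // assumptions, not something these patches define.
    import MistralClient from './src/client.js';

    // apiKey defaults to process.env.MISTRAL_API_KEY (see the constructor).
    const client = new MistralClient();

    // GET v1/models
    const models = await client.listModels();
    console.log(models);

    // POST v1/chat/completions
    const chatResponse = await client.chat({
      model: 'mistral-tiny',
      messages: [{role: 'user', content: 'What is the best French cheese?'}],
    });
    console.log(chatResponse.choices[0].message.content);

With the retry logic in _request, a request that keeps hitting one of the
RETRY_STATUS_CODES waits Math.pow(2, attempts + 1) * 500 ms between attempts,
i.e. roughly 1, 2, 4, 8 and 16 seconds with the default maxRetries of 5,
before 'Max retries reached' is thrown.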