Skip to content

Commit

Permalink
feat: update llm-ls to 0.5.3 (#141)
Browse files Browse the repository at this point in the history
  • Loading branch information
McPatate committed May 24, 2024
1 parent c9e1faf commit a8a67b8
Show file tree
Hide file tree
Showing 4 changed files with 27 additions and 23 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ on:

env:
FETCH_DEPTH: 0 # pull in the tags for the version string
LLM_LS_VERSION: 0.5.2
LLM_LS_VERSION: 0.5.3

jobs:
package:
Expand Down Expand Up @@ -48,7 +48,7 @@ jobs:
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version: 16
node-version: 20

- uses: robinraju/[email protected]
with:
Expand Down Expand Up @@ -89,7 +89,7 @@ jobs:
- name: Install Nodejs
uses: actions/setup-node@v3
with:
node-version: 16
node-version: 20

- run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
- run: 'echo "HEAD_SHA: $HEAD_SHA"'
Expand Down
25 changes: 11 additions & 14 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -83,20 +83,8 @@ const data = { inputs, ...configuration.requestBody };
const model = configuration.modelId;
let endpoint;
switch(configuration.backend) {
case "huggingface":
let url;
if (configuration.url === null) {
url = "https://api-inference.huggingface.co";
} else {
url = configuration.url;
}
endpoint = `${url}/models/${model}`;
break;
case "ollama":
case "openai":
case "tgi":
endpoint = configuration.url;
break;
// cf URL construction
let endpoint = build_url(configuration);
}

const res = await fetch(endpoint, {
Expand All @@ -110,6 +98,15 @@ const json = await res.json() as { generated_text: string };

Note that the example above is a simplified version to explain what is happening under the hood.

#### URL construction

The endpoint URL that is queried to fetch suggestions is built in the following way:
- depending on the backend, it will try to append the correct path to the base URL located in the configuration (e.g. `{url}/v1/completions` for the `openai` backend)
- if no URL is set for the `huggingface` backend, it will automatically use the default URL
- it will error for other backends as there is no sensible default URL
- if you do set the **correct** path at the end of the URL, it will not append it a second time, as it first checks whether the path is already present
- there is an option to disable this behavior: `llm.disableUrlPathCompletion`

### Suggestion behavior

You can tune the way the suggestions behave:
Expand Down
5 changes: 5 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,11 @@
"pattern": "**"
},
"description": "Filter documents to enable suggestions for"
},
"llm.disableUrlPathCompletion": {
"type": "boolean",
"default": false,
"description": "When setting `llm.url`, llm-ls will try to append the correct path to your URL if it doesn't end with such a path, e.g. for an OpenAI backend if it doesn't end with `/v1/completions`. Set this to `true` to disable this behavior."
}
}
}
Expand Down
14 changes: 8 additions & 6 deletions src/extension.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,10 @@ let ctx: vscode.ExtensionContext;
let loadingIndicator: vscode.StatusBarItem;

function createLoadingIndicator(): vscode.StatusBarItem {
let li = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left, 10)
li.text = "$(loading~spin) LLM"
li.tooltip = "Generating completions..."
return li
let li = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left, 10);
li.text = "$(loading~spin) LLM";
li.tooltip = "Generating completions...";
return li;
}

export async function activate(context: vscode.ExtensionContext) {
Expand All @@ -48,6 +48,7 @@ export async function activate(context: vscode.ExtensionContext) {
if (command.startsWith("~/")) {
command = homedir() + command.slice("~".length);
}

const serverOptions: ServerOptions = {
run: {
command, transport: TransportKind.stdio, options: {
Expand Down Expand Up @@ -81,7 +82,7 @@ export async function activate(context: vscode.ExtensionContext) {
clientOptions
);

loadingIndicator = createLoadingIndicator()
loadingIndicator = createLoadingIndicator();

await client.start();

Expand Down Expand Up @@ -173,6 +174,7 @@ export async function activate(context: vscode.ExtensionContext) {
tlsSkipVerifyInsecure: config.get("tlsSkipVerifyInsecure") as boolean,
ide: "vscode",
tokenizerConfig,
disableUrlPathCompletion: config.get("disableUrlPathCompletion") as boolean,
};
try {
loadingIndicator.show()
Expand Down Expand Up @@ -345,4 +347,4 @@ async function delay(milliseconds: number, token: vscode.CancellationToken): Pro
resolve(token.isCancellationRequested)
}, milliseconds);
});
}
}

0 comments on commit a8a67b8

Please sign in to comment.