Skip to content

Commit

Permalink
frontend/llm: respect AI student project limit in AI formula generator; throw proper error when querying client/llm without an enabled LLM for a specific tag
Browse files Browse the repository at this point in the history
  • Loading branch information
haraldschilly committed Mar 15, 2024
1 parent 39a378a commit d3150c5
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 7 deletions.
9 changes: 6 additions & 3 deletions src/packages/frontend/client/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -82,10 +82,13 @@ export class LLMClient {
system ??= getSystemPrompt(model, path);

if (!redux.getStore("projects").hasLanguageModelEnabled(project_id, tag)) {
return `Language model support is not currently enabled ${
project_id ? "in this project" : "on this server"
}.`;
throw new Error(
`Language model support is not currently enabled ${
project_id ? "in this project" : "on this server"
}. [tag=${tag}]`,
);
}

input = input.trim();
if (chatStream == null) {
if (!input || input == "test") {
Expand Down
15 changes: 11 additions & 4 deletions src/packages/frontend/codemirror/extensions/ai-formula.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ import { unreachable } from "@cocalc/util/misc";

type Mode = "tex" | "md";

const LLM_USAGE_TAG = `generate-formula`;

interface Opts {
mode: Mode;
text?: string;
Expand Down Expand Up @@ -50,7 +52,7 @@ function AiGenFormula({ mode, text = "", project_id, cb }: Props) {

const enabled = redux
.getStore("projects")
.hasLanguageModelEnabled(project_id);
.hasLanguageModelEnabled(project_id, LLM_USAGE_TAG);

function getPrompt() {
const description = input || text;
Expand Down Expand Up @@ -132,12 +134,17 @@ function AiGenFormula({ mode, text = "", project_id, cb }: Props) {
setGenerating(true);
setFormula("");
setFullReply("");
const tag = `generate-formula`;
track("chatgpt", { project_id, tag, mode, type: "generate", model });
track("chatgpt", {
project_id,
tag: LLM_USAGE_TAG,
mode,
type: "generate",
model,
});
const reply = await webapp_client.openai_client.query({
input: getPrompt(),
project_id,
tag,
tag: LLM_USAGE_TAG,
model,
system: "",
});
Expand Down

0 comments on commit d3150c5

Please sign in to comment.