ollama: starting with configuration + frontend
haraldschilly committed Feb 23, 2024
1 parent fd4a48e commit 5b78197
Showing 33 changed files with 390 additions and 114 deletions.
37 changes: 21 additions & 16 deletions src/packages/frontend/account/other-settings.tsx
@@ -378,28 +378,21 @@ export class OtherSettings extends Component<Props> {

render_language_model(): Rendered {
const projectsStore = redux.getStore("projects");
const haveOpenAI = projectsStore.hasLanguageModelEnabled(
undefined,
undefined,
"openai",
);
const haveGoogle = projectsStore.hasLanguageModelEnabled(
undefined,
undefined,
"google",
);
const enabled = projectsStore.llmEnabledSummary();
const ollama = redux.getStore("customize").get("ollama")?.toJS() ?? {};

const defaultModel = getValidLanguageModelName(
this.props.other_settings.get(SETTINGS_LANGUAGE_MODEL_KEY),
{ openai: haveOpenAI, google: haveGoogle },
enabled,
Object.keys(ollama),
);

const options: { value: string; display: JSX.Element }[] = [];

for (const key of USER_SELECTABLE_LANGUAGE_MODELS) {
const vendor = model2vendor(key);
if (vendor === "google" && !haveGoogle) continue;
if (vendor === "openai" && !haveOpenAI) continue;
if (vendor === "google" && !enabled.google) continue;
if (vendor === "openai" && !enabled.openai) continue;

const txt = isFreeModel(key) ? " (free)" : "";
const display = (
@@ -410,6 +403,18 @@ export class OtherSettings extends Component<Props> {
options.push({ value: key, display });
}

if (enabled.ollama) {
for (const key in ollama) {
const title = ollama[key].display ?? key;
const display = (
<>
<strong>{title}</strong> (Ollama)
</>
);
options.push({ value: key, display });
}
}

return (
<LabeledRow
label={
@@ -470,9 +475,9 @@ export class OtherSettings extends Component<Props> {
redux.getStore("projects").clearOpenAICache();
}}
>
<strong>Disable all AI integrations</strong>, e.g.,
code generation or explanation buttons in Jupyter, @chatgpt
mentions, etc.
<strong>Disable all AI integrations</strong>, e.g., code
generation or explanation buttons in Jupyter, @chatgpt mentions,
etc.
</Checkbox>
)}
{this.render_language_model()}
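
For orientation, here is a minimal sketch of how the new enabled summary and the ollama customize entry fit together in render_language_model() above. The field names are inferred only from their usage in this hunk (enabled.openai, enabled.google, enabled.ollama, ollama[key].display); the authoritative definitions live in the projects store and in @cocalc/util, not here.

// Sketch only: shapes assumed from the calls in render_language_model() above.
interface LLMEnabledSummary {
  openai: boolean;
  google: boolean;
  ollama: boolean;
}

interface OllamaEntrySketch {
  display?: string; // human-readable name shown in the selector
}

// Build the extra select options contributed by Ollama, mirroring the loop above.
function ollamaOptions(
  enabled: LLMEnabledSummary,
  ollama: { [key: string]: OllamaEntrySketch },
): { value: string; label: string }[] {
  if (!enabled.ollama) return [];
  return Object.keys(ollama).map((key) => ({
    value: key,
    label: `${ollama[key].display ?? key} (Ollama)`,
  }));
}

Under this reading, the same summary object plus Object.keys(ollama) is also what getValidLanguageModelName() receives, so a previously selected model is kept only while its vendor is still enabled.
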
31 changes: 27 additions & 4 deletions src/packages/frontend/account/useLanguageModelSetting.tsx
@@ -2,26 +2,49 @@ import { redux, useMemo, useTypedRedux } from "@cocalc/frontend/app-framework";
import {
LanguageModel,
USER_SELECTABLE_LANGUAGE_MODELS,
fromOllamaModel,
getValidLanguageModelName,
isOllamaLLM,
} from "@cocalc/util/db-schema/openai";

export const SETTINGS_LANGUAGE_MODEL_KEY = "language_model";

export function useLanguageModelSetting(): [
LanguageModel,
(llm: LanguageModel) => void,
LanguageModel | string,
(llm: LanguageModel | string) => void,
] {
const other_settings = useTypedRedux("account", "other_settings");
const ollama = useTypedRedux("customize", "ollama");
const haveOpenAI = useTypedRedux("customize", "openai_enabled");
const haveGoogle = useTypedRedux("customize", "google_vertexai_enabled");
const haveOllama = useTypedRedux("customize", "ollama_enabled");

const filter = useMemo(() => {
const projectsStore = redux.getStore("projects");
return projectsStore.llmEnabledSummary();
}, [haveOpenAI, haveGoogle, haveOllama]);

const llm = useMemo(() => {
return getValidLanguageModelName(other_settings?.get("language_model"));
return getValidLanguageModelName(
other_settings?.get("language_model"),
filter,
Object.keys(ollama?.toJS() ?? {}),
);
}, [other_settings]);

function setLLM(llm: LanguageModel) {
function setLLM(llm: LanguageModel | string) {
if (USER_SELECTABLE_LANGUAGE_MODELS.includes(llm as any)) {
redux
.getActions("account")
.set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
}

// check if llm is a key in the ollama typedmap
if (isOllamaLLM(llm) && ollama?.get(fromOllamaModel(llm))) {
redux
.getActions("account")
.set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
}
}

return [llm, setLLM];
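
A small sketch of what the two Ollama helpers imported above plausibly do, since the setter's behavior depends on them. The "ollama-" prefix is an assumption inferred from the names isOllamaLLM and fromOllamaModel; the real implementations are in @cocalc/util/db-schema/openai.

// Sketch (assumption): prefix-based helpers; not the actual implementation.
const OLLAMA_PREFIX = "ollama-";

function isOllamaLLM_sketch(model: string): boolean {
  return model.startsWith(OLLAMA_PREFIX);
}

function fromOllamaModel_sketch(model: string): string {
  // recover the key used in the customize store's ollama map
  return isOllamaLLM_sketch(model) ? model.slice(OLLAMA_PREFIX.length) : model;
}

Under that reading, setLLM("ollama-mistral") (a hypothetical key) would persist the setting only if the customize store has an ollama entry under "mistral"; any value that is neither user-selectable nor configured is silently ignored.
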
3 changes: 2 additions & 1 deletion src/packages/frontend/admin/site-settings/row-entry.tsx
@@ -120,7 +120,8 @@ export function RowEntry({
{displayed_val != null && (
<span>
{" "}
Interpreted as <code>{displayed_val}</code>.{" "}
{valid ? "Interpreted as" : "Invalid:"}{" "}
<code>{displayed_val}</code>.{" "}
</span>
)}
{valid != null && Array.isArray(valid) && (
2 changes: 1 addition & 1 deletion src/packages/frontend/chat/message.tsx
@@ -17,7 +17,7 @@ import {
import { Gap, Icon, TimeAgo, Tip } from "@cocalc/frontend/components";
import MostlyStaticMarkdown from "@cocalc/frontend/editors/slate/mostly-static-markdown";
import { IS_TOUCH } from "@cocalc/frontend/feature";
import { modelToName } from "@cocalc/frontend/frame-editors/chatgpt/model-switch";
import { modelToName } from "@cocalc/frontend/frame-editors/llm/model-switch";
import { COLORS } from "@cocalc/util/theme";
import { ChatActions } from "./actions";
import { getUserName } from "./chat-log";
29 changes: 23 additions & 6 deletions src/packages/frontend/codemirror/extensions/ai-formula.tsx
@@ -1,23 +1,28 @@
import { Button, Divider, Input, Modal, Space } from "antd";

import { useLanguageModelSetting } from "@cocalc/frontend/account/useLanguageModelSetting";
import { redux, useEffect, useState } from "@cocalc/frontend/app-framework";
import {
redux,
useEffect,
useState,
useTypedRedux,
} from "@cocalc/frontend/app-framework";
import {
HelpIcon,
Markdown,
Paragraph,
Title,
Text,
Title,
} from "@cocalc/frontend/components";
import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-model-icon";
import ModelSwitch, {
modelToName,
} from "@cocalc/frontend/frame-editors/chatgpt/model-switch";
} from "@cocalc/frontend/frame-editors/llm/model-switch";
import { show_react_modal } from "@cocalc/frontend/misc";
import track from "@cocalc/frontend/user-tracking";
import { webapp_client } from "@cocalc/frontend/webapp-client";
import { isFreeModel, isLanguageModel } from "@cocalc/util/db-schema/openai";
import { unreachable } from "@cocalc/util/misc";
import { isFreeModel } from "@cocalc/util/db-schema/openai";
import track from "@cocalc/frontend/user-tracking";

type Mode = "tex" | "md";

@@ -47,6 +52,7 @@ function AiGenFormula({ mode, text = "", project_id, cb }: Props) {
const [formula, setFormula] = useState<string>("");
const [generating, setGenerating] = useState<boolean>(false);
const [error, setError] = useState<string | undefined>(undefined);
const ollama = useTypedRedux("customize", "ollama");

const enabled = redux
.getStore("projects")
@@ -134,12 +140,23 @@ function AiGenFormula({ mode, text = "", project_id, cb }: Props) {
}
}, [text]);

function renderModel2Name(): string {
if (isLanguageModel(model)) {
return modelToName(model);
}
const om = ollama?.get(model);
if (om) {
return om.get("title") ?? `Ollama ${model}`;
}
return model;
}

function renderTitle() {
return (
<>
<Title level={4}>
<LanguageModelVendorAvatar model={model} /> Generate LaTeX Formula
using {modelToName(model)}
using {renderModel2Name()}
</Title>
{enabled ? (
<>
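
For clarity, the display-name resolution in renderModel2Name() above, restated over plain objects instead of the immutable TypedMap. Note that it reads a title field here while other-settings.tsx reads display; the canonical field set is defined by OllamaPublic in @cocalc/util/types/llm, not by this sketch.

// Sketch only: plain-object version of renderModel2Name().
interface OllamaTitleSketch {
  title?: string; // field name as read in this file; may differ from the canonical type
}

function modelDisplayName(
  model: string,
  ollama: { [key: string]: OllamaTitleSketch },
  isLanguageModel: (m: string) => boolean, // from @cocalc/util/db-schema/openai
  modelToName: (m: string) => string, // from frame-editors/llm/model-switch
): string {
  if (isLanguageModel(model)) return modelToName(model); // built-in model
  const om = ollama[model];
  if (om != null) return om.title ?? `Ollama ${model}`; // configured Ollama model
  return model; // fall back to the raw key
}
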
28 changes: 21 additions & 7 deletions src/packages/frontend/customize.tsx
@@ -8,6 +8,7 @@

import { fromJS, List, Map } from "immutable";
import { join } from "path";

import {
Actions,
rclass,
@@ -22,13 +23,14 @@ import {
import {
A,
build_date,
Gap,
Loading,
r_join,
smc_git_rev,
smc_version,
Gap,
UNIT,
} from "@cocalc/frontend/components";
import { getGoogleCloudImages, getImages } from "@cocalc/frontend/compute/api";
import { appBasePath } from "@cocalc/frontend/customize/app-base-path";
import { callback2, retry_until_success } from "@cocalc/util/async-utils";
import {
@@ -37,23 +39,23 @@ import {
FALLBACK_SOFTWARE_ENV,
} from "@cocalc/util/compute-images";
import { DEFAULT_COMPUTE_IMAGE } from "@cocalc/util/db-schema";
import type {
GoogleCloudImages,
Images,
} from "@cocalc/util/db-schema/compute-servers";
import {
KUCALC_COCALC_COM,
KUCALC_DISABLED,
KUCALC_ON_PREMISES,
site_settings_conf,
} from "@cocalc/util/db-schema/site-defaults";
import { deep_copy, dict, YEAR } from "@cocalc/util/misc";
import { reuseInFlight } from "@cocalc/util/reuse-in-flight";
import { sanitizeSoftwareEnv } from "@cocalc/util/sanitize-software-envs";
import * as theme from "@cocalc/util/theme";
import { OllamaPublic } from "@cocalc/util/types/llm";
import { DefaultQuotaSetting, Upgrades } from "@cocalc/util/upgrades/quota";
export { TermsOfService } from "@cocalc/frontend/customize/terms-of-service";
import type {
GoogleCloudImages,
Images,
} from "@cocalc/util/db-schema/compute-servers";
import { getImages, getGoogleCloudImages } from "@cocalc/frontend/compute/api";
import { reuseInFlight } from "@cocalc/util/reuse-in-flight";

// this sets UI modes for using a kubernetes based back-end
// 'yes' (historic value) equals 'cocalc.com'
@@ -93,6 +95,8 @@ export type SoftwareEnvironments = TypedMap<{
export interface CustomizeState {
is_commercial: boolean;
openai_enabled: boolean;
google_vertexai_enabled: boolean;
ollama_enabled: boolean;
neural_search_enabled: boolean;
datastore: boolean;
ssh_gateway: boolean;
@@ -148,6 +152,8 @@ export interface CustomizeState {
compute_servers_dns?: string;
compute_servers_images?: TypedMap<Images> | string | null;
compute_servers_images_google?: TypedMap<GoogleCloudImages> | string | null;

ollama?: TypedMap<{ [key: string]: TypedMap<OllamaPublic> }>;
}

export class CustomizeStore extends Store<CustomizeState> {
@@ -238,10 +244,12 @@ async function init_customize() {
registration,
strategies,
software = null,
ollama = null, // the derived public information
} = customize;
process_kucalc(configuration);
process_software(software, configuration.is_cocalc_com);
process_customize(configuration); // this sets _is_configured to true
process_ollama(ollama);
const actions = redux.getActions("account");
// Which account creation strategies we support.
actions.setState({ strategies });
@@ -251,6 +259,12 @@

init_customize();

function process_ollama(ollama) {
if (ollama) {
actions.setState({ ollama: fromJS(ollama) });
}
}

function process_kucalc(obj) {
// TODO make this a to_val function in site_settings_conf.kucalc
obj.kucalc = validate_kucalc(obj.kucalc);
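
To summarize the new plumbing in this file: the customize payload now carries a derived public Ollama map, process_ollama() stores it as immutable data on the customize store under "ollama", and the new ollama_enabled / google_vertexai_enabled flags join openai_enabled in CustomizeState. A sketch of the payload shape follows; every field name is an assumption inferred from how this commit reads the data, with the canonical type being OllamaPublic in @cocalc/util/types/llm.

// Sketch (assumption): a possible customize payload fragment for Ollama,
// keyed by the selectable model key used throughout the frontend.
interface OllamaPublicSketch {
  model?: string; // underlying Ollama model name (illustrative)
  display?: string; // read by other-settings.tsx
  title?: string; // read by ai-formula.tsx
  desc?: string; // optional longer description (assumed)
}

const examplePayload: { [key: string]: OllamaPublicSketch } = {
  mistral: { model: "mistral", display: "Mistral 7B (Ollama)" },
};

// process_ollama(examplePayload) would fromJS() this object and put it on the
// customize store, where hooks like useTypedRedux("customize", "ollama") read it.
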
4 changes: 2 additions & 2 deletions src/packages/frontend/frame-editors/code-editor/actions.ts
@@ -64,8 +64,8 @@ import {
len,
uuid,
} from "@cocalc/util/misc";
import languageModelCreateChat, { Options } from "../chatgpt/create-chat";
import type { Scope as LanguageModelScope } from "../chatgpt/types";
import languageModelCreateChat, { Options } from "../llm/create-chat";
import type { Scope as LanguageModelScope } from "../llm/types";
import { default_opts } from "../codemirror/cm-options";
import { print_code } from "../frame-tree/print-code";
import * as tree_ops from "../frame-tree/tree-ops";
14 changes: 9 additions & 5 deletions src/packages/frontend/frame-editors/frame-tree/format-error.tsx
@@ -1,11 +1,12 @@
// A dismissable error message that appears when formatting code.

import { useMemo } from "react";
import { Alert, Button } from "antd";
import { useMemo } from "react";

import { file_associations } from "@cocalc/frontend/file-associations";
import { useFrameContext } from "@cocalc/frontend/frame-editors/frame-tree/frame-context";
import HelpMeFix from "@cocalc/frontend/frame-editors/llm/help-me-fix";
import { CodeMirrorStatic } from "@cocalc/frontend/jupyter/codemirror-static";
import HelpMeFix from "@cocalc/frontend/frame-editors/chatgpt/help-me-fix";
import { file_associations } from "@cocalc/frontend/file-associations";

interface Props {
formatError: string;
@@ -14,10 +15,13 @@ interface Props {

export default function FormatError({ formatError, formatInput }: Props) {
const { actions } = useFrameContext();
const language = useMemo(() => actions?.languageModelGetLanguage(), [actions]);
const language = useMemo(
() => actions?.languageModelGetLanguage(),
[actions],
);
const mode = useMemo(
() => file_associations[language]?.opts?.mode ?? language,
[language]
[language],
);

if (actions == null) return null;
Expand Up @@ -34,7 +34,7 @@ import { Actions } from "../code-editor/actions";
import { is_safari } from "../generic/browser";
import { SaveButton } from "./save-button";
import { ConnectionStatus, EditorDescription, EditorSpec } from "./types";
import LanguageModelTitleBarButton from "../chatgpt/title-bar-button";
import LanguageModelTitleBarButton from "../llm/title-bar-button";
import userTracking from "@cocalc/frontend/user-tracking";
import TitleBarTour from "./title-bar-tour";
import { IS_MOBILE } from "@cocalc/frontend/feature";
@@ -18,7 +18,7 @@ import {
useRedux,
} from "@cocalc/frontend/app-framework";
import { Icon, IconName, Loading } from "@cocalc/frontend/components";
import HelpMeFix from "@cocalc/frontend/frame-editors/chatgpt/help-me-fix";
import HelpMeFix from "@cocalc/frontend/frame-editors/llm/help-me-fix";
import { capitalize, is_different, path_split } from "@cocalc/util/misc";
import { COLORS } from "@cocalc/util/theme";
import { EditorState } from "../frame-tree/types";
9 changes: 5 additions & 4 deletions src/packages/frontend/frame-editors/latex-editor/gutters.tsx
@@ -9,12 +9,13 @@
// one gets a gutter mark, with pref to errors. The main error log shows everything, so this should be OK.

import { Popover } from "antd";
import { capitalize } from "@cocalc/util/misc";

import { Icon } from "@cocalc/frontend/components";
import { SPEC, SpecItem } from "./errors-and-warnings";
import { IProcessedLatexLog, Error } from "./latex-log-parser";
import HelpMeFix from "@cocalc/frontend/frame-editors/chatgpt/help-me-fix";
import HelpMeFix from "@cocalc/frontend/frame-editors/llm/help-me-fix";
import { capitalize } from "@cocalc/util/misc";
import { Actions } from "../code-editor/actions";
import { SPEC, SpecItem } from "./errors-and-warnings";
import { Error, IProcessedLatexLog } from "./latex-log-parser";

export function update_gutters(opts: {
log: IProcessedLatexLog;