diff --git a/examples/basic.cjs b/examples/basic.cjs index 26e5045..7b4c9d4 100755 --- a/examples/basic.cjs +++ b/examples/basic.cjs @@ -1,19 +1,16 @@ #!/usr/bin/env node -const { Substrate, GenerateText } = require("substrate"); +const { Substrate, ComputeText } = require("substrate"); async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: "https://api-staging.substrate.run", - }); + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const a = new GenerateText({ + const a = new ComputeText({ prompt: "ask me a short trivia question in one sentence", }); - const b = new GenerateText({ prompt: a.future.text }); + const b = new ComputeText({ prompt: a.future.text }); const res = await substrate.run(a, b); diff --git a/examples/basic.js b/examples/basic.js index 967f928..0f653ea 100755 --- a/examples/basic.js +++ b/examples/basic.js @@ -1,19 +1,16 @@ #!/usr/bin/env node -import { Substrate, GenerateText } from "substrate"; +import { Substrate, ComputeText } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: "https://api-staging.substrate.run", - }); + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const a = new GenerateText({ + const a = new ComputeText({ prompt: "ask me a short trivia question in one sentence", }); - const b = new GenerateText({ prompt: a.future.text }); + const b = new ComputeText({ prompt: a.future.text }); const res = await substrate.run(a, b); diff --git a/examples/basic.ts b/examples/basic.ts index 5f42623..8d55cce 100755 --- a/examples/basic.ts +++ b/examples/basic.ts @@ -1,14 +1,14 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateText, sb } from "substrate"; +import { Substrate, ComputeText, sb } from "substrate"; async function main() { const 
SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const story = new GenerateText({ prompt: "tell me a story" }); - const summary = new GenerateText({ + const story = new ComputeText({ prompt: "tell me a story" }); + const summary = new ComputeText({ prompt: sb.interpolate`summarize this story in one sentence: ${story.future.text}`, }); diff --git a/examples/image-generation.ts b/examples/image-generation.ts index c6500d8..c365a3d 100755 --- a/examples/image-generation.ts +++ b/examples/image-generation.ts @@ -1,16 +1,13 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateText, GenerateImage } from "substrate"; +import { Substrate, ComputeText, GenerateImage } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: "https://api-staging.substrate.run", - }); + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const scene = new GenerateText({ + const scene = new ComputeText({ prompt: "describe a highly detailed forest scene with something suprising happening in one sentence, be concise, like hemmingway would write it.", }); diff --git a/examples/implicit-nodes.ts b/examples/implicit-nodes.ts index e2aa25b..b3d80d9 100755 --- a/examples/implicit-nodes.ts +++ b/examples/implicit-nodes.ts @@ -1,25 +1,25 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateText } from "substrate"; +import { Substrate, ComputeText } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const a = new GenerateText( + const a = new ComputeText( { prompt: "tell me about windmills", max_tokens: 10 }, { id: "a" }, ); - const b = new GenerateText( + const b = new ComputeText( { prompt: a.future.text, max_tokens: 10 }, { id: "b" }, 
); - const c = new GenerateText( + const c = new ComputeText( { prompt: b.future.text, max_tokens: 10 }, { id: "c" }, ); - const d = new GenerateText( + const d = new ComputeText( { prompt: c.future.text, max_tokens: 10 }, { id: "d" }, ); diff --git a/examples/jina.ts b/examples/jina.ts index 0145055..1f06f46 100755 --- a/examples/jina.ts +++ b/examples/jina.ts @@ -1,16 +1,13 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateText, JinaV2 } from "substrate"; +import { Substrate, ComputeText, JinaV2 } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: "https://api-staging.substrate.run", - }); + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const a = new GenerateText({ prompt: "hi" }); + const a = new ComputeText({ prompt: "hi" }); const input: JinaV2.Input = { items: [ diff --git a/examples/jq.ts b/examples/jq.ts index fb95391..25bf29e 100755 --- a/examples/jq.ts +++ b/examples/jq.ts @@ -1,16 +1,13 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateText, sb, GenerateJSON } from "substrate"; +import { Substrate, ComputeText, sb, ComputeJSON } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: "https://api.substrate.run", - }); + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const a = new GenerateJSON({ + const a = new ComputeJSON({ prompt: "Give me an African capital city and its approximate population.", json_schema: { type: "object", @@ -23,7 +20,7 @@ async function main() { }, }); - const b = new GenerateText({ + const b = new ComputeText({ prompt: sb.concat( "give me the leader of the country: ", sb.jq<"string">(a.future.json_object, ".country"), diff --git a/examples/json.ts b/examples/json.ts index c1f7010..34694e0 
100755 --- a/examples/json.ts +++ b/examples/json.ts @@ -1,13 +1,13 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateJSON, GenerateText, sb } from "substrate"; +import { Substrate, ComputeJSON, ComputeText, sb } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const author = new GenerateJSON({ + const author = new ComputeJSON({ prompt: "Who wrote Don Quixote?", json_schema: { type: "object", @@ -29,7 +29,7 @@ async function main() { const name = author.future.json_object.get("name"); const bio = author.future.json_object.get("bio"); - const report = new GenerateText({ + const report = new ComputeText({ prompt: sb.interpolate`Write a short summary about ${name} and make sure to use the following bio: ${bio}`, }); diff --git a/examples/kitchen-sink.ts b/examples/kitchen-sink.ts index 8fd2e97..d54aef8 100755 --- a/examples/kitchen-sink.ts +++ b/examples/kitchen-sink.ts @@ -2,37 +2,32 @@ import { Substrate, - GenerateText, - MultiGenerateText, - GenerateJSON, - MultiGenerateJSON, - GenerateTextVision, + ComputeText, + MultiComputeText, + ComputeJSON, + MultiComputeJSON, Mistral7BInstruct, Firellava13B, GenerateImage, MultiGenerateImage, - GenerativeEditImage, - MultiGenerativeEditImage, - StableDiffusionXL, + InpaintImage, + MultiInpaintImage, StableDiffusionXLLightning, StableDiffusionXLInpaint, - StableDiffusionXLIPAdapter, - StableDiffusionXLControlNet, - FillMask, + EraseImage, UpscaleImage, RemoveBackground, SegmentUnderPoint, SegmentAnything, - TranscribeMedia, + TranscribeSpeech, GenerateSpeech, - XTTSV2, EmbedText, MultiEmbedText, EmbedImage, MultiEmbedImage, JinaV2, CLIP, - CreateVectorStore, + FindOrCreateVectorStore, ListVectorStores, DeleteVectorStore, QueryVectorStore, @@ -42,26 +37,11 @@ import { Mixtral8x7BInstruct, Llama3Instruct8B, Llama3Instruct70B, - RunPython, } from "substrate"; -const urls = { - 
staging: { name: "staging", value: "https://api-staging.substrate.run" }, - production: { name: "production", value: "https://api.substrate.run" }, -}; -const backends = { - v0: { name: "v0", value: "v0" as const }, - v1: { name: "v1", value: "v1" as const }, -}; - -// Not all nodes are available in all backend+env combinations yet, so -// in order to only test nodes that should be operational we can target -// them specifically. -const STAGING_V0 = { url: urls.staging, backend: backends.v0 }; -const STAGING_V1 = { url: urls.staging, backend: backends.v1 }; -const PRODUCTION_V0 = { url: urls.production, backend: backends.v0 }; -const PRODUCTION_V1 = { url: urls.production, backend: backends.v1 }; -const ALL_ENVS = [STAGING_V0, STAGING_V1, PRODUCTION_V0, PRODUCTION_V1]; +const STAGING = "https://api-staging.substrate.run"; +const PRODUCTION = "https://api.substrate.run"; +const ALL_ENVS = [STAGING, PRODUCTION]; // Some state-changing interactions will create a store or modify it's contents // and some other calls require the store to exist to property work (eg embed to store). 
@@ -70,454 +50,260 @@ const ALL_ENVS = [STAGING_V0, STAGING_V1, PRODUCTION_V0, PRODUCTION_V1]; const VECTOR_STORE = "kitchen-sink"; const examples = [ - { - node: new CreateVectorStore({ - collection_name: VECTOR_STORE, - model: "jina-v2", - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - node: new GenerateText({ - prompt: "Who is Don Quixote?", - temperature: 0.4, - }), - envs: ALL_ENVS, - }, - { - node: new MultiGenerateText({ - prompt: "Who is Don Quixote?", - num_choices: 2, - temperature: 0.4, - }), - envs: ALL_ENVS, - }, - { - node: new GenerateJSON({ - prompt: "Who is Don Quixote?", - json_schema: { - title: "Person", - type: "object", - properties: { - firstName: { - type: "string", - description: "The person's first name.", - }, - lastName: { - type: "string", - description: "The person's last name.", - }, - occupation: { - type: "string", - description: "The person's occupation.", - }, - }, - }, - temperature: 0.4, - max_tokens: 300, - }), - envs: ALL_ENVS, - }, - { - node: new MultiGenerateJSON({ - prompt: "Who is Don Quixote?", - json_schema: { - title: "Person", - type: "object", - properties: { - firstName: { - type: "string", - description: "The person's first name.", - }, - lastName: { - type: "string", - description: "The person's last name.", - }, - occupation: { - type: "string", - description: "The person's occupation.", - }, - }, - }, - num_choices: 2, - temperature: 0.4, - max_tokens: 100, - }), - envs: ALL_ENVS, - }, - { - // NOTE: mainly supported on backend v1, but v0 works for legacy use - // NOTE: the input types are not the same between v0 and v1 and we want to keep it that way for legacy support - node: new GenerateTextVision({ - prompt: "what are these paintings of and who made them?", - image_uris: [ - "https://media.substrate.run/docs-fuji-red.jpg", - "https://media.substrate.run/docs-fuji-blue.jpg", - ], - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - node: new Mistral7BInstruct({ - prompt: "Who is Don Quixote?", - 
num_choices: 2, - temperature: 0.5, - max_tokens: 100, - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // NOTE: mainly supported on backend v1, but v0 works for legacy use (see next node) - // NOTE: the input types are not the same between v0 and v1. - // FIXME: there seems to be an issue with v0 now though, when using either the new or - // the old params and the v0 node doesn't work with either. - node: new Firellava13B({ - prompt: "what are these paintings of and who made them?", - image_uris: [ - "https://media.substrate.run/docs-fuji-red.jpg", - "https://media.substrate.run/docs-fuji-blue.jpg", - ], - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - // NOTE: only supported by v0 - node: new GenerateImage({ - prompt: - "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", - store: "hosted", - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // NOTE: only supported by v0 - node: new MultiGenerateImage({ - prompt: - "hokusai futuristic supercell spiral cloud with glowing neon core over turbulent ocean", - store: "hosted", - num_images: 2, - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // NOTE: only supported by v0 - node: new GenerativeEditImage({ - image_uri: "https://media.substrate.run/docs-seurat.jpg", - mask_image_uri: "https://media.substrate.run/spiral-logo.jpeg", - prompt: - "detailed cyberpunk anime characters in a futuristic city park by a pond at night, neon lights, dark noir cinematic HD", - store: "hosted", - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // NOTE: only supported by v0 - node: new MultiGenerativeEditImage({ - image_uri: "https://media.substrate.run/docs-klimt-park.jpg", - mask_image_uri: "https://media.substrate.run/spiral-logo.jpeg", - prompt: - "large tropical colorful bright anime birds in a dark jungle full of vines, high resolution", - num_images: 2, - store: "hosted", - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // NOTE: only supported by v0 - node: new 
StableDiffusionXL({ - prompt: - "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", - store: "hosted", - guidance_scale: 20, - num_images: 2, - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // NOTE: only supported by v0 - node: new StableDiffusionXLLightning({ - prompt: - "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", - store: "hosted", - num_images: 2, - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // NOTE: only supported by v0 - node: new StableDiffusionXLInpaint({ - image_uri: "https://media.substrate.run/docs-klimt-park.jpg", - mask_image_uri: "https://media.substrate.run/spiral-logo.jpeg", - prompt: - "large tropical colorful bright anime birds in a dark jungle full of vines, high resolution", - num_images: 2, - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // FIXME: Running into the following error, this node does not seem operational yet - // error: cannot reshape tensor of 0 elements into shape [0, -1, 1, 512] because the unspecified dimension size -1 can be any value and is ambiguous - node: new StableDiffusionXLControlNet({ - image_uri: "https://media.substrate.run/spiral-logo.jpeg", - prompt: - "the futuristic solarpunk city of atlantis at sunset, cinematic bokeh HD", - control_method: "illusion", - num_images: 2, - store: "hosted", - }), - envs: [], - }, - { - // NOTE: only supported by v0 - node: new StableDiffusionXLIPAdapter({ - prompt: - "A blue and white painting of a large wave with a boat in the middle", - image_prompt_uri: "https://guides.substrate.run/hokusai.jpeg", - store: "hosted", - num_images: 2, - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // FIXME: As far as I can tell this is based on BigLama, which isn't yet operational - node: new FillMask({ - image_uri: "https://media.substrate.run/docs-klimt-park.jpg", - mask_image_uri: "https://media.substrate.run/spiral-logo.jpeg", - store: "hosted", - }), - envs: [], - }, - { - node: new EmbedText({ - 
text: "Your text to embed", - collection_name: VECTOR_STORE, - }), - envs: ALL_ENVS, - }, - { - node: new MultiEmbedText({ - items: [ - { - text: "Some text", + new FindOrCreateVectorStore({ + collection_name: VECTOR_STORE, + model: "jina-v2", + }), + new ComputeText({ + prompt: "Who is Don Quixote?", + temperature: 0.4, + }), + new MultiComputeText({ + prompt: "Who is Don Quixote?", + num_choices: 2, + temperature: 0.4, + }), + new ComputeJSON({ + prompt: "Who is Don Quixote?", + json_schema: { + title: "Person", + type: "object", + properties: { + firstName: { + type: "string", + description: "The person's first name.", }, - { - text: "Other text", + lastName: { + type: "string", + description: "The person's last name.", }, - ], - collection_name: VECTOR_STORE, - }), - envs: ALL_ENVS, - }, - { - node: new EmbedImage({ - image_uri: "https://media.substrate.run/docs-fuji-red.jpg", - }), - envs: ALL_ENVS, - }, - { - // FIXME: This one works without the Vector Store. Presumably because the VectorStore only supports Jina and Embed Image relies on CLIP - node: new MultiEmbedImage({ - items: [ - { - image_uri: "https://media.substrate.run/docs-fuji-red.jpg", + occupation: { + type: "string", + description: "The person's occupation.", }, - { - image_uri: "https://media.substrate.run/docs-fuji-blue.jpg", - }, - ], - // store: VECTOR_STORE, - }), - envs: ALL_ENVS, - }, - { - // FIXME: This mostly works, but I'm running into issues using VectorStore here. - // It seems to only fail for me on staging v1 (maybe some state problem there?) 
- node: new JinaV2({ - items: [ - { - text: "Some text", - }, - { - text: "Other text", - }, - ], - // store: VECTOR_STORE, - }), - envs: ALL_ENVS, - }, - { - node: new CLIP({ - items: [ - { - image_uri: "https://media.substrate.run/docs-fuji-red.jpg", + }, + }, + temperature: 0.4, + max_tokens: 300, + }), + new MultiComputeJSON({ + prompt: "Who is Don Quixote?", + json_schema: { + title: "Person", + type: "object", + properties: { + firstName: { + type: "string", + description: "The person's first name.", }, - { - image_uri: "https://media.substrate.run/docs-fuji-blue.jpg", + lastName: { + type: "string", + description: "The person's last name.", }, - ], - }), - envs: ALL_ENVS, - }, - { - // NOTE: only supported by v1 - node: new ListVectorStores({}), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - // NOTE: only supported by v1 - node: new QueryVectorStore({ - collection_name: VECTOR_STORE, - model: "jina-v2", - query_strings: ["first_comment_body", "second_comment_body"], - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - // NOTE: only supported by v1 - node: new FetchVectors({ - collection_name: VECTOR_STORE, - model: "jina-v2", - ids: ["bar", "baz"], - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - // NOTE: only supported by v1 - node: new UpdateVectors({ - collection_name: VECTOR_STORE, - model: "jina-v2", - vectors: [ - { - id: "bar", - vector: [0.1, -1.5], - metadata: { title: "new_title" }, + occupation: { + type: "string", + description: "The person's occupation.", }, - { - id: "baz", - vector: [-0.05, 1.01], - metadata: { - title: "title", - }, + }, + }, + num_choices: 2, + temperature: 0.4, + max_tokens: 100, + }), + new Mistral7BInstruct({ + prompt: "Who is Don Quixote?", + num_choices: 2, + temperature: 0.5, + max_tokens: 100, + }), + new Firellava13B({ + prompt: "what are these paintings of and who made them?", + image_uris: [ + "https://media.substrate.run/docs-fuji-red.jpg", + "https://media.substrate.run/docs-fuji-blue.jpg", + ], + }), + 
new GenerateImage({ + prompt: + "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", + store: "hosted", + }), + new MultiGenerateImage({ + prompt: + "hokusai futuristic supercell spiral cloud with glowing neon core over turbulent ocean", + store: "hosted", + num_images: 2, + }), + new InpaintImage({ + image_uri: "https://media.substrate.run/docs-seurat.jpg", + mask_image_uri: "https://media.substrate.run/spiral-logo.jpeg", + prompt: + "detailed cyberpunk anime characters in a futuristic city park by a pond at night, neon lights, dark noir cinematic HD", + store: "hosted", + }), + new MultiInpaintImage({ + image_uri: "https://media.substrate.run/docs-klimt-park.jpg", + mask_image_uri: "https://media.substrate.run/spiral-logo.jpeg", + prompt: + "large tropical colorful bright anime birds in a dark jungle full of vines, high resolution", + num_images: 2, + store: "hosted", + }), + new StableDiffusionXLLightning({ + prompt: + "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", + store: "hosted", + num_images: 2, + }), + new StableDiffusionXLInpaint({ + image_uri: "https://media.substrate.run/docs-klimt-park.jpg", + mask_image_uri: "https://media.substrate.run/spiral-logo.jpeg", + prompt: + "large tropical colorful bright anime birds in a dark jungle full of vines, high resolution", + num_images: 2, + }), + new EraseImage({ + image_uri: "https://media.substrate.run/docs-klimt-park.jpg", + mask_image_uri: "https://media.substrate.run/spiral-logo.jpeg", + store: "hosted", + }), + new EmbedText({ + text: "Your text to embed", + collection_name: VECTOR_STORE, + }), + new MultiEmbedText({ + items: [ + { + text: "Some text", + }, + { + text: "Other text", + }, + ], + collection_name: VECTOR_STORE, + }), + new EmbedImage({ + image_uri: "https://media.substrate.run/docs-fuji-red.jpg", + }), + new MultiEmbedImage({ + items: [ + { + image_uri: "https://media.substrate.run/docs-fuji-red.jpg", + }, + { + image_uri: 
"https://media.substrate.run/docs-fuji-blue.jpg", + }, + ], + // store: VECTOR_STORE, + }), + new JinaV2({ + items: [ + { + text: "Some text", + }, + { + text: "Other text", + }, + ], + // store: VECTOR_STORE, + }), + new CLIP({ + items: [ + { + image_uri: "https://media.substrate.run/docs-fuji-red.jpg", + }, + { + image_uri: "https://media.substrate.run/docs-fuji-blue.jpg", + }, + ], + }), + new ListVectorStores({}), + new QueryVectorStore({ + collection_name: VECTOR_STORE, + model: "jina-v2", + query_strings: ["first_comment_body", "second_comment_body"], + }), + new FetchVectors({ + collection_name: VECTOR_STORE, + model: "jina-v2", + ids: ["bar", "baz"], + }), + new UpdateVectors({ + collection_name: VECTOR_STORE, + model: "jina-v2", + vectors: [ + { + id: "bar", + vector: [0.1, -1.5], + metadata: { title: "new_title" }, + }, + { + id: "baz", + vector: [-0.05, 1.01], + metadata: { + title: "title", }, - ], - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - // NOTE: only supported by v1 - node: new DeleteVectors({ - collection_name: VECTOR_STORE, - model: "jina-v2", - ids: ["bar", "baz"], - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - // NOTE: only supported by v0 - node: new TranscribeMedia({ - audio_uri: "https://media.substrate.run/dfw-10m.mp3", - prompt: - "David Foster Wallace interviewed about US culture, and Infinite Jest", - segment: true, - align: true, - diarize: true, - }), - envs: [STAGING_V0, PRODUCTION_V0], - }, - { - // FIXME: it looks like this should work in v0 and v1, but I'm seeing errors here from the server and haven't - // been able to track down any server side errors to understand what the problem is. 
- node: new GenerateSpeech({ - text: "Substrate: an underlying substance or layer.", - store: "hosted", - }), - envs: [STAGING_V0, STAGING_V1], - }, - { - // FIXME: it looks like this should work for v1, but doesn't yet - node: new XTTSV2({ - text: "Substrate: an underlying substance or layer.", - audio_uri: "https://media.substrate.run/docs-speaker.wav", - store: "hosted", - }), - envs: [STAGING_V0, STAGING_V1], - }, - { - // FIXME: it looks like this should work for v1, but doesn't yet - node: new RemoveBackground({ - image_uri: "https://media.substrate.run/docs-seurat.jpg", - background_color: "002244", - store: "hosted", - }), - envs: [STAGING_V0, STAGING_V1], - }, - { - // FIXME: it looks like this should work for v1, but doesn't yet - node: new UpscaleImage({ - image_uri: "https://media.substrate.run/docs-seurat.jpg", - store: "hosted", - }), - envs: [STAGING_V0, STAGING_V1], - }, - { - node: new SegmentUnderPoint({ - image_uri: "https://media.substrate.run/docs-vg-bedroom.jpg", - point: { + }, + ], + }), + new DeleteVectors({ + collection_name: VECTOR_STORE, + model: "jina-v2", + ids: ["bar", "baz"], + }), + new TranscribeSpeech({ + audio_uri: "https://media.substrate.run/dfw-10m.mp3", + prompt: + "David Foster Wallace interviewed about US culture, and Infinite Jest", + segment: true, + align: true, + diarize: true, + }), + new GenerateSpeech({ + text: "Substrate: an underlying substance or layer.", + store: "hosted", + }), + new RemoveBackground({ + image_uri: "https://media.substrate.run/docs-seurat.jpg", + background_color: "002244", + store: "hosted", + }), + new UpscaleImage({ + prompt: "high resolution detailed spiral shell", + image_uri: "https://media.substrate.run/docs-shell-emoji.jpg", + store: "hosted", + }), + new SegmentUnderPoint({ + image_uri: "https://media.substrate.run/docs-vg-bedroom.jpg", + point: { + x: 100, + y: 200, + }, + store: "hosted", + }), + new SegmentAnything({ + image_uri: "https://media.substrate.run/docs-vg-bedroom.jpg", + 
point_prompts: [ + { x: 100, y: 200, }, - store: "hosted", - }), - envs: ALL_ENVS, - }, - { - node: new SegmentAnything({ - image_uri: "https://media.substrate.run/docs-vg-bedroom.jpg", - point_prompts: [ - { - x: 100, - y: 200, - }, - ], - // store: "hosted", // FIXME: not working yet - }), - envs: ALL_ENVS, - }, - { - node: new DeleteVectorStore({ - collection_name: VECTOR_STORE, - model: "jina-v2", - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - node: new Mixtral8x7BInstruct({ - prompt: "what does quixotic mean?", - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - node: new Llama3Instruct70B({ - prompt: "what does quixotic mean?", - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - node: new Llama3Instruct8B({ - prompt: "what does quixotic mean?", - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, - { - node: new RunPython({ - code: "import numpy as np; print(SB_IN['foo']); SB_OUT['result']=np.sum([1,2]).item()", - input: { - foo: "bar", - }, - pip_install: ["numpy"], - }), - envs: [STAGING_V1, PRODUCTION_V1], - }, + ], + // store: "hosted", // FIXME: not working yet + }), + new DeleteVectorStore({ + collection_name: VECTOR_STORE, + model: "jina-v2", + }), + new Mixtral8x7BInstruct({ + prompt: "what does quixotic mean?", + }), + new Llama3Instruct70B({ + prompt: "what does quixotic mean?", + }), + new Llama3Instruct8B({ + prompt: "what does quixotic mean?", + }), ]; const noColor = process.argv.includes("--no-color"); @@ -555,23 +341,10 @@ const measure = async (fn: any): Promise => { async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - for (let { node, envs } of examples) { - // const except = [] - // if (except.includes(node.node)) continue; - - // const only = ["GenerateJSON"]; - // if (!only.includes(node.node)) continue; - - if (envs.length === 0) { - warn(node.node, "Not enabled for any env."); - } - - for (let env of envs) { - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: env.url.value, 
- }); - const tag = `[${env.url.name}:${env.backend.value}]`; + for (let node of examples) { + for (let baseUrl of ALL_ENVS) { + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY, baseUrl }); + const tag = `[${baseUrl}]`; // NOTE: measure doesn't throw const res = await measure(substrate.run(node)); diff --git a/examples/large-run.ts b/examples/large-run.ts index 2fc0c0b..63d04aa 100755 --- a/examples/large-run.ts +++ b/examples/large-run.ts @@ -1,19 +1,16 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateText } from "substrate"; +import { Substrate, ComputeText } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: "https://api-staging.substrate.run", - }); + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); let nodes = []; let prompt: any = "once upon a time..."; for (let i = 0; i < 50; i++) { - const node = new GenerateText({ prompt }); + const node = new ComputeText({ prompt }); nodes.push(node); prompt = node.future.text.concat(" and then"); } diff --git a/examples/run-python.ts b/examples/run-python.ts deleted file mode 100755 index 354f07d..0000000 --- a/examples/run-python.ts +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env -S npx ts-node --transpileOnly - -import { Substrate, RunPython } from "substrate"; - -async function main() { - const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - - const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - - const node = new RunPython({ - code: "import numpy as np; print(SB_IN['foo']); SB_OUT['result']=np.sum([1,2]).item()", - input: { - foo: "bar", - }, - pip_install: ["numpy"], - }); - - const res = await substrate.run(node); - - console.log(res.get(node)); -} -main(); diff --git a/examples/streaming/nextjs-multiple-nodes/app/api/this-or-that/route.ts b/examples/streaming/nextjs-multiple-nodes/app/api/this-or-that/route.ts 
index 1f543ba..d22497d 100644 --- a/examples/streaming/nextjs-multiple-nodes/app/api/this-or-that/route.ts +++ b/examples/streaming/nextjs-multiple-nodes/app/api/this-or-that/route.ts @@ -2,7 +2,7 @@ import { z } from "zod"; import zodToJsonSchema from "zod-to-json-schema"; -import { Substrate, GenerateJSON, sb } from "substrate"; +import { Substrate, ComputeJSON, sb } from "substrate"; const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]!; @@ -16,7 +16,7 @@ function extractThisAndThat(inputText: string) { }) .describe("Two items that are being compared"); - return new GenerateJSON({ + return new ComputeJSON({ prompt: ` === Instructions Examine the following input text and extract the two things that are being compared. @@ -45,7 +45,7 @@ function voter(about: string, items: any) { "Your vote on which item you prefer and how strongly you feel about it", ); - return new GenerateJSON( + return new ComputeJSON( { prompt: sb.interpolate` === About you diff --git a/examples/string-concat.ts b/examples/string-concat.ts index bd1dfd0..7e57dfa 100755 --- a/examples/string-concat.ts +++ b/examples/string-concat.ts @@ -1,22 +1,19 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateText, sb } from "substrate"; +import { Substrate, ComputeText, sb } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: "https://api-staging.substrate.run", - }); + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); - const a = new GenerateText({ + const a = new ComputeText({ prompt: "name a random capital city: , ", }); const concatenated = sb.concat("tell me about visiting ", a.future.text); - const b = new GenerateText({ prompt: concatenated }); + const b = new ComputeText({ prompt: concatenated }); const res = await substrate.run(a, b); diff --git a/examples/string-interpolation.ts b/examples/string-interpolation.ts index 
3695c90..1aefed4 100755 --- a/examples/string-interpolation.ts +++ b/examples/string-interpolation.ts @@ -1,23 +1,20 @@ #!/usr/bin/env -S npx ts-node --transpileOnly -import { Substrate, GenerateText, sb } from "substrate"; +import { Substrate, ComputeText, sb } from "substrate"; async function main() { const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"]; - const substrate = new Substrate({ - apiKey: SUBSTRATE_API_KEY, - baseUrl: "https://api-staging.substrate.run", - }); + const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY }); const concise = "(just give me the number, no punctuation, no empty spaces, no other text)"; - const a = new GenerateText({ + const a = new ComputeText({ prompt: `pick a random number between 1 and 100 ${concise}`, }); - const b = new GenerateText({ + const b = new ComputeText({ prompt: sb.interpolate`double the following number: ${a.future.text} ${concise}`, }); diff --git a/examples/vector-store.ts b/examples/vector-store.ts index 7ee6d75..385a2ba 100755 --- a/examples/vector-store.ts +++ b/examples/vector-store.ts @@ -5,7 +5,7 @@ import { QueryVectorStore, ListVectorStores, JinaV2, - CreateVectorStore, + FindOrCreateVectorStore, DeleteVectorStore, FetchVectors, UpdateVectors, @@ -19,7 +19,7 @@ async function main() { baseUrl: "https://api-staging.substrate.run", }); - const create = new CreateVectorStore({ + const create = new FindOrCreateVectorStore({ collection_name: "vibes", model: "jina-v2", }); diff --git a/package-lock.json b/package-lock.json index 3b69dcc..ce80a99 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "substrate", - "version": "120240612.0.0", + "version": "120240617.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "substrate", - "version": "120240612.0.0", + "version": "120240617.0.0", "license": "MIT", "dependencies": { "@types/node-fetch": "^2.6.11", diff --git a/package.json b/package.json index 75f301c..65d5183 100644 --- a/package.json +++ 
b/package.json @@ -1,6 +1,6 @@ { "name": "substrate", - "version": "120240612.0.0", + "version": "120240617.0.0", "description": "The official SDK for the Substrate API", "repository": { "type": "git", diff --git a/src/GEN_VERSION b/src/GEN_VERSION index 9cb385b..28ffc7a 100644 --- a/src/GEN_VERSION +++ b/src/GEN_VERSION @@ -1 +1 @@ -20240612.20240613 \ No newline at end of file +20240617.20240620 \ No newline at end of file diff --git a/src/Nodes.ts b/src/Nodes.ts index 0b73579..2ee1ed5 100644 --- a/src/Nodes.ts +++ b/src/Nodes.ts @@ -1,7 +1,7 @@ /** * 𐃏 Substrate * @generated file - * 20240612.20240613 + * 20240617.20240620 */ import * as OpenAPI from "substrate/OpenAPI"; @@ -46,87 +46,87 @@ type FutureExpandAny = T extends (infer U)[][] export class ExperimentalInArgs extends FutureAnyObject {} export class ExperimentalOutOutput extends FutureAnyObject {} /** Image prompts. */ -export class GenerateTextInImageUris extends FutureArray { +export class ComputeTextInImageUris extends FutureArray { /** Returns `FutureString` at given index. */ override at(index: number) { return new FutureString(this._directive.next(index)); } - /** Returns the result for `GenerateTextInImageUris` once it's node has been run. */ + /** Returns the result for `ComputeTextInImageUris` once it's node has been run. */ protected override async result(): Promise { return super.result() as Promise; } } -export class GenerateTextInImageUrisItem extends FutureString {} -export class GenerateJSONInJsonSchema extends FutureAnyObject {} -export class GenerateJSONOutJsonObject extends FutureAnyObject {} +export class ComputeTextInImageUrisItem extends FutureString {} +export class ComputeJSONInJsonSchema extends FutureAnyObject {} +export class ComputeJSONOutJsonObject extends FutureAnyObject {} /** Response choices. */ -export class MultiGenerateTextOutChoices extends FutureArray { - /** Returns `GenerateTextOut` at given index. 
*/ +export class MultiComputeTextOutChoices extends FutureArray { + /** Returns `ComputeTextOut` at given index. */ override at(index: number) { - return new GenerateTextOut(this._directive.next(index)); + return new ComputeTextOut(this._directive.next(index)); } - /** Returns the result for `MultiGenerateTextOutChoices` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** Returns the result for `MultiComputeTextOutChoices` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } /** Batch input prompts. */ -export class BatchGenerateTextInPrompts extends FutureArray { +export class BatchComputeTextInPrompts extends FutureArray { /** Returns `FutureString` at given index. */ override at(index: number) { return new FutureString(this._directive.next(index)); } - /** Returns the result for `BatchGenerateTextInPrompts` once it's node has been run. */ + /** Returns the result for `BatchComputeTextInPrompts` once it's node has been run. */ protected override async result(): Promise { return super.result() as Promise; } } -export class BatchGenerateTextInPromptsItem extends FutureString {} +export class BatchComputeTextInPromptsItem extends FutureString {} /** Batch outputs. */ -export class BatchGenerateTextOutOutputs extends FutureArray { - /** Returns `GenerateTextOut` at given index. */ +export class BatchComputeTextOutOutputs extends FutureArray { + /** Returns `ComputeTextOut` at given index. */ override at(index: number) { - return new GenerateTextOut(this._directive.next(index)); + return new ComputeTextOut(this._directive.next(index)); } - /** Returns the result for `BatchGenerateTextOutOutputs` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** Returns the result for `BatchComputeTextOutOutputs` once it's node has been run. 
*/ + protected override async result(): Promise { + return super.result() as Promise; } } -export class MultiGenerateJSONInJsonSchema extends FutureAnyObject {} +export class MultiComputeJSONInJsonSchema extends FutureAnyObject {} /** Response choices. */ -export class MultiGenerateJSONOutChoices extends FutureArray { - /** Returns `GenerateJSONOut` at given index. */ +export class MultiComputeJSONOutChoices extends FutureArray { + /** Returns `ComputeJSONOut` at given index. */ override at(index: number) { - return new GenerateJSONOut(this._directive.next(index)); + return new ComputeJSONOut(this._directive.next(index)); } - /** Returns the result for `MultiGenerateJSONOutChoices` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** Returns the result for `MultiComputeJSONOutChoices` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } /** Batch input prompts. */ -export class BatchGenerateJSONInPrompts extends FutureArray { +export class BatchComputeJSONInPrompts extends FutureArray { /** Returns `FutureString` at given index. */ override at(index: number) { return new FutureString(this._directive.next(index)); } - /** Returns the result for `BatchGenerateJSONInPrompts` once it's node has been run. */ + /** Returns the result for `BatchComputeJSONInPrompts` once it's node has been run. */ protected override async result(): Promise { return super.result() as Promise; } } -export class BatchGenerateJSONInPromptsItem extends FutureString {} -export class BatchGenerateJSONInJsonSchema extends FutureAnyObject {} +export class BatchComputeJSONInPromptsItem extends FutureString {} +export class BatchComputeJSONInJsonSchema extends FutureAnyObject {} /** Batch outputs. */ -export class BatchGenerateJSONOutOutputs extends FutureArray { - /** Returns `GenerateJSONOut` at given index. 
*/ +export class BatchComputeJSONOutOutputs extends FutureArray { + /** Returns `ComputeJSONOut` at given index. */ override at(index: number) { - return new GenerateJSONOut(this._directive.next(index)); + return new ComputeJSONOut(this._directive.next(index)); } - /** Returns the result for `BatchGenerateJSONOutOutputs` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** Returns the result for `BatchComputeJSONOutOutputs` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } export class Mistral7BInstructInJsonSchema extends FutureAnyObject {} @@ -757,8 +757,8 @@ export class ExperimentalOut extends FutureObject { return super.result() as Promise; } } -/** GenerateTextIn */ -export class GenerateTextIn extends FutureObject { +/** ComputeTextIn */ +export class ComputeTextIn extends FutureObject { /** Input prompt. */ get prompt() { return new FutureString(this._directive.next("prompt")); @@ -766,7 +766,7 @@ export class GenerateTextIn extends FutureObject { /** (Optional) Image prompts. */ get image_uris() { - return new GenerateTextInImageUris(this._directive.next("image_uris")); + return new ComputeTextInImageUris(this._directive.next("image_uris")); } /** (Optional) Sampling temperature to use. Higher values make the output more random, lower values make the output more deterministic. */ get temperature() { @@ -780,24 +780,24 @@ export class GenerateTextIn extends FutureObject { get model() { return new FutureString(this._directive.next("model")); } - /** returns the result for `GenerateTextIn` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `ComputeTextIn` once it's node has been run. 
*/ + protected override async result(): Promise { + return super.result() as Promise; } } -/** GenerateTextOut */ -export class GenerateTextOut extends FutureObject { +/** ComputeTextOut */ +export class ComputeTextOut extends FutureObject { /** Text response. */ get text() { return new FutureString(this._directive.next("text")); } - /** returns the result for `GenerateTextOut` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `ComputeTextOut` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** GenerateJSONIn */ -export class GenerateJSONIn extends FutureObject { +/** ComputeJSONIn */ +export class ComputeJSONIn extends FutureObject { /** Input prompt. */ get prompt() { return new FutureString(this._directive.next("prompt")); @@ -818,13 +818,13 @@ export class GenerateJSONIn extends FutureObject { get model() { return new FutureString(this._directive.next("model")); } - /** returns the result for `GenerateJSONIn` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `ComputeJSONIn` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** GenerateJSONOut */ -export class GenerateJSONOut extends FutureObject { +/** ComputeJSONOut */ +export class ComputeJSONOut extends FutureObject { /** JSON response. */ get json_object() { return new FutureAnyObject(this._directive.next("json_object")); @@ -833,13 +833,13 @@ export class GenerateJSONOut extends FutureObject { get text() { return new FutureString(this._directive.next("text")); } - /** returns the result for `GenerateJSONOut` once it's node has been run. 
*/ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `ComputeJSONOut` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** MultiGenerateTextIn */ -export class MultiGenerateTextIn extends FutureObject { +/** MultiComputeTextIn */ +export class MultiComputeTextIn extends FutureObject { /** Input prompt. */ get prompt() { return new FutureString(this._directive.next("prompt")); @@ -860,27 +860,27 @@ export class MultiGenerateTextIn extends FutureObject { get model() { return new FutureString(this._directive.next("model")); } - /** returns the result for `MultiGenerateTextIn` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `MultiComputeTextIn` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** MultiGenerateTextOut */ -export class MultiGenerateTextOut extends FutureObject { +/** MultiComputeTextOut */ +export class MultiComputeTextOut extends FutureObject { /** Response choices. */ get choices() { - return new MultiGenerateTextOutChoices(this._directive.next("choices")); + return new MultiComputeTextOutChoices(this._directive.next("choices")); } - /** returns the result for `MultiGenerateTextOut` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `MultiComputeTextOut` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** BatchGenerateTextIn */ -export class BatchGenerateTextIn extends FutureObject { +/** BatchComputeTextIn */ +export class BatchComputeTextIn extends FutureObject { /** Batch input prompts. 
*/ get prompts() { - return new BatchGenerateTextInPrompts(this._directive.next("prompts")); + return new BatchComputeTextInPrompts(this._directive.next("prompts")); } /** (Optional) Sampling temperature to use. Higher values make the output more random, lower values make the output more deterministic. */ get temperature() { @@ -894,24 +894,24 @@ export class BatchGenerateTextIn extends FutureObject { get model() { return new FutureString(this._directive.next("model")); } - /** returns the result for `BatchGenerateTextIn` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `BatchComputeTextIn` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** BatchGenerateTextOut */ -export class BatchGenerateTextOut extends FutureObject { +/** BatchComputeTextOut */ +export class BatchComputeTextOut extends FutureObject { /** Batch outputs. */ get outputs() { - return new BatchGenerateTextOutOutputs(this._directive.next("outputs")); + return new BatchComputeTextOutOutputs(this._directive.next("outputs")); } - /** returns the result for `BatchGenerateTextOut` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `BatchComputeTextOut` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** MultiGenerateJSONIn */ -export class MultiGenerateJSONIn extends FutureObject { +/** MultiComputeJSONIn */ +export class MultiComputeJSONIn extends FutureObject { /** Input prompt. 
*/ get prompt() { return new FutureString(this._directive.next("prompt")); @@ -936,27 +936,27 @@ export class MultiGenerateJSONIn extends FutureObject { get model() { return new FutureString(this._directive.next("model")); } - /** returns the result for `MultiGenerateJSONIn` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `MultiComputeJSONIn` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** MultiGenerateJSONOut */ -export class MultiGenerateJSONOut extends FutureObject { +/** MultiComputeJSONOut */ +export class MultiComputeJSONOut extends FutureObject { /** Response choices. */ get choices() { - return new MultiGenerateJSONOutChoices(this._directive.next("choices")); + return new MultiComputeJSONOutChoices(this._directive.next("choices")); } - /** returns the result for `MultiGenerateJSONOut` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `MultiComputeJSONOut` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** BatchGenerateJSONIn */ -export class BatchGenerateJSONIn extends FutureObject { +/** BatchComputeJSONIn */ +export class BatchComputeJSONIn extends FutureObject { /** Batch input prompts. */ get prompts() { - return new BatchGenerateJSONInPrompts(this._directive.next("prompts")); + return new BatchComputeJSONInPrompts(this._directive.next("prompts")); } /** JSON schema to guide `json_object` response. */ get json_schema() { @@ -974,20 +974,20 @@ export class BatchGenerateJSONIn extends FutureObject { get model() { return new FutureString(this._directive.next("model")); } - /** returns the result for `BatchGenerateJSONIn` once it's node has been run. 
*/ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `BatchComputeJSONIn` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } -/** BatchGenerateJSONOut */ -export class BatchGenerateJSONOut extends FutureObject { +/** BatchComputeJSONOut */ +export class BatchComputeJSONOut extends FutureObject { /** Batch outputs. */ get outputs() { - return new BatchGenerateJSONOutOutputs(this._directive.next("outputs")); + return new BatchComputeJSONOutOutputs(this._directive.next("outputs")); } - /** returns the result for `BatchGenerateJSONOut` once it's node has been run. */ - protected override async result(): Promise { - return super.result() as Promise; + /** returns the result for `BatchComputeJSONOut` once it's node has been run. */ + protected override async result(): Promise { + return super.result() as Promise; } } /** Mistral7BInstructIn */ @@ -2840,37 +2840,37 @@ export class Experimental extends Node { return super.output() as OpenAPI.components["schemas"]["ExperimentalOut"]; } } -export namespace GenerateText { +export namespace ComputeText { /** - * GenerateText Input - * https://www.substrate.run/nodes#GenerateText + * ComputeText Input + * https://www.substrate.run/nodes#ComputeText */ export type Input = FutureExpandAny< - OpenAPI.components["schemas"]["GenerateTextIn"] + OpenAPI.components["schemas"]["ComputeTextIn"] >; /** - * GenerateText Output - * https://www.substrate.run/nodes#GenerateText + * ComputeText Output + * https://www.substrate.run/nodes#ComputeText */ - export type Output = OpenAPI.components["schemas"]["GenerateTextOut"]; + export type Output = OpenAPI.components["schemas"]["ComputeTextOut"]; } /** - * Generate text using a language model. + * Compute text using a language model. 
* - * https://www.substrate.run/nodes#GenerateText + * https://www.substrate.run/nodes#ComputeText */ -export class GenerateText extends Node { +export class ComputeText extends Node { /** * Input arguments: `prompt`, `image_uris` (optional), `temperature` (optional), `max_tokens` (optional), `model` (optional) * * Output fields: `text` * - * https://www.substrate.run/nodes#GenerateText + * https://www.substrate.run/nodes#ComputeText */ constructor( - args: FutureExpandAny, + args: FutureExpandAny, options?: Options, ) { super(args, options); @@ -2881,14 +2881,14 @@ export class GenerateText extends Node { * * Output fields: `text` * - * https://www.substrate.run/nodes#GenerateText + * https://www.substrate.run/nodes#ComputeText */ protected override async result(): Promise< - OpenAPI.components["schemas"]["GenerateTextOut"] | undefined + OpenAPI.components["schemas"]["ComputeTextOut"] | undefined > { return Promise.resolve( this._response ? this._response.get(this) : undefined, - ) as Promise; + ) as Promise; } /** @@ -2896,47 +2896,47 @@ export class GenerateText extends Node { * * Output fields: `text` * - * https://www.substrate.run/nodes#GenerateText + * https://www.substrate.run/nodes#ComputeText */ - override get future(): GenerateTextOut { - return new GenerateTextOut(new Trace([], this)); + override get future(): ComputeTextOut { + return new ComputeTextOut(new Trace([], this)); } - protected override output(): OpenAPI.components["schemas"]["GenerateTextOut"] { - return super.output() as OpenAPI.components["schemas"]["GenerateTextOut"]; + protected override output(): OpenAPI.components["schemas"]["ComputeTextOut"] { + return super.output() as OpenAPI.components["schemas"]["ComputeTextOut"]; } } -export namespace MultiGenerateText { +export namespace MultiComputeText { /** - * MultiGenerateText Input - * https://www.substrate.run/nodes#MultiGenerateText + * MultiComputeText Input + * https://www.substrate.run/nodes#MultiComputeText */ export type Input = 
FutureExpandAny< - OpenAPI.components["schemas"]["MultiGenerateTextIn"] + OpenAPI.components["schemas"]["MultiComputeTextIn"] >; /** - * MultiGenerateText Output - * https://www.substrate.run/nodes#MultiGenerateText + * MultiComputeText Output + * https://www.substrate.run/nodes#MultiComputeText */ - export type Output = OpenAPI.components["schemas"]["MultiGenerateTextOut"]; + export type Output = OpenAPI.components["schemas"]["MultiComputeTextOut"]; } /** * Generate multiple text choices using a language model. * - * https://www.substrate.run/nodes#MultiGenerateText + * https://www.substrate.run/nodes#MultiComputeText */ -export class MultiGenerateText extends Node { +export class MultiComputeText extends Node { /** * Input arguments: `prompt`, `num_choices`, `temperature` (optional), `max_tokens` (optional), `model` (optional) * * Output fields: `choices` * - * https://www.substrate.run/nodes#MultiGenerateText + * https://www.substrate.run/nodes#MultiComputeText */ constructor( - args: FutureExpandAny, + args: FutureExpandAny, options?: Options, ) { super(args, options); @@ -2947,15 +2947,15 @@ export class MultiGenerateText extends Node { * * Output fields: `choices` * - * https://www.substrate.run/nodes#MultiGenerateText + * https://www.substrate.run/nodes#MultiComputeText */ protected override async result(): Promise< - OpenAPI.components["schemas"]["MultiGenerateTextOut"] | undefined + OpenAPI.components["schemas"]["MultiComputeTextOut"] | undefined > { return Promise.resolve( this._response ? 
this._response.get(this) : undefined, ) as Promise< - OpenAPI.components["schemas"]["MultiGenerateTextOut"] | undefined + OpenAPI.components["schemas"]["MultiComputeTextOut"] | undefined >; } @@ -2964,47 +2964,47 @@ export class MultiGenerateText extends Node { * * Output fields: `choices` * - * https://www.substrate.run/nodes#MultiGenerateText + * https://www.substrate.run/nodes#MultiComputeText */ - override get future(): MultiGenerateTextOut { - return new MultiGenerateTextOut(new Trace([], this)); + override get future(): MultiComputeTextOut { + return new MultiComputeTextOut(new Trace([], this)); } - protected override output(): OpenAPI.components["schemas"]["MultiGenerateTextOut"] { - return super.output() as OpenAPI.components["schemas"]["MultiGenerateTextOut"]; + protected override output(): OpenAPI.components["schemas"]["MultiComputeTextOut"] { + return super.output() as OpenAPI.components["schemas"]["MultiComputeTextOut"]; } } -export namespace BatchGenerateText { +export namespace BatchComputeText { /** - * BatchGenerateText Input - * https://www.substrate.run/nodes#BatchGenerateText + * BatchComputeText Input + * https://www.substrate.run/nodes#BatchComputeText */ export type Input = FutureExpandAny< - OpenAPI.components["schemas"]["BatchGenerateTextIn"] + OpenAPI.components["schemas"]["BatchComputeTextIn"] >; /** - * BatchGenerateText Output - * https://www.substrate.run/nodes#BatchGenerateText + * BatchComputeText Output + * https://www.substrate.run/nodes#BatchComputeText */ - export type Output = OpenAPI.components["schemas"]["BatchGenerateTextOut"]; + export type Output = OpenAPI.components["schemas"]["BatchComputeTextOut"]; } /** - * Generate text for multiple prompts in batch using a language model. + * Compute text for multiple prompts in batch using a language model. 
* - * https://www.substrate.run/nodes#BatchGenerateText + * https://www.substrate.run/nodes#BatchComputeText */ -export class BatchGenerateText extends Node { +export class BatchComputeText extends Node { /** * Input arguments: `prompts`, `temperature` (optional), `max_tokens` (optional), `model` (optional) * * Output fields: `outputs` * - * https://www.substrate.run/nodes#BatchGenerateText + * https://www.substrate.run/nodes#BatchComputeText */ constructor( - args: FutureExpandAny, + args: FutureExpandAny, options?: Options, ) { super(args, options); @@ -3015,15 +3015,15 @@ export class BatchGenerateText extends Node { * * Output fields: `outputs` * - * https://www.substrate.run/nodes#BatchGenerateText + * https://www.substrate.run/nodes#BatchComputeText */ protected override async result(): Promise< - OpenAPI.components["schemas"]["BatchGenerateTextOut"] | undefined + OpenAPI.components["schemas"]["BatchComputeTextOut"] | undefined > { return Promise.resolve( this._response ? this._response.get(this) : undefined, ) as Promise< - OpenAPI.components["schemas"]["BatchGenerateTextOut"] | undefined + OpenAPI.components["schemas"]["BatchComputeTextOut"] | undefined >; } @@ -3032,47 +3032,47 @@ export class BatchGenerateText extends Node { * * Output fields: `outputs` * - * https://www.substrate.run/nodes#BatchGenerateText + * https://www.substrate.run/nodes#BatchComputeText */ - override get future(): BatchGenerateTextOut { - return new BatchGenerateTextOut(new Trace([], this)); + override get future(): BatchComputeTextOut { + return new BatchComputeTextOut(new Trace([], this)); } - protected override output(): OpenAPI.components["schemas"]["BatchGenerateTextOut"] { - return super.output() as OpenAPI.components["schemas"]["BatchGenerateTextOut"]; + protected override output(): OpenAPI.components["schemas"]["BatchComputeTextOut"] { + return super.output() as OpenAPI.components["schemas"]["BatchComputeTextOut"]; } } -export namespace BatchGenerateJSON { +export namespace 
BatchComputeJSON { /** - * BatchGenerateJSON Input - * https://www.substrate.run/nodes#BatchGenerateJSON + * BatchComputeJSON Input + * https://www.substrate.run/nodes#BatchComputeJSON */ export type Input = FutureExpandAny< - OpenAPI.components["schemas"]["BatchGenerateJSONIn"] + OpenAPI.components["schemas"]["BatchComputeJSONIn"] >; /** - * BatchGenerateJSON Output - * https://www.substrate.run/nodes#BatchGenerateJSON + * BatchComputeJSON Output + * https://www.substrate.run/nodes#BatchComputeJSON */ - export type Output = OpenAPI.components["schemas"]["BatchGenerateJSONOut"]; + export type Output = OpenAPI.components["schemas"]["BatchComputeJSONOut"]; } /** - * Generate JSON for multiple prompts in batch using a language model. + * Compute JSON for multiple prompts in batch using a language model. * - * https://www.substrate.run/nodes#BatchGenerateJSON + * https://www.substrate.run/nodes#BatchComputeJSON */ -export class BatchGenerateJSON extends Node { +export class BatchComputeJSON extends Node { /** * Input arguments: `prompts`, `json_schema`, `temperature` (optional), `max_tokens` (optional), `model` (optional) * * Output fields: `outputs` * - * https://www.substrate.run/nodes#BatchGenerateJSON + * https://www.substrate.run/nodes#BatchComputeJSON */ constructor( - args: FutureExpandAny, + args: FutureExpandAny, options?: Options, ) { super(args, options); @@ -3083,15 +3083,15 @@ export class BatchGenerateJSON extends Node { * * Output fields: `outputs` * - * https://www.substrate.run/nodes#BatchGenerateJSON + * https://www.substrate.run/nodes#BatchComputeJSON */ protected override async result(): Promise< - OpenAPI.components["schemas"]["BatchGenerateJSONOut"] | undefined + OpenAPI.components["schemas"]["BatchComputeJSONOut"] | undefined > { return Promise.resolve( this._response ? 
this._response.get(this) : undefined, ) as Promise< - OpenAPI.components["schemas"]["BatchGenerateJSONOut"] | undefined + OpenAPI.components["schemas"]["BatchComputeJSONOut"] | undefined >; } @@ -3100,47 +3100,47 @@ export class BatchGenerateJSON extends Node { * * Output fields: `outputs` * - * https://www.substrate.run/nodes#BatchGenerateJSON + * https://www.substrate.run/nodes#BatchComputeJSON */ - override get future(): BatchGenerateJSONOut { - return new BatchGenerateJSONOut(new Trace([], this)); + override get future(): BatchComputeJSONOut { + return new BatchComputeJSONOut(new Trace([], this)); } - protected override output(): OpenAPI.components["schemas"]["BatchGenerateJSONOut"] { - return super.output() as OpenAPI.components["schemas"]["BatchGenerateJSONOut"]; + protected override output(): OpenAPI.components["schemas"]["BatchComputeJSONOut"] { + return super.output() as OpenAPI.components["schemas"]["BatchComputeJSONOut"]; } } -export namespace GenerateJSON { +export namespace ComputeJSON { /** - * GenerateJSON Input - * https://www.substrate.run/nodes#GenerateJSON + * ComputeJSON Input + * https://www.substrate.run/nodes#ComputeJSON */ export type Input = FutureExpandAny< - OpenAPI.components["schemas"]["GenerateJSONIn"] + OpenAPI.components["schemas"]["ComputeJSONIn"] >; /** - * GenerateJSON Output - * https://www.substrate.run/nodes#GenerateJSON + * ComputeJSON Output + * https://www.substrate.run/nodes#ComputeJSON */ - export type Output = OpenAPI.components["schemas"]["GenerateJSONOut"]; + export type Output = OpenAPI.components["schemas"]["ComputeJSONOut"]; } /** - * Generate JSON using a language model. + * Compute JSON using a language model. 
* - * https://www.substrate.run/nodes#GenerateJSON + * https://www.substrate.run/nodes#ComputeJSON */ -export class GenerateJSON extends Node { +export class ComputeJSON extends Node { /** * Input arguments: `prompt`, `json_schema`, `temperature` (optional), `max_tokens` (optional), `model` (optional) * * Output fields: `json_object` (optional), `text` (optional) * - * https://www.substrate.run/nodes#GenerateJSON + * https://www.substrate.run/nodes#ComputeJSON */ constructor( - args: FutureExpandAny, + args: FutureExpandAny, options?: Options, ) { super(args, options); @@ -3151,14 +3151,14 @@ export class GenerateJSON extends Node { * * Output fields: `json_object` (optional), `text` (optional) * - * https://www.substrate.run/nodes#GenerateJSON + * https://www.substrate.run/nodes#ComputeJSON */ protected override async result(): Promise< - OpenAPI.components["schemas"]["GenerateJSONOut"] | undefined + OpenAPI.components["schemas"]["ComputeJSONOut"] | undefined > { return Promise.resolve( this._response ? 
this._response.get(this) : undefined, - ) as Promise; + ) as Promise; } /** @@ -3166,47 +3166,47 @@ export class GenerateJSON extends Node { * * Output fields: `json_object` (optional), `text` (optional) * - * https://www.substrate.run/nodes#GenerateJSON + * https://www.substrate.run/nodes#ComputeJSON */ - override get future(): GenerateJSONOut { - return new GenerateJSONOut(new Trace([], this)); + override get future(): ComputeJSONOut { + return new ComputeJSONOut(new Trace([], this)); } - protected override output(): OpenAPI.components["schemas"]["GenerateJSONOut"] { - return super.output() as OpenAPI.components["schemas"]["GenerateJSONOut"]; + protected override output(): OpenAPI.components["schemas"]["ComputeJSONOut"] { + return super.output() as OpenAPI.components["schemas"]["ComputeJSONOut"]; } } -export namespace MultiGenerateJSON { +export namespace MultiComputeJSON { /** - * MultiGenerateJSON Input - * https://www.substrate.run/nodes#MultiGenerateJSON + * MultiComputeJSON Input + * https://www.substrate.run/nodes#MultiComputeJSON */ export type Input = FutureExpandAny< - OpenAPI.components["schemas"]["MultiGenerateJSONIn"] + OpenAPI.components["schemas"]["MultiComputeJSONIn"] >; /** - * MultiGenerateJSON Output - * https://www.substrate.run/nodes#MultiGenerateJSON + * MultiComputeJSON Output + * https://www.substrate.run/nodes#MultiComputeJSON */ - export type Output = OpenAPI.components["schemas"]["MultiGenerateJSONOut"]; + export type Output = OpenAPI.components["schemas"]["MultiComputeJSONOut"]; } /** - * Generate multiple JSON choices using a language model. + * Compute multiple JSON choices using a language model. 
* - * https://www.substrate.run/nodes#MultiGenerateJSON + * https://www.substrate.run/nodes#MultiComputeJSON */ -export class MultiGenerateJSON extends Node { +export class MultiComputeJSON extends Node { /** * Input arguments: `prompt`, `json_schema`, `num_choices`, `temperature` (optional), `max_tokens` (optional), `model` (optional) * * Output fields: `choices` * - * https://www.substrate.run/nodes#MultiGenerateJSON + * https://www.substrate.run/nodes#MultiComputeJSON */ constructor( - args: FutureExpandAny, + args: FutureExpandAny, options?: Options, ) { super(args, options); @@ -3217,15 +3217,15 @@ export class MultiGenerateJSON extends Node { * * Output fields: `choices` * - * https://www.substrate.run/nodes#MultiGenerateJSON + * https://www.substrate.run/nodes#MultiComputeJSON */ protected override async result(): Promise< - OpenAPI.components["schemas"]["MultiGenerateJSONOut"] | undefined + OpenAPI.components["schemas"]["MultiComputeJSONOut"] | undefined > { return Promise.resolve( this._response ? 
this._response.get(this) : undefined, ) as Promise< - OpenAPI.components["schemas"]["MultiGenerateJSONOut"] | undefined + OpenAPI.components["schemas"]["MultiComputeJSONOut"] | undefined >; } @@ -3234,14 +3234,14 @@ export class MultiGenerateJSON extends Node { * * Output fields: `choices` * - * https://www.substrate.run/nodes#MultiGenerateJSON + * https://www.substrate.run/nodes#MultiComputeJSON */ - override get future(): MultiGenerateJSONOut { - return new MultiGenerateJSONOut(new Trace([], this)); + override get future(): MultiComputeJSONOut { + return new MultiComputeJSONOut(new Trace([], this)); } - protected override output(): OpenAPI.components["schemas"]["MultiGenerateJSONOut"] { - return super.output() as OpenAPI.components["schemas"]["MultiGenerateJSONOut"]; + protected override output(): OpenAPI.components["schemas"]["MultiComputeJSONOut"] { + return super.output() as OpenAPI.components["schemas"]["MultiComputeJSONOut"]; } } export namespace Mistral7BInstruct { @@ -3261,7 +3261,7 @@ export namespace Mistral7BInstruct { } /** - * Generate text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b). + * Compute text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b). * * https://www.substrate.run/nodes#Mistral7BInstruct */ @@ -3329,7 +3329,7 @@ export namespace Mixtral8x7BInstruct { } /** - * Generate text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/). + * Compute text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/). * * https://www.substrate.run/nodes#Mixtral8x7BInstruct */ @@ -3399,7 +3399,7 @@ export namespace Llama3Instruct8B { } /** - * Generate text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/). + * Compute text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/). 
* * https://www.substrate.run/nodes#Llama3Instruct8B */ @@ -3467,7 +3467,7 @@ export namespace Llama3Instruct70B { } /** - * Generate text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/). + * Compute text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/). * * https://www.substrate.run/nodes#Llama3Instruct70B */ @@ -3535,7 +3535,7 @@ export namespace Firellava13B { } /** - * Generate text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model). + * Compute text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model). * * https://www.substrate.run/nodes#Firellava13B */ @@ -4289,7 +4289,7 @@ export namespace EraseImage { } /** - * Erase the masked part of an image, e.g. to 'remove' an object. + * Erase the masked part of an image, e.g. to remove an object by inpainting. * * https://www.substrate.run/nodes#EraseImage */ @@ -5413,12 +5413,12 @@ export class DeleteVectors extends Node { } export type AnyNode = | Experimental - | GenerateText - | MultiGenerateText - | BatchGenerateText - | BatchGenerateJSON - | GenerateJSON - | MultiGenerateJSON + | ComputeText + | MultiComputeText + | BatchComputeText + | BatchComputeJSON + | ComputeJSON + | MultiComputeJSON | Mistral7BInstruct | Mixtral8x7BInstruct | Llama3Instruct8B @@ -5454,18 +5454,18 @@ export type AnyNode = export type NodeOutput = T extends Experimental ? OpenAPI.components["schemas"]["ExperimentalOut"] - : T extends GenerateText - ? OpenAPI.components["schemas"]["GenerateTextOut"] - : T extends MultiGenerateText - ? OpenAPI.components["schemas"]["MultiGenerateTextOut"] - : T extends BatchGenerateText - ? OpenAPI.components["schemas"]["BatchGenerateTextOut"] - : T extends BatchGenerateJSON - ? OpenAPI.components["schemas"]["BatchGenerateJSONOut"] - : T extends GenerateJSON - ? 
OpenAPI.components["schemas"]["GenerateJSONOut"] - : T extends MultiGenerateJSON - ? OpenAPI.components["schemas"]["MultiGenerateJSONOut"] + : T extends ComputeText + ? OpenAPI.components["schemas"]["ComputeTextOut"] + : T extends MultiComputeText + ? OpenAPI.components["schemas"]["MultiComputeTextOut"] + : T extends BatchComputeText + ? OpenAPI.components["schemas"]["BatchComputeTextOut"] + : T extends BatchComputeJSON + ? OpenAPI.components["schemas"]["BatchComputeJSONOut"] + : T extends ComputeJSON + ? OpenAPI.components["schemas"]["ComputeJSONOut"] + : T extends MultiComputeJSON + ? OpenAPI.components["schemas"]["MultiComputeJSONOut"] : T extends Mistral7BInstruct ? OpenAPI.components["schemas"]["Mistral7BInstructOut"] : T extends Mixtral8x7BInstruct diff --git a/src/OpenAPI.ts b/src/OpenAPI.ts index 09a7dea..d227f83 100644 --- a/src/OpenAPI.ts +++ b/src/OpenAPI.ts @@ -18,80 +18,80 @@ export interface paths { */ post: operations["RunPython"]; }; - "/GenerateText": { + "/ComputeText": { /** - * GenerateText - * @description Generate text using a language model. + * ComputeText + * @description Compute text using a language model. */ - post: operations["GenerateText"]; + post: operations["ComputeText"]; }; - "/MultiGenerateText": { + "/MultiComputeText": { /** - * MultiGenerateText + * MultiComputeText * @description Generate multiple text choices using a language model. */ - post: operations["MultiGenerateText"]; + post: operations["MultiComputeText"]; }; - "/BatchGenerateText": { + "/BatchComputeText": { /** - * BatchGenerateText - * @description Generate text for multiple prompts in batch using a language model. + * BatchComputeText + * @description Compute text for multiple prompts in batch using a language model. */ - post: operations["BatchGenerateText"]; + post: operations["BatchComputeText"]; }; - "/BatchGenerateJSON": { + "/BatchComputeJSON": { /** - * BatchGenerateJSON - * @description Generate JSON for multiple prompts in batch using a language model. 
+ * BatchComputeJSON + * @description Compute JSON for multiple prompts in batch using a language model. */ - post: operations["BatchGenerateJSON"]; + post: operations["BatchComputeJSON"]; }; - "/GenerateJSON": { + "/ComputeJSON": { /** - * GenerateJSON - * @description Generate JSON using a language model. + * ComputeJSON + * @description Compute JSON using a language model. */ - post: operations["GenerateJSON"]; + post: operations["ComputeJSON"]; }; - "/MultiGenerateJSON": { + "/MultiComputeJSON": { /** - * MultiGenerateJSON - * @description Generate multiple JSON choices using a language model. + * MultiComputeJSON + * @description Compute multiple JSON choices using a language model. */ - post: operations["MultiGenerateJSON"]; + post: operations["MultiComputeJSON"]; }; "/Mistral7BInstruct": { /** * Mistral7BInstruct - * @description Generate text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b). + * @description Compute text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b). */ post: operations["Mistral7BInstruct"]; }; "/Mixtral8x7BInstruct": { /** * Mixtral8x7BInstruct - * @description Generate text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/). + * @description Compute text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/). */ post: operations["Mixtral8x7BInstruct"]; }; "/Llama3Instruct8B": { /** * Llama3Instruct8B - * @description Generate text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/). + * @description Compute text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/). */ post: operations["Llama3Instruct8B"]; }; "/Llama3Instruct70B": { /** * Llama3Instruct70B - * @description Generate text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/). + * @description Compute text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/). 
*/ post: operations["Llama3Instruct70B"]; }; "/Firellava13B": { /** * Firellava13B - * @description Generate text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model). + * @description Compute text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model). */ post: operations["Firellava13B"]; }; @@ -168,7 +168,7 @@ export interface paths { "/EraseImage": { /** * EraseImage - * @description Erase the masked part of an image, e.g. to 'remove' an object. + * @description Erase the masked part of an image, e.g. to remove an object by inpainting. */ post: operations["EraseImage"]; }; @@ -296,7 +296,7 @@ export interface components { * @description The type of error returned. * @enum {string} */ - type: "api_error" | "invalid_request_error"; + type: "api_error" | "invalid_request_error" | "dependency_error"; /** @description A message providing more details about the error. */ message: string; }; @@ -345,8 +345,8 @@ export interface components { /** @description Contents of stderr if your code did not run successfully. */ stderr: string; }; - /** GenerateTextIn */ - GenerateTextIn: { + /** ComputeTextIn */ + ComputeTextIn: { /** @description Input prompt. */ prompt: string; /** @description Image prompts. */ @@ -371,13 +371,13 @@ export interface components { | "Llama3Instruct70B" | "Firellava13B"; }; - /** GenerateTextOut */ - GenerateTextOut: { + /** ComputeTextOut */ + ComputeTextOut: { /** @description Text response. */ text: string; }; - /** GenerateJSONIn */ - GenerateJSONIn: { + /** ComputeJSONIn */ + ComputeJSONIn: { /** @description Input prompt. */ prompt: string; /** @description JSON schema to guide `json_object` response. 
*/ @@ -399,8 +399,8 @@ export interface components { */ model?: "Mistral7BInstruct" | "Mixtral8x7BInstruct" | "Llama3Instruct8B"; }; - /** GenerateJSONOut */ - GenerateJSONOut: { + /** ComputeJSONOut */ + ComputeJSONOut: { /** @description JSON response. */ json_object?: { [key: string]: unknown; @@ -408,8 +408,8 @@ export interface components { /** @description If the model output could not be parsed to JSON, this is the raw text output. */ text?: string; }; - /** MultiGenerateTextIn */ - MultiGenerateTextIn: { + /** MultiComputeTextIn */ + MultiComputeTextIn: { /** @description Input prompt. */ prompt: string; /** @@ -436,16 +436,16 @@ export interface components { | "Llama3Instruct8B" | "Llama3Instruct70B"; }; - /** MultiGenerateTextOut */ - MultiGenerateTextOut: { + /** MultiComputeTextOut */ + MultiComputeTextOut: { /** @description Response choices. */ choices: { /** @description Text response. */ text: string; }[]; }; - /** BatchGenerateTextIn */ - BatchGenerateTextIn: { + /** BatchComputeTextIn */ + BatchComputeTextIn: { /** @description Batch input prompts. */ prompts: string[]; /** @@ -463,16 +463,16 @@ export interface components { */ model?: "Mistral7BInstruct" | "Llama3Instruct8B"; }; - /** BatchGenerateTextOut */ - BatchGenerateTextOut: { + /** BatchComputeTextOut */ + BatchComputeTextOut: { /** @description Batch outputs. */ outputs: { /** @description Text response. */ text: string; }[]; }; - /** MultiGenerateJSONIn */ - MultiGenerateJSONIn: { + /** MultiComputeJSONIn */ + MultiComputeJSONIn: { /** @description Input prompt. */ prompt: string; /** @description JSON schema to guide `json_object` response. */ @@ -499,8 +499,8 @@ export interface components { */ model?: "Mistral7BInstruct" | "Mixtral8x7BInstruct" | "Llama3Instruct8B"; }; - /** MultiGenerateJSONOut */ - MultiGenerateJSONOut: { + /** MultiComputeJSONOut */ + MultiComputeJSONOut: { /** @description Response choices. */ choices: { /** @description JSON response. 
*/ @@ -511,8 +511,8 @@ export interface components { text?: string; }[]; }; - /** BatchGenerateJSONIn */ - BatchGenerateJSONIn: { + /** BatchComputeJSONIn */ + BatchComputeJSONIn: { /** @description Batch input prompts. */ prompts: string[]; /** @description JSON schema to guide `json_object` response. */ @@ -534,8 +534,8 @@ export interface components { */ model?: "Mistral7BInstruct" | "Llama3Instruct8B"; }; - /** BatchGenerateJSONOut */ - BatchGenerateJSONOut: { + /** BatchComputeJSONOut */ + BatchComputeJSONOut: { /** @description Batch outputs. */ outputs: { /** @description JSON response. */ @@ -2040,10 +2040,10 @@ export interface operations { }; }; /** - * GenerateText - * @description Generate text using a language model. + * ComputeText + * @description Compute text using a language model. */ - GenerateText: { + ComputeText: { requestBody?: { content: { /** @@ -2093,10 +2093,10 @@ export interface operations { }; }; /** - * MultiGenerateText + * MultiComputeText * @description Generate multiple text choices using a language model. */ - MultiGenerateText: { + MultiComputeText: { requestBody?: { content: { /** @@ -2151,10 +2151,10 @@ export interface operations { }; }; /** - * BatchGenerateText - * @description Generate text for multiple prompts in batch using a language model. + * BatchComputeText + * @description Compute text for multiple prompts in batch using a language model. */ - BatchGenerateText: { + BatchComputeText: { requestBody?: { content: { /** @@ -2202,10 +2202,10 @@ export interface operations { }; }; /** - * BatchGenerateJSON - * @description Generate JSON for multiple prompts in batch using a language model. + * BatchComputeJSON + * @description Compute JSON for multiple prompts in batch using a language model. */ - BatchGenerateJSON: { + BatchComputeJSON: { requestBody?: { content: { /** @@ -2274,10 +2274,10 @@ export interface operations { }; }; /** - * GenerateJSON - * @description Generate JSON using a language model. 
+ * ComputeJSON + * @description Compute JSON using a language model. */ - GenerateJSON: { + ComputeJSON: { requestBody?: { content: { /** @@ -2344,10 +2344,10 @@ export interface operations { }; }; /** - * MultiGenerateJSON - * @description Generate multiple JSON choices using a language model. + * MultiComputeJSON + * @description Compute multiple JSON choices using a language model. */ - MultiGenerateJSON: { + MultiComputeJSON: { requestBody?: { content: { /** @@ -2424,7 +2424,7 @@ export interface operations { }; /** * Mistral7BInstruct - * @description Generate text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b). + * @description Compute text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b). */ Mistral7BInstruct: { requestBody?: { @@ -2506,7 +2506,7 @@ export interface operations { }; /** * Mixtral8x7BInstruct - * @description Generate text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/). + * @description Compute text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/). */ Mixtral8x7BInstruct: { requestBody?: { @@ -2588,7 +2588,7 @@ export interface operations { }; /** * Llama3Instruct8B - * @description Generate text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/). + * @description Compute text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/). */ Llama3Instruct8B: { requestBody?: { @@ -2670,7 +2670,7 @@ export interface operations { }; /** * Llama3Instruct70B - * @description Generate text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/). + * @description Compute text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/). 
*/ Llama3Instruct70B: { requestBody?: { @@ -2744,7 +2744,7 @@ export interface operations { }; /** * Firellava13B - * @description Generate text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model). + * @description Compute text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model). */ Firellava13B: { requestBody?: { @@ -2955,8 +2955,8 @@ export interface operations { * "negative_prompt": "night, moon", * "num_images": 2, * "seeds": [ - * 3306990332671669000, - * 13641924104177017000 + * 330699, + * 136464 * ], * "store": "hosted" * } @@ -3022,8 +3022,8 @@ export interface operations { * "num_images": 2, * "store": "hosted", * "seeds": [ - * 16072680593433106000, - * 17203982922585030000 + * 1607280, + * 1720395 * ] * } */ @@ -3093,8 +3093,8 @@ export interface operations { * "store": "hosted", * "num_images": 2, * "seeds": [ - * 16072680593433106000, - * 17203982922585030000 + * 1607226, + * 1720395 * ] * } */ @@ -3337,7 +3337,7 @@ export interface operations { }; /** * EraseImage - * @description Erase the masked part of an image, e.g. to 'remove' an object. + * @description Erase the masked part of an image, e.g. to remove an object by inpainting. 
*/ EraseImage: { requestBody?: { diff --git a/src/index.ts b/src/index.ts index 2812247..ec8bb27 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,18 +1,18 @@ /** * 𐃏 Substrate TypeScript SDK * @generated file - * 20240612.20240613 + * 20240617.20240620 */ export { SubstrateError } from "substrate/Error"; export { Experimental, - GenerateText, - MultiGenerateText, - BatchGenerateText, - BatchGenerateJSON, - GenerateJSON, - MultiGenerateJSON, + ComputeText, + MultiComputeText, + BatchComputeText, + BatchComputeJSON, + ComputeJSON, + MultiComputeJSON, Mistral7BInstruct, Mixtral8x7BInstruct, Llama3Instruct8B, diff --git a/src/openapi.json b/src/openapi.json index e6b3c4b..a6c953c 100644 --- a/src/openapi.json +++ b/src/openapi.json @@ -2,7 +2,7 @@ "openapi": "3.1.0", "info": { "title": "Substrate API", - "version": "2024-06-12", + "version": "2024-06-17", "contact": { "email": "support@substrate.run" }, @@ -22,7 +22,7 @@ "properties": { "type": { "type": "string", - "enum": ["api_error", "invalid_request_error"], + "enum": ["api_error", "invalid_request_error", "dependency_error"], "description": "The type of error returned." 
}, "message": { @@ -121,8 +121,8 @@ }, "required": ["stdout", "stderr"] }, - "GenerateTextIn": { - "title": "GenerateTextIn", + "ComputeTextIn": { + "title": "ComputeTextIn", "type": "object", "properties": { "prompt": { @@ -166,8 +166,8 @@ }, "required": ["prompt"] }, - "GenerateTextOut": { - "title": "GenerateTextOut", + "ComputeTextOut": { + "title": "ComputeTextOut", "type": "object", "properties": { "text": { @@ -177,8 +177,8 @@ }, "required": ["text"] }, - "GenerateJSONIn": { - "title": "GenerateJSONIn", + "ComputeJSONIn": { + "title": "ComputeJSONIn", "type": "object", "properties": { "prompt": { @@ -218,8 +218,8 @@ }, "required": ["prompt", "json_schema"] }, - "GenerateJSONOut": { - "title": "GenerateJSONOut", + "ComputeJSONOut": { + "title": "ComputeJSONOut", "type": "object", "properties": { "json_object": { @@ -234,8 +234,8 @@ }, "required": [] }, - "MultiGenerateTextIn": { - "title": "MultiGenerateTextIn", + "MultiComputeTextIn": { + "title": "MultiComputeTextIn", "type": "object", "properties": { "prompt": { @@ -278,22 +278,22 @@ }, "required": ["prompt", "num_choices"] }, - "MultiGenerateTextOut": { - "title": "MultiGenerateTextOut", + "MultiComputeTextOut": { + "title": "MultiComputeTextOut", "type": "object", "properties": { "choices": { "type": "array", "description": "Response choices.", "items": { - "$ref": "#/components/schemas/GenerateTextOut" + "$ref": "#/components/schemas/ComputeTextOut" } } }, "required": ["choices"] }, - "BatchGenerateTextIn": { - "title": "BatchGenerateTextIn", + "BatchComputeTextIn": { + "title": "BatchComputeTextIn", "type": "object", "properties": { "prompts": { @@ -327,22 +327,22 @@ }, "required": ["prompts"] }, - "BatchGenerateTextOut": { - "title": "BatchGenerateTextOut", + "BatchComputeTextOut": { + "title": "BatchComputeTextOut", "type": "object", "properties": { "outputs": { "type": "array", "description": "Batch outputs.", "items": { - "$ref": "#/components/schemas/GenerateTextOut" + "$ref": 
"#/components/schemas/ComputeTextOut" } } }, "required": ["outputs"] }, - "MultiGenerateJSONIn": { - "title": "MultiGenerateJSONIn", + "MultiComputeJSONIn": { + "title": "MultiComputeJSONIn", "type": "object", "properties": { "prompt": { @@ -390,22 +390,22 @@ }, "required": ["prompt", "num_choices", "json_schema"] }, - "MultiGenerateJSONOut": { - "title": "MultiGenerateJSONOut", + "MultiComputeJSONOut": { + "title": "MultiComputeJSONOut", "type": "object", "properties": { "choices": { "type": "array", "description": "Response choices.", "items": { - "$ref": "#/components/schemas/GenerateJSONOut" + "$ref": "#/components/schemas/ComputeJSONOut" } } }, "required": ["choices"] }, - "BatchGenerateJSONIn": { - "title": "BatchGenerateJSONIn", + "BatchComputeJSONIn": { + "title": "BatchComputeJSONIn", "type": "object", "properties": { "prompts": { @@ -444,15 +444,15 @@ }, "required": ["prompts", "json_schema"] }, - "BatchGenerateJSONOut": { - "title": "BatchGenerateJSONOut", + "BatchComputeJSONOut": { + "title": "BatchComputeJSONOut", "type": "object", "properties": { "outputs": { "type": "array", "description": "Batch outputs.", "items": { - "$ref": "#/components/schemas/GenerateJSONOut" + "$ref": "#/components/schemas/ComputeJSONOut" } } }, @@ -2728,17 +2728,17 @@ } } }, - "/GenerateText": { + "/ComputeText": { "post": { - "summary": "GenerateText", - "operationId": "GenerateText", + "summary": "ComputeText", + "operationId": "ComputeText", "tags": ["category:language"], - "description": "Generate text using a language model.", + "description": "Compute text using a language model.", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenerateTextIn" + "$ref": "#/components/schemas/ComputeTextIn" }, "example": { "prompt": "Who is Don Quixote?", @@ -2754,7 +2754,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenerateTextOut" + "$ref": "#/components/schemas/ComputeTextOut" }, "example": { 
"text": "Don Quixote is a fictional character in the novel of the same name by Miguel de Cervantes." @@ -2765,17 +2765,17 @@ } } }, - "/MultiGenerateText": { + "/MultiComputeText": { "post": { - "summary": "MultiGenerateText", - "operationId": "MultiGenerateText", + "summary": "MultiComputeText", + "operationId": "MultiComputeText", "tags": ["category:language"], "description": "Generate multiple text choices using a language model.", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MultiGenerateTextIn" + "$ref": "#/components/schemas/MultiComputeTextIn" }, "example": { "prompt": "Who is Don Quixote?", @@ -2791,7 +2791,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MultiGenerateTextOut" + "$ref": "#/components/schemas/MultiComputeTextOut" }, "example": { "choices": [ @@ -2809,17 +2809,17 @@ } } }, - "/BatchGenerateText": { + "/BatchComputeText": { "post": { - "summary": "BatchGenerateText", - "operationId": "BatchGenerateText", + "summary": "BatchComputeText", + "operationId": "BatchComputeText", "tags": ["category:language"], - "description": "Generate text for multiple prompts in batch using a language model.", + "description": "Compute text for multiple prompts in batch using a language model.", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BatchGenerateTextIn" + "$ref": "#/components/schemas/BatchComputeTextIn" }, "example": { "prompts": ["Who is Don Quixote?", "Who is Sancho Panza?"], @@ -2834,7 +2834,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BatchGenerateTextOut" + "$ref": "#/components/schemas/BatchComputeTextOut" }, "example": { "outputs": [ @@ -2852,17 +2852,17 @@ } } }, - "/BatchGenerateJSON": { + "/BatchComputeJSON": { "post": { - "summary": "BatchGenerateJSON", - "operationId": "BatchGenerateJSON", + "summary": "BatchComputeJSON", + "operationId": "BatchComputeJSON", "tags": 
["category:language"], - "description": "Generate JSON for multiple prompts in batch using a language model.", + "description": "Compute JSON for multiple prompts in batch using a language model.", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BatchGenerateJSONIn" + "$ref": "#/components/schemas/BatchComputeJSONIn" }, "example": { "prompts": ["Who is Don Quixote?", "Who is Sancho Panza?"], @@ -2890,7 +2890,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BatchGenerateJSONOut" + "$ref": "#/components/schemas/BatchComputeJSONOut" }, "example": { "outputs": [ @@ -2908,17 +2908,17 @@ } } }, - "/GenerateJSON": { + "/ComputeJSON": { "post": { - "summary": "GenerateJSON", - "operationId": "GenerateJSON", + "summary": "ComputeJSON", + "operationId": "ComputeJSON", "tags": ["category:language"], - "description": "Generate JSON using a language model.", + "description": "Compute JSON using a language model.", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenerateJSONIn" + "$ref": "#/components/schemas/ComputeJSONIn" }, "example": { "prompt": "Who wrote Don Quixote?", @@ -2947,7 +2947,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GenerateJSONOut" + "$ref": "#/components/schemas/ComputeJSONOut" }, "example": { "json_object": {} @@ -2958,17 +2958,17 @@ } } }, - "/MultiGenerateJSON": { + "/MultiComputeJSON": { "post": { - "summary": "MultiGenerateJSON", - "operationId": "MultiGenerateJSON", + "summary": "MultiComputeJSON", + "operationId": "MultiComputeJSON", "tags": ["category:language"], - "description": "Generate multiple JSON choices using a language model.", + "description": "Compute multiple JSON choices using a language model.", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MultiGenerateJSONIn" + "$ref": "#/components/schemas/MultiComputeJSONIn" }, 
"example": { "prompt": "Who wrote Don Quixote?", @@ -2998,7 +2998,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MultiGenerateJSONOut" + "$ref": "#/components/schemas/MultiComputeJSONOut" }, "example": { "choices": [ @@ -3021,7 +3021,7 @@ "summary": "Mistral7BInstruct", "operationId": "Mistral7BInstruct", "tags": ["category:language", "type:low-level"], - "description": "Generate text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b).", + "description": "Compute text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b).", "requestBody": { "content": { "application/json": { @@ -3066,7 +3066,7 @@ "summary": "Mixtral8x7BInstruct", "operationId": "Mixtral8x7BInstruct", "tags": ["category:language", "type:low-level"], - "description": "Generate text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/).", + "description": "Compute text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/).", "requestBody": { "content": { "application/json": { @@ -3111,7 +3111,7 @@ "summary": "Llama3Instruct8B", "operationId": "Llama3Instruct8B", "tags": ["category:language", "type:low-level"], - "description": "Generate text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/).", + "description": "Compute text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/).", "requestBody": { "content": { "application/json": { @@ -3156,7 +3156,7 @@ "summary": "Llama3Instruct70B", "operationId": "Llama3Instruct70B", "tags": ["category:language", "type:low-level"], - "description": "Generate text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/).", + "description": "Compute text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/).", "requestBody": { "content": { "application/json": { @@ -3201,7 +3201,7 @@ "summary": "Firellava13B", "operationId": "Firellava13B", "tags": ["category:language", 
"type:low-level"], - "description": "Generate text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model).", + "description": "Compute text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model).", "requestBody": { "content": { "application/json": { @@ -3415,7 +3415,7 @@ "prompt": "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", "negative_prompt": "night, moon", "num_images": 2, - "seeds": [3306990332671669418, 13641924104177017164], + "seeds": [330699, 136464], "store": "hosted" } } @@ -3433,11 +3433,11 @@ "outputs": [ { "image_uri": "https://assets.substrate.run/84848484.jpg", - "seed": 3306990332671669418 + "seed": 330699 }, { "image_uri": "https://assets.substrate.run/48484848.jpg", - "seed": 13641924104177017164 + "seed": 136464 } ] } @@ -3467,7 +3467,7 @@ "strength": 0.8, "num_images": 2, "store": "hosted", - "seeds": [16072680593433107326, 17203982922585031095] + "seeds": [1607280, 1720395] } } } @@ -3484,11 +3484,11 @@ "outputs": [ { "image_uri": "https://assets.substrate.run/84848484.jpg", - "seed": 16072680593433107326 + "seed": 1607280 }, { "image_uri": "https://assets.substrate.run/48484848.jpg", - "seed": 17203982922585031095 + "seed": 1720395 } ] } @@ -3518,7 +3518,7 @@ "strength": 1.0, "store": "hosted", "num_images": 2, - "seeds": [16072680593433107326, 17203982922585031095] + "seeds": [1607226, 1720395] } } } @@ -3535,11 +3535,11 @@ "outputs": [ { "image_uri": "https://assets.substrate.run/84848484.jpg", - "seed": 16072680593433107326 + "seed": 1607226 }, { "image_uri": "https://assets.substrate.run/48484848.jpg", - "seed": 17203982922585031095 + "seed": 1720395 } ] } @@ -3698,7 +3698,7 @@ "summary": "EraseImage", "operationId": "EraseImage", "tags": ["category:image"], - "description": "Erase the masked part of an image, e.g.
to 'remove' an object.", + "description": "Erase the masked part of an image, e.g. to remove an object by inpainting.", "requestBody": { "content": { "application/json": { diff --git a/src/version.ts b/src/version.ts index c0fdd76..8f08a62 100644 --- a/src/version.ts +++ b/src/version.ts @@ -1 +1 @@ -export const VERSION = "120240612.0.0"; +export const VERSION = "120240617.0.0"; diff --git a/tests/Nodes.test.ts b/tests/Nodes.test.ts index 56ebe5f..9a7c371 100644 --- a/tests/Nodes.test.ts +++ b/tests/Nodes.test.ts @@ -1,15 +1,15 @@ import { expect, describe, test } from "vitest"; -import { GenerateText } from "substrate/Nodes"; +import { ComputeText } from "substrate/Nodes"; -describe("GenerateText", () => { +describe("ComputeText", () => { test(".node", () => { - const n = new GenerateText({ prompt: "foo" }); - expect(n.node).toEqual("GenerateText"); + const n = new ComputeText({ prompt: "foo" }); + expect(n.node).toEqual("ComputeText"); }); test(".future", () => { - const a = new GenerateText({ prompt: "foo" }); - const b = new GenerateText({ prompt: a.future.text }); - expect(b).toBeInstanceOf(GenerateText); + const a = new ComputeText({ prompt: "foo" }); + const b = new ComputeText({ prompt: a.future.text }); + expect(b).toBeInstanceOf(ComputeText); }); });