refactor(graph): Remove unused artifact generation stream logic
Clean up server actions by removing the generateArtifactStream function and its
associated dependencies. Simplify the codebase by focusing on core graph
functionality.

- Remove OpenAI and Langfuse integrations
- Remove metrics tracking code
- Keep only essential graph-related server actions
toyamarinyon committed Oct 30, 2024
1 parent 3dfc1e3 commit 8bf2c81
Showing 1 changed file with 1 addition and 73 deletions.
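For context, the removed generateArtifactStream action used the Vercel AI SDK's streamObject together with createStreamableValue from ai/rsc, so a client component could iterate over partial results as they arrived. A minimal sketch of how such a streamable value would have been consumed before this commit is shown below; the component, its props, and the placeholder prompt are illustrative assumptions for this note, not code from this repository or this diff.

"use client";

// Hypothetical consumer of the (now removed) server action, for illustration only.
// readStreamableValue is the ai/rsc counterpart to the createStreamableValue
// call used inside generateArtifactStream.
import { readStreamableValue } from "ai/rsc";
import { useState } from "react";
import type { AgentId } from "../types";
import { generateArtifactStream } from "./server-actions";

export function ArtifactPreview({ agentId }: { agentId: AgentId }) {
	const [artifact, setArtifact] = useState<unknown>(null);

	const run = async () => {
		const { object } = await generateArtifactStream({
			agentId,
			userPrompt: "Summarize the indexed sources",
			sourceIndexes: [],
		});
		// Each iteration yields a progressively more complete partial artifact.
		for await (const partial of readStreamableValue(object)) {
			setArtifact(partial);
		}
	};

	return <button onClick={run}>Generate artifact</button>;
}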
74 changes: 1 addition & 73 deletions app/(playground)/p/[agentId]/beta-proto/graph/server-actions.ts
@@ -1,88 +1,16 @@
"use server";

import { openai } from "@ai-sdk/openai";
import { streamObject } from "ai";
import { createStreamableValue } from "ai/rsc";
import { UnstructuredClient } from "unstructured-client";

import { getUserSubscriptionId, isRoute06User } from "@/app/(auth)/lib";
import { agents, db } from "@/drizzle";
import { metrics } from "@opentelemetry/api";
import { createId } from "@paralleldrive/cuid2";
import { put } from "@vercel/blob";
import { eq } from "drizzle-orm";
import { Langfuse } from "langfuse";
import { UnstructuredClient } from "unstructured-client";
import { Strategy } from "unstructured-client/sdk/models/shared";
import { schema as artifactSchema } from "../artifact/schema";
import type { FileId } from "../files/types";
import type { SourceIndex } from "../source/types";
import { sourceIndexesToSources } from "../source/utils";
import type { AgentId } from "../types";
import { elementsToMarkdown } from "../utils/unstructured";
import type { Graph } from "./types";

type GenerateArtifactStreamParams = {
	agentId: AgentId;
	userPrompt: string;
	systemPrompt?: string;
	sourceIndexes: SourceIndex[];
};
export async function generateArtifactStream(
	params: GenerateArtifactStreamParams,
) {
	const lf = new Langfuse();
	const trace = lf.trace({
		id: `giselle-${Date.now()}`,
	});
	const sources = sourceIndexesToSources({
		input: {
			agentId: params.agentId,
			sourceIndexes: params.sourceIndexes,
		},
	});
	const stream = createStreamableValue();

	(async () => {
		const model = "gpt-4o";
		const generation = trace.generation({
			input: params.userPrompt,
			model,
		});
		const { partialObjectStream, object } = await streamObject({
			model: openai(model),
			system: params.systemPrompt ?? "You generate an answer to a question. ",
			prompt: params.userPrompt,
			schema: artifactSchema,
			onFinish: async (result) => {
				const meter = metrics.getMeter("OpenAI");
				const tokenCounter = meter.createCounter("token_consumed", {
					description: "Number of OpenAI API tokens consumed by each request",
				});
				const subscriptionId = await getUserSubscriptionId();
				const isR06User = await isRoute06User();
				tokenCounter.add(result.usage.totalTokens, {
					subscriptionId,
					isR06User,
				});
				generation.end({
					output: result,
				});
				await lf.shutdownAsync();
			},
		});

		for await (const partialObject of partialObjectStream) {
			stream.update(partialObject);
		}

		const result = await object;

		stream.done();
	})();

	return { object: stream.value };
}

export async function setGraphToDb(agentId: AgentId, graph: Graph) {
	await db
		.update(agents)
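The hunk cuts off at the top of setGraphToDb, so the column being written is not visible in this view. As a rough sketch of what a drizzle-orm update of this shape typically looks like (the graph column name below is an assumption, not confirmed by the diff):

// Sketch only: the diff above is truncated before the .set(...) call, so the
// persisted column is assumed here; adjust to the actual agents table schema.
export async function setGraphToDb(agentId: AgentId, graph: Graph) {
	await db
		.update(agents)
		.set({ graph }) // assumed column name
		.where(eq(agents.id, agentId));
}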
