diff --git a/packages/app/bundles/apiContext.ts b/packages/app/bundles/apiContext.ts
index be53821f1..43218469b 100644
--- a/packages/app/bundles/apiContext.ts
+++ b/packages/app/bundles/apiContext.ts
@@ -10,6 +10,7 @@ import os from 'protolib/bundles/os/context'
import os2 from 'protolib/bundles/os/context2'
import utils from 'protolib/bundles/utils/context'
import keys from 'protolib/bundles/keys/context'
+import chatGPT from 'protolib/bundles/chatgpt/context'
export default {
onEvent,
emitEvent,
@@ -25,6 +26,7 @@ export default {
sendMailWithResend,
executeAutomation,
keys,
+ chatGPT,
flow,
flow2,
object,
diff --git a/packages/app/bundles/masks.ts b/packages/app/bundles/masks.ts
index 63023993d..10925d274 100644
--- a/packages/app/bundles/masks.ts
+++ b/packages/app/bundles/masks.ts
@@ -17,6 +17,7 @@ import osMasks from 'protolib/bundles/os/masks'
import osMasks2 from 'protolib/bundles/os/masks2'
import utilsMasks from 'protolib/bundles/utils/masks'
import keyMasks from 'protolib/bundles/keys/masks'
+import chatGPTMasks from 'protolib/bundles/chatgpt/masks'
const paths = {
devices: [
@@ -64,7 +65,8 @@ export const getFlowsCustomComponents = (path: string, queryParams: {}) => {
...osMasks,
...osMasks2,
...keyMasks,
- ...utilsMasks
+ ...utilsMasks,
+ ...chatGPTMasks
]
return []
}
diff --git a/packages/protolib/bundles/chatgpt/context/index.ts b/packages/protolib/bundles/chatgpt/context/index.ts
new file mode 100644
index 000000000..8f0a76086
--- /dev/null
+++ b/packages/protolib/bundles/chatgpt/context/index.ts
@@ -0,0 +1,118 @@
+
+/**
+ * Minimal client for the OpenAI Chat Completions API, exposed to flows as
+ * `context.chatGPT`. No SDK dependency: plain `fetch` against the REST endpoint.
+ */
+
+const chatGPTSession = async ({
+    apiKey = process.env.OPENAI_API_KEY,
+    done = (message) => { },
+    error = (error) => { },
+    ...props
+}: ChatGPTRequest) => {
+    // Caller-provided fields (messages, model, temperature...) override defaults.
+    const body: GPT4VCompletionRequest = {
+        model: "gpt-4-1106-preview",
+        max_tokens: 4096,
+        ...props
+    }
+
+    if (!apiKey) {
+        error("No api Key provided")
+        return
+    }
+
+    try {
+        const response = await fetch("https://api.openai.com/v1/chat/completions", {
+            method: "POST",
+            headers: {
+                "Content-Type": "application/json",
+                Authorization: "Bearer " + apiKey,
+            },
+            body: JSON.stringify(body),
+        });
+        const json = await response.json();
+        // HTTP-level failures (invalid key, rate limit, malformed request) carry an
+        // error payload; surface them through the error callback, not done().
+        if (!response.ok) {
+            error(json?.error ?? json)
+            return
+        }
+        if (done) done(json)
+        return json
+    } catch (e) {
+        // Network failure or invalid JSON body; resolves to undefined.
+        if (error) error(e)
+        return
+    }
+}
+
+// Convenience wrapper: send a single user message and resolve to the reply text.
+// Resolves to "" when the session failed or the API returned no choices.
+const chatGPTPrompt = async ({
+    message,
+    ...props
+}: ChatGPTRequest & { message: string }) => {
+    let response = await chatGPTSession({
+        messages: [
+            {
+                role: "user",
+                content: message
+            }
+        ],
+        ...props,
+        // Placed after ...props so the caller's done() is invoked with the
+        // extracted text rather than the raw completion object.
+        done: (response) => {
+            let message = ""
+            if (response.choices && response.choices.length) {
+                message = response.choices[0].message.content
+            }
+            if (props.done) props.done(message)
+        }
+    })
+
+    // chatGPTSession resolves to undefined when the key is missing or fetch threw
+    if (!response || !response.choices || !response.choices.length) { return "" }
+
+    return response.choices[0].message.content
+}
+
+type ChatGPTRequest = {
+    apiKey?: string;
+    done?: (message: any) => any;
+    error?: (error: any) => any;
+} & GPT4VCompletionRequest
+
+type GPT4VCompletionRequest = {
+    model: "gpt-4-vision-preview" | "gpt-4-1106-preview" | "gpt-4" | "gpt-4-32k" | "gpt-4-0613" | "gpt-4-32k-0613" | "gpt-4-0314" | "gpt-4-32k-0314"; // https://platform.openai.com/docs/models/overview
+    messages: Message[];
+    functions?: any[] | undefined;
+    function_call?: any | undefined;
+    stream?: boolean | undefined;
+    temperature?: number | undefined;
+    top_p?: number | undefined;
+    max_tokens?: number | undefined;
+    n?: number | undefined;
+    best_of?: number | undefined;
+    frequency_penalty?: number | undefined;
+    presence_penalty?: number | undefined;
+    logit_bias?:
+    | {
+        [x: string]: number;
+    }
+    | undefined;
+    stop?: (string[] | string) | undefined;
+};
+
+type Message = {
+    role: "system" | "user" | "assistant" | "function";
+    content: MessageContent;
+    name?: string | undefined;
+}
+
+type MessageContent =
+    | string // String prompt
+    | (string | { type: "image_url"; image_url: string })[]; // Image asset
+
+
+export default {
+    chatGPTSession,
+    chatGPTPrompt
+}
diff --git a/packages/protolib/bundles/chatgpt/masks/ChatGPTPrompt.tsx b/packages/protolib/bundles/chatgpt/masks/ChatGPTPrompt.tsx
new file mode 100644
index 000000000..95976bbf8
--- /dev/null
+++ b/packages/protolib/bundles/chatgpt/masks/ChatGPTPrompt.tsx
@@ -0,0 +1,75 @@
+import { Node, NodeOutput, FallbackPort, NodeParams, filterConnection, getId, connectNodes, filterObject, restoreObject } from 'protoflow';
+import { useColorFromPalette } from 'protoflow/src/diagram/Theme'
+import { MessageCircle } from 'lucide-react'
+
+const ChatGPTPrompt = ({ node = {}, nodeData = {}, children }: any) => {
+ const color = useColorFromPalette(11)
+ return (
+
+
+
+
+
+
+
+
+
+ )
+}
+
+export default {
+ id: 'chatGPT.chatGPTPrompt',
+ type: 'CallExpression',
+ category: "System",
+ keywords: ["prompt", "chat", "gpt", "chatgpt", "openai", "ai", "bot"],
+ check: (node, nodeData) => {
+ return node.type == "CallExpression" && nodeData.to?.startsWith('context.chatGPT.chatGPTPrompt')
+ },
+ getComponent: (node, nodeData, children) => ,
+ filterChildren: filterObject({
+ keys: {
+ message: 'input',
+ apiKey: 'input',
+ model: 'input',
+ max_tokens: 'input',
+ done: 'output',
+ error: 'output'
+ }
+ }),
+ restoreChildren: restoreObject({
+ keys: {
+ message: 'input',
+ apiKey: 'input',
+ model: 'input',
+ max_tokens: 'input',
+ done: { params: { 'param-done': { key: "message" } } },
+ error: { params: { 'param-error': { key: "err" } } }
+ }
+ }),
+ getInitialData: () => {
+ return {
+ await: true,
+ to: 'context.chatGPT.chatGPTPrompt',
+ "param-1": {
+ value: "{}",
+ kind: "Identifier"
+ },
+ "mask-apikey": {
+ value: "",
+ kind: "StringLiteral"
+ },
+ "mask-model": {
+ value: "gpt-4-1106-preview",
+ kind: "StringLiteral"
+ },
+ "mask-message": {
+ value: "",
+ kind: "StringLiteral"
+ },
+ "mask-max_tokens": {
+ value: "4096",
+ kind: "NumericLiteral"
+ }
+ }
+ }
+}
diff --git a/packages/protolib/bundles/chatgpt/masks/index.ts b/packages/protolib/bundles/chatgpt/masks/index.ts
new file mode 100644
index 000000000..84c2e73b0
--- /dev/null
+++ b/packages/protolib/bundles/chatgpt/masks/index.ts
@@ -0,0 +1,6 @@
+import ChatGPTPrompt from './ChatGPTPrompt'
+
+// Flow-editor mask definitions contributed by the chatgpt bundle,
+// consumed by getFlowsCustomComponents in packages/app/bundles/masks.ts
+export default [
+    ChatGPTPrompt
+]