Skip to content

Commit

Permalink
chore: lint files
Browse files Browse the repository at this point in the history
  • Loading branch information
ansh-saini committed Aug 15, 2024
1 parent 1bb4c06 commit aef9047
Show file tree
Hide file tree
Showing 101 changed files with 3,667 additions and 2,095 deletions.
3 changes: 1 addition & 2 deletions .vscode/extensions.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
// Tailwind CSS Intellisense
"bradlc.vscode-tailwindcss",
"esbenp.prettier-vscode",
"dbaeumer.vscode-eslint",
"aaron-bond.better-comments"
"dbaeumer.vscode-eslint"
]
}
2 changes: 1 addition & 1 deletion components.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,4 @@
"components": "@/components",
"utils": "@/lib/utils"
}
}
}
7 changes: 6 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
"prepare": "husky install"
},
"dependencies": {
"@ai-sdk/openai": "^0.0.46",
"@hookform/resolvers": "^3.9.0",
"@radix-ui/react-accordion": "^1.2.0",
"@radix-ui/react-alert-dialog": "^1.1.1",
Expand Down Expand Up @@ -50,11 +51,13 @@
"@supabase/ssr": "^0.4.0",
"@supabase/supabase-js": "^2.45.1",
"@tanstack/react-table": "^8.20.1",
"ai": "^3.3.7",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"cmdk": "1.0.0",
"date-fns": "^3.6.0",
"embla-carousel-react": "^8.1.8",
"framer-motion": "^11.3.28",
"input-otp": "^1.2.4",
"lucide-react": "^0.427.0",
"next": "^14.0.4",
Expand Down Expand Up @@ -99,7 +102,6 @@
"next-sitemap": "^2.5.28",
"postcss": "^8.4.32",
"prettier": "^2.8.8",
"prettier-plugin-tailwindcss": "^0.5.0",
"tailwindcss": "^3.3.6",
"typescript": "^4.9.5"
},
Expand All @@ -111,5 +113,8 @@
"**/*.{json,css,scss,md,webmanifest}": [
"prettier -w"
]
},
"resolutions": {
"string-width": "4.2.3"
}
}
44 changes: 44 additions & 0 deletions src/app/api/assistant-v2/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
import { openai } from '@ai-sdk/openai';
import { convertToCoreMessages, streamText } from 'ai';
import { z } from 'zod';

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

/**
 * Streams a chat completion that can invoke tools.
 *
 * POST body: `{ messages }` in the ai-sdk UI message format; messages are
 * converted to core messages and forwarded to the model. The response is a
 * data stream consumed by the client-side chat hook. Server-side tools run
 * here; client-side tools (no `execute`) are resolved in the browser.
 */
export async function POST(req: Request) {
  const { messages } = await req.json();

  const result = await streamText({
    model: openai('gpt-4-turbo'),
    messages: convertToCoreMessages(messages),
    tools: {
      // server-side tool with execute function:
      getWeatherInformation: {
        description: 'show the weather in a given city to the user',
        parameters: z.object({ city: z.string() }),
        // Demo implementation: returns a random condition. The `city`
        // argument is currently unused, so the handler takes no parameters —
        // this avoids the empty-destructuring pattern and its lint
        // suppression that the previous version needed.
        execute: async () => {
          const weatherOptions = ['sunny', 'cloudy', 'rainy', 'snowy', 'windy'];
          return weatherOptions[
            Math.floor(Math.random() * weatherOptions.length)
          ];
        },
      },
      // client-side tool that starts user interaction:
      askForConfirmation: {
        description: 'Ask the user for confirmation.',
        parameters: z.object({
          message: z.string().describe('The message to ask for confirmation.'),
        }),
      },
      // client-side tool that is automatically executed on the client:
      getLocation: {
        description:
          'Get the user location. Always ask for confirmation before using this tool.',
        parameters: z.object({}),
      },
    },
  });

  return result.toDataStreamResponse();
}
4 changes: 2 additions & 2 deletions src/app/api/assistant/nylas.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import Nylas from "nylas";
import Nylas from 'nylas';

const NylasConfig = {
apiKey: process.env.NYLAS_API_KEY,
apiUri: 'https://api.us.nylas.com',
};

export const nylas = new Nylas(NylasConfig)
export const nylas = new Nylas(NylasConfig);
3 changes: 1 addition & 2 deletions src/app/api/assistant/openai.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import OpenAI from "openai";
import OpenAI from 'openai';

export const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});

112 changes: 55 additions & 57 deletions src/app/api/assistant/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,10 @@ import logger from '@/lib/logger';
import { nylas } from '@/app/api/assistant/nylas';
import { openai } from '@/app/api/assistant/openai';


const companyWorkingHours = '9am to 5pm';
const samplePrompts = [
"What is the best time to schedule a meeting with Angela?",
]
'What is the best time to schedule a meeting with Angela?',
];

export async function GET() {
// const twiml = new MessagingResponse();
Expand All @@ -28,20 +27,19 @@ export async function GET() {
return NextResponse.json({ hello: 'World!' });
}



const getPersonName = async (msg: string) => {
const response = await openai.chat.completions.create({
model: "gpt-3.5-turbo",
model: 'gpt-3.5-turbo',
messages: [
{
"role": "system",
"content": "You are a string parser. You will be given a string that contains a person's name. You have to extract the name from the string. Only return the name and nothing else."
role: 'system',
content:
"You are a string parser. You will be given a string that contains a person's name. You have to extract the name from the string. Only return the name and nothing else.",
},
{
"role": "user",
"content": msg
}
role: 'user',
content: msg,
},
],
temperature: 0.7,
max_tokens: 64,
Expand All @@ -51,34 +49,35 @@ const getPersonName = async (msg: string) => {
const name = response.choices[0].message.content;
if (!name) {
logger('Response: ', JSON.stringify(response));
throw new Error("Name not found");
throw new Error('Name not found');
}
logger(`Got user ${name}`)
logger(`Got user ${name}`);
return name;
}
};

const getGrant = async (name: string) => {
const grants = await nylas.grants.list()
const grant = grants.data.find(grant => grant.email?.includes(name.toLowerCase()))

const grants = await nylas.grants.list();
const grant = grants.data.find((grant) =>
grant.email?.includes(name.toLowerCase())
);

if (!grant) {
throw new Error("Grant not found")
throw new Error('Grant not found');
}
logger(grant, "Got Grant")
return grant
}
logger(grant, 'Got Grant');
return grant;
};

const getUserCalendar = async (grant: Grant) => {
const now = Math.floor(Date.now() / 1000)
const now = Math.floor(Date.now() / 1000);

const calendars = await nylas.calendars.list({
identifier: grant.id
})
const primaryCalendar = calendars.data.find(calendar => calendar.isPrimary)
identifier: grant.id,
});
const primaryCalendar = calendars.data.find((calendar) => calendar.isPrimary);

if (!primaryCalendar) {
throw new Error("This person does not have a calendar")
throw new Error('This person does not have a calendar');
}

const events = await nylas.events.list({
Expand All @@ -88,66 +87,65 @@ const getUserCalendar = async (grant: Grant) => {
start: now.toString(),
// Fetching events for the next 7 days
end: (now + 3600 * 24 * 7).toString(),
}
})
},
});

return events.data
}
return events.data;
};

const getThreeAvailableTimeSlots = async (events: Event[]) => {
const bookedSlots = events.map(event => {
const { when } = event
const bookedSlots = events.map((event) => {
const { when } = event;

switch (when.object) {
case WhenType.Date:
return {
start: new Date(when.date),
end: null,
}
};
case WhenType.Datespan:
return {
start: new Date(when.startDate),
end: new Date(when.endDate),
}
};
case WhenType.Time:
return {
start: new Date(when.time * 1000),
end: null,
}
};
case WhenType.Timespan:
return {
start: new Date(when.startTime * 1000),
end: new Date(when.endTime * 1000),
}
};
}
})

});

const response = await openai.chat.completions.create({
model: "gpt-3.5-turbo",
model: 'gpt-3.5-turbo',
messages: [
{
"role": "system",
"content": `You are an availability checker. You will be given a array of events in a JSON format containing start and end times in ISO format. Those are the already booked slots.
role: 'system',
content: `You are an availability checker. You will be given a array of events in a JSON format containing start and end times in ISO format. Those are the already booked slots.
You have to find at most three suitable time slots for a meeting. The slots must be within the company's working hours.
Prefer the slots that are closer to the current time. If there are no available slots, return "No available slots".
`
`,
},
{
"role": "system",
"content": `Company working hours: ${companyWorkingHours}
role: 'system',
content: `Company working hours: ${companyWorkingHours}
Company working days: Monday to Friday
Try to Avoid meetings during lunch hours (1pm to 2pm)`
Try to Avoid meetings during lunch hours (1pm to 2pm)`,
},
{
"role": "system",
"content": `Format the output in a human read-able way.
Output format examples: ["10:00AM to 11:00AM", "2:00PM to 3:00PM", "4:00PM to 5:00PM"]`
role: 'system',
content: `Format the output in a human read-able way.
Output format examples: ["10:00AM to 11:00AM", "2:00PM to 3:00PM", "4:00PM to 5:00PM"]`,
},
{
"role": "user",
"content": JSON.stringify(bookedSlots)
}
role: 'user',
content: JSON.stringify(bookedSlots),
},
],
temperature: 0.7,
max_tokens: 64,
Expand All @@ -157,11 +155,11 @@ Output format examples: ["10:00AM to 11:00AM", "2:00PM to 3:00PM", "4:00PM to 5:
const slots = response.choices[0].message.content;
if (!slots) {
logger(JSON.stringify(response), 'Slots response');
throw new Error("Slots not available");
throw new Error('Slots not available');
}
logger(`Got user ${slots}`)
logger(`Got user ${slots}`);
return slots;
}
};

const processMessage = async (msg: string) => {
// const applicationDetails = await nylas.applications.getDetails()
Expand All @@ -170,11 +168,11 @@ const processMessage = async (msg: string) => {
const name = await getPersonName(msg);
// const name = 'Reena'

const user = await getGrant(name)
const user = await getGrant(name);

const events = await getUserCalendar(user);

const availableSlots = getThreeAvailableTimeSlots(events);

logger(availableSlots, "Available slots")
}
logger(availableSlots, 'Available slots');
};
24 changes: 24 additions & 0 deletions src/app/assistant/actions.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
'use server';

import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';
import { createStreamableValue } from 'ai/rsc';

/**
 * Server action: streams an LLM completion for `input` back to the caller.
 *
 * Returns immediately with `{ output }`, a streamable value that the client
 * reads incrementally while generation continues in the background.
 */
export async function generate(input: string) {
  const stream = createStreamableValue('');

  // Fire-and-forget background producer; `void` marks the intentionally
  // un-awaited promise.
  void (async () => {
    try {
      const { textStream } = await streamText({
        model: openai('gpt-3.5-turbo'),
        prompt: input,
      });

      for await (const delta of textStream) {
        stream.update(delta);
      }

      stream.done();
    } catch (error) {
      // Propagate the failure to the consumer instead of leaving the
      // streamable value open forever on an unhandled rejection.
      stream.error(error);
    }
  })();

  return { output: stream.value };
}
Loading

0 comments on commit aef9047

Please sign in to comment.