Skip to content

Commit

Permalink
feat: add unit tests for conversation2measurements and llm modules
Browse files Browse the repository at this point in the history
  • Loading branch information
sweep-ai[bot] authored Apr 29, 2024
1 parent f15c528 commit 12eca60
Show file tree
Hide file tree
Showing 2 changed files with 122 additions and 0 deletions.
51 changes: 51 additions & 0 deletions apps/nextjs/lib/conversation2measurements.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import { conversation2MeasurementsPrompt, conversation2measurements } from './conversation2measurements';
import { textCompletion } from './llm';
import { Measurement } from '@/types/models/Measurement';

describe('conversation2measurements', () => {
it('should generate the correct prompt', () => {
const statement = 'I ate an apple';
const localDateTime = '2023-06-01T10:00:00';
const previousStatements = 'I had coffee for breakfast';

const prompt = conversation2MeasurementsPrompt(statement, localDateTime, previousStatements);

expect(prompt).toContain(statement);
expect(prompt).toContain(localDateTime);
expect(prompt).toContain(previousStatements);
});

it('should convert conversation to measurements', async () => {
const statement = 'I ate an apple';
const localDateTime = '2023-06-01T10:00:00';
const previousStatements = 'I had coffee for breakfast';

jest.mock('./llm', () => ({
textCompletion: jest.fn().mockResolvedValue(JSON.stringify({
measurements: [
{
variableName: 'Apple',
value: 1,
unitName: 'Count',
startAt: localDateTime,
combinationOperation: 'SUM',
variableCategoryName: 'Foods'
}
]
}))
}));

const measurements = await conversation2measurements(statement, localDateTime, previousStatements);

expect(textCompletion).toHaveBeenCalledWith(expect.any(String), 'json_object');
expect(measurements).toHaveLength(1);
expect(measurements[0]).toMatchObject({
variableName: 'Apple',
value: 1,
unitName: 'Count',
startAt: localDateTime,
combinationOperation: 'SUM',
variableCategoryName: 'Foods'
});
});
});
71 changes: 71 additions & 0 deletions apps/nextjs/lib/llm.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import { textCompletion } from './llm';
import OpenAI from 'openai';

describe('llm', () => {
it('should return text completion', async () => {
const promptText = 'What is the capital of France?';
const mockResponse = {
choices: [{ message: { content: 'Paris' } }]
};

jest.spyOn(OpenAI.prototype, 'chat').mockResolvedValue({
completions: {
create: jest.fn().mockResolvedValue(mockResponse)
}
});

const completion = await textCompletion(promptText, 'text');

expect(OpenAI.prototype.chat.completions.create).toHaveBeenCalledWith({
model: 'gpt-4-turbo',
stream: false,
messages: [
{"role": "system", "content": "You are a helpful assistant that translates user requests into JSON objects"},
{role: "user", "content": promptText},
],
response_format: { type: 'text' },
});
expect(completion).toBe('Paris');
});

it('should return JSON object completion', async () => {
const promptText = 'Generate a JSON object with a "message" property';
const mockResponse = {
choices: [{ message: { content: '{"message":"Hello, world!"}' } }]
};

jest.spyOn(OpenAI.prototype, 'chat').mockResolvedValue({
completions: {
create: jest.fn().mockResolvedValue(mockResponse)
}
});

const completion = await textCompletion(promptText, 'json_object');

expect(OpenAI.prototype.chat.completions.create).toHaveBeenCalledWith({
model: 'gpt-4-turbo',
stream: false,
messages: [
{"role": "system", "content": "You are a helpful assistant that translates user requests into JSON objects"},
{role: "user", "content": promptText},
],
response_format: { type: 'json_object' },
});
expect(completion).toBe('{"message":"Hello, world!"}');
});

it('should throw an error if no content is returned', async () => {
const promptText = 'What is the capital of France?';
const mockResponse = {
choices: [{ message: { content: undefined } }]
};

jest.spyOn(OpenAI.prototype, 'chat').mockResolvedValue({
completions: {
create: jest.fn().mockResolvedValue(mockResponse)
}
});

await expect(textCompletion(promptText, 'text')).rejects.toThrow('No content in response');
});
});

0 comments on commit 12eca60

Please sign in to comment.