-
Notifications
You must be signed in to change notification settings - Fork 19
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
f15c528
commit 12eca60
Showing
2 changed files
with
122 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,51 @@ | ||
import { conversation2MeasurementsPrompt, conversation2measurements } from './conversation2measurements';
import { textCompletion } from './llm';
import { Measurement } from '@/types/models/Measurement';

// Auto-mock the llm module so textCompletion is a jest.fn() the tests can
// configure. jest hoists this call above the imports at transform time.
jest.mock('./llm');
|
||
// Tests for conversation2measurements: prompt construction and the
// end-to-end conversion of a user statement into measurement objects.
// Relies on the module-scope `jest.mock('./llm')` so that textCompletion
// is an auto-mocked jest.fn().
describe('conversation2measurements', () => {
  it('should generate the correct prompt', () => {
    const statement = 'I ate an apple';
    const localDateTime = '2023-06-01T10:00:00';
    const previousStatements = 'I had coffee for breakfast';

    const prompt = conversation2MeasurementsPrompt(statement, localDateTime, previousStatements);

    // The prompt must embed all three inputs verbatim.
    expect(prompt).toContain(statement);
    expect(prompt).toContain(localDateTime);
    expect(prompt).toContain(previousStatements);
  });

  it('should convert conversation to measurements', async () => {
    const statement = 'I ate an apple';
    const localDateTime = '2023-06-01T10:00:00';
    const previousStatements = 'I had coffee for breakfast';
    const expected = {
      variableName: 'Apple',
      value: 1,
      unitName: 'Count',
      startAt: localDateTime,
      combinationOperation: 'SUM',
      variableCategoryName: 'Foods',
    };

    // BUG FIX: the original called jest.mock('./llm', factory) here inside
    // the test body. jest.mock() is hoisted to the top of the module at
    // transform time, so a call made after './llm' was already imported has
    // no effect — the real textCompletion would run — and its factory also
    // illegally closed over the test-scoped `localDateTime`. With the
    // module-scope jest.mock('./llm'), textCompletion is already a
    // jest.fn(); configure its per-test resolved value instead:
    textCompletion.mockResolvedValue(JSON.stringify({ measurements: [expected] }));

    const measurements = await conversation2measurements(statement, localDateTime, previousStatements);

    expect(textCompletion).toHaveBeenCalledWith(expect.any(String), 'json_object');
    expect(measurements).toHaveLength(1);
    expect(measurements[0]).toMatchObject(expected);
  });
});
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,71 @@ | ||
import { textCompletion } from './llm'; | ||
import OpenAI from 'openai'; | ||
|
||
// Tests for textCompletion's wiring of prompts into the OpenAI chat API.
//
// BUG FIX: the original used
//   jest.spyOn(OpenAI.prototype, 'chat').mockResolvedValue({ completions: { create } })
// which is wrong in two ways:
//   1. `chat` holds an object, not an async method — mockResolvedValue turns
//      it into a promise-returning function, so the code under test's
//      `client.chat.completions` is undefined at call time.
//   2. The assertion `OpenAI.prototype.chat.completions.create` reads
//      `.completions` off a jest.fn(), which is also undefined.
// Instead we stub the `chat` accessor to return a plain object whose
// `completions.create` is a jest.fn() we keep a reference to, and assert on
// that reference.
// NOTE(review): this assumes `chat` is exposed as a prototype getter on the
// installed openai SDK version — verify against the SDK; if it is assigned
// in the constructor, mock the whole module with jest.mock('openai') instead.
describe('llm', () => {
  /** Stub OpenAI chat.completions.create and return the create mock. */
  const mockChatCreate = (response) => {
    const create = jest.fn().mockResolvedValue(response);
    jest.spyOn(OpenAI.prototype, 'chat', 'get').mockReturnValue({ completions: { create } });
    return create;
  };

  /** Message payload textCompletion is expected to send for a prompt. */
  const expectedMessages = (promptText) => [
    { role: 'system', content: 'You are a helpful assistant that translates user requests into JSON objects' },
    { role: 'user', content: promptText },
  ];

  afterEach(() => {
    // Restore the spied accessor so tests stay independent.
    jest.restoreAllMocks();
  });

  it('should return text completion', async () => {
    const promptText = 'What is the capital of France?';
    const create = mockChatCreate({ choices: [{ message: { content: 'Paris' } }] });

    const completion = await textCompletion(promptText, 'text');

    expect(create).toHaveBeenCalledWith({
      model: 'gpt-4-turbo',
      stream: false,
      messages: expectedMessages(promptText),
      response_format: { type: 'text' },
    });
    expect(completion).toBe('Paris');
  });

  it('should return JSON object completion', async () => {
    const promptText = 'Generate a JSON object with a "message" property';
    const create = mockChatCreate({ choices: [{ message: { content: '{"message":"Hello, world!"}' } }] });

    const completion = await textCompletion(promptText, 'json_object');

    expect(create).toHaveBeenCalledWith({
      model: 'gpt-4-turbo',
      stream: false,
      messages: expectedMessages(promptText),
      response_format: { type: 'json_object' },
    });
    expect(completion).toBe('{"message":"Hello, world!"}');
  });

  it('should throw an error if no content is returned', async () => {
    const promptText = 'What is the capital of France?';
    mockChatCreate({ choices: [{ message: { content: undefined } }] });

    await expect(textCompletion(promptText, 'text')).rejects.toThrow('No content in response');
  });
});