Skip to content

Commit

Permalink
Merge pull request #57 from a16z-infra/reflection-update
Browse files Browse the repository at this point in the history
adding reflection
  • Loading branch information
ykhli authored Aug 13, 2023
2 parents 367c745 + 385dd1b commit 54834ac
Show file tree
Hide file tree
Showing 4 changed files with 121 additions and 10 deletions.
3 changes: 2 additions & 1 deletion convex/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,8 @@ async function handleAgentSolo(ctx: ActionCtx, player: Player, memory: MemoryDB,
// console.debug('handleAgentSolo: ', player.name, player.id);
// Handle new observations: it can look at the agent's lastWakeTs for a delta.
// Calculate scores
// If there's enough observation score, trigger reflection?
// Run reflection on memories once in a while
await memory.reflectOnMemories(player.id, player.name);
// Future: Store observations about seeing players in conversation
// might include new observations -> add to memory with openai embeddings
// Later: handle object ownership?
Expand Down
17 changes: 14 additions & 3 deletions convex/conversation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -107,18 +107,29 @@ export async function converse(
const { embedding } = await fetchEmbedding(lastMessage ? lastMessage : '');
const memories = await memory.accessMemories(player.id, embedding);
const conversationMemories = filterMemoriesType(['conversation'], memories);
const reflectionMemories = filterMemoriesType(['reflection'], memories);
const lastConversationTs = conversationMemories[0]?.memory._creationTime;

const stop = nearbyPlayers.join(':');
const relevantReflections: string =
reflectionMemories.length > 0
? reflectionMemories
.slice(0, 2)
.map((r) => r.memory.description)
.join('\n')
: '';
const relevantMemories: string = conversationMemories
.slice(0, 2) // only use the first 2 memories
.map((r) => r.memory.description)
.join('\n');

// console.debug('relevantMemories: ', relevantMemories);
let prefixPrompt = `Your name is ${player.name}. About you: ${player.identity}.\n`;
if (relevantReflections.length > 0) {
prefixPrompt += relevantReflections;
console.log('relevantReflections', relevantReflections);
}

let prefixPrompt = `Your name is ${player.name}. About you: ${player.identity}.
You are talking to ${nearbyPlayersNames}, below are something about them: `;
prefixPrompt += `\nYou are talking to ${nearbyPlayersNames}, below are something about them: `;

nearbyPlayers.forEach((p) => {
prefixPrompt += `\nAbout ${p.name}: ${p.identity}\n`;
Expand Down
103 changes: 101 additions & 2 deletions convex/lib/memory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import {
internalQuery,
} from '../_generated/server.js';
import { asyncMap } from './utils.js';
import { EntryOfType, Memories, MemoryOfType, MemoryType } from '../schema.js';
import { EntryOfType, Memories, Memory, MemoryOfType, MemoryType } from '../schema.js';
import { chatCompletion } from './openai.js';
import { clientMessageMapper } from '../chat.js';
import { pineconeAvailable, queryVectors, upsertVectors } from './pinecone.js';
Expand Down Expand Up @@ -40,6 +40,7 @@ export interface MemoryDB {
playerIdentity: string,
conversationId: Id<'conversations'>,
): Promise<boolean>;
reflectOnMemories(playerId: Id<'players'>, name: string): Promise<void>;
}

export function MemoryDB(ctx: ActionCtx): MemoryDB {
Expand Down Expand Up @@ -158,6 +159,66 @@ export function MemoryDB(ctx: ActionCtx): MemoryDB {
]);
return true;
},
// Periodically distills recent high-importance memories into 'reflection'
// memories via an LLM call. Best-effort: a failed LLM response or parse
// error is logged and skipped; a later tick will try again.
async reflectOnMemories(playerId: Id<'players'>, name: string) {
  const { memories, lastReflectionTs } = await ctx.runQuery(
    internal.lib.memory.getReflectionMemories,
    {
      playerId,
      numberOfItems: 100,
    },
  );

  // Only reflect if the latest 100 items have a summed importance score > 500,
  // counting only memories created since the last reflection.
  const sumOfImportanceScore = memories
    .filter((m) => m._creationTime > (lastReflectionTs ?? 0))
    .reduce((acc, curr) => acc + curr.importance, 0);
  console.debug('sum of importance score = ', sumOfImportanceScore);
  const shouldReflect = sumOfImportanceScore > 500;
  if (!shouldReflect) {
    return;
  }

  console.debug('Reflecting...');
  let prompt = `[no prose]\n [Output only JSON] \nYou are ${name}, statements about you:\n`;
  memories.forEach((m, idx) => {
    prompt += `Statement ${idx}: ${m.description}\n`;
  });
  prompt += `What 3 high-level insights can you infer from the above statements?
Return in JSON format, where the key is a list of input statements that contributed to your insights and value is your insight. Make the response parseable by Typescript JSON.parse() function. DO NOT escape characters or include '\n' or white space in response.
Example: [{insight: "...", statementIds: [1,2]}, {insight: "...", statementIds: [1]}, ...]`;

  const { content: reflection } = await chatCompletion({
    messages: [
      {
        role: 'user',
        content: prompt,
      },
    ],
  });

  try {
    // NOTE(review): the parsed value is trusted to match this shape beyond the
    // annotation — a parseable-but-malformed response (e.g. out-of-range
    // statementIds) throws here and is handled by the catch below.
    const insights: { insight: string; statementIds: number[] }[] = JSON.parse(reflection);
    const memoriesToSave: MemoryOfType<'reflection'>[] = insights.map((item) => {
      // Map statement indices back to the memory docs they refer to.
      const relatedMemoryIds = item.statementIds.map((idx: number) => memories[idx]._id);
      return {
        playerId,
        description: item.insight,
        data: {
          type: 'reflection',
          relatedMemoryIds,
        },
      } as MemoryOfType<'reflection'>;
    });
    console.debug('adding reflection memory...', memoriesToSave);

    await this.addMemories(memoriesToSave);
  } catch (e) {
    console.error('error saving or parsing reflection', e);
    console.debug('reflection', reflection);
    return;
  }
},
};
}

Expand All @@ -172,7 +233,7 @@ export const filterMemoriesType = <T extends MemoryType>(

export const getMemories = internalQuery({
args: { playerId: v.id('players'), embeddingIds: v.array(v.id('embeddings')) },
handler: async (ctx, args) => {
handler: async (ctx, args): Promise<Memory[]> => {
return await asyncMap(args.embeddingIds, (id) =>
getMemoryByEmbeddingId(ctx.db, args.playerId, id),
);
Expand Down Expand Up @@ -283,6 +344,44 @@ async function getMemoryByEmbeddingId(
return doc;
}

// Returns the player's most recent conversation + reflection memories (merged,
// newest first, capped at numberOfItems) along with the creation time of the
// latest reflection, used by the caller to threshold when to reflect again.
export const getReflectionMemories = internalQuery({
  args: { playerId: v.id('players'), numberOfItems: v.number() },
  handler: async (ctx, { playerId, numberOfItems }) => {
    const conversations = await ctx.db
      .query('memories')
      .withIndex('by_playerId_type', (q) =>
        //TODO - we should get memories of other types once we can
        // Probably with an index just on playerId, so we can sort by time
        q.eq('playerId', playerId).eq('data.type', 'conversation'),
      )
      .order('desc')
      .take(numberOfItems);
    console.debug('conversation memories length', conversations.length);
    const reflections = await ctx.db
      .query('memories')
      .withIndex('by_playerId_type', (q) =>
        q.eq('playerId', playerId).eq('data.type', 'reflection'),
      )
      .order('desc')
      .take(numberOfItems);

    // The reflections query is already ordered newest-first, so its head is
    // the latest reflection — no need for a separate `.first()` query.
    // (Assumes numberOfItems >= 1; the current caller passes 100.)
    const lastReflectionTs = reflections[0]?._creationTime;

    // Merge both lists newest-first without mutating either source array.
    const mergedList = [...reflections, ...conversations].sort(
      (a, b) => b._creationTime - a._creationTime,
    );

    return {
      memories: mergedList.slice(0, numberOfItems),
      lastReflectionTs,
    };
  },
});

export const getRecentMessages = internalQuery({
args: {
playerId: v.id('players'),
Expand Down
8 changes: 4 additions & 4 deletions convex/schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -160,10 +160,10 @@ export const Memories = Table('memories', {

// Exercises left to the reader:

// v.object({
// type: v.literal('reflection'),
// relatedMemoryIds: v.array(v.id('memories')),
// }),
v.object({
type: v.literal('reflection'),
relatedMemoryIds: v.array(v.id('memories')),
}),
// v.object({
// type: v.literal('observation'),
// object: v.string(),
Expand Down

0 comments on commit 54834ac

Please sign in to comment.