Skip to content

Commit

Permalink
stop words workaround for now
Browse files Browse the repository at this point in the history
  • Loading branch information
ianmacartney committed Aug 16, 2023
1 parent d06b397 commit 04e7d67
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 3 deletions.
23 changes: 21 additions & 2 deletions convex/conversation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ export async function startConversation(
];
const stop = stopWords(newFriendsNames);
const { content } = await chatCompletion({ messages: prompt, max_tokens: 300, stop });
return { content, memoryIds: memories.map((m) => m.memory._id) };
return { content: trimContent(content, stop), memoryIds: memories.map((m) => m.memory._id) };
}

function messageContent(m: Message): string {
Expand All @@ -54,10 +54,29 @@ function messageContent(m: Message): string {
}
}

// These are the words we ask the LLM to stop on. OpenAI only supports 4,
// so we dedupe (a lowercase name would otherwise appear twice) and cap the
// list at 4 entries. Without the cap, 3+ names would send 6+ stop
// sequences and the OpenAI API would reject the request.
function stopWords(names: string[]): string[] {
  const words = names.flatMap((name) => [name + ':', name.toLowerCase() + ':']);
  return [...new Set(words)].slice(0, 4);
}

// As a stopgap since the stop sequences don't always work, we trim the output
// based on the first stop word we find, lowercased.
function trimContent(content: string, stopWords: string[]) {
  const haystack = content.toLowerCase();
  // Earliest index at which any stop word occurs; -1 when none matched.
  let cutAt = -1;
  for (const word of stopWords) {
    const hit = haystack.indexOf(word.toLowerCase());
    if (hit === -1) continue;
    if (cutAt === -1 || hit < cutAt) {
      cutAt = hit;
      console.debug('found stop word, trimming content', word, hit);
    }
  }
  return cutAt === -1 ? content : content.slice(0, cutAt);
}

export function chatHistoryFromMessages(messages: Message[]): LLMMessage[] {
return (
messages
Expand Down Expand Up @@ -163,7 +182,7 @@ export async function converse(
const stop = stopWords(nearbyPlayers.map((p) => p.name));
const { content } = await chatCompletion({ messages: prompt, max_tokens: 300, stop });
// console.debug('converse result through chatgpt: ', content);
return { content, memoryIds: memories.map((m) => m.memory._id) };
return { content: trimContent(content, stop), memoryIds: memories.map((m) => m.memory._id) };
}

export async function walkAway(messages: LLMMessage[], player: Player): Promise<boolean> {
Expand Down
3 changes: 2 additions & 1 deletion convex/testing.ts
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,8 @@ export const listMessages = internalQuery({
.withIndex('by_playerId_type', (q) =>
q.eq('playerId', playerId as any).eq('data.type', 'talking'),
)
.collect() as Promise<EntryOfType<'talking'>[]>,
.order('desc')
.take(10) as Promise<EntryOfType<'talking'>[]>,
);
return (
await asyncMap(
Expand Down

0 comments on commit 04e7d67

Please sign in to comment.