Skip to content

Commit

Permalink
avoid error when sending the second message: TypeError: chatMessage._g…
Browse files Browse the repository at this point in the history
…etType is not a function
  • Loading branch information
david ullua committed Jun 16, 2023
1 parent 397cf89 commit acff0d6
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 6 deletions.
19 changes: 16 additions & 3 deletions pages/api/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,27 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { Chroma } from 'langchain/vectorstores/chroma';
import { makeChain } from '@/utils/makechain';
import {BaseChatMessage, HumanChatMessage, AIChatMessage} from 'langchain/schema';

export default async function handler(
req: NextApiRequest,
res: NextApiResponse,
) {
const { question, history } = req.body;

console.log('question', question);
let histories: BaseChatMessage[] = [];
history.forEach(hist => {
if(hist['type'] === 'human') {
let req: BaseChatMessage = new HumanChatMessage(question);
histories.push(req);
} else if (hist['type'] === 'ai') {
let respond: BaseChatMessage = new AIChatMessage(question);
histories.push(respond);
}
});


console.log('question:', question);

//only accept post requests
if (req.method !== 'POST') {
Expand Down Expand Up @@ -40,13 +53,13 @@ export default async function handler(
//Ask a question using chat history
const response = await chain.call({
question: sanitizedQuestion,
chat_history: history || [],
chat_history: histories || [],
});

console.log('response', response);
res.status(200).json(response);
} catch (error: any) {
console.log('error', error);
console.log('error:', error);
res.status(500).json({ error: error.message || 'Something went wrong' });
}
}
7 changes: 5 additions & 2 deletions pages/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import Image from 'next/image';
import ReactMarkdown from 'react-markdown';
import LoadingDots from '@/components/ui/LoadingDots';
import { Document } from 'langchain/document';
import {BaseChatMessage, HumanChatMessage, AIChatMessage} from 'langchain/schema';
import {
Accordion,
AccordionContent,
Expand All @@ -20,7 +21,7 @@ export default function Home() {
const [messageState, setMessageState] = useState<{
messages: Message[];
pending?: string;
history: [string, string][];
history: BaseChatMessage[];
pendingSourceDocs?: Document[];
}>({
messages: [
Expand Down Expand Up @@ -81,6 +82,8 @@ export default function Home() {
});
const data = await response.json();
console.log('data', data);
let req: BaseChatMessage = new HumanChatMessage(question);
let respond: BaseChatMessage = new AIChatMessage(data.text);

if (data.error) {
setError(data.error);
Expand All @@ -95,7 +98,7 @@ export default function Home() {
sourceDocs: data.sourceDocuments,
},
],
history: [...state.history, [question, data.text]],
history: [...state.history, req, respond],
}));
}
console.log('messageState', messageState);
Expand Down
3 changes: 2 additions & 1 deletion utils/makechain.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,8 @@ Helpful answer in markdown:`;
export const makeChain = (vectorstore: Chroma) => {
const model = new OpenAI({
temperature: 0, // increase temperature to get more creative answers
modelName: 'gpt-3.5-turbo', //change this to gpt-4 if you have access
modelName: 'gpt-3.5-turbo-0613', //change this to gpt-4 if you have access
//modelName: 'gpt-4', //change this to gpt-4 if you have access
});

const chain = ConversationalRetrievalQAChain.fromLLM(
Expand Down

0 comments on commit acff0d6

Please sign in to comment.