diff --git a/src/index.js b/src/index.js
index 43b575d..896c951 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,23 +1,67 @@
 import 'dotenv/config';
-import { ChatOpenAI } from "@langchain/openai";
+process.env.OPENAI_API_KEY;
+import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
 import { ChatPromptTemplate } from "@langchain/core/prompts";
 import { StringOutputParser } from "@langchain/core/output_parsers";
+import { MemoryVectorStore } from "langchain/vectorstores/memory";
+import { createRetrievalChain } from "langchain/chains/retrieval";
+import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
+
+import { CheerioWebBaseLoader } from "langchain/document_loaders/web/cheerio";
+import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
+
+const chatModel = new ChatOpenAI({});
+const loader = new CheerioWebBaseLoader(
+  "https://docs.smith.langchain.com/"
+);
+
+const docs = await loader.load();
+
+const splitter = new RecursiveCharacterTextSplitter();
+
+const splitDocs = await splitter.splitDocuments(docs);
+
+const embeddings = new OpenAIEmbeddings();
+
+const vectorstore = await MemoryVectorStore.fromDocuments(
+  splitDocs,
+  embeddings
+);
+
+const retriever = vectorstore.asRetriever();
+
+const prompt =
+  ChatPromptTemplate.fromTemplate(`Answer the following question based only on the provided context:
+
+
+{context}
+
+
+Question: {input}`);
+
+const documentChain = await createStuffDocumentsChain({
+  llm: chatModel,
+  prompt,
+});
+
+const retrievalChain = await createRetrievalChain({
+  combineDocsChain: documentChain,
+  retriever,
+});
 
 const outputParser = new StringOutputParser();
-const prompt = ChatPromptTemplate.fromMessages([
-  ["system", "You are an old farmer"],
-  ["user", "{input}"],
-]);
+// const prompt = ChatPromptTemplate.fromMessages([
+//   ["system", "You are an old farmer"],
+//   ["user", "{input}"],
+// ]);
 
-process.env.OPENAI_API_KEY;
-const chatModel = new ChatOpenAI({});
-const chain = prompt.pipe(chatModel).pipe(outputParser);
+// const chain = prompt.pipe(chatModel).pipe(outputParser);
 
-const res = await await chain.invoke({
-  input: "Write a beautiful poem no longer than 5 lines including the phrase 'hello world'",
+const res = await retrievalChain.invoke({
+  input: "What is langsmith",
 });
 
 console.log(res);
\ No newline at end of file
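
Side note on consuming the result (not part of the patch): retrievalChain.invoke is expected to resolve to an object rather than a plain string, so console.log(res) will print the retrieved documents along with the answer. A minimal sketch of reading the individual fields, assuming the chain's output exposes answer and context keys as in current LangChain JS releases:

// Hypothetical follow-up to the patch above; the output field names are an assumption.
const res = await retrievalChain.invoke({ input: "What is langsmith" });
console.log(res.answer);          // the generated answer string
console.log(res.context.length);  // number of retrieved documents stuffed into the prompt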