diff --git a/README.md b/README.md
index eb045ec..8d8564d 100644
--- a/README.md
+++ b/README.md
@@ -1 +1 @@
-# knowledge-sidekick
+# langchain-quickstart
diff --git a/package.json b/package.json
index 578c444..fcbe1b9 100644
--- a/package.json
+++ b/package.json
@@ -1,5 +1,5 @@
 {
-  "name": "knowledge-sidekick",
+  "name": "langchain-quickstart",
   "packageManager": "yarn@4.1.0",
   "type": "module",
   "dependencies": {
diff --git a/src/index.js b/src/index.js
index 896c951..fd7b759 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,8 +1,7 @@
-import 'dotenv/config';
+import "dotenv/config";
 process.env.OPENAI_API_KEY;
 import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
 import { ChatPromptTemplate } from "@langchain/core/prompts";
-import { StringOutputParser } from "@langchain/core/output_parsers";
 import { MemoryVectorStore } from "langchain/vectorstores/memory";
 import { createRetrievalChain } from "langchain/chains/retrieval";
 import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
@@ -11,9 +10,7 @@ import { CheerioWebBaseLoader } from "langchain/document_loaders/web/cheerio";
 import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
 
 const chatModel = new ChatOpenAI({});
-const loader = new CheerioWebBaseLoader(
-  "https://docs.smith.langchain.com/"
-);
+const loader = new CheerioWebBaseLoader("https://docs.smith.langchain.com/");
 
 const docs = await loader.load();
 
@@ -30,14 +27,15 @@ const vectorstore = await MemoryVectorStore.fromDocuments(
 
 const retriever = vectorstore.asRetriever();
 
-const prompt =
-  ChatPromptTemplate.fromTemplate(`Answer the following question based only on the provided context:
+const prompt = ChatPromptTemplate.fromTemplate(
+  `Answer the following question based only on the provided context:
 
 <context>
 {context}
 </context>
 
-Question: {input}`);
+Question: {input}`
+);
 
 const documentChain = await createStuffDocumentsChain({
   llm: chatModel,
@@ -49,19 +47,8 @@ const retrievalChain = await createRetrievalChain({
   retriever,
 });
 
-const outputParser = new StringOutputParser();
-
-// const prompt = ChatPromptTemplate.fromMessages([
-// ["system", "You are an old farmer"],
-// ["user", "{input}"],
-// ]);
-
-
-
-// const chain = prompt.pipe(chatModel).pipe(outputParser);
-
 const res = await retrievalChain.invoke({
   input: "What is langsmith",
 });
 
-console.log(res);
\ No newline at end of file
+console.log(res);
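
For readability, here is a sketch of what `src/index.js` looks like once this diff is applied. The lines outside the hunks (the text splitter setup, the `OpenAIEmbeddings` instance, the arguments to `MemoryVectorStore.fromDocuments`, and the `prompt`/`combineDocsChain` option names) are not shown in the diff and are assumed here to follow the standard LangChain.js quickstart pattern; treat them as illustrative rather than authoritative.

```js
import "dotenv/config";
// No-op reference; ChatOpenAI reads OPENAI_API_KEY from process.env on its own.
process.env.OPENAI_API_KEY;
import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { MemoryVectorStore } from "langchain/vectorstores/memory";
import { createRetrievalChain } from "langchain/chains/retrieval";
import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
import { CheerioWebBaseLoader } from "langchain/document_loaders/web/cheerio";
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";

const chatModel = new ChatOpenAI({});

// Load the LangSmith docs page and split it into chunks
// (splitter setup assumed from the quickstart; not shown in the hunks).
const loader = new CheerioWebBaseLoader("https://docs.smith.langchain.com/");
const docs = await loader.load();
const splitter = new RecursiveCharacterTextSplitter();
const splitDocs = await splitter.splitDocuments(docs);

// Embed the chunks into an in-memory vector store and expose it as a retriever.
const embeddings = new OpenAIEmbeddings();
const vectorstore = await MemoryVectorStore.fromDocuments(splitDocs, embeddings);
const retriever = vectorstore.asRetriever();

const prompt = ChatPromptTemplate.fromTemplate(
  `Answer the following question based only on the provided context:

<context>
{context}
</context>

Question: {input}`
);

// Stuff the retrieved documents into the prompt, then wrap that chain with retrieval.
const documentChain = await createStuffDocumentsChain({
  llm: chatModel,
  prompt,
});
const retrievalChain = await createRetrievalChain({
  combineDocsChain: documentChain,
  retriever,
});

const res = await retrievalChain.invoke({
  input: "What is langsmith",
});

console.log(res);
```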