Chuck Dries 2024-02-15 00:13:51 -08:00
parent 298596e47e
commit e3b7dcb870
3 changed files with 9 additions and 22 deletions

View File

@@ -1 +1 @@
-# knowledge-sidekick
+# langchain-quickstart

View File

@@ -1,5 +1,5 @@
 {
-  "name": "knowledge-sidekick",
+  "name": "langchain-quickstart",
   "packageManager": "yarn@4.1.0",
   "type": "module",
   "dependencies": {

View File

@@ -1,8 +1,7 @@
-import 'dotenv/config';
+import "dotenv/config";
 process.env.OPENAI_API_KEY;
 import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
 import { ChatPromptTemplate } from "@langchain/core/prompts";
 import { StringOutputParser } from "@langchain/core/output_parsers";
 import { MemoryVectorStore } from "langchain/vectorstores/memory";
 import { createRetrievalChain } from "langchain/chains/retrieval";
 import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
@@ -11,9 +10,7 @@ import { CheerioWebBaseLoader } from "langchain/document_loaders/web/cheerio";
 import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
 const chatModel = new ChatOpenAI({});
-const loader = new CheerioWebBaseLoader(
-  "https://docs.smith.langchain.com/"
-);
+const loader = new CheerioWebBaseLoader("https://docs.smith.langchain.com/");
 const docs = await loader.load();
@@ -30,14 +27,15 @@ const vectorstore = await MemoryVectorStore.fromDocuments(
 const retriever = vectorstore.asRetriever();
-const prompt =
-  ChatPromptTemplate.fromTemplate(`Answer the following question based only on the provided context:
+const prompt = ChatPromptTemplate.fromTemplate(
+  `Answer the following question based only on the provided context:
 <context>
 {context}
 </context>
-Question: {input}`);
+Question: {input}`
+);
 const documentChain = await createStuffDocumentsChain({
   llm: chatModel,
@@ -49,17 +47,6 @@ const retrievalChain = await createRetrievalChain({
   retriever,
 });
 const outputParser = new StringOutputParser();
-// const prompt = ChatPromptTemplate.fromMessages([
-//   ["system", "You are an old farmer"],
-//   ["user", "{input}"],
-// ]);
-// const chain = prompt.pipe(chatModel).pipe(outputParser);
 const res = await retrievalChain.invoke({
   input: "What is langsmith",
 });
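
For reference only (not part of this commit): a minimal sketch of consuming the chain's result after the invoke call shown above, assuming the object returned by createRetrievalChain exposes the generated text under an answer key and the retrieved documents under context.

// Sketch only, not part of the diff above.
// Assumes the retrieval chain result has `answer` and `context` fields.
console.log(res.answer);
console.log(`retrieved ${res.context.length} documents`);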