# lcel_rag_with_sources.py
# Gist by @lgersman, forked from waseemhnyc/lcel_rag_with_sources.py
# (created November 28, 2023). Uses the pre-0.1 langchain package layout
# current at that time.
from operator import itemgetter
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.schema import StrOutputParser
from langchain.schema.runnable import RunnablePassthrough, RunnableMap
from langchain.vectorstores import Chroma
from langchain.prompts import PromptTemplate
from langchain.document_loaders import JSONLoader
from dotenv import load_dotenv

# Pull OPENAI_API_KEY (and any other settings) from a local .env file.
load_dotenv()


def data_func(record: dict, metadata: dict) -> dict:
    # Copy each record's "source" field into the document metadata so the
    # chain can report where an answer came from.
    metadata["source"] = record.get("source", "")
    return metadata

loader = JSONLoader(
    file_path="videos.json",
    jq_schema=".[]",             # iterate over the top-level JSON array
    content_key="descriptions",  # this field becomes the document text
    metadata_func=data_func,
)
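
# For reference, the loader above assumes a top-level array where each record
# has a "descriptions" field (the text to embed) and a "source" field. A
# minimal, hypothetical videos.json might look like:
#
# [
#   {"descriptions": "Intro to LangChain Expression Language ...",
#    "source": "https://youtube.com/watch?v=example1"},
#   {"descriptions": "Building RAG pipelines with Chroma ...",
#    "source": "https://youtube.com/watch?v=example2"}
# ]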

documents = loader.load()

# Embed every document and index it in an in-memory Chroma collection,
# then expose it as a retriever for the chains below.
vectorstore = Chroma.from_documents(documents=documents, embedding=OpenAIEmbeddings())
retriever = vectorstore.as_retriever()
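
# By default the retriever returns the top 4 documents by similarity. If you
# want more or fewer sources per answer, k is the usual knob (illustrative
# value, tune for your data):
#
#     retriever = vectorstore.as_retriever(search_kwargs={"k": 2})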
template = """
Use the following pieces of context to answer the question at the end.
If you don't know the answer, just say that you don't know, don't try to make up an answer.
Try to keep the answer concise when possible.
{context}
Question: {question}
Helpful Answer:"""
rag_prompt_custom = PromptTemplate.from_template(template)

# temperature=0 keeps answers as deterministic and context-grounded as possible.
llm = ChatOpenAI(model_name="gpt-4-1106-preview", temperature=0)


def format_docs(docs):
    # Join the retrieved documents into one context string for the prompt.
    return "\n\n".join(doc.page_content for doc in docs)


# Inner chain: given {"documents": [...], "question": "..."}, build the prompt
# and produce a plain-string answer. The leading dict is coerced by LCEL into
# a parallel map of runnables.
rag_chain_from_docs = (
    {
        "context": lambda input: format_docs(input["documents"]),
        "question": itemgetter("question"),
    }
    | rag_prompt_custom
    | llm
    | StrOutputParser()
)
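
# A quick way to sanity-check the inner chain on its own (hypothetical query):
#
#     docs = retriever.get_relevant_documents("what is this channel about?")
#     rag_chain_from_docs.invoke({"documents": docs, "question": "what is this channel about?"})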

# Outer chain: run the retriever and pass the raw question through in
# parallel, then return the answer alongside the source metadata of the
# documents it was based on.
rag_chain_with_source = RunnableMap(
    {"documents": retriever, "question": RunnablePassthrough()}
) | {
    "documents": lambda input: [doc.metadata for doc in input["documents"]],
    "answer": rag_chain_from_docs,
}
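
# Invoking the full chain returns both pieces, shaped roughly like this
# (illustrative output, not real data):
#
#     rag_chain_with_source.invoke("What topics do the videos cover?")
#     # -> {"documents": [{"source": "https://youtube.com/..."}, ...],
#     #     "answer": "The videos cover ..."}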


def chat():
    # Simple REPL: ask questions until the user types "exit".
    while True:
        question = input("Question (type 'exit' to quit): ").strip()
        if question.lower() == "exit":
            break
        response = rag_chain_with_source.invoke(question)
        documents = response["documents"]
        answer = response["answer"]
        print(f"Answer: {answer}")
        print(f"Documents: {documents}")


if __name__ == "__main__":
    chat()
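
# To run this script (assuming videos.json sits next to it and OPENAI_API_KEY
# is set in .env; package names are the late-2023 ones this gist targets):
#
#     pip install langchain openai chromadb tiktoken jq python-dotenv
#     python lcel_rag_with_sources.py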