"""Load a FAISS index with embeddings, build a retrieval QA chain that returns
source documents, ask a question, and print the answer along with metadata and
a content snippet from each source document."""
from langchain.chains import RetrievalQA
from langchain.llms import OpenAI
from langchain.vectorstores import FAISS
from langchain.embeddings.openai import OpenAIEmbeddings
# Load embeddings and the persisted FAISS index from disk.
embeddings = OpenAIEmbeddings()
vectorstore = FAISS.load_local("faiss_index", embeddings)
# BUG FIX: FAISS.load_local returns a vector store, not a retriever.
# RetrievalQA requires a BaseRetriever, so wrap the store with .as_retriever()
# (passing the raw vector store as retriever= fails pydantic validation).
retriever = vectorstore.as_retriever()

# Create a QA chain that stuffs retrieved docs into the prompt and also
# returns the source documents used to produce the answer.
qa = RetrievalQA.from_chain_type(
    llm=OpenAI(temperature=0),  # temperature=0 for deterministic answers
    chain_type="stuff",
    retriever=retriever,
    return_source_documents=True,
)

# Ask a question. The chain returns a dict with 'result' (the answer text)
# and 'source_documents' (the retrieved Document objects).
query = "What is the tallest mountain in the world?"
result = qa({'query': query})

# Print answer
print("Answer:", result['result'])

# Print source citations: per-document metadata plus a short content snippet.
print("Sources:")
for i, doc in enumerate(result['source_documents'], 1):
    print(f"Source {i} metadata:", doc.metadata)
    print(f"Source {i} content snippet:", doc.page_content[:100], "...")