-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
37 lines (29 loc) · 1.12 KB
/
main.py
File metadata and controls
37 lines (29 loc) · 1.12 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
from langchain_ollama.llms import OllamaLLM
from langchain_core.prompts import ChatPromptTemplate
from faissdb import retriever
# Interactive RAG loop: answer questions about a pizza restaurant by
# retrieving relevant reviews from a FAISS index and feeding them, with the
# user's question, to a local Ollama-served LLM.
model = OllamaLLM(model="llama3.2")

# Prompt template — {reviews} is filled with retrieved review text,
# {question} with the user's query.
template = """
You are an expert in answering questions about a pizza restaurant
Here are some relevant reviews: {reviews}
Here is the question to answer: {question}
"""
prompt = ChatPromptTemplate.from_template(template)

# LCEL pipeline: format the prompt, then run it through the model.
chain = prompt | model

while True:
    print("\n\n-------------------------------")
    question = input("Ask your question (q to quit): ")
    print("\n\n")
    # Accept "q" regardless of case or surrounding whitespace.
    if question.strip().lower() == "q":
        break

    # Fetch documents relevant to the question from the FAISS retriever.
    docs = retriever.invoke(question)
    # Combine retrieved texts into one context string; fall back to a
    # placeholder when the retriever returns nothing.
    reviews_text = "\n\n".join(doc.page_content for doc in docs) or "No relevant reviews found."
    # Generate and display the answer.
    result = chain.invoke({"reviews": reviews_text, "question": question})
    print("\nAnswer:\n", result)