mirror of https://github.com/SirBlobby/Hoya26.git
synced 2026-02-04 03:34:34 -05:00
Database and Reports Update
@@ -1,37 +1,69 @@
-from flask import Blueprint ,request ,jsonify
-from src .rag .gemeni import GeminiClient
-from src .gemini import ask_gemini_with_rag
+from flask import Blueprint, request, jsonify
+from src.rag.gemeni import GeminiClient
+from src.gemini import ask_gemini_with_rag
+from src.chroma.vector_store import search_documents
+from src.rag.embeddings import get_embedding

-gemini_bp =Blueprint ('gemini',__name__ )
-brain =None
+gemini_bp = Blueprint('gemini', __name__)
+brain = None

-def get_brain ():
-    global brain
-    if brain is None :
-        brain =GeminiClient ()
-    return brain
+def get_brain():
+    global brain
+    if brain is None:
+        brain = GeminiClient()
+    return brain

-@gemini_bp .route ('/ask',methods =['POST'])
-def ask ():
-    data =request .json
-    prompt =data .get ("prompt")
-    context =data .get ("context","")
+@gemini_bp.route('/ask', methods=['POST'])
+def ask():
+    data = request.json
+    prompt = data.get("prompt")
+    context = data.get("context", "")

-    if not prompt :
-        return jsonify ({"error":"No prompt provided"}),400
+    if not prompt:
+        return jsonify({"error": "No prompt provided"}), 400

-    try :
-        client =get_brain ()
-        response =client .ask (prompt ,context )
-        return jsonify ({
-            "status":"success",
-            "reply":response
+    try:
+        # Step 1: Retrieve relevant context from ChromaDB (RAG)
+        print(f"Generating embedding for prompt: {prompt}")
+        query_embedding = get_embedding(prompt)
+
+        print("Searching ChromaDB for context...")
+        search_results = search_documents(query_embedding, num_results=15)
+
+        retrieved_context = ""
+        if search_results:
+            print(f"Found {len(search_results)} documents.")
+            retrieved_context = "RELEVANT INFORMATION FROM DATABASE:\n"
+            for res in search_results:
+                # Include metadata if useful, e.g. brand name or date
+                meta = res.get('metadata', {})
+                source_info = f"[Source: {meta.get('type', 'doc')} - {meta.get('product_name', 'Unknown')}]"
+                retrieved_context += f"{source_info}\n{res['text']}\n\n"
+        else:
+            print("No relevant documents found.")
+
+        # Step 2: Combine manual context (if any) with retrieved context
+        full_context = context
+        if retrieved_context:
+            if full_context:
+                full_context += "\n\n" + retrieved_context
+            else:
+                full_context = retrieved_context
+
+        # Step 3: Ask Gemini
+        client = get_brain()
+        response = client.ask(prompt, full_context)
+
+        return jsonify({
+            "status": "success",
+            "reply": response
         })
-    except Exception as e :
-        return jsonify ({
-            "status":"error",
-            "message":str (e )
-        }),500
+    except Exception as e:
+        print(f"Error in RAG flow: {e}")
+        return jsonify({
+            "status": "error",
+            "message": str(e)
+        }), 500

-@gemini_bp .route ('/rag',methods =['POST'])
-def rag ():
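For reference, a minimal sketch of how the updated /ask endpoint could be called once this blueprint is registered. The base URL, port, any url_prefix applied to gemini_bp, and the example prompt are assumptions here, not part of the commit; adjust them to match the actual app setup.

import requests  # assumes the requests package is installed

# Assumed dev server address; Flask's default port is 5000.
# If gemini_bp is registered with a url_prefix, prepend it to the path.
BASE_URL = "http://localhost:5000"

payload = {
    "prompt": "Which products were reported most often last month?",  # example prompt
    "context": "",  # optional manual context; RAG context is added server-side
}

resp = requests.post(f"{BASE_URL}/ask", json=payload, timeout=60)
body = resp.json()

if resp.status_code == 200 and body.get("status") == "success":
    print(body["reply"])
else:
    # 400 returns {"error": ...}; 500 returns {"status": "error", "message": ...}
    print("Request failed:", body.get("message") or body.get("error"))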