# shopify_1 / app.py
# NOTE: the lines above/around this header in the scraped copy were Hugging Face
# Space file-viewer chrome (commit d950c91, 4.52 kB) — not part of the program.
import gradio as gr
from sql_generator import SQLGenerator
from intent_classifier import IntentClassifier
from rag_system import RAGSystem
from huggingface_hub import InferenceClient
# Initialize Hugging Face InferenceClient
# Module-level client shared by `respond`; model id is hard-coded to the
# Zephyr-7B chat model hosted on the HF Inference API.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
# Unified System Class
class UnifiedSystem:
    """Route a natural-language query to the SQL/Shopify backend or the RAG
    description backend, based on the classified intent.

    Returns human-readable strings intended for direct display in the UI.
    """

    def __init__(self):
        # Project-local collaborators (see sql_generator / intent_classifier /
        # rag_system modules for their contracts).
        self.sql_generator = SQLGenerator()
        self.intent_classifier = IntentClassifier()
        self.rag_system = RAGSystem()
        # Storefront base URL; product handles are appended to build links.
        self.base_url = "https://agkd0n-fa.myshopify.com/products/"

    def process_query(self, query):
        """Classify *query* and dispatch to the matching handler.

        Returns a formatted result string, or a fallback message when the
        intent is not one of the two supported kinds.
        """
        intent, confidence = self.intent_classifier.classify(query)
        if intent == "database_query":
            return self._database_query_response(query, confidence)
        if intent == "product_description":
            return self._product_description_response(query, confidence)
        return "Intent not recognized."

    def _database_query_response(self, query, confidence):
        # Generate a SQL statement for display, then fetch product data.
        # NOTE(review): fetch_shopify_data("products") ignores the generated
        # SQL — all products are listed regardless of the query; confirm intent.
        sql_query = self.sql_generator.generate_query(query)
        products = self.sql_generator.fetch_shopify_data("products")
        if not (products and 'products' in products):
            return "No results found or error fetching data from Shopify."
        results = "\n".join(
            f"Title: {p['title']}\nVendor: {p['vendor']}\nDescription: {p.get('body_html', 'No description available.')}\nURL: {self.base_url}{p['handle']}\n"
            for p in products['products']
        )
        return f"Intent: Database Query (Confidence: {confidence:.2f})\n\n" \
               f"SQL Query: {sql_query}\n\nResults:\n{results}"

    def _product_description_response(self, query, confidence):
        # Delegate to the RAG system and format its answer plus product links.
        rag_response = self.rag_system.process_query(query)
        handles = rag_response.get('product_handles', [])
        response = rag_response.get('response', "No description available.")
        url_lines = "\n".join(
            f"Product URL: {self.base_url}{handle}" for handle in handles
        )
        return f"Intent: Product Description (Confidence: {confidence:.2f})\n\n" \
               f"Response: {response}\n\nProduct Details:\n" + url_lines
# Chatbot Response using Hugging Face's model
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion from the module-level `client`.

    Builds an OpenAI-style message list from `system_message`, the
    (user, assistant) `history` pairs, and the new `message`, then yields
    the cumulative response text after each streamed token (Gradio
    ChatInterface streaming convention).
    """
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        # Skip empty halves of a turn (e.g. the still-pending assistant reply).
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    # FIX: the original loop variable was named `message`, shadowing the
    # function parameter above.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # FIX: delta.content can be None on some stream chunks (e.g. the
        # final/role-only chunk); the original `response += token` then
        # raised TypeError mid-stream.
        if token:
            response += token
            yield response
# Create Gradio interface with integrated functionalities
def create_interface():
    """Build and launch the Gradio UI.

    Hosts two interfaces as tabs of a single app: the unified query
    processor (UnifiedSystem.process_query) and the streaming chatbot
    (`respond`). Blocks until the server is shut down.
    """
    system = UnifiedSystem()

    # Natural-language query interface backed by UnifiedSystem.process_query.
    iface = gr.Interface(
        fn=system.process_query,
        inputs=gr.Textbox(
            label="Enter your query",
            placeholder="e.g., 'Show me all T-shirts' or 'Describe the product features'"
        ),
        outputs=gr.Textbox(label="Response"),
        title="Unified Query Processing System",
        description="Enter a natural language query to search products or get descriptions.",
        examples=[
            ["Show me shirts less than 50 rupee"],
            ["Show me shirts with red color"],
            ["Show me T-shirts with M size"]
        ]
    )

    # Chat interface streaming from the Hugging Face model via `respond`.
    chat_demo = gr.ChatInterface(
        respond,
        additional_inputs=[
            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.95,
                step=0.05,
                label="Top-p (nucleus sampling)",
            ),
        ],
    )

    # FIX: the original called iface.launch(share=True) and then
    # chat_demo.launch(share=True); launch() blocks in a script, so the
    # chatbot interface was never actually started. Serve both from one
    # app as tabs and launch once instead.
    gr.TabbedInterface(
        [iface, chat_demo],
        ["Unified Query System", "Chatbot"],
    ).launch(share=True)
# Script entry point: build and launch the UI only when run directly,
# not when imported as a module.
if __name__ == "__main__":
    create_interface()