import os
import time

import gradio as gr
from llama_index.core import (
    StorageContext, load_index_from_storage, VectorStoreIndex,
    SimpleDirectoryReader, ChatPromptTemplate, Settings,
)
from llama_index.llms.huggingface import HuggingFaceInferenceAPI
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from deep_translator import GoogleTranslator

# Hugging Face token must be provided via the HF_TOKEN environment variable
HF_TOKEN = os.getenv("HF_TOKEN")
if not HF_TOKEN:
    raise EnvironmentError("The HF_TOKEN environment variable is not set.")
os.environ["HF_TOKEN"] = HF_TOKEN  # keep it available to downstream libraries

# Configure LlamaIndex settings
Settings.llm = HuggingFaceInferenceAPI(
    model_name="meta-llama/Meta-Llama-3-8B-Instruct",
    tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
    context_window=3000,
    token=HF_TOKEN,
    max_new_tokens=512,
    generate_kwargs={"temperature": 0.1},
)
Settings.embed_model = HuggingFaceEmbedding(  
    model_name="BAAI/bge-small-en-v1.5"  
)  

PERSIST_DIR = "db"
PDF_DIRECTORY = "data"

# Ensure directories exist  
os.makedirs(PDF_DIRECTORY, exist_ok=True)  
os.makedirs(PERSIST_DIR, exist_ok=True)  

# Build the vector index from the PDFs in PDF_DIRECTORY and persist it to disk
def data_ingestion_from_directory():
    documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=PERSIST_DIR)

def initialize():
    start_time = time.time()
    data_ingestion_from_directory()  # Process PDF ingestion at startup
    print(f"Data ingestion time: {time.time() - start_time:.2f} seconds")

initialize()  # Run initialization tasks  
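
# Note: initialize() re-ingests every PDF on each startup. A minimal sketch of
# reusing a previously persisted index instead (this assumes the contents of
# PERSIST_DIR from an earlier run are still valid for the current PDFs):
#
#     if os.listdir(PERSIST_DIR):
#         print(f"Reusing existing index in {PERSIST_DIR}")
#     else:
#         data_ingestion_from_directory()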

# Handle user queries
def handle_query(query, language):
    chat_text_qa_msgs = [
        (
            "user",
            """
            You are the hotel voice chatbot, and your name is Hotel Helper.
            Your goal is to provide accurate, professional, and helpful answers
            to user queries based on the hotel's data. Keep every response
            clear, concise, and within 10-15 words. Answer in the same language
            the user used.
            {context_str}
            Question:
            {query_str}
            """
        )
    ]
    text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
    
    # Load the persisted index and query it with the custom prompt template;
    # the query engine fills {context_str} with the retrieved document chunks
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)

    query_engine = index.as_query_engine(text_qa_template=text_qa_template)
    answer = query_engine.query(query)
    
    if hasattr(answer, 'response'):  
        response = answer.response  
    elif isinstance(answer, dict) and 'response' in answer:  
        response = answer['response']  
    else:  
        response = "Sorry, I couldn't find an answer."  

    # Translate response if needed  
    if language:  
        try:  
            translator = GoogleTranslator(target=language.split('-')[0])  # Translate to the specified language  
            response = translator.translate(response)  
        except Exception as e:  
            print(f"Translation error: {e}")  
            response = "Sorry, I couldn't translate the response."  

    return response  

# Gradio interface  
def chatbot_interface(message, language):  
    response = handle_query(message, language)  
    return response  
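
# Example (hypothetical query, for illustration only): the handler can also be
# called directly, without the Gradio UI:
#
#     print(chatbot_interface("What time is check-out?", "fr"))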

# Create Gradio app  
iface = gr.Interface(  
    fn=chatbot_interface,  
    inputs=[  
        gr.inputs.Textbox(label="Your Message"),  
        gr.inputs.Textbox(label="Language (e.g., en, fr, es)", default="en")  
    ],  
    outputs="text",  
    title="Hotel Chatbot",  
    description="Ask questions about the hotel and get responses."  
)  

# Launch the Gradio app  
if __name__ == "__main__":  
    iface.launch()
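    # Optional: pass share=True to iface.launch() to get a temporary public
    # Gradio URL, which is handy for quick testing from another device.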