Gopikanth123 committed on
Commit
f5c5e39
·
verified ·
1 Parent(s): 3c88d1c

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +121 -109
main.py CHANGED
@@ -1,114 +1,126 @@
1
- import os
2
- import shutil
3
- from flask import Flask, render_template, request, jsonify
4
- from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
5
- from llama_index.llms.huggingface import HuggingFaceInferenceAPI
6
- from llama_index.embeddings.huggingface import HuggingFaceEmbedding
7
- from huggingface_hub import InferenceClient
8
-
9
- # Initialize environment and settings
10
- repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"
11
- llm_client = InferenceClient(
12
- model=repo_id,
13
- token=os.getenv("HF_TOKEN"),
14
- )
15
-
16
- os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")
17
- Settings.llm = HuggingFaceInferenceAPI(
18
- model_name="meta-llama/Meta-Llama-3-8B-Instruct",
19
- tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
20
- context_window=3000,
21
- token=os.getenv("HF_TOKEN"),
22
- max_new_tokens=512,
23
- generate_kwargs={"temperature": 0.1},
24
- )
25
- Settings.embed_model = HuggingFaceEmbedding(
26
- model_name="BAAI/bge-small-en-v1.5"
27
- )
28
-
29
- PERSIST_DIR = "db"
30
- PDF_DIRECTORY = 'data'
31
-
32
- # Ensure directories exist
33
- os.makedirs(PDF_DIRECTORY, exist_ok=True)
34
- os.makedirs(PERSIST_DIR, exist_ok=True)
35
-
36
- chat_history = []
37
- current_chat_history = []
38
-
39
- def data_ingestion_from_directory():
40
- # Clear previous data by removing the persist directory
41
- if os.path.exists(PERSIST_DIR):
42
- shutil.rmtree(PERSIST_DIR)
43
-
44
- os.makedirs(PERSIST_DIR, exist_ok=True)
45
- new_documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
46
 
47
- if not new_documents:
48
- print("No documents were found or loaded.")
49
- return
50
 
51
- index = VectorStoreIndex.from_documents(new_documents)
52
- index.storage_context.persist(persist_dir=PERSIST_DIR)
53
- print("Persist data cleared and updated with new data.")
54
-
55
- def handle_query(query):
56
- chat_text_qa_msgs = [
57
- ("user", """
58
- You are the Taj Hotel chatbot and your name is Taj Hotel Helper. Your goal is to provide accurate, professional, and helpful answers to user queries based on the given Taj hotel's data. Always ensure your responses are clear and concise. Give response within 10-15 words only. You need to give an answer in the same language used by the user.
59
- {context_str}
60
- Question:
61
- {query_str}
62
- """)
63
- ]
64
- text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
65
 
66
- storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
67
- index = load_index_from_storage(storage_context)
68
 
69
- context_str = ""
70
- for past_query, response in reversed(current_chat_history):
71
- if past_query.strip():
72
- context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
73
-
74
- query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
75
- print(f"User query: {query}")
76
- answer = query_engine.query(query)
77
-
78
- if hasattr(answer, 'response'):
79
- response = answer.response
80
- elif isinstance(answer, dict) and 'response' in answer:
81
- response = answer['response']
82
- else:
83
- response = "Sorry, I couldn't find an answer."
 
84
 
85
- current_chat_history.append((query, response))
86
- return response
87
-
88
- app = Flask(__name__)
89
-
90
- def generate_response(query):
91
- try:
92
- bot_response = handle_query(query)
93
- return bot_response
94
- except Exception as e:
95
- return f"Error fetching the response: {str(e)}"
96
-
97
- @app.route('/')
98
- def index():
99
- return render_template('index.html')
100
-
101
- @app.route('/chat', methods=['POST'])
102
- def chat():
103
- try:
104
- user_message = request.json.get("message")
105
- if not user_message:
106
- return jsonify({"response": "Please say something!"})
107
-
108
- bot_response = generate_response(user_message)
109
- return jsonify({"response": bot_response})
110
- except Exception as e:
111
- return jsonify({"response": f"An error occurred: {str(e)}"})
112
-
113
- if __name__ == '__main__':
114
- app.run(debug=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import shutil
3
+ from flask import Flask, render_template, request, jsonify
4
+ from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
5
+ from llama_index.llms.huggingface import HuggingFaceInferenceAPI
6
+ from llama_index.embeddings.huggingface import HuggingFaceEmbedding
7
+ from huggingface_hub import InferenceClient
8
+
9
# Ensure HF_TOKEN is set — both the raw InferenceClient and the
# llama_index LLM wrapper below authenticate with it.
HF_TOKEN = os.getenv("HF_TOKEN")
if not HF_TOKEN:
    raise ValueError("HF_TOKEN environment variable not set.")

# One model id reused for the raw client, the LLM and its tokenizer.
repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"
llm_client = InferenceClient(
    model=repo_id,
    token=HF_TOKEN,
)

# Configure Llama index settings
Settings.llm = HuggingFaceInferenceAPI(
    model_name=repo_id,
    tokenizer_name=repo_id,
    context_window=3000,
    token=HF_TOKEN,
    max_new_tokens=512,
    generate_kwargs={"temperature": 0.1},  # low temperature: near-deterministic replies
)
Settings.embed_model = HuggingFaceEmbedding(
    model_name="BAAI/bge-small-en-v1.5"
)

PERSIST_DIR = "db"      # where the vector index is persisted
PDF_DIRECTORY = 'data'  # source documents ingested into the index

# Ensure directories exist
os.makedirs(PDF_DIRECTORY, exist_ok=True)
os.makedirs(PERSIST_DIR, exist_ok=True)

# NOTE(review): chat_history is never read or written anywhere in this
# file — presumably dead; current_chat_history accumulates
# (query, response) pairs appended by handle_query().
chat_history = []
current_chat_history = []
41
+
42
def data_ingestion_from_directory():
    """Rebuild the persisted vector index from the documents in PDF_DIRECTORY.

    Deletes any previously persisted index under PERSIST_DIR, reloads every
    document found in PDF_DIRECTORY, builds a fresh VectorStoreIndex and
    persists it back to PERSIST_DIR. Returns None.
    """
    # Clear previous data by removing the persist directory
    if os.path.exists(PERSIST_DIR):
        shutil.rmtree(PERSIST_DIR)  # Remove the persist directory and all its contents

    # Recreate the persist directory after removal
    os.makedirs(PERSIST_DIR, exist_ok=True)

    # Load new documents from the directory
    new_documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()

    # Guard restored from the previous revision of this file: without it,
    # an empty data directory would silently build and persist an empty
    # index, and later queries would fail in non-obvious ways.
    if not new_documents:
        print("No documents were found or loaded.")
        return

    # Create a new index with the new documents
    index = VectorStoreIndex.from_documents(new_documents)

    # Persist the new index
    index.storage_context.persist(persist_dir=PERSIST_DIR)
58
+
59
def handle_query(query):
    """Answer one user query against the persisted index.

    Builds a QA prompt template, reloads the index from PERSIST_DIR,
    queries it, records the (query, response) pair in the module-level
    current_chat_history list, and returns the response text.
    """
    chat_text_qa_msgs = [
        (
            "user",
            """
            You are the Taj Hotel chatbot and your name is Taj Hotel Helper. Your goal is to provide accurate, professional, and helpful answers to user queries based on the given Taj hotel's data. Always ensure your responses are clear and concise. Give response within 10-15 words only. You need to give an answer in the same language used by the user.
            {context_str}
            Question:
            {query_str}
            """
        )
    ]
    text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)

    # The index is reloaded from disk on every request (no caching).
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)

    # Fold prior turns (newest first, due to reversed()) into one string.
    # NOTE(review): the loop variable `response` is reused below for the
    # final answer; harmless but easy to misread.
    context_str = ""
    for past_query, response in reversed(current_chat_history):
        if past_query.strip():
            context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"

    # NOTE(review): as_query_engine() is not documented to accept a
    # `context_str` kwarg — the {context_str} slot in the template is
    # normally filled with retrieved node text by the engine itself.
    # Verify that the chat history built above actually reaches the
    # prompt; it may be silently dropped.
    query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
    print(query)
    answer = query_engine.query(query)

    # The query result may expose its text as an attribute or a dict key
    # depending on the llama_index version; fall back to a canned reply.
    if hasattr(answer, 'response'):
        response = answer.response
    elif isinstance(answer, dict) and 'response' in answer:
        response = answer['response']
    else:
        response = "Sorry, I couldn't find an answer."
    current_chat_history.append((query, response))
    return response
92
+
93
app = Flask(__name__)

# Data ingestion
# NOTE(review): this runs at import time, so every process start (and,
# presumably, the debug reloader's child process) wipes and rebuilds the
# index — confirm this is intended rather than a one-off setup step.
data_ingestion_from_directory()
97
+
98
+ # Generate Response
99
def generate_response(query):
    """Answer *query* via handle_query(), mapping any failure to an error string.

    Never raises: exceptions from the query pipeline are converted into a
    human-readable message so the HTTP layer always gets a string back.
    """
    try:
        return handle_query(query)
    except Exception as err:
        return f"Error fetching the response: {str(err)}"
106
+
107
+ # Route for the homepage
108
@app.route('/')
def index():
    """Render and return the chat UI homepage."""
    page = render_template('index.html')
    return page
111
+
112
+ # Route to handle chatbot messages
113
@app.route('/chat', methods=['POST'])
def chat():
    """Handle a POSTed chat message and return the bot reply as JSON.

    Expects a JSON body with a "message" key; every outcome (empty
    message, normal reply, unexpected error) is returned in the same
    single-key JSON envelope.
    """
    def reply(text):
        # All branches share the {"response": ...} envelope.
        return jsonify({"response": text})

    try:
        user_message = request.json.get("message")
        if not user_message:
            return reply("Please say something!")
        return reply(generate_response(user_message))
    except Exception as exc:
        return reply(f"An error occurred: {str(exc)}")
124
+
125
if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug debugger and
    # auto-reloader — convenient locally, unsafe in production.
    app.run(debug=True)