Update app.py
app.py CHANGED
@@ -19,9 +19,9 @@ client = chromadb.Client()
 collection = client.get_or_create_collection("patents")
 collection.add(documents=docs,ids=ids)

-
-
-
+def text_embedding(input):
+    model = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
+    return model.encode(input)

 def gen_context(query):
     vector = text_embedding(query).tolist()
@@ -29,13 +29,20 @@ def gen_context(query):
     res = "\n".join(str(item) for item in results['documents'][0])
     return res

-
-
-
-
+
+def chat_completion(query):
+
+    length = 1000
+    context = gen_context(query)
+
+    user_prompt = f"""Based on the context:{context}Answer the below query:{query}"""
+    system_prompt = """You are a helpful AI assistant that can answer questions on the patents dataset. Answer based on the context provided.If you cannot find the correct answer, say I don't know. Be concise and just include the response"""
+    final_prompt = f"""<s>[INST]<<SYS>>{system_prompt}<</SYS>>{user_prompt}[/INST]"""
+

     return client.text_generation(prompt=final_prompt,max_new_tokens = length).strip()

+
 client = InferenceClient(model = "mistralai/Mixtral-8x7B-Instruct-v0.1")

 demo = gr.Interface(fn=chat_completion,
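For context, a minimal sketch of how the updated app.py could hang together end to end. Everything outside the hunks above is an assumption rather than something this diff shows: the imports, the placeholder docs/ids, the collection.query() retrieval step inside gen_context(), the Interface inputs/outputs, and the final demo.launch().

# Sketch only -- imports, sample docs/ids, the retrieval step, and the Gradio
# wiring are assumptions; only the function bodies mirror the diff above.
import chromadb
import gradio as gr
from huggingface_hub import InferenceClient
from sentence_transformers import SentenceTransformer

chroma_client = chromadb.Client()          # the diff reuses the name `client` for this
collection = chroma_client.get_or_create_collection("patents")

# Placeholder corpus; in app.py the real docs/ids come from the patents dataset.
docs = ["Patent A: a self-sealing widget.", "Patent B: a low-power gadget."]
ids = ["patent-a", "patent-b"]
collection.add(documents=docs, ids=ids)

def text_embedding(input):
    # Embed the query with the same MiniLM model assumed to back the collection.
    model = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
    return model.encode(input)

def gen_context(query):
    vector = text_embedding(query).tolist()
    # Assumed retrieval step (not visible in the diff): nearest documents by embedding.
    results = collection.query(query_embeddings=[vector], n_results=2)
    return "\n".join(str(item) for item in results['documents'][0])

client = InferenceClient(model="mistralai/Mixtral-8x7B-Instruct-v0.1")

def chat_completion(query):
    length = 1000
    context = gen_context(query)
    user_prompt = f"""Based on the context:{context}Answer the below query:{query}"""
    system_prompt = """You are a helpful AI assistant that can answer questions on the patents dataset. Answer based on the context provided. If you cannot find the correct answer, say I don't know. Be concise and just include the response."""
    final_prompt = f"""<s>[INST]<<SYS>>{system_prompt}<</SYS>>{user_prompt}[/INST]"""
    return client.text_generation(prompt=final_prompt, max_new_tokens=length).strip()

# inputs/outputs are assumed; the diff cuts off after fn=chat_completion.
demo = gr.Interface(fn=chat_completion, inputs="text", outputs="text")
demo.launch()

One design note: text_embedding() reloads the SentenceTransformer on every call; hoisting the model to module level would avoid repeated loads. The <<SYS>>...<</SYS>> tags come from Llama-2's prompt template, while Mixtral's documented instruct format is just [INST] ... [/INST]; the model generally tolerates the extra tags, but this is worth keeping in mind.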