Update utils.py
utils.py CHANGED
@@ -20,9 +20,21 @@ def extracted_data(pages_data):
 
     Expected output: remove any dollar symbols {{'Invoice no.': '1001329','Description': 'Office Chair','Quantity': '2','Date': '5/4/2023','Unit price': '1100.00$','Amount': '2200.00$','Total': '2200.00$','Email': '[email protected]','Phone number': '9999999999','Address': 'Mumbai, India'}}
     """
-    prompt_template = PromptTemplate(input_variables=["pages"], template=template)
-    llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q8_0.bin",model_type='llama')
+    # prompt_template = PromptTemplate(input_variables=["pages"], template=template)
+    # llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q8_0.bin",model_type='llama')
 
+
+
+
+    llm = AutoModelForCausalLM.from_pretrained("TheBloke/Llama-2-7B-Chat-GGML", model_file="llama-2-7b-chat.ggmlv3.q8_0.bin")
+
+    #Creating the final PROMPT
+    prompt = PromptTemplate(
+        input_variables=["pages"],
+        template=template,)
+    #Generating the response using LLM
+    #Last week langchain has recommended to use 'invoke' function for the below please :)
+    response=llm(prompt.format(email_topic=form_input,sender=email_sender,recipient=email_recipient,style=email_style))
     output_text=llm(prompt_template.format(pages=pages_data))
 
     full_response = ''
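As committed, the new hunk would not run: the added line `response=llm(prompt.format(email_topic=form_input, ...))` formats the prompt with variables copied from an unrelated email-generator example (the template only declares `pages`, and `form_input`, `email_sender`, `email_recipient`, and `email_style` are not defined in this function), and the next line still references `prompt_template`, which is now commented out. Both would raise `NameError`. The following is a minimal sketch of how the changed region could look once those leftovers are removed, keeping the ctransformers model this commit introduces; it assumes `template` is the invoice-extraction prompt defined earlier in `extracted_data`, and the signature is simplified here only to keep the sketch self-contained.

from ctransformers import AutoModelForCausalLM
from langchain.prompts import PromptTemplate

def extracted_data(pages_data, template):
    # Load the quantized Llama 2 chat model directly with ctransformers,
    # as this commit does (the GGML file is fetched from the Hugging Face Hub).
    llm = AutoModelForCausalLM.from_pretrained(
        "TheBloke/Llama-2-7B-Chat-GGML",
        model_file="llama-2-7b-chat.ggmlv3.q8_0.bin",
    )

    # Creating the final PROMPT: the template only exposes the "pages" variable.
    prompt = PromptTemplate(input_variables=["pages"], template=template)

    # A ctransformers model is called directly with the formatted prompt string.
    output_text = llm(prompt.format(pages=pages_data))

    full_response = output_text
    return full_response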
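On the 'invoke' comment: if the project instead keeps the commented-out LangChain `CTransformers` wrapper, the deprecation warning about calling an LLM directly can be avoided by calling `invoke`. A hedged sketch under that assumption follows; `extracted_data_invoke` is a hypothetical name, and the import path depends on the installed LangChain version.

from langchain.llms import CTransformers  # langchain_community.llms on newer LangChain versions
from langchain.prompts import PromptTemplate

def extracted_data_invoke(pages_data, template):
    # Hypothetical alternative: keep the original LangChain wrapper around the local GGML model.
    llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q8_0.bin", model_type="llama")
    prompt = PromptTemplate(input_variables=["pages"], template=template)
    # LLMs are Runnables in recent LangChain, so invoke() replaces calling llm(...) directly.
    return llm.invoke(prompt.format(pages=pages_data))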