atifsial123 committed
Update app.py
app.py
CHANGED
@@ -23,27 +23,33 @@ def load_dataset(file_path='PEC_Numbers_and_Names.xlsx'):
     df = pd.read_excel(file_path)
     return df
 
-# …
-…
-…
-…
-…
-…
-…
+# Debugging function to get PEC number based on the name
+def get_pec_number(name, df):
+    print("Column names in DataFrame:", df.columns.tolist())  # Print the column names
+    print(f"Looking for Name: '{name}'")
+
+    # Normalize the input and dataset
+    df['Name'] = df['Name'].str.strip().str.lower()
+    name = name.strip().lower()
+
+    result = df[df['Name'] == name]
+
     if not result.empty:
-…
+        print(f"Found PEC Number: {result.iloc[0]['PEC No.']}")
+        return result.iloc[0]['PEC No.']
     else:
-…
+        print("Name not found.")
+        return "Name not found."
 
-# Function to process the …
-def process_with_model(…
-    inputs = tokenizer(…
+# Function to process the name using the Hugging Face model
+def process_with_model(name):
+    inputs = tokenizer(name, return_tensors="pt")
     with torch.no_grad():
         outputs = model(**inputs)
     return outputs.last_hidden_state.mean(dim=1).squeeze().tolist()
 
 # Combine both functions to create a prediction
-def predict(pec_number, file):
+def predict(name, file):
     try:
         # Load the dataset from the uploaded file if provided
         if file is not None:
@@ -51,27 +57,28 @@ def predict(pec_number, file):
         else:
             df = load_dataset()
 
-…
-        model_output = process_with_model(…
-        return f"…
+        pec_number = get_pec_number(name, df)
+        model_output = process_with_model(name)
+        return f"PEC Number: {pec_number}\nModel Output: {model_output}"
     except FileNotFoundError as e:
         return str(e)
 
+# Load the model and tokenizer from Hugging Face
+tokenizer = AutoTokenizer.from_pretrained("Alibaba-NLP/gte-multilingual-base", trust_remote_code=True)
+model = AutoModel.from_pretrained("Alibaba-NLP/gte-multilingual-base", trust_remote_code=True)
+
 # Build the Gradio interface with file upload option
 iface = gr.Interface(
     fn=predict,
     inputs=[
-        gr.Textbox(lines=1, placeholder="Enter …
+        gr.Textbox(lines=1, placeholder="Enter Name..."),
         gr.File(label="Upload PEC Numbers and Names file (optional)")
     ],
     outputs="text",
-    title="PEC Number Lookup with Model Integration",
-    description="Enter a …
+    title="Name to PEC Number Lookup with Model Integration",
+    description="Enter a name to retrieve the corresponding PEC number and process it with a Hugging Face model. Optionally, upload the Excel file if not found."
 )
 
 # Run the Gradio interface
 if __name__ == "__main__":
     iface.launch()
-
-
-
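For a quick local check of the new name-based lookup outside the Space, a minimal sketch along the following lines can be run against a small in-memory table. The 'Name' and 'PEC No.' column names match the ones used in the diff; the sample rows and the query string are hypothetical.

import pandas as pd

# Hypothetical stand-in for PEC_Numbers_and_Names.xlsx.
df = pd.DataFrame({
    "Name": ["  Ali Khan ", "Sara Ahmed"],
    "PEC No.": ["CIVIL/12345", "ELECT/67890"],
})

# Same normalization as get_pec_number: strip whitespace and lowercase
# both sides so the comparison is case-insensitive.
df["Name"] = df["Name"].str.strip().str.lower()
query = "ali KHAN".strip().lower()

result = df[df["Name"] == query]
print(result.iloc[0]["PEC No."] if not result.empty else "Name not found.")

Note that the .str accessor belongs to the pandas Series; the plain query string only needs the built-in str.strip() and str.lower().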