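"""Resume Enhancement Tool (Gradio app).

Provides four tabs: an ATS-friendliness checker, a resume/job-description match
checker, a resume quality score, and a plain-text-to-Overleaf converter. Each
evaluation is delegated to the HuggingFaceH4/zephyr-7b-beta model via the
Hugging Face InferenceClient.
"""
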
import gradio as gr
from huggingface_hub import InferenceClient
from PyPDF2 import PdfReader
import requests
from bs4 import BeautifulSoup
# Initialize the Inference Client
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
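# chat_completion sends a system prompt plus user content to this hosted
# zephyr-7b-beta model; each feature below parses the model's free-form reply.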

def extract_text_from_pdf(file):
    # PyPDF2 3.x removed PdfFileReader/getNumPages; PdfReader and the .pages
    # iterator are the current API.
    reader = PdfReader(file)
    text = ""
    for page in reader.pages:
        # extract_text() can return None for image-only pages, so guard against it.
        text += page.extract_text() or ""
    return text

def ats_friendly_checker(file):
    resume_text = extract_text_from_pdf(file)
    # Ask the LLM to score the resume for ATS-friendliness.
    system_message = "Evaluate the following resume for ATS-friendliness and provide a score and feedback."
    message = resume_text
    response = client.chat_completion(
        [{"role": "system", "content": system_message}, {"role": "user", "content": message}],
        max_tokens=512,
        temperature=0.7,
        top_p=0.95
    ).choices[0].message["content"]
    # Treat the first line of the reply as the score and the rest as feedback.
    score = response.split("\n")[0].split(":")[-1].strip()
    feedback = "\n".join(response.split("\n")[1:])
    return score, feedback

def scrape_job_description(url):
    # Fetch the posting and reduce it to visible text; get_text() keeps everything
    # on the page (navigation, footer, etc.), so the result can be noisy.
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.text, 'html.parser')
    job_description = soup.get_text(separator=" ", strip=True)
    return job_description

def resume_match_checker(file, job_url):
    resume_text = extract_text_from_pdf(file)
    job_description = scrape_job_description(job_url)
    # Ask the LLM to compare the resume against the scraped job description.
    system_message = "Compare the following resume with the job description and provide a match score."
    message = f"Resume: {resume_text}\n\nJob Description: {job_description}"
    response = client.chat_completion(
        [{"role": "system", "content": system_message}, {"role": "user", "content": message}],
        max_tokens=512,
        temperature=0.7,
        top_p=0.95
    ).choices[0].message["content"]
    # Take whatever follows the last colon in the reply as the match score.
    match_score = response.split(":")[-1].strip()
    return match_score

def resume_quality_score(file):
    resume_text = extract_text_from_pdf(file)
    # Implement resume quality scoring logic using LLM
    system_message = "Evaluate the following resume for overall quality and provide a score."
    message = resume_text
    response = client.chat_completion(
        [{"role": "system", "content": system_message}, {"role": "user", "content": message}],
        max_tokens=512,
        temperature=0.7,
        top_p=0.95
    ).choices[0].message["content"]
    quality_score = response.split(":")[-1].strip()
    return quality_score

def text_to_overleaf(resume_text):
    # Implement the conversion to Overleaf code using LLM
    system_message = "Convert the following resume text to Overleaf code."
    message = resume_text
    response = client.chat_completion(
        [{"role": "system", "content": system_message}, {"role": "user", "content": message}],
        max_tokens=512,
        temperature=0.7,
        top_p=0.95
    ).choices[0].message["content"]
    overleaf_code = response
    return overleaf_code

# Define the Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Resume Enhancement Tool\nEnhance your resume with the following features.")

    with gr.Tab("ATS-Friendly Checker"):
        with gr.Row():
            resume = gr.File(label="Upload your Resume (PDF)")
            # The model replies in free text, so the score outputs are textboxes rather than numbers.
            score = gr.Textbox(label="ATS Score", interactive=False)
            feedback = gr.Textbox(label="Feedback", interactive=False)
        resume.upload(ats_friendly_checker, resume, [score, feedback])

    with gr.Tab("Resume Match Checker"):
        with gr.Row():
            resume = gr.File(label="Upload your Resume (PDF)")
            job_url = gr.Textbox(label="Job Description URL")
            match_score = gr.Textbox(label="Match Score", interactive=False)
        gr.Button("Check Match").click(resume_match_checker, [resume, job_url], match_score)

    with gr.Tab("Resume Quality Score"):
        with gr.Row():
            resume = gr.File(label="Upload your Resume (PDF)")
            quality_score = gr.Textbox(label="Quality Score", interactive=False)
        resume.upload(resume_quality_score, resume, quality_score)

    with gr.Tab("Text to Overleaf Code"):
        with gr.Row():
            resume_text = gr.Textbox(label="Resume Text")
            overleaf_code = gr.Textbox(label="Overleaf Code", interactive=False)
        resume_text.submit(text_to_overleaf, resume_text, overleaf_code)

    gr.Markdown("---\nBuilt with love by [Bahae Eddine HALIM](https://www.linkedin.com/in/halimbahae/)")

if __name__ == "__main__":
    demo.launch()