# QAME / app.py
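# Streamlit demo: question answering over my resume with a Hugging Face pipeline.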
import streamlit as st
from transformers import pipeline
import requests
import pandas as pd
# Fetch the resume text that is used as the question-answering context
url = 'https://elharchaouiresume.s3.eu-west-3.amazonaws.com/resume'
response = requests.get(url)
resume_context = None
load_error = None
if response.status_code == 200:
    resume_context = response.text
else:
    load_error = f"Context loading error: {response.status_code}"
# Local fallback, kept for reference:
# with open('resume') as f:
#     resume_context = f.read()
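# Optional hardening (a sketch, not part of the original app): requests.get can
# raise exceptions and hang without a timeout, so the whole fetch could be
# wrapped instead, e.g.:
#
# try:
#     response = requests.get(url, timeout=10)
#     response.raise_for_status()
#     resume_context = response.text
# except requests.RequestException as exc:
#     load_error = f"Context loading error: {exc}"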
model_name = "deepset/tinyroberta-squad2"
# Load the question-answering pipeline (the model is downloaded on first use)
nlp = pipeline('question-answering', model=model_name, tokenizer=model_name)
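# Optional (a sketch, assuming a Streamlit version that provides st.cache_resource):
# caching the loaded pipeline keeps the model from being reloaded on every rerun.
#
# @st.cache_resource
# def load_qa_pipeline(name):
#     return pipeline('question-answering', model=name, tokenizer=name)
#
# nlp = load_qa_pipeline(model_name)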
# Streamlit app layout
st.title("Ask me any question about my career")
st.write("This app uses a pre-trained model from Hugging Face to perform question answering, about my career as context, on user input.")
# User input
user_input = st.text_area("Enter a question:", value="", height=150, max_chars=500)
if user_input:
    if resume_context:
        print(resume_context)  # debug: log the loaded context to the console
        # Build the pipeline input from the user's question and the resume context
        QA_input = {
            'question': user_input,
            'context': resume_context
        }
        # Run question answering on the user input
        result = nlp(QA_input)
        # Extract the answer span returned by the pipeline
        answer = result["answer"]
    else:
        answer = f"It seems there was a problem loading the context. More details: {load_error}"
    st.write(f"Response: {answer}")
else:
    st.write("Please enter a question to analyze.")
# Display a table with example inputs
st.write("Example inputs:")
example_inputs = [
{"Inputs example": "What programming languages you have experience in?"},
{"Inputs example": "What cloud platforms are you familiar with?"},
{"Inputs example": "What are your skills in machine learning?"},
{"Inputs example": "The future of technology is..."},
]
example_df = pd.DataFrame(example_inputs)
st.table(example_df)