import pandas as pd
import streamlit as st
from transformers import AutoTokenizer, TFAutoModelForQuestionAnswering
from transformers import pipeline


def load_data(file):
    # Read only the first 50 rows of the uploaded CSV to keep the demo responsive
    df = pd.read_csv(file, encoding='utf-8', nrows=50)
    return df


#@st.cache  # the tokenizer cannot be cached
def load_pipeline(model_cp, tokenizer_cp):
    # Build a question-answering pipeline from the given model and tokenizer checkpoints
    return pipeline("question-answering", model=model_cp, tokenizer=tokenizer_cp)
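
# Note (assumption, not stated in the original app): a transformers question-answering
# pipeline call returns a dict of the form {'score': ..., 'start': ..., 'end': ..., 'answer': ...},
# i.e. the predicted answer span plus a confidence score; that dict is what
# st.write displays at the bottom of this script.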

# Page config
title = "Recipe Improver"
icon = "🍣"
st.set_page_config(page_title=title, page_icon=icon)
st.title(title)

# Load tokenizer and model
model_cp = "aidan-o-brien/recipe-improver"
tokenizer_cp = "albert-base-v2"
question_answer = load_pipeline(model_cp, tokenizer_cp)
st.write("Model and tokenizer successfully loaded.")

# Load csv
uploaded_file = st.file_uploader("Choose a csv file", type="csv", key='file_uploader')
if uploaded_file is not None:
    df = load_data(uploaded_file)
    st.write(df.head())

    # Run inference on first example
    first_example = df['review'][0]
    question = "how to improve this recipe?"
    answer = question_answer(question=question, context=first_example)

    # Present results
    st.write(answer)
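
# Usage sketch (assumptions: this script is saved as app.py and the uploaded CSV has a
# 'review' column, which df['review'][0] above requires). Run the app locally with:
#   streamlit run app.py
# A minimal test file could be generated with, e.g.:
#   pd.DataFrame({"review": ["Great recipe, but it needed more garlic."]}).to_csv("reviews.csv", index=False)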