import numpy as np
import requests
import streamlit as st

# def main():
st.title("Scientific Question Generation")
# Two checkpoints fine-tuned for scientific question generation plus the base FLAN-T5 XXL model.
checkpoints = ['dhmeltzer/bart-large_askscience-qg',
               'dhmeltzer/flan-t5-base_askscience-qg',
               'google/flan-t5-xxl']

# Hugging Face Inference API token, read from Streamlit secrets.
headers = {"Authorization": f"Bearer {st.secrets['HF_token']}"}
def query(checkpoint, payload):
    """Send a request to the Hugging Face Inference API for the given checkpoint."""
    API_URL = f"https://api-inference.huggingface.co/models/{checkpoint}"
    response = requests.post(API_URL,
                             headers=headers,
                             json=payload)
    return response.json()
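
# Note: for text-to-text generation checkpoints the Inference API is expected to
# return a list of the form [{"generated_text": "..."}]; the parsing further
# down assumes that shape.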
# User input
user_input = st.text_area("Question Generator",
                          """Black holes are the most gravitationally dense objects in the universe.""")

# Filters
st.sidebar.markdown("**Filters**")
temperature = st.sidebar.slider("Temperature", 0.0, 1.0, 0.0, 0.1)
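# Temperature controls how random the decoding is: 0.0 keeps generation
# essentially deterministic, while values closer to 1.0 yield more varied questions.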
if user_input:
    for checkpoint in checkpoints:
        # Generation parameters go under "parameters"; "wait_for_model" is an
        # Inference API option that blocks until the model has finished loading.
        output = query(checkpoint,
                       {"inputs": user_input,
                        "parameters": {"temperature": temperature},
                        "options": {"wait_for_model": True}})[0]['generated_text']

        model_name = checkpoint.split('/')[1]
        st.write(f'Model {model_name}: {output}')
# if __name__ == "__main__":
#     main()
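
# To run the app locally (assuming this file is saved as app.py and the HF_token
# secret is defined in .streamlit/secrets.toml):
#   streamlit run app.py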