File size: 1,266 Bytes
dfbe641
 
 
 
27ee478
 
dfbe641
d21a4cc
 
 
dfbe641
d21a4cc
 
 
 
 
27ee478
 
 
d21a4cc
27ee478
dfbe641
27ee478
 
 
dfbe641
27ee478
 
dfbe641
27ee478
 
dfbe641
27ee478
d21a4cc
 
 
 
 
 
 
 
dfbe641
27ee478
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import numpy as np
import requests
import streamlit as st

# Streamlit front end: generate scientific questions from user-supplied
# text via several hosted Hugging Face models.
st.title("Scientific Question Generation")

# Model checkpoints queried through the Hugging Face Inference API.
checkpoints = [
    'dhmeltzer/bart-large_askscience-qg',
    'dhmeltzer/flan-t5-base_askscience-qg',
    'google/flan-t5-xxl',
]

# Inference API token is read from Streamlit's secrets store.
headers = {"Authorization": f"Bearer {st.secrets['HF_token']}"}

def query(checkpoint, payload):
    """POST `payload` to the Hugging Face Inference API for `checkpoint`.

    Parameters
    ----------
    checkpoint : str
        Model id on the Hub, e.g. 'google/flan-t5-xxl'.
    payload : dict
        JSON body forwarded to the inference endpoint.

    Returns
    -------
    The decoded JSON response (shape depends on the model/endpoint).

    Bug fix: the original f-string ended in '{checkpoint}}' — the '}}'
    escape emits a literal '}', so every URL carried a trailing '}' and
    every request hit a nonexistent endpoint.
    """
    API_URL = f"https://api-inference.huggingface.co/models/{checkpoint}"

    response = requests.post(API_URL,
                             headers=headers,
                             json=payload)

    return response.json()

# User search: text the questions will be generated from.
user_input = st.text_area("Question Generator",
                            """Black holes are the most gravitationally dense objects in the universe.""")

# Filters
st.sidebar.markdown("**Filters**")

# Sampling temperature forwarded to the inference API (0.0 = greedy decoding).
temperature = st.sidebar.slider("Temperature", 0.0, 1.0, 0.0, .1)

# NOTE(fix): removed `vector = query([user_input])` — it called the
# two-argument `query` with a single argument (TypeError on every page
# load) and its result was never used.

if user_input:
    for checkpoint in checkpoints:
        # First candidate generation from the model's JSON response.
        output = query(checkpoint, {
            "inputs": user_input,
            "temperature": temperature,
            "wait_for_model": True})[0][0]['generated_text']

        # Fixes: split the current `checkpoint` string (the original
        # called .split on the `checkpoints` list → AttributeError), and
        # actually interpolate `output` (the original printed the
        # literal text 'output').
        model_name = checkpoint.split('/')[1]
        st.write(f'Model {model_name}: {output}')


#if __name__ == "__main__":
#    main()