Update app.py
app.py (CHANGED)
@@ -27,13 +27,13 @@ def recommend(index):
 
 st.set_page_config(page_title='DiscoverCourses', page_icon=':bird:')
 st.header('DiscoverCourses')
-st.
-st.write('© 2023 Rushank Goyal. All rights reserved. Source for the all-MiniLM-L6-v2 model: Wang, Wenhui, et al. "MiniLM: Deep Self-Attention Distillation for Task-Agnostic Compression of Pre-Trained Transformers." arXiv, 25 Feb. 2020, doi:10.48550/arXiv.2002.10957.')
+st.write('')
 
 selected_course = st.selectbox('Pick a course from the dropdown:',course_title_list)
 
 container = st.container()
 maincol1, maincol2 = container.columns(2)
+st.write('')
 
 if maincol1.button('Recommend by title',use_container_width=True):
     output=recommend(np.where((coursedf['ref']+": "+coursedf['title']) == selected_course)[0][0])
@@ -62,4 +62,6 @@ if maincol2.button('Recommend by description',use_container_width=True):
     st.markdown(link, unsafe_allow_html=True)
     link = "[Carta](https://carta-beta.stanford.edu/results/"+course_id+")"
     st.markdown(link, unsafe_allow_html=True)
-    st.divider()
+st.divider()
+
+st.write('© 2023 Rushank Goyal. All rights reserved. Source for the all-MiniLM-L6-v2 model: Wang, Wenhui, et al. "MiniLM: Deep Self-Attention Distillation for Task-Agnostic Compression of Pre-Trained Transformers." arXiv, 25 Feb. 2020, doi:10.48550/arXiv.2002.10957.')
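
Net effect of the commit: the stray, incomplete st. statement and the copyright notice are removed from the header, st.write('') spacers are added after the header and after the two-column row, and the notice moves below an st.divider() at the end of the script so it renders on every run. Below is a minimal sketch of the resulting layout, with an illustrative stand-in for coursedf and recommend (the Space's real catalog, embedding model, and result rendering are not part of this diff):

import numpy as np
import pandas as pd
import streamlit as st

# Illustrative stand-in for the Space's course catalog; the real coursedf
# is loaded elsewhere in app.py and is not shown in this diff.
coursedf = pd.DataFrame({
    'ref': ['CS 106A', 'CS 229'],
    'title': ['Programming Methodology', 'Machine Learning'],
})
course_title_list = (coursedf['ref'] + ": " + coursedf['title']).tolist()

def recommend(index):
    # Placeholder: the real function ranks courses with all-MiniLM-L6-v2
    # embeddings; here it just echoes the selected row.
    return coursedf.iloc[[index]]

st.set_page_config(page_title='DiscoverCourses', page_icon=':bird:')
st.header('DiscoverCourses')
st.write('')  # spacer added by this commit

selected_course = st.selectbox('Pick a course from the dropdown:', course_title_list)

container = st.container()
maincol1, maincol2 = container.columns(2)
st.write('')  # second spacer added by this commit

if maincol1.button('Recommend by title', use_container_width=True):
    # Map the "ref: title" string from the dropdown back to its row index.
    output = recommend(np.where((coursedf['ref'] + ": " + coursedf['title']) == selected_course)[0][0])
    st.write(output)  # illustrative; the real app renders titles plus ExploreCourses/Carta links

st.divider()  # footer now sits at top level, so it renders on every run
st.write('© 2023 Rushank Goyal. All rights reserved. ...')

Run with streamlit run app.py; the divider and copyright footer appear regardless of which button is pressed, whereas before this commit the notice sat above the dropdown.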