Update app.py
app.py
CHANGED
@@ -28,8 +28,12 @@ def recommend(index):
 st.set_page_config(page_title='DiscoverCourses', page_icon=':bird:')
 st.header('DiscoverCourses')
 st.write('')
-st.write("Do you like the tech + social impact focus of CS51? Excited by film-centered courses like FILMEDIA245B? Saw a cool study-abroad course (OSPISTAN76) and want to study that topic on campus?
+st.write("Do you like the tech + social impact focus of CS51? Excited by film-centered courses like FILMEDIA245B? Saw a cool study-abroad course (OSPISTAN76) and want to study that topic on campus?")
 st.write('')
+st.write("Enter DiscoverCourses. Just pick a course and get dozens of recommendations for similar courses based on titles or descriptions. Give it a go! If you have any thoughts on DiscoverCourses (or project ideas or a book recommendation or really anything), shoot me an email at [email protected].")
+st.write('')
+
+st.markdown('<style> a:link {color: white;background-color: transparent;text-decoration: none;}</style>',unsafe_allow_html=True)
 
 selected_course = st.selectbox('Pick a course from the dropdown (or click on it and start typing to search).',course_title_list)
 #st.write("Description: "+coursedf.iloc[np.where((coursedf['ref']+": "+coursedf['title'])==selected_course)[0][0],3])
@@ -63,7 +67,7 @@ if maincol2.button('Discover by description',use_container_width=True):
 st.write(coursedf.iloc[index,3]) #Using the new coursedf because it has proper descriptions for each course
 link1 = "[ExploreCourses ↗](https://explorecourses.stanford.edu/search?q="+course_id+"+"+result.replace(" ","+")+")"
 link2 = "[Carta ↗](https://carta-beta.stanford.edu/results/"+course_id+")"
-st.markdown(
+st.markdown(link1+" "+link2, unsafe_allow_html=True)
 st.divider()
 
 st.write('© 2023 Rushank Goyal. All rights reserved. Source for the all-MiniLM-L6-v2 model: Wang, Wenhui, et al. "MiniLM: Deep Self-Attention Distillation for Task-Agnostic Compression of Pre-Trained Transformers." arXiv, 25 Feb. 2020, doi:10.48550/arXiv.2002.10957.')
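For reference, the link-rendering pattern added in the second hunk can be exercised on its own. The snippet below is only a minimal sketch, not the app itself: course_id and result are made-up placeholder values (the real app derives them from coursedf), while the style injection and the final st.markdown call mirror the lines added above.

# Minimal standalone sketch of the commit's link-rendering pattern.
import streamlit as st

st.set_page_config(page_title='DiscoverCourses', page_icon=':bird:')
st.header('DiscoverCourses')

# Same style injection as the commit: render links white with no underline.
st.markdown('<style> a:link {color: white;background-color: transparent;text-decoration: none;}</style>', unsafe_allow_html=True)

# Placeholder values for this sketch only; the app looks these up per course.
course_id = "CS51"
result = "Example Course Title"

# Build markdown links to ExploreCourses and Carta, then render them on one line.
link1 = "[ExploreCourses ↗](https://explorecourses.stanford.edu/search?q=" + course_id + "+" + result.replace(" ", "+") + ")"
link2 = "[Carta ↗](https://carta-beta.stanford.edu/results/" + course_id + ")"
st.markdown(link1 + " " + link2, unsafe_allow_html=True)
st.divider()

Running the sketch with streamlit run shows the two outbound links side by side, styled by the injected CSS, which is the behavior the replaced st.markdown( line was building toward.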