rushankg committed on
Commit
d9a6e1a
·
1 Parent(s): 72f2133

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -9
app.py CHANGED
@@ -28,8 +28,10 @@ def recommend(index):
28
  st.set_page_config(page_title='DiscoverCourses', page_icon=':bird:')
29
  st.header('DiscoverCourses')
30
  st.write('')
 
31
 
32
- selected_course = st.selectbox('Pick a course from the dropdown:',course_title_list)
 
33
 
34
  container = st.container()
35
  maincol1, maincol2 = container.columns(2)
@@ -43,10 +45,9 @@ if maincol1.button('Recommend by title',use_container_width=True):
43
  st.subheader(course_id+": "+result)
44
  with st.expander("See description"):
45
  st.write(coursedf.iloc[index,3]) #Using the new coursedf because it has proper descriptions for each course
46
- link = "[ExploreCourses](https://explorecourses.stanford.edu/search?q="+course_id+"+"+result.replace(" ","+")+")"
47
- st.markdown(link, unsafe_allow_html=True)
48
- link = "[Carta](https://carta-beta.stanford.edu/results/"+course_id+")"
49
- st.markdown(link, unsafe_allow_html=True)
50
  st.divider()
51
 
52
  if maincol2.button('Recommend by description',use_container_width=True):
@@ -58,10 +59,9 @@ if maincol2.button('Recommend by description',use_container_width=True):
58
  st.subheader(course_id+": "+result)
59
  with st.expander("See description"):
60
  st.write(coursedf.iloc[index,3]) #Using the new coursedf because it has proper descriptions for each course
61
- link = "[ExploreCourses](https://explorecourses.stanford.edu/search?q="+course_id+"+"+result.replace(" ","+")+")"
62
- st.markdown(link, unsafe_allow_html=True)
63
- link = "[Carta](https://carta-beta.stanford.edu/results/"+course_id+")"
64
- st.markdown(link, unsafe_allow_html=True)
65
  st.divider()
66
 
67
  st.write('© 2023 Rushank Goyal. All rights reserved. Source for the all-MiniLM-L6-v2 model: Wang, Wenhui, et al. "MiniLM: Deep Self-Attention Distillation for Task-Agnostic Compression of Pre-Trained Transformers." arXiv, 25 Feb. 2020, doi:10.48550/arXiv.2002.10957.')
 
28
  st.set_page_config(page_title='DiscoverCourses', page_icon=':bird:')
29
  st.header('DiscoverCourses')
30
  st.write('')
31
+ st.write("Do you like the tech + social impact focus of CS51? Excited by film-centered courses like FILMEDIA245B? Saw a cool study-abroad course (OSPISTAN76) and want similar on-campus options? Enter DiscoverCourses. Just pick a course and get dozens of recommendations based on its title or description.")
32
 
33
+ selected_course = st.selectbox('Pick a course from the dropdown (or click on it and start typing to search).',course_title_list)
34
+ st.write("Description: "+coursedf.iloc[np.where((coursedf['ref']+": "+coursedf['title'])==selected_course)[0][0],3])
35
 
36
  container = st.container()
37
  maincol1, maincol2 = container.columns(2)
 
45
  st.subheader(course_id+": "+result)
46
  with st.expander("See description"):
47
  st.write(coursedf.iloc[index,3]) #Using the new coursedf because it has proper descriptions for each course
48
+ link1 = "[ExploreCourses](https://explorecourses.stanford.edu/search?q="+course_id+"+"+result.replace(" ","+")+")"
49
+ link2 = "[Carta ↗](https://carta-beta.stanford.edu/results/"+course_id+")"
50
+ st.markdown(link1+" "+link2, unsafe_allow_html=True)
 
51
  st.divider()
52
 
53
  if maincol2.button('Recommend by description',use_container_width=True):
 
59
  st.subheader(course_id+": "+result)
60
  with st.expander("See description"):
61
  st.write(coursedf.iloc[index,3]) #Using the new coursedf because it has proper descriptions for each course
62
+ link1 = "[ExploreCourses](https://explorecourses.stanford.edu/search?q="+course_id+"+"+result.replace(" ","+")+")"
63
+ link2 = "[Carta ↗](https://carta-beta.stanford.edu/results/"+course_id+")"
64
+ st.markdown("<span style='color:white'>"+link1+" "+link2+"</span>", unsafe_allow_html=True)
 
65
  st.divider()
66
 
67
  st.write('© 2023 Rushank Goyal. All rights reserved. Source for the all-MiniLM-L6-v2 model: Wang, Wenhui, et al. "MiniLM: Deep Self-Attention Distillation for Task-Agnostic Compression of Pre-Trained Transformers." arXiv, 25 Feb. 2020, doi:10.48550/arXiv.2002.10957.')