omarsol committed
Commit 352cf53 · 1 Parent(s): 5ddcfe5

add new source
.github/workflows/deploy_hf.yaml DELETED
@@ -1,24 +0,0 @@
-name: Sync to Hugging Face hub
-on:
-  push:
-    branches: [main]
-    paths:
-      - 'scripts/**' # Replace with the path to your specific folder
-      - 'requirements.txt' # Add this line to include requirements.txt
-
-  # to run this workflow manually from the Actions tab
-  workflow_dispatch:
-
-jobs:
-  sync-to-hub:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-          lfs: true
-      - name: Push to hub
-        env:
-          HF_TOKEN: ${{ secrets.HF_TOKEN }}
-          HF_USERNAME: ${{ secrets.HF_USERNAME }}
-        run: git push --force https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/towardsai-buster/ai-tutor-chatbot main:main
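
The deleted workflow mirrored the repository to the Space with a single force push. For reference, a minimal Python sketch of running the same sync by hand (not part of this commit; it assumes HF_USERNAME and HF_TOKEN are exported in the environment, matching the secrets the workflow read):

# Hypothetical local replacement for the deleted workflow step; assumes the
# same credentials are available as environment variables.
import os
import subprocess

space_url = (
    f"https://{os.environ['HF_USERNAME']}:{os.environ['HF_TOKEN']}"
    "@huggingface.co/spaces/towardsai-buster/ai-tutor-chatbot"
)
# Same force push of the local main branch that the GitHub Action performed.
subprocess.run(["git", "push", "--force", space_url, "main:main"], check=True)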
scripts/main.py CHANGED
@@ -59,7 +59,7 @@ def update_query_engine_tools(selected_sources) -> list[RetrieverTool]:
         "All Sources": (
             custom_retriever_all_sources,
             "all_sources_info",
-            """Useful for questions asking about information in the field of AI.""",
+            """Useful tool that contains general information about the field of AI.""",
         ),
     }
 
@@ -128,6 +128,7 @@ def generate_completion(
         "LangChain Docs": "langchain",
         "OpenAI Cookbooks": "openai_cookbooks",
         "Towards AI Blog": "tai_blog",
+        "8 Hour Primer": "8-hour_primer",
     }
 
     for source in sources:
@@ -144,7 +145,7 @@ def generate_completion(
         filters=filter_list,
         condition=FilterCondition.OR,
     )
-    # logfire.info(f"Filters: {filters}")
+    logfire.info(f"Filters: {filters}")
     query_engine_tools[0].retriever._vector_retriever._filters = filters
 
     # pdb.set_trace()
@@ -243,6 +244,7 @@ sources = gr.CheckboxGroup(
         "LangChain Docs",
         "OpenAI Cookbooks",
         "Towards AI Blog",
+        "8 Hour Primer",
         # "All Sources",
     ],
     interactive=True,
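
In scripts/main.py the new source travels from the Gradio checkbox label to a metadata filter on the retriever. A minimal sketch of that path, assuming document chunks carry a "source" metadata key whose values match the mapping added in generate_completion (names below are illustrative):

# Sketch: turn selected UI labels into an OR metadata filter for the retriever.
from llama_index.core.vector_stores import (
    FilterCondition,
    MetadataFilter,
    MetadataFilters,
)

source_mapping = {
    "Towards AI Blog": "tai_blog",
    "8 Hour Primer": "8-hour_primer",  # new source added in this commit
}

selected_sources = ["8 Hour Primer"]
filter_list = [
    MetadataFilter(key="source", value=source_mapping[name])
    for name in selected_sources
]
filters = MetadataFilters(filters=filter_list, condition=FilterCondition.OR)
# main.py then attaches these filters to the shared retriever:
# query_engine_tools[0].retriever._vector_retriever._filters = filters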
scripts/setup.py CHANGED
@@ -34,7 +34,7 @@ if not os.path.exists("data/chroma-db-all_sources"):
     from huggingface_hub import snapshot_download
 
     snapshot_download(
-        repo_id="towardsai-buster/ai-tutor-vector-db",
+        repo_id="towardsai-tutors/ai-tutor-vector-db",
         local_dir="data",
         repo_type="dataset",
     )
@@ -177,6 +177,7 @@ AVAILABLE_SOURCES_UI = [
     "LangChain Docs",
     "OpenAI Cookbooks",
     "Towards AI Blog",
+    "8 Hour Primer",
     # "All Sources",
 ]
 
@@ -188,6 +189,7 @@ AVAILABLE_SOURCES = [
     "langchain",
     "openai_cookbooks",
     "tai_blog",
+    "8-hour_primer",
     # "all_sources",
 ]
 
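
scripts/setup.py now pulls the vector database snapshot from the towardsai-tutors organization instead of towardsai-buster, and lists the new "8 Hour Primer" source in both the UI and internal source lists. A minimal sketch of the download step with the updated repo_id (the token argument is an assumption, needed only if the dataset is private):

# Sketch of the dataset download setup.py performs with the new repo_id.
import os
from huggingface_hub import snapshot_download

if not os.path.exists("data/chroma-db-all_sources"):
    snapshot_download(
        repo_id="towardsai-tutors/ai-tutor-vector-db",
        local_dir="data",
        repo_type="dataset",
        token=os.getenv("HF_TOKEN"),  # assumption: only needed for a private dataset
    )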