pdx97 committed
Commit 71a8799 · verified · 1 Parent(s): ae7a494

Updated app.py

Files changed (1)
  1. app.py +43 -39
app.py CHANGED
@@ -1,69 +1,73 @@
-from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
+from smolagents import CodeAgent, HfApiModel, load_tool, tool
 import datetime
 import requests
 import pytz
 import yaml
 from tools.final_answer import FinalAnswerTool
-
-from Gradio_UI import GradioUI
-
-# Below is an example of a tool that does nothing. Amaze us with your creativity !
-@tool
-def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
-    #Keep this format for the description / args / args description but feel free to modify the tool
-    """A tool that does nothing yet
-    Args:
-        arg1: the first argument
-        arg2: the second argument
-    """
-    return "What magic will you build ?"
+from scholarly import scholarly
+import gradio as gr
 
 @tool
-def get_current_time_in_timezone(timezone: str) -> str:
-    """A tool that fetches the current local time in a specified timezone.
+def fetch_latest_research_papers(keywords: list, num_results: int = 5) -> list:
+    """Fetches the latest research papers from Google Scholar based on provided keywords.
     Args:
-        timezone: A string representing a valid timezone (e.g., 'America/New_York').
+        keywords: A list of keywords to search for relevant papers.
+        num_results: The number of papers to fetch (default is 5).
     """
     try:
-        # Create timezone object
-        tz = pytz.timezone(timezone)
-        # Get current time in that timezone
-        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
-        return f"The current local time in {timezone} is: {local_time}"
+        query = " ".join(keywords)
+        search_results = scholarly.search_pubs(query)
+        papers = []
+        for i in range(num_results):
+            paper = next(search_results, None)
+            if paper:
+                papers.append({
+                    "title": paper['bib'].get('title', 'No Title'),
+                    "authors": paper['bib'].get('author', 'Unknown Authors'),
+                    "year": paper['bib'].get('pub_year', 'Unknown Year'),
+                    "abstract": paper['bib'].get('abstract', 'No Abstract Available'),
+                    "link": paper.get('pub_url', 'No Link Available')
+                })
+        return papers
     except Exception as e:
-        return f"Error fetching time for timezone '{timezone}': {str(e)}"
-
+        return [f"Error fetching research papers: {str(e)}"]
 
 final_answer = FinalAnswerTool()
 
-# If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
-# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
-
 model = HfApiModel(
-    max_tokens=2096,
-    temperature=0.5,
-    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
-    custom_role_conversions=None,
+    max_tokens=2096,
+    temperature=0.5,
+    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
+    custom_role_conversions=None,
 )
 
-
-# Import tool from Hub
-image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
-
 with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
 
 agent = CodeAgent(
     model=model,
-    tools=[final_answer], ## add your tools here (don't remove final answer)
+    tools=[final_answer, fetch_latest_research_papers],
     max_steps=6,
     verbosity_level=1,
     grammar=None,
     planning_interval=None,
-    name=None,
-    description=None,
+    name="ScholarAgent",
+    description="An AI agent that fetches the latest research papers from Google Scholar based on user-defined keywords and filters.",
     prompt_templates=prompt_templates
 )
 
+def search_papers(user_input):
+    keywords = user_input.split(",")  # Split input by commas for multiple keywords
+    results = fetch_latest_research_papers(keywords, num_results=5)
+    return "\n\n".join([f"**Title:** {paper['title']}\n**Authors:** {paper['authors']}\n**Year:** {paper['year']}\n**Abstract:** {paper['abstract']}\n[Read More]({paper['link']})" for paper in results])
+
+# Create a simple Gradio interface
+with gr.Blocks() as demo:
+    gr.Markdown("# Google Scholar Research Paper Fetcher")
+    keyword_input = gr.Textbox(label="Enter keywords (comma-separated)", placeholder="e.g., deep learning, reinforcement learning")
+    output_display = gr.Markdown()
+    search_button = gr.Button("Search")
+
+    search_button.click(search_papers, inputs=[keyword_input], outputs=[output_display])
 
-GradioUI(agent).launch()
+demo.launch()
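
A quick way to sanity-check the new tool outside the Gradio UI is to run the same scholarly query directly. The snippet below is a minimal sketch, not part of this commit; it assumes the scholarly package is installed and that Google Scholar is reachable (anonymous requests may be rate-limited).

# Minimal sketch (not part of the commit): reproduce the query that
# fetch_latest_research_papers builds, using the same scholarly API.
# Assumes `pip install scholarly` and network access to Google Scholar.
from scholarly import scholarly

search_results = scholarly.search_pubs("deep learning reinforcement learning")
first = next(search_results, None)
if first:
    # Publication dicts expose metadata under the 'bib' key, as the tool expects
    print(first['bib'].get('title', 'No Title'))
    print(first.get('pub_url', 'No Link Available'))
else:
    print("No results (Scholar may be rate-limiting anonymous requests).")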