import os
import sys

# Make the local package importable when this module is run from its own directory.
sys.path.append('..')

import streamlit as st
from crewai import Agent, Crew, Process, Task, LLM
from crewai.project import CrewBase, agent, crew, task
from research_agent.tools.tool import search_tool, website_search_tool, pdf_search_tool


def streamlit_callback(step_output):
	"""Callback function to display step output in Streamlit."""
	st.markdown("---")
	for step in step_output:
		if isinstance(step, tuple) and len(step) == 2:
			action, observation = step
			if isinstance(action, dict) and "tool" in action and "tool_input" in action and "log" in action:
				st.markdown("# Action")
				st.markdown(f"**Tool:** {action['tool']}")
				st.markdown(f"**Tool Input:** {action['tool_input']}")
				st.markdown(f"**Log:** {action['log']}")
				if 'Action' in action:  # Check if 'Action' key exists before using it
					st.markdown(f"**Action:** {action['Action']}")
					st.markdown(f"**Action Input:** ```json\n{action['tool_input']}\n```")
			elif isinstance(action, str):
				st.markdown(f"**Action:** {action}")
			else:
				st.markdown(f"**Action:** {str(action)}")

			st.markdown("**Observation**")
			if isinstance(observation, str):
				observation_lines = observation.split('\n')
				for line in observation_lines:
					st.markdown(line)
			else:
				st.markdown(str(observation))
		else:
			st.markdown(step)
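
# Note: streamlit_callback is not wired into the crew below; to stream agent steps to the
# Streamlit UI it could be passed as the Crew's step_callback (a CrewAI hook).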

@CrewBase
class MarketUseCaseCrew:
	"""Crew that researches a market use case, ideates on solutions, and collects implementation resources."""

	def llm(self) -> LLM:
		"""Shared Gemini LLM for all agents; requires GOOGLE_API_KEY to be set in the environment."""
		return LLM(model="gemini/gemini-1.5-flash-002", temperature=0.01, api_key=os.environ["GOOGLE_API_KEY"])
	
	@agent
	def researcher(self) -> Agent:
		return Agent(
			config=self.agents_config['researcher'],
			tools=[search_tool, website_search_tool],  # Custom tools imported at the top of the file
			verbose=True,
			llm=self.llm(),
			allow_delegation=True,
		)

	@agent
	def design_thinker(self) -> Agent:
		return Agent(
			config=self.agents_config['design_thinker'],
			verbose=True,
			tools=[search_tool, website_search_tool],
			llm=self.llm(),
			allow_delegation=True,
		)
	
	@agent	
	def developer(self) -> Agent:
		return Agent(
			config=self.agents_config['developer'],
			verbose=True,
			tools=[search_tool, website_search_tool, pdf_search_tool],
			llm=self.llm(),
			allow_delegation=True,
		)
	
	@task
	def research_task(self) -> Task:
		return Task(
			config=self.tasks_config['research_task'],
			output_file='output/researched_data.md',
		)

	@task
	def design_task(self) -> Task:
		return Task(
			config=self.tasks_config['design_task'],
			output_file='output/ideas.md',
		)
	
	@task
	def resource_collector(self) -> Task:
		return Task(
			config=self.tasks_config['resource_collector'],
			output_file='output/resource.md',
		)

	@crew
	def crew(self) -> Crew:
		"""Creates the MarketUseCaseCrew crew."""
		return Crew(
			agents=self.agents, # Automatically created by the @agent decorator
			tasks=self.tasks, # Automatically created by the @task decorator
			process=Process.sequential,
			verbose=True,
			# process=Process.hierarchical, # Use this instead for a hierarchical process: https://docs.crewai.com/how-to/Hierarchical/
		)
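

# A minimal sketch of running this crew directly; assumes GOOGLE_API_KEY is set and the
# agents/tasks YAML configs referenced above are present. kickoff() returns the final crew output.
if __name__ == "__main__":
	result = MarketUseCaseCrew().crew().kickoff()
	print(result)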