Spaces:
Running
on
T4
Running
on
T4
File size: 4,158 Bytes
d2beadd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 |
# opentools/tools/base.py
from opentools.engine.openai import ChatOpenAI
class BaseTool:
    """
    A base class for building tool classes that perform specific tasks, such as
    image processing or text detection.

    Subclasses override :meth:`execute` with the tool's actual functionality and
    may set ``require_llm_engine = True`` if they need an LLM engine.
    """

    # Default is False; tools that need an LLM engine should set this to True
    # in the subclass body.
    require_llm_engine = False

    def __init__(self, tool_name=None, tool_description=None, tool_version=None,
                 input_types=None, output_type=None, demo_commands=None,
                 output_dir=None, user_metadata=None, model_string=None):
        """
        Initialize the base tool with optional metadata.

        Parameters:
            tool_name (str): The name of the tool.
            tool_description (str): A description of the tool.
            tool_version (str): The version of the tool.
            input_types (dict): The expected input types for the tool.
            output_type (str): The expected output type for the tool.
            demo_commands (list): A list of example commands for using the tool.
            output_dir (str): The directory where the tool should save its output (optional).
            user_metadata (dict): Additional metadata specific to user needs (optional).
            model_string (str): The model string for the LLM engine (optional,
                only used if require_llm_engine is True).
        """
        # Delegate the shared metadata fields to set_metadata so the two code
        # paths cannot drift apart; only the fields set_metadata does not
        # cover are assigned here.
        self.set_metadata(tool_name, tool_description, tool_version,
                          input_types, output_type, demo_commands, user_metadata)
        self.output_dir = output_dir
        self.model_string = model_string

    def set_metadata(self, tool_name, tool_description, tool_version,
                     input_types, output_type, demo_commands, user_metadata=None):
        """
        Set the metadata for the tool.

        Parameters:
            tool_name (str): The name of the tool.
            tool_description (str): A description of the tool.
            tool_version (str): The version of the tool.
            input_types (dict): The expected input types for the tool.
            output_type (str): The expected output type for the tool.
            demo_commands (list): A list of example commands for using the tool.
            user_metadata (dict): Additional metadata specific to user needs (optional).
        """
        self.tool_name = tool_name
        self.tool_description = tool_description
        self.tool_version = tool_version
        self.input_types = input_types
        self.output_type = output_type
        self.demo_commands = demo_commands
        self.user_metadata = user_metadata

    def get_metadata(self):
        """
        Return the metadata for the tool.

        Returns:
            dict: A dictionary containing the tool's metadata. The
            ``user_metadata`` key is included only when user metadata is set
            (truthy).
        """
        metadata = {
            "tool_name": self.tool_name,
            "tool_description": self.tool_description,
            "tool_version": self.tool_version,
            "input_types": self.input_types,
            "output_type": self.output_type,
            "demo_commands": self.demo_commands,
            "require_llm_engine": self.require_llm_engine,
        }
        if self.user_metadata:
            metadata["user_metadata"] = self.user_metadata
        return metadata

    def set_custom_output_dir(self, output_dir):
        """
        Set a custom output directory for the tool.

        Parameters:
            output_dir (str): The new output directory path.
        """
        self.output_dir = output_dir

    def set_llm_engine(self, model_string):
        """
        Set the LLM engine for the tool.

        Parameters:
            model_string (str): The model string for the LLM engine.
        """
        self.model_string = model_string

    def execute(self, *args, **kwargs):
        """
        Execute the tool's main functionality. This method should be
        overridden by subclasses.

        Raises:
            NotImplementedError: If the subclass does not implement this method.
        """
        raise NotImplementedError("Subclasses must implement the execute method.")