# Tool-definition builders for SQLite / PostgreSQL / MongoDB data sources.
# NOTE(review): removed extraction artifacts (file-size line, git-blame hashes,
# line-number run) that were not valid Python and would break importing this module.
import sqlite3
import psycopg2
from .stats_tools import stats_tools
from .chart_tools import chart_tools
from utils import TEMP_DIR
def data_file_tools_call(session_hash):
    """Build the tool-call definitions for a session's uploaded SQLite data source.

    Opens the session's ``data_source.db`` under ``TEMP_DIR/<session_hash>``,
    reads the column names of the ``data_source`` table, and embeds them in the
    SQL-query tool description.

    Parameters:
        session_hash: session identifier; used to locate the session's
            temp directory under TEMP_DIR.

    Returns:
        list: the SQLite query tool dict followed by the shared chart tools
        and stats tools.
    """
    dir_path = TEMP_DIR / str(session_hash)
    connection = sqlite3.connect(f'{dir_path}/file_upload/data_source.db')
    # Close cursor/connection even if the query raises (the original leaked
    # the connection on any exception between connect() and close()).
    try:
        print("Querying Database in Tools.py")
        cur = connection.execute('select * from data_source')
        # cursor.description yields 7-tuples; element 0 is the column name.
        columns = [i[0] for i in cur.description]
        print("COLUMNS 2")
        print(columns)
        cur.close()
    finally:
        connection.close()
    # BUG FIX: `columns` is a list, so the original `columns[:625] + '..'`
    # raised TypeError (list + str) whenever there were more than 625 columns.
    # Truncate the list and append a '..' marker element instead.
    column_string = (columns[:625] + ['..']) if len(columns) > 625 else columns
    tools_calls = [
        {
            "type": "function",
            "function": {
                "name": "sql_query_func",
                "description": f"""This is a tool useful to query a SQLite table called 'data_source' with the following Columns: {column_string}.
There may also be more columns in the table if the number of columns is too large to process.
This function also saves the results of the query to csv file called query.csv.""",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "queries": {
                            "type": "array",
                            "description": "The query to use in the search. Infer this from the user's message. It should be a question or a statement",
                            "items": {
                                "type": "string",
                            }
                        }
                    },
                    "required": ["queries"],
                },
            },
        },
    ]
    # Append the shared chart/stats tool definitions so every data source
    # exposes the same visualization and statistics capabilities.
    tools_calls.extend(chart_tools)
    tools_calls.extend(stats_tools)
    return tools_calls
def sql_tools_call(db_tables):
    """Assemble the tool-call list for a PostgreSQL-backed session.

    Parameters:
        db_tables: string/sequence describing the database's tables; it is
            truncated to 625 characters (plus a '..' marker) before being
            embedded in the tool description.

    Returns:
        list: the PostgreSQL query tool dict followed by the shared chart
        tools and stats tools.
    """
    # Cap the table listing so the tool description stays a manageable size.
    if len(db_tables) > 625:
        table_string = db_tables[:625] + '..'
    else:
        table_string = db_tables

    # JSON-Schema parameters for the query tool: a required array of SQL strings.
    query_parameters = {
        "type": "object",
        "properties": {
            "queries": {
                "type": "array",
                "description": "The PostgreSQL query to use in the search. Infer this from the user's message. It should be a question or a statement",
                "items": {
                    "type": "string",
                }
            }
        },
        "required": ["queries"],
    }

    sql_tool = {
        "type": "function",
        "function": {
            "name": "sql_query_func",
            "description": f"""This is a tool useful to query a PostgreSQL database with the following tables, {table_string}.
There may also be more tables in the database if the number of tables is too large to process.
This function also saves the results of the query to csv file called query.csv.""",
            "parameters": query_parameters,
        },
    }

    # Every data source exposes the same shared chart/stats tooling.
    return [sql_tool, *chart_tools, *stats_tools]
def doc_db_tools_call(db_collections):
    """Build the tool-call definitions for a MongoDB document-database session.

    Parameters:
        db_collections: string/sequence describing the database's collections;
            truncated to 625 characters (plus a '..' marker) before being
            embedded in the tool description.

    Returns:
        list: the MongoDB aggregation-pipeline tool dict followed by the
        shared chart tools and stats tools.
    """
    # Cap the collection listing so the tool description stays a manageable size.
    collection_string = (db_collections[:625] + '..') if len(db_collections) > 625 else db_collections
    tools_calls = [
        {
            "type": "function",
            "function": {
                "name": "doc_db_query_func",
                "description": f"""This is a tool useful to build an aggregation pipeline to query a MongoDB NoSQL document database with the following collections, {collection_string}.
There may also be more collections in the database if the number of tables is too large to process.
This function also saves the results of the query to a csv file called query.csv.""",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "aggregation_pipeline": {
                            "type": "string",
                            "description": "The MongoDB aggregation pipeline to use in the search. Infer this from the user's message. It should be a question or a statement"
                        },
                        "db_collection": {
                            "type": "string",
                            "description": "The MongoDB collection to use in the search. Infer this from the user's message. It should be a question or a statement",
                        }
                    },
                    # BUG FIX: the original required list named "queries", a
                    # property that does not exist in this schema (copied from
                    # the SQL tools), so the model was never actually required
                    # to supply the aggregation pipeline.
                    "required": ["aggregation_pipeline", "db_collection"],
                },
            },
        },
    ]
    # Every data source exposes the same shared chart/stats tooling.
    tools_calls.extend(chart_tools)
    tools_calls.extend(stats_tools)
    return tools_calls