Spaces:
Sleeping
Sleeping
File size: 1,991 Bytes
94b1e32 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 |
import pandas as pd
import random
import requests
import os
from dotenv import load_dotenv
# Load the CSV file
def load_csv(file_path):
    """Read a CSV file into a pandas DataFrame, reporting the columns found.

    Returns the DataFrame on success, or None when the file cannot be read
    (the error is printed rather than raised — callers check for None).
    """
    try:
        frame = pd.read_csv(file_path)
        print(f"File loaded successfully. Columns available: {list(frame.columns)}")
        return frame
    except Exception as e:
        # Best-effort loader: report and signal failure instead of raising.
        print(f"Error loading file: {e}")
        return None
# Perform web search using SerpAPI
def search_web(query, api_key):
    """Run a web search for *query* through the SerpAPI JSON endpoint.

    Parameters:
        query: free-text search string.
        api_key: SerpAPI key.

    Returns the list of "organic_results" dicts from the response, or []
    on any failure (non-200 status, network error, bad JSON).
    """
    try:
        # Pass the query via `params` so requests percent-encodes it.
        # The previous raw f-string interpolation silently broke queries
        # containing spaces, '&', '#', '?', or non-ASCII characters.
        response = requests.get(
            "https://serpapi.com/search.json",
            params={"q": query, "api_key": api_key},
            timeout=30,  # don't hang forever on a stalled connection
        )
        if response.status_code == 200:
            return response.json().get("organic_results", [])
        print(f"Error in search: {response.status_code}")
        return []
    except Exception as e:
        print(f"Search failed: {e}")
        return []
def get_raw_data(file_path, query):
    """Validate the input CSV and fetch raw web-search results for *query*.

    Loads SERPAPI_KEY from the .env file, verifies the CSV at *file_path*
    is readable (the data itself is only used as an input-validation step),
    then performs the search.

    Returns the list of organic search results, or None when configuration
    is missing or the CSV cannot be loaded.
    """
    load_dotenv()
    api_key = os.getenv("SERPAPI_KEY")

    # Validate configuration before doing any work.
    if not file_path or not api_key:
        print("Error: Environment variables not set. Please check your .env file.")
        return

    # Load the CSV exactly once; the previous version loaded it twice.
    data = load_csv(file_path)
    if data is None:
        return

    return search_web(query, api_key)
def get_raw_data_sheets(query):
    """Fetch raw web-search results for *query* (no CSV involved).

    Reads SERPAPI_KEY from the .env file. Returns the list of organic
    search results, or None when the key is not configured.
    """
    load_dotenv()
    serp_key = os.getenv("SERPAPI_KEY")

    # Bail out early when the key is missing rather than issuing a
    # request that is guaranteed to fail.
    if not serp_key:
        print("Error: Environment variables not set. Please check your .env file.")
        return

    return search_web(query, serp_key)
|