Create app.py
app.py
ADDED
@@ -0,0 +1,85 @@
import os
import gradio as gr
from groq import Groq
import numpy as np
import faiss
from sentence_transformers import SentenceTransformer

# Initialize Groq API client (read the key from the environment instead of hardcoding a secret)
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")
client = Groq(api_key=GROQ_API_KEY)

# Load Pretrained Embedding Model
embedding_model = SentenceTransformer("all-MiniLM-L6-v2")

# Load Sample Environmental Dataset (Replace with your own)
environmental_data = [
    {"text": "Deforestation leads to loss of biodiversity.", "category": "Biodiversity"},
    {"text": "Construction projects can increase carbon emissions.", "category": "Air Quality"},
    {"text": "Water usage must be monitored to prevent scarcity.", "category": "Water Resources"},
    # Add more entries as needed
]

# Generate embeddings for the dataset and build a FAISS index over them
def create_dataset_index(data):
    texts = [entry["text"] for entry in data]
    embeddings = embedding_model.encode(texts)
    faiss_index = faiss.IndexFlatL2(embeddings.shape[1])
    faiss_index.add(np.array(embeddings))
    return faiss_index, data

index, indexed_data = create_dataset_index(environmental_data)

# Function to retrieve relevant data
def retrieve_relevant_data(query, top_k=3):
    query_embedding = embedding_model.encode([query])
    # Never request more neighbours than the index actually holds
    top_k = min(top_k, index.ntotal)
    distances, indices = index.search(np.array(query_embedding), top_k)
    relevant_texts = [indexed_data[i]["text"] for i in indices[0]]
    return relevant_texts

# Function to generate an EIA report
def generate_eia_report(project_type, location, size):
    # Combine user input into a query
    query = f"Project Type: {project_type}, Location: {location}, Size: {size}. Provide related environmental impact details."

    # Retrieve relevant context
    relevant_data = retrieve_relevant_data(query)
    context = " ".join(relevant_data)

    # Use Groq API to generate a detailed report
    chat_completion = client.chat.completions.create(
        messages=[
            {
                "role": "user",
                "content": f"Generate an Environmental Impact Assessment report based on the following details:\n{query}\nContext:\n{context}"
            }
        ],
        model="llama3-8b-8192",
        stream=False,
    )
    return chat_completion.choices[0].message.content

# Define Gradio Interface
def eia_interface(project_type, location, size):
    try:
        report = generate_eia_report(project_type, location, size)
        return report
    except Exception as e:
        return f"An error occurred: {e}"

# Gradio App
interface = gr.Interface(
    fn=eia_interface,
    inputs=[
        gr.Textbox(label="Project Type (e.g., Solar Farm, Highway)"),
        gr.Textbox(label="Location (e.g., California, USA)"),
        gr.Textbox(label="Project Size (e.g., 50 acres, 100 MW)"),
    ],
    outputs="text",
    title="Environmental Impact Assessment Generator",
    description="Enter project details to generate a detailed Environmental Impact Assessment (EIA) report."
)

# Launch Gradio App
if __name__ == "__main__":
    interface.launch()
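Usage note: with the key supplied through the GROQ_API_KEY environment variable (as assumed in the client setup above), running python app.py starts the Gradio UI locally. A minimal sketch of exercising the retrieval step on its own, assuming app.py is importable from the working directory and the environment variable is set:

    # Hypothetical usage sketch, not part of app.py: call the retrieval helper directly.
    from app import retrieve_relevant_data

    # Returns up to top_k dataset sentences closest to the query embedding.
    print(retrieve_relevant_data("Highway expansion near a wetland", top_k=2))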