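"""Nyaya-Mitra (न्याय-मित्र): a Gradio chat assistant for India's new criminal laws.

The app indexes the legal texts with LegalDocumentProcessor, retrieves passages
relevant to each question, and asks a Mistral model (via its OpenAI-compatible
API) to answer with the sources listed. Requires the MISTRAL_API_KEY
environment variable.
"""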
import os
import gradio as gr
from openai import OpenAI
from add_embeddings import LegalDocumentProcessor
from typing import Dict, List, Optional
import time
import hashlib

# Initialize clients and models
client = OpenAI(
    api_key=os.environ.get("MISTRAL_API_KEY"),
    base_url="https://api.mistral.ai/v1"
)

print("Starting Nyaya-Mitra initialization...")

# Initialize document processor
try:
    print("Initializing document processor...")
    doc_processor = LegalDocumentProcessor()
    print("Processing documents...")
    doc_processor.process_and_store_documents()
    print("Document processor initialized successfully")
except Exception as e:
    print(f"Error initializing document processor: {str(e)}")
    doc_processor = None

# In-memory cache of responses, keyed by an MD5 hash of (query, context)
response_cache: Dict[str, str] = {}

def get_cache_key(query: str, context: str) -> str:
    """Generate a cache key from query and context"""
    combined = f"{query}|{context}"
    return hashlib.md5(combined.encode()).hexdigest()

def get_cached_response(cache_key: str) -> Optional[str]:
    """Return a previously cached response, or None if not cached yet"""
    return response_cache.get(cache_key)

def get_mistral_response(query: str, context: str, max_retries: int = 3) -> str:
    """Get response from Mistral AI with rate limiting and caching"""
    cache_key = get_cache_key(query, context)
    cached_response = get_cached_response(cache_key)
    
    if cached_response:
        return cached_response
        
    system_prompt = """You are Nyaya-Mitra (न्याय-मित्र), a helpful legal assistant for the Indian justice system. 
    Provide concise, accurate responses based on the context provided. Focus on the most relevant information.
    
    Guidelines:
    1. Be precise and cite specific sections
    2. Explain concepts simply
    3. Suggest next steps if applicable
    4. Note that you cannot provide legal advice
    5. Keep responses focused and relevant
    
    Format responses clearly using markdown."""
    
    for attempt in range(max_retries):
        try:
            response = client.chat.completions.create(
                model="mistral-medium",
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": f"Context: {context}\n\nQuestion: {query}"}
                ]
            )
            result = response.choices[0].message.content
            response_cache[cache_key] = result
            return result
            
        except Exception as e:
            # Retry with a linear backoff (2s, 4s, ...) when the API reports rate limiting
            if "rate limit" in str(e).lower():
                if attempt < max_retries - 1:
                    wait_time = (attempt + 1) * 2
                    time.sleep(wait_time)
                    continue
            return f"""I apologize, but I'm currently experiencing high demand. Please try:
            1. Waiting a few moments before asking another question
            2. Making your question more specific
            3. Breaking down your question into smaller parts
            
            Error details: {str(e)}"""
    
    return "I apologize, but I'm currently unable to process your request. Please try again in a few moments."

def format_sources(metadatas: List[Dict]) -> str:
    """Format source information for display"""
    seen_sources = set()
    sources = []
    for metadata in metadatas:
        source_key = f"{metadata['law_code']} ({metadata['source']})"
        if source_key not in seen_sources:
            sources.append(f"- {source_key}")
            seen_sources.add(source_key)
    return "\n".join(sources)

def chat_interface(message: str, history: List[List[str]]) -> str:
    """Main chat interface function"""
    try:
        if doc_processor is None:
            return """I apologize, but I'm having trouble accessing the legal documents. 
            Please try refreshing the page or waiting a moment."""
            
        # Search for relevant context
        results = doc_processor.search_documents(message)
        context = "\n".join(results["documents"])
        
        # Get response from Mistral
        response = get_mistral_response(message, context)
        
        # Add source information
        sources = format_sources(results["metadatas"])
        full_response = f"{response}\n\n**Sources:**\n{sources}"
        
        # Gradio's chat component renders Markdown itself, so the combined
        # Markdown string can be returned directly
        return full_response
    except Exception as e:
        return f"""I apologize, but I encountered an error. Please try:
        1. Making your question more specific
        2. Waiting a moment and trying again
        3. Refreshing the page
        
        Error: {str(e)}"""

# Create Gradio interface
iface = gr.ChatInterface(
    fn=chat_interface,
    title="Nyaya-Mitra (न्याय-मित्र)",
    description="""# Welcome to Nyaya-Mitra!

Your AI Legal Assistant for India's New Criminal Laws:
- 📚 Bharatiya Nyaya Sanhita (BNS)
- 📋 Bharatiya Nagarik Suraksha Sanhita (BNSS)
- ⚖️ Bharatiya Sakshya Adhiniyam (BSA)

*Note: This is an AI assistant for information only. For legal advice, please consult qualified legal professionals.*""",
    theme="soft",
    examples=[
        "What are the main provisions for cybercrime in BNS?",
        "How do I file a complaint under BNSS?",
        "What is the process for electronic evidence under BSA?",
        "What are the key changes in mob lynching laws?",
        "How does BNS define organized crime?"
    ]
)

# Launch the interface
if __name__ == "__main__":
    iface.launch()