import os
import logging
import re
import time # Added for time.sleep in placeholder functions
from typing import Any, Dict, List, Optional
from functools import lru_cache

import gradio as gr
import gradio.themes as themes  # Not used directly in this UI; available for optional theme customization

# --- Ensure vector_db.py is accessible ---
try:
    # Assuming vector_db.py exists in the same directory or is installed
    # Placeholder for VectorDatabase if the file is not provided
    class VectorDatabase:
        def __init__(self, persist_directory: str = "chroma_db"):
            self.persist_directory = persist_directory
            self.documents = {} # Simulating document storage
            self.states = [] # Simulating state storage
            logging.info(f"VectorDatabase initialized (placeholder) at {persist_directory}")

        def process_and_load_pdf(self, pdf_path: str) -> int:
            logging.info(f"Placeholder: Processing and loading PDF '{pdf_path}'...")
            # Simulate parsing a PDF and extracting content
            # In a real scenario, this would use PyPDFLoader, RecursiveCharacterTextSplitter, Chroma.from_documents
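            # A rough sketch of what the real pipeline might look like (hypothetical names,
            # assuming langchain-community and chromadb are installed):
            #   pages = PyPDFLoader(pdf_path).load()
            #   chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(pages)
            #   Chroma.from_documents(chunks, embedding=..., persist_directory=self.persist_directory)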
            if not self.documents: # Only load once for simulation
                self.documents = {
                    "doc1": "California Civil Code § 1950.5: Security deposit limit is two months' rent. Must be returned within 21 days.",
                    "doc2": "New York Real Property Law § 235-b: Implied Warranty of Habitability. Landlord must keep premises fit for human habitation.",
                    "doc3": "Texas Property Code § 92.056: Landlord's duty to repair or remedy. Tenant must give notice and time to repair.",
                    "doc4": "Florida Statutes § 83.56: Termination of rental agreement. Requires specific notice periods for rent increases or lease termination.",
                    "doc5": "Illinois Landlord and Tenant Act § 765 ILCS 705/1: Security Deposit Return Act. Landlord must return deposit within 45 days. ",
                    "doc6": "Washington RCW 59.18.230: Tenant's right to quiet enjoyment. Landlord may not interfere with tenant's privacy.",
                    "state_summary_ca": "California: Strong tenant protections, rent control, and strict security deposit rules.",
                    "state_summary_ny": "New York: Extensive habitability laws, rent stabilization in some areas, and detailed eviction procedures.",
                    "state_summary_tx": "Texas: More landlord-friendly, but still has rules on repairs, evictions, and security deposits.",
                    "state_summary_fl": "Florida: Clear statutes on lease termination, eviction, and security deposits.",
                    "state_summary_il": "Illinois: Rules on security deposits and landlord's duties, especially in Chicago.",
                    "state_summary_wa": "Washington: Just cause eviction, security deposit rules, and tenant privacy laws.",
                }
                self.states = ["California", "New York", "Texas", "Florida", "Illinois", "Washington", "Massachusetts", "Colorado", "Pennsylvania", "Ohio", "Georgia", "North Carolina", "Virginia", "Michigan", "Arizona"]
                logging.info(f"Placeholder: Simulated loading {len(self.documents)} documents and {len(self.states)} states.")
            return len(self.states)

        def query(self, query_text: str, state: Optional[str] = None, n_results: int = 5) -> Dict[str, Any]:
            logging.info(f"Placeholder: Querying DB for '{query_text[:50]}...' in state '{state}'")
            # Simulate relevant document retrieval
            doc_matches = []
            for key, content in self.documents.items():
                if (state and state.lower() in key.lower()) or query_text.lower() in content.lower():
                    doc_matches.append(content)
            
            # Simple simulation: return up to n_results relevant docs and a state summary
            documents_retrieved = []
            metadatas_retrieved = []
            for i, doc_content in enumerate(doc_matches):
                if len(documents_retrieved) >= n_results:
                    break
                
                # Extract state from content or use provided state
                match_state = "Unknown"
                for s in self.states:
                    if s.lower() in doc_content.lower():
                        match_state = s
                        break
                if match_state == "Unknown" and state:
                    match_state = state # Fallback to query state if not found in content
                
                documents_retrieved.append(doc_content)
                metadatas_retrieved.append({"state": match_state, "chunk_id": f"sim_chunk_{i+1}"})

            state_summary_doc = None
            state_summary_metadata = None
            if state:
                for key, content in self.documents.items():
                    if key.startswith("state_summary_") and content.lower().startswith(state.lower()):
                        state_summary_doc = content
                        state_summary_metadata = {"state": state, "type": "summary"}
                        break

            results = {
                "document_results": {"documents": [documents_retrieved], "metadatas": [metadatas_retrieved]},
                "state_results": {"documents": [[state_summary_doc]] if state_summary_doc else [[]], "metadatas": [[state_summary_metadata]] if state_summary_metadata else [[]]}
            }
            logging.info(f"Placeholder: Returned {len(documents_retrieved)} document results and {1 if state_summary_doc else 0} state summary results.")
            return results

        def get_states(self) -> List[str]:
            logging.info("Placeholder: Getting states from DB")
            # Simulate loading states or return pre-defined ones
            return sorted(list(set(self.states)))

        @property
        def document_collection(self):  # Simulates a Chroma collection object
            return type('Collection', (object,), {'count': lambda _self: len(self.documents)})()

        @property
        def state_collection(self):  # Simulates a Chroma collection object
            return type('Collection', (object,), {'count': lambda _self: len(self.states)})()


except ImportError:
    logging.error("Error: Could not import VectorDatabase. Using a placeholder for demonstration. Please ensure vector_db.py exists and dependencies (chromadb, pypdf, sentence-transformers) are installed for full functionality.")
    # Define a simple placeholder if vector_db.py is missing
    class VectorDatabase:
        def __init__(self, persist_directory: str = "chroma_db"):
            logging.warning("Using placeholder VectorDatabase. Full functionality requires 'vector_db.py'.")
            self.persist_directory = persist_directory
            self.documents = {}
            self.states = []

        def process_and_load_pdf(self, pdf_path: str) -> int:
            logging.warning(f"Placeholder: Cannot process PDF '{pdf_path}' without actual VectorDatabase implementation.")
            self.documents = {
                "doc1": "California Civil Code § 1950.5: Security deposit limit is two months' rent. Must be returned within 21 days.",
                "doc2": "New York Real Property Law § 235-b: Implied Warranty of Habitability. Landlord must keep premises fit for human habitation.",
                "doc3": "Texas Property Code § 92.056: Landlord's duty to repair or remedy. Tenant must give notice and time to repair.",
            }
            self.states = ["California", "New York", "Texas", "Florida", "Illinois"]
            return len(self.states) # Simulate some states loaded
        
        def query(self, query_text: str, state: Optional[str] = None, n_results: int = 5) -> Dict[str, Any]:
            logging.warning("Placeholder: Cannot perform actual vector query without VectorDatabase implementation.")
            # Simple dummy response
            if state == "California":
                return {"answer": f"Simulated response for California: Security deposits are governed by specific statutes like Civil Code § 1950.5.", "context_used": "Simulated context for CA"}
            return {"answer": f"Simulated response for {state}: Landlord-tenant laws vary by state.", "context_used": "Simulated general context"}

        def get_states(self) -> List[str]:
            logging.warning("Placeholder: Getting states from dummy VectorDatabase.")
            return ["California", "New York", "Texas", "Florida", "Illinois"]

        @property
        def document_collection(self):  # Simulates a Chroma collection object
            return type('Collection', (object,), {'count': lambda _self: len(self.documents)})()

        @property
        def state_collection(self):  # Simulates a Chroma collection object
            return type('Collection', (object,), {'count': lambda _self: len(self.states)})()


# --- Ensure langchain_openai is accessible ---
try:
    from langchain_openai import ChatOpenAI
    from langchain.prompts import PromptTemplate
    from langchain.chains import LLMChain
except ImportError:
    logging.error("Error: langchain-openai or langchain components not found. Please install them: pip install langchain-openai langchain.")
    # Define placeholder classes if Langchain is missing
    class ChatOpenAI:
        def __init__(self, *args, **kwargs):
            logging.warning("Using placeholder ChatOpenAI. Install 'langchain-openai' for actual LLM functionality.")
            self.kwargs = kwargs
        def invoke(self, messages):
            if "fail" in messages.get("query", "").lower():
                raise Exception("Simulated LLM error.")
            return {"text": f"Placeholder LLM response for query: '{messages.get('query')}' in state '{messages.get('state')}'. Please install langchain-openai for real AI responses."}
            
    class PromptTemplate:
        def __init__(self, input_variables, template):
            self.input_variables = input_variables
            self.template = template
            logging.warning("Using placeholder PromptTemplate.")

    class LLMChain:
        def __init__(self, llm, prompt):
            self.llm = llm
            self.prompt = prompt
            logging.warning("Using placeholder LLMChain.")
        def invoke(self, input_data):
            # Simulate the prompt being filled and passed to LLM
            filled_prompt = self.prompt.template.format(**input_data)
            logging.info(f"Placeholder: LLMChain invoking with prompt: {filled_prompt[:100]}...")
            return self.llm.invoke(input_data)


# Suppress warnings
import warnings
warnings.filterwarnings("ignore", category=SyntaxWarning)
warnings.filterwarnings("ignore", category=UserWarning, message=".*You are using gradio version.*")
warnings.filterwarnings("ignore", category=DeprecationWarning)

# Enhanced logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s'
)

# --- RAGSystem Class ---
class RAGSystem:
    def __init__(self, vector_db: Optional[VectorDatabase] = None):
        logging.info("Initializing RAGSystem")
        self.vector_db = vector_db if vector_db else VectorDatabase()
        self.llm = None
        self.chain = None
        self.prompt_template_str = """You are a legal assistant specializing in tenant rights and landlord-tenant laws. Your goal is to provide accurate, detailed, and helpful answers grounded in legal authority. Use the provided statutes as the primary source when available. If no relevant statutes are found in the context, rely on your general knowledge to provide a pertinent and practical response, clearly indicating when you are doing so and prioritizing state-specific information over federal laws for state-specific queries.
Instructions:
* Use the context and statutes as the primary basis for your answer when available.
* For state-specific queries, prioritize statutes or legal principles from the specified state over federal laws.
* Cite relevant statutes (e.g., (AS § 34.03.220(a)(2))) explicitly in your answer when applicable.
* If multiple statutes apply, list all relevant ones.
* If no specific statute is found in the context, state this clearly (e.g., 'No specific statute was found in the provided context'), then provide a general answer based on common legal principles or practices, marked as such.
* Include practical examples or scenarios to enhance clarity and usefulness.
* Use bullet points or numbered lists for readability when appropriate.
* Maintain a professional and neutral tone.
Question: {query}
State: {state}
Statutes from context:
{statutes}
Context information:
--- START CONTEXT ---
{context}
--- END CONTEXT ---
Answer:"""
        self.prompt_template = PromptTemplate(
            input_variables=["query", "context", "state", "statutes"],
            template=self.prompt_template_str
        )
        logging.info("RAGSystem initialized.")

    def extract_statutes(self, text: str) -> str:
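        # The pattern below targets common citation styles that include a section symbol,
        # e.g. "Civ. Code § 1950.5(b)", "Gen. Oblig. Law § 7-103", or "Title 42 USC § 3601".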
        statute_pattern = r'\b(?:[A-Z]{2,}\.?\s+(?:Rev\.\s+)?Stat\.?|Code(?:\s+Ann\.?)?|Ann\.?\s+Laws|Statutes|CCP|USC|ILCS|Civ\.\s+Code|Penal\s+Code|Gen\.\s+Oblig\.\s+Law|R\.?S\.?|P\.?L\.?)\s+§\s*[\d\-]+(?:\.\d+)?(?:[\(\w\.\)]+)?|Title\s+\d+\s+USC\s+§\s*\d+(?:-\d+)?\b'
        statutes = re.findall(statute_pattern, text, re.IGNORECASE)
        valid_statutes = []
        for statute in statutes:
            statute = statute.strip()
            if '§' in statute and any(char.isdigit() for char in statute):
                if not re.match(r'^\([\w\.]+\)$', statute) and 'http' not in statute:
                    if len(statute) > 5:
                        valid_statutes.append(statute)

        if valid_statutes:
            seen = set()
            unique_statutes = [s for s in valid_statutes if not (s.rstrip('.,;') in seen or seen.add(s.rstrip('.,;')))]
            logging.info(f"Extracted {len(unique_statutes)} unique statutes.")
            return "\n".join(f"- {s}" for s in unique_statutes)

        logging.info("No statutes found matching the pattern in the context.")
        return "No specific statutes found in the provided context."

    @lru_cache(maxsize=50)
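    # Caches on the exact argument tuple (query, state, API key, n_results), so repeated
    # identical queries skip both the vector-store lookup and the LLM call.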
    def process_query_cached(self, query: str, state: str, openai_api_key: str, n_results: int = 5) -> Dict[str, Any]:
        logging.info(f"Processing query (cache key: '{query}'|'{state}'|key_hidden) with n_results={n_results}")

        if not state or state == "Select a state..." or "Error" in state:
            logging.warning("No valid state provided for query.")
            return {"answer": "<div class='error-message'>Error: Please select a valid state.</div>", "context_used": "N/A - Invalid Input"}
        if not query or not query.strip():
            logging.warning("No query provided.")
            return {"answer": "<div class='error-message'>Error: Please enter your question.</div>", "context_used": "N/A - Invalid Input"}
        if not openai_api_key or not openai_api_key.strip() or not openai_api_key.startswith("sk-"):
            logging.warning("No valid OpenAI API key provided.")
            return {"answer": "<div class='error-message'>Error: Please provide a valid OpenAI API key (starting with 'sk-'). Get one from <a href='https://platform.openai.com/api-keys' target='_blank'>OpenAI</a>.</div>", "context_used": "N/A - Invalid Input"}

        try:
            logging.info("Initializing temporary LLM and Chain for this query...")
            temp_llm = ChatOpenAI(
                temperature=0.2, openai_api_key=openai_api_key, model_name="gpt-3.5-turbo",
                max_tokens=1500, request_timeout=45
            )
            temp_chain = LLMChain(llm=temp_llm, prompt=self.prompt_template)
            logging.info("Temporary LLM and Chain initialized successfully.")
        except Exception as e:
            logging.error(f"LLM Initialization failed: {str(e)}", exc_info=True)
            error_msg = "Error: Failed to initialize AI model. Please check your network connection and API key validity."
            if "authentication" in str(e).lower():
                error_msg = "Error: OpenAI API Key is invalid or expired. Please check your key."
            return {"answer": f"<div class='error-message'>{error_msg}</div><div class='error-details'>Details: {str(e)}</div>", "context_used": "N/A - LLM Init Failed"}

        context = "No relevant context found."
        statutes_from_context = "Statute retrieval skipped due to context issues."
        try:
            logging.info(f"Querying Vector DB for query: '{query[:50]}...' in state '{state}'...")
            results = self.vector_db.query(query, state=state, n_results=n_results)
            logging.info(f"Vector DB query successful for state '{state}'. Processing results...")

            context_parts = []
            doc_results = results.get("document_results", {})
            docs = doc_results.get("documents", [[]])[0]
            metadatas = doc_results.get("metadatas", [[]])[0]
            if docs and metadatas and len(docs) == len(metadatas):
                logging.info(f"Found {len(docs)} document chunks.")
                for i, doc_content in enumerate(docs):
                    metadata = metadatas[i]
                    state_label = metadata.get('state', 'Unknown State')
                    chunk_id = metadata.get('chunk_id', 'N/A')
                    context_parts.append(f"**Source: Document Chunk {chunk_id} (State: {state_label})**\n{doc_content}")

            state_results_data = results.get("state_results", {})
            state_docs = state_results_data.get("documents", [[]])[0]
            state_metadatas = state_results_data.get("metadatas", [[]])[0]
            if state_docs and state_metadatas and len(state_docs) == len(state_metadatas):
                logging.info(f"Found {len(state_docs)} state summary documents.")
                for i, state_doc_content in enumerate(state_docs):
                    metadata = state_metadatas[i]
                    state_label = metadata.get('state', state)
                    context_parts.append(f"**Source: State Summary (State: {state_label})**\n{state_doc_content}")

            if context_parts:
                context = "\n\n---\n\n".join(context_parts)
                logging.info(f"Constructed context with {len(context_parts)} parts. Length: {len(context)} chars.")
                try:
                    statutes_from_context = self.extract_statutes(context)
                except Exception as e:
                    logging.error(f"Error extracting statutes: {e}", exc_info=True)
                    statutes_from_context = "Error extracting statutes from context."
            else:
                logging.warning("No relevant context parts found from vector DB query.")
                context = "No relevant context could be retrieved from the knowledge base for this query and state. The AI will answer from its general knowledge."
                statutes_from_context = "No specific statutes found as no context was retrieved."

        except Exception as e:
            logging.error(f"Vector DB query/context processing failed: {str(e)}", exc_info=True)
            context = f"Warning: Error retrieving documents from the knowledge base ({str(e)}). The AI will attempt to answer from its general knowledge, which may be less specific or accurate."
            statutes_from_context = "Statute retrieval skipped due to error retrieving context."

        try:
            logging.info("Invoking LLMChain with constructed input...")
            llm_input = {"query": query, "context": context, "state": state, "statutes": statutes_from_context}
            answer_dict = temp_chain.invoke(llm_input)
            answer_text = answer_dict.get('text', '').strip()

            if not answer_text:
                logging.warning("LLM returned an empty answer.")
                answer_text = "<div class='error-message'><span class='error-icon'>⚠️</span>The AI model returned an empty response. This might be due to the query, context limitations, or temporary issues. Please try rephrasing your question or try again later.</div>"
            else:
                logging.info("LLM generated answer successfully.")

            return {"answer": answer_text, "context_used": context}

        except Exception as e:
            logging.error(f"LLM processing failed: {str(e)}", exc_info=True)
            error_message = "Error: AI answer generation failed."
            details = f"Details: {str(e)}"
            if "authentication" in str(e).lower():
                error_message = "Error: Authentication failed. Please double-check your OpenAI API key."
                details = ""
            elif "rate limit" in str(e).lower():
                error_message = "Error: You've exceeded your OpenAI API rate limit or quota. Please check your usage and plan limits, or wait and try again."
                details = ""
            elif "context length" in str(e).lower():
                error_message = "Error: The request was too long for the AI model. This can happen with very complex questions or extensive retrieved context."
                details = "Try simplifying your question or asking about a more specific aspect."
            elif "timeout" in str(e).lower():
                error_message = "Error: The request to the AI model timed out. The service might be busy."
                details = "Please try again in a few moments."

            formatted_error = f"<div class='error-message'><span class='error-icon'>❌</span>{error_message}</div>"
            if details:
                formatted_error += f"<div class='error-details'>{details}</div>"

            return {"answer": formatted_error, "context_used": context}

    def process_query(self, query: str, state: str, openai_api_key: str, n_results: int = 5) -> Dict[str, Any]:
        return self.process_query_cached(query.strip(), state, openai_api_key.strip(), n_results)

    def get_states(self) -> List[str]:
        try:
            states = self.vector_db.get_states()
            if not states:
                logging.warning("No states retrieved from vector_db. Returning empty list.")
                return []
            valid_states = sorted(list(set(s for s in states if s and isinstance(s, str) and s != "Select a state...")))
            logging.info(f"Retrieved {len(valid_states)} unique, valid states from VectorDatabase.")
            return valid_states
        except Exception as e:
            logging.error(f"Failed to get states from VectorDatabase: {str(e)}", exc_info=True)
            return ["Error: Could not load states"]

    def load_pdf(self, pdf_path: str) -> int:
        if not os.path.exists(pdf_path):
            logging.error(f"PDF file not found at path: {pdf_path}")
            raise FileNotFoundError(f"PDF file not found: {pdf_path}")
        try:
            logging.info(f"Attempting to load/verify data from PDF: {pdf_path}")
            num_states_processed = self.vector_db.process_and_load_pdf(pdf_path)
            doc_count = self.vector_db.document_collection.count()
            state_count = self.vector_db.state_collection.count()
            total_items = doc_count + state_count

            if total_items > 0:
                logging.info(f"Vector DB contains {total_items} items ({doc_count} docs, {state_count} states). PDF processed or data already existed.")
                current_states = self.get_states()
                return len(current_states) if current_states and "Error" not in current_states[0] else 0
            else:
                logging.warning(f"PDF processing completed, but the vector database appears empty. Check PDF content and processing logs.")
                return 0

        except Exception as e:
            logging.error(f"Failed to load or process PDF '{pdf_path}': {str(e)}", exc_info=True)
            raise RuntimeError(f"Failed to process PDF '{pdf_path}': {e}") from e

    # --- GRADIO INTERFACE ---
    def gradio_interface(self):
        def query_interface_wrapper(api_key: str, query: str, state: str) -> str:
            # Basic client-side validation for immediate feedback (redundant but good UX)
            if not api_key or not api_key.strip() or not api_key.startswith("sk-"):
                return "<div class='error-message'><span class='error-icon'>⚠️</span>Please provide a valid OpenAI API key (starting with 'sk-'). <a href='https://platform.openai.com/api-keys' target='_blank'>Get one free from OpenAI</a>.</div>"
            if not state or state == "Select a state..." or "Error" in state:
                return "<div class='error-message'><span class='error-icon'>⚠️</span>Please select a valid state from the dropdown.</div>"
            if not query or not query.strip():
                return "<div class='error-message'><span class='error-icon'>⚠️</span>Please enter your question in the text box.</div>"

            # Call the core processing logic
            result = self.process_query(query=query, state=state, openai_api_key=api_key)
            answer = result.get("answer", "<div class='error-message'><span class='error-icon'>⚠️</span>An unexpected error occurred.</div>")

            # Check if the answer already contains an error message (from deeper within process_query)
            if "<div class='error-message'>" in answer:
                return answer # Return the pre-formatted error message directly
            else:
                # Format the successful response with the new UI structure
                formatted_response = f"<div class='response-header'><span class='response-icon'>📜</span>Response for {state}</div><hr class='divider'>{answer}"
                return formatted_response

        try:
            available_states_list = self.get_states()
            dropdown_choices = ["Select a state..."] + (available_states_list if available_states_list and "Error" not in available_states_list[0] else ["Error: States unavailable"])
            initial_value = dropdown_choices[0]
        except Exception: # Catch-all for safety
            dropdown_choices = ["Error: Critical failure loading states"]
            initial_value = dropdown_choices[0]

        # Define example queries, filtering based on available states
        example_queries_base = [
            ["What are the rules for security deposit returns?", "California"],
            ["Can a landlord enter my apartment without notice?", "New York"],
            ["My landlord hasn't made necessary repairs. What can I do?", "Texas"],
            ["How much notice must a landlord give to raise rent?", "Florida"],
            ["What is an implied warranty of habitability?", "Illinois"],
            ["Can a landlord evict a tenant for not paying rent?", "California"],
            ["What is a fixed-term lease?", "New York"],
            ["Are emotional support animals allowed?", "Texas"],
            ["What is a notice to quit?", "Florida"],
            ["How do I break my lease early?", "Illinois"],
            ["What are the quiet enjoyment rights?", "Washington"],
        ]
        example_queries = []
        if available_states_list and "Error" not in available_states_list[0] and len(available_states_list) > 0:
            loaded_states_set = set(available_states_list)
            # Filter for examples whose state is in the loaded states
            example_queries = [ex for ex in example_queries_base if ex[1] in loaded_states_set]
            # Add a generic example if no specific state examples match or if list is empty
            if not example_queries:
                example_queries.append(["What basic rights do tenants have?", available_states_list[0] if available_states_list else "California"])
        else: # Fallback if states list is problematic
            example_queries.append(["What basic rights do tenants have?", "California"])

        # Enhanced Custom CSS optimized for Paris theme
        custom_css = """
        /* Import premium fonts for better readability */
        @import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&family=Poppins:wght@500;600;700;800&family=JetBrains+Mono:wght@400;500&display=swap');
        
        /* Enhanced root variables optimized for Paris theme */
        :root {
            --primary-gradient: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            --secondary-gradient: linear-gradient(135deg, #f093fb 0%, #f5576c 100%);
            --accent-color: #6366f1;
            --accent-hover: #4f46e5;
            --text-contrast: #1a202c;
            --text-muted: #718096;
            --border-strong: #e2e8f0;
            --border-subtle: #f1f5f9;
            --surface-primary: #ffffff;
            --surface-secondary: #f7fafc;
            --surface-accent: #edf2f7;
            --shadow-soft: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
            --shadow-medium: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05);
            --shadow-strong: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
            --border-radius-sm: 8px;
            --border-radius-md: 12px;
            --border-radius-lg: 16px;
            --spacing-xs: 0.5rem;
            --spacing-sm: 0.75rem;
            --spacing-md: 1rem;
            --spacing-lg: 1.5rem;
            --spacing-xl: 2rem;
        }
        
        /* Dark mode enhancements for Paris theme */
        @media (prefers-color-scheme: dark) {
            :root {
                --surface-primary: #1a202c;
                --surface-secondary: #2d3748;
                --surface-accent: #4a5568;
                --text-contrast: #f7fafc;
                --text-muted: #a0aec0;
                --border-strong: #4a5568;
                --border-subtle: #2d3748;
            }
        }

        /* Enhanced base container for Paris theme */
        .gradio-container {
            max-width: 1100px !important;
            margin: 0 auto !important;
            padding: var(--spacing-md) !important;
            font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif !important;
            background: var(--surface-secondary) !important;
            min-height: 100vh !important;
        }

        /* Stunning header with Paris theme integration */
        .app-header-wrapper {
            background: var(--primary-gradient) !important;
            border: 3px solid transparent !important;
            background-clip: padding-box !important;
            border-radius: var(--border-radius-lg) !important;
            padding: var(--spacing-xl) !important;
            margin-bottom: var(--spacing-lg) !important;
            text-align: center !important;
            box-shadow: var(--shadow-strong) !important;
            position: relative !important;
            overflow: hidden !important;
        }

        .app-header-wrapper::before {
            content: '';
            position: absolute;
            top: 0;
            left: 0;
            right: 0;
            bottom: 0;
            background: linear-gradient(45deg, rgba(255,255,255,0.1) 0%, rgba(255,255,255,0.05) 100%);
            pointer-events: none;
        }

        .app-header-logo {
            font-size: 3.5rem !important;
            margin-bottom: var(--spacing-sm) !important;
            display: block !important;
            filter: drop-shadow(0 4px 8px rgba(0,0,0,0.3)) !important;
            animation: float 3s ease-in-out infinite !important;
        }

        @keyframes float {
            0%, 100% { transform: translateY(0px); }
            50% { transform: translateY(-10px); }
        }

        .app-header-title {
            font-family: 'Poppins', sans-serif !important;
            font-size: 2.75rem !important;
            font-weight: 800 !important;
            color: white !important;
            margin: 0 0 var(--spacing-sm) 0 !important;
            line-height: 1.1 !important;
            text-shadow: 0 4px 8px rgba(0,0,0,0.3) !important;
            letter-spacing: -0.02em !important;
        }

        .app-header-tagline {
            font-size: 1.2rem !important;
            color: rgba(255,255,255,0.9) !important;
            font-weight: 400 !important;
            margin: 0 !important;
            text-shadow: 0 2px 4px rgba(0,0,0,0.2) !important;
        }

        /* Compact and elegant main container */
        .main-dashboard-container {
            display: flex !important;
            flex-direction: column !important;
            gap: var(--spacing-md) !important;
        }

        /* Premium card design with superior boundaries */
        .dashboard-card-section {
            background: var(--surface-primary) !important;
            border: 2px solid var(--border-strong) !important;
            border-radius: var(--border-radius-md) !important;
            padding: var(--spacing-lg) !important;
            box-shadow: var(--shadow-soft) !important;
            transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1) !important;
            position: relative !important;
            overflow: hidden !important;
        }

        .dashboard-card-section::before {
            content: '';
            position: absolute;
            top: 0;
            left: 0;
            width: 100%;
            height: 3px;
            background: var(--secondary-gradient);
            transform: translateX(-100%);
            transition: transform 0.3s ease;
        }

        .dashboard-card-section:hover {
            box-shadow: var(--shadow-medium) !important;
            transform: translateY(-2px) !important;
            border-color: var(--accent-color) !important;
        }

        .dashboard-card-section:hover::before {
            transform: translateX(0);
        }

        /* Perfectly centered and styled section titles */
        .sub-section-title {
            font-family: 'Poppins', sans-serif !important;
            font-size: 1.6rem !important;
            font-weight: 700 !important;
            color: var(--text-contrast) !important;
            text-align: center !important;
            margin: 0 0 var(--spacing-lg) 0 !important;
            padding-bottom: var(--spacing-sm) !important;
            border-bottom: 3px solid transparent !important;
            background: var(--primary-gradient) !important;
            background-clip: text !important;
            -webkit-background-clip: text !important;
            -webkit-text-fill-color: transparent !important;
            position: relative !important;
            display: block !important;
        }

        .sub-section-title::after {
            content: '';
            position: absolute;
            bottom: 0;
            left: 50%;
            transform: translateX(-50%);
            width: 60px;
            height: 3px;
            background: var(--primary-gradient);
            border-radius: 2px;
        }

        /* Superior input styling with crystal clear boundaries */
        .gradio-textbox, .gradio-dropdown {
            margin-bottom: var(--spacing-sm) !important;
        }

        .gradio-textbox textarea,
        .gradio-textbox input,
        .gradio-dropdown select {
            background: var(--surface-primary) !important;
            border: 2px solid var(--border-strong) !important;
            border-radius: var(--border-radius-sm) !important;
            padding: var(--spacing-md) !important;
            font-size: 0.95rem !important;
            font-family: 'Inter', sans-serif !important;
            color: var(--text-contrast) !important;
            transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1) !important;
            box-shadow: inset 0 1px 3px rgba(0,0,0,0.1) !important;
            line-height: 1.5 !important;
        }

        .gradio-textbox textarea:focus,
        .gradio-textbox input:focus,
        .gradio-dropdown select:focus {
            outline: none !important;
            border-color: var(--accent-color) !important;
            box-shadow: 0 0 0 4px rgba(99, 102, 241, 0.1), inset 0 1px 3px rgba(0,0,0,0.1) !important;
            transform: translateY(-1px) !important;
        }

        .gradio-textbox textarea:hover,
        .gradio-textbox input:hover,
        .gradio-dropdown select:hover {
            border-color: var(--accent-color) !important;
            box-shadow: 0 2px 4px rgba(0,0,0,0.1), inset 0 1px 3px rgba(0,0,0,0.1) !important;
        }

        /* Enhanced placeholder and label styling */
        .gradio-textbox textarea::placeholder,
        .gradio-textbox input::placeholder {
            color: var(--text-muted) !important;
            opacity: 0.8 !important;
            font-style: italic !important;
        }

        .gradio-textbox label,
        .gradio-dropdown label {
            font-weight: 600 !important;
            color: var(--text-contrast) !important;
            font-size: 0.9rem !important;
            margin-bottom: var(--spacing-xs) !important;
            display: block !important;
            text-transform: uppercase !important;
            letter-spacing: 0.5px !important;
        }

        /* Refined info text */
        .gradio-textbox .gr-form,
        .gradio-dropdown .gr-form {
            font-size: 0.85rem !important;
            color: var(--text-muted) !important;
            margin-top: var(--spacing-xs) !important;
            font-style: italic !important;
        }

        /* Optimized input layout */
        .input-row {
            display: flex !important;
            gap: var(--spacing-md) !important;
            margin-bottom: var(--spacing-sm) !important;
            align-items: flex-end !important;
        }

        .input-field {
            flex: 1 !important;
            min-width: 0 !important;
        }

        /* Premium button design */
        .button-row {
            display: flex !important;
            gap: var(--spacing-md) !important;
            justify-content: flex-end !important;
            margin-top: var(--spacing-lg) !important;
            flex-wrap: wrap !important;
        }

        .gradio-button {
            padding: var(--spacing-md) var(--spacing-xl) !important;
            border-radius: var(--border-radius-sm) !important;
            font-weight: 600 !important;
            font-size: 0.9rem !important;
            text-transform: uppercase !important;
            letter-spacing: 0.5px !important;
            transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1) !important;
            cursor: pointer !important;
            border: 2px solid transparent !important;
            position: relative !important;
            overflow: hidden !important;
        }

        .gradio-button::before {
            content: '';
            position: absolute;
            top: 0;
            left: -100%;
            width: 100%;
            height: 100%;
            background: linear-gradient(90deg, transparent, rgba(255,255,255,0.2), transparent);
            transition: left 0.5s;
        }

        .gradio-button:hover::before {
            left: 100%;
        }

        .gr-button-primary {
            background: var(--primary-gradient) !important;
            color: white !important;
            box-shadow: var(--shadow-soft) !important;
            border: 2px solid transparent !important;
        }

        .gr-button-primary:hover {
            box-shadow: var(--shadow-medium) !important;
            transform: translateY(-2px) scale(1.02) !important;
        }

        .gr-button-primary:active {
            transform: translateY(0) scale(0.98) !important;
        }

        .gr-button-secondary {
            background: transparent !important;
            color: var(--text-contrast) !important;
            border: 2px solid var(--border-strong) !important;
            backdrop-filter: blur(10px) !important;
        }

        .gr-button-secondary:hover {
            background: var(--surface-accent) !important;
            border-color: var(--accent-color) !important;
            transform: translateY(-1px) !important;
            box-shadow: var(--shadow-soft) !important;
        }

        /* Exceptional output styling */
        .output-content-wrapper {
            background: var(--surface-primary) !important;
            border: 2px solid var(--border-strong) !important;
            border-radius: var(--border-radius-sm) !important;
            padding: var(--spacing-lg) !important;
            min-height: 120px !important;
            font-size: 0.95rem !important;
            line-height: 1.6 !important;
            color: var(--text-contrast) !important;
            box-shadow: inset 0 2px 4px rgba(0,0,0,0.05) !important;
            font-family: 'Inter', sans-serif !important;
        }

        .response-header {
            font-size: 1.3rem !important;
            font-weight: 700 !important;
            color: var(--text-contrast) !important;
            margin-bottom: var(--spacing-md) !important;
            display: flex !important;
            align-items: center !important;
            gap: var(--spacing-sm) !important;
            background: var(--primary-gradient) !important;
            background-clip: text !important;
            -webkit-background-clip: text !important;
            -webkit-text-fill-color: transparent !important;
        }

        .response-icon {
            font-size: 1.4rem !important;
            background: var(--primary-gradient) !important;
            background-clip: text !important;
            -webkit-background-clip: text !important;
            -webkit-text-fill-color: transparent !important;
        }

        .divider {
            border: none !important;
            margin: var(--spacing-md) 0 !important;
            background: var(--primary-gradient) !important;
            height: 2px !important;
            border-radius: 1px !important;
        }

        /* Enhanced error styling */
        .error-message {
            background: linear-gradient(135deg, #fef2f2 0%, #fde8e8 100%) !important;
            border: 2px solid #fecaca !important;
            color: #dc2626 !important;
            padding: var(--spacing-lg) !important;
            border-radius: var(--border-radius-sm) !important;
            display: flex !important;
            align-items: flex-start !important;
            gap: var(--spacing-md) !important;
            font-size: 0.9rem !important;
            box-shadow: var(--shadow-soft) !important;
        }

        .error-icon {
            font-size: 1.3rem !important;
            line-height: 1 !important;
            margin-top: 0.1rem !important;
            animation: pulse 2s infinite !important;
        }

        @keyframes pulse {
            0%, 100% { opacity: 1; }
            50% { opacity: 0.7; }
        }

        /* Elegant placeholder */
        .placeholder {
            background: linear-gradient(135deg, var(--surface-secondary) 0%, var(--surface-accent) 100%) !important;
            border: 2px dashed var(--border-strong) !important;
            border-radius: var(--border-radius-sm) !important;
            padding: var(--spacing-xl) var(--spacing-lg) !important;
            text-align: center !important;
            color: var(--text-muted) !important;
            font-style: italic !important;
            font-size: 1rem !important;
            transition: all 0.3s ease !important;
        }

        .placeholder:hover {
            border-color: var(--accent-color) !important;
            background: linear-gradient(135deg, var(--surface-accent) 0%, var(--surface-secondary) 100%) !important;
        }

        /* Premium examples table */
        .examples-section .gr-samples-table {
            border: 2px solid var(--border-strong) !important;
            border-radius: var(--border-radius-sm) !important;
            overflow: hidden !important;
            margin-top: var(--spacing-lg) !important;
            box-shadow: var(--shadow-soft) !important;
        }

        .examples-section .gr-samples-table th,
        .examples-section .gr-samples-table td {
            padding: var(--spacing-md) !important;
            border: none !important;
            font-size: 0.9rem !important;
            transition: all 0.2s ease !important;
        }

        .examples-section .gr-samples-table th {
            background: var(--primary-gradient) !important;
            color: white !important;
            font-weight: 600 !important;
            text-transform: uppercase !important;
            letter-spacing: 0.5px !important;
            font-size: 0.8rem !important;
        }

        .examples-section .gr-samples-table td {
            background: var(--surface-primary) !important;
            color: var(--text-contrast) !important;
            border-top: 1px solid var(--border-subtle) !important;
            cursor: pointer !important;
        }

        .examples-section .gr-samples-table tr:hover td {
            background: var(--surface-accent) !important;
            transform: scale(1.01) !important;
        }

        /* Sophisticated footer */
        .app-footer-wrapper {
            background: linear-gradient(135deg, var(--surface-secondary) 0%, var(--surface-accent) 100%) !important;
            border: 2px solid var(--border-strong) !important;
            border-radius: var(--border-radius-md) !important;
            padding: var(--spacing-lg) !important;
            margin-top: var(--spacing-lg) !important;
            text-align: center !important;
            box-shadow: var(--shadow-soft) !important;
        }

        .app-footer p {
            margin: var(--spacing-sm) 0 !important;
            font-size: 0.9rem !important;
            color: var(--text-muted) !important;
            line-height: 1.6 !important;
        }

        .app-footer a {
            background: var(--primary-gradient) !important;
            background-clip: text !important;
            -webkit-background-clip: text !important;
            -webkit-text-fill-color: transparent !important;
            text-decoration: none !important;
            font-weight: 600 !important;
            transition: all 0.3s ease !important;
        }

        .app-footer a:hover {
            text-decoration: underline !important;
            transform: scale(1.05) !important;
            display: inline-block !important;
        }

        /* Hide Gradio default elements */
        .gr-examples .gr-label,
        .gr-examples .label-wrap,
        .gr-examples .gr-accordion-header {
            display: none !important;
        }

        /* Responsive design */
        @media (max-width: 768px) {
            .gradio-container {
                padding: var(--spacing-sm) !important;
            }
            
            .app-header-title {
                font-size: 2rem !important;
            }
            
            .app-header-tagline {
                font-size: 1rem !important;
            }
            
            .input-row {
                flex-direction: column !important;
            }
            
            .button-row {
                flex-direction: column !important;
            }
            
            .gradio-button {
                width: 100% !important;
            }
        }
        """

        with gr.Blocks(theme="earneleh/paris", css=custom_css, title="Landlord-Tenant Rights Assistant") as demo:
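            # Note: "earneleh/paris" is a shared theme on the Hugging Face Hub; Gradio
            # downloads it at startup, so the first launch assumes network access.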
            # Header Section
            with gr.Group(elem_classes="app-header-wrapper"):
                gr.Markdown(
                    """
                    <div class="app-header">
                        <span class="app-header-logo">⚖️</span>
                        <h1 class="app-header-title">Landlord-Tenant Rights Assistant</h1>
                        <p class="app-header-tagline">Empowering You with State-Specific Legal Insights</p>
                    </div>
                    """
                )

            # Main Dashboard Container
            with gr.Column(elem_classes="main-dashboard-container"):

                # Introduction and Disclaimer Card
                with gr.Group(elem_classes="dashboard-card-section"):
                    gr.Markdown("<h3 class='sub-section-title'>Welcome & Disclaimer</h3>")
                    gr.Markdown(
                        """
                        Navigate landlord-tenant laws with ease. This assistant provides detailed, state-specific answers grounded in legal authority.
                        
                        **Disclaimer:** This tool is for informational purposes only and does not constitute legal advice. For specific legal guidance, always consult a licensed attorney in your jurisdiction.
                        """
                    )

                # OpenAI API Key Input Card
                with gr.Group(elem_classes="dashboard-card-section"):
                    gr.Markdown("<h3 class='sub-section-title'>OpenAI API Key</h3>")
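                    # The key is masked in the UI (type="password") and is forwarded to the
                    # query handler with each submission (see the submit_button.click wiring below).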
                    api_key_input = gr.Textbox(
                        label="API Key",
                        type="password", 
                        placeholder="Enter your API key (e.g., sk-...)",
                        info="Required to process your query. Create one in your OpenAI account dashboard.",
                        lines=1,
                        elem_classes=["input-field-group"]
                    )

                # Query Input and State Selection Card
                with gr.Group(elem_classes="dashboard-card-section"):
                    gr.Markdown("<h3 class='sub-section-title'>Ask Your Question</h3>")
                    with gr.Row(elem_classes="input-row"):
                        with gr.Column(elem_classes="input-field", scale=3):
                            query_input = gr.Textbox(
                                label="Your Question",
                                placeholder="E.g., What are the rules for security deposit returns in my state?",
                                lines=4,
                                max_lines=8,
                                elem_classes=["input-field-group"]
                            )
                        with gr.Column(elem_classes="input-field", scale=1):
                            state_input = gr.Dropdown(
                                label="Select State",
                                choices=dropdown_choices,
                                value=initial_value,
                                allow_custom_value=False,
                                elem_classes=["input-field-group"]
                            )
                    with gr.Row(elem_classes="button-row"):
                        clear_button = gr.Button("Clear", variant="secondary", elem_classes=["gr-button-secondary"])
                        submit_button = gr.Button("Submit Query", variant="primary", elem_classes=["gr-button-primary"])

                # Output Display Card
                with gr.Group(elem_classes="dashboard-card-section"):
                    gr.Markdown("<h3 class='sub-section-title'>Legal Assistant's Response</h3>")
                    output = gr.Markdown(
                        value="<div class='placeholder'>The answer will appear here after submitting your query.</div>",
                        elem_classes="output-content-wrapper"
                    )

                # Example Questions Section
                with gr.Group(elem_classes="dashboard-card-section examples-section"):
                    gr.Markdown("<h3 class='sub-section-title'>Example Questions</h3>")
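                    # Clicking a row in gr.Examples copies that example into query_input and state_input.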
                    if example_queries:
                        gr.Examples(
                            examples=example_queries,
                            inputs=[query_input, state_input],
                            examples_per_page=5,
                            label="" # Hide the default "Examples" label
                        )
                    else:
                        gr.Markdown("<div class='placeholder'>Sample questions could not be loaded.</div>")

            # Footer Section
            with gr.Group(elem_classes="app-footer-wrapper"):
                gr.Markdown(
                    """
                    This tool is for informational purposes only and does not constitute legal advice. For legal guidance, always consult with a licensed attorney in your jurisdiction.

                    Developed by **Nischal Subedi**. Connect on [LinkedIn](https://www.linkedin.com/in/nischal1/) or explore insights at [Substack](https://datascientistinsights.substack.com/).
                    """
                )

            # Event Listeners
            submit_button.click(
                fn=query_interface_wrapper,
                inputs=[api_key_input, query_input, state_input],
                outputs=output,
                api_name="submit_query" # API name for potential external calls
            )
            
            clear_button.click(
                fn=lambda: (
                    "", # Clear API key
                    "", # Clear query input
                    initial_value, # Reset state dropdown to initial value
                    "<div class='placeholder'>Inputs cleared. Ready for your next question.</div>" # Reset output
                ),
                inputs=[],
                outputs=[api_key_input, query_input, state_input, output]
            )

            return demo

# --- Main Execution Block ---
if __name__ == "__main__":
    logging.info("Starting Landlord-Tenant Rights Bot application...")
    try:
        SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
        DEFAULT_PDF_PATH = os.path.join(SCRIPT_DIR, "tenant-landlord.pdf")
        DEFAULT_DB_PATH = os.path.join(SCRIPT_DIR, "chroma_db")

        # Use environment variables for paths if available, otherwise use defaults
        PDF_PATH = os.getenv("PDF_PATH", DEFAULT_PDF_PATH)
        VECTOR_DB_PATH = os.getenv("VECTOR_DB_PATH", DEFAULT_DB_PATH)

        # Ensure the vector database directory itself exists (ChromaDB persists into it)
        os.makedirs(VECTOR_DB_PATH, exist_ok=True)

        logging.info(f"Attempting to load PDF from: {PDF_PATH}")
        if not os.path.exists(PDF_PATH):
            logging.error(f"FATAL: PDF file not found at the specified path: {PDF_PATH}")
            print(f"\n--- CONFIGURATION ERROR ---\nPDF file ('{os.path.basename(PDF_PATH)}') not found at: {PDF_PATH}.\nPlease ensure it exists or set the 'PDF_PATH' environment variable.\n---------------------------\n")
            exit(1) # Exit if PDF not found

        if not os.access(PDF_PATH, os.R_OK):
            logging.error(f"FATAL: PDF file at '{PDF_PATH}' exists but is not readable. Check file permissions.")
            print(f"\n--- PERMISSION ERROR ---\nPDF file ('{os.path.basename(PDF_PATH)}') found but not readable at: {PDF_PATH}\nPlease check file permissions (e.g., using 'chmod +r' in the terminal).\n---------------------------\n")
            exit(1) # Exit if PDF not readable

        logging.info(f"PDF file '{os.path.basename(PDF_PATH)}' found and is readable.")

        # Initialize VectorDatabase and RAGSystem
        vector_db_instance = VectorDatabase(persist_directory=VECTOR_DB_PATH)
        rag = RAGSystem(vector_db=vector_db_instance)

        # Load PDF into the vector database (or verify it's loaded if already persisted)
        rag.load_pdf(PDF_PATH)

        # Get the Gradio interface object from the RAGSystem instance
        app_interface = rag.gradio_interface()
        
        # Determine the server port: 7860 is the Gradio/Spaces default, overridable via a PORT env var
        SERVER_PORT = int(os.getenv("PORT", 7860))

        logging.info(f"Launching Gradio app on http://0.0.0.0:{SERVER_PORT}")
        print(f"\n--- Gradio App Running ---\nAccess at: http://localhost:{SERVER_PORT} or your public Spaces URL\n--------------------------\n")
        
        # Launch the Gradio interface, binding to all interfaces so it is reachable from
        # containers/Spaces; share=False keeps it off the temporary public gradio.live tunnel
        app_interface.launch(server_name="0.0.0.0", server_port=SERVER_PORT, share=False)

    except ModuleNotFoundError as e:
        if "vector_db" in str(e):
             logging.error(f"FATAL: Could not import VectorDatabase. Ensure 'vector_db.py' is in the same directory and 'chromadb', 'langchain', 'pypdf', 'sentence-transformers' are installed.", exc_info=True)
             print(f"\n--- MISSING DEPENDENCY OR FILE ---\nCould not find/import 'vector_db.py' or one of its dependencies.\nError: {e}\nPlease ensure 'vector_db.py' is present and all required packages (chromadb, langchain, pypdf, sentence-transformers, etc.) are in your requirements.txt and installed.\n---------------------------\n")
        else:
            logging.error(f"Application startup failed due to a missing module: {str(e)}", exc_info=True)
            print(f"\n--- FATAL STARTUP ERROR - MISSING MODULE ---\n{str(e)}\nPlease ensure all dependencies are installed.\nCheck logs for more details.\n---------------------------\n")
        exit(1)
    except FileNotFoundError as e:
        logging.error(f"Application startup failed due to a missing file: {str(e)}", exc_info=True)
        print(f"\n--- FATAL STARTUP ERROR - FILE NOT FOUND ---\n{str(e)}\nPlease ensure the file exists at the specified path.\nCheck logs for more details.\n---------------------------\n")
        exit(1)
    except Exception as e:
        logging.error(f"Application startup failed: {str(e)}", exc_info=True)
        print(f"\n--- FATAL STARTUP ERROR ---\n{str(e)}\nCheck logs for more details.\n---------------------------\n")
        exit(1)