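"""Streamlit app that looks up a legal case on Google Scholar and displays an
AI-generated case brief of the top result (court, facts, procedural history,
issues, holding, rationale, decision, other opinions, cases cited)."""
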
import streamlit as st
from bs4 import BeautifulSoup
import requests
import os
import time
from openai import OpenAI
import google.generativeai as genai

# API key is read from the environment (GOOGLE_API_KEY is an assumed variable name) rather than hardcoded.
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

generation_config = {
  "temperature": 0,
  "top_p": 0.95,
  "top_k": 64,
  "max_output_tokens": 8192,
  "response_mime_type": "text/plain",
}
     
headers = {
    "User-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 Edge/18.19582"
}

proxies = {"http": os.getenv("HTTP_PROXY")}



@st.cache_data(ttl=3600)
def search_legal_cases(query, num_results=10):
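    """Scrape Google Scholar's case-law search and return a list of
    (title, link, citation) tuples for the top results."""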
    url = "https://scholar.google.com/scholar?hl=en&as_sdt=6"
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.3"
    }

    params = {
        "q": query,
        "hl": "en",
        "num": num_results,
        "as_sdt": "4",  # This parameter filters the search results to legal cases
    }

    response = requests.get(url, proxies=proxies, headers=headers, params=params)
    soup = BeautifulSoup(response.text, "html.parser")

    results = []
    for result in soup.find_all("div", class_="gs_ri"):
        title_tag = result.find("h3", class_="gs_rt")
        link_tag = title_tag.find("a") if title_tag else None
        if link_tag is None:
            # Skip citation-only entries that have no link to the opinion text.
            continue
        title = title_tag.text
        base_url = "https://scholar.google.com"
        link = base_url + link_tag["href"]
        citation = result.find("div", class_="gs_a").text.replace(" - Google Scholar", "")
        results.append((title, link, citation))

    return results

@st.cache_data(ttl=3600)
def extract_text_from_link(url):
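    """Fetch a case page and return its visible text, one block per line."""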
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.3"
    }
    
    response = requests.get(url, headers=headers, proxies=proxies)
    soup = BeautifulSoup(response.content, "html.parser")
    
    text = soup.get_text(separator="\n")
    return text


# @st.cache_data(ttl=3600)
# def get_summary(text):
#    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))  # key read from the environment, not hardcoded

#    completion = client.chat.completions.create(
#      model="gpt-4o",
#      messages=[
#        {"role": "system", "content": f'''You are a law professor specialized in legal writing and legal research. 
#                    When presented with a case by a user please summarize it according to the following requirements:
#                    Name of the court.
#                    Facts (name of the parties, what happened factually).
#                    Procedural history (what happened in the past procedurally, what were prior judgements). 
#                    Issues (what is in dispute).
#                    Holding (the applied rule of law). 
#                    Rationale (reasons for the holding).
#                    Decision (what did the court decide, e.g. affirmed, overruled).
#                    Other opinions (if there are any dissenting or concurring opinions, summarize majority opinion, dissenting opinion and concurring opinion).
#                    Cases cited (which cases the court cited and how it treated them).'''},
#        {"role": "user", "content": f"Please summarize this case according to the instructions: {text}. "}
#      ]
#    )
   
#    return completion.choices[0].message.content


def get_summary(text):
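    """Ask Gemini for a structured case brief of the decision text and return
    the model response object (its .text attribute holds the summary)."""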
    model = genai.GenerativeModel('gemini-1.5-flash', generation_config=generation_config)
    response = model.generate_content(f'''You are a law professor specialized in legal writing and legal research. 
                   When presented with a case by a user please summarize it according to the following requirements:
                   Name of the court.
                   Facts (name of the parties, what happened factually).
                   Procedural history (what happened in the past procedurally, what were prior judgements). 
                   Issues (what is in dispute).
                   Holding (the applied rule of law). 
                   Rationale (reasons for the holding).
                   Decision (what did the court decide, e.g. affirmed, overruled).
                   Other opinions (if there are any dissenting or concurring opinions, summarize majority opinion, dissenting opinion and concurring opinion).
                   Cases cited (which cases the court cited and how it treated them).
                   Here is the text of the court decision: {text}''', 
                                      stream=False)
    return response

       
st.write("\n")
st.write("\n")
search_query = st.text_input("Enter a case name or citation, e.g. brown v board supreme, 372 US 335, google v oracle appeal")

if search_query:
    with st.spinner("Searching for cases..."):
        results = search_legal_cases(search_query)
    if results:
        title, link, citation = results[0]
        st.write("Title:\n", title)
        #st.write("Link:\n", link)
        st.write("Citation:\n", citation)
        #with st.spinner("Extracting text from case / Generating summary"):
        text = extract_text_from_link(link)
        #st.write(text)  # Optionally display the extracted text
        
        summary = get_summary(text)
        st.write(summary.text)

        
    else:
        st.write("No results found.")