hamzaherry committed
Commit de0c724 · verified · 1 Parent(s): 42c7324

Create app.py

Files changed (1)
  1. app.py +125 -0
app.py ADDED
@@ -0,0 +1,125 @@
+ import os
+ import streamlit as st
+ from PyPDF2 import PdfReader
+ from sentence_transformers import SentenceTransformer
+ import faiss
+ import matplotlib.pyplot as plt
+ import numpy as np
+ from groq import Groq
+
+ # Read the Groq API key from the environment instead of hardcoding it in the source
+ GROQ_API_KEY = os.environ.get("GROQ_API_KEY")
+ client = Groq(api_key=GROQ_API_KEY)
+
+ # Initialize embedding model
+ embedding_model = SentenceTransformer('all-MiniLM-L6-v2')
+
+ # Initialize FAISS index
+ embedding_dim = 384  # Dimensionality of 'all-MiniLM-L6-v2'
+ faiss_index = faiss.IndexFlatL2(embedding_dim)
+
+ # Metadata (chunk text and source file) stored in the same order as the FAISS vectors
+ metadata_store = []
+
+ def extract_text_from_pdf(pdf_file):
+     pdf_reader = PdfReader(pdf_file)
+     text = ""
+     for page in pdf_reader.pages:
+         # extract_text() can return None for pages without a text layer
+         text += page.extract_text() or ""
+     return text
+
+ def chunk_text(text, chunk_size=500):
+     # Split the text into chunks of roughly `chunk_size` words
+     words = text.split()
+     return [' '.join(words[i:i + chunk_size]) for i in range(0, len(words), chunk_size)]
+
+ def generate_embeddings(chunks):
+     return embedding_model.encode(chunks)
+
+ def store_embeddings(embeddings, metadata):
+     faiss_index.add(np.array(embeddings))
+     metadata_store.extend(metadata)
+
+ def retrieve_relevant_chunks(query, k=5):
+     query_embedding = embedding_model.encode([query])
+     distances, indices = faiss_index.search(query_embedding, k)
+
+     # FAISS pads the result with -1 when fewer than k vectors are indexed, so filter those out
+     valid_results = [
+         (metadata_store[i], distances[0][j])
+         for j, i in enumerate(indices[0])
+         if 0 <= i < len(metadata_store)
+     ]
+     return valid_results
+
+ def identify_research_gaps():
+     gap_summary = []
+     for i, chunk_1 in enumerate(metadata_store):
+         for j, chunk_2 in enumerate(metadata_store):
+             if i >= j:  # Avoid duplicate comparisons
+                 continue
+             # Naive heuristic: flag pairs where one chunk negates something the other asserts
+             if "not" in chunk_1["chunk"] and "is" in chunk_2["chunk"]:
+                 gap_summary.append(f"Potential contradiction between chunks {i} and {j}")
+     return gap_summary
+
+ def ask_groq_api(question, context):
+     chat_completion = client.chat.completions.create(
+         messages=[
+             {"role": "user", "content": f"{context}\n\n{question}"}
+         ],
+         model="llama3-8b-8192"
+     )
+     return chat_completion.choices[0].message.content
+
+ # Streamlit app
+ st.title("RAG-Based Research Paper Analyzer with Gap Identification")
+
+ uploaded_files = st.file_uploader("Upload PDF Files", accept_multiple_files=True, type="pdf")
+
+ if uploaded_files:
+     all_chunks = []
+     all_metadata = []
+
+     for uploaded_file in uploaded_files:
+         text = extract_text_from_pdf(uploaded_file)
+         chunks = chunk_text(text)
+         embeddings = generate_embeddings(chunks)
+         metadata = [{"chunk": chunk, "file_name": uploaded_file.name} for chunk in chunks]
+         store_embeddings(embeddings, metadata)
+         all_chunks.extend(chunks)
+         all_metadata.extend(metadata)
+
+     st.success("Files uploaded and processed successfully!")
+
+     if st.button("View Topic Summaries"):
+         for chunk in all_chunks[:3]:
+             st.write(chunk)
+
+     user_question = st.text_input("Ask a question about the uploaded papers:")
+     if user_question:
+         relevant_chunks = retrieve_relevant_chunks(user_question)
+         if relevant_chunks:
+             context = "\n\n".join([chunk['chunk'] for chunk, _ in relevant_chunks])
+             answer = ask_groq_api(user_question, context)
+             st.write("**Answer:**", answer)
+         else:
+             st.write("No relevant sections found for your question.")
+
+     if st.button("Identify Research Gaps"):
+         research_gaps = identify_research_gaps()
+         if research_gaps:
+             st.write("### Research Gaps Identified:")
+             for gap in research_gaps:
+                 st.write(f"- {gap}")
+         else:
+             st.write("No significant research gaps identified.")
+
+     if st.button("Generate Scatter Plot"):
+         st.write("Generating scatter plot for methods vs. results...")
+         # Example scatter plot (replace with real data)
+         fig, ax = plt.subplots()
+         x = np.random.rand(10)
+         y = np.random.rand(10)
+         ax.scatter(x, y)
+         ax.set_xlabel("Methods")
+         ax.set_ylabel("Results")
+         st.pyplot(fig)
+
+     st.text_area("Annotate Your Insights:", height=100, key="annotations")
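
Usage note: the script reads the Groq key from the GROQ_API_KEY environment variable, so it must be set before launch (for example as a Space secret, or `export GROQ_API_KEY=...` locally). With the imported packages installed (typically streamlit, PyPDF2, sentence-transformers, faiss-cpu, matplotlib, numpy, and groq), the app starts with `streamlit run app.py`.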