Update app.py
app.py CHANGED
@@ -3,7 +3,8 @@ import streamlit as st
 from PIL import Image
 from streamlit_lottie import st_lottie
 import json
-from streamlit_option_menu import option_menu
+from streamlit_option_menu import option_menu
+from projects import display_projects
 
 #setting layout to wide
 st.set_page_config(layout="wide")
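The only functional change in this hunk is the new import, from projects import display_projects: the projects page now comes from a separate projects module instead of being defined in app.py. The call site is not part of this diff, so the sketch below is only a guess at how app.py presumably dispatches to the imported function from its option_menu navigation; the menu labels and the exact wiring are assumptions, while display_work_experience and display_skills are functions that do exist in app.py per the hunk that follows.

    # Hypothetical navigation wiring in app.py (not shown in this diff);
    # only the import of display_projects is confirmed by the change above.
    from streamlit_option_menu import option_menu
    from projects import display_projects

    selected = option_menu(
        menu_title=None,
        options=["Experience", "Projects", "Skills"],  # assumed labels
        orientation="horizontal",
    )

    if selected == "Projects":
        display_projects()           # provided by the new projects module
    elif selected == "Skills":
        display_skills()             # still defined in app.py
    else:
        display_work_experience()    # still defined in app.py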
@@ -69,208 +70,6 @@ def display_work_experience():
     - Find my work on [Medium](https://medium.com/@nihar-palem) and [Substack](https://niharpalem.substack.com/publish/posts).
     """)
 
-def display_projects():
-    st.title('My Projects')
-
-    # Define tab titles
-    tab_titles = [
-        "Resume & CV Crafter",
-        "Multi-Agent Job Search",
-        "Resume Easz",
-        "Job Easz",
-        "Bitcoin Lightning Optimization",
-        "National Infrastructure Monitoring",
-        "Stock Market Analysis",
-        "Twitter Trend Analysis",
-        "Restaurant Recommendation",
-        "ASL Translator",
-        "Squat Easy"
-    ]
-
-    # Create tabs
-    tabs = st.tabs(tab_titles)
-
-    # Add content to each tab
-    with tabs[0]:
-        st.subheader("LLM-powered Resume & CV Crafter")
-        st.markdown("""
-        - **Description**: Developed AI platform combining LLaMA-3 70B and Deepseek R1 with low-temperature settings for stable, tailored resume and CV generation
-        - **Key Features**:
-          • Smart Matching Algorithm analyzing profiles against job requirements
-          • LaTeX-Powered Resumes with professional formatting
-          • Automated 4-paragraph Cover Letter Generation
-          • Performance Metrics evaluating match quality
-        - **Technical Achievements**:
-          • Implemented dual-agent architecture: LLaMA-3 8B for profile analysis and 70B for LaTeX generation
-          • Engineered JSON schema validation system for error-free template integration
-          • Achieved 5,000+ LinkedIn impressions with 80% reduction in creation time
-        - **Technologies**: Streamlit, GROQ API (LLaMA-3 70B), LaTeX, JSON Schema
-        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/Resume_and_CV_crafter)
-        """)
-
-    with tabs[1]:
-        st.subheader("Multi-Agent Job Search System")
-        st.markdown("""
-        - **Description**: Built an AI-powered job search assistant using dual-LLaMA architecture for comprehensive job matching and analysis
-        - **Key Features**:
-          • Real-time scraping across LinkedIn, Glassdoor, Indeed, ZipRecruiter
-          • Advanced resume parsing and job matching
-          • Intelligent compatibility scoring system
-        - **Technical Achievements**:
-          • Developed batch processing pipeline handling 60+ positions/search
-          • Reduced job search time by 80% through accurate matching
-          • Implemented specialized agents for input processing, scraping, and analysis
-        - **Technologies**: GROQ API, jobspy, Streamlit, Pandas, LLMOps
-        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/Multi_Agent_Job_search_and_match)
-        """)
-
-    with tabs[2]:
-        st.subheader("Resume Easz")
-        st.markdown("""
-        - **Description**: Created an AI-driven resume analysis and enhancement tool using LLaMA 3.3 model
-        - **Key Features**:
-          • Quick and in-depth resume analysis options
-          • Comprehensive skill gap analysis
-          • ATS compatibility optimization
-          • Multiple output formats (DOCX, HTML, TXT)
-        - **Technical Implementation**:
-          • Integrated GROQ API for advanced language processing
-          • Built visual diff system for resume changes
-          • Developed custom prompt engineering pipeline
-        - **Technologies**: GROQ API, Streamlit, Python, LLM
-        - **Reference**: [Link to Project](https://resume-easz.streamlit.app/)
-        """)
-
-    with tabs[3]:
-        st.subheader("Job Easz")
-        st.markdown("""
-        - **Description**: Engineered comprehensive job aggregation platform for data roles with advanced analytics
-        - **Technical Achievements**:
-          • Designed Airflow pipeline with exponential backoff retry (120-480s intervals)
-          • Optimized concurrent processing reducing runtime from 2h to 40min
-          • Processes ~3000 daily job listings across various data roles
-        - **Key Features**:
-          • Daily updates with comprehensive job role coverage
-          • Custom filtering by role and location
-          • Interactive dashboard for market trends
-          • Automated ETL pipeline
-        - **Technologies**: Python, Airflow, ThreadPoolExecutor, Hugging Face Datasets
-        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/job_easz)
-        """)
-
-    with tabs[4]:
-        st.subheader("Bitcoin Lightning Path Optimization")
-        st.markdown("""
-        - **Description**: Advanced payment routing optimization system for Bitcoin Lightning Network
-        - **Technical Achievements**:
-          • Developed ML classifiers achieving 98.77-99.10% accuracy
-          • Implemented tri-model consensus system for optimal routing
-          • Engineered ensemble models with 0.98 F1-scores
-        - **Implementation Details**:
-          • Created simulation environment for multi-channel transactions
-          • Optimized graph-based algorithms for payment routing
-          • Integrated with Lightning payment interceptor
-        - **Technologies**: XGBoost, Random Forest, AdaBoost, Graph Algorithms
-        """)
-
-    with tabs[5]:
-        st.subheader("National Infrastructure Monitoring")
-        st.markdown("""
-        - **Description**: Developed satellite imagery analysis system for infrastructure change detection
-        - **Technical Achievements**:
-          • Fine-tuned ViT+ResNet-101 ensemble on 40GB satellite dataset
-          • Achieved 85% accuracy in change detection
-          • Implemented 8 parallel GPU threads for enhanced performance
-        - **Key Features**:
-          • Temporal analysis with 1km resolution
-          • Interactive map interface with bounding box selection
-          • Automatic image chipping for 256x256 inputs
-          • Contrast adjustment optimization
-        - **Technologies**: Change ViT Model, Google Earth Engine, PyTorch, Computer Vision
-        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/Data298)
-        """)
-
-    with tabs[6]:
-        st.subheader("Stock Market Analysis with OpenAI Integration")
-        st.markdown("""
-        - **Description**: Created comprehensive stock market analysis system with multilingual capabilities
-        - **Technical Achievements**:
-          • Built Spark streaming pipeline with 30% efficiency improvement
-          • Orchestrated Airflow Docker pipeline for Snowflake integration
-          • Developed bilingual GPT-3.5 chatbot for SQL query generation
-        - **Key Features**:
-          • Real-time financial metric calculations
-          • Custom indicator generation
-          • Multilingual query support
-          • Automated data warehousing
-        - **Technologies**: PySpark, Apache Airflow, Snowflake, OpenAI GPT-3.5
-        """)
-
-    with tabs[7]:
-        st.subheader("Twitter Trend Analysis")
-        st.markdown("""
-        - **Description**: Engineered comprehensive Twitter analytics platform using GCP services
-        - **Technical Achievements**:
-          • Developed GCP pipeline processing 40k tweets
-          • Achieved 40% efficiency improvement through custom Airflow operators
-          • Implemented real-time trend analysis algorithms
-        - **Key Features**:
-          • Automated ETL workflows
-          • Interactive Tableau dashboards
-          • Viral metrics tracking
-          • Engagement rate calculations
-        - **Technologies**: Google Cloud Platform, BigQuery, Apache Airflow, Tableau
-        """)
-
-    with tabs[8]:
-        st.subheader("Restaurant Recommendation System")
-        st.markdown("""
-        - **Description**: Built hybrid recommendation system combining multiple filtering approaches
-        - **Technical Achievements**:
-          • Created hybrid TF-IDF and SVD-based filtering system
-          • Achieved 43% improvement in recommendation relevance
-          • Reduced computation time by 65%
-        - **Key Features**:
-          • Location-based suggestions
-          • Personalized recommendations
-          • Interactive web interface
-          • Efficient matrix factorization
-        - **Technologies**: Collaborative Filtering, Content-Based Filtering, Flask, Folium
-        """)
-
-    with tabs[9]:
-        st.subheader("ASL Translator")
-        st.markdown("""
-        - **Description**: Developed real-time American Sign Language translation system
-        - **Technical Achievements**:
-          • Achieved 95% accuracy in real-time gesture interpretation
-          • Implemented adaptive hand skeleton GIF generator
-          • Optimized MediaPipe integration for point detection
-        - **Key Features**:
-          • Real-time hand tracking
-          • Visual feedback system
-          • Intuitive gesture recognition
-          • Accessible interface
-        - **Technologies**: MediaPipe Hand Detection, Random Forest, Hugging Face Platform
-        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/slr-easz)
-        """)
-
-    with tabs[10]:
-        st.subheader("Squat Easy")
-        st.markdown("""
-        - **Description**: Developed deep learning system for squat form analysis and error detection
-        - **Technical Achievements**:
-          • Engineered custom BiLSTM architecture in PyTorch
-          • Achieved 81% training and 75% test accuracy
-          • Implemented CUDA-based GPU acceleration
-        - **Key Features**:
-          • Real-time form analysis
-          • Six-type error classification
-          • Video processing pipeline
-          • Performance optimization
-        - **Technologies**: PyTorch, BiLSTM, CUDA, Object-Oriented Programming
-        - **Reference**: [Link to Project](https://github.com/niharpalem/squateasy_DL)
-        """)
 def display_skills():
     st.title('Skills')
 
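Taken together with the new import in the first hunk, the removal above is a refactor rather than a deletion: display_projects() presumably moves, essentially verbatim, into a new projects.py module that this diff does not show. A minimal sketch of what that module would need to contain, assuming a straight move of the code removed above, is:

    # projects.py -- assumed companion module (not included in this commit's diff)
    import streamlit as st

    def display_projects():
        st.title('My Projects')
        tab_titles = [
            "Resume & CV Crafter",
            "Multi-Agent Job Search",
            # ... remaining nine titles exactly as in the block removed above ...
        ]
        tabs = st.tabs(tab_titles)

        with tabs[0]:
            st.subheader("LLM-powered Resume & CV Crafter")
            st.markdown("""
            - **Description**: ...
            """)
        # ... remaining tabs carried over unchanged from the removed block ...

The only addition the moved code itself would need is the module's own import streamlit as st, since app.py's imports no longer cover it.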