Update app.py
app.py
CHANGED
@@ -77,17 +77,17 @@ def display_projects():
 
     # Define tab titles
     tab_titles = [
-        "
-        "
-        "
-        "Stock Market Chatbot",
-        "Twitter Trend Analysis",
-        "Restaurant Recommendation System",
-        "Bitcoin Lightning Path Optimization",
-        "National Infrastructure Monitoring",
         "Job Easz",
-        "
-        "
     ]
 
     # Create tabs
@@ -95,145 +95,185 @@ def display_projects():
 
     # Add content to each tab
     with tabs[0]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
-
         """)
 
     with tabs[1]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
-
         """)
 
     with tabs[2]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
-
         """)
 
     with tabs[3]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
-
         """)
 
     with tabs[4]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
-
         """)
 
     with tabs[5]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
-
         """)
 
     with tabs[6]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
-
         """)
 
     with tabs[7]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
-
         """)
     with tabs[8]:
-        st.header("
         st.markdown("""
-        - **Description**:
-        - **
         - **Key Features**:
-        •
-        •
-        •
-        •
-
-        - **Data Engineering Highlights**:
-        • Efficient data parsing and processing using PyArrow
-        • Scalable data storage and retrieval with Hugging Face Datasets
-        • Real-time data transformation and interactive visualizations
-        - **Skills**: Web Scraping, Python, Streamlit, Hugging Face, ETL, Data Cleaning, Dashboard Building
-        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/job_easz)
         """)
     with tabs[9]:
-        st.header("
         st.markdown("""
-
-
-
-
-
-        -
-
-
-
-
     with tabs[10]:
-        st.header("
         st.markdown("""
-
-
-
-
-
-
-
-
-
-
-
-
-
-        - Skills Match: Top 5 required skills with proficiency ratings
-        - Experience Alignment: Comparison of required vs. demonstrated experience
-        - Pros and Cons: Top 3 strengths and areas for improvement
-        - Match Percentage: Overall compatibility score with explanation
-        2. **In-Depth Analysis**:
-        - Comprehensive Skill Gap Analysis
-        - Detailed Experience and Impact Analysis
-        - Content Enhancement Recommendations
-        - Strategic Recommendations for Competitive Edge
-        - Application Strategy Suggestions
-        3. **Resume Enhancement**:
-        - Optimizes content based on analysis
-        - Improves formatting and structure
-        - Highlights key achievements and skills
-        - Ensures ATS compatibility
-
-        **User Experience**:
-        - Intuitive Streamlit interface
-        - Visual diff to highlight resume changes
-        - Multiple download options (DOCX, HTML, TXT)
-
-        **Limitations**:
-        - GROQ API token limit (100,000 tokens per model)
-        - Potential wait times for API rate limits
-
-        This powerful tool streamlines the job application process by providing tailored resume optimization, increasing candidates' chances of success in their job search.
-        - **Reference**: [Link to Project](https://resume-easz.streamlit.app/)
-        """)
-
-
 def display_skills():
     st.markdown('## Skills')
     st.write("""

@@ -77,17 +77,17 @@ def display_projects():
 
     # Define tab titles
     tab_titles = [
+        "Resume & CV Crafter",
+        "Multi-Agent Job Search",
+        "Resume Easz",
         "Job Easz",
+        "Bitcoin Lightning Optimization",
+        "National Infrastructure Monitoring",
+        "Stock Market Analysis",
+        "Twitter Trend Analysis",
+        "Restaurant Recommendation",
+        "ASL Translator",
+        "Squat Easy"
     ]
 
     # Create tabs
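The hunk above only rewrites the list of titles; the call that actually builds the tabs (line 94) sits between the two hunks and is not shown in the diff. A minimal sketch of the pattern the new titles feed into, assuming the unshown line uses Streamlit's st.tabs:

import streamlit as st

# Each title becomes one tab container; the containers are used as context managers below.
tab_titles = ["Resume & CV Crafter", "Multi-Agent Job Search", "Resume Easz"]
tabs = st.tabs(tab_titles)

with tabs[0]:
    st.header("LLM-powered Resume & CV Crafter")
    st.markdown("- **Description**: project summary goes here")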
@@ -95,145 +95,185 @@
 
     # Add content to each tab
     with tabs[0]:
+        st.header("LLM-powered Resume & CV Crafter")
         st.markdown("""
+        - **Description**: Developed AI platform combining LLaMA-3 70B and Deepseek R1 with low-temperature settings for stable, tailored resume and CV generation
+        - **Key Features**:
+          • Smart Matching Algorithm analyzing profiles against job requirements
+          • LaTeX-Powered Resumes with professional formatting
+          • Automated 4-paragraph Cover Letter Generation
+          • Performance Metrics evaluating match quality
+        - **Technical Achievements**:
+          • Implemented dual-agent architecture: LLaMA-3 8B for profile analysis and 70B for LaTeX generation
+          • Engineered JSON schema validation system for error-free template integration
+          • Achieved 5,000+ LinkedIn impressions with 80% reduction in creation time
+        - **Technologies**: Streamlit, GROQ API (LLaMA-3 70B), LaTeX, JSON Schema
+        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/Resume_and_CV_crafter)
         """)
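The "JSON schema validation system" bullet above is the step that keeps the LLM's structured output compatible with the LaTeX template. A small sketch of that idea using the jsonschema package; the schema fields and calling convention are illustrative assumptions, not taken from the app:

import json
from jsonschema import ValidationError, validate

# Illustrative schema; the real field names used by the app are not visible in this diff.
RESUME_SCHEMA = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "skills": {"type": "array", "items": {"type": "string"}},
        "experience": {"type": "array", "items": {"type": "object"}},
    },
    "required": ["name", "skills"],
}

def parse_and_validate(llm_reply: str) -> dict:
    """Parse the model's JSON reply and reject anything the LaTeX template cannot consume."""
    data = json.loads(llm_reply)
    try:
        validate(instance=data, schema=RESUME_SCHEMA)
    except ValidationError as err:
        raise ValueError(f"LLM reply failed schema validation: {err.message}") from err
    return data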
 
     with tabs[1]:
+        st.header("Multi-Agent Job Search System")
         st.markdown("""
+        - **Description**: Built an AI-powered job search assistant using dual-LLaMA architecture for comprehensive job matching and analysis
+        - **Key Features**:
+          • Real-time scraping across LinkedIn, Glassdoor, Indeed, ZipRecruiter
+          • Advanced resume parsing and job matching
+          • Intelligent compatibility scoring system
+        - **Technical Achievements**:
+          • Developed batch processing pipeline handling 60+ positions/search
+          • Reduced job search time by 80% through accurate matching
+          • Implemented specialized agents for input processing, scraping, and analysis
+        - **Technologies**: GROQ API, jobspy, Streamlit, Pandas, LLMOps
+        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/Multi_Agent_Job_search_and_match)
         """)
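The "batch processing pipeline handling 60+ positions/search" bullet describes chunking scraped postings before they are scored. A sketch of that batching-and-ranking step in plain Python; score_compatibility is a hypothetical stand-in for the LLM-backed scoring agent:

def score_compatibility(resume_text: str, posting: dict) -> float:
    """Placeholder for the LLM scoring agent: crude keyword overlap between resume and posting."""
    resume_words = set(resume_text.lower().split())
    posting_words = set(posting.get("description", "").lower().split())
    return len(resume_words & posting_words) / max(len(posting_words), 1)

def rank_postings(resume_text: str, postings: list, batch_size: int = 60) -> list:
    # Process postings in fixed-size batches, then sort everything best-match-first.
    scored = []
    for start in range(0, len(postings), batch_size):
        batch = postings[start:start + batch_size]
        scored.extend((score_compatibility(resume_text, p), p) for p in batch)
    return [posting for _, posting in sorted(scored, key=lambda pair: pair[0], reverse=True)]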
 
     with tabs[2]:
+        st.header("Resume Easz")
         st.markdown("""
+        - **Description**: Created an AI-driven resume analysis and enhancement tool using LLaMA 3.3 model
+        - **Key Features**:
+          • Quick and in-depth resume analysis options
+          • Comprehensive skill gap analysis
+          • ATS compatibility optimization
+          • Multiple output formats (DOCX, HTML, TXT)
+        - **Technical Implementation**:
+          • Integrated GROQ API for advanced language processing
+          • Built visual diff system for resume changes
+          • Developed custom prompt engineering pipeline
+        - **Technologies**: GROQ API, Streamlit, Python, LLM
+        - **Reference**: [Link to Project](https://resume-easz.streamlit.app/)
         """)
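The "visual diff system for resume changes" bullet maps to a before/after comparison of the resume text. One standard way to render that with Python's difflib, shown as a sketch; the function name and the Streamlit call in the comment are assumptions:

import difflib

def diff_as_html(original_resume: str, enhanced_resume: str) -> str:
    """Render a side-by-side HTML diff of the original and enhanced resume text."""
    differ = difflib.HtmlDiff(wrapcolumn=80)
    return differ.make_table(
        original_resume.splitlines(),
        enhanced_resume.splitlines(),
        fromdesc="Original",
        todesc="Enhanced",
        context=True,
        numlines=2,
    )

# Inside the app this could be displayed with:
# st.markdown(diff_as_html(before, after), unsafe_allow_html=True)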
 
     with tabs[3]:
+        st.header("Job Easz")
         st.markdown("""
+        - **Description**: Engineered comprehensive job aggregation platform for data roles with advanced analytics
+        - **Technical Achievements**:
+          • Designed Airflow pipeline with exponential backoff retry (120-480s intervals)
+          • Optimized concurrent processing reducing runtime from 2h to 40min
+          • Processes ~3000 daily job listings across various data roles
+        - **Key Features**:
+          • Daily updates with comprehensive job role coverage
+          • Custom filtering by role and location
+          • Interactive dashboard for market trends
+          • Automated ETL pipeline
+        - **Technologies**: Python, Airflow, ThreadPoolExecutor, Hugging Face Datasets
+        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/job_easz)
         """)
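Two of the Job Easz bullets are concrete engineering settings: exponential backoff retries in the 120-480 s range and concurrent scraping. A sketch of both under those assumptions; fetch_listings_for_role is a hypothetical stand-in for the real scraping task:

from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import timedelta

# Retry settings matching the 120-480s backoff described above; these are standard
# Airflow task arguments typically passed to a DAG as default_args.
default_args = {
    "retries": 3,
    "retry_delay": timedelta(seconds=120),
    "retry_exponential_backoff": True,
    "max_retry_delay": timedelta(seconds=480),
}

def fetch_listings_for_role(role: str) -> list:
    """Hypothetical scraper stub; the real implementation is not part of this diff."""
    return [{"role": role, "title": f"{role} opening"}]

def fetch_all(roles: list, workers: int = 8) -> list:
    # Fan the per-role scrapes out across a thread pool instead of running them serially.
    listings = []
    with ThreadPoolExecutor(max_workers=workers) as pool:
        futures = {pool.submit(fetch_listings_for_role, role): role for role in roles}
        for future in as_completed(futures):
            listings.extend(future.result())
    return listings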
 
     with tabs[4]:
+        st.header("Bitcoin Lightning Path Optimization")
         st.markdown("""
+        - **Description**: Advanced payment routing optimization system for Bitcoin Lightning Network
+        - **Technical Achievements**:
+          • Developed ML classifiers achieving 98.77-99.10% accuracy
+          • Implemented tri-model consensus system for optimal routing
+          • Engineered ensemble models with 0.98 F1-scores
+        - **Implementation Details**:
+          • Created simulation environment for multi-channel transactions
+          • Optimized graph-based algorithms for payment routing
+          • Integrated with Lightning payment interceptor
+        - **Technologies**: XGBoost, Random Forest, AdaBoost, Graph Algorithms
         """)
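The "tri-model consensus system" bullet is majority voting across three classifiers. A sketch of that idea with scikit-learn estimators standing in for the XGBoost/Random Forest/AdaBoost trio; the synthetic features and labels are placeholders for the real channel/route data:

import numpy as np
from sklearn.ensemble import (AdaBoostClassifier, GradientBoostingClassifier,
                              RandomForestClassifier, VotingClassifier)
from sklearn.model_selection import train_test_split

# Synthetic stand-in for per-route features (capacity, fee rate, hop count, ...).
rng = np.random.default_rng(0)
X = rng.normal(size=(1000, 6))
y = (X[:, 0] + 0.5 * X[:, 1] > 0).astype(int)   # 1 = payment route expected to succeed
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Hard voting: each model casts one vote and the majority decides the routing label.
consensus = VotingClassifier(
    estimators=[
        ("rf", RandomForestClassifier(n_estimators=200, random_state=0)),
        ("ada", AdaBoostClassifier(random_state=0)),
        ("gb", GradientBoostingClassifier(random_state=0)),   # stand-in for XGBoost
    ],
    voting="hard",
)
consensus.fit(X_train, y_train)
print("consensus accuracy:", consensus.score(X_test, y_test))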
 
     with tabs[5]:
+        st.header("National Infrastructure Monitoring")
         st.markdown("""
+        - **Description**: Developed satellite imagery analysis system for infrastructure change detection
+        - **Technical Achievements**:
+          • Fine-tuned ViT+ResNet-101 ensemble on 40GB satellite dataset
+          • Achieved 85% accuracy in change detection
+          • Implemented 8 parallel GPU threads for enhanced performance
+        - **Key Features**:
+          • Temporal analysis with 1km resolution
+          • Interactive map interface with bounding box selection
+          • Automatic image chipping for 256x256 inputs
+          • Contrast adjustment optimization
+        - **Technologies**: Change ViT Model, Google Earth Engine, PyTorch, Computer Vision
+        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/Data298)
         """)
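The "automatic image chipping for 256x256 inputs" bullet is a tiling step before inference. A NumPy sketch of that operation; the zero-padding of ragged edges is an assumption about how partial tiles are handled:

import numpy as np

def chip_image(scene: np.ndarray, chip_size: int = 256) -> list:
    """Split an (H, W, C) scene into non-overlapping chip_size x chip_size tiles,
    zero-padding the bottom/right edges so every tile is full size."""
    height, width, _ = scene.shape
    pad_h = (-height) % chip_size
    pad_w = (-width) % chip_size
    padded = np.pad(scene, ((0, pad_h), (0, pad_w), (0, 0)))
    chips = []
    for row in range(0, padded.shape[0], chip_size):
        for col in range(0, padded.shape[1], chip_size):
            chips.append(padded[row:row + chip_size, col:col + chip_size])
    return chips

# A 1000x1200 RGB scene yields ceil(1000/256) * ceil(1200/256) = 4 * 5 = 20 chips.
print(len(chip_image(np.zeros((1000, 1200, 3), dtype=np.uint8))))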
 
     with tabs[6]:
+        st.header("Stock Market Analysis with OpenAI Integration")
         st.markdown("""
+        - **Description**: Created comprehensive stock market analysis system with multilingual capabilities
+        - **Technical Achievements**:
+          • Built Spark streaming pipeline with 30% efficiency improvement
+          • Orchestrated Airflow Docker pipeline for Snowflake integration
+          • Developed bilingual GPT-3.5 chatbot for SQL query generation
+        - **Key Features**:
+          • Real-time financial metric calculations
+          • Custom indicator generation
+          • Multilingual query support
+          • Automated data warehousing
+        - **Technologies**: PySpark, Apache Airflow, Snowflake, OpenAI GPT-3.5
         """)
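The "bilingual GPT-3.5 chatbot for SQL query generation" bullet is a text-to-SQL call. A minimal sketch using the OpenAI Python client (the v1-style interface is assumed); the table hint and prompt wording are illustrative, not taken from the app:

from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

TABLE_HINT = "Table stock_prices(symbol TEXT, trade_date DATE, close FLOAT, volume INT)"

def question_to_sql(question: str) -> str:
    """Translate a natural-language question (in any language) into a SQL query."""
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system",
             "content": f"You write SQL for Snowflake. {TABLE_HINT}. Reply with SQL only."},
            {"role": "user", "content": question},
        ],
        temperature=0,
    )
    return response.choices[0].message.content.strip()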
 
     with tabs[7]:
+        st.header("Twitter Trend Analysis")
         st.markdown("""
+        - **Description**: Engineered comprehensive Twitter analytics platform using GCP services
+        - **Technical Achievements**:
+          • Developed GCP pipeline processing 40k tweets
+          • Achieved 40% efficiency improvement through custom Airflow operators
+          • Implemented real-time trend analysis algorithms
+        - **Key Features**:
+          • Automated ETL workflows
+          • Interactive Tableau dashboards
+          • Viral metrics tracking
+          • Engagement rate calculations
+        - **Technologies**: Google Cloud Platform, BigQuery, Apache Airflow, Tableau
         """)
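The "custom Airflow operators" bullet refers to subclassing BaseOperator so a reusable step can live inside the DAG. A skeleton sketch of what such an operator looks like; the operator name and the hashtag-counting logic are hypothetical:

from airflow.models.baseoperator import BaseOperator

class TrendAggregationOperator(BaseOperator):
    """Hypothetical operator: count hashtags in a batch of tweets and push the result to XCom."""

    def __init__(self, batch_size: int = 1000, **kwargs):
        super().__init__(**kwargs)
        self.batch_size = batch_size

    def execute(self, context):
        tweets = self._load_batch()                  # stand-in for a BigQuery read
        counts = {}
        for tweet in tweets:
            for tag in tweet.get("hashtags", []):
                counts[tag] = counts.get(tag, 0) + 1
        return counts                                # returned value is pushed to XCom

    def _load_batch(self) -> list:
        return [{"hashtags": ["data"]}] * self.batch_size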
+
     with tabs[8]:
+        st.header("Restaurant Recommendation System")
         st.markdown("""
+        - **Description**: Built hybrid recommendation system combining multiple filtering approaches
+        - **Technical Achievements**:
+          • Created hybrid TF-IDF and SVD-based filtering system
+          • Achieved 43% improvement in recommendation relevance
+          • Reduced computation time by 65%
         - **Key Features**:
+          • Location-based suggestions
+          • Personalized recommendations
+          • Interactive web interface
+          • Efficient matrix factorization
+        - **Technologies**: Collaborative Filtering, Content-Based Filtering, Flask, Folium
         """)
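The Restaurant Recommendation bullets describe blending TF-IDF content similarity with SVD-based collaborative scores. A small scikit-learn sketch of that blend; the toy data and the 50/50 weighting are assumptions:

import numpy as np
from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

# Content side: TF-IDF over restaurant descriptions.
descriptions = ["spicy ramen and noodles", "wood-fired pizza", "vegan ramen bowls"]
content_sim = cosine_similarity(TfidfVectorizer().fit_transform(descriptions))

# Collaborative side: truncated SVD over a (users x restaurants) rating matrix.
ratings = np.array([[5, 0, 4], [0, 4, 0], [3, 0, 5]], dtype=float)
svd = TruncatedSVD(n_components=2, random_state=0)
user_factors = svd.fit_transform(ratings)
collab_scores = user_factors @ svd.components_      # predicted rating-like scores

def recommend(user_idx: int, liked_idx: int, alpha: float = 0.5) -> int:
    """Blend similarity to a liked restaurant with the user's collaborative scores."""
    blended = alpha * content_sim[liked_idx] + (1 - alpha) * collab_scores[user_idx]
    blended[liked_idx] = -np.inf                     # don't re-recommend the same place
    return int(np.argmax(blended))

print(recommend(user_idx=0, liked_idx=0))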
+
     with tabs[9]:
+        st.header("ASL Translator")
         st.markdown("""
+        - **Description**: Developed real-time American Sign Language translation system
+        - **Technical Achievements**:
+          • Achieved 95% accuracy in real-time gesture interpretation
+          • Implemented adaptive hand skeleton GIF generator
+          • Optimized MediaPipe integration for point detection
+        - **Key Features**:
+          • Real-time hand tracking
+          • Visual feedback system
+          • Intuitive gesture recognition
+          • Accessible interface
+        - **Technologies**: MediaPipe Hand Detection, Random Forest, Hugging Face Platform
+        - **Reference**: [Link to Project](https://huggingface.co/spaces/Niharmahesh/slr-easz)
+        """)
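The ASL Translator bullets pair MediaPipe hand landmarks with a Random Forest classifier. A sketch of the landmark-to-feature step using MediaPipe's commonly documented Hands solution; treat the exact API details and the downstream classifier wiring as assumptions:

import mediapipe as mp
import numpy as np

def landmarks_to_features(rgb_frame: np.ndarray):
    """Return a flat (63,) vector of x/y/z hand landmarks, or None if no hand is detected."""
    with mp.solutions.hands.Hands(static_image_mode=True, max_num_hands=1) as hands:
        result = hands.process(rgb_frame)
    if not result.multi_hand_landmarks:
        return None
    points = result.multi_hand_landmarks[0].landmark     # 21 landmarks with x, y, z
    return np.array([[p.x, p.y, p.z] for p in points]).flatten()

# Downstream, a Random Forest maps the 63-dim vector to a letter, e.g.:
# from sklearn.ensemble import RandomForestClassifier
# clf = RandomForestClassifier().fit(feature_matrix, letter_labels)
# letter = clf.predict(landmarks_to_features(frame).reshape(1, -1))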
+
     with tabs[10]:
+        st.header("Squat Easy")
         st.markdown("""
+        - **Description**: Developed deep learning system for squat form analysis and error detection
+        - **Technical Achievements**:
+          • Engineered custom BiLSTM architecture in PyTorch
+          • Achieved 81% training and 75% test accuracy
+          • Implemented CUDA-based GPU acceleration
+        - **Key Features**:
+          • Real-time form analysis
+          • Six-type error classification
+          • Video processing pipeline
+          • Performance optimization
+        - **Technologies**: PyTorch, BiLSTM, CUDA, Object-Oriented Programming
+        - **Reference**: [Link to Project](https://github.com/niharpalem/squateasy_DL)
+        """)
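The Squat Easy bullets name a custom BiLSTM that classifies six squat-form errors from pose sequences. A minimal PyTorch sketch of such an architecture; the per-frame feature size and hidden dimensions are assumptions:

import torch
import torch.nn as nn

class SquatBiLSTM(nn.Module):
    """Bidirectional LSTM over per-frame pose features, ending in a 6-way error classifier."""

    def __init__(self, n_features: int = 34, hidden: int = 128, n_classes: int = 6):
        super().__init__()
        self.lstm = nn.LSTM(n_features, hidden, num_layers=2,
                            batch_first=True, bidirectional=True, dropout=0.3)
        self.head = nn.Linear(2 * hidden, n_classes)     # 2x hidden for the two directions

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, frames, n_features) -> logits: (batch, n_classes)
        out, _ = self.lstm(x)
        return self.head(out[:, -1])                     # classify from the last time step

device = "cuda" if torch.cuda.is_available() else "cpu"
model = SquatBiLSTM().to(device)
print(model(torch.randn(4, 120, 34, device=device)).shape)  # torch.Size([4, 6])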
 def display_skills():
     st.markdown('## Skills')
     st.write("""