geethareddy committed
Commit b2e75c7 · verified · 1 Parent(s): 85240d3

Update app.py

Files changed (1)
  app.py +4 -4
app.py CHANGED
@@ -19,7 +19,7 @@ KPI_FLAG_DEFAULT = os.getenv('KPI_FLAG', 'True') == 'True'
 ENGAGEMENT_SCORE_DEFAULT = float(os.getenv('ENGAGEMENT_SCORE', '85.0'))
 
 # Load model and tokenizer
-model_name = "gpt2-medium" # Changed from distilgpt2 to gpt2-medium for better generation
+model_name = "gpt2-medium" # Using gpt2-medium as previously updated
 tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=True)
 model = AutoModelForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=True)
 
@@ -155,7 +155,7 @@ def generate_fallback_checklist(role, milestones):
 
     # If milestones are provided, add them
     if milestones and milestones.strip() and milestones != "A milestone is a numbered marker placed on a route such as a road, railway line, canal or boundary.":
-        kpis = [kpi.strip() for kpi in milestones split(",")]
+        kpis = [kpi.strip() for kpi in milestones.split(",")] # Fixed syntax error here
         for kpi in kpis:
             checklist_items.append(f"- Ensure progress on {kpi}")
     else:
@@ -169,7 +169,7 @@ def generate_fallback_suggestions(reflection):
     reflection_lower = reflection.lower()
     if "student" in reflection_lower or "learning" in reflection_lower:
         suggestions_items.append("- Ensure students are logging incidents consistently")
-        suggestions_items.append("- Provide Provide guidance on timely incident recording")
+        suggestions_items.append("- Provide guidance on timely incident recording") # Fixed typo "Provide Provide"
     if "incident" in reflection_lower:
         suggestions_items.append("- Follow up on reported incidents with corrective actions")
 
@@ -179,7 +179,7 @@ def generate_fallback_suggestions(reflection):
 
     return "\n".join(suggestions_items)
 
-# Generate Function (Updated)
+# Generate Function
 def generate_outputs(role, supervisor_name, project_id, milestones, reflection):
     if not all([role, supervisor_name, project_id, milestones, reflection]):
         return "❗ Please fill all fields.", "", ""