Danielos100 committed on
Commit
9769a4a
·
verified ·
1 Parent(s): 078cf80

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +88 -24
app.py CHANGED
@@ -21,7 +21,7 @@ import torch
21
  print(f"===== Application Startup at {pd.Timestamp.now().strftime('%Y-%m-%d %H:%M:%S')} =====")
22
 
23
  # --------------------- Config ---------------------
24
- TITLE = "# 🎁 GIfty+ Smart Gift Recommender\n*Top-3 catalog picks + 1 DIY gift + personalized message*"
25
 
26
  DATASET_ID = os.getenv("DATASET_ID", "Danielos100/Amazon_products_clean")
27
  DATASET_SPLIT = os.getenv("DATASET_SPLIT", "train")
@@ -673,8 +673,11 @@ def diy_generate(profile: Dict) -> Tuple[dict, str]:
673
  "estimated_time_minutes": minutes,
674
  })
675
  return idea, "ok"
 
 
676
 
677
  # --------------------- Personalized Message (FLAN, מגוון + ולידציה) ---------------------
 
678
  MSG_MODEL_ID = "google/flan-t5-small"
679
  MSG_DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
680
  TEMP_RANGE = (0.88, 1.10)
@@ -705,9 +708,9 @@ TONE_STYLES: Dict[str, Dict[str, List[str]]] = {
705
  "Funny": {
706
  "system": "Write 2–3 witty sentences with playful humor.",
707
  "rules": [
708
- "Add one subtle pun linked to the occasion or interests.",
709
- "No slapstick; keep it tasteful.",
710
- "End with a cheeky nudge."
711
  ],
712
  },
713
  "Heartfelt": {
@@ -796,7 +799,7 @@ def _sentences_n(s: str) -> int:
796
 
797
  def _contains_any(text: str, terms: List[str]) -> bool:
798
  t = text.lower()
799
- return any(term and term.lower() in t for term in terms)
800
 
801
  def _too_similar(a: str, b: str, n=3, thr=0.85) -> bool:
802
  def ngrams(txt):
@@ -870,9 +873,10 @@ def generate_personal_message(profile: Dict[str, Any], seed: Optional[int]=None,
870
  )
871
  text = _norm(tok.decode(out_ids[0], skip_special_tokens=True))
872
 
 
873
  ok_len = 1 <= _sentences_n(text) <= 3
874
- name_ok = _contains_any(text, [need["name"]])
875
- occ_ok = _contains_any(text, [need["occ"], need["occ"].split()[0]])
876
  ban_ok = not _contains_any(text, BAN_PHRASES)
877
  prev = previous_message or _last_msg
878
  dup_ok = (prev is None) or (not _too_similar(text, prev, n=3, thr=0.85))
@@ -887,6 +891,7 @@ def generate_personal_message(profile: Dict[str, Any], seed: Optional[int]=None,
887
  fallback = tried[-1]["text"] if tried else f"Happy {(_clean_occasion(profile.get('occ_ui') or 'day')).lower()}, {profile.get('recipient_name','Friend')}!"
888
  _last_msg = fallback
889
  return {"message": fallback, "meta": {"failed": True, "model": MSG_MODEL_ID, "tone": profile.get("tone","Heartfelt")}}
 
890
 
891
  # --------------------- Rendering ---------------------
892
  def md_escape(text: str) -> str:
@@ -931,28 +936,91 @@ def render_top3_html(df: pd.DataFrame, age_label: str) -> str:
931
  with gr.Blocks(
932
  title="๐ŸŽ GIfty โ€” Recommender + DIY",
933
  css="""
 
934
  #explain {opacity:.85;font-size:.92em;margin-bottom:8px;}
935
- /* הסתרת מסגרת/היילייט כתומים של טבלת הדוגמאות */
936
- .handsontable .wtBorder, .handsontable .htBorders, .handsontable .wtBorder.current { display: none !important; }
937
- .gr-dataframe table td:focus { outline: none !important; box-shadow: none !important; }
938
 
939
- /* === הופך כל שורת דוגמה ל"כפתור" === */
940
  .gr-dataframe thead { display:none; }
941
- .gr-dataframe table { border-collapse: separate !important; border-spacing: 0 8px !important; }
 
 
942
  .gr-dataframe tbody tr {
943
  cursor: pointer;
944
- background: #fff;
945
- border-radius: 12px;
946
- box-shadow: 0 1px 0 rgba(0,0,0,.06);
 
 
 
 
 
 
 
 
 
947
  }
 
 
948
  .gr-dataframe tbody tr td {
949
- border-top: 1px solid #eee !important;
950
- border-bottom: 1px solid #eee !important;
 
 
 
951
  }
952
- .gr-dataframe tbody tr:hover {
953
- background: #f7fafc;
954
- box-shadow: 0 0 0 2px #e2e8f0 inset;
 
 
 
 
 
 
 
 
 
955
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
956
  """
957
  ) as demo:
958
  gr.Markdown(TITLE)
@@ -1061,11 +1129,9 @@ with gr.Blocks(
1061
 
1062
  # ---- NEW: split into 3 functions (partial results) + token check ----
1063
  def start_run(curr_token):
1064
- # increments token (very fast; allows immediate second click)
1065
  return int(curr_token or 0) + 1
1066
 
1067
  def predict_recs_only(rt, interests_list, occasion_val, bmin, bmax, name, rel, age_label, gender_val, tone_val):
1068
- # compute only recommendations; ignore if token is stale
1069
  latest = rt
1070
  profile = _build_profile(interests_list, occasion_val, bmin, bmax, name, rel, age_label, gender_val, tone_val)
1071
  top3 = recommend_top3_budget_first(profile)
@@ -1087,7 +1153,6 @@ with gr.Blocks(
1087
  return gr.update(value=msg, visible=True), latest
1088
 
1089
  # --- Wire events: one short "start", then 3 parallel tasks that each update its output ASAP ---
1090
- # Start: bump token
1091
  ev_start = go.click(
1092
  start_run,
1093
  inputs=[run_token],
@@ -1095,7 +1160,6 @@ with gr.Blocks(
1095
  queue=True,
1096
  )
1097
 
1098
- # Run three tasks in parallel (each returns its output + echoes token to keep it "fresh")
1099
  ev_rec = ev_start.then(
1100
  predict_recs_only,
1101
  inputs=[run_token, interests, occasion, budget_min, budget_max, recipient_name, relationship, age, gender, tone],
 
21
  print(f"===== Application Startup at {pd.Timestamp.now().strftime('%Y-%m-%d %H:%M:%S')} =====")
22
 
23
  # --------------------- Config ---------------------
24
+ TITLE = "# 🎁 GIftyPlus - Smart Gift Recommender\n*Top-3 catalog picks + 1 DIY gift + personalized message*"
25
 
26
  DATASET_ID = os.getenv("DATASET_ID", "Danielos100/Amazon_products_clean")
27
  DATASET_SPLIT = os.getenv("DATASET_SPLIT", "train")
 
673
  "estimated_time_minutes": minutes,
674
  })
675
  return idea, "ok"
676
+ # --------------------- END DIY ---------------------
677
+
678
 
679
  # --------------------- Personalized Message (FLAN, מגוון + ולידציה) ---------------------
680
+ # >>>>>>>>>>>>>>>>>>>>>> השינוי היחיד בקובץ – מימוש מהקולאב <<<<<<<<<<<<<<<<<<<<<<
681
  MSG_MODEL_ID = "google/flan-t5-small"
682
  MSG_DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
683
  TEMP_RANGE = (0.88, 1.10)
 
708
  "Funny": {
709
  "system": "Write 2–3 witty sentences with playful humor.",
710
  "rules": [
711
+ "Add one subtle pun linked to the occasion or interests.",
712
+ "No slapstick; keep it tasteful.",
713
+ "End with a cheeky nudge."
714
  ],
715
  },
716
  "Heartfelt": {
 
799
 
800
  def _contains_any(text: str, terms: List[str]) -> bool:
801
  t = text.lower()
802
+ return any(term for term in terms if term) and any((term or "").lower() in t for term in terms)
803
 
804
  def _too_similar(a: str, b: str, n=3, thr=0.85) -> bool:
805
  def ngrams(txt):
 
873
  )
874
  text = _norm(tok.decode(out_ids[0], skip_special_tokens=True))
875
 
876
+ # ===== Validators (כמו בקולאב) =====
877
  ok_len = 1 <= _sentences_n(text) <= 3
878
+ name_ok = _contains_any(text, [need["name"].lower()])
879
+ occ_ok = _contains_any(text, [need["occ"].lower(), need["occ"].split()[0].lower()])
880
  ban_ok = not _contains_any(text, BAN_PHRASES)
881
  prev = previous_message or _last_msg
882
  dup_ok = (prev is None) or (not _too_similar(text, prev, n=3, thr=0.85))
 
891
  fallback = tried[-1]["text"] if tried else f"Happy {(_clean_occasion(profile.get('occ_ui') or 'day')).lower()}, {profile.get('recipient_name','Friend')}!"
892
  _last_msg = fallback
893
  return {"message": fallback, "meta": {"failed": True, "model": MSG_MODEL_ID, "tone": profile.get("tone","Heartfelt")}}
894
+ # --------------------- END Personalized Message ---------------------
895
 
896
  # --------------------- Rendering ---------------------
897
  def md_escape(text: str) -> str:
 
936
  with gr.Blocks(
937
  title="๐ŸŽ GIfty โ€” Recommender + DIY",
938
  css="""
939
+ /* כותרת ההסבר */
940
  #explain {opacity:.85;font-size:.92em;margin-bottom:8px;}
 
 
 
941
 
942
+ /* --- שדרוג דוגמאות: הופך כל שורה לכרטיס מודרני ודק --- */
943
  .gr-dataframe thead { display:none; }
944
+ .gr-dataframe table { border-collapse: separate !important; border-spacing: 0 10px !important; table-layout: fixed; width:100%; }
945
+
946
+ /* השורה ככרטיס */
947
  .gr-dataframe tbody tr {
948
  cursor: pointer;
949
+ display: block;
950
+ background: linear-gradient(180deg,#fff, #fafafa);
951
+ border-radius: 14px;
952
+ border: 1px solid #e9eef5;
953
+ box-shadow: 0 1px 1px rgba(16,24,40,.04), 0 1px 2px rgba(16,24,40,.06);
954
+ padding: 10px 12px;
955
+ transition: transform .06s ease, box-shadow .12s ease, background .12s ease;
956
+ }
957
+ .gr-dataframe tbody tr:hover {
958
+ transform: translateY(-1px);
959
+ background: #f8fafc;
960
+ box-shadow: 0 3px 10px rgba(16,24,40,.08);
961
  }
962
+
963
+ /* ืชืื™ื ืžื™ื ื™ืžืœื™ืกื˜ื™ื™ื */
964
  .gr-dataframe tbody tr td {
965
+ border: 0 !important;
966
+ padding: 4px 8px !important;
967
+ vertical-align: middle;
968
+ font-size: .92rem;
969
+ line-height: 1.3;
970
  }
971
+
972
+ /* טיפוגרפיה: שם (Recipient) מודגש וגדול יותר */
973
+ .gr-dataframe tbody tr td:nth-child(1) {
974
+ font-weight: 700;
975
+ font-size: 1rem;
976
+ letter-spacing: .2px;
977
+ }
978
+
979
+ /* ื™ื—ืกื™ื/ืื™ืจื•ืข โ€“ ื˜ืงืกื˜ ืขื“ื™ืŸ ื™ื•ืชืจ */
980
+ .gr-dataframe tbody tr td:nth-child(2),
981
+ .gr-dataframe tbody tr td:nth-child(4) {
982
+ opacity: .8;
983
  }
984
+
985
+ /* תגיות קומפקטיות (Interests, Tone, Gender, Age) */
986
+ .gr-dataframe tbody tr td:nth-child(3),
987
+ .gr-dataframe tbody tr td:nth-child(9),
988
+ .gr-dataframe tbody tr td:nth-child(6),
989
+ .gr-dataframe tbody tr td:nth-child(5) {
990
+ display: inline-block;
991
+ background: #eff4ff;
992
+ color: #243b6b;
993
+ border: 1px solid #dbe5ff;
994
+ border-radius: 999px;
995
+ padding: 2px 10px !important;
996
+ font-size: .84rem;
997
+ margin: 2px 6px 2px 0;
998
+ }
999
+
1000
+ /* תקציב – צ'יפ מחושב (Min/Max) */
1001
+ .gr-dataframe tbody tr td:nth-child(7),
1002
+ .gr-dataframe tbody tr td:nth-child(8) {
1003
+ display: inline-block;
1004
+ background: #f1f5f9;
1005
+ border: 1px solid #e2e8f0;
1006
+ color: #0f172a;
1007
+ border-radius: 10px;
1008
+ padding: 2px 8px !important;
1009
+ font-variant-numeric: tabular-nums;
1010
+ margin: 2px 6px 2px 0;
1011
+ }
1012
+
1013
+ /* פריסה קומפקטית בשורה אחת */
1014
+ .gr-dataframe tbody tr td {
1015
+ margin-right: 2px;
1016
+ }
1017
+ .gr-dataframe tbody tr td:last-child {
1018
+ margin-right: 0;
1019
+ }
1020
+
1021
+ /* העלמת היילייט/מסגרות מיותרות של Handsontable */
1022
+ .handsontable .wtBorder, .handsontable .htBorders, .handsontable .wtBorder.current { display: none !important; }
1023
+ .gr-dataframe table td:focus { outline: none !important; box-shadow: none !important; }
1024
  """
1025
  ) as demo:
1026
  gr.Markdown(TITLE)
 
1129
 
1130
  # ---- NEW: split into 3 functions (partial results) + token check ----
1131
  def start_run(curr_token):
 
1132
  return int(curr_token or 0) + 1
1133
 
1134
  def predict_recs_only(rt, interests_list, occasion_val, bmin, bmax, name, rel, age_label, gender_val, tone_val):
 
1135
  latest = rt
1136
  profile = _build_profile(interests_list, occasion_val, bmin, bmax, name, rel, age_label, gender_val, tone_val)
1137
  top3 = recommend_top3_budget_first(profile)
 
1153
  return gr.update(value=msg, visible=True), latest
1154
 
1155
  # --- Wire events: one short "start", then 3 parallel tasks that each update its output ASAP ---
 
1156
  ev_start = go.click(
1157
  start_run,
1158
  inputs=[run_token],
 
1160
  queue=True,
1161
  )
1162
 
 
1163
  ev_rec = ev_start.then(
1164
  predict_recs_only,
1165
  inputs=[run_token, interests, occasion, budget_min, budget_max, recipient_name, relationship, age, gender, tone],