AlGe committed on
Commit
a0ade0a
·
verified ·
1 Parent(s): a0548c6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +72 -47
app.py CHANGED
@@ -1,20 +1,20 @@
1
  from __future__ import annotations
2
- from typing import Iterable, Tuple
 
3
  import gradio as gr
 
 
4
  from gradio.themes.monochrome import Monochrome
 
 
 
5
  import spaces
6
  import torch
7
  from transformers import AutoTokenizer, AutoModelForSequenceClassification, AutoModelForTokenClassification, pipeline
8
  import os
9
- import plotly.graph_objects as go
10
  import colorsys
11
- import csv
12
- import sys
13
-
14
- # Increase the CSV field size limit
15
- csv.field_size_limit(sys.maxsize)
16
 
17
- # Utility functions for color conversions and brightness adjustment
18
  def hex_to_rgb(hex_color: str) -> tuple[int, int, int]:
19
  hex_color = hex_color.lstrip('#')
20
  return tuple(int(hex_color[i:i+2], 16) for i in (0, 2, 4))
@@ -29,27 +29,25 @@ def adjust_brightness(rgb_color: tuple[int, int, int], factor: float) -> tuple[i
29
  return tuple(int(v * 255) for v in new_rgb)
30
 
31
  monochrome = Monochrome()
 
32
  auth_token = os.environ['HF_TOKEN']
33
 
34
- # Load the tokenizer and models for the first pipeline
35
  tokenizer_bin = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_token", token=auth_token)
36
  model_bin = AutoModelForTokenClassification.from_pretrained("AlGe/deberta-v3-large_token", token=auth_token)
37
  tokenizer_bin.model_max_length = 512
38
  pipe_bin = pipeline("ner", model=model_bin, tokenizer=tokenizer_bin)
39
 
40
- # Load the tokenizer and models for the second pipeline
41
  tokenizer_ext = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_AIS-token", token=auth_token)
42
  model_ext = AutoModelForTokenClassification.from_pretrained("AlGe/deberta-v3-large_AIS-token", token=auth_token)
43
  tokenizer_ext.model_max_length = 512
44
  pipe_ext = pipeline("ner", model=model_ext, tokenizer=tokenizer_ext)
45
 
46
- # Load the tokenizer and models for the third pipeline
47
  model1 = AutoModelForSequenceClassification.from_pretrained("AlGe/deberta-v3-large_Int_segment", num_labels=1, token=auth_token)
48
  tokenizer1 = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_Int_segment", token=auth_token)
 
49
  model2 = AutoModelForSequenceClassification.from_pretrained("AlGe/deberta-v3-large_seq_ext", num_labels=1, token=auth_token)
50
 
51
- # Define functions to process inputs
52
- def process_ner(text, pipeline):
53
  output = pipeline(text)
54
  entities = []
55
  current_entity = None
@@ -76,7 +74,7 @@ def process_ner(text, pipeline):
76
 
77
  return {"text": text, "entities": entities}
78
 
79
- def process_classification(text, model1, model2, tokenizer1) -> Tuple[float, float, float]:
80
  inputs1 = tokenizer1(text, max_length=512, return_tensors='pt', truncation=True, padding=True)
81
 
82
  with torch.no_grad():
@@ -87,51 +85,78 @@ def process_classification(text, model1, model2, tokenizer1) -> Tuple[float, flo
87
  prediction2 = outputs2[0].item()
88
  score = prediction1 / (prediction2 + prediction1)
89
 
90
- return prediction1, prediction2, score
 
 
 
 
 
 
 
 
 
 
91
 
92
def generate_pie_chart(values: list, labels: list, title: str):
    """Build a donut-style pie chart and return it as an HTML snippet.

    Args:
        values: Slice sizes, one per label.
        labels: Slice labels, parallel to ``values``.
        title: Chart title shown above the plot.

    Returns:
        The chart rendered via Plotly as an HTML fragment (not a full page).
    """
    chart = go.Figure(data=[go.Pie(labels=labels, values=values, hole=.3)])
    chart.update_layout(title_text=title)
    return chart.to_html(full_html=False)
 
 
 
 
 
 
 
96
 
97
@spaces.GPU
def all(text):
    """Score a narrative with both NER pipelines and the classifiers.

    Returns the two NER outputs, the formatted internal/external counts and
    their ratio, and two pie-chart HTML snippets, in the order expected by
    the Gradio outputs list.
    """
    binary_result = process_ner(text, pipe_bin)
    extended_result = process_ner(text, pipe_ext)
    int_count, ext_count, int_ext_ratio = process_classification(text, model1, model2, tokenizer1)

    # Internal-vs-external breakdown chart.
    pie_chart_html_int_ext = generate_pie_chart([int_count, ext_count], ['Internal', 'External'], "Internal vs External Details")

    # One slice per extended entity, each weighted 1.
    subclass_labels = [entity['entity'] for entity in extended_result['entities']]
    subclass_values = [1] * len(subclass_labels)
    pie_chart_html_subclass = generate_pie_chart(subclass_values, subclass_labels, "Detail Subclasses")

    return (binary_result, extended_result,
            f"{round(int_count, 1)}", f"{round(ext_count, 1)}", f"{round(int_ext_ratio, 2)}",
            pie_chart_html_int_ext, pie_chart_html_subclass)
110
 
111
  examples = [
112
  ['Bevor ich meinen Hund kaufte bin ich immer alleine durch den Park gelaufen. Gestern war ich aber mit dem Hund losgelaufen. Das Wetter war sehr schön, nicht wie sonst im Winter. Ich weiß nicht genau. Mir fällt sonst nichts dazu ein. Wir trafen auf mehrere Spaziergänger. Ein Mann mit seinem Kind. Das Kind hat ein Eis gegessen.'],
113
- ['Also, ich kann mir total vorstellen, dass ich in so zwei Jahren eine mega Geburtstagsparty für meinen besten Freund organisieren werde. Also, das wird echt krass, ich schwöre es dir. Ich werde eine coole Location finden, wahrscheinlich so ein Haus am See oder so, und dann lade ich echt alle seine Freunde und Familie ein. Und dann, das wird der Hammer, ich werde eine Band organisieren, die so seine ganze Lieblingsmusik spielt, weißt du? Und dann, weil ich ja keine Lust habe, selbst zu kochen, hol ich mir so einen professionellen Catering-Service, die dann für alle Gäste kochen. Na ja, ich hoff mal, dass das Wetter mitspielt und wir alle draußen feiern können. Ich sag dir, das wird echt ne unvergessliche Feier, und mein Freund wird ausflippen vor Überraschung, echt jetzt.'],
114
- ["So, I really imagine that in two years, I'll finally be living my dream and writing a novel. I'll find a quiet place where I can fully concentrate on writing. I'll tell a story that really engages me and that I want to share with others. I'll draw inspiration from my experiences and the people around me, just like in real life. I'll spend many hours putting my thoughts on paper and bringing my characters to life. Well, I hope that readers also find my story fascinating; that would be really cool."],
115
- ['Oh mein Gott, ich muss dir diese total lustige Geschichte aus meiner Schulzeit erzählen! Du wirst es nicht glauben. Also, ich kam eines Tages zu spät zur Schule, richtig? Du weißt, wie das ist, man ist in Eile und achtet nicht wirklich darauf. Ich habe einfach ein Paar Schuhe gegriffen und bin aus dem Haus gerannt. Erst als ich in der Schule war und mich zum Mittagessen hinsetzte, bemerkte ich, dass ich zwei völlig unterschiedliche Schuhe anhatte! Ich hatte einen schwarzen und einen weißen Turnschuh an. Und ich mache keine Witze, die Leute haben es sofort bemerkt. Ein Typ namens Tommy hat mich Mismatch Mike genannt und bald haben alle mitgemacht. Oh Mann, ich war damals so peinlich berührt! Jetzt finde ich es einfach nur witzig und frage mich, wie mir das nicht aufgefallen ist. Das ist eine dieser Geschichten, die ich jetzt auf Partys erzähle, und die Leute finden es total lustig.'],
116
- ["You know, this conversation reminded me of an incredible experience I had at a music festival in college. I'll never forget it. It was a rainy day, but we didn't care, and the band that was playing was my absolute favorite. Even though we were all soaked, the crowd kept on dancing and singing along. The energy was incredible, and I remember feeling so connected to everyone around me. It was as if the rain made the whole experience even more magical. I was surrounded by friends, and we all shared this special moment together. It was one of the best moments of my life, and I still get goosebumps when I think about it. Sometimes, it's the unexpected things that create the most amazing memories, you know?"],
117
  ]
118
 
119
- # Define Gradio interface
120
  iface = gr.Interface(
121
  fn=all,
122
- inputs=gr.Textbox(lines=5, label="Input Text"),
123
  outputs=[
124
- gr.JSON(label="NER Binary"),
125
- gr.JSON(label="NER External"),
126
- gr.Textbox(label="Internal Count"),
127
- gr.Textbox(label="External Count"),
128
- gr.Textbox(label="Ratio Int/Ext"),
129
- gr.HTML(label="Pie Chart of Internal vs External Details", elem_id="pie_chart_int_ext"),
130
- gr.HTML(label="Pie Chart of Subclass Details", elem_id="pie_chart_subclass"),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
131
  ],
132
- theme=monochrome,
133
- examples=examples
 
 
134
  )
135
 
136
- # Launch the interface
137
- iface.launch(debug=True)
 
1
  from __future__ import annotations
2
+ from typing import Iterable, List, Dict, Tuple
3
+
4
  import gradio as gr
5
+ from gradio.themes.base import Base
6
+ from gradio.themes.soft import Soft
7
  from gradio.themes.monochrome import Monochrome
8
+ from gradio.themes.default import Default
9
+ from gradio.themes.utils import colors, fonts, sizes
10
+
11
  import spaces
12
  import torch
13
  from transformers import AutoTokenizer, AutoModelForSequenceClassification, AutoModelForTokenClassification, pipeline
14
  import os
 
15
  import colorsys
16
+ import matplotlib.pyplot as plt
 
 
 
 
17
 
 
18
def hex_to_rgb(hex_color: str) -> tuple[int, int, int]:
    """Convert a '#RRGGBB' (or bare 'RRGGBB') hex string to an (r, g, b) tuple."""
    digits = hex_color.lstrip('#')
    return tuple(int(digits[offset:offset + 2], 16) for offset in (0, 2, 4))
 
29
  return tuple(int(v * 255) for v in new_rgb)
30
 
31
  monochrome = Monochrome()
32
+
33
  auth_token = os.environ['HF_TOKEN']
34
 
 
35
  tokenizer_bin = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_token", token=auth_token)
36
  model_bin = AutoModelForTokenClassification.from_pretrained("AlGe/deberta-v3-large_token", token=auth_token)
37
  tokenizer_bin.model_max_length = 512
38
  pipe_bin = pipeline("ner", model=model_bin, tokenizer=tokenizer_bin)
39
 
 
40
  tokenizer_ext = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_AIS-token", token=auth_token)
41
  model_ext = AutoModelForTokenClassification.from_pretrained("AlGe/deberta-v3-large_AIS-token", token=auth_token)
42
  tokenizer_ext.model_max_length = 512
43
  pipe_ext = pipeline("ner", model=model_ext, tokenizer=tokenizer_ext)
44
 
 
45
  model1 = AutoModelForSequenceClassification.from_pretrained("AlGe/deberta-v3-large_Int_segment", num_labels=1, token=auth_token)
46
  tokenizer1 = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_Int_segment", token=auth_token)
47
+
48
  model2 = AutoModelForSequenceClassification.from_pretrained("AlGe/deberta-v3-large_seq_ext", num_labels=1, token=auth_token)
49
 
50
+ def process_ner(text: str, pipeline) -> dict:
 
51
  output = pipeline(text)
52
  entities = []
53
  current_entity = None
 
74
 
75
  return {"text": text, "entities": entities}
76
 
77
+ def process_classification(text: str, model1, model2, tokenizer1) -> Tuple[str, str, str]:
78
  inputs1 = tokenizer1(text, max_length=512, return_tensors='pt', truncation=True, padding=True)
79
 
80
  with torch.no_grad():
 
85
  prediction2 = outputs2[0].item()
86
  score = prediction1 / (prediction2 + prediction1)
87
 
88
+ return f"{round(prediction1, 1)}", f"{round(prediction2, 1)}", f"{round(score, 2)}"
89
+
90
def generate_charts(ner_output_bin: dict, ner_output_ext: dict) -> Tuple[plt.Figure, plt.Figure]:
    """Build a pie chart and a bar chart of entity-type frequencies.

    Pools the entity labels from both NER outputs (each a dict with an
    'entities' list, as produced by process_ner) and counts how often each
    label occurs.

    Args:
        ner_output_bin: Output of the binary NER pipeline.
        ner_output_ext: Output of the extended NER pipeline.

    Returns:
        A (pie_figure, bar_figure) tuple of matplotlib figures.
    """
    all_entities = [entity['entity']
                    for output in (ner_output_bin, ner_output_ext)
                    for entity in output['entities']]

    # Count in one O(n) pass; the original called list.count() once per
    # unique label, which is O(n * k).
    entity_counts: dict = {}
    for label in all_entities:
        entity_counts[label] = entity_counts.get(label, 0) + 1

    pie_labels = list(entity_counts.keys())
    pie_sizes = list(entity_counts.values())

    fig1, ax1 = plt.subplots()
    ax1.pie(pie_sizes, labels=pie_labels, autopct='%1.1f%%', startangle=90)
    ax1.axis('equal')  # keep the pie circular

    fig2, ax2 = plt.subplots()
    ax2.bar(entity_counts.keys(), entity_counts.values())
    ax2.set_ylabel('Count')
    ax2.set_xlabel('Entity Type')
    ax2.set_title('Entity Counts')

    return fig1, fig2
111
 
112
@spaces.GPU
def all(text: str):
    """Run both NER pipelines and the classification models on *text*.

    Returns the two NER outputs, the three formatted classification values
    (internal count, external count, ratio), and the two summary charts,
    in the order expected by the Gradio outputs list.
    """
    binary_entities = process_ner(text, pipe_bin)
    extended_entities = process_ner(text, pipe_ext)
    # process_classification returns a (internal, external, ratio) 3-tuple
    # of pre-formatted strings; unpack instead of indexing.
    internal_count, external_count, ratio = process_classification(text, model1, model2, tokenizer1)

    pie_figure, bar_figure = generate_charts(binary_entities, extended_entities)

    return (binary_entities, extended_entities,
            internal_count, external_count, ratio,
            pie_figure, bar_figure)
 
 
123
 
124
  examples = [
125
  ['Bevor ich meinen Hund kaufte bin ich immer alleine durch den Park gelaufen. Gestern war ich aber mit dem Hund losgelaufen. Das Wetter war sehr schön, nicht wie sonst im Winter. Ich weiß nicht genau. Mir fällt sonst nichts dazu ein. Wir trafen auf mehrere Spaziergänger. Ein Mann mit seinem Kind. Das Kind hat ein Eis gegessen.'],
 
 
 
 
126
  ]
127
 
 
128
# Wire the scoring function into a Gradio interface; output order must match
# the tuple returned by all().
iface = gr.Interface(
    fn=all,
    inputs=gr.Textbox(
        lines=5,
        label="Input Text",
        placeholder="Write about how your breakfast went or anything else that happened or might happen to you ...",
    ),
    outputs=[
        # Token highlights from the binary (internal/external) pipeline.
        gr.HighlightedText(
            label="Binary Sequence Classification",
            color_map={
                "External": "#6ad5bcff",
                "Internal": "#ee8bacff",
            },
        ),
        # Token highlights from the extended subclass pipeline; warm colors
        # for internal subclasses, cool colors for external ones.
        gr.HighlightedText(
            label="Extended Sequence Classification",
            color_map={
                "INTemothou": "#FF7F50",     # Coral
                "INTpercept": "#FF4500",     # OrangeRed
                "INTtime": "#FF6347",        # Tomato
                "INTplace": "#FFD700",       # Gold
                "INTevent": "#FFA500",       # Orange
                "EXTsemantic": "#4682B4",    # SteelBlue
                "EXTrepetition": "#5F9EA0",  # CadetBlue
                "EXTother": "#00CED1",       # DarkTurquoise
            },
        ),
        gr.Label(label="Internal Detail Count"),
        gr.Label(label="External Detail Count"),
        gr.Label(label="Approximated Internal Detail Ratio"),
        gr.Plot(label="Entity Distribution Pie Chart"),
        gr.Plot(label="Entity Count Bar Chart"),
    ],
    title="Scoring Demo",
    description="Autobiographical Memory Analysis: This demo combines two text - and two sequence classification models to showcase our automated Autobiographical Interview scoring method. Submit a narrative to see the results.",
    examples=examples,
    theme=monochrome,
)

iface.launch()