Sephfox committed
Commit f372e0a · verified · 1 Parent(s): 28b3c8a

Update app.py

Files changed (1)
  1. app.py +20 -9
app.py CHANGED
@@ -2,25 +2,36 @@ import warnings
 # Suppress FutureWarnings
 warnings.filterwarnings("ignore", category=FutureWarning)

-# --- Monkey Patch for Gradio's Schema Parsing ---
-# This patch prevents errors when a boolean appears in a schema where an iterable is expected.
+# --- Monkey Patch for Gradio Schema Parsing ---
+# This patch prevents APIInfoParseError by handling boolean schema values.
 try:
     import gradio_client.utils as client_utils
+
+    # Patch the helper function to handle bool types in the schema.
+    original_json_schema_to_python_type = client_utils._json_schema_to_python_type
+
+    def patched_json_schema_to_python_type(schema, defs=None):
+        if isinstance(schema, bool):
+            # If the schema is a boolean, simply return a generic type.
+            return "Any"
+        return original_json_schema_to_python_type(schema, defs)
+
+    client_utils._json_schema_to_python_type = patched_json_schema_to_python_type
+
+    # Also patch get_type to be extra safe.
     original_get_type = client_utils.get_type

     def patched_get_type(schema):
-        # If schema is a bool, simply return a generic type string.
         if isinstance(schema, bool):
             return "Any"
         if not isinstance(schema, dict):
             return "Any"
-        # Otherwise, call the original function.
         return original_get_type(schema)

     client_utils.get_type = patched_get_type
+
 except Exception as e:
-    # If the patch fails for some reason, log the error and continue.
-    print("Warning: Failed to patch gradio_client.utils.get_type:", e)
+    print("Warning: Failed to patch gradio_client schema utils:", e)

 import random
 import pandas as pd
@@ -30,6 +41,7 @@ import nltk
 import gradio as gr
 from nltk.sentiment import SentimentIntensityAnalyzer
 from textblob import TextBlob
+import torch
 from transformers import (
     AutoTokenizer,
     AutoModelForCausalLM,
@@ -106,10 +118,9 @@ class EmotionalAnalyzer:
             plt.close()
             return path
         except Exception:
-            return None
+            return None  # Ensures that if there's an issue, we return None

 # --- Text Completion LLM ---
-# Load the fine-tuned LLaMA model and tokenizer
 tokenizer = AutoTokenizer.from_pretrained("diabolic6045/ELN-Llama-1B-base")
 model = AutoModelForCausalLM.from_pretrained("diabolic6045/ELN-Llama-1B-base")

@@ -194,5 +205,5 @@ with gr.Blocks(title="ELN LLaMA 1B Enhanced Demo") as app:
         comp_button = gr.Button("Complete Text")
         comp_button.click(generate_completion, inputs=[comp_text, comp_temp, comp_len], outputs=comp_output)

-# Launch the Gradio app (remove share=True if not needed)
+# Launch the Gradio app (remove share=True if running in an environment that doesn't support it)
 app.launch(share=True)
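
A quick way to sanity-check the patched helpers, as a sketch rather than part of the commit; it assumes gradio_client is installed and that the patch block from app.py above has already run in the same interpreter:

import gradio_client.utils as client_utils

# JSON Schema allows a schema to be the bare literal true or false.
# Unpatched, these helpers expect a dict and can fail with the
# APIInfoParseError mentioned in the commit; patched, they fall back to "Any".
assert client_utils.get_type(True) == "Any"
assert client_utils._json_schema_to_python_type(False) == "Any"

# Ordinary dict schemas still go through the original implementations.
print(client_utils.get_type({"type": "string"}))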