Sephfox committed
Commit e47c579 · verified · 1 Parent(s): 2f964be

Update app.py

Files changed (1):
  1. app.py (+10 -10)
app.py CHANGED
@@ -9,7 +9,7 @@ from sklearn.model_selection import train_test_split
 from sklearn.preprocessing import OneHotEncoder
 from sklearn.neural_network import MLPClassifier
 from deap import base, creator, tools, algorithms
-from transformers import BloomForCausalLM, BloomTokenizerFast
+from transformers import GPTNeoXForCausalLM, GPTNeoXTokenizerFast
 import torch
 
 # Initialize Example Emotions Dataset
@@ -221,9 +221,9 @@ def handle_idle_state():
 
 # S.O.U.L. (Self-Organizing Universal Learning) Function
 class SOUL:
-    def __init__(self, model_name='bigscience/bloom-1b1'):
-        self.tokenizer = BloomTokenizerFast.from_pretrained(model_name)
-        self.model = BloomForCausalLM.from_pretrained(model_name)
+    def __init__(self, model_name='EleutherAI/gpt-neox-20b'):
+        self.tokenizer = GPTNeoXTokenizerFast.from_pretrained(model_name)
+        self.model = GPTNeoXForCausalLM.from_pretrained(model_name)
         self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
         self.model.to(self.device)
 
@@ -246,20 +246,20 @@ class SOUL:
         return self.tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
 
     def bridge_ai(self, prompt):
-        # Generate the response using BLOOM
-        bloom_response = self.generate_text(prompt)
+        # Generate the response using GPT-NeoX
+        neox_response = self.generate_text(prompt)
 
         # Get the emotional response
-        emotional_response = get_emotional_response(bloom_response)
+        emotional_response = get_emotional_response(neox_response)
 
-        return bloom_response, emotional_response
+        return neox_response, emotional_response
 
 # Example usage of S.O.U.L. function
 soul = SOUL()
 
 def interact_with_soul(user_input):
-    bloom_response, emotional_response = soul.bridge_ai(user_input)
-    return bloom_response, emotional_response
+    neox_response, emotional_response = soul.bridge_ai(user_input)
+    return neox_response, emotional_response
 
 iface = gr.Interface(
     fn=interact_with_soul,
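
For reference, a minimal, self-contained sketch of the generation path the new imports rely on. GPTNeoXForCausalLM and GPTNeoXTokenizerFast are the standard transformers classes named in the diff; the prompt, max_new_tokens value, and greedy decoding below are illustrative assumptions, not the app's actual settings.

import torch
from transformers import GPTNeoXForCausalLM, GPTNeoXTokenizerFast

# Checkpoint named in the diff.
model_name = "EleutherAI/gpt-neox-20b"
tokenizer = GPTNeoXTokenizerFast.from_pretrained(model_name)
model = GPTNeoXForCausalLM.from_pretrained(model_name)

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)

# Illustrative prompt and generation settings (assumptions, not the app's values).
prompt = "Hello, how are you feeling today?"
inputs = tokenizer(prompt, return_tensors="pt").to(device)
generate_ids = model.generate(**inputs, max_new_tokens=50)

# Same decoding call as in app.py's generate_text.
text = tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
print(text)

Note that EleutherAI/gpt-neox-20b is a 20B-parameter checkpoint (roughly 40 GB of weights in half precision), so running this end to end realistically needs a large GPU or a smaller GPT-NeoX-style checkpoint swapped in for model_name.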