acecalisto3 committed
Commit 81806d9 · verified · 1 Parent(s): 2d4cdd7

Update app.py

Files changed (1):
app.py +2 -2
app.py CHANGED
@@ -1170,7 +1170,7 @@ def load_model():
     model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"
     try:
         # Load tokenizer with warning suppression
-        tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1", clean_up_tokenization_spaces=True)
+        tokenizer = AutoTokenizer.from_pretrained("T5Config", clean_up_tokenization_spaces=True)
 
         model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
         pipe = pipeline(
@@ -1179,7 +1179,7 @@ def load_model():
             tokenizer=tokenizer,
             device=0 if torch.cuda.is_available() else -1,
         )
-        logging.info("Mistral model loaded successfully.")
+        logging.info("Model loaded successfully.")
         return pipe
     except Exception as e:
         logging.error(f"Error loading Mistral model: {e}")
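
For reference, below is a minimal, self-contained sketch of the tokenizer/model/pipeline loading pattern that load_model() follows. The checkpoint name (google/flan-t5-small), the pipeline task, and the None return on failure are illustrative assumptions, not values taken from app.py.

import logging

import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, pipeline

def load_model(model_name: str = "google/flan-t5-small"):
    # Sketch of the load pattern shown in the diff; the checkpoint, task name,
    # and error-path return value are assumptions for illustration.
    try:
        # Load tokenizer with warning suppression
        tokenizer = AutoTokenizer.from_pretrained(
            model_name, clean_up_tokenization_spaces=True
        )
        model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
        pipe = pipeline(
            "text2text-generation",  # assumed task for a seq2seq checkpoint
            model=model,
            tokenizer=tokenizer,
            device=0 if torch.cuda.is_available() else -1,
        )
        logging.info("Model loaded successfully.")
        return pipe
    except Exception as e:
        logging.error(f"Error loading model: {e}")
        return None  # assumed fallback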