acecalisto3 committed
Commit 972bb7b · verified · 1 Parent(s): 4864e08

Update app.py

Files changed (1)
  1. app.py +5 -4
app.py CHANGED
@@ -1167,11 +1167,12 @@ def display_historical_data(storage_location: str, url: str):
 # Function to load the "google/flan-t5-xl" model
 def load_model():
     """
-    Loads the FlanT5XL model and tokenizer once and returns the pipeline.
+    Loads the openLlama model and tokenizer once and returns the pipeline.
     """
-    model_name = "openlm-research/open_llama_3b_v2"
-    tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False, legacy=False)
-    model = AutoModelForCausalLM.from_pretrained(model_name)  # Use AutoModelForCausalLM
+    try:
+        model_name = "openlm-research/open_llama_3b_v2"  # Correct indentation here
+        tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False, legacy=False)
+        model = AutoModelForCausalLM.from_pretrained(model_name)
 
     # Determine the maximum supported length for the model
     max_supported_length = 2048  # You might need to adjust this
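
For context, a minimal sketch of how the revised load_model() might read once the try: block introduced by this commit is closed. The pipeline wrapping and the except clause below are assumptions, since they fall outside the visible hunk; only the lines shown in the diff are confirmed.

# Minimal sketch only: the pipeline call and the except clause are assumed,
# not part of the visible diff.
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

def load_model():
    """
    Loads the openLlama model and tokenizer once and returns the pipeline.
    """
    try:
        model_name = "openlm-research/open_llama_3b_v2"
        tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False, legacy=False)
        model = AutoModelForCausalLM.from_pretrained(model_name)

        # Determine the maximum supported length for the model
        # (its use lies further down in the function, outside this hunk)
        max_supported_length = 2048  # You might need to adjust this

        # Assumed: wrap the model and tokenizer in a text-generation pipeline
        return pipeline("text-generation", model=model, tokenizer=tokenizer)
    except Exception as e:
        # Hypothetical error handling; the real except clause is not shown in the hunk
        print(f"Failed to load {model_name}: {e}")
        return None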