abetlen committed on
Commit
b206e22
·
1 Parent(s): 7731867
Files changed (1) hide show
  1. models.py +1 -1
models.py CHANGED
@@ -59,7 +59,7 @@ MAX_RAM_CACHE = int(float(os.environ.get('RAM_CACHE_GB', '0')) * 1e9)
59
 
60
  def get_cached_model(
61
  model_name: str,
62
- ) -> tuple[paligemma_bv.PaliGemmaModel, paligemma_bv.ParamsCpu]:
63
  """Returns model and params, using RAM cache."""
64
  res, seq = MODELS_RES_SEQ[model_name]
65
  model_path = gradio_helpers.get_paths()[model_name]
 
59
 
60
  def get_cached_model(
61
  model_name: str,
62
+ ):# -> tuple[paligemma_bv.PaliGemmaModel, paligemma_bv.ParamsCpu]:
63
  """Returns model and params, using RAM cache."""
64
  res, seq = MODELS_RES_SEQ[model_name]
65
  model_path = gradio_helpers.get_paths()[model_name]