eaglelandsonce committed on
Commit
72ca3fe
·
verified ·
1 Parent(s): 91d0eed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -2
app.py CHANGED
@@ -1,13 +1,24 @@
1
  import streamlit as st
2
  from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
 
 
3
 
4
  def main():
5
  st.title("Codestral Inference with Hugging Face")
6
 
 
 
 
 
 
 
 
 
 
7
  # Load the model and tokenizer
8
  st.text("Loading model...")
9
- tokenizer = AutoTokenizer.from_pretrained("mistralai/Codestral-22B-v0.1")
10
- model = AutoModelForCausalLM.from_pretrained("mistralai/Codestral-22B-v0.1")
11
  generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
12
  st.success("Model loaded successfully!")
13
 
 
1
  import streamlit as st
2
  from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
3
+ from huggingface_hub import snapshot_download
4
+ from pathlib import Path
5
 
6
def main():
    """Streamlit app entry point.

    Downloads the Codestral model snapshot from the Hugging Face Hub into a
    local directory, then loads the tokenizer/model and builds a
    text-generation pipeline, reporting progress in the Streamlit UI.

    Returns:
        The constructed ``text-generation`` pipeline (also useful for callers;
        the original code built it and dropped it).
    """
    st.title("Codestral Inference with Hugging Face")

    # Download the model files into a persistent local directory so the
    # (very large) snapshot is reused across Streamlit reruns.
    st.text("Downloading model...")
    model_id = "mistralai/Codestral-22B-v0.1"
    local_model_path = Path.home().joinpath('mistral_models', model_id)
    local_model_path.mkdir(parents=True, exist_ok=True)

    # BUG FIX: the Hub repo publishes weights as *.safetensors; the original
    # allow_patterns fetched only "*.bin", which can download zero weight
    # files and make from_pretrained() fail below.
    snapshot_download(
        repo_id=model_id,
        allow_patterns=["*.safetensors", "*.bin", "*.json", "*.model"],
        local_dir=local_model_path,
    )
    st.success("Model downloaded successfully!")

    # Load the model and tokenizer from the local snapshot directory.
    st.text("Loading model...")
    tokenizer = AutoTokenizer.from_pretrained(local_model_path)
    model = AutoModelForCausalLM.from_pretrained(local_model_path)
    generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
    st.success("Model loaded successfully!")

    # NOTE(review): `generator` was built but never used in the visible code —
    # presumably an inference UI was intended; confirm. Returning it keeps the
    # work available to callers without changing the observable UI behavior.
    return generator


# BUG FIX: main() was defined but never called, so running the app rendered
# nothing. Streamlit executes the script with __name__ == "__main__".
if __name__ == "__main__":
    main()
24