|
import os |
|
import gradio as gr |
|
from transformers import AutoModel, AutoTokenizer |
|
from fastapi import FastAPI |
|
|
|
def process_models(model_name, save_dir, additional_models):
    """Download one or more Hugging Face models and save them under *save_dir*.

    Args:
        model_name: Hub id of the primary model (e.g. ``"bert-base-uncased"``).
        save_dir: Directory that receives one subfolder per model; created on
            demand. Slashes in model ids are replaced by underscores so each
            model maps to a flat, filesystem-safe folder name.
        additional_models: Optional iterable of further model ids to process
            the same way; falsy values (None, empty list) are skipped.

    Returns:
        A newline-joined markdown log, one status line per step, suitable for
        display in a Gradio textbox/markdown component.
    """
    log_lines = []

    _fetch_and_save(model_name, save_dir, log_lines)

    # `or []` keeps the original behavior: None / empty input means no extras.
    for extra in additional_models or []:
        _fetch_and_save(extra, save_dir, log_lines)

    return "\n".join(log_lines)


def _fetch_and_save(name, save_dir, log_lines):
    """Load model *name* plus its tokenizer and save both, appending markdown
    status messages to *log_lines*. Errors are logged, never raised, so one
    bad model id does not abort the remaining downloads."""
    log_lines.append(f"🔄 Loading model: **{name}**")
    try:
        model = AutoModel.from_pretrained(name)
        tokenizer = AutoTokenizer.from_pretrained(name)

        model_save_path = os.path.join(save_dir, name.replace("/", "_"))
        os.makedirs(model_save_path, exist_ok=True)

        model.save_pretrained(model_save_path)
        # Bug fix: the tokenizer was loaded but never persisted, so the saved
        # folder could not be reloaded standalone. Save it alongside the model.
        tokenizer.save_pretrained(model_save_path)

        log_lines.append(f"✅ Saved **{name}** to `{model_save_path}`")
    except Exception as e:
        # Broad catch is deliberate: network, auth, and config errors from the
        # Hub vary widely, and the UI should report rather than crash.
        log_lines.append(f"❌ Error with **{name}**: {e}")
|
|
|
|
|
# Mermaid diagram shown in the UI summarizing the download/save pipeline.
# NOTE(review): the original emojis were mojibake (e.g. `πΎ`, `π§©`); they are
# reconstructed here as 📥/💾/🧩 — confirm against the intended rendering.
mermaid_glossary = """
```mermaid
graph LR
    A[📥 Model Input] --> B[Load Model]
    B --> C[💾 Save Model]
    D[🧩 Additional Models] --> B
```
"""