asd
Browse files
app.py
CHANGED
@@ -1,7 +1,60 @@
|
|
|
|
|
|
|
|
1 |
import gradio as gr
|
|
|
2 |
|
3 |
-
|
4 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
5 |
|
6 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
7 |
demo.launch()
|
|
|
1 |
+
import torch
|
2 |
+
from transformers import AutoModelForCausalLM, AutoTokenizer
|
3 |
+
from huggingface_hub import HfApi
|
4 |
import gradio as gr
|
5 |
+
import os
|
6 |
|
7 |
+
# Function to merge models
def merge_models():
    """Merge the base and fine-tuned models by weight averaging and save locally.

    Downloads both checkpoints, averages each parameter tensor elementwise,
    then writes the merged weights plus the base tokenizer to ./merged_model.

    Returns:
        str: Local directory path containing the merged model and tokenizer.

    Raises:
        ValueError: If the fine-tuned model is missing a parameter present in
            the base model (i.e. the architectures do not line up).
    """
    base_model_name = "meta-llama/Meta-Llama-3-8B-Instruct"
    finetuned_model_name = "NoaiGPT/autotrain-14mrs-fc44l"

    # Load the base model and its tokenizer
    base_model = AutoModelForCausalLM.from_pretrained(base_model_name)
    base_tokenizer = AutoTokenizer.from_pretrained(base_model_name)

    # Load the fine-tuned model
    finetuned_model = AutoModelForCausalLM.from_pretrained(finetuned_model_name)

    # Merge the models (simple 50/50 weight averaging; adjust as needed).
    # Match parameters by *name* instead of bare zip(): zip silently truncates
    # on a length mismatch and pairs tensors by iteration order only, which
    # would merge the wrong weights without any error if the architectures
    # drift. no_grad() keeps the in-place update out of autograd tracking.
    finetuned_params = dict(finetuned_model.named_parameters())
    with torch.no_grad():
        for name, param_base in base_model.named_parameters():
            if name not in finetuned_params:
                raise ValueError(
                    f"Parameter {name!r} not found in fine-tuned model; "
                    "architectures differ and cannot be averaged."
                )
            param_base.copy_((param_base + finetuned_params[name]) / 2)

    # Save the merged model
    merged_model_name = "./merged_model"
    base_model.save_pretrained(merged_model_name)
    base_tokenizer.save_pretrained(merged_model_name)

    return merged_model_name
29 |
|
30 |
+
# Function to upload the merged model to Hugging Face Hub
def upload_to_hf(repo_id, merged_model_name):
    """Upload the saved merged model directory to the Hugging Face Hub.

    Args:
        repo_id: Target Hub repository id (e.g. "user/repo").
        merged_model_name: Local directory produced by merge_models().

    Returns:
        str: Human-readable confirmation message for the UI.
    """
    api = HfApi()
    # upload_folder pushes the whole directory in a single commit and handles
    # nested paths. The previous per-file loop fed every os.listdir() entry to
    # upload_file, which would fail on any subdirectory (os.listdir returns
    # directories too) and created one commit per file.
    api.upload_folder(
        folder_path=merged_model_name,
        repo_id=repo_id,
        repo_type="model",
    )

    return f"Model uploaded to Hugging Face Hub at {repo_id}."
44 |
+
|
45 |
+
# Gradio function to handle the merge button click
def merge_button_clicked(repo_id="NoaiGPT/autotrain-14mrs-fc44l"):
    """Run the full merge-then-upload pipeline for the Gradio button.

    Args:
        repo_id: Destination Hub repository. Previously hard-coded inside the
            body; kept as a default so the existing no-argument Gradio click
            wiring still works unchanged.

    Returns:
        str: Status message from upload_to_hf, shown in the output textbox.
    """
    merged_model_name = merge_models()
    return upload_to_hf(repo_id, merged_model_name)
50 |
+
|
51 |
+
# Create the Gradio interface: one row holding the trigger button and a
# textbox that displays the status string returned by the click handler.
# NOTE: the variable MUST stay named `demo` — Hugging Face Spaces looks it
# up by that name when serving the app — TODO confirm for this deployment.
with gr.Blocks() as demo:
    with gr.Row():
        merge_button = gr.Button("Merge Models")
        output = gr.Textbox(label="Output")

    # Wire the button: the handler takes no inputs; its return value is
    # rendered into the Output textbox.
    merge_button.click(merge_button_clicked, outputs=output)

# Launch the Gradio interface
demo.launch()
|