File size: 7,357 Bytes
70297ae
58a16f2
a261ff8
 
7e4d548
64c34c3
58a16f2
4bab2f3
58a16f2
 
 
 
6cc4234
70297ae
 
 
 
 
 
 
 
 
 
 
 
64c34c3
 
 
70297ae
a261ff8
58a16f2
db0cf1b
a261ff8
58a16f2
f950f71
7e4d548
a261ff8
58a16f2
 
a261ff8
58a16f2
 
a261ff8
58a16f2
 
 
a261ff8
58a16f2
6cc4234
58a16f2
 
a261ff8
58a16f2
 
 
 
 
 
 
64c34c3
58a16f2
 
 
 
64c34c3
58a16f2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4bab2f3
 
8295404
4bab2f3
58a16f2
 
 
4bab2f3
58a16f2
70297ae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58a16f2
122c44e
84ccd93
 
 
70297ae
 
84ccd93
 
70297ae
84ccd93
0bc9204
84ccd93
 
 
 
 
58a16f2
 
 
122c44e
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
import os
import json
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel
from huggingface_hub import HfApi, HfFolder
import gradio as gr
import time

# Global variables to store the model and tokenizer.
# Populated by load_model_and_tokenizer() at startup and overwritten by
# run_merge_and_push(); read by generate_text().
merged_model = None
tokenizer = None

def load_model_and_tokenizer():
    """Load the merged model and tokenizer into the module-level globals.

    Prefers a previously-cached local copy; otherwise downloads from the
    Hugging Face Hub (authenticated via the HF_AUTH_TOKEN environment
    variable) and saves it locally so future startups skip the download.
    """
    global merged_model, tokenizer

    model_name = "NoaiGPT/merged-llama3-8b-instruct"
    local_model_path = model_name.replace("/", "_")

    if os.path.exists(local_model_path):
        print("Loading model from local path...")
        tokenizer = AutoTokenizer.from_pretrained(local_model_path)
        merged_model = AutoModelForCausalLM.from_pretrained(local_model_path)
    else:
        print("Downloading model from Hugging Face Hub...")
        # Ensure you set this environment variable with your Hugging Face token.
        token = os.getenv("HF_AUTH_TOKEN")
        # `use_auth_token` is deprecated in recent transformers releases;
        # `token` is the supported keyword.
        tokenizer = AutoTokenizer.from_pretrained(model_name, token=token)
        merged_model = AutoModelForCausalLM.from_pretrained(model_name, token=token)
        # Cache locally: the original never saved here, so the
        # os.path.exists() fast path above could never be taken.
        tokenizer.save_pretrained(local_model_path)
        merged_model.save_pretrained(local_model_path)

def merge_models():
    """Merge the LoRA adapter into the base Llama-3 8B model.

    Loads the base model, applies the PEFT adapter on top, folds the
    adapter weights into the base weights, and writes the merged model
    together with the instruct tokenizer into the local "merged_model"
    directory.

    Returns:
        A ``(model, tokenizer)`` tuple for the merged model.
    """
    print("Loading base model...")
    base = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3-8B")

    print("Loading fine-tuned model...")
    adapter_repo = "NoaiGPT/autotrain-14mrs-fc44l"
    peft_wrapped = PeftModel.from_pretrained(base, adapter_repo)

    print("Merging models...")
    combined = peft_wrapped.merge_and_unload()

    print("Saving merged model...")
    combined.save_pretrained("merged_model")

    print("Saving tokenizer...")
    tok = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")
    tok.save_pretrained("merged_model")

    return combined, tok

def push_to_hub(repo_name):
    """Upload the locally-saved "merged_model" folder to the Hugging Face Hub.

    Args:
        repo_name: Target repository id, e.g. "user/model-name".
    """
    print(f"Pushing merged model to Hugging Face Hub: {repo_name}")
    api = HfApi()

    # Read the token up front so BOTH repo creation and the upload are
    # authenticated; the original only authenticated the upload.
    # Ensure you set this environment variable with your Hugging Face token.
    token = os.getenv("HF_AUTH_TOKEN")

    try:
        # exist_ok=True makes a re-push to an existing repo a no-op instead
        # of relying on a broad exception catch to mean "already exists".
        api.create_repo(repo_name, private=True, token=token, exist_ok=True)
        print(f"Repository ready: {repo_name}")
    except Exception as e:
        # Surface real failures (auth, network) without aborting the upload
        # attempt, matching the original best-effort behavior.
        print(f"Repository already exists or error occurred: {e}")

    api.upload_folder(
        folder_path="merged_model",
        repo_id=repo_name,
        repo_type="model",
        token=token
    )
    print("Model pushed successfully!")

def generate_text(input_text):
    """Generate a completion for *input_text* using the merged model.

    Returns the decoded generation as a string, or an explanatory error
    message when the model/tokenizer globals have not been loaded yet.
    """
    global merged_model, tokenizer

    if tokenizer is None or merged_model is None:
        return "Model not loaded. Please run the merge process first."

    encoded = tokenizer.encode(input_text, return_tensors="pt")

    # Inference only — no gradient tracking needed.
    with torch.no_grad():
        sequences = merged_model.generate(encoded, max_length=200, num_return_sequences=1)

    return tokenizer.decode(sequences[0], skip_special_tokens=True)

def run_merge_and_push():
    """Merge the adapter into the base model, push the result to the Hub
    under a timestamped repository name, and return a status message."""
    global merged_model, tokenizer

    # Merge first so the "merged_model" folder exists for the upload.
    merged_model, tokenizer = merge_models()

    # Timestamp guarantees a unique repository per run.
    hub_repo_name = f"NoaiGPT/merged-llama3-8b-instruct-{int(time.time())}"

    push_to_hub(hub_repo_name)

    return f"Model merged and pushed to Hugging Face Hub successfully! Repository: {hub_repo_name}"

# Load the model and tokenizer.
# NOTE: runs at import time, before the UI is built; on first run this
# downloads the full model from the Hub.
load_model_and_tokenizer()

# Default input text shown as the example in the "Generate Text" tab.
# This prompt is runtime data for the demo — do not edit its wording.
default_input_text = """### Human: Humanize the following AI-generated text. Ensure the output maintains the same format, headings, and bullet points as the input:
Monitor the implementation, adjusting the plan as needed.. Furthermore, introducing healthier snacks such as fresh fruits, vegetables, and alternative grains can help counteract the effects of processed and sugary snacks that are popular amongst students. 
3. Introduce nutritious whole foods such as fresh fruits and vegetables, grains, seeds, and nuts, as well as plant-based substitutes to traditional lunch items. 
5. Set aside time for students to participate in activities that promote healthy eating habits. 

To effectively implement healthier food options in school cafeterias, the following steps should be taken: 

1. 
6. 
4. Studies have found that students who eat healthier, balanced meals during the school day perform better in class, have higher cognitive functioning, and feel more energized and ready to learn. Survey the students to learn their preferences for healthy snacks. Establish a partnership with local farmers and food vendors to provide fresh, wholesome food items for the school lunchroom. Conduct an audit of the current food items offered in the school lunchroom. Educate the students on the importance of making healthier food choices. 
7. 
2. 
Research has shown that there are numerous benefits to providing healthier food options in school cafeterias
### Assistant:Its better to take away the opition of getting junk food that the cafeteria puts out for students. Changing our school meun to much healthier choice's is a good idea because its good for the students to eat much healthier food in school. Some students tend to buy food and then throw it out becuase of how the food is made. And staff members that get lunch from the cafeteria could enjoy having a healthier lunch.

in conclusion changing the schools cafeterias meun to much healthier choice would be good for students and staff members so they could stay healthy and focus.. Eating junk food could make you tired and you won't have theenergy to do anything. The school could make a slald bar for those who chose to eat it and they could add there own toppings so they could eat somthing they like instead of throwing it out.

I think having fruits smoothies, organic bake goods, and a salad bar could be very good for every school to have because the school could make a lot of money selling this at school and its healthy for students to eat and enjoy. Students wont be so tempted to eat it, and could chose a better and healthier choice.

Most students want to stay in shape, so they try to eat healthier. most students don't workout so they depend on the food they eat. most students wont have to get uber eats to get food that they like if the school sold smoothies or bake goods. adding more healthier choices in th school caferteria meun could help them stay in shape. Eating more fresh fruits and water can help students focus and not fall asleep in class"""

# Gradio interface: two tabs — interactive generation against the loaded
# model, and a one-click merge-and-push workflow.
with gr.Blocks() as demo:
    with gr.Tab("Generate Text"):
        # A full gr.Interface embedded inside a Blocks tab via .render().
        iface = gr.Interface(
            fn=generate_text,
            inputs=gr.Textbox(lines=10, label="Input Text", placeholder="Enter text to generate a response..."),
            outputs=gr.Textbox(lines=15, label="Generated Text"),
            title="Merged Llama 3 8B Instruct Model",
            description="Enter text to generate a response from the merged model.",
            examples=[[default_input_text]]
        )
        iface.render()
    
    with gr.Tab("Merge and Push"):
        # Button triggers the long-running merge + upload; output textbox
        # shows the returned status string.
        merge_and_push_button = gr.Button("Merge Models and Push to Hub")
        merge_and_push_output = gr.Textbox(lines=20, label="Merge and Push Output")
        merge_and_push_button.click(run_merge_and_push, outputs=merge_and_push_output)

# Launch the interface only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()