Leetmonkey In Action. Darn LeetMonkey these days
app.py CHANGED
@@ -6,6 +6,7 @@ from datasets import load_dataset
 import random
 import autopep8
 import textwrap
+import threading
 
 # Define the model options
 gguf_models = {
@@ -15,11 +16,23 @@ gguf_models = {
     "Super Block Q6": "leetmonkey_peft_super_block_q6.gguf"
 }
 
+# Global dictionary to store loaded models
+loaded_models = {}
+
 # Function to download and load the model
 def load_model(model_name):
     model_path = hf_hub_download(repo_id="sugiv/leetmonkey-peft-gguf", filename=model_name)
     return Llama(model_path=model_path, n_ctx=2048, n_threads=4, n_gpu_layers=0, verbose=False)
 
+# Function to preload all models
+def preload_models():
+    for name, file in gguf_models.items():
+        loaded_models[name] = load_model(file)
+    print("All models loaded successfully!")
+
+# Start preloading models in a separate thread
+threading.Thread(target=preload_models, daemon=True).start()
+
 # Generation parameters
 generation_kwargs = {
     "max_tokens": 2048,
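The substance of this hunk is that every GGUF variant is now downloaded and initialised once at startup, on a background thread, rather than on each request. A minimal self-contained sketch of the same pattern; the repo id, filenames and Llama settings are taken from the diff, while the lookup at the end is illustrative:

import threading
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

# Illustrative subset of the model table in app.py
gguf_models = {"Super Block Q6": "leetmonkey_peft_super_block_q6.gguf"}
loaded_models = {}  # filled in by the background thread

def load_model(filename):
    # hf_hub_download caches the GGUF file locally; Llama wraps it for CPU inference
    path = hf_hub_download(repo_id="sugiv/leetmonkey-peft-gguf", filename=filename)
    return Llama(model_path=path, n_ctx=2048, n_threads=4, n_gpu_layers=0, verbose=False)

def preload_models():
    for name, filename in gguf_models.items():
        loaded_models[name] = load_model(filename)

# daemon=True keeps the loader thread from blocking interpreter shutdown
threading.Thread(target=preload_models, daemon=True).start()

# Consumers look a model up instead of loading it on demand;
# .get returns None until the thread has stored that entry.
model = loaded_models.get("Super Block Q6")

Because the thread fills the dictionary entry by entry, callers have to tolerate a missing model while loading is still in progress, which is exactly what the reworked update_solution further down does.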
@@ -33,8 +46,7 @@ generation_kwargs = {
 
 def generate_solution(instruction, model):
     system_prompt = "You are a Python coding assistant specialized in solving LeetCode problems. Provide only the complete implementation of the given function. Ensure proper indentation and formatting. Do not include any explanations or multiple solutions."
-    full_prompt = f"""
-### Instruction:
+    full_prompt = f"""### Instruction:
 {system_prompt}
 
 Implement the following function for the LeetCode problem:
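This hunk only merges the first two lines of the prompt template, so the rendered prompt now begins directly with "### Instruction:" instead of a leading blank line. The hunks shown here do not include the call that submits full_prompt to the model; assuming the standard llama-cpp-python completion interface, where a Llama instance is callable and returns an OpenAI-style dict with a "choices" list, that step presumably looks like the following sketch (run_completion is a hypothetical helper, not a name from app.py):

def run_completion(model, full_prompt, generation_kwargs):
    # Run a completion with the parameters defined in generation_kwargs above
    output = model(full_prompt, **generation_kwargs)
    return output["choices"][0]["text"]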
@@ -89,15 +101,16 @@ def extract_and_format_code(text):
 dataset = load_dataset("sugiv/leetmonkey_python_dataset")
 val_dataset = dataset["train"].train_test_split(test_size=0.1)["test"]
 
-def
-    model = load_model(gguf_models[model_name])
+def update_problem():
     sample = random.choice(val_dataset)
-
-
-
-
-
-
+    return sample['instruction']
+
+def update_solution(problem, model_name):
+    model = loaded_models.get(model_name)
+    if model is None:
+        return "Model is still loading. Please wait and try again."
+    generated_output = generate_solution(problem, model)
+    return extract_and_format_code(generated_output)
 
 with gr.Blocks() as demo:
     gr.Markdown("# LeetCode Problem Solver")
@@ -109,18 +122,9 @@ with gr.Blocks() as demo:
 
     with gr.Column():
         model_dropdown = gr.Dropdown(choices=list(gguf_models.keys()), label="Select GGUF Model", value="Exact Copy")
-        solution_display = gr.Code(label="Generated Solution", language="python")
+        solution_display = gr.Code(label="Generated Solution", language="python", lines=25)
         generate_btn = gr.Button("Generate Solution")
 
-    def update_problem():
-        sample = random.choice(val_dataset)
-        return sample['instruction']
-
-    def update_solution(problem, model_name):
-        model = load_model(gguf_models[model_name])
-        generated_output = generate_solution(problem, model)
-        return extract_and_format_code(generated_output)
-
     select_problem_btn.click(update_problem, outputs=problem_display)
     generate_btn.click(update_solution, inputs=[problem_display, model_dropdown], outputs=solution_display)
 
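For reference, the UI code touched by the last two hunks follows Gradio's standard Blocks pattern: .click() binds a button to a handler, reads the listed inputs components and writes the return value to the outputs component. A stripped-down sketch of that wiring is below; problem_display and select_problem_btn appear in the diff only as context lines, so their component types and labels here are assumptions, and the two handlers are stubs standing in for the real ones defined earlier in app.py:

import gradio as gr

def update_problem():
    # Stub: app.py returns a random problem from val_dataset here
    return "Placeholder problem statement"

def update_solution(problem, model_name):
    # Stub: app.py runs the selected GGUF model and formats its output here
    return f"# {model_name} solution for: {problem[:40]}\npass"

with gr.Blocks() as demo:
    gr.Markdown("# LeetCode Problem Solver")
    problem_display = gr.Textbox(label="Problem")            # assumed component type
    select_problem_btn = gr.Button("Select Random Problem")  # assumed label
    with gr.Column():
        model_dropdown = gr.Dropdown(choices=["Exact Copy", "Super Block Q6"],
                                     label="Select GGUF Model", value="Exact Copy")
        solution_display = gr.Code(label="Generated Solution", language="python", lines=25)
        generate_btn = gr.Button("Generate Solution")

    # Button clicks call the handlers; values are read from the inputs
    # components and the return value is written to the outputs component.
    select_problem_btn.click(update_problem, outputs=problem_display)
    generate_btn.click(update_solution, inputs=[problem_display, model_dropdown],
                       outputs=solution_display)

demo.launch()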