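"""Gradio interface for submitting models to the evaluation queue."""
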
import json
import os
from datetime import datetime, timezone
import gradio as gr

from src.display.formatting import styled_error, styled_message
from src.envs import API, EVAL_REQUESTS_PATH, QUEUE_REPO

def add_new_eval(model: str, weight_type: str, gguf_filename: str = ""):
    """Validate a submission and add it to the evaluation queue."""
    user_name = ""
    model_path = model
    if "/" in model:
        user_name, model_path = model.split("/", 1)

    current_time = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    # Check that the model actually exists on the Hub.
    try:
        API.model_info(repo_id=model, revision="main")
    except Exception:
        return styled_error("Could not get your model information.")

    # A GGUF filename is only relevant for non-safetensors submissions.
    if weight_type == "safetensors" and gguf_filename:
        return styled_error("GGUF filename should be empty when using safetensors.")

    # Seems good, creating the eval
    print("Adding new eval")

    eval_entry = {
        "model": model,
        "weight_type": weight_type,
        "gguf_filename": gguf_filename,
        "status": "PENDING",
        "submitted_time": current_time,
    }

    print("Creating eval file")
    OUT_DIR = f"{EVAL_REQUESTS_PATH}/{user_name}"
    os.makedirs(OUT_DIR, exist_ok=True)
    out_path = f"{OUT_DIR}/{model_path}_eval_request_{current_time}.json"

    with open(out_path, "w") as f:
        f.write(json.dumps(eval_entry))

    print("Uploading eval file")
    API.upload_file(
        path_or_fileobj=out_path,
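        # Keep only the path relative to the queue repo root
        # (assumes EVAL_REQUESTS_PATH contains "eval-queue/").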
        path_in_repo=out_path.split("eval-queue/")[1],
        repo_id=QUEUE_REPO,
        repo_type="dataset",
        commit_message=f"Add {model} to eval queue",
    )

    # Remove the local file
    os.remove(out_path)

    return styled_message(
        "Your request has been submitted to the evaluation queue!\nPlease wait for up to five minutes for the model to show in the PENDING list."
    )

def update_gguf_input(weight_type):
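    """Disable the GGUF filename box when safetensors weights are selected."""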
    return gr.update(interactive=weight_type != "safetensors")


with gr.Blocks() as app:
    model_input = gr.Textbox(label="Model")
    weight_type_input = gr.Dropdown(
        label="Weight Type",
        choices=["default", "safetensors", "other"],
        value="default",
        interactive=True
    )
    gguf_filename_input = gr.Textbox(label="GGUF Filename", interactive=True)
    submit_btn = gr.Button("Submit")
    output = gr.Markdown()

    # Update gguf_filename input based on weight_type selection
    weight_type_input.change(fn=update_gguf_input, inputs=weight_type_input, outputs=gguf_filename_input)

    submit_btn.click(
        fn=add_new_eval,
        inputs=[model_input, weight_type_input, gguf_filename_input],
        outputs=output
    )

# app.launch()