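"""Hugging Face Model Manifest Viewer.

A small Gradio app: given a Hugging Face model ID and tag, it fetches the
Ollama-style manifest from the huggingface.co/v2/ registry endpoint and shows
the manifest together with its system message, chat template, and parameters
when those layers are present.
"""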
import gradio as gr
import requests
import json

def fetch_manifest(model_id, tag_name):
    try:
        # Normalize the model ID: accept a plain "user/model" ID as well as
        # pasted "hf.co/user/model" or full huggingface.co URLs.
        model_id = model_id.strip().removeprefix("https://").removeprefix("http://")
        model_id = model_id.removeprefix("huggingface.co/").removeprefix("hf.co/").strip("/")

        # Fetch manifest
        manifest_url = f"https://huggingface.co/v2/{model_id}/manifests/{tag_name}"
        manifest_response = requests.get(manifest_url)
        manifest_response.raise_for_status()
        manifest_data = manifest_response.json()
        
        # Initialize output
        output = f"Manifest for {model_id}:{tag_name}\n"
        output += json.dumps(manifest_data, indent=2) + "\n"
        
        # Find the system, template, and params layers in the manifest
        system_layer = next((layer for layer in manifest_data["layers"] 
                             if layer["mediaType"] == "application/vnd.ollama.image.system"), None)
        template_layer = next((layer for layer in manifest_data["layers"] 
                             if layer["mediaType"] == "application/vnd.ollama.image.template"), None)
        params_layer = next((layer for layer in manifest_data["layers"] 
                           if layer["mediaType"] == "application/vnd.ollama.image.params"), None)

        # Fetch and display system if found
        if system_layer:
            system_url = f"https://huggingface.co/v2/{model_id}/blobs/{system_layer['digest']}"
            system_response = requests.get(system_url)
            system_response.raise_for_status()
            output += "\n\n======================\n\nSystem message:\n"
            output += system_response.text + "\n"

        # Fetch and display template if found
        if template_layer:
            template_url = f"https://huggingface.co/v2/{model_id}/blobs/{template_layer['digest']}"
            template_response = requests.get(template_url)
            template_response.raise_for_status()
            output += "\n\n======================\n\nTemplate:\n"
            output += template_response.text + "\n"
        
        # Fetch and display params if found
        if params_layer:
            params_url = f"https://huggingface.co/v2/{model_id}/blobs/{params_layer['digest']}"
            params_response = requests.get(params_url)
            params_response.raise_for_status()
            output += "\n\n======================\n\nParameters:\n"
            output += json.dumps(params_response.json(), indent=2) + "\n"
            
        return output
        
    except requests.exceptions.RequestException as e:
        raise gr.Error(f"Error occurred: {str(e)}")
    except Exception as e:
        raise gr.Error(f"Unexpected error: {str(e)}")

# Create Gradio interface
iface = gr.Interface(
    fn=fetch_manifest,
    inputs=[
        gr.Textbox(label="Model ID (e.g., bartowski/Meta-Llama-3.1-8B-Instruct-GGUF)", placeholder="Enter model ID"),
        gr.Textbox("latest", label="Tag Name (e.g., latest)", placeholder="Enter tag name")
    ],
    outputs=gr.Code(label="Results", lines=40),
    title="Hugging Face Model Manifest Viewer",
    description="Enter a Hugging Face model ID and tag name to view its manifest, template, and parameters.",
)

iface.launch()
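
# Note (assumption about deployment): launch() starts a local server by default;
# share=True can be passed for a temporary public link when not running on a
# hosted Space.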