S-Dreamer commited on
Commit
4c513c9
·
verified ·
1 Parent(s): 99f19f2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +94 -22
app.py CHANGED
@@ -1,31 +1,103 @@
1
- # Step 1: Improved layout and clean sidebar
 
 
 
 
 
 
2
  import gradio as gr
3
 
4
  MODEL_ID = "Salesforce/codet5-large"
5
 
6
- with gr.Blocks(fill_height=True, theme=gr.themes.Soft()) as demo:
7
- with gr.Sidebar():
8
- gr.Markdown("## 🤖 Inference Provider")
9
- gr.Markdown(
10
- f"This Space showcases the `{MODEL_ID}` model, served via the Hugging Face Inference API.\n\n"
11
- "🔐 Sign in with your Hugging Face account to use this API."
12
- )
13
- login_button = gr.LoginButton("🔐 Sign in")
14
 
15
- gr.Markdown("---")
16
- gr.Markdown(f"**Model:** `{MODEL_ID}`")
17
- gr.Markdown("[📄 View Model Card](https://huggingface.co/Salesforce/codet5-large)")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
- gr.Markdown("# 🧠 CodeT5 Inference UI")
20
 
21
- # Placeholder: we'll add inputs, buttons, outputs in next steps
22
- gr.Markdown("_Interface under construction..._")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
 
24
- # Load HF inference API model
25
- gr.load(
26
- f"models/{MODEL_ID}",
27
- accept_token=login_button,
28
- provider="hf-inference"
29
- )
30
 
31
- demo.launch()
 
 
1
+ #!/usr/bin/env python
2
+ # -*- coding: utf-8 -*-
3
+ """
4
+ Enhanced Gradio UI for the Salesforce/codet5-large model using the Hugging Face Inference API.
5
+ Adheres to best practices, PEP8, flake8, and the Zen of Python.
6
+ """
7
+
8
  import gradio as gr
9
 
10
  MODEL_ID = "Salesforce/codet5-large"
11
 
 
 
 
 
 
 
 
 
12
 
13
def prepare_payload(prompt: str, max_tokens: int) -> dict:
    """
    Prepare the payload dictionary for the Hugging Face inference call.

    Args:
        prompt (str): The input code containing `<extra_id_0>`.
        max_tokens (int): Maximum number of tokens for generation.

    Returns:
        dict: Payload for the model API call.
    """
    # Gradio sliders may deliver their value as a float (e.g. 32.0); the
    # inference API expects an integer max_length, so coerce explicitly.
    return {"inputs": prompt, "parameters": {"max_length": int(max_tokens)}}
25
+
26
+
27
def extract_generated_text(api_response) -> str:
    """
    Extract generated text from the API response.

    Hugging Face text-generation endpoints typically return a *list* of
    dicts (e.g. ``[{"generated_text": "..."}]``), while error payloads and
    some routes return a bare dict — the original ``dict.get`` call would
    raise ``AttributeError`` on the list shape, so both are handled here.

    Args:
        api_response: The response from the model API call (list or dict).

    Returns:
        str: The generated text, or string representation of the response.
    """
    if isinstance(api_response, list) and api_response:
        # Unwrap the common [{"generated_text": ...}] envelope.
        api_response = api_response[0]
    if isinstance(api_response, dict):
        return api_response.get("generated_text", str(api_response))
    # Fall back to a plain string rendering for any other payload shape.
    return str(api_response)
38
 
 
39
 
40
def main():
    """Build the Gradio Blocks UI for CodeT5 inference and launch it.

    Layout: a sidebar with sign-in and model info, a code-input column on
    the left, and an output textbox on the right. The submit click is
    chained: prepare_payload -> model call -> extract_generated_text.
    """
    with gr.Blocks(fill_height=True, theme=gr.themes.Soft()) as demo:
        with gr.Sidebar():
            gr.Markdown("## 🤖 Inference Provider")
            gr.Markdown(
                (
                    "This Space showcases the `{}` model, served via the Hugging Face Inference API.\n\n"
                    "Sign in with your Hugging Face account to access the model."
                ).format(MODEL_ID)
            )
            # OAuth login; also passed to gr.load below so calls use the
            # signed-in user's token.
            login_button = gr.LoginButton("🔐 Sign in")
            gr.Markdown("---")
            gr.Markdown(f"**Model:** `{MODEL_ID}`")
            gr.Markdown("[📄 View Model Card](https://huggingface.co/Salesforce/codet5-large)")

        gr.Markdown("# 🧠 CodeT5 Inference UI")
        gr.Markdown("Enter your Python code snippet with `<extra_id_0>` as the mask token.")

        with gr.Row():
            with gr.Column(scale=1):
                code_input = gr.Code(
                    label="Input Code",
                    language="python",
                    value="def greet(user): print(f'hello <extra_id_0>!')",
                    lines=10,
                    autofocus=True,
                )
                # step=8 keeps values on multiples of 8 between 8 and 128.
                max_tokens = gr.Slider(
                    minimum=8, maximum=128, value=32, step=8, label="Max Tokens"
                )
                submit_btn = gr.Button("🚀 Run Inference")
            with gr.Column(scale=1):
                output_text = gr.Textbox(
                    label="Inference Output",
                    lines=10,
                    interactive=False,
                    placeholder="Model output will appear here...",
                )

        # Load the model from Hugging Face Inference API.
        # NOTE(review): gr.load returns a Blocks/Interface object, not a
        # single component — using it as outputs=/inputs= of the click
        # chain below looks suspect; confirm this wiring actually runs.
        model_iface = gr.load(
            f"models/{MODEL_ID}",
            accept_token=login_button,
            provider="hf-inference",
        )

        # Chain click events: prepare payload -> API call -> extract output.
        submit_btn.click(
            fn=prepare_payload,
            inputs=[code_input, max_tokens],
            outputs=model_iface,
            api_name="prepare_payload",
        ).then(
            fn=extract_generated_text,
            inputs=model_iface,
            outputs=output_text,
            api_name="extract_output",
        )

    # Blocking call: serves the app until the process is stopped.
    demo.launch()
100
 
 
 
 
 
 
 
101
 
102
# Standard script entry point: build and launch the UI only when run directly.
if __name__ == "__main__":
    main()