Update app.py
app.py CHANGED
@@ -6,10 +6,6 @@ import os
 # Initialize the Hugging Face Inference Client
 client = InferenceClient()
 
-
-
-
-
 # Function to generate and format AI response
 def generate_response(prompt_template, **kwargs):
     # Simulate processing/loading
@@ -22,6 +18,7 @@ def generate_response(prompt_template, **kwargs):
         top_p=0.8
     )
     response_content = response.choices[0].message["content"]
+    print(response_content)
     return gr.update(value=f"{response_content}")
 
 # Gradio app interface
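
For context, here is a minimal sketch of how the updated generate_response could sit inside app.py. Everything not visible in the diff is an assumption: the prompt construction, the chat_completion call with its model id and max_tokens, and the Blocks wiring at the bottom are illustrative placeholders, not the Space's actual code.

import os
import gradio as gr
from huggingface_hub import InferenceClient

# Initialize the Hugging Face Inference Client
client = InferenceClient()

# Function to generate and format AI response
def generate_response(prompt_template, **kwargs):
    # Simulate processing/loading
    prompt = prompt_template.format(**kwargs)  # assumed prompt construction

    # Assumed chat-completion call; only top_p=0.8 is visible in the diff.
    # The model id and max_tokens are hypothetical placeholders.
    response = client.chat_completion(
        messages=[{"role": "user", "content": prompt}],
        model="HuggingFaceH4/zephyr-7b-beta",
        max_tokens=512,
        top_p=0.8
    )
    response_content = response.choices[0].message["content"]
    print(response_content)  # log the raw model output (the line added in this commit)
    return gr.update(value=f"{response_content}")

# Gradio app interface (hypothetical wiring; the real components are not shown in the diff)
with gr.Blocks() as demo:
    topic = gr.Textbox(label="Topic")
    output = gr.Markdown()
    generate_btn = gr.Button("Generate")
    generate_btn.click(
        fn=lambda t: generate_response("Write a short piece about {topic}.", topic=t),
        inputs=topic,
        outputs=output,
    )

if __name__ == "__main__":
    demo.launch()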