import gradio as gr
from huggingface_hub import list_models
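
# Note: because the app includes a gr.LoginButton, Gradio automatically fills in any
# handler parameter typed as gr.OAuthProfile or gr.OAuthToken with the logged-in
# user's profile/token (or None when no one is logged in); these parameters are not
# passed as regular inputs.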


def hello(profile: gr.OAuthProfile | None) -> str:
    # ^ expect a gr.OAuthProfile object as input to get the user's profile
    # if the user is not logged in, profile will be None
    if profile is None:
        return "I don't know you."
    return f"Hello {profile.name}"


def message(message, history, profile: gr.OAuthProfile | None):
    # handler for gr.ChatInterface; show an error in the chat if the user is not logged in
    if profile is None:
        raise gr.Error("Please login to...")
    return f"hello {profile.name}"


def list_private_models(profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToken | None) -> str:
    # ^ expect a gr.OAuthToken object as input to get the user's token
    # if the user is not logged in, oauth_token will be None
    if oauth_token is None:
        return "Please log in to list private models."
    models = [
        f"{model.id} ({'private' if model.private else 'public'})"
        for model in list_models(author=profile.username, token=oauth_token.token)
    ]
    return "Models:\n\n" + "\n - ".join(models) + "."


with gr.Blocks() as demo:
    gr.Markdown(
        "# ChatGPT-4o"
        "\n\nThis is GPT-4o; you can use the text and image capabilities now. More capabilities like audio and video will be rolled out iteratively in the future. Stay tuned."
    )
    gr.LoginButton()
    # ^ add a login button to the Space
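    # NOTE (assumption, see the Hugging Face Spaces OAuth docs): for the login button
    # to work when this app is deployed as a Space, OAuth must also be enabled in the
    # Space's README.md metadata, e.g.
    #   ---
    #   sdk: gradio
    #   hf_oauth: true
    #   ---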
    gr.ChatInterface(message, multimodal=True)
    # m1 = gr.Markdown()
    # m2 = gr.Markdown()
    # demo.load(hello, inputs=None, outputs=m1)
    # demo.load(list_private_models, inputs=None, outputs=m2)

demo.launch()