File size: 2,395 Bytes
ccec886
43a5d8c
afc2013
 
99e1d75
c8f7bea
 
1248eba
99e1d75
 
91bbd67
6c00950
41737a5
 
 
59def8e
41737a5
d97926c
41737a5
215d371
be827cd
7b5f63c
 
645bac4
6c00950
215d371
ea0fbdd
43a5d8c
c8f7bea
 
99e1d75
43a5d8c
 
 
 
 
 
91bbd67
ea0fbdd
35bd019
afc2013
243d3e9
 
afc2013
99e1d75
243d3e9
5544b38
243d3e9
35bd019
 
 
 
 
 
 
 
 
c7a54bd
 
c8f7bea
215d371
 
 
 
3110228
47d0994
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import gradio as gr
from huggingface_hub import list_models


def hello(profile: gr.OAuthProfile | None) -> str:
    """Greet the signed-in user by name.

    Gradio injects the caller's OAuth profile automatically; it is
    ``None`` when nobody is signed in.
    """
    return "I don't know you." if profile is None else f"Hello {profile.name}"

def message(message, history, profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToken | None, request: gr.Request):
    """Chat handler: log request metadata and greet the signed-in user.

    Args:
        message: The incoming chat message (text or multimodal payload).
        history: The chat history maintained by gr.ChatInterface.
        profile: OAuth profile injected by Gradio; None when not signed in.
        oauth_token: OAuth token injected by Gradio; may be None even when
            a profile is present (e.g. token not granted / expired).
        request: The underlying HTTP request for this event.

    Raises:
        gr.Error: If the user is not signed in.
    """
    # Debug logging of the request context.
    print("Request headers dictionary:", request.headers)
    print("IP address:", request.client.host)
    print("Query parameters:", dict(request.query_params))
    print("sign: ", request.query_params.get('__sign'))
    print("username:", request.username)
    print("Session hash:", request.session_hash)

    if profile is None:
        raise gr.Error('Click "Sign in with Hugging Face" to continue')

    print(profile)
    # Bug fix: the original read oauth_token.token unconditionally; a signed-in
    # profile with a missing token crashed with AttributeError on None.
    if oauth_token is not None:
        print(f'oauth token: {oauth_token.token}')

    return f"hello {profile.name}"

def list_private_models(profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToken | None) -> str:
    """List the signed-in user's Hub models, marking each private/public.

    Args:
        profile: OAuth profile injected by Gradio; None when not signed in.
        oauth_token: OAuth token granting read access to private models.

    Returns:
        A bulleted list of the user's models, or a login prompt.
    """
    # Bug fix: check both values — the original tested only oauth_token and
    # then dereferenced profile.username, crashing when profile was None.
    if profile is None or oauth_token is None:
        return "Please log in to list private models."
    models = [
        f"{model.id} ({'private' if model.private else 'public'})"
        for model in list_models(author=profile.username, token=oauth_token.token)
    ]
    # Robustness: the original produced the nonsense string "Models:\n\n."
    # for an empty listing.
    if not models:
        return "No models found."
    # Bug fix: put the " - " bullet on every entry (the original join left
    # the first model un-bulleted).
    return "Models:\n\n - " + "\n - ".join(models) + "."


with gr.Blocks(fill_height=True) as demo:
    # Page header: title plus a short capability note, rendered as Markdown.
    gr.Markdown(
        "# ChatGPT-4o"
        "\n\nThis is GPT-4o, you can use the text and image capabilities now. More capabilities like audio and video will be rolled out iteratively in the future. Stay tuned."
    )

    # "Sign in with Hugging Face" button — populates the OAuth profile/token
    # parameters that the `message` handler expects.
    gr.LoginButton()

    # Multimodal chat UI wired to the `message` handler.
    gr.ChatInterface(message, multimodal=True)

    # NOTE(review): leftover experiments, kept for reference.
    # (1) A fixed-height, scrollable chat container via injected CSS:
    # style = """
    #     <style>
    #         #chat-interface {
    #             height: 500px;    /* desired height in pixels */
    #             overflow-y: auto; /* enable a vertical scrollbar */
    #         }
    #     </style>
    # """
    # gr.Markdown(style)
    #
    # (2) Greeting / model listing rendered on page load:
    # m1 = gr.Markdown()
    # m2 = gr.Markdown()
    # demo.load(hello, inputs=None, outputs=m1)
    # demo.load(list_private_models, inputs=None, outputs=m2)

demo.launch(share=True)