XThomasBU committed
Commit a15b13b · verified · 1 Parent(s): 62ab1f0

Update app.py

Files changed (1)
  1. app.py +123 -70
app.py CHANGED
@@ -1,83 +1,136 @@
- from flask import Flask, redirect, request, session, url_for
  import os
- from authlib.integrations.flask_client import OAuth
- from langchain.llms.huggingface_hub import HuggingFaceHub
- from langchain.prompts import ChatPromptTemplate
- from langchain.schema import StrOutputParser
- from langchain.schema.runnable import Runnable
- from langchain.schema.runnable.config import RunnableConfig
- import chainlit as cl

  app = Flask(__name__)
  app.secret_key = 'YourSecretKey' # Change this to a real secret key for production

- # OAuth setup with Authlib
- oauth = OAuth(app)
- oauth.register(
-     name='oauth_provider',
-     client_id=os.getenv("OAUTH_CLIENT_ID"),
-     client_secret=os.getenv("OAUTH_CLIENT_SECRET"),
-     authorize_url=os.getenv("OPENID_PROVIDER_URL") + '/authorize',
-     access_token_url=os.getenv("OPENID_PROVIDER_URL") + '/token',
-     client_kwargs={'scope': os.getenv("OAUTH_SCOPES").split(',')},
-     redirect_uri=f"https://{os.getenv('SPACE_HOST')}/login/callback"
- )
-
- print(f"REDIRECT URI: https://{os.getenv('SPACE_HOST')}/login/callback")
-
- # Instantiate the LLM
- llm = HuggingFaceHub(
-     model_kwargs={"max_length": 500},
-     repo_id="google/flan-t5-xxl",
-     huggingfacehub_api_token=os.getenv("HUGGINGFACE_API_TOKEN"),
- )
-
- # Initialize ChainLit with LLM
- def initialize_chainlit():
-     add_llm_provider(
-         LangchainGenericProvider(
-             id=llm._llm_type,
-             name="HuggingFaceHub",
-             llm=llm,
-             is_chat=False,
-         )
-     )
-
- # Setup chainlit callbacks
- @cl.on_chat_start
- async def on_chat_start():
-     prompt = ChatPromptTemplate.from_messages([("human", "{question}")])
-     runnable = prompt | llm | StrOutputParser()
-     cl.user_session.set("runnable", runnable)
-
- @cl.on_message
- async def on_message(message: cl.Message):
-     runnable = cl.user_session.get("runnable") # type: Runnable
-     msg = cl.Message(content="")
-     async for chunk in runnable.astream(
-         {"question": message.content},
-         config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
-     ):
-         await msg.stream_token(chunk)
-     await msg.send()

  @app.route('/')
  def home():
-     return 'Home - <a href="/login">Login with OAuth Provider</a>'
-
- @app.route('/login')
- def login():
-     redirect_uri = url_for('authorize', _external=True)
-     return oauth.oauth_provider.authorize_redirect(redirect_uri)

  @app.route('/login/callback')
- def authorize():
-     print('Logged in and language model initialized. Proceed with operations.')
-     token = oauth.oauth_provider.authorize_access_token()
-     # Initialize ChainLit or perform actions based on the authenticated user
-     initialize_chainlit()
-     return 'Logged in and language model initialized. Proceed with operations.'

  if __name__ == "__main__":
      app.run(debug=True)
-
+ # from flask import Flask, redirect, request, session, url_for
+ # import os
+ # from authlib.integrations.flask_client import OAuth
+ # from langchain.llms.huggingface_hub import HuggingFaceHub
+ # from langchain.prompts import ChatPromptTemplate
+ # from langchain.schema import StrOutputParser
+ # from langchain.schema.runnable import Runnable
+ # from langchain.schema.runnable.config import RunnableConfig
+ # import chainlit as cl
+
+ # app = Flask(__name__)
+ # app.secret_key = 'YourSecretKey' # Change this to a real secret key for production
+
+ # # OAuth setup with Authlib
+ # oauth = OAuth(app)
+ # oauth.register(
+ # name='oauth_provider',
+ # client_id=os.getenv("OAUTH_CLIENT_ID"),
+ # client_secret=os.getenv("OAUTH_CLIENT_SECRET"),
+ # authorize_url=os.getenv("OPENID_PROVIDER_URL") + '/authorize',
+ # access_token_url=os.getenv("OPENID_PROVIDER_URL") + '/token',
+ # client_kwargs={'scope': os.getenv("OAUTH_SCOPES").split(',')},
+ # redirect_uri=f"https://{os.getenv('SPACE_HOST')}/login/callback"
+ # )
+
+ # print(f"REDIRECT URI: https://{os.getenv('SPACE_HOST')}/login/callback")
+
+ # # Instantiate the LLM
+ # llm = HuggingFaceHub(
+ # model_kwargs={"max_length": 500},
+ # repo_id="google/flan-t5-xxl",
+ # huggingfacehub_api_token=os.getenv("HUGGINGFACE_API_TOKEN"),
+ # )
+
+ # # Initialize ChainLit with LLM
+ # def initialize_chainlit():
+ # add_llm_provider(
+ # LangchainGenericProvider(
+ # id=llm._llm_type,
+ # name="HuggingFaceHub",
+ # llm=llm,
+ # is_chat=False,
+ # )
+ # )
+
+ # # Setup chainlit callbacks
+ # @cl.on_chat_start
+ # async def on_chat_start():
+ # prompt = ChatPromptTemplate.from_messages([("human", "{question}")])
+ # runnable = prompt | llm | StrOutputParser()
+ # cl.user_session.set("runnable", runnable)
+
+ # @cl.on_message
+ # async def on_message(message: cl.Message):
+ # runnable = cl.user_session.get("runnable") # type: Runnable
+ # msg = cl.Message(content="")
+ # async for chunk in runnable.astream(
+ # {"question": message.content},
+ # config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
+ # ):
+ # await msg.stream_token(chunk)
+ # await msg.send()
+
+ # @app.route('/')
+ # def home():
+ # return 'Home - <a href="/login">Login with OAuth Provider</a>'
+
+ # @app.route('/login')
+ # def login():
+ # redirect_uri = url_for('authorize', _external=True)
+ # return oauth.oauth_provider.authorize_redirect(redirect_uri)
+
+ # @app.route('/login/callback')
+ # def authorize():
+ # print('Logged in and language model initialized. Proceed with operations.')
+ # token = oauth.oauth_provider.authorize_access_token()
+ # # Initialize ChainLit or perform actions based on the authenticated user
+ # initialize_chainlit()
+ # return 'Logged in and language model initialized. Proceed with operations.'
+
+ # if __name__ == "__main__":
+ # app.run(debug=True)
+
+
+ from flask import Flask, redirect, request, session, url_for, jsonify
  import os
+ import requests
+ from base64 import b64encode

  app = Flask(__name__)
  app.secret_key = 'YourSecretKey' # Change this to a real secret key for production

+ # OAuth Configuration
+ CLIENT_ID = os.getenv("OAUTH_CLIENT_ID")
+ CLIENT_SECRET = os.getenv("OAUTH_CLIENT_SECRET")
+ REDIRECT_URI = f"https://{os.getenv('SPACE_HOST')}/login/callback"
+ AUTHORIZE_URL = "https://huggingface.co/oauth/authorize"
+ TOKEN_URL = "https://huggingface.co/oauth/token"

  @app.route('/')
  def home():
+     # Generate a random state for CSRF protection
+     state = os.urandom(16).hex()
+     session['state'] = state
+     # Redirect URL for "Sign-in with HF"
+     return redirect(f"{AUTHORIZE_URL}?redirect_uri={REDIRECT_URI}&scope=openid%20profile&client_id={CLIENT_ID}&state={state}")

  @app.route('/login/callback')
+ def login_callback():
+     # Verify state matches
+     state = request.args.get('state')
+     if state != session.pop('state', None):
+         return 'State mismatch', 400
+
+     # Exchange code for token
+     code = request.args.get('code')
+     headers = {'Authorization': 'Basic ' + b64encode(f"{CLIENT_ID}:{CLIENT_SECRET}".encode()).decode()}
+     data = {
+         'grant_type': 'authorization_code',
+         'code': code,
+         'redirect_uri': REDIRECT_URI,
+         'client_id': CLIENT_ID
+     }
+     response = requests.post(TOKEN_URL, headers=headers, data=data)
+     response_data = response.json()
+
+     if response.status_code != 200:
+         return jsonify(response_data), response.status_code
+
+     # At this point, you have access_token and id_token in response_data
+     # You can use these tokens to authenticate against the Hugging Face API or your application's backend
+
+     return 'Logged in successfully.', 200

  if __name__ == "__main__":
      app.run(debug=True)
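
Note: the comment in login_callback says the returned access_token and id_token can be used against the Hugging Face API. Below is a minimal sketch of what that might look like, assuming the standard OIDC userinfo endpoint at https://huggingface.co/oauth/userinfo; the endpoint constant, the fetch_user_profile helper, and the session-based storage are illustrative assumptions, not part of this commit.

import requests
from flask import session

USERINFO_URL = "https://huggingface.co/oauth/userinfo"  # assumed OIDC userinfo endpoint

def fetch_user_profile(access_token: str) -> dict:
    """Exchange the OAuth access token for the user's profile claims (hypothetical helper)."""
    resp = requests.get(
        USERINFO_URL,
        headers={"Authorization": f"Bearer {access_token}"},
        timeout=10,
    )
    resp.raise_for_status()
    # Typical OIDC claims include "sub", "name", and "preferred_username"
    return resp.json()

# Inside login_callback(), after the token exchange succeeds, one could do:
#     session["access_token"] = response_data["access_token"]
#     profile = fetch_user_profile(response_data["access_token"])
#     return f"Logged in as {profile.get('preferred_username', 'unknown')}.", 200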