awacke1 committed on
Commit 4ce11e4 · 1 Parent(s): 4451326

Update app.py

Files changed (1): app.py (+35 -19)
app.py CHANGED
@@ -5,13 +5,12 @@ import requests
 
 #Streaming endpoint
 API_URL = "https://api.openai.com/v1/chat/completions" #os.getenv("API_URL") + "/generate_stream"
-# OPENAI_API_KEY = os.getenv("ChatGPT") # Key 03-23
-
 OPENAI_API_KEY= os.environ["HF_TOKEN"] # Add a token to this space . Then copy it to the repository secret in this spaces settings panel. os.environ reads from there.
 # Keys for Open AI ChatGPT API usage are created from here: https://platform.openai.com/account/api-keys
 
 def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]): #repetition_penalty, top_k
 
+    # 1. Set up a payload
     payload = {
         "model": "gpt-3.5-turbo",
         "messages": [{"role": "user", "content": f"{inputs}"}],
@@ -23,11 +22,13 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
         "frequency_penalty":0,
     }
 
+    # 2. Define your headers and add a key from https://platform.openai.com/account/api-keys
     headers = {
         "Content-Type": "application/json",
         "Authorization": f"Bearer {OPENAI_API_KEY}"
     }
 
+    # 3. Create a chat counter loop that feeds [Predict next best anything based on last input and attention with memory defined by introspective attention over time]
     print(f"chat_counter - {chat_counter}")
     if chat_counter != 0 :
         messages=[]
@@ -55,9 +56,9 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
         "presence_penalty":0,
         "frequency_penalty":0,
     }
-
     chat_counter+=1
 
+    # 4. POST it to OPENAI API
     history.append(inputs)
     print(f"payload is - {payload}")
     # make a POST request to the API endpoint using the requests.post method, passing in stream=True
@@ -66,6 +67,8 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
     token_counter = 0
     partial_words = ""
 
+    # 5. Iterate through response lines and structure readable response
+    # TODO - make this parse out markdown so we can have similar interface
     counter=0
     for chunk in response.iter_lines():
         #Skipping first chunk
@@ -93,18 +96,34 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
 def reset_textbox():
     return gr.update(value='')
 
-title = """<h1 align="center">🔥ChatGPT API 🚀Streaming🚀</h1>"""
-description = """Language models can be conditioned to act like dialogue agents through a conversational prompt that typically takes the form:
-```
-User: <utterance>
-Assistant: <utterance>
-User: <utterance>
-Assistant: <utterance>
-...
-```
-In this app, you can explore the outputs of a gpt-3.5-turbo LLM.
-"""
-
+title = """<h1 align="center">Memory Chat Story Generator ChatGPT</h1>"""
+description = """
+
+## ChatGPT Datasets 📚
+- WebText
+- Common Crawl
+- BooksCorpus
+- English Wikipedia
+- Toronto Books Corpus
+- OpenWebText
+
+## ChatGPT Datasets - Details 📚
+- **WebText:** A dataset of web pages crawled from domains on the Alexa top 5,000 list. This dataset was used to pretrain GPT-2.
+  - [WebText: A Large-Scale Unsupervised Text Corpus by Radford et al.](https://paperswithcode.com/dataset/webtext)
+- **Common Crawl:** A dataset of web pages from a variety of domains, which is updated regularly. This dataset was used to pretrain GPT-3.
+  - [Language Models are Few-Shot Learners](https://paperswithcode.com/dataset/common-crawl) by Brown et al.
+- **BooksCorpus:** A dataset of over 11,000 books from a variety of genres.
+  - [Scalable Methods for 8 Billion Token Language Modeling](https://paperswithcode.com/dataset/bookcorpus) by Zhu et al.
+- **English Wikipedia:** A dump of the English-language Wikipedia as of 2018, with articles from 2001-2017.
+  - [Improving Language Understanding by Generative Pre-Training](https://huggingface.co/spaces/awacke1/WikipediaUltimateAISearch?logs=build) Space for Wikipedia Search
+- **Toronto Books Corpus:** A dataset of over 7,000 books from a variety of genres, collected by the University of Toronto.
+  - [Massively Multilingual Sentence Embeddings for Zero-Shot Cross-Lingual Transfer and Beyond](https://paperswithcode.com/dataset/bookcorpus) by Schwenk and Douze.
+- **OpenWebText:** A dataset of web pages that were filtered to remove content that was likely to be low-quality or spammy. This dataset was used to pretrain GPT-3.
+  - [Language Models are Few-Shot Learners](https://paperswithcode.com/dataset/openwebtext) by Brown et al.
+
+"""
+
+# 6. Use Gradio to pull it all together
 with gr.Blocks(css = """#col_container {width: 1000px; margin-left: auto; margin-right: auto;}
                 #chatbot {height: 520px; overflow: auto;}""") as demo:
     gr.HTML(title)
@@ -115,12 +134,9 @@ with gr.Blocks(css = """#col_container {width: 1000px; margin-left: auto; margin
     state = gr.State([]) #s
     b1 = gr.Button()
 
-    #inputs, top_p, temperature, top_k, repetition_penalty
     with gr.Accordion("Parameters", open=False):
         top_p = gr.Slider( minimum=-0, maximum=1.0, value=1.0, step=0.05, interactive=True, label="Top-p (nucleus sampling)",)
         temperature = gr.Slider( minimum=-0, maximum=5.0, value=1.0, step=0.1, interactive=True, label="Temperature",)
-        #top_k = gr.Slider( minimum=1, maximum=50, value=4, step=1, interactive=True, label="Top-k",)
-        #repetition_penalty = gr.Slider( minimum=0.1, maximum=3.0, value=1.03, step=0.01, interactive=True, label="Repetition Penalty", )
     chat_counter = gr.Number(value=0, visible=False, precision=0)
 
     inputs.submit( predict, [inputs, top_p, temperature,chat_counter, chatbot, state], [chatbot, state, chat_counter],)
@@ -128,5 +144,5 @@ with gr.Blocks(css = """#col_container {width: 1000px; margin-left: auto; margin
     b1.click(reset_textbox, [], [inputs])
     inputs.submit(reset_textbox, [], [inputs])
 
-    #gr.Markdown(description)
+    gr.Markdown(description)
     demo.queue().launch(debug=True)
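
Note: the loop that rebuilds the message list from history (step 3) and the requests.post call with stream=True (step 4) fall outside the hunks shown above. The following is only a minimal sketch of that pattern, assuming history alternates user and assistant turns; the helper name build_messages_and_post is hypothetical and not part of this commit.

```python
import requests

API_URL = "https://api.openai.com/v1/chat/completions"

def build_messages_and_post(inputs, history, payload, headers):
    # Hypothetical reconstruction, not the committed code: rebuild the message
    # list from prior turns (step 3), then POST with streaming on (step 4).
    messages = []
    for i, text in enumerate(history):
        role = "user" if i % 2 == 0 else "assistant"
        messages.append({"role": role, "content": text})
    messages.append({"role": "user", "content": inputs})
    payload["messages"] = messages

    # stream=True keeps the connection open so tokens arrive incrementally
    # instead of as one final JSON body.
    return requests.post(API_URL, headers=headers, json=payload, stream=True)
```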
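Likewise, the body of the step-5 loop over response.iter_lines() is not shown in the diff. Below is a hedged sketch of how such server-sent-event chunks are commonly parsed into a growing reply for the chatbot; stream_tokens is a hypothetical name, and the exact parsing in app.py may differ.

```python
import json

def stream_tokens(response):
    # Hypothetical helper for step 5: accumulate streamed deltas into a growing
    # reply string. Field names follow the chat-completions SSE format
    # ("data: {...}" lines, terminated by "data: [DONE]").
    partial_words = ""
    for chunk in response.iter_lines():
        if not chunk:
            continue  # skip keep-alive blank lines
        decoded = chunk.decode("utf-8")
        if not decoded.startswith("data: ") or decoded == "data: [DONE]":
            continue
        delta = json.loads(decoded[len("data: "):])["choices"][0].get("delta", {})
        if "content" in delta:
            partial_words += delta["content"]
            yield partial_words  # cumulative text so the chatbot UI can update
```

A markdown-aware variant, as the step-5 TODO suggests, would post-process partial_words before yielding it.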