Update app.py
app.py CHANGED
@@ -3,6 +3,7 @@ import os
 import io
 import PIL
 from freeGPT import Client
+import requests
 
 hf_token = os.environ.get("API_TOKEN")
 API_URL = "https://api-inference.huggingface.co/models/Lykon/dreamshaper-xl-v2-turbo"
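The only functional change in this hunk is the new `import requests`. The diff does not show where the module is used, but given the `API_URL` and `hf_token` defined just below it, the most likely use is a text-to-image call against the Hugging Face Inference API. A minimal sketch under that assumption (the `query()` helper and the payload shape are illustrative, not code from this commit):

import io
import os

import PIL.Image
import requests

API_URL = "https://api-inference.huggingface.co/models/Lykon/dreamshaper-xl-v2-turbo"
hf_token = os.environ.get("API_TOKEN")


def query(prompt: str) -> PIL.Image.Image:
    # Assumed contract: text-to-image models on the Inference API accept a JSON
    # payload with the prompt under "inputs" and return the image as raw bytes.
    response = requests.post(
        API_URL,
        headers={"Authorization": f"Bearer {hf_token}"},
        json={"inputs": prompt},
        timeout=60,
    )
    response.raise_for_status()
    return PIL.Image.open(io.BytesIO(response.content))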
@@ -55,16 +56,22 @@ if search_btn:
 
     with texts:
         st.write("We found these results on your query: ")
+        st.caption("gptedia.com")
         st.header(result1.split()[0], divider='rainbow')
         st.text(result1)
+        st.caption("reppit.com")
         st.header(result2.split()[0], divider='rainbow')
         st.text(result2)
+        st.caption("llama-answers.net")
         st.header(result3.split()[0], divider='rainbow')
         st.text(result3)
+        st.caption("reply-answers.com")
         st.header(result4.split()[0], divider='rainbow')
         st.text(result4)
+        st.caption("grow.org")
         st.header(result5.split()[0], divider='rainbow')
         st.text(result5)
+        st.caption("pedropedia.com")
         st.header(result6.split()[0], divider='rainbow')
         st.text(result6)
         st.caption("That's the end!")
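The second hunk inserts an st.caption() source label ahead of each result's header/text pair. Because the caption/header/text pattern now repeats six times, the same output could also be produced with a single loop; the sketch below uses placeholder values for the result strings and the texts container, which are defined elsewhere in app.py:

import streamlit as st

# Hypothetical stand-ins for objects defined elsewhere in app.py.
texts = st.container()
results = ["Example answer one ...", "Example answer two ..."]  # result1..result6 in the real app
captions = ["gptedia.com", "reppit.com", "llama-answers.net",
            "reply-answers.com", "grow.org", "pedropedia.com"]

with texts:
    st.write("We found these results on your query: ")
    # Same caption/header/text sequence as the diff, expressed as one loop.
    for caption, result in zip(captions, results):
        st.caption(caption)
        st.header(result.split()[0], divider='rainbow')
        st.text(result)
    st.caption("That's the end!")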