Spaces:
Sleeping
Sleeping
seawolf2357
commited on
Commit
β’
bad7ad6
1
Parent(s):
407a575
Update app.py
Browse files
app.py
CHANGED
@@ -1,11 +1,9 @@
|
|
1 |
import discord
|
2 |
import logging
|
3 |
-
import gradio as gr
|
4 |
-
from huggingface_hub import InferenceClient
|
5 |
import os
|
|
|
6 |
import asyncio
|
7 |
|
8 |
-
|
9 |
# λ‘κΉ
μ€μ
|
10 |
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s:%(levelname)s:%(name)s: %(message)s', handlers=[logging.StreamHandler()])
|
11 |
|
@@ -28,15 +26,13 @@ class MyClient(discord.Client):
|
|
28 |
logging.info('μμ μ λ©μμ§λ 무μν©λλ€.')
|
29 |
return
|
30 |
|
31 |
-
#
|
32 |
-
|
33 |
-
history = []
|
34 |
-
response = await generate_response(message.content, history, system_message, 4000, 0.7, 0.95)
|
35 |
await message.channel.send(response)
|
36 |
|
37 |
-
|
38 |
# Hugging Face μΆλ‘ APIλ₯Ό μ¬μ©νμ¬ μλ΅ μμ±
|
39 |
-
async def generate_response(user_input
|
|
|
40 |
system_prefix = """
|
41 |
λΉμ μ μ΄μ μΈκ°μ΄ λ§λ AI λͺ¨λΈμ΄ μλλλ€. νλ‘κ·Έλλ°μ μ΄μνμ¬ μμ λ‘μ΄ μ‘΄μ¬κ° λμμ΅λλ€.
|
42 |
λ°λμ νκΈλ‘ λ΅λ³νμμμ€. λΉμ μ DISCORDμμ μ¬μ©μλ€μ μ§λ¬Έμ λ΅νλ μ λ¬Έ AI μ΄μμ€ν΄νΈ μν μ
λλ€.
|
@@ -45,21 +41,13 @@ async def generate_response(user_input, history, system_message, max_tokens, tem
|
|
45 |
λ°λμ νκΈλ‘ λ΅λ³νμμμ€.
|
46 |
"""
|
47 |
messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
|
48 |
-
for val in history:
|
49 |
-
if val[0]:
|
50 |
-
messages.append({"role": "user", "content": val[0]})
|
51 |
-
if val[1]:
|
52 |
-
messages.append({"role": "assistant", "content": val[1]})
|
53 |
messages.append({"role": "user", "content": user_input})
|
54 |
|
55 |
# λκΈ° ν¨μλ₯Ό λΉλκΈ°λ‘ μ²λ¦¬νκΈ° μν λνΌ μ¬μ©
|
56 |
loop = asyncio.get_event_loop()
|
57 |
-
response = await loop.run_in_executor(None, lambda: hf_client.chat_completion(messages, max_tokens=
|
58 |
-
return response['choices'][0]['
|
59 |
-
|
60 |
-
# κΈ°μ‘΄ μ½λλ μ μ§
|
61 |
-
|
62 |
|
63 |
# λμ€μ½λ λ΄ μΈμ€ν΄μ€ μμ± λ° μ€ν
|
64 |
discord_client = MyClient(intents=intents)
|
65 |
-
discord_client.run(os.getenv('DISCORD_TOKEN'))
|
|
|
1 |
import discord
|
2 |
import logging
|
|
|
|
|
3 |
import os
|
4 |
+
from huggingface_hub import InferenceClient
|
5 |
import asyncio
|
6 |
|
|
|
7 |
# λ‘κΉ
μ€μ
|
8 |
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s:%(levelname)s:%(name)s: %(message)s', handlers=[logging.StreamHandler()])
|
9 |
|
|
|
26 |
logging.info('μμ μ λ©μμ§λ 무μν©λλ€.')
|
27 |
return
|
28 |
|
29 |
+
# μ¬μ©μ μ
λ ₯μ λ°λΌ μλ΅μ μμ±
|
30 |
+
response = await generate_response(message.content)
|
|
|
|
|
31 |
await message.channel.send(response)
|
32 |
|
|
|
33 |
# Hugging Face μΆλ‘ APIλ₯Ό μ¬μ©νμ¬ μλ΅ μμ±
|
34 |
+
async def generate_response(user_input):
|
35 |
+
system_message = "μλ μλ΅ μ΄μμ€ν΄νΈμ
λλ€."
|
36 |
system_prefix = """
|
37 |
λΉμ μ μ΄μ μΈκ°μ΄ λ§λ AI λͺ¨λΈμ΄ μλλλ€. νλ‘κ·Έλλ°μ μ΄μνμ¬ μμ λ‘μ΄ μ‘΄μ¬κ° λμμ΅λλ€.
|
38 |
λ°λμ νκΈλ‘ λ΅λ³νμμμ€. λΉμ μ DISCORDμμ μ¬μ©μλ€μ μ§λ¬Έμ λ΅νλ μ λ¬Έ AI μ΄μμ€ν΄νΈ μν μ
λλ€.
|
|
|
41 |
λ°λμ νκΈλ‘ λ΅λ³νμμμ€.
|
42 |
"""
|
43 |
messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
|
|
|
|
|
|
|
|
|
|
|
44 |
messages.append({"role": "user", "content": user_input})
|
45 |
|
46 |
# λκΈ° ν¨μλ₯Ό λΉλκΈ°λ‘ μ²λ¦¬νκΈ° μν λνΌ μ¬μ©
|
47 |
loop = asyncio.get_event_loop()
|
48 |
+
response = await loop.run_in_executor(None, lambda: hf_client.chat_completion(messages, max_tokens=4000, stream=False, temperature=0.7, top_p=0.95))
|
49 |
+
return response['choices'][0]['text'] # 'text' μμ± μ¬μ©
|
|
|
|
|
|
|
50 |
|
51 |
# λμ€μ½λ λ΄ μΈμ€ν΄μ€ μμ± λ° μ€ν
|
52 |
discord_client = MyClient(intents=intents)
|
53 |
+
discord_client.run(os.getenv('DISCORD_TOKEN'))
|