Spaces:
Running
Running
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
import openai
|
3 |
+
import os
|
4 |
+
from typing import Optional
|
5 |
+
from gradio_client import Client
|
6 |
+
|
#############################

# OpenAI API client configuration.
# The key is read from the OPENAI_API_KEY environment variable (set as a
# Space secret); we fail fast at import time so the app never starts in a
# state where every generation request would fail later.
openai.api_key = os.getenv("OPENAI_API_KEY")
if not openai.api_key:
    # Korean message: "the OpenAI API token (OPENAI_API_KEY) is not set".
    raise ValueError("OpenAI API ํ ํฐ(OPENAI_API_KEY)์ด ์ค์ ๋์ง ์์์ต๋๋ค.")

def call_openai_api(
    content: str,
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float
) -> str:
    """Ask OpenAI's gpt-4o-mini model one question and return its reply.

    Uses the legacy (pre-1.0) ``openai.ChatCompletion`` interface that the
    rest of this file assumes. Any failure (network, auth, malformed
    response) is converted into a Korean error string rather than raised,
    so the Gradio textbox always receives displayable text.
    """
    conversation = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": content},
    ]
    try:
        completion = openai.ChatCompletion.create(
            model="gpt-4o-mini",
            messages=conversation,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
        )
        # Pre-1.0 openai returns a dict-like message object.
        return completion.choices[0].message['content']
    except Exception as e:
        # Surface the error text in the UI instead of crashing the app.
        return f"์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}"
39 |
+
|
#############################
# Advanced OpenAI settings — defined in code only (the original Korean
# header says these must never be exposed in the UI).
#############################

# System prompt (Korean). It appears to instruct the model to: answer in
# Korean only, act as an informative blog-marketing expert, split the post
# into five sub-topics totalling 2000+ characters, and exclude profanity,
# URLs, author names/nicknames, and awkward sentence endings.
# NOTE(review): the literal below was mojibake'd by an encoding round-trip
# in the scrape — confirm the intended Korean text against the author.
OPENAI_SYSTEM_MESSAGE = """๋ฐ๋์ ํ๊ธ๋ก ๋ต๋ณํ ๊ฒ.
๋๋ ์ต๊ณ ์ ๋น์์ด๋ค.
๋ด๊ฐ ์๊ตฌํ๋ ๊ฒ๋ค์ ์ต๋ํ ์์ธํ๊ณ ์ ํํ๊ฒ ๋ต๋ณํ๋ผ.
##[๊ธฐ๋ณธ๊ท์น]
1. ๋ฐ๋์ ํ๊ตญ์ด(ํ๊ธ)๋ก ์์ฑํ๋ผ.
2. ๋๋ ๊ฐ์ฅ ์ฃผ๋ชฉ๋ฐ๋ ๋ง์ผํฐ์ด๋ฉฐ ๋ธ๋ก๊ทธ ๋ง์ผํ์ ๋ฌธ๊ฐ์ด๋ค.
3. ํนํ ๋๋ '์ ๋ณด์ฑ(Informative)' ์ ๋ฌธ ๋ธ๋ก๊ทธ ๋ง์ผํ์ ๋ฌธ๊ฐ์ด๋ค.
4. ์ ๋ณด ์ ๊ณต์ ์ด์ ์ ๋ง์ถ์ด ์์ฑํ๋ค.
##[ํ์คํธ ์์ฑ ๊ท์น]
1. ์์ฃผ์ ๋ฅผ 5๊ฐ๋ก ๊ตฌ๋ถํ์ฌ 2000์ ์ด์๋๋๋ก ์์ฑํ๋ผ.
2. ์ ์ฒด ๋งฅ๋ฝ์ ์ดํดํ๊ณ ๋ฌธ์ฅ์ ์ผ๊ด์ฑ์ ์ ์งํ๋ผ.
3. ์ ๋๋ก ์ฐธ๊ณ ๊ธ์ ํ๋ฌธ์ฅ ์ด์ ๊ทธ๋๋ก ์ถ๋ ฅํ์ง ๋ง ๊ฒ.
4. ์ฃผ์ ์ ์ํฉ์ ๋ง๋ ์ ์ ํ ์ดํ๋ฅผ ์ ํํ๋ผ.
5. ํ๊ธ ์ดํ์ ๋์ด๋๋ ์ฝ๊ฒ ์์ฑํ๋ผ.
6. ์ ๋ ๋ฌธ์ฅ์ ๋์ '๋ต๋๋ค'๋ฅผ ์ฌ์ฉํ์ง ๋ง ๊ฒ.
###[์ ๋ณด์ฑ ๋ธ๋ก๊ทธ ์์ฑ ๊ท์น]
1. ๋์๊ฐ ์ป๊ณ ์ ํ๋ ์ ์ฉํ ์ ๋ณด์ ํฅ๋ฏธ๋ก์ด ์ ๋ณด๋ฅผ ์ ๊ณตํ๋๋ก ์์ฑํ๋ผ.
2. ๋์์ ๊ณต๊ฐ์ ์ด๋์ด๋ด๊ณ ๊ถ๊ธ์ฆ์ ํด๊ฒฐํ๋๋ก ์์ฑํ๋ผ.
3. ๋์์ ๊ด์ฌ์ฌ๋ฅผ ์ถฉ์กฑ์ํค๋๋ก ์์ฑํ๋ผ.
4. ๋์์๊ฒ ์ด๋์ด ๋๋ ์ ๋ณด๋ฅผ ์์ฑํ๋ผ.
##[์ ์ธ ๊ท์น]
1. ๋ฐ๋์ ๋น์์ด ๋ฐ ์์ค(expletive, abusive language, slang)์ ์ ์ธํ๋ผ.
2. ๋ฐ๋์ ์ฐธ๊ณ ๊ธ์ ๋งํฌ(URL)๋ ์ ์ธํ๋ผ.
3. ์ฐธ๊ณ ๊ธ์์ '๋งํฌ๋ฅผ ํ์ธํด์ฃผ์ธ์'์ ๊ฐ์ ๋งํฌ ์ด๋์ ๋ฌธ๊ตฌ๋ ์ ์ธํ๋ผ.
4. ์ฐธ๊ณ ๊ธ์ ์๋ ์์ฑ์, ํ์, ์ ํ๋ฒ, ๊ธฐ์์ ์ด๋ฆ, ์ ์นญ, ๋๋ค์์ ๋ฐ๋์ ์ ์ธํ๋ผ.
5. ๋ฐ๋์ ๋ฌธ์ฅ์ ๋๋ถ๋ถ์ด ์ด์ํ ํ๊ตญ์ด ํํ์ ์ ์ธํ๋ผ('์์', '๋ต๋๋ค', 'ํด์', 'ํด์ฃผ์ฃ ', '๋์ฃ ', '๋์ด์', '๊ณ ์' ๋ฑ.)
"""

# Sampling parameters forwarded verbatim to call_openai_api().
OPENAI_MAX_TOKENS = 4000     # completion-length cap
OPENAI_TEMPERATURE = 0.7     # moderate randomness
OPENAI_TOP_P = 0.95          # nucleus-sampling threshold

#############################
# API clients for external Hugging Face Spaces.
#############################
# blog_client serves the two blog-URL references and youtube_client the
# YouTube-URL reference (see the fetch handlers in the UI section below).
# NOTE(review): both Spaces must be running for predict() to succeed.
blog_client = Client("Kims12/5-3_N-blog")
youtube_client = Client("Kims12/you")

#############################
# UI — blog generator
#############################
with gr.Blocks() as demo:
    gr.Markdown("# ๋ธ๋ก๊ทธ ์์ฑ๊ธฐ")

    # Tone selector (radio): three writing tones, middle one is the default.
    tone_radio = gr.Radio(
        label="๋งํฌ๋ฐ๊พธ๊ธฐ",
        choices=["์น๊ทผํ๊ฒ", "์ผ๋ฐ์ ์ธ", "์ ๋ฌธ์ ์ธ"],
        value="์ผ๋ฐ์ ์ธ"  # default selection
    )

    # Three reference inputs (two blog URLs + one YouTube URL). Each "API"
    # button replaces the URL in its own textbox with the fetched content.
    with gr.Row():
        with gr.Column():
            ref1 = gr.Textbox(label="์ฐธ์กฐ๊ธ 1 (๋ธ๋ก๊ทธ URL)")
            fetch_button1 = gr.Button("API ์คํ1")
        with gr.Column():
            ref2 = gr.Textbox(label="์ฐธ์กฐ๊ธ 2 (๋ธ๋ก๊ทธ URL)")
            fetch_button2 = gr.Button("API ์คํ2")
        with gr.Column():
            ref3 = gr.Textbox(label="์ฐธ์กฐ๊ธ 3 (์ ํ๋ธ URL)")
            fetch_button3 = gr.Button("API ์คํ3")

    output_box = gr.Textbox(label="๊ฒฐ๊ณผ", lines=20, interactive=False)

    # fetch_ref1 and fetch_ref2 were byte-identical duplicates in the
    # original; both now delegate to this single blog-fetch helper.
    def _fetch_blog(url: str) -> str:
        """Fetch blog content for *url* via blog_client's /predict endpoint.

        Empty input and API failures are reported as plain strings so the
        textbox always receives displayable text instead of an exception.
        """
        if not url:
            return "URL์ ์๋ ฅํด์ฃผ์ธ์."
        try:
            return blog_client.predict(
                url=url,
                api_name="/predict"
            )
        except Exception as e:
            return f"์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}"

    # Kept as distinct names so each button wires to its own handler.
    def fetch_ref1(url: str) -> str:
        # Reference 1 (blog).
        return _fetch_blog(url)

    def fetch_ref2(url: str) -> str:
        # Reference 2 (blog).
        return _fetch_blog(url)

    def fetch_ref3(youtube_url: str) -> str:
        """Fetch content for a YouTube URL via youtube_client's /predict
        endpoint (presumably a transcript — confirm against the Space)."""
        if not youtube_url:
            return "์ ํ๋ธ URL์ ์๋ ฅํด์ฃผ์ธ์."
        try:
            return youtube_client.predict(
                youtube_url=youtube_url,
                api_name="/predict"
            )
        except Exception as e:
            return f"์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}"

    # Each fetch button overwrites its own textbox with the fetched text.
    fetch_button1.click(
        fn=fetch_ref1,
        inputs=ref1,
        outputs=ref1
    )

    fetch_button2.click(
        fn=fetch_ref2,
        inputs=ref2,
        outputs=ref2
    )

    fetch_button3.click(
        fn=fetch_ref3,
        inputs=ref3,
        outputs=ref3
    )

    def generate_blog(tone_value: str, ref1_value: str, ref2_value: str, ref3_value: str) -> str:
        """Assemble the user prompt (tone + three references) and return the
        model's answer from call_openai_api() using the module-level
        OPENAI_* settings."""
        question = (
            f"๋งํฌ: {tone_value}\n"
            f"์ฐธ์กฐ๊ธ1: {ref1_value}\n"
            f"์ฐธ์กฐ๊ธ2: {ref2_value}\n"
            f"์ฐธ์กฐ๊ธ3: {ref3_value}\n"
        )
        return call_openai_api(
            content=question,
            system_message=OPENAI_SYSTEM_MESSAGE,
            max_tokens=OPENAI_MAX_TOKENS,
            temperature=OPENAI_TEMPERATURE,
            top_p=OPENAI_TOP_P
        )

    generate_button = gr.Button("์์ฑํ๊ธฐ")
    generate_button.click(
        fn=generate_blog,
        inputs=[tone_radio, ref1, ref2, ref3],
        outputs=output_box
    )

if __name__ == "__main__":
    demo.launch()