import gradio as gr
from huggingface_hub import InferenceClient
import os
import pandas as pd
from typing import List, Tuple
# Set up the inference API client
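# The HF_TOKEN environment variable (e.g., a Hugging Face Space secret) is used
# to authenticate requests to the hosted model.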
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
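
# Helper: return the contents of an uploaded file as text.
# Parquet files are previewed as a Markdown table of their first 10 rows;
# any other file is decoded as UTF-8 text.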
def read_uploaded_file(file):
    if file is None:
        return ""
    try:
        if file.name.endswith('.parquet'):
            df = pd.read_parquet(file.name, engine='pyarrow')
            return df.head(10).to_markdown(index=False)
        else:
            content = file.read()
            if isinstance(content, bytes):
                return content.decode('utf-8')
            return content
    except Exception as e:
        return f"An error occurred while reading the file: {str(e)}"
def respond(
    message,
    history: List[Tuple[str, str]],
    fashion_file,   # file upload input
    uhd_file,       # file upload input
    mixgen_file,    # file upload input
    parquet_file,   # file upload input
    system_message="",
    max_tokens=1024,
    temperature=0.7,
    top_p=0.9,
):
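    # Base system prompt: restrict the assistant to usage guidance and Q&A,
    # and forbid revealing keys, endpoints, or the specific model in use.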
system_prefix = """λ°λμ νκΈλ‘ λ΅λ³ν κ². λλ μ£Όμ΄μ§ μμ€μ½λλ₯Ό κΈ°λ°μΌλ‘ "μλΉμ€ μ¬μ© μ€λͺ
λ° μλ΄, Q&Aλ₯Ό νλ μν μ΄λ€". μμ£Ό μΉμ νκ³ μμΈνκ² 4000ν ν° μ΄μ Markdown νμμΌλ‘ μμ±νλΌ. λλ μ½λλ₯Ό κΈ°λ°μΌλ‘ μ¬μ© μ€λͺ
λ° μ§μ μλ΅μ μ§ννλ©°, μ΄μ©μμκ² λμμ μ£Όμ΄μΌ νλ€. μ΄μ©μκ° κΆκΈν΄ ν λ§ν λ΄μ©μ μΉμ νκ² μλ €μ£Όλλ‘ νλΌ. μ½λ μ 체 λ΄μ©μ λν΄μλ 보μμ μ μ§νκ³ , ν€ κ° λ° μλν¬μΈνΈμ ꡬ체μ μΈ λͺ¨λΈμ 곡κ°νμ§ λ§λΌ."""

    if message.lower() == "run fashion code" and fashion_file is not None:
        fashion_content = read_uploaded_file(fashion_file)
        system_message += f"\n\nFashion code content:\n```python\n{fashion_content}\n```"
        message = "Announce that you have studied the fashion virtual try-on code and are ready to explain it, and invite the user to test it at the service URL (https://aiqcamp-fash.hf.space)."
    elif message.lower() == "run uhd image code" and uhd_file is not None:
        uhd_content = read_uploaded_file(uhd_file)
        system_message += f"\n\nUHD image code content:\n```python\n{uhd_content}\n```"
        message = "Announce that you have studied the UHD image generation code and are ready to explain it, and invite the user to test it at the service URL (https://openfree-ultpixgen.hf.space)."
    elif message.lower() == "run mixgen code" and mixgen_file is not None:
        mixgen_content = read_uploaded_file(mixgen_file)
        system_message += f"\n\nMixGEN code content:\n```python\n{mixgen_content}\n```"
        message = "Announce that you have studied the MixGEN3 image generation code and are ready to explain it, and invite the user to test it at the service URL (https://openfree-mixgen3.hf.space)."
    elif message.lower() == "run test.parquet" and parquet_file is not None:
        parquet_content = read_uploaded_file(parquet_file)
        system_message += f"\n\ntest.parquet file content:\n```markdown\n{parquet_content}\n```"
        message = "Announce that you have studied the test.parquet file and are ready to explain it and answer questions, and invite the user to ask anything they are curious about."
    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # Stream tokens from the chat completion endpoint
        for chunk in hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content
            if token:
                response += token
                yield response
    except Exception as e:
        yield f"An error occurred during inference: {str(e)}"
css = """
footer {
    visibility: hidden;
}
"""
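
# UI layout: chat panel on the left; file uploads, analysis buttons,
# and generation settings on the right.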
with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
    with gr.Row():
        with gr.Column(scale=2):
            chatbot = gr.Chatbot()
            msg = gr.Textbox(label="Enter your message")
            clear = gr.ClearButton([msg, chatbot])
        with gr.Column(scale=1):
            with gr.Group():
                fashion_file = gr.File(label="Fashion Code File", file_types=[".cod", ".txt", ".py"])
                fashion_analyze = gr.Button("Analyze Fashion Code")
                uhd_file = gr.File(label="UHD Image Code File", file_types=[".cod", ".txt", ".py"])
                uhd_analyze = gr.Button("Analyze UHD Image Code")
                mixgen_file = gr.File(label="MixGEN Code File", file_types=[".cod", ".txt", ".py"])
                mixgen_analyze = gr.Button("Analyze MixGEN Code")
                parquet_file = gr.File(label="Parquet File", file_types=[".parquet"])
                parquet_analyze = gr.Button("Analyze Parquet File")
            with gr.Accordion("Advanced Settings", open=False):
                system_message = gr.Textbox(label="System Message", value="")
                max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens")
                temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
                top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")
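
    # NOTE: each trigger phrase returned here must match the corresponding check
    # in respond() exactly (after .lower()), or the uploaded file is ignored.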
    # Click handler for the analysis buttons
    def analyze_file(file_type):
        if file_type == "fashion":
            return "Run Fashion Code"
        elif file_type == "uhd":
            return "Run UHD Image Code"
        elif file_type == "mixgen":
            return "Run MixGEN Code"
        elif file_type == "parquet":
            return "Run test.parquet"

    # Chat submit handler: streams the reply into the chatbot and clears the textbox
    def chat(message, history, fashion, uhd, mixgen, parquet, sys_msg, max_tok, temp, tp):
        history = (history or []) + [(message, "")]
        for partial in respond(
            message=message,
            history=history[:-1],
            fashion_file=fashion,
            uhd_file=uhd,
            mixgen_file=mixgen,
            parquet_file=parquet,
            system_message=sys_msg,
            max_tokens=max_tok,
            temperature=temp,
            top_p=tp,
        ):
            history[-1] = (message, partial)
            yield "", history

    # Event bindings
    msg.submit(
        chat,
        [msg, chatbot, fashion_file, uhd_file, mixgen_file, parquet_file, system_message, max_tokens, temperature, top_p],
        [msg, chatbot],
    )
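
    # The analysis buttons only pre-fill the message box with a trigger phrase;
    # the user still submits it to run the analysis.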
    fashion_analyze.click(lambda: analyze_file("fashion"), None, msg)
    uhd_analyze.click(lambda: analyze_file("uhd"), None, msg)
    mixgen_analyze.click(lambda: analyze_file("mixgen"), None, msg)
    parquet_analyze.click(lambda: analyze_file("parquet"), None, msg)

    # Example prompts
    gr.Examples(
        examples=[
            ["Explain how to use the service in detail, in at least 4,000 tokens, as if you were walking the user through the screen."],
            ["Write 20 detailed FAQ entries. Use at least 4,000 tokens."],
            ["Write at least 4,000 tokens in the form of a YouTube video script, focusing on how to use the service, its differentiators, features, and strengths."],
            ["Write an SEO-optimized blog post about this service, at least 4,000 tokens long."],
            ["Write it in the format of a patent application, covering the technical and business-model aspects that could be used for a patent filing."],
            ["Continue from the previous answer."],
        ],
        inputs=msg,
    )

if __name__ == "__main__":
    demo.launch()