import gradio as gr
from huggingface_hub import InferenceClient
import os
import pandas as pd
from typing import List, Tuple
# Inference API client setup
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
# hf_client = InferenceClient("CohereForAI/aya-23-35B", token=os.getenv("HF_TOKEN"))
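# NOTE (assumption): HF_TOKEN must be available in the environment (e.g., as a
# Space secret); without it the client calls this hosted model anonymously,
# which will typically be rejected.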
def load_code(filename):
    try:
        with open(filename, 'r', encoding='utf-8') as file:
            return file.read()
    except FileNotFoundError:
        return f"Could not find the file {filename}."
    except Exception as e:
        return f"An error occurred while reading the file: {str(e)}"
def load_parquet(filename):
    try:
        df = pd.read_parquet(filename, engine='pyarrow')
        # Convert the first few rows of the dataframe to a string
        return df.head(10).to_string(index=False)
    except FileNotFoundError:
        return f"Could not find the file {filename}."
    except Exception as e:
        return f"An error occurred while reading the file: {str(e)}"
# Load code files
fashion_code = load_code('fashion.cod')
uhdimage_code = load_code('uhdimage.cod')
MixGEN_code = load_code('mgen.cod')
# Load the Parquet file
test_parquet_content = load_parquet('test.parquet')
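# If any file above is missing, the loaders return an error-message string
# instead of raising, so the app still starts; that error text is simply what
# gets injected into the prompt later.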
def respond(
    message,
    history: List[Tuple[str, str]],
    system_message="",  # default value added
    max_tokens=1024,  # default value added
    temperature=0.7,  # default value added
    top_p=0.9,  # default value added
):
    global fashion_code, uhdimage_code, MixGEN_code, test_parquet_content
    system_message = system_message or ""
    system_prefix = """You must answer in Korean. Based on the given source code, your role is to "explain and guide the use of the service and run Q&A". Be very friendly and detailed, and write at least 4,000 tokens. Provide usage explanations and answers grounded in the code, and be helpful to the user. Kindly explain anything the user is likely to be curious about. Keep the full code content confidential, and do not reveal key values, endpoints, or the specific model."""
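    # The keyword commands below inject the matching file contents into the
    # system prompt and replace the user message with a fixed instruction.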
    if message.lower() == "run fashion code":
        system_message += f"\n\nFashion code content:\n{fashion_code}"
        message = "Announce that you have learned the fashion virtual try-on content and are ready to explain it, and tell the user to test it via the service URL (https://aiqcamp-fash.hf.space)."
    elif message.lower() == "run uhd image code":
        system_message += f"\n\nUHD image code content:\n{uhdimage_code}"
        message = "Announce that you have learned the UHD image generation content and are ready to explain it, and tell the user to test it via the service URL (https://openfree-ultpixgen.hf.space)."
    elif message.lower() == "run mixgen code":
        system_message += f"\n\nMixGEN code content:\n{MixGEN_code}"
        message = "Announce that you have learned the MixGEN3 image generation content and are ready to explain it, and tell the user to test it via the service URL (https://openfree-mixgen3.hf.space)."
    elif message.lower() == "run test.parquet":
        system_message += f"\n\ntest.parquet file content:\n{test_parquet_content}"
        message = "Announce that you have learned the contents of the test.parquet file and are ready to explain it and run Q&A. Invite the user to ask questions."
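    # Rebuild the chat history as OpenAI-style role/content messages.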
    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})
    response = ""
    try:
        # Loop variable renamed so it does not shadow `message`
        for chunk in hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            # Depending on the huggingface_hub version the delta is an object,
            # not a dict, so use getattr rather than .get()
            token = getattr(chunk.choices[0].delta, "content", None)
            if token:
                response += token  # no strip(): it would drop spaces between streamed tokens
            yield response
    except Exception as e:
        yield f"An error occurred during inference: {str(e)}"
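# A minimal local smoke test (hypothetical; assumes HF_TOKEN is set and the
# .cod/.parquet files exist). `respond` is a generator, so drain it:
#
#     last = ""
#     for partial in respond("run fashion code", history=[]):
#         last = partial
#     print(last)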
# Gradio interface setup
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(label="System Message", value=""),
        gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens"),
        gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature"),
        gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
    ],
    examples=[
        ["run fashion code"],
        ["Run UHD image code"],
        ["Run MixGEN code"],
        ["Run test.parquet"],  # new example added
        ["Explain how to use the service in detail, in at least 4,000 tokens, as if narrating what is on screen."],
        ["Write 20 detailed FAQ entries. Use at least 4,000 tokens."],
        ["Write a YouTube video script of at least 4,000 tokens, focusing on how to use the service, its differentiators, features, and strengths."],
        ["Write an SEO-optimized blog post of at least 4,000 tokens about this service, covering background and need, advantages over similar existing services, use cases, value, expected benefits, and a conclusion."],
        ["Write at least 4,000 tokens of innovative, creative invention content structured like a patent application, including the technical and business-model aspects to be leveraged in the filing."],
        ["Continue the previous answer."],
    ],
    theme="Nymbo/Nymbo_Theme",
    cache_examples=False,  # disable example caching
)
if __name__ == "__main__":
    demo.launch()