import gradio as gr
from huggingface_hub import InferenceClient, HfApi
import os
import requests
import pandas as pd
import json
# Check that the Hugging Face token is available
hf_token = os.getenv("HF_TOKEN")
if not hf_token:
    raise ValueError("The HF_TOKEN environment variable is not set.")
# Initialize the model client
api = HfApi(token=hf_token)
try:
    client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct", token=hf_token)
except Exception as e:
    print(f"Error initializing InferenceClient: {e}")
    # Fall back to another model or handle the error here.
    # e.g. client = InferenceClient("gpt2", token=hf_token)
# Resolve paths relative to the directory of the current script
current_dir = os.path.dirname(os.path.abspath(__file__))
csv_path = os.path.join(current_dir, 'prompts.csv')
data_path = os.path.join(current_dir, 'newdataset.parquet')

# Load the prompt CSV and the dataset
prompts_df = pd.read_csv(csv_path)
data_df = pd.read_parquet(data_path)
def get_prompt(act):
    # Look up the row whose 'act' column matches the user input
    matching_prompt = prompts_df[prompts_df['act'] == act]['prompt'].values
    return matching_prompt[0] if len(matching_prompt) > 0 else None
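
# Hypothetical illustration of the prompts.csv layout that get_prompt()
# assumes: an 'act' column used as the lookup key and a 'prompt' column
# holding the canned reply. The example row is invented:
#   act,prompt
#   "greeting","Hello! How can I help you today?"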
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # Select a prompt based on the user input
    prompt = get_prompt(message)
    if prompt:
        response = prompt  # Return the prompt found in the CSV directly
    else:
        system_prefix = """
You are a chatbot. Provide friendly and accurate answers to every question.
If you cannot find an answer to a question, please offer a suitable alternative.
"""
        full_prompt = f"{system_prefix} {system_message}\n\n"
        for user, assistant in history:
            full_prompt += f"Human: {user}\nAI: {assistant}\n"
        full_prompt += f"Human: {message}\nAI:"
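
        # For illustration, the assembled full_prompt has this shape:
        #   <system_prefix> <system_message>
        #
        #   Human: <first user turn>
        #   AI: <first assistant turn>
        #   Human: <current message>
        #   AI: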
        API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-70B-Instruct"
        headers = {"Authorization": f"Bearer {hf_token}"}

        def query(payload):
            response = requests.post(API_URL, headers=headers, json=payload)
            return response.text  # Return the raw response text
        try:
            payload = {
                "inputs": full_prompt,
                "parameters": {
                    "max_new_tokens": max_tokens,
                    "temperature": temperature,
                    "top_p": top_p,
                    "return_full_text": False,
                },
            }
            raw_response = query(payload)
            print("Raw API response:", raw_response)  # Print the raw response for debugging
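
            # A successful text-generation response from the Inference API is
            # a JSON list like [{"generated_text": "..."}]; the parsing below
            # expects exactly that shape and reports anything else as an error.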
            try:
                output = json.loads(raw_response)
                if isinstance(output, list) and len(output) > 0 and "generated_text" in output[0]:
                    response = output[0]["generated_text"]
                else:
                    response = f"Unexpected response format: {output}"
            except json.JSONDecodeError:
                response = f"JSON decoding error. Raw response: {raw_response}"
        except Exception as e:
            print(f"Error: an exception occurred while generating the response: {str(e)}")
            response = f"Sorry, an error occurred while generating the response: {str(e)}"

    return response
demo = gr.ChatInterface(
    respond,
    title="My Chatbot",
    description="This is my chatbot!",
    additional_inputs=[
        gr.Textbox(
            value="""
You are a chatbot. Provide friendly and accurate answers to every question.
If you cannot find an answer to a question, please offer a suitable alternative.
""",
            label="System prompt",
        ),
        gr.Slider(minimum=1, maximum=4000, value=2000, step=1, label="Max tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=1.0, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="top-p (nucleus sampling)",
        ),
    ],
    examples=[
        ["Hello"],
        ["Continue writing from where you left off"],
    ],
    cache_examples=False,
)
if __name__ == "__main__":
    demo.launch()