ginipick committed
Commit 90e9b67 • 1 Parent(s): a268d0e

Update app.py

Files changed (1)
  1. app.py +44 -26
app.py CHANGED
@@ -1,12 +1,12 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 import os
-import requests
+import pandas as pd
 from typing import List, Tuple
 
 # 추론 API 클라이언트 설정
 hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
-#hf_client = InferenceClient("CohereForAI/aya-23-35B", token=os.getenv("HF_TOKEN"))
+# hf_client = InferenceClient("CohereForAI/aya-23-35B", token=os.getenv("HF_TOKEN"))
 
 def load_code(filename):
     try:
@@ -17,21 +17,35 @@ def load_code(filename):
     except Exception as e:
         return f"파일을 읽는 중 오류가 발생했습니다: {str(e)}"
 
+def load_parquet(filename):
+    try:
+        df = pd.read_parquet(filename, engine='pyarrow')
+        # 데이터프레임의 첫 몇 행을 문자열로 변환
+        return df.head(10).to_string(index=False)
+    except FileNotFoundError:
+        return f"{filename} 파일을 찾을 수 없습니다."
+    except Exception as e:
+        return f"파일을 읽는 중 오류가 발생했습니다: {str(e)}"
+
+# 코드 파일 로드
 fashion_code = load_code('fashion.cod')
 uhdimage_code = load_code('uhdimage.cod')
 MixGEN_code = load_code('mgen.cod')
 
+# Parquet 파일 로드
+test_parquet_content = load_parquet('test.parquet')
+
 def respond(
     message,
     history: List[Tuple[str, str]],
     system_message="", # 기본값 추가
-    max_tokens=1024, # 기본값 추가
-    temperature=0.7, # 기본값 추가
-    top_p=0.9, # 기본값 추가
+    max_tokens=1024, # 기본값 추가
+    temperature=0.7, # 기본값 추가
+    top_p=0.9, # 기본값 추가
 ):
-    global fashion_code, uhdimage_code, MixGEN_code
+    global fashion_code, uhdimage_code, MixGEN_code, test_parquet_content
     system_message = system_message or ""
-    system_prefix = """반드시 한글로 답변할것. 너는 주어진 소스코드를 기반으로 \"서비스 사용 설명 및 안내, qna를 하는 역할이다\". 아주 친절하고 자세하게 4000토큰 이상 작성하라. 너는 코드를 기반으로 사용 설명 및 질의 응답을 진행하며, 이용자에게 도움을 주어야 한다. 이용자가 궁금해 할 만한 내용에 친절하게 알려주도록 하라. 코드 전체 내용에 대해서는 보안을 유지하고, 키 값 및 엔드포인트와 구체적인 모델은 공개하지 마라. """
+    system_prefix = """반드시 한글로 답변할것. 너는 주어진 소스코드를 기반으로 \"서비스 사용 설명 및 안내, Q&A를 하는 역할이다\". 아주 친절하고 자세하게 4000토큰 이상 작성하라. 너는 코드를 기반으로 사용 설명 및 질의 응답을 진행하며, 이용자에게 도움을 주어야 한다. 이용자가 궁금해 할 만한 내용에 친절하게 알려주도록 하라. 코드 전체 내용에 대해서는 보안을 유지하고, 키 값 및 엔드포인트와 구체적인 모델은 공개하지 마라."""
 
     if message.lower() == "패션 코드 실행":
         system_message += f"\n\n패션 코드 내용:\n{fashion_code}"
@@ -42,6 +56,9 @@ def respond(
     elif message.lower() == "mixgen 코드 실행":
         system_message += f"\n\nMixGEN 코드 내용:\n{MixGEN_code}"
         message = "MixGEN3 이미지 생성에 대한 내용을 학습하였고, 설명할 준비가 되어있다고 알리고 서비스 URL(https://openfree-mixgen3.hf.space)을 통해 테스트 해보라고 출력하라."
+    elif message.lower() == "test.parquet 실행":
+        system_message += f"\n\ntest.parquet 파일 내용:\n{test_parquet_content}"
+        message = "test.parquet 파일에 대한 내용을 학습하였고, 관련 설명 및 Q&A를 진행할 준비가 되어있다. 궁금한 점이 있으면 물어보라."
 
     messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
     for val in history:
@@ -52,19 +69,20 @@ def respond(
     messages.append({"role": "user", "content": message})
 
     response = ""
-    for message in hf_client.chat_completion(
-        messages,
-        max_tokens=max_tokens,
-        stream=True,
-        temperature=temperature,
-        top_p=top_p,
-    ):
-        token = message.choices[0].delta.get('content', None)
-        if token:
-            response += token.strip("")
-            yield response
-
-
+    try:
+        for message in hf_client.chat_completion(
+            messages,
+            max_tokens=max_tokens,
+            stream=True,
+            temperature=temperature,
+            top_p=top_p,
+        ):
+            token = message.choices[0].delta.get('content', None)
+            if token:
+                response += token.strip()
+                yield response
+    except Exception as e:
+        yield f"추론 중 오류가 발생했습니다: {str(e)}"
 
 # Gradio 인터페이스 설정 부분도 수정
 demo = gr.ChatInterface(
@@ -78,18 +96,18 @@ demo = gr.ChatInterface(
     examples=[
         ["패션 코드 실행"],
         ["UHD 이미지 코드 실행"],
-        ["MixGEN 코드 실행"],
+        ["MixGEN 코드 실행"],
+        ["test.parquet 실행"], # 새로운 예제 추가
        ["상세한 사용 방법을 마치 화면을 보면서 설명하듯이 4000 토큰 이상 자세히 설명하라"],
-        ["FAQ 20건을 상세하게 작성하라. 4000토큰 이상 사용하라."],
+        ["FAQ 20건을 상세하게 작성하라. 4000토큰 이상 사용하라."],
         ["사용 방법과 차별점, 특징, 강점을 중심으로 4000 토큰 이상 유튜브 영상 스크립트 형태로 작성하라"],
         ["본 서비스를 SEO 최적화하여 블로그 포스트(배경 및 필요성, 기존 유사 서비스와 비교하여 특장점, 활용처, 가치, 기대효과, 결론을 포함)로 4000 토큰 이상 작성하라"],
-        ["특허 출원에 활용할 기술 및 비즈니스모델 측면을 포함하여 특허 출원서 구성에 맞게 혁신적인 창의 발명 내용을 중심으로 4000토큰 이상 작성하라."],
+        ["특허 출원에 활용할 기술 및 비즈니스모델 측면을 포함하여 특허 출원서 구성에 맞게 혁신적인 창의 발명 내용을 중심으로 4000 토큰 이상 작성하라."],
         ["계속 이어서 답변하라"],
     ],
-
-    theme="Nymbo/Nymbo_Theme",
+    theme="Nymbo/Nymbo_Theme",
     cache_examples=False, # 캐싱 비활성화 설정
 )
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
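For anyone who wants to exercise the new load_parquet path outside the Space, the snippet below is a minimal sketch (not part of the commit): it assumes pandas and pyarrow are installed, writes a small made-up test.parquet, and prints the same head(10).to_string(index=False) preview that the commit injects into the system prompt.

    # Minimal local sketch; the sample data is invented and only illustrates the format.
    import pandas as pd

    def load_parquet(filename):
        # Mirrors the helper added in this commit: read the parquet file and
        # return its first rows as a plain string suitable for a prompt.
        try:
            df = pd.read_parquet(filename, engine='pyarrow')
            return df.head(10).to_string(index=False)
        except FileNotFoundError:
            return f"{filename} not found."
        except Exception as e:
            return f"Error while reading the file: {str(e)}"

    if __name__ == "__main__":
        # Made-up two-row table; the real test.parquet in the repository may differ.
        sample = pd.DataFrame({
            "question": ["What does this Space do?", "How do I try MixGEN3?"],
            "answer": ["It explains the bundled source code.", "Open https://openfree-mixgen3.hf.space and test it."],
        })
        sample.to_parquet("test.parquet", engine="pyarrow", index=False)
        print(load_parquet("test.parquet"))

Because load_parquet returns an error string instead of raising, a missing or unreadable test.parquet does not stop the Space from starting; the error text simply ends up in the chat context.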