fantos committed on
Commit
ee30c79
·
verified ·
1 Parent(s): a7c8ed0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -21
app.py CHANGED
@@ -5,6 +5,7 @@ from typing import Iterator
5
  from PIL import Image
6
  import base64
7
  from PyPDF2 import PdfReader
 
8
 
9
  API_KEY = os.getenv("TOGETHER_API_KEY")
10
  if not API_KEY:
@@ -33,9 +34,28 @@ def process_file(file) -> str:
33
  st.error(f"파일 처리 쀑 였λ₯˜ λ°œμƒ: {str(e)}")
34
  return ""
35
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  def generate_response(
37
  message: str,
38
- history: list[dict], # νžˆμŠ€ν† λ¦¬ ν˜•μ‹ λ³€κ²½
39
  system_message: str,
40
  max_tokens: int,
41
  temperature: float,
@@ -50,13 +70,11 @@ def generate_response(
50
 
51
  # μ‹œμŠ€ν…œ λ©”μ‹œμ§€ μΆ”κ°€
52
  if system_message.strip():
53
- messages.append({
54
- "role": "system",
55
- "content": system_message
56
- })
57
 
58
- # λŒ€ν™” νžˆμŠ€ν† λ¦¬ μΆ”κ°€ - ν˜•μ‹ μˆ˜μ •
59
- messages.extend(history)
 
60
 
61
  # ν˜„μž¬ λ©”μ‹œμ§€μ™€ 파일 λ‚΄μš© μ€€λΉ„
62
  current_content = message
@@ -70,10 +88,10 @@ def generate_response(
70
  current_content = current_content + "\n\n" + "\n\n".join(file_contents)
71
 
72
  # ν˜„μž¬ λ©”μ‹œμ§€ μΆ”κ°€
73
- messages.append({
74
- "role": "user",
75
- "content": current_content
76
- })
77
 
78
  # API μš”μ²­
79
  try:
@@ -94,11 +112,11 @@ def generate_response(
94
  if "rate limit" in str(e).lower():
95
  yield "API 호좜 ν•œλ„μ— λ„λ‹¬ν–ˆμŠ΅λ‹ˆλ‹€. μž μ‹œ ν›„ λ‹€μ‹œ μ‹œλ„ν•΄μ£Όμ„Έμš”."
96
  else:
97
- st.error(f"API 였λ₯˜ 상세: {str(e)}") # 디버깅을 μœ„ν•œ 였λ₯˜ 좜λ ₯
98
  yield "μ£„μ†‘ν•©λ‹ˆλ‹€. μž μ‹œ ν›„ λ‹€μ‹œ μ‹œλ„ν•΄μ£Όμ„Έμš”."
99
 
100
  except Exception as e:
101
- st.error(f"전체 였λ₯˜ 상세: {str(e)}") # 디버깅을 μœ„ν•œ 였λ₯˜ 좜λ ₯
102
  yield "였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€. μž μ‹œ ν›„ λ‹€μ‹œ μ‹œλ„ν•΄μ£Όμ„Έμš”."
103
 
104
  def main():
@@ -107,9 +125,6 @@ def main():
107
  # μ„Έμ…˜ μƒνƒœ μ΄ˆκΈ°ν™”
108
  if "messages" not in st.session_state:
109
  st.session_state.messages = []
110
-
111
- if "conversation_history" not in st.session_state: # μƒˆλ‘œμš΄ λŒ€ν™” νžˆμŠ€ν† λ¦¬ μ €μž₯μ†Œ
112
- st.session_state.conversation_history = []
113
 
114
  st.title("DeepSeek μ±„νŒ…")
115
  st.markdown("DeepSeek AI λͺ¨λΈκ³Ό λŒ€ν™”ν•˜μ„Έμš”. ν•„μš”ν•œ 경우 νŒŒμΌμ„ μ—…λ‘œλ“œν•  수 μžˆμŠ΅λ‹ˆλ‹€.")
@@ -138,8 +153,8 @@ def main():
138
  # μ±„νŒ… μž…λ ₯
139
  if prompt := st.chat_input("무엇을 μ•Œκ³  μ‹ΆμœΌμ‹ κ°€μš”?"):
140
  # μ‚¬μš©μž λ©”μ‹œμ§€ μΆ”κ°€
141
- st.session_state.messages.append({"role": "user", "content": prompt})
142
- st.session_state.conversation_history.append({"role": "user", "content": prompt})
143
 
144
  with st.chat_message("user"):
145
  st.markdown(prompt)
@@ -152,7 +167,7 @@ def main():
152
  # generate_response 호좜
153
  for response_chunk in generate_response(
154
  prompt,
155
- st.session_state.conversation_history, # μˆ˜μ •λœ νžˆμŠ€ν† λ¦¬ 전달
156
  system_message,
157
  max_tokens,
158
  temperature,
@@ -165,8 +180,8 @@ def main():
165
  response_placeholder.markdown(full_response)
166
 
167
  # 응닡 μ €μž₯
168
- st.session_state.messages.append({"role": "assistant", "content": full_response})
169
- st.session_state.conversation_history.append({"role": "assistant", "content": full_response})
170
 
171
  if __name__ == "__main__":
172
  main()
 
5
  from PIL import Image
6
  import base64
7
  from PyPDF2 import PdfReader
8
+ import json # λ””λ²„κΉ…μš© μΆ”κ°€
9
 
10
  API_KEY = os.getenv("TOGETHER_API_KEY")
11
  if not API_KEY:
 
34
  st.error(f"파일 처리 쀑 였λ₯˜ λ°œμƒ: {str(e)}")
35
  return ""
36
 
37
def format_message(role: str, content: str) -> dict:
    """Build one chat message in the API's expected shape.

    Args:
        role: Speaker role ("system", "user", or "assistant").
        content: Message text.

    Returns:
        A dict with exactly the "role" and "content" keys.
    """
    return dict(role=role, content=content)

def get_formatted_history(messages: list) -> list:
    """Normalize a conversation history into the API message format.

    Entries that are not dicts, or that lack a "role" or "content" key,
    are silently dropped. Roles outside the accepted set are coerced:
    "human" becomes "user"; any other unrecognized role is treated as
    "assistant".

    Args:
        messages: Raw history entries (expected: dicts with role/content).

    Returns:
        A new list of normalized message dicts, in original order.
    """
    accepted = ("system", "user", "assistant")
    normalized = []
    for entry in messages:
        # Guard clauses: skip anything that is not a well-formed message.
        if not isinstance(entry, dict):
            continue
        if "role" not in entry or "content" not in entry:
            continue
        role = entry["role"]
        if role not in accepted:
            role = "user" if role == "human" else "assistant"
        normalized.append(format_message(role, entry["content"]))
    return normalized
55
+
56
  def generate_response(
57
  message: str,
58
+ history: list,
59
  system_message: str,
60
  max_tokens: int,
61
  temperature: float,
 
70
 
71
  # μ‹œμŠ€ν…œ λ©”μ‹œμ§€ μΆ”κ°€
72
  if system_message.strip():
73
+ messages.append(format_message("system", system_message))
 
 
 
74
 
75
+ # λŒ€ν™” νžˆμŠ€ν† λ¦¬ μΆ”κ°€
76
+ formatted_history = get_formatted_history(history)
77
+ messages.extend(formatted_history)
78
 
79
  # ν˜„μž¬ λ©”μ‹œμ§€μ™€ 파일 λ‚΄μš© μ€€λΉ„
80
  current_content = message
 
88
  current_content = current_content + "\n\n" + "\n\n".join(file_contents)
89
 
90
  # ν˜„μž¬ λ©”μ‹œμ§€ μΆ”κ°€
91
+ messages.append(format_message("user", current_content))
92
+
93
+ # 디버깅: API μš”μ²­ λ‚΄μš© 좜λ ₯
94
+ st.write("API μš”μ²­ λ©”μ‹œμ§€:", json.dumps(messages, ensure_ascii=False, indent=2))
95
 
96
  # API μš”μ²­
97
  try:
 
112
  if "rate limit" in str(e).lower():
113
  yield "API 호좜 ν•œλ„μ— λ„λ‹¬ν–ˆμŠ΅λ‹ˆλ‹€. μž μ‹œ ν›„ λ‹€μ‹œ μ‹œλ„ν•΄μ£Όμ„Έμš”."
114
  else:
115
+ st.error(f"API 였λ₯˜ 상세: {str(e)}")
116
  yield "μ£„μ†‘ν•©λ‹ˆλ‹€. μž μ‹œ ν›„ λ‹€μ‹œ μ‹œλ„ν•΄μ£Όμ„Έμš”."
117
 
118
  except Exception as e:
119
+ st.error(f"전체 였λ₯˜ 상세: {str(e)}")
120
  yield "였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€. μž μ‹œ ν›„ λ‹€μ‹œ μ‹œλ„ν•΄μ£Όμ„Έμš”."
121
 
122
  def main():
 
125
  # μ„Έμ…˜ μƒνƒœ μ΄ˆκΈ°ν™”
126
  if "messages" not in st.session_state:
127
  st.session_state.messages = []
 
 
 
128
 
129
  st.title("DeepSeek μ±„νŒ…")
130
  st.markdown("DeepSeek AI λͺ¨λΈκ³Ό λŒ€ν™”ν•˜μ„Έμš”. ν•„μš”ν•œ 경우 νŒŒμΌμ„ μ—…λ‘œλ“œν•  수 μžˆμŠ΅λ‹ˆλ‹€.")
 
153
  # μ±„νŒ… μž…λ ₯
154
  if prompt := st.chat_input("무엇을 μ•Œκ³  μ‹ΆμœΌμ‹ κ°€μš”?"):
155
  # μ‚¬μš©μž λ©”μ‹œμ§€ μΆ”κ°€
156
+ user_message = format_message("user", prompt)
157
+ st.session_state.messages.append(user_message)
158
 
159
  with st.chat_message("user"):
160
  st.markdown(prompt)
 
167
  # generate_response 호좜
168
  for response_chunk in generate_response(
169
  prompt,
170
+ st.session_state.messages,
171
  system_message,
172
  max_tokens,
173
  temperature,
 
180
  response_placeholder.markdown(full_response)
181
 
182
  # 응닡 μ €μž₯
183
+ assistant_message = format_message("assistant", full_response)
184
+ st.session_state.messages.append(assistant_message)
185
 
186
  if __name__ == "__main__":
187
  main()