prithivMLmods committed on
Commit
b0f6b9b
β€’
1 Parent(s): ad0387f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -91
app.py CHANGED
@@ -1,21 +1,17 @@
1
  import gradio as gr
2
  from openai import OpenAI
3
  import os
4
- from io import BytesIO
5
- from reportlab.lib.pagesizes import letter
6
- from reportlab.pdfgen import canvas
7
  from docx import Document
8
 
9
- # Custom CSS
10
  css = '''
11
- .gradio-container{max-width: 1500px !important}
12
  h1{text-align:center}
13
  footer {
14
  visibility: hidden
15
  }
16
  '''
17
 
18
- # Set up OpenAI client
19
  ACCESS_TOKEN = os.getenv("HF_TOKEN")
20
 
21
  client = OpenAI(
@@ -23,7 +19,6 @@ client = OpenAI(
23
  api_key=ACCESS_TOKEN,
24
  )
25
 
26
- # Function to handle chat responses
27
  def respond(
28
  message,
29
  history: list[tuple[str, str]],
@@ -44,7 +39,7 @@ def respond(
44
 
45
  response = ""
46
 
47
- for message in client.chat.completions.create(
48
  model="meta-llama/Meta-Llama-3.1-8B-Instruct",
49
  max_tokens=max_tokens,
50
  stream=True,
@@ -53,99 +48,53 @@ def respond(
53
  messages=messages,
54
  ):
55
  token = message.choices[0].delta.content
 
56
  response += token
57
  yield response
58
 
59
- # Function to save chat history to a text file
60
- def save_as_txt(history):
61
- file_path = "chat_history.txt"
62
- with open(file_path, "w") as f:
 
 
63
  for user_message, assistant_message in history:
64
- f.write(f"User: {user_message}\n")
65
- f.write(f"Assistant: {assistant_message}\n")
66
- return file_path
67
-
68
- # Function to save chat history to a DOCX file
69
- def save_as_docx(history):
70
- file_path = "chat_history.docx"
71
- doc = Document()
72
- doc.add_heading('Chat History', 0)
73
-
74
- for user_message, assistant_message in history:
75
- doc.add_paragraph(f"User: {user_message}")
76
- doc.add_paragraph(f"Assistant: {assistant_message}")
77
-
78
- doc.save(file_path)
79
- return file_path
80
-
81
- # Function to save chat history to a PDF file
82
- def save_as_pdf(history):
83
- file_path = "chat_history.pdf"
84
- buffer = BytesIO()
85
- c = canvas.Canvas(buffer, pagesize=letter)
86
- width, height = letter
87
- y = height - 40
88
-
89
- c.drawString(30, y, "Chat History")
90
- y -= 30
91
-
92
- for user_message, assistant_message in history:
93
- c.drawString(30, y, f"User: {user_message}")
94
- y -= 20
95
- c.drawString(30, y, f"Assistant: {assistant_message}")
96
- y -= 30
97
 
98
- if y < 40:
99
- c.showPage()
100
- y = height - 40
101
-
102
- c.save()
103
- buffer.seek(0)
104
-
105
- with open(file_path, "wb") as f:
106
- f.write(buffer.read())
107
-
108
- return file_path
109
-
110
- # Function to handle file saving based on format
111
- def handle_file_save(history, file_format):
112
- if file_format == "txt":
113
- return save_as_txt(history)
114
- elif file_format == "docx":
115
- return save_as_docx(history)
116
- elif file_format == "pdf":
117
- return save_as_pdf(history)
118
- return None
119
 
120
- # Handler function for Gradio app
121
- def save_handler(message, history, system_message, max_tokens, temperature, top_p, file_format):
122
- new_history = history + [(message, next(respond(message, history, system_message, max_tokens, temperature, top_p)))]
123
- saved_file = handle_file_save(new_history, file_format)
124
- return saved_file, new_history
125
 
126
- # Gradio interface
127
- demo = gr.Interface(
128
- fn=save_handler,
129
- inputs=[
130
- gr.Textbox(value="", label="Type a message..", lines=5),
131
- gr.State([]), # Initialize state as an empty list
132
- gr.Textbox(value="", label="System message", visible=False),
133
  gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
134
  gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
135
- gr.Slider(
136
- minimum=0.1,
137
- maximum=1.0,
138
- value=0.95,
139
- step=0.05,
140
- label="Top-P",
141
- ),
142
- gr.Dropdown(
143
- choices=["txt", "docx", "pdf"],
144
- label="Save as",
145
- value="pdf",
146
- ),
147
  ],
148
- outputs=[gr.File(label="Download Chat History"), gr.State()],
149
  css=css,
150
  theme="allenai/gradio-theme",
151
  )
 
1
  import gradio as gr
2
  from openai import OpenAI
3
  import os
4
+ from fpdf import FPDF
 
 
5
  from docx import Document
6
 
 
7
  css = '''
8
+ .gradio-container{max-width: 1000px !important}
9
  h1{text-align:center}
10
  footer {
11
  visibility: hidden
12
  }
13
  '''
14
 
 
15
  ACCESS_TOKEN = os.getenv("HF_TOKEN")
16
 
17
  client = OpenAI(
 
19
  api_key=ACCESS_TOKEN,
20
  )
21
 
 
22
  def respond(
23
  message,
24
  history: list[tuple[str, str]],
 
39
 
40
  response = ""
41
 
42
+ for message in client.chat.completions.create(
43
  model="meta-llama/Meta-Llama-3.1-8B-Instruct",
44
  max_tokens=max_tokens,
45
  stream=True,
 
48
  messages=messages,
49
  ):
50
  token = message.choices[0].delta.content
51
+
52
  response += token
53
  yield response
54
 
55
def save_to_file(history, file_format):
    """Persist the chat history to disk in the requested format.

    Args:
        history: list of (user_message, assistant_message) tuples.
        file_format: "PDF", "DOCX", or "TXT" (case-sensitive, matching the
            gr.Radio choices).

    Returns:
        The name of the file written, or None when file_format is not one
        of the recognised values.  (The original fell through to
        ``return file_name`` with the local unbound, raising NameError.)
    """
    # Guard against an unrecognised format: without this, `file_name` is
    # never assigned and the final return raises NameError.
    file_name = None

    if file_format == "PDF":
        pdf = FPDF()
        pdf.add_page()
        pdf.set_auto_page_break(auto=True, margin=15)
        pdf.set_font("Arial", size=12)
        # multi_cell wraps long messages instead of overflowing the page.
        for user_message, assistant_message in history:
            pdf.multi_cell(0, 10, f"User: {user_message}")
            pdf.multi_cell(0, 10, f"Assistant: {assistant_message}")
        file_name = "chat_history.pdf"
        pdf.output(file_name)

    elif file_format == "DOCX":
        doc = Document()
        for user_message, assistant_message in history:
            doc.add_paragraph(f"User: {user_message}")
            doc.add_paragraph(f"Assistant: {assistant_message}")
        file_name = "chat_history.docx"
        doc.save(file_name)

    elif file_format == "TXT":
        file_name = "chat_history.txt"
        with open(file_name, "w") as file:
            for user_message, assistant_message in history:
                file.write(f"User: {user_message}\n")
                file.write(f"Assistant: {assistant_message}\n")

    return file_name
 
 
 
 
 
83
 
84
def save_conversation(history, file_format):
    """Delegate to save_to_file and hand back the resulting file name."""
    return save_to_file(history, file_format)
 
 
87
 
88
# Chat UI.
#
# NOTE(review): the committed version could not start:
#   * `maximum 1.0` in the Top-P slider was missing `=` (SyntaxError);
#   * `gr.ChatInterface` has no `button_fn` keyword, so construction
#     raised TypeError;
#   * the "Save As" gr.Radio was listed in `additional_inputs`, but every
#     additional input is forwarded to `respond`, whose signature
#     (message, history, system_message, max_tokens, temperature, top_p)
#     has no slot for it — the first message would have crashed.
# The radio and button_fn are removed so the app launches; wiring
# `save_conversation` to a download button requires a gr.Blocks layout.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P"),
    ],
    css=css,
    theme="allenai/gradio-theme",
)