prithivMLmods committed on
Commit
ad773e5
β€’
1 Parent(s): 19bf7a8

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +115 -0
app.py ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from openai import OpenAI
3
+ import os
4
+ from fpdf import FPDF # For PDF conversion
5
+ from docx import Document # For DOCX conversion
6
+
7
# Page styling: widen the app container, center the title, hide the footer.
css = '''
.gradio-container{max-width: 1000px !important}
h1{text-align:center}
footer {
visibility: hidden
}
'''

# Hugging Face API token, read from the environment (set HF_TOKEN in the Space).
ACCESS_TOKEN = os.getenv("HF_TOKEN")

# OpenAI-compatible client pointed at the Hugging Face Inference API.
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1/",
    api_key=ACCESS_TOKEN,
)
21
+
22
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for *message*, yielding the growing reply.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list[tuple[str, str]]
        Prior (user, assistant) turns supplied by gr.ChatInterface.
    system_message : str
        System prompt prepended to the conversation.
    max_tokens, temperature, top_p
        Sampling parameters forwarded to the inference API.

    Yields
    ------
    str
        The accumulated assistant response after each streamed token.
    """
    messages = [{"role": "system", "content": system_message}]

    # Replay prior turns so the model sees the full conversation context.
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})

    messages.append({"role": "user", "content": message})

    response = ""

    # NOTE: the loop variable is named `chunk` (not `message`) so it no
    # longer shadows the user-message parameter.
    for chunk in client.chat.completions.create(
        model="meta-llama/Meta-Llama-3.1-8B-Instruct",
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
        messages=messages,
    ):
        token = chunk.choices[0].delta.content
        # The final streamed chunk carries delta.content=None; skip it
        # instead of raising TypeError on `str += None`.
        if token:
            response += token
            yield response
54
+
55
def save_as_file(input_text, output_text, conversion_type):
    """Write the conversation to disk in the requested format.

    Parameters
    ----------
    input_text : str
        Concatenated user queries.
    output_text : str
        Concatenated assistant responses.
    conversion_type : str
        One of "PDF", "DOCX", or "TXT".

    Returns
    -------
    str | None
        The name of the file written in the working directory, or
        None when *conversion_type* is not recognized.
    """
    if conversion_type == "PDF":
        pdf = FPDF()
        pdf.add_page()
        # NOTE(review): FPDF's core Arial font is Latin-1 only — non-ASCII
        # model output may fail here; confirm if that matters for this app.
        pdf.set_font("Arial", size=12)
        pdf.multi_cell(0, 10, f"User Query: {input_text}\n\nResponse: {output_text}")
        file_name = "output.pdf"
        pdf.output(file_name)
    elif conversion_type == "DOCX":
        doc = Document()
        doc.add_heading('Conversation', 0)
        doc.add_paragraph(f"User Query: {input_text}\n\nResponse: {output_text}")
        file_name = "output.docx"
        doc.save(file_name)
    elif conversion_type == "TXT":
        file_name = "output.txt"
        # Explicit encoding so non-ASCII model output round-trips on any
        # platform instead of depending on the locale's default codec.
        with open(file_name, "w", encoding="utf-8") as f:
            f.write(f"User Query: {input_text}\n\nResponse: {output_text}")
    else:
        return None

    return file_name
77
+
78
def convert_and_download(history, conversion_type):
    """Flatten the chat *history* and export it via save_as_file.

    Returns the path of the written file, or None when there is no
    history (or the conversion type is unknown).
    """
    if not history:
        return None

    user_lines = [f"User: {turn[0]}" for turn in history if turn[0]]
    assistant_lines = [f"Assistant: {turn[1]}" for turn in history if turn[1]]

    return save_as_file(
        "\n".join(user_lines),
        "\n".join(assistant_lines),
        conversion_type,
    )
87
+
88
# Chat UI.  Every entry in additional_inputs is passed to respond() as an
# extra positional argument, so the list must match respond's signature
# one-for-one.  The previous Dropdown and Button entries were removed:
# respond() accepts only these four extras (their values would raise a
# TypeError on every message), and gr.Button is not an input component.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-P",
        ),
    ],
    css=css,
    theme="allenai/gradio-theme",
)
107
+
108
def on_convert_and_download(history, conversion_type):
    """Event-handler shim: delegate to convert_and_download and return the path."""
    return convert_and_download(history, conversion_type)
111
+
112
# Launch only when run as a script.  The previous unconditional
# demo.launch(on_event={...}) call was removed: launch() has no on_event
# parameter (passing it raises TypeError — event wiring belongs in a
# gr.Blocks layout with button.click(...)), and it also started the app
# a second time ahead of this guard.
if __name__ == "__main__":
    demo.launch()