Tonic committed
Commit 58755ce · verified · 1 Parent(s): 582c792

Update app.py

Files changed (1)
  1. app.py +25 -1
app.py CHANGED
@@ -10,8 +10,30 @@ You can also use 🔮DeepSeekMath📉 by cloning this space. Simply click here:
 Join us : 🌟TeamTonic🌟 is always making cool demos! Join our active builder's 🛠️community 👻 [![Join us on Discord](https://img.shields.io/discord/1109943800132010065?label=Discord&logo=discord&style=flat-square)](https://discord.gg/GWpVpekp) On 🤗Huggingface: [TeamTonic](https://huggingface.co/TeamTonic) & [MultiTransformer](https://huggingface.co/MultiTransformer) Math with [introspector](https://huggingface.co/introspector) On 🌐Github: [Tonic-AI](https://github.com/tonic-ai) & contribute to🌟 [SciTonic](https://github.com/Tonic-AI/scitonic)🤗Big thanks to Yuvi Sharma and all the folks at huggingface for the community grant 🤗
 """

+
+LEAN4_DEFAULT_HEADER = """import Mathlib
+import Aesop
+
+set_option maxHeartbeats 0
+
+open BigOperators Real Nat Topology Rat
+
+"""
+
+
 additional_info_prompt = "Explain the above using mathematics, print entire answer in latex format:"

+
+def format_prompt(question, additional_info):
+    return f"""Complete the following Lean 4 code with explanatory comments preceding each line of code:
+
+```lean4
+{LEAN4_DEFAULT_HEADER}
+{question}
+{additional_info}
+"""
+
+
 unimath1 = """Goal:

 X : UU
@@ -144,7 +166,8 @@ def parse_final_answer(answer):

 @spaces.GPU
 def solve_math_problem(question, additional_info, max_tokens):
-    prompt = f"User: {question}\n{additional_info}.\nAssistant:"
+    # Format the prompt using Lean4 structure
+    prompt = format_prompt(question, additional_info)
     input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(model.device)
     outputs = model.generate(input_ids, max_length=max_tokens + input_ids.shape[1], pad_token_id=model.generation_config.pad_token_id)
     result = tokenizer.decode(outputs[0], skip_special_tokens=True)
@@ -153,6 +176,7 @@ def solve_math_problem(question, additional_info, max_tokens):
     return full_answer, final_answer


+
 def main():
     iface = gr.Interface(
         title="# 🙋🏻‍♂️Welcome to🌟Tonic's🔮Goedel Prover📉",