Upload with huggingface_hub
- app.py +24 -33
- requirements.txt +2 -1
app.py
CHANGED
@@ -1,48 +1,39 @@
-template_file = "pal.pmpt.tpl"
-prompt = PalPrompt(backend.OpenAI()).chain(PyPrompt(backend.Python()))
-# result = prompt({"question": question})
+desc = """
+### Prompt-aided Language Models
+
+Chain for answering complex problems by code generation and execution. [[Code](https://github.com/srush/MiniChain/blob/main/examples/pal.py)]
+
+(Adapted from Prompt-aided Language Models [PAL](https://arxiv.org/pdf/2211.10435.pdf)).
+"""
+
+# $
+
+from minichain import prompt, show, OpenAI, Python
+
+@prompt(OpenAI(), template_file="pal.pmpt.tpl")
+def pal_prompt(model, question):
+    return model(dict(question=question))
+
+@prompt(Python())
+def python(model, inp):
+    return int(model(inp + "\nprint(solution())"))
+
+def pal(question):
+    return python(pal_prompt(question))
+
+# $
 
 question = "Melanie is a door-to-door saleswoman. She sold a third of her " \
     "vacuum cleaners at the green house, 2 more to the red house, and half of " \
     "what was left at the orange house. If Melanie has 5 vacuum cleaners left, " \
     "how many did she start with?"
 
-prompt.to_gradio(fields =["question"],
-    examples=[question]).launch()
-
-# {"question": "Joe has 10 cars and Bobby has 12. How many do they have together?"},
-# "def solution():\n\treturn 10 + 12",
-# )
-# # -
-
-# # + tags=["hide_inp"]
-# PyPrompt().show("def solution():\n\treturn 10 + 12", "22")
-# # -
-
-# # View the log.
-
-# minichain.show_log("pal.log")
+gradio = show(pal,
+              examples=[question],
+              subprompts=[pal_prompt, python],
+              description=desc,
+              code=open("pal.py", "r").read().split("$")[1].strip().strip("#").strip(),
+              )
+
+if __name__ == "__main__":
+    gradio.launch()
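
Note on the new chain: `pal_prompt` fills the `pal.pmpt.tpl` template (not part of this commit) and asks the model to write a `solution()` function; `python()` then appends `print(solution())` and executes the result, so the printed integer becomes the chain's answer. As an illustrative sketch only (assumed model output, not anything generated by the Space), the program the chain expects for the sample question would look roughly like this:

```python
# Hypothetical example of the kind of program the PAL prompt is expected to
# produce for the Melanie question; python() appends "print(solution())" and
# runs it, so the chain returns the printed value as an int.
def solution():
    # Work backwards from the 5 vacuum cleaners left over.
    before_orange = 5 * 2            # half were sold at the orange house -> 10 beforehand
    before_red = before_orange + 2   # 2 were sold at the red house -> 12 beforehand
    start = before_red * 3 // 2      # 12 is two thirds of the starting count -> 18
    return start

print(solution())  # prints 18: 18 - 6 (green) - 2 (red) - 5 (orange) = 5 left
```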
requirements.txt
CHANGED
@@ -1,3 +1,4 @@
-gradio
+gradio==3.21.0
 git+https://github.com/srush/minichain@gradio
 manifest-ml
+faiss-cpu