WilliamGazeley committed
Commit ecd63b4 · 1 Parent(s): 7392cc1

Move to streamlit

Files changed (3)
  1. README.md +3 -2
  2. app.py +42 -0
  3. requirements.txt +5 -0
README.md CHANGED
@@ -3,9 +3,10 @@ title: LLM ADE Dev
  emoji: 🔥
  colorFrom: pink
  colorTo: red
- sdk: docker
+ sdk_version: 1.33.0
+ app_file: app.py
  pinned: false
- license: unknown
+ license: apache-2.0
  ---

  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
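
For context, Hugging Face Spaces read this YAML front matter to decide how the Space is built and served. A minimal sketch of what the full header plausibly looks like after this commit; note that the sdk: streamlit line is an assumption, since the replacement for sdk: docker is not visible in the hunk above:

    ---
    title: LLM ADE Dev
    emoji: 🔥
    colorFrom: pink
    colorTo: red
    sdk: streamlit        # assumption: standard for Streamlit Spaces, not shown in the hunk
    sdk_version: 1.33.0
    app_file: app.py
    pinned: false
    license: apache-2.0
    ---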
app.py ADDED
@@ -0,0 +1,42 @@
+ import streamlit as st
+ from vllm import LLM, SamplingParams
+
+ llm = LLM(model="InvestmentResearchAI/LLM-ADE-small-v0.1.0")
+ tok = llm.get_tokenizer()
+ tok.eos_token = '<|eot_id|>'  # Override to use turns
+
+
+ template = """<|begin_of_text|><|start_header_id|>system<|end_header_id|>
+
+ You are a helpful financial assistant that answers the user as accurately, truthfully, and concisely as possible.<|eot_id|><|start_header_id|>user<|end_header_id|>
+
+ {user_message}<|eot_id|><|start_header_id|>assistant<|end_header_id|>
+
+ """
+
+
+ def get_response(prompt):
+     try:
+         prompts = [template.format(user_message=prompt)]
+         sampling_params = SamplingParams(temperature=0.3, top_p=0.95)
+         outputs = llm.generate(prompts, sampling_params)
+         for output in outputs:
+             return output.outputs[0].text
+     except Exception as e:
+         return f"An error occurred: {str(e)}"
+
+ def main():
+     st.title("LLM-ADE 9B Demo")
+
+     input_text = st.text_area("Enter your text here:", value="", height=200)
+     if st.button("Generate"):
+         if input_text:
+             with st.spinner('Generating response...'):
+                 response_text = get_response(input_text)
+                 st.write(response_text)
+         else:
+             st.warning("Please enter some text to generate a response.")
+
+ if __name__ == "__main__":
+     main()
+
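
One detail worth flagging in app.py: SamplingParams is created with only temperature and top_p, so the generation length falls back to vLLM's default max_tokens, which is small (16 at the time of writing) and would truncate most answers. A minimal sketch of a more explicit configuration, with illustrative values that are not part of this commit:

    from vllm import SamplingParams

    # Illustrative values, not from the commit: cap the response length explicitly
    # and stop on the same end-of-turn token the prompt template relies on.
    sampling_params = SamplingParams(
        temperature=0.3,
        top_p=0.95,
        max_tokens=512,        # explicit cap instead of vLLM's small default
        stop=["<|eot_id|>"],   # stop at end-of-turn rather than relying on the eos_token override
    )

Explicit stop strings also keep the stopping behavior independent of whether the engine honors the tok.eos_token override made earlier in the file.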
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ streamlit
+ transformers
+ torch
+ vllm
+