MSey committed
Commit 4522029 · verified · 1 Parent(s): 99f17f9

Update app.py


change to CaBERT

Files changed (1)
  1. app.py +8 -7
app.py CHANGED
@@ -1,24 +1,25 @@
 from transformers import pipeline, AutoTokenizer
 import streamlit as st
+# Access the secret token from HF secrets
+hf_token = os.getenv("HF_MODEL_TOKEN")
 
-@st.cache_resource
-def context_text(text): return f"### Context\n{text}\n\n### Answer"
+# Login to Hugging Face
+login(token=hf_token)
 
 @st.cache_resource
 def load_pipe():
-    model_name = "MSey/pbt_CaBERT_7_c10731"
+    model_name = "MSey/_table_CaBERT_0003_gbert-base_fl32_checkpoint-15852"
     return pipeline("token-classification", model=model_name), AutoTokenizer.from_pretrained(model_name)
 
 pipe, tokenizer = load_pipe()
 
-st.header("Test Environment for pbt_CaBERT_7_c10731")
+st.header("Test Environment for GBERT Ca Model")
 user_input = st.text_input("Enter your Prompt here:", "")
-contexted_ipnut = context_text(user_input)
-context_len = len(contexted_ipnut)
+
 
 if user_input:
     with st.spinner('Generating response...'):
-        response = pipe(contexted_ipnut)
+        response = pipe(user_input)
     st.write("Response:")
     tuples = ""
     # Process each entity and highlight the labeled words
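
Note that the added lines call os.getenv and login without importing them, so the app as committed would raise a NameError at startup. Below is a minimal sketch of the top of the updated app.py with the presumably intended imports added, assuming login is huggingface_hub.login and the token is stored in a Space secret named HF_MODEL_TOKEN as the diff suggests; the entity-highlighting code further down is unchanged.

    import os

    import streamlit as st
    from huggingface_hub import login  # assumed source of the login() helper used above
    from transformers import pipeline, AutoTokenizer

    # Access the secret token from HF secrets (secret name taken from the diff)
    hf_token = os.getenv("HF_MODEL_TOKEN")

    # Login to Hugging Face so the (likely private) checkpoint can be downloaded
    login(token=hf_token)

    @st.cache_resource
    def load_pipe():
        model_name = "MSey/_table_CaBERT_0003_gbert-base_fl32_checkpoint-15852"
        return pipeline("token-classification", model=model_name), AutoTokenizer.from_pretrained(model_name)

    pipe, tokenizer = load_pipe()

    st.header("Test Environment for GBERT Ca Model")
    user_input = st.text_input("Enter your Prompt here:", "")

    if user_input:
        with st.spinner('Generating response...'):
            response = pipe(user_input)
        st.write("Response:")
        # ... entity highlighting continues as before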