Canstralian committed on
Commit c954503 · verified · 1 Parent(s): 7ffe7e2

Update app.py

Files changed (1)
  1. app.py +36 -20
app.py CHANGED
@@ -1,25 +1,41 @@
+import streamlit as st
 from transformers import AutoTokenizer, AutoModelForSequenceClassification
-import os
+import torch
 
-# Define the model name (replace with your actual model name)
-model_name = "huggingface/transformers"  # Example model name
+# Sidebar for user input
+st.sidebar.header("Model Configuration")
+model_name = st.sidebar.text_input("Enter model name", "huggingface/transformers")
 
-# Load the tokenizer and model
-try:
-    tokenizer = AutoTokenizer.from_pretrained(model_name)
-    model = AutoModelForSequenceClassification.from_pretrained(model_name)
-    print("Model and tokenizer loaded successfully!")
-except Exception as e:
-    print(f"Error loading model: {e}")
+# Load model and tokenizer on demand
+@st.cache_resource
+def load_model(model_name):
+    try:
+        # Load the model and tokenizer
+        tokenizer = AutoTokenizer.from_pretrained(model_name)
+        model = AutoModelForSequenceClassification.from_pretrained(model_name)
+        return tokenizer, model
+    except Exception as e:
+        st.error(f"Error loading model: {e}")
+        return None, None
 
-# Add your app logic here (e.g., for inference, etc.)
-def predict(text):
-    inputs = tokenizer(text, return_tensors="pt")
-    outputs = model(**inputs)
-    return outputs
+# Load the model and tokenizer
+tokenizer, model = load_model(model_name)
+
+# Input text box in the main panel
+st.title("Text Classification with Hugging Face Models")
+user_input = st.text_area("Enter text for classification:")
 
-# Example usage
-if __name__ == "__main__":
-    test_text = "Hello, world!"
-    result = predict(test_text)
-    print(result)
+# Make prediction if user input is provided
+if user_input and model and tokenizer:
+    inputs = tokenizer(user_input, return_tensors="pt")
+    with torch.no_grad():
+        outputs = model(**inputs)
+
+    # Display results (e.g., classification logits)
+    logits = outputs.logits
+    predicted_class = torch.argmax(logits, dim=-1).item()
+    st.write(f"Predicted Class: {predicted_class}")
+    st.write(f"Logits: {logits}")
+else:
+    st.info("Please enter some text to classify.")
+
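Note: the sidebar default "huggingface/transformers" is a placeholder, not a loadable checkpoint, so from_pretrained will raise and the app will show the st.error message until a real sequence-classification model id is entered. As a sketch of how the result display could be made more readable, assuming a real fine-tuned checkpoint such as distilbert-base-uncased-finetuned-sst-2-english (one that ships an id2label mapping in its config), the two st.write calls at the end of the diff could be swapped for:

    # Turn the raw logits into a probability distribution over classes
    probs = torch.softmax(logits, dim=-1)
    predicted_class = torch.argmax(probs, dim=-1).item()
    # Look up the human-readable label from the model config (assumes the
    # checkpoint defines id2label, as fine-tuned classifiers usually do)
    label = model.config.id2label[predicted_class]
    st.write(f"Predicted label: {label} (confidence {probs[0, predicted_class].item():.3f})")

Because load_model is wrapped in @st.cache_resource, the checkpoint is downloaded and loaded once per model name and reused across Streamlit reruns, rather than being reloaded on every widget interaction.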