pinyuchen commited on
Commit
1c1e204
·
verified ·
1 Parent(s): 3c768ef
Files changed (1) hide show
  1. software.py +18 -12
software.py CHANGED
@@ -98,26 +98,19 @@ class BiScope:
98
 
99
  class Software:
100
  def __init__(self):
101
- self.device_div = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
102
- if torch.cuda.device_count() > 1:
103
- self.device_bi = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
104
- else:
105
- self.device_bi = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
106
-
107
  self.token = os.getenv("HF_TOKEN")
108
 
109
  self.div_tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b", use_fast=False, trust_remote_code=True, use_auth_token=self.token)
110
  self.div_model = AutoModelForCausalLM.from_pretrained(
111
- "tiiuae/falcon-7b", device_map=self.device_div, torch_dtype=torch.float16, trust_remote_code=True, use_auth_token=self.token
112
  )
113
 
114
  self.bi_tokenizer = AutoTokenizer.from_pretrained("google/gemma-1.1-2b-it", use_fast=False, trust_remote_code=True, use_auth_token=self.token)
115
  self.bi_model = AutoModelForCausalLM.from_pretrained(
116
- "google/gemma-1.1-2b-it", device_map=self.device_bi, torch_dtype=torch.float16, trust_remote_code=True, use_auth_token=self.token
117
  )
118
 
119
- self.diveye = Diversity(self.div_model, self.div_tokenizer, self.device_div)
120
- self.biscope = BiScope(self.bi_model, self.bi_tokenizer, self.device_bi)
121
  self.model_path = Path(__file__).parent / "model.json"
122
 
123
  self.model = xgb.XGBClassifier()
@@ -134,8 +127,21 @@ class Software:
134
 
135
  @spaces.GPU
136
  def evaluate(self, text):
137
- diveye_features = self.diveye.compute_features(text)
138
- biscope_features = self.biscope.detect_single_sample(text)
 
 
 
 
 
 
 
 
 
 
 
 
 
139
 
140
  for f in biscope_features:
141
  diveye_features.append(f)
 
98
 
99
  class Software:
100
  def __init__(self):
 
 
 
 
 
 
101
  self.token = os.getenv("HF_TOKEN")
102
 
103
  self.div_tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b", use_fast=False, trust_remote_code=True, use_auth_token=self.token)
104
  self.div_model = AutoModelForCausalLM.from_pretrained(
105
+ "tiiuae/falcon-7b", torch_dtype=torch.float16, trust_remote_code=True, use_auth_token=self.token
106
  )
107
 
108
  self.bi_tokenizer = AutoTokenizer.from_pretrained("google/gemma-1.1-2b-it", use_fast=False, trust_remote_code=True, use_auth_token=self.token)
109
  self.bi_model = AutoModelForCausalLM.from_pretrained(
110
+ "google/gemma-1.1-2b-it", torch_dtype=torch.float16, trust_remote_code=True, use_auth_token=self.token
111
  )
112
 
113
+
 
114
  self.model_path = Path(__file__).parent / "model.json"
115
 
116
  self.model = xgb.XGBClassifier()
 
127
 
128
  @spaces.GPU
129
  def evaluate(self, text):
130
+ # Load models to GPUs.
131
+ device_div = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
132
+ if torch.cuda.device_count() > 1:
133
+ device_bi = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
+ else:
+ device_bi = device_div
134
+
135
+ if not next(self.div_model.parameters()).is_cuda:
136
+ self.div_model = self.div_model.to(device_div)
137
+ if not next(self.bi_model.parameters()).is_cuda:
138
+ self.bi_model = self.bi_model.to(device_bi)
139
+
140
+ diveye = Diversity(self.div_model, self.div_tokenizer, device_div)
141
+ biscope = BiScope(self.bi_model, self.bi_tokenizer, device_bi)
142
+
143
+ diveye_features = diveye.compute_features(text)
144
+ biscope_features = biscope.detect_single_sample(text)
145
 
146
  for f in biscope_features:
147
  diveye_features.append(f)