ThorbenF committed on
Commit
aae512c
·
1 Parent(s): 6643342

Update requirements and make necessary code changes

Browse files
Files changed (2) hide show
  1. .ipynb_checkpoints/app-checkpoint.py +3 -3
  2. app.py +3 -3
.ipynb_checkpoints/app-checkpoint.py CHANGED
@@ -42,6 +42,9 @@ checkpoint='ThorbenF/prot_t5_xl_uniref50'
42
  max_length=1500
43
 
44
  model, tokenizer = load_model(checkpoint,max_length)
 
 
 
45
 
46
  def create_dataset(tokenizer,seqs,labels,checkpoint):
47
 
@@ -97,9 +100,6 @@ def predict_protein_sequence(test_one_letter_sequence):
97
 
98
  test_loader = DataLoader(test_dataset, batch_size=1, collate_fn=data_collator)
99
 
100
- device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
101
- model.to(device)
102
- model.eval()
103
  for batch in test_loader:
104
  input_ids = batch['input_ids'].to(device)
105
  attention_mask = batch['attention_mask'].to(device)
 
42
  max_length=1500
43
 
44
  model, tokenizer = load_model(checkpoint,max_length)
45
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
46
+ model.to(device)
47
+ model.eval()
48
 
49
  def create_dataset(tokenizer,seqs,labels,checkpoint):
50
 
 
100
 
101
  test_loader = DataLoader(test_dataset, batch_size=1, collate_fn=data_collator)
102
 
 
 
 
103
  for batch in test_loader:
104
  input_ids = batch['input_ids'].to(device)
105
  attention_mask = batch['attention_mask'].to(device)
app.py CHANGED
@@ -42,6 +42,9 @@ checkpoint='ThorbenF/prot_t5_xl_uniref50'
42
  max_length=1500
43
 
44
  model, tokenizer = load_model(checkpoint,max_length)
 
 
 
45
 
46
  def create_dataset(tokenizer,seqs,labels,checkpoint):
47
 
@@ -97,9 +100,6 @@ def predict_protein_sequence(test_one_letter_sequence):
97
 
98
  test_loader = DataLoader(test_dataset, batch_size=1, collate_fn=data_collator)
99
 
100
- device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
101
- model.to(device)
102
- model.eval()
103
  for batch in test_loader:
104
  input_ids = batch['input_ids'].to(device)
105
  attention_mask = batch['attention_mask'].to(device)
 
42
  max_length=1500
43
 
44
  model, tokenizer = load_model(checkpoint,max_length)
45
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
46
+ model.to(device)
47
+ model.eval()
48
 
49
  def create_dataset(tokenizer,seqs,labels,checkpoint):
50
 
 
100
 
101
  test_loader = DataLoader(test_dataset, batch_size=1, collate_fn=data_collator)
102
 
 
 
 
103
  for batch in test_loader:
104
  input_ids = batch['input_ids'].to(device)
105
  attention_mask = batch['attention_mask'].to(device)