# HostClassifier / app.py
# (Hugging Face Space page residue follows — not part of the program:
#  "hiyata's picture / Update app.py / 6c88c65 verified / raw / history blame / 9.38 kB")
import gradio as gr
import torch
import joblib
import numpy as np
from itertools import product
import torch.nn as nn
import matplotlib.pyplot as plt
import io
from PIL import Image
class VirusClassifier(nn.Module):
    """Binary MLP classifier over k-mer frequency vectors.

    Output logits: index 0 = non-human host, index 1 = human host.
    """

    def __init__(self, input_shape: int):
        super(VirusClassifier, self).__init__()
        self.network = nn.Sequential(
            nn.Linear(input_shape, 64),
            nn.GELU(),
            nn.BatchNorm1d(64),
            nn.Dropout(0.3),
            nn.Linear(64, 32),
            nn.GELU(),
            nn.BatchNorm1d(32),
            nn.Dropout(0.3),
            nn.Linear(32, 32),
            nn.GELU(),
            nn.Linear(32, 2)
        )

    def forward(self, x):
        return self.network(x)

    def get_feature_importance(self, x):
        """Calculate feature importance using a gradient-based method.

        Args:
            x: input tensor of shape (batch, input_shape).

        Returns:
            (importance, human_prob): gradient of the human-class
            probability w.r.t. each input feature (same shape as ``x``),
            and the mean human-class probability as a float.

        Note: enables ``requires_grad`` on ``x`` as a side effect.
        """
        x.requires_grad_(True)
        output = self.network(x)
        probs = torch.softmax(output, dim=1)
        # Importance is taken for the human class (index 1)
        human_prob = probs[..., 1]
        # Clear stale gradients from a previous call so they don't accumulate
        if x.grad is not None:
            x.grad.zero_()
        # Sum before backward(): the original non-scalar backward() (and
        # float() on a multi-element tensor) raised for batch sizes > 1.
        # For batch == 1 this is numerically identical to the old behavior.
        human_prob.sum().backward()
        importance = x.grad
        return importance, float(human_prob.mean())
def sequence_to_kmer_vector(sequence: str, k: int = 4) -> np.ndarray:
    """Convert a DNA sequence to a normalized k-mer frequency vector.

    Each of the 4**k canonical A/C/G/T k-mers gets one slot; windows
    containing any other character are skipped. Counts are divided by the
    number of windows, so the entries sum to at most 1.
    """
    index_of = {''.join(chars): i for i, chars in enumerate(product("ACGT", repeat=k))}
    counts = np.zeros(len(index_of), dtype=np.float32)

    window_count = len(sequence) - k + 1
    for start in range(window_count):
        slot = index_of.get(sequence[start:start + k])
        if slot is not None:
            counts[slot] += 1

    # Normalize to frequencies; a sequence shorter than k stays all-zero.
    if window_count > 0:
        counts = counts / window_count
    return counts
def parse_fasta(text):
    """Parse FASTA-formatted text into a list of (header, sequence) pairs.

    Sequence lines are uppercased and concatenated. Lines appearing before
    the first non-empty header are discarded, matching the original's
    truthiness check on the header string.
    """
    records = []
    header = None
    chunks = []
    for raw_line in text.split('\n'):
        line = raw_line.strip()
        if not line:
            continue
        if line.startswith('>'):
            # Flush the previous record before starting a new one.
            if header:
                records.append((header, ''.join(chunks)))
            header = line[1:]
            chunks = []
        else:
            chunks.append(line.upper())
    # Flush the trailing record, if any.
    if header:
        records.append((header, ''.join(chunks)))
    return records
def create_visualization(important_kmers, human_prob, title):
    """Create a comprehensive visualization of k-mer impacts.

    Args:
        important_kmers: list of dicts with keys 'kmer', 'impact' (float),
            'direction' ('human' or 'non-human'), 'occurrence' (percentage),
            and 'sigma' (standard deviations from the training mean).
        human_prob: final human-class probability, shown in the top title.
        title: unused — NOTE(review): consider wiring it into a figure
            suptitle or removing the parameter.

    Returns:
        A matplotlib Figure with two stacked subplots (step plot + bars).
    """
    fig = plt.figure(figsize=(15, 10))
    # Create grid for subplots (top plot gets 1.5x the bottom plot's height)
    gs = plt.GridSpec(2, 1, height_ratios=[1.5, 1], hspace=0.3)

    # 1. Probability Step Plot
    ax1 = plt.subplot(gs[0])
    # Accumulate signed impacts starting from a neutral 0.5.
    # NOTE(review): this is an illustrative walk, not the model's actual
    # probability computation — the running value can leave [0, 1] and is
    # clipped only visually by set_ylim below.
    current_prob = 0.5
    steps = [('Start', current_prob, 0)]
    for kmer in important_kmers:
        change = kmer['impact'] * (-1 if kmer['direction'] == 'non-human' else 1)
        current_prob += change
        steps.append((kmer['kmer'], current_prob, change))
    x = range(len(steps))
    y = [step[1] for step in steps]
    # Plot steps
    ax1.step(x, y, 'b-', where='post', label='Probability', linewidth=2)
    ax1.plot(x, y, 'b.', markersize=10)
    # Add reference line
    ax1.axhline(y=0.5, color='r', linestyle='--', label='Neutral (0.5)')
    # Customize plot
    ax1.grid(True, linestyle='--', alpha=0.7)
    ax1.set_ylim(0, 1)
    ax1.set_ylabel('Human Probability')
    ax1.set_title(f'K-mer Contributions to Prediction (final prob: {human_prob:.3f})')
    # Add labels for each point
    for i, (kmer, prob, change) in enumerate(steps):
        # Add k-mer label (alternate above/below the point to reduce overlap)
        ax1.annotate(kmer,
                     (i, prob),
                     xytext=(0, 10 if i % 2 == 0 else -20),
                     textcoords='offset points',
                     ha='center',
                     rotation=45)
        # Add change value (skipped for the synthetic 'Start' point)
        if i > 0:
            change_text = f'{change:+.3f}'
            color = 'green' if change > 0 else 'red'
            ax1.annotate(change_text,
                         (i, prob),
                         xytext=(0, -20 if i % 2 == 0 else 10),
                         textcoords='offset points',
                         ha='center',
                         color=color)
    ax1.legend()

    # 2. K-mer Frequency and Sigma Plot
    ax2 = plt.subplot(gs[1])
    # Prepare data
    kmers = [k['kmer'] for k in important_kmers]
    frequencies = [k['occurrence'] for k in important_kmers]
    sigmas = [k['sigma'] for k in important_kmers]
    # Green bars push toward 'human', red toward 'non-human'
    colors = ['g' if k['direction'] == 'human' else 'r' for k in important_kmers]
    # Create bar plot for frequencies (left axis); sigmas go on a twin right axis
    x = np.arange(len(kmers))
    width = 0.35
    ax2.bar(x - width/2, frequencies, width, label='Frequency (%)', color=colors, alpha=0.6)
    ax2_twin = ax2.twinx()
    # Negative-sigma bars are grayed out regardless of direction
    ax2_twin.bar(x + width/2, sigmas, width, label='σ from mean', color=[c if s > 0 else 'gray' for c, s in zip(colors, sigmas)], alpha=0.3)
    # Customize plot
    ax2.set_xticks(x)
    ax2.set_xticklabels(kmers, rotation=45)
    ax2.set_ylabel('Frequency (%)')
    ax2_twin.set_ylabel('Standard Deviations (σ) from Mean')
    ax2.set_title('K-mer Frequencies and Statistical Significance')
    # Add legends (merge handles from both y-axes into one legend box)
    lines1, labels1 = ax2.get_legend_handles_labels()
    lines2, labels2 = ax2_twin.get_legend_handles_labels()
    ax2.legend(lines1 + lines2, labels1 + labels2, loc='upper right')

    plt.tight_layout()
    return fig
def predict(file_obj):
    """Classify the first FASTA sequence in ``file_obj`` as human / non-human host.

    Args:
        file_obj: contents of the uploaded file — ``bytes`` (Gradio File with
            type="binary") or an already-decoded ``str``.

    Returns:
        tuple: (results_text, plot_image) where plot_image is a PIL Image of
        the k-mer analysis; on failure, (error_message, None).
    """
    if file_obj is None:
        return "Please upload a FASTA file", None

    # Decode bytes if needed; Gradio may hand us either form.
    try:
        if isinstance(file_obj, str):
            text = file_obj
        else:
            text = file_obj.decode('utf-8')
    except Exception as e:
        return f"Error reading file: {str(e)}", None

    # Canonical 4-mer ordering: feature index i corresponds to kmers[i],
    # matching sequence_to_kmer_vector's layout.
    k = 4
    kmers = [''.join(p) for p in product("ACGT", repeat=k)]

    # Load model + scaler on every call. NOTE(review): simple but slow for
    # repeated requests — could be cached at module level.
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        model = VirusClassifier(len(kmers)).to(device)
        state_dict = torch.load('model.pt', map_location=device)
        model.load_state_dict(state_dict)
        scaler = joblib.load('scaler.pkl')
        model.eval()
    except Exception as e:
        return f"Error loading model: {str(e)}", None

    results_text = ""
    plot_image = None
    try:
        sequences = parse_fasta(text)
        # Explicit guard: previously an empty file surfaced as an opaque
        # "list index out of range" error.
        if not sequences:
            return "Error processing sequences: no FASTA records found in file", None
        # Only the first record in the file is analysed.
        header, seq = sequences[0]

        raw_freq_vector = sequence_to_kmer_vector(seq)
        # StandardScaler output: each entry is σ (standard deviations) from
        # the training-set mean for that k-mer.
        kmer_vector = scaler.transform(raw_freq_vector.reshape(1, -1))
        X_tensor = torch.FloatTensor(kmer_vector).to(device)

        # Plain forward pass for the class probabilities.
        with torch.no_grad():
            output = model(X_tensor)
            probs = torch.softmax(output, dim=1)

        # Gradient-based importance per input feature (needs grad enabled,
        # so this runs outside the no_grad block).
        importance, _ = model.get_feature_importance(X_tensor)
        kmer_importance = importance[0].cpu().numpy()

        # Top 10 features by absolute gradient magnitude, most important first.
        top_k = 10
        top_indices = np.argsort(np.abs(kmer_importance))[-top_k:][::-1]
        important_kmers = []
        for idx in top_indices:
            important_kmers.append({
                # Direct index lookup — feature i is kmers[i] by construction
                # (replaces the original O(n) keys()/values() reverse search).
                'kmer': kmers[idx],
                'impact': float(abs(kmer_importance[idx])),
                'direction': 'human' if kmer_importance[idx] > 0 else 'non-human',
                'occurrence': float(raw_freq_vector[idx] * 100),  # percentage of windows
                'sigma': float(kmer_vector[0][idx]),  # z-score vs. training data
            })

        # Generate text results
        pred_class = 1 if probs[0][1] > probs[0][0] else 0
        pred_label = 'human' if pred_class == 1 else 'non-human'
        human_prob = float(probs[0][1])
        results_text = f"""Sequence: {header}
Prediction: {pred_label}
Confidence: {float(max(probs[0])):0.4f}
Human probability: {human_prob:0.4f}
Non-human probability: {float(probs[0][0]):0.4f}
Most influential k-mers (ranked by importance):"""
        for kmer in important_kmers:
            results_text += f"\n {kmer['kmer']}: "
            results_text += f"pushes toward {kmer['direction']} (impact={kmer['impact']:.4f}), "
            results_text += f"occurrence={kmer['occurrence']:.2f}% of sequence "
            results_text += f"(appears {abs(kmer['sigma']):.2f}σ "
            results_text += "more" if kmer['sigma'] > 0 else "less"
            results_text += " than average)"

        # Render the figure to a PIL image so Gradio can display it, then
        # close the figure to avoid leaking matplotlib state.
        fig = create_visualization(important_kmers, human_prob, header)
        buf = io.BytesIO()
        fig.savefig(buf, format='png', bbox_inches='tight', dpi=300)
        buf.seek(0)
        plot_image = Image.open(buf)
        plt.close(fig)
    except Exception as e:
        return f"Error processing sequences: {str(e)}", None

    return results_text, plot_image
# Gradio UI: a single binary-file input mapped to a text report plus the
# rendered k-mer analysis plot produced by predict().
iface = gr.Interface(
    fn=predict,
    # type="binary" delivers the raw file bytes to predict()
    inputs=gr.File(label="Upload FASTA file", type="binary"),
    outputs=[
        gr.Textbox(label="Results"),
        gr.Image(label="K-mer Analysis Visualization")
    ],
    title="Virus Host Classifier"
)

if __name__ == "__main__":
    # share=True requests a public tunnel URL in addition to the local server
    iface.launch(share=True)