Hack90 committed
Commit 61488b4 · verified · 1 Parent(s): 5ff6c29

Update utils.py

Files changed (1)
utils.py  +118 -0
utils.py CHANGED
@@ -0,0 +1,118 @@
+ from collections import namedtuple
+
+ import matplotlib.pyplot as plt
+ import numpy as np
+ import torch
+ from scipy.interpolate import interp1d
+
+ # Mapping of nucleotides to float coordinates ("N" maps to the origin)
+ mapping_easy = {
+     "A": np.array([0.5, -0.8660254037844386]),
+     "T": np.array([0.5, 0.8660254037844386]),
+     "G": np.array([0.8660254037844386, -0.5]),
+     "C": np.array([0.8660254037844386, 0.5]),
+     "N": np.array([0, 0]),
+ }
+
+ # coordinates for x+iy
+ Coord = namedtuple("Coord", ["x", "y"])
+ # coordinates for a CGR encoding
+ CGRCoords = namedtuple("CGRCoords", ["N", "x", "y"])
+ # coordinates for each nucleotide in the 2d-plane
+ DEFAULT_COORDS = {"A": Coord(1, 1), "C": Coord(-1, 1), "G": Coord(-1, -1), "T": Coord(1, -1)}
+
+
+ # Function to convert a DNA sequence to an (N, 2) array of per-nucleotide coordinates
+ def _dna_to_coordinates(dna_sequence: str, mapping: dict[str, np.ndarray]) -> np.ndarray:
+     dna_sequence = dna_sequence.upper()
+     coordinates = np.array([mapping.get(nucleotide, mapping["N"]) for nucleotide in dna_sequence])
+     return coordinates
+
+
+ # Function to create the cumulative sum of a list of coordinates (the 2D "walk" of the sequence)
+ def _get_cumulative_coords(mapped_coords):
+     cumulative_coords = np.cumsum(mapped_coords, axis=0)
+     return cumulative_coords
+
+
+ def generate_2d_sequence(seq):
+     """Encode a DNA sequence as standardized cumulative 2D coordinates; returns (y_scaled, x_scaled)."""
+     dna_sequence = seq.upper()
+     mapped_coords = _dna_to_coordinates(dna_sequence, mapping_easy)
+     cumulative_coords = _get_cumulative_coords(mapped_coords)
+
+     # Scale the input data using standardization
+     x_train = cumulative_coords[:, 0]
+     y_train = cumulative_coords[:, 1]
+     x_train_scaled = (x_train - x_train.mean()) / x_train.std()
+     y_train_scaled = (y_train - y_train.mean()) / y_train.std()
+     scaled_coords = np.column_stack((x_train_scaled, y_train_scaled))
+
+     # Interpolate the scaled y-coordinates onto exactly 1000 points
+     interpolated_coords = y_train_scaled  # fallback when interpolation is skipped or fails
+     if len(scaled_coords) != 1000:
+         try:
+             t = np.linspace(0, 1, len(scaled_coords))
+             t_new = np.linspace(0, 1, 1000)
+             interp_func_y = interp1d(t, scaled_coords[:, 1], kind="linear")
+             interpolated_coords = interp_func_y(t_new)
+         except Exception as e:
+             print(f"Interpolation error: {e}")
+
+     # Fixed-length tensor of the interpolated y-coordinates (computed but not returned)
+     tensor_2d_rep_y = torch.Tensor(interpolated_coords).reshape(1, -1)
+
+     return y_train_scaled, x_train_scaled
+
+
+ def generate_2d_sequence_small(seq):
+     """Encode a DNA sequence as a 400-point torch tensor of standardized, interpolated y-coordinates."""
+     dna_sequence = seq.upper()
+     mapped_coords = _dna_to_coordinates(dna_sequence, mapping_easy)
+     cumulative_coords = _get_cumulative_coords(mapped_coords)
+
+     # Scale the input data using standardization
+     x_train = cumulative_coords[:, 0]
+     y_train = cumulative_coords[:, 1]
+     x_train_scaled = (x_train - x_train.mean()) / x_train.std()
+     y_train_scaled = (y_train - y_train.mean()) / y_train.std()
+     scaled_coords = np.column_stack((x_train_scaled, y_train_scaled))
+
+     # Interpolate the scaled y-coordinates onto exactly 400 points
+     interpolated_coords = y_train_scaled  # fallback when interpolation is skipped or fails
+     if len(scaled_coords) != 400:
+         try:
+             t = np.linspace(0, 1, len(scaled_coords))
+             t_new = np.linspace(0, 1, 400)
+             interp_func_y = interp1d(t, scaled_coords[:, 1], kind="linear")
+             interpolated_coords = interp_func_y(t_new)
+         except Exception as e:
+             print(f"Interpolation error: {e}")
+
+     # 1-D tensor; length 400 whenever the interpolation succeeds
+     tensor_2d_rep_y = torch.Tensor(interpolated_coords).reshape(-1)
+
+     return tensor_2d_rep_y
+
+
+ def plot_seq_full_label(df, filter):
+     """Plot one randomly sampled sequence for each label id in `filter`, one subplot per filter entry."""
+     ncols = len(filter)
+     unique_ids = df.label_id.unique()
+     unique_ids_plot = [label for label in unique_ids if label in filter]
+     # squeeze=False keeps `axs` 2-D so indexing also works when there is a single column
+     fig, axs = plt.subplots(ncols=ncols, squeeze=False)
+     for i, label in enumerate(unique_ids_plot):
+         seq = df[df["label_id"] == label].sample(n=1)["seq"].values[0]
+         data = generate_2d_sequence_small(seq).numpy()
+         axs[0, i].plot(data)
+     return fig
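
A minimal usage sketch (not part of the commit) of how these helpers might be called, assuming utils.py is importable as `utils` and that `plot_seq_full_label` receives a pandas DataFrame with `label_id` and `seq` columns (the column names it indexes above); the labels and sequences below are made-up examples.

import pandas as pd
import matplotlib.pyplot as plt

from utils import generate_2d_sequence, generate_2d_sequence_small, plot_seq_full_label

# Fixed-length (400-point) representation of a single sequence
rep = generate_2d_sequence_small("ACGTNACGTTGCAACGT")
print(rep.shape)  # torch.Size([400])

# Variable-length standardized coordinates of the same walk, returned as (y, x)
y_scaled, x_scaled = generate_2d_sequence("ACGTNACGTTGCAACGT")

# Hypothetical example frame: "label_id" and "seq" are the columns plot_seq_full_label expects
df = pd.DataFrame({
    "label_id": ["virus", "virus", "plasmid", "plasmid"],
    "seq": ["ACGTACGTAGCTAGCTA", "TTGGCCAATTGGCCAA", "ACACACACGTGTGTGT", "GGGGCCCCAAAATTTT"],
})

# One subplot per requested label, each showing one randomly sampled sequence
fig = plot_seq_full_label(df, ["virus", "plasmid"])
plt.show()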