import torch


class min_max_scaler():
    """Min-max scaler for dB-spectrograms, mapping values onto [lower, upper]."""

    def __init__(self, upper_bound=1, lower_bound=0):
        self.upper = upper_bound
        self.lower = lower_bound
        # Sentinel values: the scaler counts as unfitted while minimum is +inf.
        self.minimum = torch.ones(1) * torch.inf
        self.maximum = -torch.ones(1) * torch.inf

    def fit(self, set_maximum=0.0, set_minimum=-100.0):
        """Set the min and max values used for scaling.

        Since dB-spectrograms lie on the scale [-100, 0] by default, the
        defaults are set to those values.

        Args:
            set_maximum (float, optional): maximum value. Defaults to 0.0.
            set_minimum (float, optional): minimum value. Defaults to -100.0.

        Returns:
            None
        """
        if set_minimum is not None and set_maximum is not None:
            self.minimum = set_minimum
            self.maximum = set_maximum
        return None

    def transform(self, spectrogram):
        """Scale a spectrogram from [minimum, maximum] to [lower, upper]."""
        if self.minimum == torch.inf:
            raise ValueError("Cannot transform before scaler is fitted with min-max values")
        return (self.upper - self.lower) * (spectrogram - self.minimum) / (self.maximum - self.minimum) + self.lower

    def inverse_transform(self, spectrogram):
        """Map a scaled spectrogram from [lower, upper] back to [minimum, maximum]."""
        if self.minimum == torch.inf:
            raise ValueError("Cannot inverse transform before scaler is fitted with min-max values")
        return (spectrogram - self.lower) * (self.maximum - self.minimum) / (self.upper - self.lower) + self.minimum
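

# Usage sketch (assumed workflow, not part of the original module): scale a
# dB-spectrogram from the default [-100, 0] dB range to [0, 1] and recover it
# afterwards. The spectrogram tensor below is random dummy data for
# illustration only.
if __name__ == "__main__":
    scaler = min_max_scaler(upper_bound=1, lower_bound=0)
    scaler.fit(set_maximum=0.0, set_minimum=-100.0)

    # Dummy dB-spectrogram with values in [-100, 0].
    spectrogram_db = torch.rand(1, 128, 64) * -100.0

    scaled = scaler.transform(spectrogram_db)      # values now in [0, 1]
    restored = scaler.inverse_transform(scaled)    # back to [-100, 0]

    # The round trip should be lossless up to floating-point error.
    assert torch.allclose(spectrogram_db, restored, atol=1e-5)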