import h5py
import numpy as np
from pathlib import Path
from tqdm import tqdm


def load_stimulus_features(root_data_dir: str, modality: str) -> dict:
    """
    Load stimulus features stored in .h5 files under the given root directory.

    Each .h5 file is expected to contain one or more datasets corresponding to
    different layers. If more than one layer is present, they are concatenated
    along axis 1 before slicing.

    Parameters:
        root_data_dir (str): Root directory containing the .h5 files.
        modality (str): Modality name (e.g. 'audio', 'video'). Used as the
            top-level key of the returned dictionary.

    Returns:
        dict: A dictionary of the form {modality: {movie_name: features_array}} where:
            - movie_name is the stem of the .h5 file (e.g. "s01e01a"), with any
              leading "friends_" prefix removed
            - features_array is a NumPy array of shape (num_intervals, 200),
              i.e. the first entry along axis 1 and the first 200 feature
              dimensions of the stored (concatenated) datasets.
    """
    features = {modality: {}}
    root_path = Path(root_data_dir)

    for h5_file in tqdm(root_path.rglob("*.h5")):
        movie_name = h5_file.stem

        # Strip the "friends_" prefix so keys match the episode naming (e.g. "s01e01a").
        if movie_name.startswith("friends_"):
            movie_name = movie_name[len("friends_"):]

        # Read every layer dataset stored in the file.
        datasets = []
        with h5py.File(h5_file, 'r') as f:
            for layer in f.keys():
                datasets.append(f[layer][:])

        if len(datasets) > 1:
            concatenated_features = np.concatenate(datasets, axis=1)
        elif datasets:
            concatenated_features = datasets[0]
        else:
            # Skip files that contain no datasets.
            continue

        # Keep only the first entry along axis 1 and the first 200 feature
        # dimensions, giving an array of shape (num_intervals, 200).
        features[modality][movie_name] = concatenated_features[:, 0, 0:200]

    return features


# Load Whisper audio features and report the shape of each movie split.
whisper_root_data_dir = "/content/drive/MyDrive/features/whisper"
modality = "audio"

features = load_stimulus_features(whisper_root_data_dir, modality)

for key_modality, value_modality in features.items():
    print(f"\n{key_modality} features (movie split name and shape):")
    for key_movie, value_movie in value_modality.items():
        print(f"{key_movie} {value_movie.shape}")
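
# Optional sanity check, a minimal sketch: pull one movie split out of the nested
# dictionary and inspect it directly. The split name used here is simply whichever
# key comes first; replace it with a known name such as "s01e01a" if preferred.
example_name = next(iter(features[modality]), None)
if example_name is not None:
    example_array = features[modality][example_name]
    print(f"\nExample split '{example_name}': dtype={example_array.dtype}, shape={example_array.shape}")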