import os

import numpy as np
import torch
from tqdm import tqdm

import laion_clap

# One event description per line, e.g. "dog barking"; skip blank lines.
with open("events.txt", "r") as f:
    events = [line.strip() for line in f if line.strip()]

save_path = './clap_embs'
os.makedirs(save_path, exist_ok=True)

# Load the CLAP model (non-fusion variant) and the 630k checkpoint.
model = laion_clap.CLAP_Module(enable_fusion=False)
model.load_ckpt("./630k-best.pt")

with torch.no_grad():
    for event in tqdm(events):
        # CLAP expects a list of strings; embed one event at a time.
        text_data = [event.lower()]
        text_embed = model.get_text_embedding(text_data, use_tensor=True)
        text_embed = text_embed.squeeze().cpu().numpy()
        # Save each embedding as <event_with_underscores>.npz
        # (stored under NumPy's default key 'arr_0').
        save_fn = os.path.join(save_path, event.lower().replace(" ", "_") + ".npz")
        np.savez_compressed(save_fn, text_embed)
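
# Sketch (not part of the original script) of how a saved embedding could be
# loaded back. The file name "dog_barking.npz" is a hypothetical example;
# np.savez_compressed above stores the array under the default key 'arr_0'.
#
#   import numpy as np
#   emb = np.load("./clap_embs/dog_barking.npz")["arr_0"]  # typically shape (512,)
#   emb = emb / np.linalg.norm(emb)  # optional: re-normalize before similarity search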