---
dataset_info:
- config_name: mistral-7b-v0.1-sft
  features:
  - name: task_name
    dtype: string
  - name: layer_model
    dtype: string
  - name: layer_name
    dtype: string
  - name: pre_ft_name
    dtype: string
  - name: pre_ft_weight
    sequence:
      sequence: float32
  - name: lora_0_name
    dtype: string
  - name: lora_0_A_weight
    sequence:
      sequence: float32
  - name: lora_0_B_weight
    sequence:
      sequence: float32
  - name: lora_0_rank
    dtype: int64
  - name: lora_0_alpha
    dtype: int64
  - name: lora_1_name
    dtype: string
  - name: lora_1_A_weight
    sequence:
      sequence: float32
  - name: lora_1_B_weight
    sequence:
      sequence: float32
  - name: lora_1_rank
    dtype: int64
  - name: lora_1_alpha
    dtype: int64
  - name: lora_2_name
    dtype: string
  - name: lora_2_A_weight
    sequence:
      sequence: float32
  - name: lora_2_B_weight
    sequence:
      sequence: float32
  - name: lora_2_rank
    dtype: int64
  - name: lora_2_alpha
    dtype: int64
  - name: lora_3_name
    dtype: string
  - name: lora_3_A_weight
    sequence:
      sequence: float32
  - name: lora_3_B_weight
    sequence:
      sequence: float32
  - name: lora_3_rank
    dtype: int64
  - name: lora_3_alpha
    dtype: int64
  - name: lora_4_name
    dtype: string
  - name: lora_4_A_weight
    sequence:
      sequence: float32
  - name: lora_4_B_weight
    sequence:
      sequence: float32
  - name: lora_4_rank
    dtype: int64
  - name: lora_4_alpha
    dtype: int64
  - name: lora_5_name
    dtype: string
  - name: lora_5_A_weight
    sequence:
      sequence: float32
  - name: lora_5_B_weight
    sequence:
      sequence: float32
  - name: lora_5_rank
    dtype: int64
  - name: lora_5_alpha
    dtype: int64
  - name: lora_6_name
    dtype: string
  - name: lora_6_A_weight
    sequence:
      sequence: float32
  - name: lora_6_B_weight
    sequence:
      sequence: float32
  - name: lora_6_rank
    dtype: int64
  - name: lora_6_alpha
    dtype: int64
  - name: lora_7_name
    dtype: string
  - name: lora_7_A_weight
    sequence:
      sequence: float32
  - name: lora_7_B_weight
    sequence:
      sequence: float32
  - name: lora_7_rank
    dtype: int64
  - name: lora_7_alpha
    dtype: int64
  - name: lora_8_name
    dtype: string
  - name: lora_8_A_weight
    sequence:
      sequence: float32
  - name: lora_8_B_weight
    sequence:
      sequence: float32
  - name: lora_8_rank
    dtype: int64
  - name: lora_8_alpha
    dtype: int64
  - name: lora_9_name
    dtype: string
  - name: lora_9_A_weight
    sequence:
      sequence: float32
  - name: lora_9_B_weight
    sequence:
      sequence: float32
  - name: lora_9_rank
    dtype: int64
  - name: lora_9_alpha
    dtype: int64
  - name: lora_10_name
    dtype: string
  - name: lora_10_A_weight
    sequence:
      sequence: float32
  - name: lora_10_B_weight
    sequence:
      sequence: float32
  - name: lora_10_rank
    dtype: int64
  - name: lora_10_alpha
    dtype: int64
  - name: lora_11_name
    dtype: string
  - name: lora_11_A_weight
    sequence:
      sequence: float32
  - name: lora_11_B_weight
    sequence:
      sequence: float32
  - name: lora_11_rank
    dtype: int64
  - name: lora_11_alpha
    dtype: int64
  - name: lora_12_name
    dtype: string
  - name: lora_12_A_weight
    sequence:
      sequence: float32
  - name: lora_12_B_weight
    sequence:
      sequence: float32
  - name: lora_12_rank
    dtype: int64
  - name: lora_12_alpha
    dtype: int64
  - name: lora_13_name
    dtype: string
  - name: lora_13_A_weight
    sequence:
      sequence: float32
  - name: lora_13_B_weight
    sequence:
      sequence: float32
  - name: lora_13_rank
    dtype: int64
  - name: lora_13_alpha
    dtype: int64
  - name: lora_14_name
    dtype: string
  - name: lora_14_A_weight
    sequence:
      sequence: float32
  - name: lora_14_B_weight
    sequence:
      sequence: float32
  - name: lora_14_rank
    dtype: int64
  - name: lora_14_alpha
    dtype: int64
  splits:
  - name: train
    num_bytes: 8661875544
    num_examples: 128
  download_size: 5791365905
  dataset_size: 8661875544
- config_name: stable-diffusion-1.5
  features:
  - name: task_name
    dtype: string
  - name: layer_model
    dtype: string
  - name: layer_name
    dtype: string
  - name: pre_ft_name
    dtype: string
  - name: pre_ft_weight
    sequence:
      sequence: float32
  - name: lora_0_name
    dtype: string
  - name: lora_0_A_weight
    sequence:
      sequence: float32
  - name: lora_0_B_weight
    sequence:
      sequence: float32
  - name: lora_0_rank
    dtype: int64
  - name: lora_0_alpha
    dtype: float64
  - name: lora_1_name
    dtype: string
  - name: lora_1_A_weight
    sequence:
      sequence: float32
  - name: lora_1_B_weight
    sequence:
      sequence: float32
  - name: lora_1_rank
    dtype: int64
  - name: lora_1_alpha
    dtype: float64
  - name: lora_2_name
    dtype: string
  - name: lora_2_A_weight
    sequence:
      sequence: float32
  - name: lora_2_B_weight
    sequence:
      sequence: float32
  - name: lora_2_rank
    dtype: int64
  - name: lora_2_alpha
    dtype: float64
  - name: lora_3_name
    dtype: string
  - name: lora_3_A_weight
    sequence:
      sequence: float32
  - name: lora_3_B_weight
    sequence:
      sequence: float32
  - name: lora_3_rank
    dtype: int64
  - name: lora_3_alpha
    dtype: float64
  - name: lora_4_name
    dtype: string
  - name: lora_4_A_weight
    sequence:
      sequence: float32
  - name: lora_4_B_weight
    sequence:
      sequence: float32
  - name: lora_4_rank
    dtype: int64
  - name: lora_4_alpha
    dtype: float64
  - name: lora_5_name
    dtype: string
  - name: lora_5_A_weight
    sequence:
      sequence: float32
  - name: lora_5_B_weight
    sequence:
      sequence: float32
  - name: lora_5_rank
    dtype: int64
  - name: lora_5_alpha
    dtype: float64
  - name: lora_6_name
    dtype: string
  - name: lora_6_A_weight
    sequence:
      sequence: float32
  - name: lora_6_B_weight
    sequence:
      sequence: float32
  - name: lora_6_rank
    dtype: int64
  - name: lora_6_alpha
    dtype: float64
  - name: lora_7_name
    dtype: string
  - name: lora_7_A_weight
    sequence:
      sequence: float32
  - name: lora_7_B_weight
    sequence:
      sequence: float32
  - name: lora_7_rank
    dtype: int64
  - name: lora_7_alpha
    dtype: float64
  - name: lora_8_name
    dtype: string
  - name: lora_8_A_weight
    sequence:
      sequence: float32
  - name: lora_8_B_weight
    sequence:
      sequence: float32
  - name: lora_8_rank
    dtype: int64
  - name: lora_8_alpha
    dtype: float64
  - name: lora_9_name
    dtype: string
  - name: lora_9_A_weight
    sequence:
      sequence: float32
  - name: lora_9_B_weight
    sequence:
      sequence: float32
  - name: lora_9_rank
    dtype: int64
  - name: lora_9_alpha
    dtype: float64
  - name: lora_10_name
    dtype: string
  - name: lora_10_A_weight
    sequence:
      sequence: float32
  - name: lora_10_B_weight
    sequence:
      sequence: float32
  - name: lora_10_rank
    dtype: int64
  - name: lora_10_alpha
    dtype: float64
  - name: lora_11_name
    dtype: string
  - name: lora_11_A_weight
    sequence:
      sequence: float32
  - name: lora_11_B_weight
    sequence:
      sequence: float32
  - name: lora_11_rank
    dtype: int64
  - name: lora_11_alpha
    dtype: float64
  - name: lora_12_name
    dtype: string
  - name: lora_12_A_weight
    sequence:
      sequence: float32
  - name: lora_12_B_weight
    sequence:
      sequence: float32
  - name: lora_12_rank
    dtype: int64
  - name: lora_12_alpha
    dtype: float64
  - name: lora_13_name
    dtype: string
  - name: lora_13_A_weight
    sequence:
      sequence: float32
  - name: lora_13_B_weight
    sequence:
      sequence: float32
  - name: lora_13_rank
    dtype: int64
  - name: lora_13_alpha
    dtype: float64
  - name: lora_14_name
    dtype: string
  - name: lora_14_A_weight
    sequence:
      sequence: float32
  - name: lora_14_B_weight
    sequence:
      sequence: float32
  - name: lora_14_rank
    dtype: int64
  - name: lora_14_alpha
    dtype: float64
  splits:
  - name: train
    num_bytes: 2561357508
    num_examples: 264
  download_size: 1724766354
  dataset_size: 2561357508
configs:
- config_name: mistral-7b-v0.1-sft
  data_files:
  - split: train
    path: mistral-7b-v0.1-sft/train-*
- config_name: stable-diffusion-1.5
  data_files:
  - split: train
    path: stable-diffusion-1.5/train-*
---
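Each row in either configuration describes one layer: its `pre_ft_weight` matrix plus fifteen LoRA checkpoints (`lora_0` … `lora_14`), each stored as an `A` factor, a `B` factor, a `rank`, and an `alpha`. Below is a minimal sketch of loading a row and merging one LoRA checkpoint back into the pre-fine-tuning weight. The repository id is a placeholder (substitute the actual repo path), and the `(alpha / rank) * B @ A` merge assumes the standard PEFT scaling convention with `A` of shape `(rank, in_features)` and `B` of shape `(out_features, rank)`; if the stored matrices are laid out differently, transpose accordingly.

```python
import numpy as np
from datasets import load_dataset

# Placeholder repo id -- replace with the actual dataset repository path.
# Streaming avoids downloading the full multi-GB split up front.
ds = load_dataset(
    "<org>/<dataset-repo>",
    "mistral-7b-v0.1-sft",
    split="train",
    streaming=True,
)

row = next(iter(ds))

# Pre-fine-tuning weight for this layer and the first LoRA checkpoint.
W0 = np.asarray(row["pre_ft_weight"], dtype=np.float32)
A = np.asarray(row["lora_0_A_weight"], dtype=np.float32)
B = np.asarray(row["lora_0_B_weight"], dtype=np.float32)
rank = row["lora_0_rank"]
alpha = row["lora_0_alpha"]

# Standard PEFT-style merge (an assumption about the storage convention):
# delta_W = (alpha / rank) * B @ A, then W_merged = W0 + delta_W.
delta_W = (alpha / rank) * (B @ A)
W_merged = W0 + delta_W

print(row["task_name"], row["layer_name"], W0.shape, A.shape, B.shape, W_merged.shape)
```

The same pattern applies to the `stable-diffusion-1.5` configuration; only the `lora_*_alpha` fields switch from `int64` to `float64`.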