Codex_Prime/configs/model_config.yaml
model:
  name: "meta-llama/Llama-2-8b-chat-hf"
  max_length: 2048

training:
  batch_size: 4
  learning_rate: 2e-5
  num_epochs: 3
  save_every: 1000
  seed: 42

data:
  train_path: "data/processed/train"
  val_path: "data/processed/val"
  test_path: "data/processed/test"

generation:
  max_length: 512
  temperature: 0.7
  top_k: 50
  top_p: 0.95
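Below is a minimal sketch, not part of the repository, of how this config might be loaded and its generation block applied with PyYAML and Hugging Face transformers; the script path, the use of AutoModelForCausalLM/AutoTokenizer, and the example prompt are assumptions rather than code from Codex_Prime.

import yaml
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the YAML config (path assumed relative to the repository root).
with open("configs/model_config.yaml") as f:
    cfg = yaml.safe_load(f)

# Instantiate the tokenizer and model named in the config.
tokenizer = AutoTokenizer.from_pretrained(cfg["model"]["name"])
model = AutoModelForCausalLM.from_pretrained(cfg["model"]["name"])

# Sample a completion using the generation parameters from the config.
inputs = tokenizer("Hello, world!", return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_length=cfg["generation"]["max_length"],
    do_sample=True,
    temperature=cfg["generation"]["temperature"],
    top_k=cfg["generation"]["top_k"],
    top_p=cfg["generation"]["top_p"],
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

The training keys (batch_size, learning_rate, num_epochs, save_every, seed) and data paths would be consumed the same way by whatever training script reads this file; they are plain scalars under cfg["training"] and cfg["data"] once the YAML is parsed.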