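# LitGPT pretraining config: trains a pythia-160m-sized model from scratch on
# custom plain-text files via the litgpt.data.TextFiles data module; only the
# pythia-160m tokenizer is reused (see tokenizer_dir below).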
model_name: pythia-160m
out_dir: out/custom-model
resume: false
data:
  class_path: litgpt.data.TextFiles   # reads plain-text files from the directory below
  init_args:
    train_data_path: train
    seed: 42
    num_workers: 4
train:
  save_interval: 1000        # optimizer steps between checkpoint saves
  log_interval: 1
  global_batch_size: 512     # effective batch size; accumulation = global / (micro * devices)
  micro_batch_size: 4        # samples per device per forward/backward pass
  lr_warmup_steps: 2000
  max_tokens: 150000000      # total training budget: 150M tokens
  tie_embeddings: false      # keep input embeddings and LM head weights separate
  learning_rate: 0.0004
  weight_decay: 0.1
  beta1: 0.9
  beta2: 0.95
  max_norm: 1.0              # gradient-clipping threshold
  min_lr: 4.0e-05            # floor of the learning-rate decay schedule
eval:
  interval: 1000             # optimizer steps between validation runs
  max_iters: 100             # batches per validation run
devices: auto                # use all available accelerators
tokenizer_dir: checkpoints/EleutherAI/pythia-160m
logger_name: tensorboard
seed: 42
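# Usage sketch (filename and commands are illustrative, not part of this file;
# they assume the LitGPT CLI and the default checkpoints/ download location):
#   litgpt download EleutherAI/pythia-160m   # provides the tokenizer under checkpoints/EleutherAI/pythia-160m
#   litgpt pretrain --config pretrain.yaml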