Mirror of https://github.com/rasbt/LLMs-from-scratch.git (synced 2025-12-14 00:27:34 +00:00)
Remove unused params for hparam script (#710)

parent 190c66b3b0
commit 8b3e4b24b0
@@ -64,8 +64,7 @@ def evaluate_model(model, train_loader, val_loader, device, eval_iter):
 def train_model(model, train_loader, val_loader, optimizer, device,
-                n_epochs, eval_freq, eval_iter,
-                encoded_start_context, tokenizer, warmup_iters=10,
+                n_epochs, eval_iter, warmup_iters=10,
                 initial_lr=3e-05, min_lr=1e-6):
     global_step = 0
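The parameters that remain (warmup_iters, initial_lr, min_lr) are consumed inside train_model, whose body is not part of this diff. A minimal sketch of the kind of schedule they typically drive, assuming linear warmup from initial_lr to the optimizer's peak learning rate followed by cosine decay to min_lr; the helper name get_lr and the peak_lr/total_steps arguments are illustrative, not part of the script:

import math

def get_lr(global_step, total_steps, peak_lr,
           warmup_iters=10, initial_lr=3e-05, min_lr=1e-6):
    # Illustrative only: linear warmup to peak_lr, then cosine decay to min_lr.
    if global_step < warmup_iters:
        # Ramp the learning rate linearly over the first warmup_iters steps
        return initial_lr + global_step * (peak_lr - initial_lr) / warmup_iters
    # After warmup, follow a cosine curve from peak_lr down to min_lr
    progress = (global_step - warmup_iters) / max(1, total_steps - warmup_iters)
    return min_lr + (peak_lr - min_lr) * 0.5 * (1 + math.cos(math.pi * progress))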
@@ -192,9 +191,7 @@ if __name__ == "__main__":
     train_loss, val_loss = train_model(
         model, train_loader, val_loader, optimizer, device,
         n_epochs=HPARAM_CONFIG["n_epochs"],
-        eval_freq=5, eval_iter=1,
-        encoded_start_context=encoded_tensor,
-        tokenizer=tokenizer,
+        eval_iter=1,
         warmup_iters=HPARAM_CONFIG["warmup_iters"],
         initial_lr=HPARAM_CONFIG["initial_lr"],
         min_lr=HPARAM_CONFIG["min_lr"]
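Taken together, the call site reads as follows after this commit (reconstructed from the context and added lines above; HPARAM_CONFIG is defined elsewhere in the script):

train_loss, val_loss = train_model(
    model, train_loader, val_loader, optimizer, device,
    n_epochs=HPARAM_CONFIG["n_epochs"],
    eval_iter=1,
    warmup_iters=HPARAM_CONFIG["warmup_iters"],
    initial_lr=HPARAM_CONFIG["initial_lr"],
    min_lr=HPARAM_CONFIG["min_lr"]
)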