Update learning_rate_decay.py

Updating power from 1.0 to 0.5 for better tuning
Shreyansh Pathak 2023-04-01 19:11:57 +05:30 committed by GitHub
parent ec83d01dca
commit 68955489b9
1 changed file with 1 addition and 1 deletion

@@ -70,7 +70,7 @@ def get_learning_rate_decay_fn(params):
         global_step=global_step,
         decay_steps=params.decay_steps,
         end_learning_rate=params.end_learning_rate,
-        power=params.polynomial_power if 'polynomial_power' in params else 1.0,
+        power=params.polynomial_power if 'polynomial_power' in params else 0.5,
     )
     return polynomial_decay_fn
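
For context, polynomial decay interpolates the learning rate from its initial value down to end_learning_rate over decay_steps, and the power controls the shape of that curve: power=1.0 gives a straight linear decay, while the new default of 0.5 keeps the rate closer to its initial value for most of training and drops it more sharply near the end of the schedule. The snippet below is a minimal, framework-free sketch of that formula for illustration only; the function and argument names (polynomial_decay, initial_lr, global_step) are placeholders, not identifiers taken from this repository.

def polynomial_decay(initial_lr, global_step, decay_steps,
                     end_learning_rate=0.0, power=0.5):
    # Clamp the step so the rate never undershoots end_learning_rate.
    step = min(global_step, decay_steps)
    # Remaining fraction of the schedule, raised to `power`:
    # (initial - end) * (1 - step / decay_steps) ** power + end
    fraction = 1.0 - step / decay_steps
    return (initial_lr - end_learning_rate) * fraction ** power + end_learning_rate

# Comparing the old and new powers at the halfway point of a 10k-step schedule:
print(polynomial_decay(0.1, 5000, 10000, power=1.0))  # 0.05   (linear)
print(polynomial_decay(0.1, 5000, 10000, power=0.5))  # ~0.0707 (slower early decay)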