Hi, I am facing trouble using the LambdaLR scheduler. The details of the error are shown below:
Traceback (most recent call last):
File "latent-diffusion/main.py", line 720, in
trainer.fit(model, data)
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py", line 770, in fit
self._call_and_handle_interrupt(
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py", line 721, in _call_and_handle_interrupt
return self.strategy.launcher.launch(trainer_fn, *args, trainer=self, **kwargs)
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/strategies/launchers/subprocess_script.py", line 93, in launch
return function(*args, **kwargs)
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py", line 811, in _fit_impl
results = self._run(model, ckpt_path=self.ckpt_path)
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py", line 1217, in _run
self.strategy.setup(self)
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/strategies/ddp.py", line 179, in setup
self.setup_optimizers(trainer)
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/strategies/strategy.py", line 128, in setup_optimizers
self.optimizers, self.lr_scheduler_configs, self.optimizer_frequencies = _init_optimizers_and_lr_schedulers(
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/core/optimizer.py", line 195, in _init_optimizers_and_lr_schedulers
_validate_scheduler_api(lr_scheduler_configs, model)
File "/home/edsr/anaconda3/envs/ldm/lib/python3.8/site-packages/pytorch_lightning/core/optimizer.py", line 350, in _validate_scheduler_api
raise MisconfigurationException(
pytorch_lightning.utilities.exceptions.MisconfigurationException: The provided lr scheduler LambdaLR doesn't follow PyTorch's LRScheduler API. You should override the LightningModule.lr_scheduler_step hook with your own logic if you are using a custom LR scheduler.
Can anyone please help me with this? It would be much appreciated.
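In case it helps, here is my understanding of what the error message is asking for: a minimal, hypothetical sketch of overriding the lr_scheduler_step hook in the LightningModule. The class name, dummy layer, learning rate, and lr_lambda below are placeholders, not the actual latent-diffusion code, and the exact hook signature depends on the pytorch_lightning version.

```python
# Hypothetical sketch only -- names and hyperparameters are placeholders,
# not the actual latent-diffusion configuration.
import pytorch_lightning as pl
import torch
from torch.optim.lr_scheduler import LambdaLR


class MyDiffusionModule(pl.LightningModule):
    def __init__(self):
        super().__init__()
        # Dummy layer so the optimizer has parameters to work with.
        self.layer = torch.nn.Linear(4, 4)

    def configure_optimizers(self):
        opt = torch.optim.AdamW(self.parameters(), lr=1e-4)
        # LambdaLR is the scheduler that the validation step complains about.
        scheduler = LambdaLR(opt, lr_lambda=lambda step: 1.0)
        return [opt], [{"scheduler": scheduler, "interval": "step", "frequency": 1}]

    # Signature as in pytorch_lightning 1.6.x; newer releases drop optimizer_idx.
    def lr_scheduler_step(self, scheduler, optimizer_idx, metric):
        # LambdaLR does not take a metric, so just step it.
        scheduler.step()
```

I am not sure whether this is the right approach for the latent-diffusion codebase, so any pointers would be appreciated.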