ValueError: optimizer got an empty parameter list #96

Open · yihahn opened this issue Nov 10, 2022 · 2 comments

yihahn commented Nov 10, 2022

from sklearn.model_selection import ParameterGrid
from auton_survival.models.dsm import DeepRecurrentSurvivalMachines

param_grid = {'k' : [3, 4, 6],
              'distribution' : ['LogNormal', 'Weibull'],
              'learning_rate' : [1e-4, 1e-3],
              'batch_size': [64, 128],
              'hidden': [50, 100],
              'layers': [3, 2, 1],
              'typ': ['LSTM', 'GRU', 'RNN'],
              'optim': ['Adam', 'SGD'],
             }
params = ParameterGrid(param_grid)

models = []
for param in params:
    model = DeepRecurrentSurvivalMachines(k = param['k'],
                                          distribution = param['distribution'],
                                          hidden = param['hidden'], 
                                          typ = param['typ'],
                                          layers = param['layers'])
    # The fit method is called to train the model
    model.fit(x_train, t_train, e_train, iters = 1, learning_rate=param['learning_rate'], 
             batch_size=param['batch_size'], optimizer=param['optim'])
    models.append([[model.compute_nll(x_valid, t_valid, e_valid), model]])

best_model = min(models)
model = best_model[0][1]

As soon as I ran the script above, I got the error below. What should I do to solve this problem?

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-...> in <module>
      8     # The fit method is called to train the model
      9     model.fit(x_train, t_train, e_train, iters = 1, learning_rate=param['learning_rate'],
---> 10              batch_size=param['batch_size'], optimizer=param['optim'])
     11     models.append([[model.compute_nll(x_valid, t_valid, e_valid), model]])
     12

~/data/nas125/hepa/codes/auton_survival/models/dsm/__init__.py in fit(self, x, t, e, vsize, val_data, iters, learning_rate, batch_size, elbo, optimizer)
    265                          elbo=elbo,
    266                          bs=batch_size,
--> 267                          random_seed=self.random_seed)
    268
    269     self.torch_model = model.eval()

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in train_dsm(model, x_train, t_train, e_train, x_valid, t_valid, e_valid, n_iter, lr, elbo, bs, random_seed)
    137                           n_iter=10000,
    138                           lr=1e-2,
--> 139                           thres=1e-4)
    140
    141   for r in range(model.risks):

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in pretrain_dsm(model, t_train, e_train, t_valid, e_valid, n_iter, lr, thres)
     61   premodel.double()
     62
---> 63   optimizer = get_optimizer(premodel, lr)
     64
     65   oldcost = float('inf')

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in get_optimizer(model, lr)
     43
     44   if model.optimizer == 'Adam':
---> 45     return torch.optim.Adam(model.parameters(), lr=lr)
     46   elif model.optimizer == 'SGD':
     47     return torch.optim.SGD(model.parameters(), lr=lr)

~/anaconda3/envs/ml/lib/python3.7/site-packages/torch/optim/adam.py in __init__(self, params, lr, betas, eps, weight_decay, amsgrad)
     40         defaults = dict(lr=lr, betas=betas, eps=eps,
     41                         weight_decay=weight_decay, amsgrad=amsgrad)
---> 42         super(Adam, self).__init__(params, defaults)
     43
     44     def __setstate__(self, state):

~/anaconda3/envs/ml/lib/python3.7/site-packages/torch/optim/optimizer.py in __init__(self, params, defaults)
     44         param_groups = list(params)
     45         if len(param_groups) == 0:
---> 46             raise ValueError("optimizer got an empty parameter list")
     47         if not isinstance(param_groups[0], dict):
     48             param_groups = [{'params': param_groups}]

ValueError: optimizer got an empty parameter list
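
For context, PyTorch raises this ValueError whenever an optimizer is handed a module whose .parameters() iterator is empty, i.e. none of the module's weights were registered as nn.Parameter attributes (or inside containers such as nn.ParameterList / nn.ParameterDict). A minimal, self-contained sketch of the same failure, using a hypothetical toy module unrelated to DSM:

import torch
import torch.nn as nn

class BrokenModule(nn.Module):
    # Keeps its weight in a plain Python dict, so nn.Module never registers it.
    def __init__(self):
        super().__init__()
        self.weights = {'w': torch.randn(3, 3, requires_grad=True)}

class WorkingModule(nn.Module):
    # Registers the weight as an nn.Parameter, so .parameters() can see it.
    def __init__(self):
        super().__init__()
        self.w = nn.Parameter(torch.randn(3, 3))

print(len(list(BrokenModule().parameters())))   # 0 -> building an optimizer would fail
print(len(list(WorkingModule().parameters())))  # 1 -> optimizer can be built

try:
    torch.optim.Adam(BrokenModule().parameters(), lr=1e-3)
except ValueError as err:
    print(err)  # "optimizer got an empty parameter list"

So the question is why premodel.parameters() comes back empty inside pretrain_dsm on this setup.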

chiragnagpal (Collaborator) commented

Interesting. Can you try removing the 'optim' hyperparameter from the grid and rerunning it? It should default to using Adam.
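
If it helps to confirm that default, a quick check (a sketch, assuming auton_survival is importable in the same environment) is to inspect the fit signature:

import inspect
from auton_survival.models.dsm import DeepRecurrentSurvivalMachines

# The printed signature shows the default value of the `optimizer`
# argument; per the comment above it is expected to be 'Adam'.
print(inspect.signature(DeepRecurrentSurvivalMachines.fit))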

yihahn (Author) commented Nov 11, 2022

Thank you for the rapid reply. I removed the 'optim' and 'batch_size' hyperparameters and ran the code again, but I still got the same error. Are there any other ways to fix this? By the way, the installed torch and torchvision versions are listed below.

pip list | grep torch
torch                         1.4.0
torchvision                   0.5.0
param_grid = {'k' : [3, 4, 6],
              'distribution' : ['LogNormal', 'Weibull'],
              'learning_rate' : [1e-4, 1e-3],
              'hidden': [50, 100],
              'layers': [3, 2, 1],
              'typ': ['LSTM', 'GRU', 'RNN']
             }
params = ParameterGrid(param_grid)

models = []
for param in params:
    model = DeepRecurrentSurvivalMachines(k = param['k'],
                                          distribution = param['distribution'],
                                          hidden = param['hidden'], 
                                          typ = param['typ'],
                                          layers = param['layers'])
    # The fit method is called to train the model
    model.fit(x_train, t_train, e_train, iters = 1, learning_rate=param['learning_rate'])
    models.append([[model.compute_nll(x_valid, t_valid, e_valid), model]])

best_model = min(models)
model = best_model[0][1]
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-17-4e3a1c40b7ae> in <module>
      7                                           layers = param['layers'])
      8     # The fit method is called to train the model
----> 9     model.fit(x_train, t_train, e_train, iters = 1, learning_rate=param['learning_rate'])
     10     models.append([[model.compute_nll(x_valid, t_valid, e_valid), model]])
     11 

~/data/nas125/hepa/codes/auton_survival/models/dsm/__init__.py in fit(self, x, t, e, vsize, val_data, iters, learning_rate, batch_size, elbo, optimizer)
    265                          elbo=elbo,
    266                          bs=batch_size,
--> 267                          random_seed=self.random_seed)
    268 
    269     self.torch_model = model.eval()

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in train_dsm(model, x_train, t_train, e_train, x_valid, t_valid, e_valid, n_iter, lr, elbo, bs, random_seed)
    137                           n_iter=10000,
    138                           lr=1e-2,
--> 139                           thres=1e-4)
    140 
    141   for r in range(model.risks):

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in pretrain_dsm(model, t_train, e_train, t_valid, e_valid, n_iter, lr, thres)
     61   premodel.double()
     62 
---> 63   optimizer = get_optimizer(premodel, lr)
     64 
     65   oldcost = float('inf')

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in get_optimizer(model, lr)
     43 
     44   if model.optimizer == 'Adam':
---> 45     return torch.optim.Adam(model.parameters(), lr=lr)
     46   elif model.optimizer == 'SGD':
     47     return torch.optim.SGD(model.parameters(), lr=lr)

~/anaconda3/envs/ml/lib/python3.7/site-packages/torch/optim/adam.py in __init__(self, params, lr, betas, eps, weight_decay, amsgrad)
     40         defaults = dict(lr=lr, betas=betas, eps=eps,
     41                         weight_decay=weight_decay, amsgrad=amsgrad)
---> 42         super(Adam, self).__init__(params, defaults)
     43 
     44     def __setstate__(self, state):

~/anaconda3/envs/ml/lib/python3.7/site-packages/torch/optim/optimizer.py in __init__(self, params, defaults)
     44         param_groups = list(params)
     45         if len(param_groups) == 0:
---> 46             raise ValueError("optimizer got an empty parameter list")
     47         if not isinstance(param_groups[0], dict):
     48             param_groups = [{'params': param_groups}]

ValueError: optimizer got an empty parameter list
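
Since both tracebacks fail at get_optimizer(premodel, lr) immediately after premodel.double() in pretrain_dsm, and torch 1.4.0 is fairly old, one way to narrow this down is to check whether parameters held in container types such as nn.ParameterDict stay visible to .parameters() under the installed version. This is only a sketch; it assumes (without confirmation from the snippet) that the DSM pretrain module stores its parameters in such a container, and it uses a toy stand-in rather than the real premodel:

import torch
import torch.nn as nn

print(torch.__version__)  # 1.4.0 in this environment

class ToyPremodel(nn.Module):
    # Toy stand-in that keeps its parameters in an nn.ParameterDict.
    def __init__(self):
        super().__init__()
        self.shape = nn.ParameterDict({'1': nn.Parameter(torch.ones(1))})
        self.scale = nn.ParameterDict({'1': nn.Parameter(torch.ones(1))})

toy = ToyPremodel()
toy.double()  # mirrors the premodel.double() call in pretrain_dsm
print(len(list(toy.parameters())))  # 2 means the parameters survive; 0 would point at the torch install

If the count comes back as 0, upgrading torch (and torchvision) to a more recent release and rerunning the grid search would be a reasonable next step.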
