ct_optcon - 3.0.2 optimal control module.
NlpSolverSettings.h
Go to the documentation of this file.
1 /**********************************************************************************************************************
2 This file is part of the Control Toolbox (https://github.com/ethz-adrl/control-toolbox), copyright by ETH Zurich
3 Licensed under the BSD-2 license (see LICENSE file in main directory)
4 **********************************************************************************************************************/
5 
6 #pragma once
7 
#include <cstdint>  // uint8_t (underlying type of NlpSolverType)
#include <iostream>
#include <map>      // std::map used by NlpSolverSettings::solverToString
#include <string>   // std::string used throughout the settings classes

#include <boost/property_tree/ptree.hpp>
#include <boost/property_tree/info_parser.hpp>
11 
12 namespace ct {
13 namespace optcon {
14 
15 
//! The available types of NLP solvers.
enum class NlpSolverType : uint8_t
{
    IPOPT = 0,  //!< interior-point solver (IPOPT library)
    SNOPT = 1,  //!< SQP solver (SNOPT library)
    // NOTE(review): the extracted listing skips one line here — confirm against
    // the repository that no further enumerator (e.g. a num_types sentinel) was lost.
};
25 
33 {
34 public:
35  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
40  : scale_option_param_(0),
41  derivative_option_param_(1),
42  verify_level_param_(-1),
43  major_iteration_limit_param_(500),
44  minor_iteration_limit_param_(500),
45  iteration_limit_param_(5000),
46  major_optimality_tolerance_param_(1e-6),
47  major_feasibility_tolerance_param_(1e-6),
48  minor_feasibility_tolerance_param_(1e-6),
49  print_file_param_(0),
50  minor_print_level_param_(0),
51  major_print_level_param_(0),
52  new_basis_file_param_(0),
53  old_basis_file_param_(0),
54  backup_basis_file_param_(0),
55  line_search_tolerance_param_(0.9),
56  crash_option_(3),
57  hessian_updates_(5)
58  {
59  }
60 
79 
83  void print()
84  {
85  std::cout << "SNOPT settings:" << std::endl;
86  std::cout << "Max Major Iterations: " << major_iteration_limit_param_ << std::endl;
87  std::cout << "Max Minor Iterations: " << minor_iteration_limit_param_ << std::endl;
88  }
89 
95  bool parametersOk() const { return true; }
103  void load(const std::string& filename, bool verbose = true, const std::string& ns = "dms.solver.snopt")
104  {
105  boost::property_tree::ptree pt;
106  boost::property_tree::read_info(filename, pt);
107 
108  minor_iteration_limit_param_ = pt.get<unsigned int>(ns + ".MaxMinorIterations");
109  major_iteration_limit_param_ = pt.get<unsigned int>(ns + ".MaxMajorIterations");
110  minor_print_level_param_ = pt.get<unsigned int>(ns + ".MinorPrintLevelVerbosity");
111  major_print_level_param_ = pt.get<unsigned int>(ns + ".MajorPrintLevelVerbosity");
112  major_optimality_tolerance_param_ = pt.get<double>(ns + ".OptimalityTolerance");
113  }
114 };
115 
123 {
124 public:
129  : tol_(1e-8),
130  constr_viol_tol_(1e-4),
131  max_iter_(200),
132  restoTol_(1e-7),
133  acceptableTol_(1e-6),
134  restoAcceptableTol_(1e-7),
135  linear_scaling_on_demand_("yes"),
136  hessian_approximation_("limited-memory"),
137  nlp_scaling_method_("gradient-based"),
138  printLevel_(5),
139  print_user_options_("no"),
140  print_frequency_iter_(1),
141  printInfoString_("no"),
142  derivativeTest_("none"),
143  derivativeTestTol_(1e-4),
144  derivativeTestPerturbation_(1e-8),
145  point_perturbation_radius_(10),
146  checkDerivativesForNaninf_("no"),
147  derivativeTestPrintAll_("no"),
148  linearSystemScaling_("none"),
149  linear_solver_("mumps"),
150  jacobianApproximation_("finite-difference-values")
151  {
152  }
153 
154  double tol_;
157  double restoTol_;
162  std::string nlp_scaling_method_;
164  std::string print_user_options_;
166  std::string printInfoString_;
167  std::string derivativeTest_;
173  std::string linearSystemScaling_;
174  std::string linear_solver_;
176 
180  void print()
181  {
182  std::cout << "IPOPT SETTINGS: " << std::endl;
183  std::cout << "Using " << hessian_approximation_ << " hessian approximation" << std::endl;
184  std::cout << "MaxIterations: " << max_iter_ << std::endl;
185  }
186 
192  bool parametersOk() const { return true; }
200  void load(const std::string& filename, bool verbose = true, const std::string& ns = "dms.nlp.ipopt")
201  {
202  boost::property_tree::ptree pt;
203  boost::property_tree::read_info(filename, pt); //
204  max_iter_ = pt.get<unsigned int>(ns + ".MaxIterations");
205  bool checkDerivatives = pt.get<bool>(ns + ".CheckDerivatives");
206  if (checkDerivatives)
207  derivativeTest_ = "first-order";
208  if (!checkDerivatives)
209  derivativeTest_ = "none";
210  bool exactHessian = pt.get<bool>(ns + ".ExactHessian");
211  if (exactHessian)
212  hessian_approximation_ = "exact";
213  if (!exactHessian)
214  hessian_approximation_ = "limited-memory";
215 
216  printLevel_ = pt.get<unsigned int>(ns + ".Verbosity");
217  tol_ = pt.get<double>(ns + ".OptimalityTolerance");
218 
219  setParameterIfExists(pt, jacobianApproximation_, "jacobianApproximation", ns);
220  setParameterIfExists(pt, restoAcceptableTol_, "restoAcceptableTol", ns);
221  setParameterIfExists(pt, restoAcceptableTol_, "restoAcceptableTol", ns);
222  setParameterIfExists(pt, constr_viol_tol_, "constr_viol_tol", ns);
223  setParameterIfExists(pt, restoTol_, "restoTol", ns);
224  setParameterIfExists(pt, acceptableTol_, "acceptableTol", ns);
225  setParameterIfExists(pt, restoAcceptableTol_, "restoAcceptableTol", ns);
226  setParameterIfExists(pt, linear_scaling_on_demand_, "linear_scaling_on_demand", ns);
227  setParameterIfExists(pt, nlp_scaling_method_, "nlp_scaling_method", ns);
228  setParameterIfExists(pt, print_user_options_, "print_user_options", ns);
229  setParameterIfExists(pt, print_frequency_iter_, "print_frequency_iter_", ns);
230  setParameterIfExists(pt, printInfoString_, "printInfoString", ns);
231  setParameterIfExists(pt, derivativeTestTol_, "derivativeTestTol", ns);
232  setParameterIfExists(pt, derivativeTestPerturbation_, "derivativeTestPerturbation", ns);
233  setParameterIfExists(pt, point_perturbation_radius_, "point_perturbation_radius", ns);
234  setParameterIfExists(pt, checkDerivativesForNaninf_, "checkDerivativesForNaninf", ns);
235  setParameterIfExists(pt, derivativeTestPrintAll_, "derivativeTestPrintAll", ns);
236  setParameterIfExists(pt, linear_solver_, "linear_solver", ns);
237  setParameterIfExists(pt, linearSystemScaling_, "linearSystemScaling", ns);
238  setParameterIfExists(pt, jacobianApproximation_, "jacobianApproximation", ns);
239  }
240 
241  template <typename TYPE>
242  static void setParameterIfExists(const boost::property_tree::ptree& pt,
243  TYPE& param,
244  const std::string& param_name,
245  const std::string& ns)
246  {
247  const std::string full_param_name = ns + "." + param_name;
248  boost::optional<const boost::property_tree::ptree&> child = pt.get_child_optional(full_param_name);
249  if (child)
250  {
251  param = child->get_value<TYPE>();
252  }
253  }
254 };
255 
262 {
263 public:
268  : solverType_(NlpSolverType::IPOPT), useGeneratedCostGradient_(false), useGeneratedConstraintJacobian_(false)
269  {
270  }
271 
277 
281  void print()
282  {
283  std::cout << "=============================================================" << std::endl;
284  std::cout << "\tNLP Solver Settings: " << std::endl;
285  std::cout << "=============================================================" << std::endl;
286 
287  std::cout << "Using nlp solver: " << solverToString[solverType_] << std::endl;
288  if (useGeneratedCostGradient_)
289  std::cout << "Using generated Cost Gradient" << std::endl;
290  else
291  std::cout << "Using analytical cost Gradient" << std::endl;
292  if (useGeneratedConstraintJacobian_)
293  std::cout << "Using generated Constraints Jacobian" << std::endl;
294  else
295  std::cout << "Using anlyitical Constraints Jacobian" << std::endl;
296 
297  if (solverType_ == NlpSolverType::IPOPT)
298  ipoptSettings_.print();
299  else if (solverType_ == NlpSolverType::SNOPT)
300  snoptSettings_.print();
301  }
302 
308  bool parametersOk() const
309  {
310  if (solverType_ == NlpSolverType::IPOPT)
311  return ipoptSettings_.parametersOk();
312  else if (solverType_ == NlpSolverType::SNOPT)
313  return snoptSettings_.parametersOk();
314  else
315  return false;
316  }
317 
325  void load(const std::string& filename, bool verbose = true, const std::string& ns = "solver")
326  {
327  boost::property_tree::ptree pt;
328  boost::property_tree::read_info(filename, pt);
329 
330  solverType_ = static_cast<NlpSolverType>(pt.get<unsigned int>(ns + ".SolverType"));
331  useGeneratedCostGradient_ = pt.get<bool>(ns + ".UseGeneratedCostGradient");
332  useGeneratedConstraintJacobian_ = pt.get<bool>(ns + ".UseGeneratedConstraintJacobian");
333 
334  if (solverType_ == NlpSolverType::IPOPT)
335  ipoptSettings_.load(filename, verbose, ns + ".ipopt");
336  else if (solverType_ == NlpSolverType::SNOPT)
337  snoptSettings_.load(filename, verbose, ns + ".snopt");
338 
339  if (verbose)
340  {
341  std::cout << "Loaded NLP Solver settings from " << filename << ": " << std::endl;
342  print();
343  }
344  }
345 
346 private:
347  std::map<NlpSolverType, std::string> solverToString = {
348  {NlpSolverType::IPOPT, "IPOPT"}, {NlpSolverType::SNOPT, "SNOPT"}};
349 };
350 
351 
352 } // namespace optcon
353 } // namespace ct
std::string checkDerivativesForNaninf_
Definition: NlpSolverSettings.h:171
int new_basis_file_param_
Definition: NlpSolverSettings.h:73
bool parametersOk() const
Checks whether the settings are filled with meaningful values.
Definition: NlpSolverSettings.h:95
void print()
Prints out information about the settings.
Definition: NlpSolverSettings.h:180
int hessian_updates_
Definition: NlpSolverSettings.h:78
std::string linearSystemScaling_
Definition: NlpSolverSettings.h:173
double major_feasibility_tolerance_param_
Definition: NlpSolverSettings.h:68
void load(const std::string &filename, bool verbose=true, const std::string &ns="dms.solver.snopt")
Loads the settings from a .info file.
Definition: NlpSolverSettings.h:103
std::string hessian_approximation_
Definition: NlpSolverSettings.h:161
int derivative_option_param_
Definition: NlpSolverSettings.h:62
Contains the NLP solver settings.
Definition: NlpSolverSettings.h:261
double derivativeTestTol_
Definition: NlpSolverSettings.h:168
std::string printInfoString_
Definition: NlpSolverSettings.h:166
std::string derivativeTest_
Definition: NlpSolverSettings.h:167
std::string nlp_scaling_method_
Definition: NlpSolverSettings.h:162
bool parametersOk() const
Checks whether the settings are filled with meaningful values.
Definition: NlpSolverSettings.h:308
NlpSolverType
The available types of NLP solvers.
Definition: NlpSolverSettings.h:19
bool parametersOk() const
Checks whether the settings are filled with meaningful values.
Definition: NlpSolverSettings.h:192
int iteration_limit_param_
Definition: NlpSolverSettings.h:66
EIGEN_MAKE_ALIGNED_OPERATOR_NEW SnoptSettings()
Default constructor, sets the parameters to default values.
Definition: NlpSolverSettings.h:39
double point_perturbation_radius_
Definition: NlpSolverSettings.h:170
std::string linear_scaling_on_demand_
Definition: NlpSolverSettings.h:160
int backup_basis_file_param_
Definition: NlpSolverSettings.h:75
double restoAcceptableTol_
Definition: NlpSolverSettings.h:159
SnoptSolver settings. Details about the parameters can be found in the SNOPT documentation.
Definition: NlpSolverSettings.h:32
std::string derivativeTestPrintAll_
Definition: NlpSolverSettings.h:172
int print_file_param_
Definition: NlpSolverSettings.h:70
double constr_viol_tol_
Definition: NlpSolverSettings.h:155
double derivativeTestPerturbation_
Definition: NlpSolverSettings.h:169
void load(const std::string &filename, bool verbose=true, const std::string &ns="dms.nlp.ipopt")
Loads the settings from a .info file.
Definition: NlpSolverSettings.h:200
void print()
Prints out settings.
Definition: NlpSolverSettings.h:281
std::string linear_solver_
Definition: NlpSolverSettings.h:174
int old_basis_file_param_
Definition: NlpSolverSettings.h:74
double line_search_tolerance_param_
Definition: NlpSolverSettings.h:76
int max_iter_
Definition: NlpSolverSettings.h:156
IpoptSettings()
Default constructor, sets the parameters to default values.
Definition: NlpSolverSettings.h:128
double minor_feasibility_tolerance_param_
Definition: NlpSolverSettings.h:69
int print_frequency_iter_
Definition: NlpSolverSettings.h:165
int scale_option_param_
Definition: NlpSolverSettings.h:61
int minor_print_level_param_
Definition: NlpSolverSettings.h:71
IPOPT settings. Details about the parameters can be found in the IPOPT documentation.
Definition: NlpSolverSettings.h:122
NlpSolverType solverType_
Definition: NlpSolverSettings.h:272
int crash_option_
Definition: NlpSolverSettings.h:77
std::string print_user_options_
Definition: NlpSolverSettings.h:164
void load(const std::string &filename, bool verbose=true, const std::string &ns="solver")
Loads the settings from a .info file.
Definition: NlpSolverSettings.h:325
double tol_
Definition: NlpSolverSettings.h:154
double restoTol_
Definition: NlpSolverSettings.h:157
bool useGeneratedCostGradient_
Definition: NlpSolverSettings.h:273
std::string jacobianApproximation_
Definition: NlpSolverSettings.h:175
bool useGeneratedConstraintJacobian_
Definition: NlpSolverSettings.h:274
int printLevel_
Definition: NlpSolverSettings.h:163
void print()
Prints out information about the settings.
Definition: NlpSolverSettings.h:83
SnoptSettings snoptSettings_
Definition: NlpSolverSettings.h:275
IpoptSettings ipoptSettings_
Definition: NlpSolverSettings.h:276
NlpSolverSettings()
Default constructor, set default settings.
Definition: NlpSolverSettings.h:267
int verify_level_param_
Definition: NlpSolverSettings.h:63
const bool verbose
Definition: ConstraintComparison.h:18
double acceptableTol_
Definition: NlpSolverSettings.h:158
int minor_iteration_limit_param_
Definition: NlpSolverSettings.h:65
int major_print_level_param_
Definition: NlpSolverSettings.h:72
static void setParameterIfExists(const boost::property_tree::ptree &pt, TYPE &param, const std::string &param_name, const std::string &ns)
Definition: NlpSolverSettings.h:242
double major_optimality_tolerance_param_
Definition: NlpSolverSettings.h:67
int major_iteration_limit_param_
Definition: NlpSolverSettings.h:64