hpo.py

from clearml import Task
from clearml.automation import HyperParameterOptimizer, UniformParameterRange
from clearml.automation.optuna import OptimizerOptuna

# Connecting ClearML with the current process,
# from here on everything is logged automatically
task = Task.init(project_name='Hyper-Parameter Optimization',
                 task_name='YOLOv5',
                 task_type=Task.TaskTypes.optimizer,
                 reuse_last_task_id=False)

# Example use case:
optimizer = HyperParameterOptimizer(
    # This is the experiment we want to optimize
    base_task_id='<your_template_task_id>',
    # here we define the hyper-parameters to optimize
    # Notice: the parameter name should exactly match what you see in the UI: <section_name>/<parameter>
    # For example, if the base experiment has a section named "General"
    # with a parameter named "batch_size", it becomes "General/batch_size".
    # If the base task uses `argparse`, its arguments appear under the "Args" section,
    # so you would pass "Args/batch_size" instead (see the commented sketch below).
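    # For instance, a discrete search over an argparse-exposed batch size could (hypothetically) be written as:
    #   DiscreteParameterRange('Args/batch_size', values=[16, 32, 64]),
    # where DiscreteParameterRange is also importable from clearml.automation.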
    hyper_parameters=[
        UniformParameterRange('Hyperparameters/lr0', min_value=1e-5, max_value=1e-1),
        UniformParameterRange('Hyperparameters/lrf', min_value=0.01, max_value=1.0),
        UniformParameterRange('Hyperparameters/momentum', min_value=0.6, max_value=0.98),
        UniformParameterRange('Hyperparameters/weight_decay', min_value=0.0, max_value=0.001),
        UniformParameterRange('Hyperparameters/warmup_epochs', min_value=0.0, max_value=5.0),
        UniformParameterRange('Hyperparameters/warmup_momentum', min_value=0.0, max_value=0.95),
        UniformParameterRange('Hyperparameters/warmup_bias_lr', min_value=0.0, max_value=0.2),
        UniformParameterRange('Hyperparameters/box', min_value=0.02, max_value=0.2),
        UniformParameterRange('Hyperparameters/cls', min_value=0.2, max_value=4.0),
        UniformParameterRange('Hyperparameters/cls_pw', min_value=0.5, max_value=2.0),
        UniformParameterRange('Hyperparameters/obj', min_value=0.2, max_value=4.0),
        UniformParameterRange('Hyperparameters/obj_pw', min_value=0.5, max_value=2.0),
        UniformParameterRange('Hyperparameters/iou_t', min_value=0.1, max_value=0.7),
        UniformParameterRange('Hyperparameters/anchor_t', min_value=2.0, max_value=8.0),
        UniformParameterRange('Hyperparameters/fl_gamma', min_value=0.0, max_value=4.0),
        UniformParameterRange('Hyperparameters/hsv_h', min_value=0.0, max_value=0.1),
        UniformParameterRange('Hyperparameters/hsv_s', min_value=0.0, max_value=0.9),
        UniformParameterRange('Hyperparameters/hsv_v', min_value=0.0, max_value=0.9),
        UniformParameterRange('Hyperparameters/degrees', min_value=0.0, max_value=45.0),
        UniformParameterRange('Hyperparameters/translate', min_value=0.0, max_value=0.9),
        UniformParameterRange('Hyperparameters/scale', min_value=0.0, max_value=0.9),
        UniformParameterRange('Hyperparameters/shear', min_value=0.0, max_value=10.0),
        UniformParameterRange('Hyperparameters/perspective', min_value=0.0, max_value=0.001),
        UniformParameterRange('Hyperparameters/flipud', min_value=0.0, max_value=1.0),
        UniformParameterRange('Hyperparameters/fliplr', min_value=0.0, max_value=1.0),
        UniformParameterRange('Hyperparameters/mosaic', min_value=0.0, max_value=1.0),
        UniformParameterRange('Hyperparameters/mixup', min_value=0.0, max_value=1.0),
        UniformParameterRange('Hyperparameters/copy_paste', min_value=0.0, max_value=1.0)],
    # this is the objective metric we want to maximize/minimize
    objective_metric_title='metrics',
    objective_metric_series='mAP_0.5',
    # now we decide whether to maximize or minimize it (mAP is maximized here)
    objective_metric_sign='max',
    # let us limit the number of concurrent experiments;
    # this in turn makes sure we don't bombard the scheduler with experiments.
    # If an auto-scaler is connected, this, by proxy, also limits the number of machines spun up
    max_number_of_concurrent_tasks=1,
    # this is the optimizer class (actually doing the optimization)
    # Currently, we can choose from GridSearch, RandomSearch, OptimizerBOHB (Bayesian Optimization Hyper-Band) or OptimizerOptuna
    optimizer_class=OptimizerOptuna,
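    # For example, switching strategies would mean importing RandomSearch from clearml.automation
    # and passing it here instead (shown only as a sketch, not used in this script):
    #   optimizer_class=RandomSearch,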
    # If specified, only the top K performing Tasks will be kept, the others will be automatically archived
    save_top_k_tasks_only=5,
    compute_time_limit=None,
    total_max_jobs=20,
    min_iteration_per_job=None,
    max_iteration_per_job=None,
)

# report every 10 seconds, this is way too often, but we are testing here
# (set_report_period expects minutes, so 10 / 60 means 10 seconds)
optimizer.set_report_period(10 / 60)
# start_locally() runs all the optimizer tasks in the local environment, without using queues or agents.
# You can also pass a callback that fires every time a single job completes, e.g.:
#   optimizer.start_locally(job_complete_callback=job_complete_callback)
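
# A minimal sketch of such a callback. Assumption: the signature follows ClearML's own HPO example,
# which passes the job id, objective value, objective iteration, the job's parameters,
# and the id of the best-performing job so far.
def job_complete_callback(job_id, objective_value, objective_iteration, job_parameters, top_performance_job_id):
    print('Job completed!', job_id, objective_value, objective_iteration, job_parameters)
    if job_id == top_performance_job_id:
        print('New best objective reached: {}'.format(objective_value))
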
# set the time limit for the optimization process (2 hours)
optimizer.set_time_limit(in_minutes=120.0)
# Start the optimization process in the local environment
optimizer.start_locally()
# wait until the process is done (notice we are controlling the optimization process in the background)
optimizer.wait()
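# Optionally inspect the best runs before shutting down. Assumption: get_top_experiments()
# is available on HyperParameterOptimizer and returns the Task objects of the top-performing jobs.
top_experiments = optimizer.get_top_experiments(top_k=3)
print('Top experiment ids:', [t.id for t in top_experiments])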
# make sure background optimization stopped
optimizer.stop()

print('We are done, good bye')