clearml.py 5.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139
  1. # Ultralytics YOLO 🚀, AGPL-3.0 license
  2. import re
  3. import matplotlib.image as mpimg
  4. import matplotlib.pyplot as plt
  5. from ultralytics.utils import LOGGER, SETTINGS, TESTS_RUNNING
  6. from ultralytics.utils.torch_utils import model_info_for_loggers
try:
    import clearml
    from clearml import Task
    from clearml.binding.frameworks.pytorch_bind import PatchPyTorchModelIO
    from clearml.binding.matplotlib_bind import PatchedMatplotlib

    assert hasattr(clearml, '__version__')  # verify package is not directory
    assert not TESTS_RUNNING  # do not log pytest
    assert SETTINGS['clearml'] is True  # verify integration is enabled
except (ImportError, AssertionError):
    # Any failure above (package missing, running under pytest, or the integration
    # switched off in SETTINGS) disables the integration: with clearml set to None,
    # the `callbacks` mapping at the bottom of this file is left empty.
    clearml = None
  17. def _log_debug_samples(files, title='Debug Samples') -> None:
  18. """
  19. Log files (images) as debug samples in the ClearML task.
  20. Args:
  21. files (list): A list of file paths in PosixPath format.
  22. title (str): A title that groups together images with the same values.
  23. """
  24. if task := Task.current_task():
  25. for f in files:
  26. if f.exists():
  27. it = re.search(r'_batch(\d+)', f.name)
  28. iteration = int(it.groups()[0]) if it else 0
  29. task.get_logger().report_image(title=title,
  30. series=f.name.replace(it.group(), ''),
  31. local_path=str(f),
  32. iteration=iteration)
  33. def _log_plot(title, plot_path) -> None:
  34. """
  35. Log an image as a plot in the plot section of ClearML.
  36. Args:
  37. title (str): The title of the plot.
  38. plot_path (str): The path to the saved image file.
  39. """
  40. img = mpimg.imread(plot_path)
  41. fig = plt.figure()
  42. ax = fig.add_axes([0, 0, 1, 1], frameon=False, aspect='auto', xticks=[], yticks=[]) # no ticks
  43. ax.imshow(img)
  44. Task.current_task().get_logger().report_matplotlib_figure(title=title,
  45. series='',
  46. figure=fig,
  47. report_interactive=False)
  48. def on_pretrain_routine_start(trainer):
  49. """Runs at start of pretraining routine; initializes and connects/ logs task to ClearML."""
  50. try:
  51. if task := Task.current_task():
  52. # Make sure the automatic pytorch and matplotlib bindings are disabled!
  53. # We are logging these plots and model files manually in the integration
  54. PatchPyTorchModelIO.update_current_task(None)
  55. PatchedMatplotlib.update_current_task(None)
  56. else:
  57. task = Task.init(project_name=trainer.args.project or 'YOLOv8',
  58. task_name=trainer.args.name,
  59. tags=['YOLOv8'],
  60. output_uri=True,
  61. reuse_last_task_id=False,
  62. auto_connect_frameworks={
  63. 'pytorch': False,
  64. 'matplotlib': False})
  65. LOGGER.warning('ClearML Initialized a new task. If you want to run remotely, '
  66. 'please add clearml-init and connect your arguments before initializing YOLO.')
  67. task.connect(vars(trainer.args), name='General')
  68. except Exception as e:
  69. LOGGER.warning(f'WARNING ⚠️ ClearML installed but not initialized correctly, not logging this run. {e}')
  70. def on_train_epoch_end(trainer):
  71. """Logs debug samples for the first epoch of YOLO training and report current training progress."""
  72. if task := Task.current_task():
  73. # Log debug samples
  74. if trainer.epoch == 1:
  75. _log_debug_samples(sorted(trainer.save_dir.glob('train_batch*.jpg')), 'Mosaic')
  76. # Report the current training progress
  77. for k, v in trainer.validator.metrics.results_dict.items():
  78. task.get_logger().report_scalar('train', k, v, iteration=trainer.epoch)
  79. def on_fit_epoch_end(trainer):
  80. """Reports model information to logger at the end of an epoch."""
  81. if task := Task.current_task():
  82. # You should have access to the validation bboxes under jdict
  83. task.get_logger().report_scalar(title='Epoch Time',
  84. series='Epoch Time',
  85. value=trainer.epoch_time,
  86. iteration=trainer.epoch)
  87. if trainer.epoch == 0:
  88. for k, v in model_info_for_loggers(trainer).items():
  89. task.get_logger().report_single_value(k, v)
  90. def on_val_end(validator):
  91. """Logs validation results including labels and predictions."""
  92. if Task.current_task():
  93. # Log val_labels and val_pred
  94. _log_debug_samples(sorted(validator.save_dir.glob('val*.jpg')), 'Validation')
  95. def on_train_end(trainer):
  96. """Logs final model and its name on training completion."""
  97. if task := Task.current_task():
  98. # Log final results, CM matrix + PR plots
  99. files = [
  100. 'results.png', 'confusion_matrix.png', 'confusion_matrix_normalized.png',
  101. *(f'{x}_curve.png' for x in ('F1', 'PR', 'P', 'R'))]
  102. files = [(trainer.save_dir / f) for f in files if (trainer.save_dir / f).exists()] # filter
  103. for f in files:
  104. _log_plot(title=f.stem, plot_path=f)
  105. # Report final metrics
  106. for k, v in trainer.validator.metrics.results_dict.items():
  107. task.get_logger().report_single_value(k, v)
  108. # Log the final model
  109. task.update_output_model(model_path=str(trainer.best), model_name=trainer.args.name, auto_delete_file=False)
# Map Ultralytics callback event names to the handlers above. Left empty when
# the guarded import at the top of the file failed (clearml is None), so the
# trainer registers no ClearML hooks.
callbacks = {
    'on_pretrain_routine_start': on_pretrain_routine_start,
    'on_train_epoch_end': on_train_epoch_end,
    'on_fit_epoch_end': on_fit_epoch_end,
    'on_val_end': on_val_end,
    'on_train_end': on_train_end} if clearml else {}