Mirror of https://github.com/ultralytics/yolov5.git, synced 2025-06-03 14:49:29 +08:00.
* Add license line to .github/ISSUE_TEMPLATE/bug-report.yml * Add license line to .github/ISSUE_TEMPLATE/config.yml * Add license line to .github/ISSUE_TEMPLATE/feature-request.yml * Add license line to .github/ISSUE_TEMPLATE/question.yml * Add license line to .github/dependabot.yml * Add license line to .github/workflows/ci-testing.yml * Add license line to .github/workflows/cla.yml * Add license line to .github/workflows/codeql-analysis.yml * Add license line to .github/workflows/docker.yml * Add license line to .github/workflows/format.yml * Add license line to .github/workflows/greetings.yml * Add license line to .github/workflows/links.yml * Add license line to .github/workflows/merge-main-into-prs.yml * Add license line to .github/workflows/stale.yml * Add license line to benchmarks.py * Add license line to classify/predict.py * Add license line to classify/train.py * Add license line to classify/val.py * Add license line to data/Argoverse.yaml * Add license line to data/GlobalWheat2020.yaml * Add license line to data/ImageNet.yaml * Add license line to data/ImageNet10.yaml * Add license line to data/ImageNet100.yaml * Add license line to data/ImageNet1000.yaml * Add license line to data/Objects365.yaml * Add license line to data/SKU-110K.yaml * Add license line to data/VOC.yaml * Add license line to data/VisDrone.yaml * Add license line to data/coco.yaml * Add license line to data/coco128-seg.yaml * Add license line to data/coco128.yaml * Add license line to data/hyps/hyp.Objects365.yaml * Add license line to data/hyps/hyp.VOC.yaml * Add license line to data/hyps/hyp.no-augmentation.yaml * Add license line to data/hyps/hyp.scratch-high.yaml * Add license line to data/hyps/hyp.scratch-low.yaml * Add license line to data/hyps/hyp.scratch-med.yaml * Add license line to data/xView.yaml * Add license line to detect.py * Add license line to export.py * Add license line to hubconf.py * Add license line to models/common.py * Add license line to 
models/experimental.py * Add license line to models/hub/anchors.yaml * Add license line to models/hub/yolov3-spp.yaml * Add license line to models/hub/yolov3-tiny.yaml * Add license line to models/hub/yolov3.yaml * Add license line to models/hub/yolov5-bifpn.yaml * Add license line to models/hub/yolov5-fpn.yaml * Add license line to models/hub/yolov5-p2.yaml * Add license line to models/hub/yolov5-p34.yaml * Add license line to models/hub/yolov5-p6.yaml * Add license line to models/hub/yolov5-p7.yaml * Add license line to models/hub/yolov5-panet.yaml * Add license line to models/hub/yolov5l6.yaml * Add license line to models/hub/yolov5m6.yaml * Add license line to models/hub/yolov5n6.yaml * Add license line to models/hub/yolov5s-LeakyReLU.yaml * Add license line to models/hub/yolov5s-ghost.yaml * Add license line to models/hub/yolov5s-transformer.yaml * Add license line to models/hub/yolov5s6.yaml * Add license line to models/hub/yolov5x6.yaml * Add license line to models/segment/yolov5l-seg.yaml * Add license line to models/segment/yolov5m-seg.yaml * Add license line to models/segment/yolov5n-seg.yaml * Add license line to models/segment/yolov5s-seg.yaml * Add license line to models/segment/yolov5x-seg.yaml * Add license line to models/tf.py * Add license line to models/yolo.py * Add license line to models/yolov5l.yaml * Add license line to models/yolov5m.yaml * Add license line to models/yolov5n.yaml * Add license line to models/yolov5s.yaml * Add license line to models/yolov5x.yaml * Add license line to pyproject.toml * Add license line to segment/predict.py * Add license line to segment/train.py * Add license line to segment/val.py * Add license line to train.py * Add license line to utils/__init__.py * Add license line to utils/activations.py * Add license line to utils/augmentations.py * Add license line to utils/autoanchor.py * Add license line to utils/autobatch.py * Add license line to utils/aws/resume.py * Add license line to utils/callbacks.py * Add 
license line to utils/dataloaders.py * Add license line to utils/downloads.py * Add license line to utils/flask_rest_api/example_request.py * Add license line to utils/flask_rest_api/restapi.py * Add license line to utils/general.py * Add license line to utils/google_app_engine/app.yaml * Add license line to utils/loggers/__init__.py * Add license line to utils/loggers/clearml/clearml_utils.py * Add license line to utils/loggers/clearml/hpo.py * Add license line to utils/loggers/comet/__init__.py * Add license line to utils/loggers/comet/comet_utils.py * Add license line to utils/loggers/comet/hpo.py * Add license line to utils/loggers/wandb/wandb_utils.py * Add license line to utils/loss.py * Add license line to utils/metrics.py * Add license line to utils/plots.py * Add license line to utils/segment/augmentations.py * Add license line to utils/segment/dataloaders.py * Add license line to utils/segment/general.py * Add license line to utils/segment/loss.py * Add license line to utils/segment/metrics.py * Add license line to utils/segment/plots.py * Add license line to utils/torch_utils.py * Add license line to utils/triton.py * Add license line to val.py * Auto-format by https://ultralytics.com/actions * Update ImageNet1000.yaml Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com> * Auto-format by https://ultralytics.com/actions --------- Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com> Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
91 lines
5.2 KiB
Python
91 lines
5.2 KiB
Python
# Ultralytics YOLOv5 🚀, AGPL-3.0 license
|
|
|
|
from clearml import Task
|
|
|
|
# Connecting ClearML with the current process,
|
|
# from here on everything is logged automatically
|
|
from clearml.automation import HyperParameterOptimizer, UniformParameterRange
|
|
from clearml.automation.optuna import OptimizerOptuna
|
|
|
|
# Register this process with ClearML as an optimizer-type task so the
# hyper-parameter search itself is tracked and logged in the ClearML UI.
# reuse_last_task_id=False forces a fresh task on every run.
_task_settings = {
    "project_name": "Hyper-Parameter Optimization",
    "task_name": "YOLOv5",
    "task_type": Task.TaskTypes.optimizer,
    "reuse_last_task_id": False,
}
task = Task.init(**_task_settings)
|
|
|
|
# Example use case: search the YOLOv5 training hyper-parameter space.
#
# Each entry below is (parameter name, lower bound, upper bound). The full
# parameter path handed to ClearML must exactly match what the UI shows for
# the base experiment, i.e. "<section_name>/<parameter>". YOLOv5 logs its
# training hyper-parameters under the "Hyperparameters" section; plain
# argparse arguments would appear under "Args" instead (e.g. "Args/batch_size").
_search_space = [
    ("lr0", 1e-5, 1e-1),
    ("lrf", 0.01, 1.0),
    ("momentum", 0.6, 0.98),
    ("weight_decay", 0.0, 0.001),
    ("warmup_epochs", 0.0, 5.0),
    ("warmup_momentum", 0.0, 0.95),
    ("warmup_bias_lr", 0.0, 0.2),
    ("box", 0.02, 0.2),
    ("cls", 0.2, 4.0),
    ("cls_pw", 0.5, 2.0),
    ("obj", 0.2, 4.0),
    ("obj_pw", 0.5, 2.0),
    ("iou_t", 0.1, 0.7),
    ("anchor_t", 2.0, 8.0),
    ("fl_gamma", 0.0, 4.0),
    ("hsv_h", 0.0, 0.1),
    ("hsv_s", 0.0, 0.9),
    ("hsv_v", 0.0, 0.9),
    ("degrees", 0.0, 45.0),
    ("translate", 0.0, 0.9),
    ("scale", 0.0, 0.9),
    ("shear", 0.0, 10.0),
    ("perspective", 0.0, 0.001),
    ("flipud", 0.0, 1.0),
    ("fliplr", 0.0, 1.0),
    ("mosaic", 0.0, 1.0),
    ("mixup", 0.0, 1.0),
    ("copy_paste", 0.0, 1.0),
]

optimizer = HyperParameterOptimizer(
    # The template experiment whose hyper-parameters will be mutated.
    base_task_id="<your_template_task_id>",
    hyper_parameters=[
        UniformParameterRange(f"Hyperparameters/{name}", min_value=low, max_value=high)
        for name, low, high in _search_space
    ],
    # Objective: maximize the validation mAP@0.5 scalar reported by training.
    objective_metric_title="metrics",
    objective_metric_series="mAP_0.5",
    objective_metric_sign="max",
    # Run one experiment at a time so the scheduler (and, by proxy, any
    # connected auto-scaler) is not flooded with jobs.
    max_number_of_concurrent_tasks=1,
    # Search strategy; GridSearch, RandomSearch or OptimizerBOHB also work here.
    optimizer_class=OptimizerOptuna,
    # Keep only the top 5 performing tasks; the rest are archived automatically.
    save_top_k_tasks_only=5,
    compute_time_limit=None,
    total_max_jobs=20,
    min_iteration_per_job=None,
    max_iteration_per_job=None,
)
|
|
|
|
# Report progress every 10 seconds (10/60 of a minute). That is far too often
# for real use, but convenient while testing the setup.
optimizer.set_report_period(10 / 60)
# You can also use the line below instead to run all the optimizer tasks locally, without using queues or agent
# an_optimizer.start_locally(job_complete_callback=job_complete_callback)
# Cap the whole optimization process at 2 hours.
optimizer.set_time_limit(in_minutes=120.0)
# Launch the optimization; start_locally() runs the controller in this
# environment (in the background) rather than enqueueing it for an agent.
optimizer.start_locally()
# Block until the optimization finishes or the time limit above is reached
# (the controller itself keeps running in the background while we wait).
optimizer.wait()
# Make sure the background optimization has fully stopped before exiting.
optimizer.stop()

print("We are done, good bye")