From 008f6eccf849439d1db24db502a281a1821b8066 Mon Sep 17 00:00:00 2001
From: Pablo Gonzalez
Date: Wed, 14 Jan 2026 23:36:45 -0500
Subject: [PATCH 1/9] Quick fix: Add user conf to yolo reference implementation

---
 .../yolo/yolo_loadgen.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/vision/classification_and_detection/yolo/yolo_loadgen.py b/vision/classification_and_detection/yolo/yolo_loadgen.py
index 5a93ff16ec..43410b8d4a 100644
--- a/vision/classification_and_detection/yolo/yolo_loadgen.py
+++ b/vision/classification_and_detection/yolo/yolo_loadgen.py
@@ -110,11 +110,17 @@ def main():
         default=None,
         help="Number of samples to run")
     parser.add_argument("--output", type=str, help="Directory for MLPerf logs")
+    parser.add_argument(
+        "--user_conf",
+        default="user.conf",
+        help="user config for user LoadGen settings such as target QPS",
+    )
 
     # mode flags
     mode_group = parser.add_mutually_exclusive_group(required=True)
     mode_group.add_argument("--AccuracyOnly", action="store_true")
     mode_group.add_argument("--PerformanceOnly", action="store_true")
+
 
     # scenario selection
     parser.add_argument(
@@ -152,6 +158,13 @@ def flush_queries(): pass
 
     settings = lg.TestSettings()
 
+    # Load user configuration
+    user_conf = os.path.abspath(args.user_conf)
+    if not os.path.exists(user_conf):
+        print("{} not found".format(user_conf))
+        sys.exit(1)
+    settings.FromConfig(user_conf, args.model_name, args.scenario)
+
     # scenario configurations
     scenario_map = {
         "SingleStream": lg.TestScenario.SingleStream,

From 4b25029219082b00dfc25d0f2cd4d68cbfd49f09 Mon Sep 17 00:00:00 2001
From: Pablo Gonzalez
Date: Thu, 15 Jan 2026 16:28:59 -0500
Subject: [PATCH 2/9] Add model_name to yolo implementation

---
 vision/classification_and_detection/yolo/yolo_loadgen.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/vision/classification_and_detection/yolo/yolo_loadgen.py b/vision/classification_and_detection/yolo/yolo_loadgen.py
index 43410b8d4a..e0dac3fbe9 100644
--- a/vision/classification_and_detection/yolo/yolo_loadgen.py
+++ b/vision/classification_and_detection/yolo/yolo_loadgen.py
@@ -115,6 +115,12 @@ def main():
         default="user.conf",
         help="user config for user LoadGen settings such as target QPS",
     )
+    parser.add_argument(
+        "--model-name",
+        type=str,
+        required=False,
+        default="yolo-v11"
+    )
 
     # mode flags
     mode_group = parser.add_mutually_exclusive_group(required=True)

From ef62852dcb3dbbe83ecd6c9e96443d116678d7da Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Thu, 15 Jan 2026 21:29:57 +0000
Subject: [PATCH 3/9] [Automated Commit] Format Codebase

---
 vision/classification_and_detection/yolo/yolo_loadgen.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/vision/classification_and_detection/yolo/yolo_loadgen.py b/vision/classification_and_detection/yolo/yolo_loadgen.py
index e0dac3fbe9..8be526300c 100644
--- a/vision/classification_and_detection/yolo/yolo_loadgen.py
+++ b/vision/classification_and_detection/yolo/yolo_loadgen.py
@@ -126,7 +126,6 @@ def main():
     mode_group = parser.add_mutually_exclusive_group(required=True)
     mode_group.add_argument("--AccuracyOnly", action="store_true")
     mode_group.add_argument("--PerformanceOnly", action="store_true")
-
 
     # scenario selection
     parser.add_argument(

From c4e2b0e98b93844e46f78e1a622ccf305697da47 Mon Sep 17 00:00:00 2001
From: Pablo Gonzalez
Date: Fri, 16 Jan 2026 09:32:12 -0500
Subject: [PATCH 4/9] Fix default name

---
 vision/classification_and_detection/yolo/yolo_loadgen.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vision/classification_and_detection/yolo/yolo_loadgen.py b/vision/classification_and_detection/yolo/yolo_loadgen.py
index 8be526300c..0499a3756f 100644
--- a/vision/classification_and_detection/yolo/yolo_loadgen.py
+++ b/vision/classification_and_detection/yolo/yolo_loadgen.py
@@ -119,7 +119,7 @@ def main():
         "--model-name",
         type=str,
         required=False,
-        default="yolo-v11"
+        default="yolo"
     )
 
     # mode flags

From d34e8da330806625393cb1f61dcef03bfa590952 Mon Sep 17 00:00:00 2001
From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com>
Date: Fri, 16 Jan 2026 20:04:03 +0530
Subject: [PATCH 5/9] Fix logging for YOLO benchmark (#2450)

---
 .../yolo/yolo_loadgen.py | 20 +++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/vision/classification_and_detection/yolo/yolo_loadgen.py b/vision/classification_and_detection/yolo/yolo_loadgen.py
index 0499a3756f..55ed25081a 100644
--- a/vision/classification_and_detection/yolo/yolo_loadgen.py
+++ b/vision/classification_and_detection/yolo/yolo_loadgen.py
@@ -1,4 +1,8 @@
-"""
+parser.add_argument(
+        "--enable-log-trace",
+        action="store_true",
+        help="Enable log tracing. This file can become quite large",
+    )"""
 YOLOv11 LoadGen MLPerf
 """
 import argparse
@@ -121,6 +125,11 @@ def main():
         required=False,
         default="yolo"
     )
+    parser.add_argument(
+        "--enable-log-trace",
+        action="store_true",
+        help="Enable log tracing. This file can become quite large",
+    )
 
     # mode flags
     mode_group = parser.add_mutually_exclusive_group(required=True)
@@ -197,9 +206,12 @@ def flush_queries(): pass
     # ...
 
     # configure logs
+    log_output_settings = lg.LogOutputSettings()
+    log_output_settings.outdir = log_path
+    log_output_settings.copy_summary_to_stdout = True
     log_settings = lg.LogSettings()
-    log_settings.log_output.outdir = log_path
-    log_settings.log_output.copy_summary_to_stdout = True
+    log_settings.log_output = log_output_settings
+    log_settings.enable_trace = args.enable_log_trace
 
     print(f"Starting MLPerf run")
     print(f"Scenario: {args.scenario}")
 ...
     print(f"Log directory: {log_path}")
     try:
-        lg.StartTestWithLogSettings(sut, qsl, settings, lg.LogSettings())
+        lg.StartTestWithLogSettings(sut, qsl, settings, log_settings)
         print(f"MLPerf run complete - cleaning up")
     except Exception as e:
         print(f"An error occured during StartTest: {e}")

From 97d502c84c1e574a6ca4f5a1d6bf5f877ffe1ad6 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Fri, 16 Jan 2026 14:34:45 +0000
Subject: [PATCH 6/9] [Automated Commit] Format Codebase

---
 .../yolo/yolo_loadgen.py | 32 +++++++++----------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/vision/classification_and_detection/yolo/yolo_loadgen.py b/vision/classification_and_detection/yolo/yolo_loadgen.py
index 55ed25081a..5bfc2993a3 100644
--- a/vision/classification_and_detection/yolo/yolo_loadgen.py
+++ b/vision/classification_and_detection/yolo/yolo_loadgen.py
@@ -1,22 +1,22 @@
+from ultralytics import YOLO
+import mlperf_loadgen as lg
+import numpy as np
+from pathlib import Path
+from datetime import datetime
+import time
+import struct
+import sys
+import os
+import json
+import array
+import argparse
 parser.add_argument(
-        "--enable-log-trace",
-        action="store_true",
-        help="Enable log tracing. This file can become quite large",
-    )"""
+    "--enable-log-trace",
+    action="store_true",
+    help="Enable log tracing. This file can become quite large",
+)"""
 YOLOv11 LoadGen MLPerf
 """
-import argparse
-import array
-import json
-import os
-import sys
-import struct
-import time
-from datetime import datetime
-from pathlib import Path
-import numpy as np
-import mlperf_loadgen as lg
-from ultralytics import YOLO
 
 
 # Standard YOLO (80 classes) to COCO (91 classes) mapping

From 09537da212a780471f563fc63ff9d1f6edea4910 Mon Sep 17 00:00:00 2001
From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com>
Date: Thu, 22 Jan 2026 11:13:01 +0530
Subject: [PATCH 7/9] Updates for YOLO (#2452)

* Updates for YOLO
* fix formatting
* Remove duplicate log tracing argument

Removed duplicate argument for enabling log tracing.

* Add performance_sample_count_override for yolo
* Update version check and filter scenarios for 6.0
* Remove min_query_count - interfering with runs

Remove minimum query count requirement for performance mode.
---
 loadgen/mlperf.conf                        |  1 +
 tools/submission/generate_final_report.py | 57 ++++++++++++++++++-
 .../submission_checker/constants.py        | 17 +++++-
 .../yolo/yolo_loadgen.py                   |  8 +--
 4 files changed, 74 insertions(+), 9 deletions(-)

diff --git a/loadgen/mlperf.conf b/loadgen/mlperf.conf
index 0d6b6c23b3..b2a6c8bc6b 100644
--- a/loadgen/mlperf.conf
+++ b/loadgen/mlperf.conf
@@ -28,6 +28,7 @@ deepseek-r1.*.performance_sample_count_override = 4388
 deepseek-r1-interactive.*.performance_sample_count_override = 4388
 whisper.*.performance_sample_count_override = 1633
 qwen3-vl-235b-a22b.*.performance_sample_count_override = 48289
+yolo.*.performance_sample_count_override = 5000
 
 # set to 0 to let entire sample set to be performance sample
 3d-unet.*.performance_sample_count_override = 0

diff --git a/tools/submission/generate_final_report.py b/tools/submission/generate_final_report.py
index 6be6656202..6bb64164f1 100644
--- a/tools/submission/generate_final_report.py
+++ b/tools/submission/generate_final_report.py
@@ -164,7 +164,7 @@ def main():
         ],
     ]
 
-    if args.version == "5.0":
+    if args.version == "6.0":
         filter_scenarios = {
             "datacenter": {
                 "resnet": ["Server", "Offline"],
@@ -185,7 +185,60 @@ def main():
                 "mixtral-8x7b": ["Server", "Offline"],
                 "rgat": ["Offline"],
                 "llama3.1-405b": ["Offline", "Server"],
-                "pointpainting": []
+                "pointpainting": [],
+                "deepseek-r1": ["Server", "Offline"],
+                "whisper": ["Offline"],
+            },
+            "edge": {
+                "resnet": ["SingleStream", "MultiStream", "Offline"],
+                "retinanet": ["SingleStream", "MultiStream", "Offline"],
+                "bert-99": ["SingleStream", "Offline"],
+                "bert-99.9": ["SingleStream", "Offline"],
+                "dlrm-v2-99": [],
+                "dlrm-v2-99.9": [],
+                "3d-unet-99": ["SingleStream", "Offline"],
+                "3d-unet-99.9": ["SingleStream", "Offline"],
+                "llama2-70b-99": [],
+                "llama2-70b-99.9": [],
+                "llama2-70b-interactive-99": [],
+                "llama2-70b-interactive-99.9": [],
+                "llama3.1-405b": [],
+                "gptj-99": ["SingleStream", "Offline"],
+                "gptj-99.9": ["SingleStream", "Offline"],
+                "rgat": [],
+                "stable-diffusion-xl": ["SingleStream", "Offline"],
+                "pointpainting": ["SingleStream"],
+                "whisper": ["Offline"],
+                "yolo-95": ["SingleStream", "MultiStream", "Offline"],
+                "yolo-99": ["SingleStream", "MultiStream", "Offline"],
+            },
+        }
+    elif args.version == "5.0":
+        filter_scenarios = {
+            "datacenter": {
+                "resnet": ["Server", "Offline"],
+                "retinanet": ["Server", "Offline"],
+                "bert-99": [],
+                "bert-99.9": [],
+                "dlrm-v2-99": ["Server", "Offline"],
+                "dlrm-v2-99.9": ["Server", "Offline"],
+                "3d-unet-99": ["Offline"],
+                "3d-unet-99.9": ["Offline"],
+                "gptj-99": ["Server", "Offline"],
"Offline"], + "stable-diffusion-xl": ["Server", "Offline"], + "llama2-70b-99": ["Server", "Offline"], + "llama2-70b-99.9": ["Server", "Offline"], + "llama2-70b-interactive-99": ["Server", "Offline"], + "llama2-70b-interactive-99.9": ["Server", "Offline"], + "mixtral-8x7b": ["Server", "Offline"], + "rgat": ["Offline"], + "llama3.1-405b": ["Offline", "Server"], + "rgat": ["Offline"], + "pointpainting": [], + "gpt-oss-120b": ["Offline"], + "qwen3-vl-235b-a22b": ["Server", "Offline"], + "dlrm-v3": ["Server", "Offline"], }, "edge": { "resnet": ["SingleStream", "MultiStream", "Offline"], diff --git a/tools/submission/submission_checker/constants.py b/tools/submission/submission_checker/constants.py index c81d8b76e5..13a3c80970 100644 --- a/tools/submission/submission_checker/constants.py +++ b/tools/submission/submission_checker/constants.py @@ -22,9 +22,10 @@ "wan-2.2-t2v-a14b", "qwen3-vl-235b-a22b", "dlrm-v3", + "yolo-95", + "yolo-99", ], "required-scenarios-datacenter": { - "retinanet": ["Server", "Offline"], "dlrm-v3": ["Server", "Offline"], "3d-unet-99": ["Offline"], "3d-unet-99.9": ["Offline"], @@ -60,6 +61,8 @@ "stable-diffusion-xl": ["SingleStream", "Offline"], "pointpainting": ["SingleStream"], "whisper": ["Offline"], + "yolo-95": ["SingleStream", "MultiStream", "Offline"], + "yolo-99": ["SingleStream", "MultiStream", "Offline"], }, "optional-scenarios-edge": {}, "required-scenarios-datacenter-edge": { @@ -83,6 +86,8 @@ "gpt-oss-120b": ["Offline"], "qwen3-vl-235b-a22b": ["Offline"], "dlrm-v3": ["Offline", "Server"], + "yolo-95": ["SingleStream", "MultiStream", "Offline"], + "yolo-99": ["SingleStream", "MultiStream", "Offline"], }, "optional-scenarios-datacenter-edge": { "llama2-70b-99": ["Interactive", "Server"], @@ -231,6 +236,8 @@ "gpt-oss-120b": 6396, "qwen3-vl-235b-a22b": 48289, "dlrm-v3": 34996, + "yolo-95": 5000, + "yolo-99": 5000, }, "dataset-size": { "resnet": 50000, @@ -256,6 +263,8 @@ "gpt-oss-120b": 6396, "qwen3-vl-235b-a22b": 48289, "dlrm-v3": 34996, + "yolo-95": 1525, + "yolo-99": 1525, }, # model_mapping.json is expected in the root directory of the # submission folder for open submissions and so the below dictionary is @@ -329,6 +338,8 @@ "gpt-oss-120b": {"SingleStream": 1024, "Server": 270336, "Offline": 1}, "qwen3-vl-235b-a22b": {"SingleStream": 1024, "Server": 270336, "Offline": 1}, "dlrm-v3": {"Server": 270336, "Offline": 1}, + "yolo-95": {"SingleStream": 1024, "MultiStream": 270336, "Offline": 1}, + "yolo-99": {"SingleStream": 1024, "MultiStream": 270336, "Offline": 1}, }, "models_TEST01": [ "resnet", @@ -343,6 +354,8 @@ "rgat", "pointpainting", "whisper", + "yolo-99", + "yolo-95", ], "models_TEST04": [ "resnet", @@ -1115,6 +1128,8 @@ "deepseek-r1": 4388, "whisper": 1633, "pointpainting": 6636, + "yolo-99": 1525, + "yolo-95": 1525, } SCENARIO_MAPPING = { diff --git a/vision/classification_and_detection/yolo/yolo_loadgen.py b/vision/classification_and_detection/yolo/yolo_loadgen.py index 5bfc2993a3..4efcf7acf4 100644 --- a/vision/classification_and_detection/yolo/yolo_loadgen.py +++ b/vision/classification_and_detection/yolo/yolo_loadgen.py @@ -10,11 +10,8 @@ import json import array import argparse -parser.add_argument( - "--enable-log-trace", - action="store_true", - help="Enable log tracing. 
-    help="Enable log tracing. This file can become quite large",
-)"""
+
+"""
 YOLOv11 LoadGen MLPerf
 """
 
@@ -199,7 +196,6 @@ def flush_queries(): pass
         settings.mode = lg.TestMode.PerformanceOnly
         # NOTE MLPerf requirement: minimum 10 minute run for performance
         settings.min_duration_ms = 600000
-        settings.min_query_count = 100
         # NOTE: user configs can override this in submission, this is the reference implementation so purposely left barebones
         # settings.target_qps = ...

From 5a61d79d4982d9a303140df7491fbf714886702e Mon Sep 17 00:00:00 2001
From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com>
Date: Fri, 23 Jan 2026 19:19:16 +0530
Subject: [PATCH 8/9] Update models from submission checker constants (#2464)

Removed 'stable-diffusion-xl' and 'dlrm-v3' from scenarios.
---
 tools/submission/submission_checker/constants.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/tools/submission/submission_checker/constants.py b/tools/submission/submission_checker/constants.py
index 13a3c80970..d3b73ec26f 100644
--- a/tools/submission/submission_checker/constants.py
+++ b/tools/submission/submission_checker/constants.py
@@ -32,7 +32,6 @@
         "llama3.1-8b": ["Offline"],
         "llama2-70b-99": ["Offline"],
         "llama2-70b-99.9": ["Offline"],
-        "stable-diffusion-xl": ["Server", "Offline"],
         "mixtral-8x7b": ["Server", "Offline"],
         "llama3.1-405b": ["Offline"],
         "rgat": ["Offline"],
@@ -40,7 +39,6 @@
         "deepseek-r1": ["Offline"],
         "gpt-oss-120b": ["Offline"],
         "qwen3-vl-235b-a22b": ["Server", "Offline"],
-        "dlrm-v3": ["Server", "Offline"],
     },
     "optional-scenarios-datacenter": {
         "llama2-70b-99": ["Interactive", "Server"],
@@ -52,7 +50,6 @@
     },
     "required-scenarios-edge": {
         "resnet": ["SingleStream", "MultiStream", "Offline"],
-        "retinanet": ["SingleStream", "MultiStream", "Offline"],
         "bert-99": ["SingleStream", "Offline"],
         "bert-99.9": ["SingleStream", "Offline"],
         "3d-unet-99": ["SingleStream", "Offline"],

From 1463483f53f386b94724ba47a13c0fc54bdaee94 Mon Sep 17 00:00:00 2001
From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com>
Date: Fri, 23 Jan 2026 19:19:26 +0530
Subject: [PATCH 9/9] Generate final report: Update filter scenarios for version 6.0 (#2465)

* Generate final report: Update filter scenarios for version 6.0
* Updations
---
 tools/submission/generate_final_report.py | 28 ++++++++++-------------
 1 file changed, 12 insertions(+), 16 deletions(-)

diff --git a/tools/submission/generate_final_report.py b/tools/submission/generate_final_report.py
index 6bb64164f1..ddbf7bc528 100644
--- a/tools/submission/generate_final_report.py
+++ b/tools/submission/generate_final_report.py
@@ -167,31 +167,28 @@ def main():
     if args.version == "6.0":
         filter_scenarios = {
             "datacenter": {
-                "resnet": ["Server", "Offline"],
-                "retinanet": ["Server", "Offline"],
+                "resnet": [],
                 "bert-99": [],
                 "bert-99.9": [],
-                "dlrm-v2-99": ["Server", "Offline"],
-                "dlrm-v2-99.9": ["Server", "Offline"],
+                "stable-diffusion-xl": [],
+                "pointpainting": [],
+                "dlrm-v3": ["Server", "Offline"],
                 "3d-unet-99": ["Offline"],
                 "3d-unet-99.9": ["Offline"],
-                "gptj-99": ["Server", "Offline"],
-                "gptj-99.9": ["Server", "Offline"],
-                "stable-diffusion-xl": ["Server", "Offline"],
-                "llama2-70b-99": ["Server", "Offline"],
-                "llama2-70b-99.9": ["Server", "Offline"],
-                "llama2-70b-interactive-99": ["Server", "Offline"],
-                "llama2-70b-interactive-99.9": ["Server", "Offline"],
+                "llama2-70b-99": ["Server", "Offline", "Interactive"],
+                "llama2-70b-99.9": ["Server", "Offline", "Interactive"],
                 "mixtral-8x7b": ["Server", "Offline"],
                 "rgat": ["Offline"],
"Interactive"], "llama3.1-405b": ["Offline", "Server"], - "pointpainting": [], - "deepseek-r1": ["Server", "Offline"], + "deepseek-r1": ["Server", "Offline", "Interactive"], "whisper": ["Offline"], + "gpt-oss-120b": ["Offline", "Interactive", "Server"], + "qwen3-vl-235b-a22b": ["Server", "Offline", "Interactive"], + "wan-2.2-t2v-a14b": ["Offline", "SingleStream"], }, "edge": { "resnet": ["SingleStream", "MultiStream", "Offline"], - "retinanet": ["SingleStream", "MultiStream", "Offline"], "bert-99": ["SingleStream", "Offline"], "bert-99.9": ["SingleStream", "Offline"], "dlrm-v2-99": [], @@ -203,8 +200,7 @@ def main(): "llama2-70b-interactive-99": [], "llama2-70b-interactive-99.9": [], "llama3.1-405b": [], - "gptj-99": ["SingleStream", "Offline"], - "gptj-99.9": ["SingleStream", "Offline"], + "llama3.1-8b-edge": ["SingleStream", "Offline"], "rgat": [], "stable-diffusion-xl": ["SingleStream", "Offline"], "pointpainting": ["SingleStream"],