From 0726b6084b4d7c62841a64784ce37b0600cfb6b6 Mon Sep 17 00:00:00 2001 From: Matthew Rodriguez Date: Wed, 8 Apr 2026 23:51:04 -0500 Subject: [PATCH 1/3] Add validation to ensure model outputs have names --- src/model_config_utils.cc | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/src/model_config_utils.cc b/src/model_config_utils.cc index 465ec0089..91bdac445 100644 --- a/src/model_config_utils.cc +++ b/src/model_config_utils.cc @@ -1866,6 +1866,12 @@ ValidateModelOutput( const inference::ModelOutput& io, int32_t max_batch_size, const std::string& platform) { + if (io.name().empty()) { + return Status( + Status::Code::INVALID_ARG, + "model output must have a name"); + } + RETURN_IF_ERROR(ValidateIOShape(io, max_batch_size, "model output ")); if ((platform != kTensorRTPlanPlatform) && io.is_shape_tensor()) { @@ -2549,5 +2555,21 @@ InstanceConfigSignature(const inference::ModelInstanceGroup& instance_config) config.set_count(1); return config.SerializeAsString(); } - +TEST(ModelConfigUtils, ValidateOutputEmptyName) { + // Create a ModelOutput object (this is a Protobuf object) + inference::ModelOutput io; + + // Purposefully DO NOT set the name (leave it empty) + io.set_data_type(inference::DataType::TYPE_FP32); + io.add_dims(1); + + // Call the function you just edited + // We use 0 for max_batch_size and a dummy platform name + Status status = ValidateModelOutput(io, 0, "tensorrt_plan"); + + // ASSERTION: We expect this to fail. + // If it returns Success, the test will fail and tell us our fix isn't working. 
+ EXPECT_FALSE(status.IsOk()) << "Error: Server allowed an output with an empty name!"; + EXPECT_EQ(status.Message(), "model output must have a name"); +} }} // namespace triton::core From c802d2499ab3ec31271eb631995155f985b7da13 Mon Sep 17 00:00:00 2001 From: mattrodriguez154 Date: Thu, 9 Apr 2026 00:37:46 -0500 Subject: [PATCH 2/3] Final cleanup of validation logic --- src/model_config_utils.cc | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/src/model_config_utils.cc b/src/model_config_utils.cc index 91bdac445..ae069f314 100644 --- a/src/model_config_utils.cc +++ b/src/model_config_utils.cc @@ -2549,6 +2549,7 @@ EquivalentInInstanceConfig( std::string InstanceConfigSignature(const inference::ModelInstanceGroup& instance_config) + { inference::ModelInstanceGroup config = instance_config; *config.mutable_name() = "[Normalized]"; @@ -2556,20 +2557,14 @@ InstanceConfigSignature(const inference::ModelInstanceGroup& instance_config) return config.SerializeAsString(); } TEST(ModelConfigUtils, ValidateOutputEmptyName) { - // Create a ModelOutput object (this is a Protobuf object) inference::ModelOutput io; - // Purposefully DO NOT set the name (leave it empty) io.set_data_type(inference::DataType::TYPE_FP32); io.add_dims(1); - // Call the function you just edited - // We use 0 for max_batch_size and a dummy platform name Status status = ValidateModelOutput(io, 0, "tensorrt_plan"); - // ASSERTION: We expect this to fail. - // If it returns Success, the test will fail and tell us our fix isn't working. 
- EXPECT_FALSE(status.IsOk()) << "Error: Server allowed an output with an empty name!"; + EXPECT_FALSE(status.IsOk()) << "Error: Server accepted an output with an empty name"; EXPECT_EQ(status.Message(), "model output must have a name"); } }} // namespace triton::core From 0a89daec7bc9cfe539c74d7f5740e406ed624927 Mon Sep 17 00:00:00 2001 From: mattrodriguez154 Date: Thu, 9 Apr 2026 00:43:09 -0500 Subject: [PATCH 3/3] Cleanup: removed unnecessary spaces and comments --- src/model_config_utils.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/model_config_utils.cc b/src/model_config_utils.cc index ae069f314..63742173e 100644 --- a/src/model_config_utils.cc +++ b/src/model_config_utils.cc @@ -2549,7 +2549,6 @@ EquivalentInInstanceConfig( std::string InstanceConfigSignature(const inference::ModelInstanceGroup& instance_config) - { inference::ModelInstanceGroup config = instance_config; *config.mutable_name() = "[Normalized]";