@@ -680,22 +680,22 @@ static void AddInitializerAsInput(onnxruntime::Graph& dst_graph,
 // To check if the input parameters of a DQ or Q node are quantization parameters
 // Scale and Zero point parameters are quantization parameters
 static bool IsQuantizationParameter(const std::string& initializer_name,
-    const onnxruntime::GraphViewer& src_graph) {
-    // Check if this initializer is used as scale or zero_point in any DQ/Q node
-    for (auto& node_idx : src_graph.GetNodesInTopologicalOrder()) {
-        const auto* node = src_graph.GetNode(node_idx);
-        if (node->OpType() == "DequantizeLinear" || node->OpType() == "QuantizeLinear") {
-            const auto& input_defs = node->InputDefs();
-            // Check if this initializer is used as scale (input 1) or zero_point (input 2)
-            if (input_defs.size() >= 2 && input_defs[1]->Name() == initializer_name) {
-                return true;  // This is a scale parameter
-            }
-            if (input_defs.size() >= 3 && input_defs[2]->Name() == initializer_name) {
-                return true;  // This is a zero_point parameter
-            }
-        }
+                                     const onnxruntime::GraphViewer& src_graph) {
+  // Check if this initializer is used as scale or zero_point in any DQ/Q node
+  for (auto& node_idx : src_graph.GetNodesInTopologicalOrder()) {
+    const auto* node = src_graph.GetNode(node_idx);
+    if (node->OpType() == "DequantizeLinear" || node->OpType() == "QuantizeLinear") {
+      const auto& input_defs = node->InputDefs();
+      // Check if this initializer is used as scale (input 1) or zero_point (input 2)
+      if (input_defs.size() >= 2 && input_defs[1]->Name() == initializer_name) {
+        return true;  // This is a scale parameter
+      }
+      if (input_defs.size() >= 3 && input_defs[2]->Name() == initializer_name) {
+        return true;  // This is a zero_point parameter
+      }
     }
-    return false;
+  }
+  return false;
 }

 // Creates a new model without the DQ/Q operators in the src graph.
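Both hunks in this change appear to be formatting-only; the removed and re-added lines carry the same tokens. For context, IsQuantizationParameter relies on the fixed input layout of QuantizeLinear/DequantizeLinear nodes: input 0 is the data tensor, input 1 the scale, and input 2 the optional zero point. Below is a minimal standalone sketch of that check, assuming a hypothetical SimpleNode struct in place of onnxruntime::GraphViewer and onnxruntime::Node (illustration only, not the ONNX Runtime API):

// Standalone sketch of the scale/zero-point detection above.
// SimpleNode is a hypothetical stand-in; the real code iterates
// src_graph.GetNodesInTopologicalOrder() and reads node->InputDefs().
#include <string>
#include <vector>

struct SimpleNode {
  std::string op_type;
  std::vector<std::string> input_names;  // 0 = data, 1 = scale, 2 = optional zero point
};

static bool IsQuantizationParameterSketch(const std::string& initializer_name,
                                          const std::vector<SimpleNode>& nodes) {
  for (const auto& node : nodes) {
    if (node.op_type != "DequantizeLinear" && node.op_type != "QuantizeLinear") {
      continue;
    }
    // Scale is always input 1 of a Q/DQ node.
    if (node.input_names.size() >= 2 && node.input_names[1] == initializer_name) {
      return true;
    }
    // Zero point, when present, is input 2.
    if (node.input_names.size() >= 3 && node.input_names[2] == initializer_name) {
      return true;
    }
  }
  return false;
}

For example, given a single node {"DequantizeLinear", {"w_quantized", "w_scale", "w_zero_point"}}, the sketch returns true for "w_scale" and "w_zero_point" and false for "w_quantized".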
@@ -866,31 +866,29 @@ Status CreateModelWithStrippedQDQNodes(const GraphViewer& src_graph,
     if (!init_with_data &&
         utils::HasExternalData(initializer_tensor) &&
         enable_ovep_weight_sharing) {
+      // Only convert to input if it's not a quantization parameter
+      bool is_quant_param = IsQuantizationParameter(name, src_graph);

-        // Only convert to input if it's not a quantization parameter
-        bool is_quant_param = IsQuantizationParameter(name, src_graph);
-
-        if (!is_quant_param) {
-            // This is actual weight data - so to convert to input for weight sharing
-            insert_metadata(initializer_tensor);
-            AddInitializerAsInput(dst_graph, accumulated_inputs, src_graph, name);
-        } else {
-            // This is a quantization parameter - keep as initializer even if external
-
-            if (initializers_to_keep.count(name) > 0) {
+      if (!is_quant_param) {
+        // This is actual weight data - so to convert to input for weight sharing
+        insert_metadata(initializer_tensor);
+        AddInitializerAsInput(dst_graph, accumulated_inputs, src_graph, name);
+      } else {
+        // This is a quantization parameter - keep as initializer even if external

-                dst_graph.AddInitializedTensor(initializer_tensor);
-            }
+        if (initializers_to_keep.count(name) > 0) {
+          dst_graph.AddInitializedTensor(initializer_tensor);
         }
+      }
     } else {
-        // Add as an initialized tensor if it does not have external data
-        if (initializers_to_keep.count(name) > 0) {
-            if (init_with_data) {
-                dst_graph.AddInitializedTensor(*init_with_data);
-            } else {
-                dst_graph.AddInitializedTensor(initializer_tensor);
-            }
+      // Add as an initialized tensor if it does not have external data
+      if (initializers_to_keep.count(name) > 0) {
+        if (init_with_data) {
+          dst_graph.AddInitializedTensor(*init_with_data);
+        } else {
+          dst_graph.AddInitializedTensor(initializer_tensor);
         }
+      }
     }

     current_scope_initializer_set.insert(name);
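The branch above captures the weight-sharing policy in CreateModelWithStrippedQDQNodes: with enable_ovep_weight_sharing set and the initializer stored as external data (and no in-memory replacement in init_with_data), real weight tensors are exposed as graph inputs, while DQ/Q scales and zero points stay embedded as initializers; anything else is copied over only if it is still referenced via initializers_to_keep. A condensed sketch of that decision, using hypothetical names (InitializerAction, ClassifyInitializer) that are not part of the actual source:

// Hypothetical helper mirroring the branch above; names are illustrative only.
enum class InitializerAction { kConvertToInput, kKeepAsInitializer, kSkip };

static InitializerAction ClassifyInitializer(bool has_in_memory_replacement,  // init_with_data != nullptr
                                             bool has_external_data,          // utils::HasExternalData(...)
                                             bool weight_sharing_enabled,     // enable_ovep_weight_sharing
                                             bool is_quant_param,             // IsQuantizationParameter(...)
                                             bool in_keep_set) {              // initializers_to_keep.count(name) > 0
  if (!has_in_memory_replacement && has_external_data && weight_sharing_enabled) {
    if (!is_quant_param) {
      // Actual weight data: exposed as a graph input so it can be shared.
      return InitializerAction::kConvertToInput;
    }
    // Scale / zero point: stays with the stripped model as an initializer.
    return in_keep_set ? InitializerAction::kKeepAsInitializer : InitializerAction::kSkip;
  }
  // No external data (or an in-memory copy exists): keep it only if still referenced.
  return in_keep_set ? InitializerAction::kKeepAsInitializer : InitializerAction::kSkip;
}

Presumably the scales and zero points must stay inline because the stripped model still needs them to describe the quantization, whereas the bulky weight data is what benefits from being shared through a graph input.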