Skip to content

Commit bed79c3

Browse files
tianleiwu authored and RyanUnderhill committed
Fix memory exception in Layer Norm Fusion (#2644)
1 parent dc40bf6 commit bed79c3

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

onnxruntime/core/optimizer/layer_norm_fusion.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -254,12 +254,12 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
254254
layer_norm_input_defs,
255255
{}, {}, kOnnxDomain);
256256

257-
// Get constant "epsilon" from "Add2" node if available. Else, default value will be used.
257+
// Get constant "epsilon" from "Add2" node if available. Else, default value will be used.
258258
const ONNX_NAMESPACE::TensorProto* tensor_proto = graph_utils::GetConstantInitializer(graph, add2_node.MutableInputDefs()[1]->Name());
259259
if (tensor_proto != nullptr) {
260260
if (tensor_proto->data_type() == ONNX_NAMESPACE::TensorProto_DataType_FLOAT) {
261-
const float* val = onnxruntime::make_unique<Initializer>(*tensor_proto)->data<float>();
262-
layer_norm_node.AddAttribute("epsilon", val[0]);
261+
auto initializer = onnxruntime::make_unique<Initializer>(*tensor_proto);
262+
layer_norm_node.AddAttribute("epsilon", initializer->data<float>()[0]);
263263
}
264264
}
265265

0 commit comments

Comments (0)