Skip to content

Commit a7a56de

Browse files
committed
Removed Add v0 op
1 parent 0ab1d5a commit a7a56de

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

58 files changed

+372
-613
lines changed

inference-engine/tests_deprecated/unit/engines/gna/layers/gna_eltwise_test.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ class GNAEltwiseTest : public GNATest<>, public testing::WithParamInterface<GNAE
6969
FC2 = std::make_shared<ngraph::op::v1::Reshape>(FC2, reshape_pattern, false);
7070
}
7171

72-
auto add = std::make_shared<ngraph::op::Add>(FC1, FC2);
72+
auto add = std::make_shared<ngraph::op::v1::Add>(FC1, FC2);
7373

7474
auto function = std::make_shared<ngraph::Function>(ngraph::NodeVector{ add }, ngraph::ParameterVector{input1, input2});
7575

ngraph/src/ngraph/autodiff/adjoints.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -175,7 +175,7 @@ void autodiff::Adjoints::add_delta(const Output<Node>& x, const Output<Node>& de
175175
}
176176
else
177177
{
178-
deltas = std::make_shared<op::Add>(deltas, delta);
178+
deltas = std::make_shared<op::v1::Add>(deltas, delta);
179179
}
180180
}
181181

@@ -205,7 +205,7 @@ void autodiff::Adjoints::add_delta_to_slice(const Output<Node>& x,
205205
{
206206
deltas = std::make_shared<op::ReplaceSlice>(
207207
deltas,
208-
std::make_shared<op::Add>(
208+
std::make_shared<op::v1::Add>(
209209
std::make_shared<op::Slice>(deltas, lower_bounds, upper_bounds, strides), delta),
210210
lower_bounds,
211211
upper_bounds,

ngraph/src/ngraph/op/add.cpp

Lines changed: 5 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -21,51 +21,6 @@
2121
using namespace std;
2222
using namespace ngraph;
2323

24-
// ------------------------------- v0 ------------------------------------------
25-
26-
constexpr NodeTypeInfo op::v0::Add::type_info;
27-
28-
op::v0::Add::Add(const Output<Node>& arg0,
29-
const Output<Node>& arg1,
30-
const AutoBroadcastSpec& auto_broadcast)
31-
: BinaryElementwiseArithmetic(arg0, arg1, auto_broadcast)
32-
{
33-
constructor_validate_and_infer_types();
34-
}
35-
36-
shared_ptr<Node> op::v0::Add::clone_with_new_inputs(const OutputVector& new_args) const
37-
{
38-
check_new_args_count(this, new_args);
39-
return make_shared<op::v0::Add>(new_args.at(0), new_args.at(1), this->get_autob());
40-
}
41-
42-
bool op::v0::Add::visit_attributes(AttributeVisitor& visitor)
43-
{
44-
BinaryElementwiseArithmetic::visit_attributes(visitor);
45-
return true;
46-
}
47-
48-
void op::v0::Add::generate_adjoints(autodiff::Adjoints& adjoints, const OutputVector& deltas)
49-
{
50-
if (get_autob().m_type != op::AutoBroadcastType::NONE)
51-
{
52-
throw ngraph_error("Autodiff not supported with auto broadcasting");
53-
}
54-
55-
auto delta = deltas.at(0);
56-
57-
auto x = input_value(0);
58-
auto y = input_value(1);
59-
60-
adjoints.add_delta(x, delta);
61-
adjoints.add_delta(y, delta);
62-
}
63-
64-
shared_ptr<Node> ngraph::operator+(const Output<Node>& arg0, const Output<Node>& arg1)
65-
{
66-
return make_shared<op::Add>(arg0, arg1);
67-
}
68-
6924
namespace
7025
{
7126
template <element::Type_t ET>
@@ -120,11 +75,6 @@ namespace
12075
}
12176
}
12277

123-
bool op::v0::Add::evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs)
124-
{
125-
return evaluate_add(inputs[0], inputs[1], outputs[0], get_autob());
126-
}
127-
12878
// ------------------------------- v1 ------------------------------------------
12979

13080
constexpr NodeTypeInfo op::v1::Add::type_info;
@@ -169,3 +119,8 @@ bool op::v1::Add::evaluate(const HostTensorVector& outputs, const HostTensorVect
169119
{
170120
return evaluate_add(inputs[0], inputs[1], outputs[0], get_autob());
171121
}
122+
123+
shared_ptr<Node> ngraph::operator+(const Output<Node>& arg0, const Output<Node>& arg1)
124+
{
125+
return make_shared<op::v1::Add>(arg0, arg1);
126+
}

ngraph/src/ngraph/op/add.hpp

Lines changed: 1 addition & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -24,49 +24,6 @@ namespace ngraph
2424
{
2525
namespace op
2626
{
27-
namespace v0
28-
{
29-
/// \brief Elementwise addition operation.
30-
///
31-
class NGRAPH_API Add : public util::BinaryElementwiseArithmetic
32-
{
33-
public:
34-
static constexpr NodeTypeInfo type_info{"Add", 0};
35-
const NodeTypeInfo& get_type_info() const override { return type_info; }
36-
/// \brief Constructs an uninitialized addition operation
37-
Add()
38-
: util::BinaryElementwiseArithmetic(AutoBroadcastSpec::NONE)
39-
{
40-
}
41-
42-
/// \brief Constructs an addition operation.
43-
///
44-
/// \param arg0 Output that produces the first input tensor.<br>
45-
/// `[d0, ...]`
46-
/// \param arg1 Output that produces the second input tensor.<br>
47-
/// `[d0, ...]`
48-
/// \param auto_broadcast Auto broadcast specification
49-
///
50-
/// Output `[d0, ...]`
51-
///
52-
Add(const Output<Node>& arg0,
53-
const Output<Node>& arg1,
54-
const AutoBroadcastSpec& auto_broadcast = AutoBroadcastSpec());
55-
56-
std::shared_ptr<Node>
57-
clone_with_new_inputs(const OutputVector& new_args) const override;
58-
59-
bool visit_attributes(AttributeVisitor& visitor) override;
60-
virtual bool is_commutative() const override { return true; }
61-
bool evaluate(const HostTensorVector& outputs,
62-
const HostTensorVector& inputs) override;
63-
64-
protected:
65-
virtual void generate_adjoints(autodiff::Adjoints& adjoints,
66-
const OutputVector& deltas) override;
67-
};
68-
} // namespace v0
69-
7027
namespace v1
7128
{
7229
/// \brief Elementwise addition operation.
@@ -110,10 +67,8 @@ namespace ngraph
11067
virtual void generate_adjoints(autodiff::Adjoints& adjoints,
11168
const OutputVector& deltas) override;
11269
};
113-
11470
} // namespace v1
115-
using v0::Add;
116-
} // namespace op
71+
} // namespace op
11772

11873
NGRAPH_API
11974
std::shared_ptr<Node> operator+(const Output<Node>& arg0, const Output<Node>& arg1);

ngraph/src/ngraph/op/fused/conv_fused.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -212,7 +212,7 @@ NodeVector op::ConvolutionBias::decompose_op() const
212212
bcast_axes.insert(i);
213213
}
214214

215-
auto conv_bias = make_shared<op::Add>(
215+
auto conv_bias = make_shared<op::v1::Add>(
216216
conv, make_shared<op::Broadcast>(input_value(2), conv->get_shape(), bcast_axes));
217217
if (m_with_relu)
218218
{
@@ -336,7 +336,7 @@ NodeVector op::ConvolutionBiasAdd::decompose_op() const
336336
bcast_axes.insert(i);
337337
}
338338

339-
auto conv_bias = make_shared<op::Add>(
339+
auto conv_bias = make_shared<op::v1::Add>(
340340
conv, make_shared<op::Broadcast>(input_value(2), conv->get_shape(), bcast_axes));
341341
if (m_with_relu)
342342
{

ngraph/src/ngraph/op/fused/gemm.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ NodeVector op::Gemm::decompose_op() const
7979
// alpha * A' * B' + beta * C
8080
// The input tensor `C` should be "unidirectionally broadcastable" to the `a_dot_b` tensor.
8181
auto broadcasted_c = builder::numpy_broadcast(C, a_dot_b->get_shape());
82-
return {std::make_shared<op::Add>(a_dot_b, broadcasted_c)};
82+
return {std::make_shared<op::v1::Add>(a_dot_b, broadcasted_c)};
8383
}
8484

8585
shared_ptr<Node> op::Gemm::clone_with_new_inputs(const OutputVector& new_args) const

ngraph/src/ngraph/op/op_version_tbl.hpp

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@
2929
NGRAPH_OP(Abs, ngraph::op::v0, 0)
3030
NGRAPH_OP(Acos, ngraph::op::v0, 0)
3131
NGRAPH_OP(Acosh, ngraph::op::v3, 3)
32-
NGRAPH_OP(Add, ngraph::op::v0, 0)
3332
NGRAPH_OP(Add, ngraph::op::v1, 1)
3433
NGRAPH_OP(All, ngraph::op::v0, 0)
3534
NGRAPH_OP(AllReduce, ngraph::op::v0, 0)

ngraph/src/ngraph/op/util/rnn_cell_base.cpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,8 @@ op::util::ActivationFunction op::util::RNNCellBase::get_activation_function(size
8686

8787
shared_ptr<Node> op::util::RNNCellBase::add(const Output<Node>& lhs, const Output<Node>& rhs)
8888
{
89-
return {make_shared<op::Add>(lhs, rhs, op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY))};
89+
return {
90+
make_shared<op::v1::Add>(lhs, rhs, op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY))};
9091
}
9192

9293
shared_ptr<Node> op::util::RNNCellBase::sub(const Output<Node>& lhs, const Output<Node>& rhs)

ngraph/src/ngraph/pass/algebraic_simplification.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -795,7 +795,7 @@ static bool replace_transpose_with_reshape(shared_ptr<Node> transpose)
795795
static unordered_map<NodeTypeInfo, function<bool(shared_ptr<Node>)>> initialize_ops_to_simplifiers()
796796
{
797797
return unordered_map<NodeTypeInfo, function<bool(shared_ptr<Node>)>>(
798-
{{op::v0::Add::type_info, simplify_add},
798+
{{op::v1::Add::type_info, simplify_add},
799799
{op::v0::Multiply::type_info, simplify_multiply},
800800
{opset3::Gather::type_info, simplify_gather},
801801
{op::v0::Concat::type_info, simplify_concat},

ngraph/src/ngraph/pass/core_fusion.cpp

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@ void pass::CoreFusion::construct_sigmoid()
137137
auto skip_broadcast =
138138
make_shared<pattern::op::Skip>(constant, pattern::has_class<op::Broadcast>());
139139

140-
auto add_exp = make_shared<op::Add>(exp_neg_input, skip_broadcast);
140+
auto add_exp = make_shared<op::v1::Add>(exp_neg_input, skip_broadcast);
141141
auto divide_1_over_exp = make_shared<op::Divide>(skip_broadcast, add_exp);
142142

143143
// Define a call back that needs to called once the DFG matches the pattern
@@ -221,7 +221,7 @@ void pass::CoreFusion::construct_folded_batch_norm()
221221
// new biases = -mean * gamma / sqrt(variance + epsilon) + beta
222222

223223
auto bn_eps = op::Constant::create(element::f32, Shape{}, {m_bn->get_eps_value()});
224-
auto var_eps = make_shared<op::Add>(
224+
auto var_eps = make_shared<op::v1::Add>(
225225
pattern_map[var],
226226
make_shared<op::Broadcast>(bn_eps, pattern_map[var]->get_shape(), AxisSet{0}));
227227
auto sqrt_var_eps = make_shared<op::Sqrt>(var_eps);
@@ -277,7 +277,7 @@ void pass::CoreFusion::construct_conv_affine_folding()
277277
auto B = make_shared<op::Broadcast>(Bc, Shape{2, 2, 1, 1}, AxisSet{0, 2, 3});
278278
auto B_label = make_shared<pattern::op::Label>(B, nullptr, NodeVector{B});
279279
auto multiply = make_shared<op::Multiply>(conv_label, A_label);
280-
auto add = make_shared<op::Add>(multiply, B_label);
280+
auto add = make_shared<op::v1::Add>(multiply, B_label);
281281

282282
auto callback = [input, filters, conv_label, A_label, B_label](pattern::Matcher& m) {
283283
NGRAPH_DEBUG << "In callback for conv affine folding against node = "
@@ -512,19 +512,19 @@ void pass::CoreFusion::construct_optimized_strided_conv()
512512
make_shared<pattern::op::Label>(conv_stride3, nullptr, NodeVector{conv_stride3});
513513

514514
auto broadcast_w3_label = make_shared<pattern::op::Label>(conv_stride3_label, is_bc);
515-
auto add_w3 = make_shared<op::Add>(conv_stride3_label, broadcast_w3_label);
515+
auto add_w3 = make_shared<op::v1::Add>(conv_stride3_label, broadcast_w3_label);
516516
auto relu_w3 = make_shared<op::Relu>(add_w3);
517517

518518
auto weights_stride1 = make_shared<pattern::op::Label>(element::f32, win_size_1);
519519
auto conv_stride1 = make_shared<op::Convolution>(relu_w3, weights_stride1);
520520
auto conv_stride1_label =
521521
make_shared<pattern::op::Label>(conv_stride1, nullptr, NodeVector{conv_stride1});
522522
auto broadcast_w1_label = make_shared<pattern::op::Label>(conv_stride1_label, is_bc);
523-
auto add_w1 = make_shared<op::Add>(conv_stride1_label, broadcast_w1_label);
523+
auto add_w1 = make_shared<op::v1::Add>(conv_stride1_label, broadcast_w1_label);
524524

525525
auto eltwise_arg_label =
526526
make_shared<pattern::op::Label>(element::f32, conv_stride1->get_shape());
527-
auto add_two_convs = make_shared<op::Add>(add_w1, eltwise_arg_label);
527+
auto add_two_convs = make_shared<op::v1::Add>(add_w1, eltwise_arg_label);
528528

529529
auto relu_two_convs = make_shared<op::Relu>(add_two_convs);
530530

@@ -648,19 +648,19 @@ void pass::CoreFusion::construct_optimized_strided_conv()
648648
pad_1,
649649
pad_1);
650650

651-
auto new_add_conv_28w3s2 =
652-
make_shared<op::Add>(conv_28w3s2, reduce_broadcast(pattern_map[broadcast_w3_label]));
651+
auto new_add_conv_28w3s2 = make_shared<op::v1::Add>(
652+
conv_28w3s2, reduce_broadcast(pattern_map[broadcast_w3_label]));
653653
auto new_relu_28w3s2 = make_shared<op::Relu>(new_add_conv_28w3s2);
654654

655655
auto conv_28w1s1 = make_shared<op::Convolution>(
656656
new_relu_28w3s2, m_conv_stride1->get_argument(1), stride_1, stride_1);
657657

658-
auto new_add_conv28s1 =
659-
make_shared<op::Add>(conv_28w1s1, reduce_broadcast(pattern_map[broadcast_w1_label]));
658+
auto new_add_conv28s1 = make_shared<op::v1::Add>(
659+
conv_28w1s1, reduce_broadcast(pattern_map[broadcast_w1_label]));
660660

661661
auto maxpool =
662662
make_shared<op::MaxPool>(pattern_map[eltwise_arg_label], Shape{1, 1}, stride_2);
663-
auto new_add_two_convs = make_shared<op::Add>(new_add_conv28s1, maxpool);
663+
auto new_add_two_convs = make_shared<op::v1::Add>(new_add_conv28s1, maxpool);
664664
auto new_relu_two_convs = make_shared<op::Relu>(new_add_two_convs);
665665

666666
for (auto sconv : sconvs)
@@ -1044,7 +1044,7 @@ void pass::CoreFusion::construct_conv_bias_add()
10441044
CoordinateDiff{0, 0},
10451045
Strides{1, 1});
10461046
auto add_input = make_shared<pattern::op::Label>(element::f32, pconv->get_shape());
1047-
auto padd = make_shared<op::Add>(add_input, pconv);
1047+
auto padd = make_shared<op::v1::Add>(add_input, pconv);
10481048

10491049
auto callback = [data_batch, filters](pattern::Matcher& m) {
10501050
NGRAPH_DEBUG << "In a callback for construct_conv_sum against "

ngraph/src/ngraph/pass/cse.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -195,7 +195,7 @@ static unordered_map<type_index, function<bool(shared_ptr<Node>, shared_ptr<Node
195195
{TI(op::Sqrt), cse_unarywise},
196196
{TI(op::Tan), cse_unarywise},
197197
{TI(op::Tanh), cse_unarywise},
198-
{TI(op::Add), cse_binarywise},
198+
{TI(op::v1::Add), cse_binarywise},
199199
{TI(op::Divide), cse_binarywise},
200200
{TI(op::Maximum), cse_binarywise},
201201
{TI(op::Minimum), cse_binarywise},

ngraph/src/ngraph/serializer.cpp

Lines changed: 0 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -952,12 +952,6 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
952952
node = make_shared<op::Acos>(args[0]);
953953
break;
954954
}
955-
case OP_TYPEID::Add:
956-
{
957-
node = make_shared<op::v0::Add>(
958-
args[0], args[1], read_auto_broadcast(node_js, "auto_broadcast"));
959-
break;
960-
}
961955
case OP_TYPEID::All:
962956
{
963957
auto reduction_axes = deserialize_axis_set(node_js.at("reduction_axes"));
@@ -2502,16 +2496,6 @@ json JSONSerializer::serialize_node(const Node& n)
25022496
}
25032497
case OP_TYPEID::Acos: { break;
25042498
}
2505-
case OP_TYPEID::Add:
2506-
{
2507-
const op::util::BinaryElementwiseArithmetic* tmp = nullptr;
2508-
tmp = static_cast<const op::v0::Add*>(&n);
2509-
if (tmp != nullptr && tmp->get_autob().m_type != op::AutoBroadcastType::NONE)
2510-
{
2511-
node["auto_broadcast"] = write_auto_broadcast(tmp->get_autob());
2512-
}
2513-
break;
2514-
}
25152499
case OP_TYPEID::ArgMin:
25162500
{
25172501
auto tmp = static_cast<const op::ArgMin*>(&n);

ngraph/test/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -247,7 +247,7 @@ foreach(HEADER ${NGRAPH_HEADER_LIST})
247247
endforeach()
248248

249249
if(NGRAPH_JSON_ENABLE)
250-
list(APPEND SRC core.cpp serialize.cpp)
250+
list(APPEND SRC serialize.cpp)
251251
endif()
252252

253253
set_source_files_properties(includes.cpp PROPERTIES COMPILE_DEFINITIONS

ngraph/test/algebraic_simplification.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ TEST(algebraic_simplification, add_types_shapes)
8080
ParameterVector{a, b, c});
8181
pass_manager.run_passes(f);
8282

83-
ASSERT_EQ(count_ops_of_type<op::Add>(f), 0);
83+
ASSERT_EQ(count_ops_of_type<op::v1::Add>(f), 0);
8484
auto expected = ngraph::NodeVector{a, b, a, c, b};
8585
auto results = f->get_results();
8686
for (size_t i = 0; i < results.size(); i++)
@@ -154,7 +154,7 @@ TEST(algebraic_simplification, add_broadcast)
154154
ParameterVector{a, b, c});
155155
pass_manager.run_passes(f);
156156

157-
ASSERT_EQ(count_ops_of_type<op::Add>(f), 0);
157+
ASSERT_EQ(count_ops_of_type<op::v1::Add>(f), 0);
158158
auto expected = ngraph::NodeVector{a, b, a, c, b};
159159
auto results = f->get_results();
160160
for (size_t i = 0; i < results.size(); i++)

ngraph/test/backend/add.in.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ NGRAPH_TEST(${BACKEND_NAME}, add)
5151
Shape shape{2, 2};
5252
auto A = make_shared<op::Parameter>(element::f32, shape);
5353
auto B = make_shared<op::Parameter>(element::f32, shape);
54-
auto f = make_shared<Function>(make_shared<op::Add>(A, B), ParameterVector{A, B});
54+
auto f = make_shared<Function>(make_shared<op::v1::Add>(A, B), ParameterVector{A, B});
5555

5656
auto backend = runtime::Backend::create("${BACKEND_NAME}");
5757

ngraph/test/backend/api.in.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ NGRAPH_TEST(${BACKEND_NAME}, create_tensor_1)
3535
Shape shape{2, 2};
3636
auto A = make_shared<op::Parameter>(element::f32, shape);
3737
auto B = make_shared<op::Parameter>(element::f32, shape);
38-
auto f = make_shared<Function>(make_shared<op::Add>(A, B), ParameterVector{A, B});
38+
auto f = make_shared<Function>(make_shared<op::v1::Add>(A, B), ParameterVector{A, B});
3939

4040
auto backend = runtime::Backend::create("${BACKEND_NAME}");
4141

0 commit comments

Comments
 (0)