Revise exp (openvinotoolkit#5236)
* refactor exp class

* add backend and type_prop tests

* add SLT for exp operator

* add SLT for serialization of activation type ops

* remove redundant files
bszmelcz authored Apr 21, 2021
1 parent 76fd791 commit 0f2569f
Showing 5 changed files with 158 additions and 4 deletions.
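The "refactor exp class" bullet swaps Exp's hand-written static type_info for the NGRAPH_RTTI macros. As a rough orientation for the exp.hpp/exp.cpp hunks further down (condensed from the diff itself, with surrounding members elided), the pattern is:

// ngraph/core/include/ngraph/op/exp.hpp — declare the RTTI members in the class body.
class NGRAPH_API Exp : public util::UnaryElementwiseArithmetic
{
public:
    NGRAPH_RTTI_DECLARATION;  // replaces the manual type_info member and get_type_info() override
    // ... constructors and overrides elided ...
};

// ngraph/core/src/op/exp.cpp — define the RTTI once, naming the op, its opset version
// and its parent class.
NGRAPH_RTTI_DEFINITION(op::Exp, "Exp", 0, UnaryElementwiseArithmetic);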
@@ -0,0 +1,109 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <vector>
#include "shared_test_classes/single_layer/activation.hpp"
#include "common_test_utils/test_constants.hpp"

using namespace LayerTestsDefinitions;
using namespace ngraph::helpers;
namespace {
TEST_P(ActivationLayerTest, Serialize) {
    Serialize();
}
// Common params
const std::vector<InferenceEngine::Precision> inputPrecisions = {
    InferenceEngine::Precision::FP32
    // TODO: Fix Issue-27390
    // InferenceEngine::Precision::I16,
    // InferenceEngine::Precision::U8
};

const std::vector<InferenceEngine::Precision> netPrecisions = {
    InferenceEngine::Precision::FP32,
    InferenceEngine::Precision::FP16
};

const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
    {Sigmoid, {}},
    {Tanh, {}},
    {Relu, {}},
    {Exp, {}},
    {Log, {}},
    {Sign, {}},
    {Abs, {}},
    {Clamp, {{-2.0f, 2.0f}}},
    {Negative, {}},
    {Acos, {}},
    {Asin, {}},
    {Atan, {}},
    {Cos, {}},
    {Cosh, {}},
    {Floor, {}},
    {Sin, {}},
    {Sinh, {}},
    {Sqrt, {}},
    {Tan, {}},
    {Elu, {{0.1f}}},
    {Erf, {}},
    {HardSigmoid, {{0.2f, 0.5f}}},
    {Selu, {{1.6732f, 1.0507f}}},
    {Ceiling, {}},
    {Mish, {}},
    {HSwish, {}},
    {SoftPlus, {}},
    {HSigmoid, {}},
    {RoundHalfToEven, {}},
    {RoundHalfAwayFromZero, {}},
    {GeluErf, {}},
    {GeluTanh, {}}
};

const std::map<ActivationTypes, std::vector<std::vector<float>>> activationParamTypes = {
    {PReLu, {{-0.01f}}},
    {LeakyRelu, {{0.01f}}}
};

std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
    {{1, 50}, {{}}},
    {{1, 128}, {{}}},
};

std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> preluBasic = {
    {{1, 50}, {{1}, {50}}},
    {{1, 128}, {{1}, {128}}},
};

const auto basicCases = ::testing::Combine(
    ::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
    ::testing::ValuesIn(netPrecisions),
    ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    ::testing::Values(InferenceEngine::Layout::ANY),
    ::testing::Values(InferenceEngine::Layout::ANY),
    ::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
    ::testing::Values(CommonTestUtils::DEVICE_CPU)
);

const auto basicPreluCases = ::testing::Combine(
    ::testing::ValuesIn(CommonTestUtils::combineParams(activationParamTypes)),
    ::testing::ValuesIn(netPrecisions),
    ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    ::testing::Values(InferenceEngine::Layout::ANY),
    ::testing::Values(InferenceEngine::Layout::ANY),
    ::testing::ValuesIn(CommonTestUtils::combineParams(preluBasic)),
    ::testing::Values(CommonTestUtils::DEVICE_CPU)
);


INSTANTIATE_TEST_CASE_P(smoke_Activation_Basic, ActivationLayerTest, basicCases, ActivationLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(smoke_Activation_Basic_Prelu, ActivationLayerTest, basicPreluCases, ActivationLayerTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_Activation_Basic, ActivationParamLayerTest, basicPreluCases, ActivationLayerTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(smoke_Activation_Basic, ActivationDynamicLayerTest, basicCases, ActivationLayerTest::getTestCaseName);

} // namespace
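The test matrix above relies on CommonTestUtils::combineParams to pair each activation type with its constant sets (and each input shape with its secondary shapes). Its implementation is not part of this diff; a minimal sketch of the flattening it is assumed to perform, consistent with how ::testing::ValuesIn consumes it above (combineParamsSketch is a hypothetical stand-in name), would be:

#include <map>
#include <utility>
#include <vector>

// Assumed behaviour only, not the library source: flatten {key -> [v1, v2, ...]}
// into [(key, v1), (key, v2), ...]. An empty value list still yields one
// (key, default-constructed value) entry, which is how entries such as
// {Sigmoid, {}} above become exactly one test parameter.
template <typename Key, typename Value>
std::vector<std::pair<Key, Value>>
combineParamsSketch(const std::map<Key, std::vector<Value>>& keyValueSets)
{
    std::vector<std::pair<Key, Value>> result;
    for (const auto& set : keyValueSets)
    {
        if (set.second.empty())
            result.emplace_back(set.first, Value{});
        for (const auto& value : set.second)
            result.emplace_back(set.first, value);
    }
    return result;
}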
4 changes: 2 additions & 2 deletions ngraph/core/include/ngraph/op/exp.hpp
@@ -16,8 +16,8 @@ namespace ngraph
 class NGRAPH_API Exp : public util::UnaryElementwiseArithmetic
 {
 public:
-    static constexpr NodeTypeInfo type_info{"Exp", 0};
-    const NodeTypeInfo& get_type_info() const override { return type_info; }
+    NGRAPH_RTTI_DECLARATION;

     /// \brief Constructs an exponential operation.
     Exp() = default;
     /// \brief Constructs an exponential operation.
2 changes: 1 addition & 1 deletion ngraph/core/src/op/exp.cpp
@@ -13,7 +13,7 @@
 using namespace std;
 using namespace ngraph;

-constexpr NodeTypeInfo op::Exp::type_info;
+NGRAPH_RTTI_DEFINITION(op::Exp, "Exp", 0, UnaryElementwiseArithmetic);

 op::Exp::Exp(const Output<Node>& arg)
     : UnaryElementwiseArithmetic(arg)
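With the RTTI definition in place, the op's identity can be inspected at runtime; a small self-contained sketch (assuming the usual ngraph/ngraph.hpp umbrella header; not part of the commit) is:

#include <iostream>
#include <memory>
#include <ngraph/ngraph.hpp>

int main()
{
    using namespace ngraph;

    // Build a one-node graph: y = exp(x).
    auto x = std::make_shared<op::Parameter>(element::f32, Shape{2, 2});
    auto y = std::make_shared<op::Exp>(x);

    // The macro-generated type info carries the op name and opset version.
    const auto& info = y->get_type_info();
    std::cout << info.name << " v" << info.version << std::endl;  // expected: "Exp v0"
    return 0;
}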
45 changes: 45 additions & 0 deletions ngraph/test/backend/exp.in.cpp
@@ -43,3 +43,48 @@ NGRAPH_TEST(${BACKEND_NAME}, exp)
        shape, {expf(-4), expf(-3), expf(-2), expf(-1), expf(0), expf(1), expf(2), expf(3)});
    test_case.run();
}


NGRAPH_TEST(${BACKEND_NAME}, exp_negative)
{
    Shape shape{5};
    auto A = make_shared<op::Parameter>(element::f32, shape);
    auto f = make_shared<Function>(make_shared<op::Exp>(A), ParameterVector{A});

    auto test_case = test::TestCase<TestEngine>(f);
    test_case.add_input<float>({-4, -3, -2, -1, -5});
    test_case.add_expected_output<float>(
        shape, {expf(-4), expf(-3), expf(-2), expf(-1), expf(-5)});
    test_case.run();
}

NGRAPH_TEST(${BACKEND_NAME}, exp_scalar)
{
    Shape shape{};
    auto A = make_shared<op::Parameter>(element::f32, shape);
    auto f = make_shared<Function>(make_shared<op::Exp>(A), ParameterVector{A});

    vector<float> a{13};

    auto test_case = test::TestCase<TestEngine>(f);
    test_case.add_input<float>({a});
    test_case.add_expected_output<float>(shape, {expf(13)});
    test_case.run();
}

NGRAPH_TEST(${BACKEND_NAME}, exp_in_place)
{
    Shape shape{2};
    auto A = make_shared<op::Parameter>(element::f32, shape);
    auto T = make_shared<op::Exp>(A);
    auto T2 = make_shared<op::Exp>(T);

    auto f = make_shared<Function>(T2, ParameterVector{A});

    vector<float> a{1, 3};

    auto test_case = test::TestCase<TestEngine>(f);
    test_case.add_input<float>({a});
    test_case.add_expected_output<float>(shape, {expf(expf(1)), expf(expf(3))});
    test_case.run();
}
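For anyone extending this coverage later, a hypothetical additional case following the same TestCase pattern (a 2x3 input; not part of the commit) would look like:

NGRAPH_TEST(${BACKEND_NAME}, exp_2d_sketch)
{
    Shape shape{2, 3};
    auto A = make_shared<op::Parameter>(element::f32, shape);
    auto f = make_shared<Function>(make_shared<op::Exp>(A), ParameterVector{A});

    auto test_case = test::TestCase<TestEngine>(f);
    test_case.add_input<float>({-2, -1, 0, 1, 2, 3});
    test_case.add_expected_output<float>(
        shape, {expf(-2), expf(-1), expf(0), expf(1), expf(2), expf(3)});
    test_case.run();
}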
2 changes: 1 addition & 1 deletion ngraph/test/type_prop/unary_ops.cpp
@@ -96,6 +96,6 @@ REGISTER_TYPED_TEST_CASE_P(UnaryOperator,
                            dynamic_rank_input_shape_3D,
                            dynamic_rank_input_shape_full);

-using Types = ::testing::Types<op::Acos, op::Asin, op::Abs, op::Sin>;
+using Types = ::testing::Types<op::Acos, op::Asin, op::Abs, op::Sin, op::Exp>;

 INSTANTIATE_TYPED_TEST_CASE_P(type_prop, UnaryOperator, Types);
