Deprecated dynamism in old IE API (openvinotoolkit#7812)
ilya-lavrenov authored Oct 4, 2021
1 parent a1ee7dc · commit f762751
Showing 17 changed files with 120 additions and 61 deletions.
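Throughout the diff, the same two-macro pattern from ie_api.h is applied: INFERENCE_ENGINE_DEPRECATED marks a declaration so that external callers get a compile-time warning, while IE_SUPPRESS_DEPRECATED_START / IE_SUPPRESS_DEPRECATED_END silence that warning around the library's own internal uses. The sketch below shows the usual shape of such macros on GCC/Clang; it is illustrative only (EXAMPLE_* names are hypothetical), and the real definitions in ie_api.h cover more compilers and build modes.

// Illustrative macro definitions (not the actual ie_api.h contents):
#if defined(__GNUC__) || defined(__clang__)
#define EXAMPLE_DEPRECATED(msg) __attribute__((deprecated(msg)))
#define EXAMPLE_SUPPRESS_DEPRECATED_START \
    _Pragma("GCC diagnostic push")        \
    _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
#define EXAMPLE_SUPPRESS_DEPRECATED_END _Pragma("GCC diagnostic pop")
#endif

// Marking an API, as this commit does with INFERENCE_ENGINE_DEPRECATED:
EXAMPLE_DEPRECATED("Use OpenVINO 2.0 API for dynamic shapes support")
inline bool isDynamicExample() { return false; }

void internalCaller() {
    // The library still calls its own deprecated API; suppression keeps
    // warnings-as-errors builds green without hiding the warning from users.
    EXAMPLE_SUPPRESS_DEPRECATED_START
    (void)isDynamicExample();
    EXAMPLE_SUPPRESS_DEPRECATED_END
}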
11 changes: 11 additions & 0 deletions docs/template_plugin/src/template_infer_request.cpp
@@ -15,6 +15,7 @@
#include <utility>

#include "blob_factory.hpp"
#include "ie_api.h"
#include "ie_common.h"
#include "ie_ngraph_utils.hpp"
#include "openvino/core/except.hpp"
@@ -104,7 +105,9 @@ static void AllocateImpl(const BlobDataMap& userDataMap,
GetNetworkPrecisionF&& GetNetworkPrecision,
bool isInputBlob = true) {
for (const auto& userData : userDataMap) {
IE_SUPPRESS_DEPRECATED_START
auto partialShape = userData.second->getPartialShape();
IE_SUPPRESS_DEPRECATED_END
SizeVector dims;
if (partialShape.is_static()) {
dims = userData.second->getTensorDesc().getDims();
@@ -373,6 +376,7 @@ InferenceEngine::Blob::Ptr TemplateInferRequest::GetBlob(const std::string& name
} else {
data = _outputs[name];
SizeVector dims;
IE_SUPPRESS_DEPRECATED_START
if (!foundOutput->isDynamic()) {
dims = foundOutput->getTensorDesc().getDims();
} else if (_outputTensors[_executableNetwork->_outputIndex.at(name)] &&
@@ -382,6 +386,7 @@ InferenceEngine::Blob::Ptr TemplateInferRequest::GetBlob(const std::string& name
auto rank = foundOutput->getPartialShape().rank();
dims = SizeVector(rank.is_dynamic() ? 1 : rank.get_length(), 0);
}
IE_SUPPRESS_DEPRECATED_END

if (data->getTensorDesc().getDims() != dims) {
auto&& results = _executableNetwork->_function->get_results();
@@ -416,10 +421,12 @@ void TemplateInferRequest::SetBlob(const std::string& name, const InferenceEngin
const bool remoteBlobPassed = userBlob->is<RemoteBlob>();
if (!compoundBlobPassed && !remoteBlobPassed && userBlob->buffer() == nullptr)
IE_THROW(NotAllocated) << "Input data was not allocated. Input name: \'" << name << "\'";
IE_SUPPRESS_DEPRECATED_START
if (userBlob->size() == 0 &&
!((foundInput && foundInput->getInputData()->isDynamic()) || (foundOutput && foundOutput->isDynamic()))) {
IE_THROW() << "Input data is empty. Input name: \'" << name << "\'";
}
IE_SUPPRESS_DEPRECATED_END

size_t dataSize = userBlob->size();
if (isInput) {
@@ -436,11 +443,13 @@ void TemplateInferRequest::SetBlob(const std::string& name, const InferenceEngin
auto devDims = devBlob->getTensorDesc().getDims();
auto devLayout = devBlob->getTensorDesc().getLayout();
auto devPrecision = devBlob->getTensorDesc().getPrecision();
IE_SUPPRESS_DEPRECATED_START
if (foundInput->getInputData()->isDynamic() && (devDims != usrDims || devLayout != usrLayout)) {
devBlob = make_blob_with_precision({devPrecision, usrDims, TensorDesc::getLayoutByDims(usrDims)});
devBlob->allocate();
_deviceInputs[name] = devBlob;
}
IE_SUPPRESS_DEPRECATED_END
const bool preProcRequired = preProcessingRequired(foundInput, userBlob, devBlob);
if (compoundBlobPassed && !preProcRequired) {
IE_THROW(NotImplemented) << "cannot set compound blob: supported only for input pre-processing";
@@ -469,11 +478,13 @@ void TemplateInferRequest::SetBlob(const std::string& name, const InferenceEngin
auto devDims = devBlob->getTensorDesc().getDims();
auto devLayout = devBlob->getTensorDesc().getLayout();
auto devPrecision = devBlob->getTensorDesc().getPrecision();
IE_SUPPRESS_DEPRECATED_START
if (foundOutput->isDynamic() && (devDims != usrDims || devLayout != usrLayout)) {
devBlob = make_blob_with_precision({devPrecision, usrDims, TensorDesc::getLayoutByDims(usrDims)});
devBlob->allocate();
_networkOutputBlobs[name] = devBlob;
}
IE_SUPPRESS_DEPRECATED_END
size_t outputSize = devBlob->getTensorDesc().getLayout() != InferenceEngine::Layout::SCALAR
? details::product(devBlob->getTensorDesc().getDims())
: 1;
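For reference, the two reallocation branches above (input and output side) follow one pattern: when the network object is dynamic and the user blob's dims or layout no longer match the cached device blob, build a fresh device blob around the user's dims. A condensed, hedged sketch of that pattern using the same helpers visible in this diff (blob_factory.hpp, make_blob_with_precision, TensorDesc::getLayoutByDims); the function name is illustrative:

#include <inference_engine.hpp>
#include "blob_factory.hpp"  // make_blob_with_precision, as in the includes above

// Sketch of the dynamic-shape reallocation pattern from SetBlob above.
InferenceEngine::Blob::Ptr reallocateForUserDims(const InferenceEngine::Blob::Ptr& userBlob,
                                                 const InferenceEngine::Precision& devPrecision) {
    const auto& usrDims = userBlob->getTensorDesc().getDims();
    auto devBlob = make_blob_with_precision(
        {devPrecision, usrDims, InferenceEngine::TensorDesc::getLayoutByDims(usrDims)});
    devBlob->allocate();
    return devBlob;  // caller stores it back into _deviceInputs / _networkOutputBlobs
}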
@@ -212,7 +212,9 @@ InferenceEnginePython::IENetwork InferenceEnginePython::read_network(std::string

PyObject* InferenceEnginePython::getPartialShape_capsule(InferenceEngine::CDataPtr data) {
const char* py_capsule_name = "ngraph_partial_shape";
IE_SUPPRESS_DEPRECATED_START
auto ngraph_pShape_ptr = std::make_shared<ngraph::PartialShape>(data->getPartialShape());
IE_SUPPRESS_DEPRECATED_END
auto* sp_copy = new std::shared_ptr<const ngraph::PartialShape>(ngraph_pShape_ptr);
auto sp_deleter = [](PyObject* capsule) {
auto* capsule_ptr = PyCapsule_GetPointer(capsule, "ngraph_partial_shape");
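The capsule code above is the standard PyCapsule ownership pattern: heap-allocate a shared_ptr, hand the raw holder to Python, and free it from the capsule destructor once Python drops the last reference. A self-contained sketch of the same pattern with illustrative names (not OpenVINO API):

#include <Python.h>
#include <memory>

// Frees the heap-allocated shared_ptr holder when the capsule is collected.
static void example_capsule_deleter(PyObject* capsule) {
    auto* holder = static_cast<std::shared_ptr<const int>*>(
        PyCapsule_GetPointer(capsule, "example_capsule"));
    delete holder;
}

// Wraps a shared_ptr so Python keeps the C++ object alive via the capsule.
PyObject* make_example_capsule(std::shared_ptr<const int> value) {
    auto* holder = new std::shared_ptr<const int>(std::move(value));
    return PyCapsule_New(holder, "example_capsule", example_capsule_deleter);
}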
@@ -186,16 +186,20 @@ class INFERENCE_ENGINE_API_CLASS(CNNNetwork) {
void reshape(const ICNNNetwork::InputShapes& inputShapes);

/**
* @deprecated void InferenceEngine::CNNNetwork::reshape(const ICNNNetwork::InputShapes&)
* @brief Run shape inference with new input shapes for the network
* @param inputShapes A map of pairs: name of corresponding data and its dimension.
*/
INFERENCE_ENGINE_DEPRECATED("InferenceEngine::CNNNetwork::reshape(const ICNNNetwork::InputShapes&)")
void reshape(const std::initializer_list<ICNNNetwork::InputShapes::value_type>& inputShapes);
IE_SUPPRESS_DEPRECATED_END

/**
* @deprecated Use ov::Function::reshape for dynamic shapes
* @brief Run shape inference with new input partial shapes for the network
* @param inputShapes A map of pairs: name of corresponding data and its dimension.
*/
INFERENCE_ENGINE_DEPRECATED("Use ov::Function::reshape for dynamic shapes")
void reshape(const std::map<std::string, ngraph::PartialShape>& inputShapes);

/**
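Both reshape overloads deprecated here remain callable; transitional callers wrap them in the suppression macros, and the long-term replacement named in the message is ov::Function::reshape. A hedged sketch of the transition ("input" is an illustrative tensor name, not one from this commit, and the exact ov::Function::reshape overload set depends on the release):

#include <inference_engine.hpp>

void reshapeDynamicInput(InferenceEngine::CNNNetwork& network) {
    // Transitional: the PartialShape overload still works but now warns.
    IE_SUPPRESS_DEPRECATED_START
    network.reshape({{"input",
                      ngraph::PartialShape{1, 3, ngraph::Dimension::dynamic(),
                                           ngraph::Dimension::dynamic()}}});
    IE_SUPPRESS_DEPRECATED_END

    // Preferred, per the deprecation message (pseudocode -- verify the exact
    // ov::Function::reshape signature against your release's headers):
    // network.getFunction()->reshape({{"input", ...}});
}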
17 changes: 14 additions & 3 deletions inference-engine/src/inference_engine/include/ie/ie_data.h
@@ -11,13 +11,14 @@

#include <map>
#include <memory>
#include <ngraph/partial_shape.hpp>
#include <string>
#include <vector>

#include "ie_api.h"
#include "ie_common.h"
#include "ie_layouts.h"
#include "ie_precision.hpp"
#include "ngraph/partial_shape.hpp"

namespace InferenceEngine {

@@ -40,13 +41,15 @@ class INFERENCE_ENGINE_API_CLASS(Data) {
Data(const std::string& name, Precision _precision, Layout layout = NCHW);

/**
* @deprecated Use OpenVINO 2.0 API for dynamic shapes support
* @brief A constructor with partial shape
*
* @param name Name of the data node
* @param _precision Precision of the data
* @param shape Partial shape of the data
* @param layout Data layout
*/
INFERENCE_ENGINE_DEPRECATED("Use OpenVINO 2.0 API for dynamic shapes support")
Data(const std::string& name, Precision _precision, const ngraph::PartialShape& shape, Layout layout = BLOCKED);

/**
@@ -102,19 +105,25 @@ class INFERENCE_ENGINE_API_CLASS(Data) {
* @param layout new layout
*/
void reshape(const SizeVector& dims, Layout layout);

/**
* @deprecated Use InferenceEngine::Data::reshape(const SizeVector&, Layout)
* @brief changes dims and layout at same time
*
* @param dims new dimensions
* @param layout new layout
*/
INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::Data::reshape(const SizeVector&, Layout)")
void reshape(const std::initializer_list<size_t>& dims, Layout layout);

/**
* @deprecated Use OpenVINO 2.0 API for dynamic shapes support
* @brief changes dims and layout at same time
*
* @param dims new dimensions
* @param layout new layout
*/
INFERENCE_ENGINE_DEPRECATED("Use OpenVINO 2.0 API for dynamic shapes support")
void reshape(const ngraph::PartialShape& dims, Layout layout);

/**
@@ -168,15 +177,19 @@ class INFERENCE_ENGINE_API_CLASS(Data) {
const UserValue& getUserObject() const;

/**
* @deprecated Use OpenVINO 2.0 API for dynamic shapes support
* @brief Checks if current data has dynamic shapes
* @return true if data has dynamic shapes
*/
INFERENCE_ENGINE_DEPRECATED("Use OpenVINO 2.0 API for dynamic shapes support")
bool isDynamic() const;

/**
* @deprecated Use OpenVINO 2.0 API for dynamic shapes support
* @brief Returns partial shapes
* @return shapes which can have dynamic dimensions
*/
INFERENCE_ENGINE_DEPRECATED("Use OpenVINO 2.0 API for dynamic shapes support")
const ngraph::PartialShape& getPartialShape() const;

/**
@@ -200,7 +213,5 @@
* @brief A tensor descriptor
*/
mutable TensorDesc tensorDesc;

ngraph::PartialShape pShape;
};
} // namespace InferenceEngine
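Of the three reshape overloads above, only the SizeVector one remains undeprecated, so static-shape callers migrate mechanically. A minimal sketch (assuming a valid DataPtr; the function name is illustrative):

#include <inference_engine.hpp>

void reshapeStatic(const InferenceEngine::DataPtr& data) {
    // Deprecated initializer_list form:
    //     data->reshape({1, 3, 224, 224}, InferenceEngine::Layout::NCHW);
    // Undeprecated replacement named in the @deprecated tag:
    InferenceEngine::SizeVector dims{1, 3, 224, 224};
    data->reshape(dims, InferenceEngine::Layout::NCHW);
}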
@@ -13,6 +13,7 @@
#include <memory>
#include <string>

#include "ie_api.h"
#include "ie_blob.h"
#include "ie_common.h"
#include "ie_data.h"
@@ -139,14 +140,18 @@ class InputInfo {
}

/**
* @deprecated Use OpenVINO 2.0 API for dynamic shapes support
* @brief Returns the input shape. May have undefined dimensions.
* @return PartialShape object describing input shape.
*/
INFERENCE_ENGINE_DEPRECATED("Use OpenVINO 2.0 API for dynamic shapes support")
ngraph::PartialShape getPartialShape() {
if (!_inputData) {
IE_THROW() << "Data is empty!";
}
IE_SUPPRESS_DEPRECATED_START
return _inputData->getPartialShape();
IE_SUPPRESS_DEPRECATED_END
}

/**
@@ -74,6 +74,7 @@ void CNNNetworkNGraphImpl::createDataForResult(const ::ngraph::Output<::ngraph::
IE_THROW() << outName << " has zero dimension which is not allowed";
}

IE_SUPPRESS_DEPRECATED_START
const Layout rankLayout = rank < 0 ? Layout::BLOCKED : TensorDesc::getLayoutByRank(rank);
if (ptr) {
const auto origLayout = ptr->getTensorDesc().getLayout();
@@ -83,6 +84,7 @@
const auto precision = details::convertPrecision(output.get_element_type());
ptr.reset(new Data(outName, precision, shape, rankLayout));
}
IE_SUPPRESS_DEPRECATED_END
}

void CNNNetworkNGraphImpl::validateFunctionNames() const {
@@ -189,14 +191,18 @@ CNNNetworkNGraphImpl::CNNNetworkNGraphImpl(const CNNNetwork& network) {
InputInfo::Ptr info = std::make_shared<InputInfo>();
const auto& name = inputInfo.second->getInputData()->getName();
const auto& inData = inputInfo.second->getInputData();
IE_SUPPRESS_DEPRECATED_START
DataPtr input =
std::make_shared<Data>(name, inData->getPrecision(), inData->getPartialShape(), inData->getLayout());
IE_SUPPRESS_DEPRECATED_END
_data[name] = input;
info->setInputData(input);
info->getPreProcess() = inputInfo.second->getPreProcess();
info->setPrecision(inputInfo.second->getPrecision());
IE_SUPPRESS_DEPRECATED_START
if (!inData->isDynamic())
info->setLayout(inputInfo.second->getLayout());
IE_SUPPRESS_DEPRECATED_END
_inputData[name] = info;
}
}
@@ -259,7 +259,9 @@ void IInferRequestInternal::checkBlob(const Blob::Ptr& blob,
if (foundInputPair == std::end(_networkInputs)) {
IE_THROW(NotFound) << "Failed to find input with name: \'" << name << "\'";
}
IE_SUPPRESS_DEPRECATED_START
isDynamic = foundInputPair->second->getInputData()->getPartialShape().is_dynamic();
IE_SUPPRESS_DEPRECATED_END
dims = foundInputPair->second->getTensorDesc().getDims();
refSize = foundInputPair->second->getTensorDesc().getLayout() != SCALAR ? details::product(dims) : 1;
} else {
@@ -271,6 +273,7 @@
if (foundOutputPair == std::end(_networkOutputs)) {
IE_THROW(NotFound) << "Failed to find output with name: \'" << name << "\'";
}
IE_SUPPRESS_DEPRECATED_START
isDynamic = foundOutputPair->second->getPartialShape().is_dynamic();
ngraph::PartialShape blobPartialShape(blob->getTensorDesc().getDims());
if (foundOutputPair->second->getPartialShape().compatible(blobPartialShape)) {
Expand All @@ -280,6 +283,7 @@ void IInferRequestInternal::checkBlob(const Blob::Ptr& blob,
// need to immediately throw here
dims = foundOutputPair->second->getTensorDesc().getDims();
}
IE_SUPPRESS_DEPRECATED_END
refSize = foundOutputPair->second->getTensorDesc().getLayout() != SCALAR ? details::product(dims) : 1;
}
} else {
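The dynamic-output branch above leans on PartialShape::compatible, which treats a dynamic dimension as matching any concrete value. A minimal sketch of that semantics (shapes here are illustrative):

#include <ngraph/partial_shape.hpp>

bool blobMatchesDynamicOutput() {
    // '?' (dynamic) in the model shape is compatible with the concrete 3.
    ngraph::PartialShape outputShape{1, ngraph::Dimension::dynamic(), 224, 224};
    ngraph::PartialShape blobShape{1, 3, 224, 224};
    return outputShape.compatible(blobShape);  // true
}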
(The remaining changed files are not shown.)
