ngraph::opset -> ov::opset
vurusovs committed Dec 5, 2023
1 parent 4baa7ed commit 368db6d
Showing 11 changed files with 29 additions and 29 deletions.
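
The change is a mechanical namespace migration: every ngraph::opsetN reference becomes the equivalent ov::opsetN one. A minimal sketch of the pattern follows (illustrative only, not part of the diff; it assumes the standard OpenVINO opset headers, and make_result is a hypothetical helper):

#include <memory>
#include <openvino/opsets/opset1.hpp>

// Hypothetical helper showing the rename; ov::opset1::Result refers to
// the same v0 Result op that ngraph::opset1::Result used to alias.
std::shared_ptr<ov::opset1::Result> make_result(const ov::Output<ov::Node>& value) {
    // Before this commit: std::make_shared<ngraph::opset1::Result>(value);
    return std::make_shared<ov::opset1::Result>(value);
}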
@@ -200,7 +200,7 @@ void FusedConvBackpropData::validate_and_infer_types() {
// If auto_pad has one of following mode we infer paddings. Otherwise in
// EXPLICIT auto_pad mode we use what is provided.
if (auto_pad_ == ov::op::PadType::SAME_UPPER || auto_pad_ == ov::op::PadType::SAME_LOWER) {
- ngraph::opset1::infer_conv_backprop_auto_padding(
+ ov::opset1::infer_conv_backprop_auto_padding(
ov::Shape{std::next(data_shape.begin(), 2), std::end(data_shape)},
ov::Shape{std::next(filters_shape.begin(), 2), std::end(filters_shape)},
output_shape,
@@ -128,7 +128,7 @@ class ConvolutionBackpropDataExtendedLayerTest
const ov::op::PadType &autoPad,
bool addBiases = false,
const std::vector<float> &biasesWeights = {}) {
- return std::make_shared<ngraph::opset1::ConvolutionBackpropData>(
+ return std::make_shared<ov::opset1::ConvolutionBackpropData>(
in, weights, output, strides, padsBegin, padsEnd, dilations, autoPad);
}

@@ -158,7 +158,7 @@ class ConvolutionBackpropDataExtendedLayerTest

auto outputShapeNode = std::make_shared<ov::op::v0::Constant>(
ov::element::Type_t::i64, ov::Shape{outputShapeData.size()}, outputShapeData);
- auto convBackpropData = std::dynamic_pointer_cast<ngraph::opset1::ConvolutionBackpropData>(
+ auto convBackpropData = std::dynamic_pointer_cast<ov::opset1::ConvolutionBackpropData>(
makeConvolutionBackpropData(params[0],
outputShapeNode,
ngPrc,
@@ -169,7 +169,7 @@
dilation,
padType,
convOutChannels));
- ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(convBackpropData)};
+ ov::ResultVector results{std::make_shared<ov::opset1::Result>(convBackpropData)};
function = std::make_shared<ngraph::Function>(results, params, "convolutionBackpropData");
}
};
@@ -126,7 +126,7 @@ class ConvolutionBackpropDataAddExtendedLayerTest
const ov::op::PadType &autoPad,
bool addBiases = false,
const std::vector<float> &biasesWeights = {}) {
- return std::make_shared<ngraph::opset1::ConvolutionBackpropData>(
+ return std::make_shared<ov::opset1::ConvolutionBackpropData>(
in, weights, output, strides, padsBegin, padsEnd, dilations, autoPad);
}

@@ -156,7 +156,7 @@ class ConvolutionBackpropDataAddExtendedLayerTest

auto outputShapeNode = std::make_shared<ov::op::v0::Constant>(
ov::element::Type_t::i64, ov::Shape{outputShapeData.size()}, outputShapeData);
- auto convBackpropData = std::dynamic_pointer_cast<ngraph::opset1::ConvolutionBackpropData>(
+ auto convBackpropData = std::dynamic_pointer_cast<ov::opset1::ConvolutionBackpropData>(
makeConvolutionBackpropData(params[0],
outputShapeNode,
ngPrc,
@@ -172,7 +172,7 @@ class ConvolutionBackpropDataAddExtendedLayerTest
ov::test::utils::fill_tensor_random(random_tensor);
auto addConstant = std::make_shared<ov::op::v0::Constant>(random_tensor);
auto add = std::make_shared<ov::op::v1::Add>(convBackpropData, addConstant);
- ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(add)};
+ ov::ResultVector results{std::make_shared<ov::opset1::Result>(add)};
function = std::make_shared<ngraph::Function>(results, params, "convolutionBackpropData");
}
};
@@ -50,7 +50,7 @@ struct ConvBAATraits {};

template <>
struct ConvBAATraits<ConvolutionLayerTest> {
- using ConvNode = ngraph::opset1::Convolution;
+ using ConvNode = ov::opset1::Convolution;
using ConvParamSet = convLayerTestParamsSet;
using ConvSpecParamsSet = convSpecificParams;
using ConvBAAParamSet = convBAATestParamSet;
@@ -59,7 +59,7 @@ struct ConvBAATraits<ConvolutionLayerTest> {

template <>
struct ConvBAATraits<GroupConvolutionLayerTest> {
- using ConvNode = ngraph::opset1::GroupConvolution;
+ using ConvNode = ov::opset1::GroupConvolution;
using ConvParamSet = groupConvLayerTestParamsSet;
using ConvSpecParamsSet = groupConvSpecificParams;
using ConvBAAParamSet = groupConvBAATestParamSet;
@@ -127,7 +127,7 @@ class BasicConvolutionBiasAddActivationLayerTest

std::shared_ptr<ov::Node> lastNode;
if constexpr (HasAddNode) {
- auto addParam = std::make_shared<ngraph::opset1::Parameter>(ngNetPrc, convLayer->get_output_shape(0));
+ auto addParam = std::make_shared<ov::opset1::Parameter>(ngNetPrc, convLayer->get_output_shape(0));
params.push_back(addParam);
auto addLayer = std::make_shared<ov::op::v1::Add>(biasAddLayer, addParam);
lastNode = addLayer;
@@ -138,7 +138,7 @@
lastNode = ngraph::builder::makeActivation(lastNode, ngNetPrc, activation);
}

- ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(lastNode)};
+ ov::ResultVector results{std::make_shared<ov::opset1::Result>(lastNode)};
function = std::make_shared<ngraph::Function>(results, params, Traits::name);
}

@@ -98,7 +98,7 @@ class FullyConnectedLayerTest : public testing::WithParamInterface<FullyConnecte
auto MatMul = std::make_shared<ov::op::v0::MatMul>(
params[0], secondaryInput, shapeRelatedParams.input1.second, shapeRelatedParams.input2.second);
auto Add = std::make_shared<ov::op::v1::Add>(MatMul, thirdInput);
- ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(Add)};
+ ov::ResultVector results{std::make_shared<ov::opset1::Result>(Add)};
function = std::make_shared<ngraph::Function>(results, params, "FullyConnected");
}
};
@@ -224,7 +224,7 @@ class FullyConnectedLayer2MatMulTest : public testing::WithParamInterface<FullyC
shapeRelatedParams.matmul2_input1.second,
shapeRelatedParams.matmul2_input2.second);
auto Add = std::make_shared<ov::op::v1::Add>(matMul0, matMul1);
- ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(Add)};
+ ov::ResultVector results{std::make_shared<ov::opset1::Result>(Add)};
function = std::make_shared<ngraph::Function>(results, params, "FullyConnected");
}
};
@@ -34,7 +34,7 @@ class CUDNNGRUCellTest : public UnsymmetricalComparer<GRUCellTest> {
const auto& ops = function->get_ordered_ops();
int seed = SEED_FIRST;
for (const auto& op : ops) {
- if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
+ if (std::dynamic_pointer_cast<ov::opset1::Constant>(op)) {
ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed++);
function->replace_node(op, std::make_shared<ov::op::v0::Constant>(random_tensor));
@@ -26,7 +26,7 @@ class CUDNNGRUSequenceTest : public UnsymmetricalComparer<GRUSequenceTest> {
const auto& ops = function->get_ordered_ops();
int seed = 1;
for (const auto& op : ops) {
- if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
+ if (std::dynamic_pointer_cast<ov::opset1::Constant>(op)) {
if (op->get_element_type() == ov::element::Type_t::f32) {
ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed++);
@@ -56,7 +56,7 @@ class LPCNetCUDNNGRUSequenceTest : public UnsymmetricalComparer<GRUSequenceTest>
const auto& ops = function->get_ordered_ops();
int seed = 1;
for (const auto& op : ops) {
- if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
+ if (std::dynamic_pointer_cast<ov::opset1::Constant>(op)) {
if (op->get_element_type() == ov::element::Type_t::f32) {
ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed++);
@@ -118,8 +118,8 @@ class LPCNetCUDNNGRUSequenceTest : public UnsymmetricalComparer<GRUSequenceTest>
true,
direction,
mode);
- ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(gru_sequence->output(0)),
- std::make_shared<ngraph::opset1::Result>(gru_sequence->output(1))};
+ ov::ResultVector results{std::make_shared<ov::opset1::Result>(gru_sequence->output(0)),
+ std::make_shared<ov::opset1::Result>(gru_sequence->output(1))};
function = std::make_shared<ngraph::Function>(results, params, "gru_sequence");
bool ti_found = ngraph::helpers::is_tensor_iterator_exist(function);
EXPECT_EQ(ti_found, false);
@@ -37,7 +37,7 @@ class CUDNNLSTMCellTest : public LSTMCellTest {
const auto& ops = function->get_ordered_ops();
int seed = SEED_FIRST;
for (const auto& op : ops) {
- if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
+ if (std::dynamic_pointer_cast<ov::opset1::Constant>(op)) {
ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed);
auto constant = std::make_shared<ov::op::v0::Constant>(random_tensor);
@@ -26,7 +26,7 @@ class CUDALSTMSequenceTest : public UnsymmetricalComparer<LSTMSequenceTest> {
int counter = 1;
const auto& ops = function->get_ordered_ops();
for (const auto& op : ops) {
- if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
+ if (std::dynamic_pointer_cast<ov::opset1::Constant>(op)) {
if (op->get_element_type() == ov::element::Type_t::f32) {
ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, counter++);
@@ -23,14 +23,14 @@ const std::vector<int64_t> k = {
10,
};

- const std::vector<ngraph::opset4::TopK::Mode> modes = {
-     ngraph::opset4::TopK::Mode::MIN,
-     ngraph::opset4::TopK::Mode::MAX,
+ const std::vector<ov::opset4::TopK::Mode> modes = {
+     ov::opset4::TopK::Mode::MIN,
+     ov::opset4::TopK::Mode::MAX,
};

- const std::vector<ngraph::opset4::TopK::SortType> sortTypes = {
-     ngraph::opset4::TopK::SortType::SORT_INDICES,
-     ngraph::opset4::TopK::SortType::SORT_VALUES,
+ const std::vector<ov::opset4::TopK::SortType> sortTypes = {
+     ov::opset4::TopK::SortType::SORT_INDICES,
+     ov::opset4::TopK::SortType::SORT_VALUES,
};

const std::vector<int64_t> axes3D = {
6 changes: 3 additions & 3 deletions modules/nvidia_plugin/tests/unit/cuda_multi_graph_test.cpp
@@ -68,7 +68,7 @@ class AddMul {
const auto add1 = ngraph::builder::makeEltwise(params[2], params[3], EltwiseTypes::ADD);

const auto mul = ngraph::builder::makeEltwise(add0, add1, EltwiseTypes::MULTIPLY);
- const auto result = std::make_shared<ngraph::opset1::Result>(mul);
+ const auto result = std::make_shared<ov::opset1::Result>(mul);
return std::make_shared<ov::Model>(result, params, "AddMul");
}

@@ -114,8 +114,8 @@ class AddConcat {

constexpr int64_t axis = CONCAT_AXIS;
const auto concat =
- std::make_shared<ngraph::opset1::Concat>(ov::OutputVector{add0, add1}, axis);
- const auto result = std::make_shared<ngraph::opset1::Result>(concat);
+ std::make_shared<ov::opset1::Concat>(ov::OutputVector{add0, add1}, axis);
+ const auto result = std::make_shared<ov::opset1::Result>(concat);
return std::make_shared<ov::Model>(result, params, "AddConcat");
}

