
Commit fcce60c

fix metadata name in compile
1 parent adc2201 commit fcce60c

File tree

12 files changed: +129 −141 lines changed


src/plugins/intel_npu/src/compiler_adapter/src/plugin_compiler_adapter.cpp

Lines changed: 7 additions & 0 deletions
@@ -123,20 +123,24 @@ std::shared_ptr<IGraph> PluginCompilerAdapter::compile(const std::shared_ptr<con
     ov::Tensor tensor = make_tensor_from_vector(networkDesc.compiledNetwork);
     GraphDescriptor graphDesc;
     NetworkMetadata networkMeta;
+    std::cout << "====1====networkMeta.name is =" << networkMeta.name << "-" << std::endl;

     if (_zeGraphExt) {
         // Depending on the config, we may get an error when trying to get the graph handle from the compiled
         // network
         try {
             graphDesc = _zeGraphExt->getGraphDescriptor(tensor.data(), tensor.get_byte_size());
             networkMeta = _zeGraphExt->getNetworkMeta(graphDesc);
+            std::cout << "====2====networkMeta.name is =" << networkMeta.name << "-" << std::endl;
+            networkMeta.name = model->get_friendly_name();
         } catch (...) {
             _logger.info("Failed to obtain the level zero graph handle. Inference requests for this model are not "
                          "allowed. Only exports are available");
         }
     } else {
         _logger.warning("no zeGraphExt, metadata is empty from vcl compiler");
     }
+    std::cout << "====3====networkMeta.name is =" << networkMeta.name << "-" << std::endl;

     return std::make_shared<Graph>(
         _zeGraphExt,

@@ -310,9 +314,12 @@ std::shared_ptr<IGraph> PluginCompilerAdapter::parse(
         // If the metadata is empty, we can try to get it from the driver parser
         _logger.info("Metadata is empty, trying to get it from the driver parser");
         networkMeta = _zeGraphExt->getNetworkMeta(mainGraphDesc);
+        std::cout << "RUN here == for vcl adapter call===" << std::endl;
         if (model) {
+            std::cout << "RUN here == for vcl adapter call 1===" << std::endl;
            networkMeta.name = model.value()->get_friendly_name();
         } else {
+            std::cout << "RUN here == for vcl adapter call 2===" << std::endl;
            _logger.warning("networkMeta name is empty!");
         }
     }
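Note on the compile() hunk above: networkMeta.name is now overwritten with the source model's friendly name right after the driver metadata is fetched, and that name is what CompiledModel::get_property(ov::model_name) later returns. A minimal sketch of how this could be observed through the public OpenVINO API (the model path is a placeholder; an NPU device is assumed to be available):

#include <iostream>
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // Placeholder IR path; any readable model works for this sketch.
    std::shared_ptr<ov::Model> model = core.read_model("model.xml");
    model->set_friendly_name("my_network");

    ov::CompiledModel compiled = core.compile_model(model, "NPU");

    // With the override above, the compiled model is expected to report the
    // friendly name of the source model rather than the name stored in the
    // compiler/driver metadata.
    std::cout << compiled.get_property(ov::model_name) << std::endl;
    return 0;
}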

src/plugins/intel_npu/src/plugin/src/compiled_model.cpp

Lines changed: 5 additions & 0 deletions
@@ -166,11 +166,16 @@ void CompiledModel::set_property(const ov::AnyMap& properties) {
 }

 ov::Any CompiledModel::get_property(const std::string& name) const {
+    std::cout << "=========CompiledModel::get_property() ov::model_name.name() is " << ov::model_name.name()
+              << std::endl;
+    std::cout << "=========CompiledModel::get_property() passed name is " << name << std::endl;
     // special cases
     if (name == ov::model_name.name()) {
         OPENVINO_ASSERT(_graph != nullptr, "Missing graph");
+        std::cout << "=========CompiledModel::get_property() 1 " << std::endl;
         return _graph->get_metadata().name;
     } else {
+        std::cout << "=========CompiledModel::get_property() 2 " << std::endl;
         // default behaviour
         return _properties->get_property(name);
     }
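For reference, the special case above is only reachable through ov::model_name; every other key falls through to the plugin's property store. A short sketch of both branches as seen from the caller (assuming a valid compiled model):

#include <iostream>
#include <openvino/openvino.hpp>

// Queries one property from each branch of CompiledModel::get_property().
void query_properties(const ov::CompiledModel& compiled) {
    // Special-cased branch: answered from the graph metadata name.
    std::cout << "model_name: " << compiled.get_property(ov::model_name) << "\n";

    // Default branch: delegated to _properties->get_property(name).
    std::cout << "optimal_number_of_infer_requests: "
              << compiled.get_property(ov::optimal_number_of_infer_requests) << "\n";
}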

src/plugins/intel_npu/src/plugin/src/plugin.cpp

Lines changed: 2 additions & 0 deletions
@@ -762,7 +762,9 @@ std::shared_ptr<ov::ICompiledModel> Plugin::compile_model(const std::shared_ptr<
     const auto set_cache_dir = localConfig.get<CACHE_DIR>();
     if (!set_cache_dir.empty()) {
         const auto compilerType = localConfig.get<COMPILER_TYPE>();
+        std::cout << "----call set_cache_dir and compilertype is mlir---" << std::endl;
         if (compilerType == ov::intel_npu::CompilerType::PLUGIN) {
+            std::cout << "----throw ov::cache dir excpetion--" << std::endl;
             OPENVINO_THROW("Option 'CACHE_DIR' is not supported with PLUGIN compiler type");
         }
     }
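The check above rejects CACHE_DIR when the PLUGIN compiler type is selected. A hedged sketch of how that surfaces to a caller (the model path is a placeholder, and the private-properties header name is taken from the includes touched elsewhere in this diff):

#include <openvino/openvino.hpp>
#include "intel_npu/npu_private_properties.hpp"  // for ov::intel_npu::compiler_type

int main() {
    ov::Core core;
    auto model = core.read_model("model.xml");  // placeholder path

    try {
        core.compile_model(model,
                           "NPU",
                           ov::cache_dir("./npu_cache"),
                           ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::PLUGIN));
    } catch (const ov::Exception&) {
        // Expected: "Option 'CACHE_DIR' is not supported with PLUGIN compiler type"
    }
    return 0;
}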

src/plugins/intel_npu/tests/functional/behavior/ov_infer_request/infer_request_run.hpp

Lines changed: 2 additions & 5 deletions
@@ -82,8 +82,7 @@ class InferRequestRunTests : public ov::test::behavior::OVPluginTestBase,
         auto hash = std::to_string(std::hash<std::string>()(test_name));
         std::stringstream ss;
         auto ts = duration_cast<nanoseconds>(high_resolution_clock::now().time_since_epoch());
-        ss << hash << "_"
-           << "_" << ts.count();
+        ss << hash << "_" << "_" << ts.count();
         return ss.str();
     }

@@ -467,7 +466,7 @@ TEST_P(BatchingRunTests, CheckBatchingSupportInfer) {
 TEST_P(BatchingRunTests, CheckBatchingSupportAsync) {
     SKIP_IF_CURRENT_TEST_IS_DISABLED();

-    ov::CompiledModel compiled_model;
+    ov::CompiledModel compiled_model;
     ov::InferRequest inference_request;
     auto batch_shape = Shape{4, 2, 32, 32};
     std::shared_ptr<ov::Model> ov_model_batch = createModel(element::f32, batch_shape, "N...");

@@ -557,7 +556,6 @@ TEST_P(BatchingRunTests, SetInputTensorAsync) {
 }

 TEST_P(BatchingRunTests, SetInputTensorInfer_Caching) {
-    // need to skip or set driver compiler type
     auto batch_shape = Shape{4, 2, 2, 2};
     auto shape_size = ov::shape_size(batch_shape);
     auto model = createModel(element::f32, batch_shape, "N...");

@@ -567,7 +565,6 @@ TEST_P(BatchingRunTests, SetInputTensorInfer_Caching) {

     m_cache_dir = generateCacheDirName(GetTestName());
     core->set_property({ov::cache_dir(m_cache_dir)});
-    core->set_property({ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)});
     auto compiled_model_no_cache = core->compile_model(model, target_device, configuration);
     compiled_model = core->compile_model(model, target_device, configuration);
     ov::InferRequest inference_request;

src/plugins/intel_npu/tests/functional/behavior/ov_plugin/caching_tests.cpp

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ using namespace ov::test::behavior;

 namespace {

-std::vector<ov::AnyMap> config = {{ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)}};
+std::vector<ov::AnyMap> config = {{}};

 INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests,
                          OVCompileModelLoadFromFileTestBaseNPU,
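With the explicit compiler_type(DRIVER) entry dropped, these caching tests now run with whatever compiler the NPU plugin selects by default. The same pattern outside the test fixture looks roughly like this (cache folder and model path are placeholders):

#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    core.set_property(ov::cache_dir("./test_cache"));  // placeholder cache folder

    // The first compilation populates the cache; the second one is expected to
    // import the cached blob. No compiler_type is forced, so the plugin's
    // default compiler selection applies.
    auto first = core.compile_model("model.xml", "NPU");  // placeholder model path
    auto second = core.compile_model("model.xml", "NPU");
    return 0;
}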

src/plugins/intel_npu/tests/functional/behavior/ov_plugin/caching_tests.hpp

Lines changed: 0 additions & 2 deletions
@@ -11,7 +11,6 @@

 #include "intel_npu/utils/zero/zero_init.hpp"
 #include "openvino/core/log_util.hpp"
-// #include "intel_npu/npu_private_properties.hpp"

 namespace ov {
 namespace test {

@@ -21,7 +20,6 @@ using OVCompileModelLoadFromFileTestBaseNPU = CompileModelLoadFromFileTestBase;

 TEST_P(OVCompileModelLoadFromFileTestBaseNPU, BlobWithOVHeaderAligmentCanBeImported) {
     core->set_property(ov::cache_dir(m_cacheFolderName));
-    // core->set_property({intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)});

     ze_device_external_memory_properties_t externalMemorydDesc = {};
     externalMemorydDesc.stype = ZE_STRUCTURE_TYPE_DEVICE_EXTERNAL_MEMORY_PROPERTIES;

src/plugins/intel_npu/tests/functional/behavior/remote_tensor_tests/remote_run.hpp

Lines changed: 1 addition & 3 deletions
@@ -74,8 +74,7 @@ class RemoteRunTests : public ov::test::behavior::OVPluginTestBase,
         auto hash = std::to_string(std::hash<std::string>()(test_name));
         std::stringstream ss;
         auto ts = duration_cast<nanoseconds>(high_resolution_clock::now().time_since_epoch());
-        ss << hash << "_"
-           << "_" << ts.count();
+        ss << hash << "_" << "_" << ts.count();
         return ss.str();
     }

@@ -557,7 +556,6 @@ TEST_P(RemoteRunTests, CheckImportModelPath) {

     m_cache_dir = generateCacheDirName(GetTestName());
     core->set_property({ov::cache_dir(m_cache_dir)});
-    core->set_property({ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)});
     auto compiled_model_no_cache = core->compile_model(ov_model, zero_context, configuration);
     compiled_model = core->compile_model(ov_model, zero_context, configuration);

src/plugins/intel_npu/tests/functional/shared_tests_instances/behavior/ov_plugin/caching_tests.cpp

Lines changed: 4 additions & 8 deletions
@@ -78,11 +78,9 @@ static std::string getTestCaseName(const testing::TestParamInfo<compileModelLoad

 const std::vector<ov::AnyMap> LoadFromFileConfigs = {
     {ov::device::properties(ov::test::utils::DEVICE_NPU, {}),
-     ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
-     ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)},
+     ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)},
     {ov::device::properties(ov::test::utils::DEVICE_NPU, {}),
-     ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY),
-     ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)}};
+     ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)}};

 const std::vector<std::string> TestTargets = {
     ov::test::utils::DEVICE_AUTO,

@@ -96,10 +94,8 @@ INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_Auto_BehaviorTests_CachingSupportCase_NP
     getTestCaseName);

 const std::vector<ov::AnyMap> NPULoadFromFileConfigs = {
-    {ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
-     ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)},
-    {ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY),
-     ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)}};
+    {ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)},
+    {ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)}};

 const std::vector<std::pair<ov::AnyMap, std::string>> NPUCompiledKernelsCacheTest = {
     std::make_pair<ov::AnyMap, std::string>({ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)},

src/plugins/intel_npu/tests/functional/shared_tests_instances/behavior/ov_plugin/core_threading_tests.cpp

Lines changed: 4 additions & 17 deletions
@@ -10,30 +10,17 @@

 namespace {

-// const Params params[] = {
-//     std::tuple<Device, Config>{ov::test::utils::DEVICE_NPU,
-//                                {{ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)}}},
-//     std::tuple<Device, Config>{ov::test::utils::DEVICE_NPU,
-//                                {{ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)}}}};
-
-// const Params params_disable_umd_cache[] = {std::tuple<Device, Config>{
-//     ov::test::utils::DEVICE_NPU,
-//     {{ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), ov::intel_npu::bypass_umd_caching(true)}}}};
-
-// const Params params_cached[] = {std::tuple<Device, Config>{ov::test::utils::DEVICE_NPU, {}}};
-
 const Params params[] = {
     std::tuple<Device, Config>{ov::test::utils::DEVICE_NPU,
-                               {{ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY),ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)}}},
+                               {{ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)}}},
     std::tuple<Device, Config>{ov::test::utils::DEVICE_NPU,
-                               {{ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)}}}
-};
+                               {{ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)}}}};

 const Params params_disable_umd_cache[] = {std::tuple<Device, Config>{
     ov::test::utils::DEVICE_NPU,
-    {{ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), ov::intel_npu::bypass_umd_caching(true), ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)}}}};
+    {{ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), ov::intel_npu::bypass_umd_caching(true)}}}};

-const Params params_cached[] = {std::tuple<Device, Config>{ov::test::utils::DEVICE_NPU, {ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)}}};
+const Params params_cached[] = {std::tuple<Device, Config>{ov::test::utils::DEVICE_NPU, {}}};

 } // namespace
src/tests/functional/base_func_tests/src/behavior/compiled_model/properties.cpp

Lines changed: 0 additions & 1 deletion
@@ -81,7 +81,6 @@ void OVCompileModelGetExecutionDeviceTests::SetUp() {
 TEST_P(OVClassCompiledModelPropertiesTests, CanUseCache) {
     std::string cache_dir = "./test_cache";
     core->set_property(ov::cache_dir(cache_dir));
-    // core->set_property({ov::intel_npu::compiler_type(ov::intel_npu::CompilerType::DRIVER)});
     OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties));
     OV_ASSERT_NO_THROW(core->compile_model(model, target_device, properties));
     ov::test::utils::removeFilesWithExt<opt::FORCE>(cache_dir, "blob");
