
Commit 86f284f

Sarkars/update for 17rc1 (#164)
1 parent 383c9f9 commit 86f284f

13 files changed: +312, -53 lines changed

README.md

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ Once TensorFlow's dependencies are installed, clone the `ngraph-bridge` repo:
 
     git clone https://github.com/tensorflow/ngraph-bridge.git
     cd ngraph-bridge
-    git checkout v0.17.0-rc0
+    git checkout v0.17.0-rc1
 
 Run the following Python script to build TensorFlow, nGraph, and the bridge. Use Python 3.5:

ngraph_bridge/grappler/ngraph_optimizer.cc

Lines changed: 10 additions & 7 deletions
@@ -51,6 +51,7 @@ Status NgraphOptimizer::Init(
     }
   }
   config_backend_name = params.at("ngraph_backend").s();
+  config_device_id = params.at("device_id").s();
   NGRAPH_VLOG(3) << "Backend name from config: " << config_backend_name;
   for (auto i : params) {
     if (i.first != "ngraph_backend") {

@@ -195,19 +196,21 @@ Status NgraphOptimizer::Optimize(tensorflow::grappler::Cluster* cluster,
 
   // Get backend + its configurations, to be attached to the nodes
   // using RewriteConfig
-  string backend_name;
+  string backend_creation_string = BackendManager::GetBackendCreationString(
+      config_backend_name, config_device_id);
   if (!config_backend_name.empty()) {
-    if (!BackendManager::IsSupportedBackend(config_backend_name)) {
-      return errors::Internal("NGRAPH_TF_BACKEND: ", config_backend_name,
+    if (!BackendManager::IsSupportedBackend(backend_creation_string)) {
+      return errors::Internal("NGRAPH_TF_BACKEND: ", backend_creation_string,
                               " is not supported");
     }
-    backend_name = config_backend_name;
-    NGRAPH_VLOG(1) << "Setting backend from the RewriteConfig " << backend_name;
+    NGRAPH_VLOG(1) << "Setting backend from the RewriteConfig "
+                   << backend_creation_string;
   }
-  NGRAPH_VLOG(0) << "NGraph using backend: " << backend_name;
+  NGRAPH_VLOG(0) << "NGraph using backend: " << backend_creation_string;
 
   // 1. Mark for clustering then, if requested, dump the graphs.
-  TF_RETURN_IF_ERROR(MarkForClustering(&graph, skip_these_nodes, backend_name));
+  TF_RETURN_IF_ERROR(
+      MarkForClustering(&graph, skip_these_nodes, backend_creation_string));
   if (DumpMarkedGraphs()) {
     DumpGraphs(graph, idx, "marked", "Graph Marked for Clustering");
   }
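With the new device_id parameter read in Init(), the optimizer now hands a single backend creation string (backend plus device id) to validation and clustering. Below is a minimal, self-contained sketch of that composition step; GetBackendCreationString here is a stand-in, not ngraph-bridge's BackendManager implementation, and the "BACKEND:device_id" joining format is an assumption based on the "GPU:0" remark elsewhere in this commit.

// Sketch only: a stand-in for BackendManager::GetBackendCreationString.
// The "BACKEND:device_id" format is an assumption based on the "GPU:0"
// comment in ngraph_rewrite_pass.cc.
#include <iostream>
#include <string>

std::string GetBackendCreationString(const std::string& backend_name,
                                     const std::string& device_id) {
  // Assume an empty device id yields just the backend name.
  return device_id.empty() ? backend_name : backend_name + ":" + device_id;
}

int main() {
  // Mirrors what NgraphOptimizer::Optimize() now builds from the
  // "ngraph_backend" and "device_id" RewriteConfig parameters.
  std::cout << GetBackendCreationString("NNPI", "0") << "\n";  // NNPI:0
  std::cout << GetBackendCreationString("CPU", "") << "\n";    // CPU
}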

ngraph_bridge/grappler/ngraph_optimizer.h

Lines changed: 1 addition & 0 deletions
@@ -73,6 +73,7 @@ class NgraphOptimizer : public tensorflow::grappler::CustomGraphOptimizer {
 
  private:
  std::string config_backend_name;
+ std::string config_device_id;
  std::unordered_map<std::string, std::string> config_map;
  std::vector<string> compulsory_attrs = {"ngraph_backend", "device_id"};
 

ngraph_bridge/ngraph_builder.cc

Lines changed: 8 additions & 3 deletions
@@ -1090,7 +1090,8 @@ static Status TranslateCombinedNonMaxSuppressionOp(
   std::string backend_name;
   TF_RETURN_IF_ERROR(ngraph_bridge::GetNodeBackend(op, &backend_name));
 
-  if (backend_name != "NNPI") {
+  auto config_map = BackendManager::GetBackendAttributeValues(backend_name);
+  if (config_map.at("ngraph_backend") != "NNPI") {
     return errors::Internal("In translating CombinedNonMaxSuppression op ",
                             op->name(), " found requested backend ",
                             backend_name, " which is unsupported");

@@ -2230,7 +2231,10 @@ static Status TranslateGatherV2Op(
   std::string backend_name;
   TF_RETURN_IF_ERROR(ngraph_bridge::GetNodeBackend(op, &backend_name));
 
-  if (backend_name != "NNPI") {
+  // split and check the first part only, since the node attribute contains
+  // the full backend creation string
+  auto config_map = BackendManager::GetBackendAttributeValues(backend_name);
+  if (config_map.at("ngraph_backend") != "NNPI") {
     return errors::Internal("In translating GatherV2 op ", op->name(),
                             " found requested backend ", backend_name,
                             " which is unsupported");

@@ -2763,7 +2767,8 @@ static Status TranslateNonMaxSuppressionV4Op(
   std::string backend_name;
   TF_RETURN_IF_ERROR(ngraph_bridge::GetNodeBackend(op, &backend_name));
 
-  if (backend_name != "NNPI") {
+  auto config_map = BackendManager::GetBackendAttributeValues(backend_name);
+  if (config_map.at("ngraph_backend") != "NNPI") {
     return errors::Internal("In translating NonMaxSuppressionV4 op ",
                             op->name(), " found requested backend ",
                             backend_name, " which is unsupported");
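Each of these translators now receives the full backend creation string in the node attribute (e.g. "NNPI:0"), so only its first component is compared against "NNPI". The sketch below is a hypothetical stand-in for BackendManager::GetBackendAttributeValues, not the bridge's implementation; the key names follow the comment in ngraph_rewrite_pass.cc.

// Sketch only: a stand-in for BackendManager::GetBackendAttributeValues.
// Key names follow the "splits into {"ngraph_backend", "ngraph_device_id"}"
// comment in ngraph_rewrite_pass.cc.
#include <iostream>
#include <string>
#include <unordered_map>

std::unordered_map<std::string, std::string> GetBackendAttributeValues(
    const std::string& backend_creation_string) {
  std::unordered_map<std::string, std::string> config_map;
  const auto colon = backend_creation_string.find(':');
  config_map["ngraph_backend"] = backend_creation_string.substr(0, colon);
  config_map["ngraph_device_id"] =
      colon == std::string::npos ? ""
                                 : backend_creation_string.substr(colon + 1);
  return config_map;
}

int main() {
  // The node attribute now holds "NNPI:0" rather than "NNPI", so the
  // translators check only the first component.
  auto config_map = GetBackendAttributeValues("NNPI:0");
  std::cout << (config_map.at("ngraph_backend") == "NNPI") << "\n";  // 1
}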

ngraph_bridge/ngraph_encapsulate_clusters.cc

Lines changed: 11 additions & 8 deletions
@@ -329,14 +329,17 @@ Status EncapsulateClusters(
     }
 
     Node* n;
-    NodeBuilder nb = NodeBuilder(ss.str(), "NGraphEncapsulate")
-                         .Attr("ngraph_cluster", cluster_idx)
-                         .Attr("ngraph_backend", cluster_backend)
-                         .Attr("Targuments", input_types)
-                         .Attr("Tresults", cluster_output_dt_map[cluster_idx])
-                         .Attr("ngraph_graph_id", graph_id)
-                         .Device(device_name_map[cluster_idx])
-                         .Input(inputs);
+    NodeBuilder nb =
+        NodeBuilder(ss.str(), "NGraphEncapsulate")
+            .Attr("ngraph_cluster", cluster_idx)
+            .Attr("ngraph_backend",
+                  BackendManager::GetBackendAttributeValues(cluster_backend)
+                      .at("ngraph_backend"))
+            .Attr("Targuments", input_types)
+            .Attr("Tresults", cluster_output_dt_map[cluster_idx])
+            .Attr("ngraph_graph_id", graph_id)
+            .Device(device_name_map[cluster_idx])
+            .Input(inputs);
     if (!device_config.empty()) {
       NGRAPH_VLOG(3) << "Device config is not empty";
       for (auto const& i : device_config) {

ngraph_bridge/ngraph_mark_for_clustering.cc

Lines changed: 9 additions & 3 deletions
@@ -657,19 +657,25 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
                                                       bool* result) {
     // TODO: replace current_backend ->
     // BackendManager::GetCurrentlySetBackendName()
-    *result = (current_backend == "NNPI");
+    auto config_map =
+        BackendManager::GetBackendAttributeValues(current_backend);
+    *result = (config_map.at("ngraph_backend") == "NNPI");
     return Status::OK();
   };
 
   confirmation_function_map["NonMaxSuppressionV4"] = [&current_backend](
                                                          Node* n, bool* result) {
-    *result = (current_backend == "NNPI");
+    auto config_map =
+        BackendManager::GetBackendAttributeValues(current_backend);
+    *result = (config_map.at("ngraph_backend") == "NNPI");
     return Status::OK();
   };
 
   confirmation_function_map["CombinedNonMaxSuppression"] = [&current_backend](
                                                                Node* n, bool* result) {
-    *result = (current_backend == "NNPI");
+    auto config_map =
+        BackendManager::GetBackendAttributeValues(current_backend);
+    *result = (config_map.at("ngraph_backend") == "NNPI");
     return Status::OK();
   };
 

ngraph_bridge/ngraph_rewrite_pass.cc

Lines changed: 7 additions & 7 deletions
@@ -221,26 +221,26 @@ class NGraphEncapsulationPass : public NGraphRewritePass {
     // to be attached to the nodes
     // Precedence Order: Env Variable > BackendManager
     std::unordered_map<std::string, std::string> config_map;
-    string backend_name;
+    string backend_creation_string;
+    // GetCurrentlySetBackendName could return GPU:0 (not just GPU)
     TF_RETURN_IF_ERROR(
-        BackendManager::GetCurrentlySetBackendName(&backend_name));
+        BackendManager::GetCurrentlySetBackendName(&backend_creation_string));
 
-    // splits into {"ngraph_backend", "_ngraph_device_config"}
+    // splits into {"ngraph_backend", "ngraph_device_id"}
     config_map = BackendManager::GetBackendAttributeValues(
-        backend_name);  // SplitBackendConfig
-    backend_name = config_map.at("ngraph_backend");
+        backend_creation_string);  // SplitBackendConfig
     config_map.erase("ngraph_backend");
 
     if ((std::getenv("NGRAPH_TF_LOG_0_DISABLED") == nullptr)) {
-      NGRAPH_VLOG(0) << "NGraph using backend: " << backend_name;
+      NGRAPH_VLOG(0) << "NGraph using backend: " << backend_creation_string;
     }
 
     // Now Process the Graph
 
     // 1. Mark for clustering then, if requested, dump the graphs.
     std::set<string> skip_these_nodes = {};
     TF_RETURN_IF_ERROR(MarkForClustering(options.graph->get(), skip_these_nodes,
-                                         backend_name));
+                                         backend_creation_string));
     if (DumpMarkedGraphs()) {
       DumpGraphs(options, idx, "marked", "Graph Marked for Clustering");
     }
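After the split, the pass erases the "ngraph_backend" entry so that config_map keeps only the remaining attributes (the device id, per the updated comment), while the untouched backend_creation_string is what gets logged and passed on to MarkForClustering. A small illustration of that bookkeeping, with the map filled by hand instead of calling BackendManager:

// Sketch only: config_map bookkeeping as in NGraphEncapsulationPass,
// with the map filled by hand rather than by
// BackendManager::GetBackendAttributeValues.
#include <iostream>
#include <string>
#include <unordered_map>

int main() {
  // GetCurrentlySetBackendName could return "GPU:0" (not just "GPU").
  std::string backend_creation_string = "GPU:0";
  std::unordered_map<std::string, std::string> config_map = {
      {"ngraph_backend", "GPU"}, {"ngraph_device_id", "0"}};

  // The full creation string is logged and forwarded to clustering ...
  std::cout << "NGraph using backend: " << backend_creation_string << "\n";

  // ... while the backend key is erased so only the remaining
  // configuration entries stay behind.
  config_map.erase("ngraph_backend");
  for (const auto& kv : config_map) {
    std::cout << kv.first << " = " << kv.second << "\n";  // ngraph_device_id = 0
  }
}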

ngraph_bridge/version.cc

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@
 // candidate such as v0.7.0-rc0
 // The code in master will always have the last released version number
 // with a suffix of '-master'
-#define NG_TF_VERSION_SUFFIX "-rc0"
+#define NG_TF_VERSION_SUFFIX "-rc1"
 
 #define VERSION_STR_HELPER(x) #x
 #define VERSION_STR(x) VERSION_STR_HELPER(x)
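The VERSION_STR/VERSION_STR_HELPER pair visible in the context lines is the usual two-level stringification idiom: the extra level forces the argument to be macro-expanded before # turns it into a string literal. A self-contained illustration follows; the NG_TF_VERSION_MAJOR/MINOR/PATCH names and values are placeholders for this example, not necessarily the macros defined in version.cc.

// Sketch only: why VERSION_STR needs the VERSION_STR_HELPER level.
// The NG_TF_VERSION_MAJOR/MINOR/PATCH names and values are placeholders.
#include <iostream>

#define NG_TF_VERSION_MAJOR 0
#define NG_TF_VERSION_MINOR 17
#define NG_TF_VERSION_PATCH 0
#define NG_TF_VERSION_SUFFIX "-rc1"

#define VERSION_STR_HELPER(x) #x
#define VERSION_STR(x) VERSION_STR_HELPER(x)  // expand x first, then stringify

int main() {
  // Without the helper, #x would produce "NG_TF_VERSION_MAJOR" literally;
  // with it, the value is expanded before stringification, and adjacent
  // string literals concatenate into one version string.
  const char* version = VERSION_STR(NG_TF_VERSION_MAJOR) "."
                        VERSION_STR(NG_TF_VERSION_MINOR) "."
                        VERSION_STR(NG_TF_VERSION_PATCH) NG_TF_VERSION_SUFFIX;
  std::cout << version << "\n";  // 0.17.0-rc1
}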

python/setup.in.py

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ def get_tag(self):
 
 setup(
     name='ngraph_tensorflow_bridge',
-    version='0.17.0rc0',
+    version='0.17.0rc1',
     description='Intel nGraph compiler and runtime for TensorFlow',
     long_description=long_description,
     long_description_content_type="text/markdown",
Lines changed: 87 additions & 0 deletions
@@ -0,0 +1,87 @@
+# ==============================================================================
+# Copyright 2019 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""nGraph TensorFlow bridge test for checking backend setting using rewriter config for grappler
+
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import pytest
+import os
+import numpy as np
+import shutil
+import tensorflow as tf
+from tensorflow.core.protobuf import rewriter_config_pb2
+import ngraph_bridge
+
+from common import NgraphTest
+
+
+class TestRewriterConfigBackendSetting(NgraphTest):
+
+    @pytest.mark.skipif(
+        not ngraph_bridge.is_grappler_enabled(),
+        reason='Rewriter config only works for grappler path')
+    @pytest.mark.parametrize(("backend",), (
+        ('CPU',),
+        ('INTERPRETER',),
+    ))
+    def test_config_updater_api(self, backend):
+        dim1 = 3
+        dim2 = 4
+        a = tf.placeholder(tf.float32, shape=(dim1, dim2), name='a')
+        x = tf.placeholder(tf.float32, shape=(dim1, dim2), name='x')
+        b = tf.placeholder(tf.float32, shape=(dim1, dim2), name='y')
+        axpy = (a * x) + b
+
+        config = tf.ConfigProto()
+        rewriter_options = rewriter_config_pb2.RewriterConfig()
+        rewriter_options.meta_optimizer_iterations = (
+            rewriter_config_pb2.RewriterConfig.ONE)
+        rewriter_options.min_graph_nodes = -1
+        ngraph_optimizer = rewriter_options.custom_optimizers.add()
+        ngraph_optimizer.name = "ngraph-optimizer"
+        ngraph_optimizer.parameter_map["ngraph_backend"].s = backend.encode()
+        ngraph_optimizer.parameter_map["device_id"].s = b'0'
+        # TODO: This test will pass if grappler fails silently.
+        # Need to do something about that
+        backend_extra_params_map = {
+            'CPU': {
+                'device_config': ''
+            },
+            'INTERPRETER': {
+                'test_echo': '42',
+                'hello': '3'
+            }
+        }
+        extra_params = backend_extra_params_map[backend]
+        for k in extra_params:
+            ngraph_optimizer.parameter_map[k].s = extra_params[k].encode()
+        config.MergeFrom(
+            tf.ConfigProto(
+                graph_options=tf.GraphOptions(
+                    rewrite_options=rewriter_options)))
+
+        with tf.Session(config=config) as sess:
+            outval = sess.run(
+                axpy,
+                feed_dict={
+                    a: 1.5 * np.ones((dim1, dim2)),
+                    b: np.ones((dim1, dim2)),
+                    x: np.ones((dim1, dim2))
+                })
+        assert (outval == 2.5 * (np.ones((dim1, dim2)))).all()
