@@ -2513,6 +2513,21 @@ static Status TranslateLogSoftmaxOp(
   return Status::OK();
 }
 
+static Status TranslateSoftplusOp(
+    const Node* op, const std::vector<const Tensor*>& static_input_map,
+    Builder::OpMap& ng_op_map) {
+  shared_ptr<ng::Node> ng_inp;
+  TF_RETURN_IF_ERROR(GetInputNodes(ng_op_map, op, &ng_inp));
+  auto ng_exp = ConstructNgNode<ng::op::Exp>(op->name(), ng_inp);
+  auto constant_1 = ConstructNgNode<ng::op::Constant>(
+      op->name(), ng_inp->get_element_type(), ng_inp->get_shape(),
+      std::vector<std::string>(ng::shape_size(ng_inp->get_shape()), "1"));
+  auto ng_output = ConstructNgNode<ng::op::Log>(
+      op->name(), ConstructNgNode<ng::op::Add>(op->name(), ng_exp, constant_1));
+  SaveNgOp(ng_op_map, op->name(), ng_output);
+  return Status::OK();
+}
+
 static Status TranslateMatMulOp(
     const Node* op, const std::vector<const Tensor*>& static_input_map,
     Builder::OpMap& ng_op_map) {
@@ -4870,7 +4885,7 @@ const static std::map<
48704885 {" Sigmoid" , TranslateSigmoidOp}, {" SigmoidGrad" , TranslateSigmoidGradOp},
48714886 {" Size" , TranslateSizeOp}, {" Sign" , TranslateUnaryOp<ngraph::op::Sign>},
48724887 {" Slice" , TranslateSliceOp}, {" Snapshot" , TranslateIdentityOp},
4873- {" Softmax" , TranslateSoftmaxOp},
4888+ {" Softmax" , TranslateSoftmaxOp}, { " Softplus " , TranslateSoftplusOp},
48744889 {" SpaceToDepth" , TranslateSpaceToDepthOp},
48754890 {" SparseSoftmaxCrossEntropyWithLogits" ,
48764891 TranslateSparseSoftmaxCrossEntropyWithLogitsOp},