-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathtest.html
13 lines (11 loc) · 202 KB
/
test.html
1
2
3
4
5
6
7
8
9
10
11
12
13
<iframe seamless style="width:1200px;height:620px;border:0" srcdoc="
<script>
function load() {
document.getElementById("graph0.8855588178068329").pbtxt = 'node {\n name: "relevant_layer"\n op: "Placeholder"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: -1\n }\n dim {\n size: 75\n }\n dim {\n size: 100\n }\n dim {\n size: 2\n }\n }\n }\n }\n}\nnode {\n name: "model_input"\n op: "Placeholder"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: -1\n }\n dim {\n size: 150\n }\n dim {\n size: 200\n }\n dim {\n size: 3\n }\n }\n }\n }\n}\nnode {\n name: "mean"\n op: "Placeholder"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 1\n }\n dim {\n size: 150\n }\n dim {\n size: 200\n }\n dim {\n size: 3\n }\n }\n }\n }\n}\nnode {\n name: "sub"\n op: "Sub"\n input: "model_input"\n input: "mean"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "truediv/y"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 255.0\n }\n }\n }\n}\nnode {\n name: "truediv"\n op: "RealDiv"\n input: "sub"\n input: "truediv/y"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "target"\n op: "Placeholder"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: -1\n }\n dim {\n size: 75\n }\n dim {\n size: 100\n }\n dim {\n size: 2\n }\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Initializer/random_uniform/shape"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 4\n }\n }\n tensor_content: 
"\\003\\000\\000\\000\\003\\000\\000\\000\\003\\000\\000\\0002\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Initializer/random_uniform/min"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: -0.11215443164110184\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Initializer/random_uniform/max"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.11215443164110184\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Initializer/random_uniform/RandomUniform"\n op: "RandomUniform"\n input: "conv2d/kernel/Initializer/random_uniform/shape"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "seed"\n value {\n i: 0\n }\n }\n attr {\n key: "seed2"\n value {\n i: 0\n }\n }\n}\nnode {\n name: "conv2d/kernel/Initializer/random_uniform/sub"\n op: "Sub"\n input: "conv2d/kernel/Initializer/random_uniform/max"\n input: "conv2d/kernel/Initializer/random_uniform/min"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Initializer/random_uniform/mul"\n op: "Mul"\n input: "conv2d/kernel/Initializer/random_uniform/RandomUniform"\n input: "conv2d/kernel/Initializer/random_uniform/sub"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Initializer/random_uniform"\n op: "Add"\n input: 
"conv2d/kernel/Initializer/random_uniform/mul"\n input: "conv2d/kernel/Initializer/random_uniform/min"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 50\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv2d/kernel"\n }\n }\n}\nnode {\n name: "conv2d/kernel/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv2d/kernel"\n}\nnode {\n name: "conv2d/kernel/Assign"\n op: "AssignVariableOp"\n input: "conv2d/kernel"\n input: "conv2d/kernel/Initializer/random_uniform"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/kernel/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv2d/kernel"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/bias/Initializer/zeros"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n dim {\n size: 50\n }\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv2d/bias"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 
50\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv2d/bias"\n }\n }\n}\nnode {\n name: "conv2d/bias/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv2d/bias"\n}\nnode {\n name: "conv2d/bias/Assign"\n op: "AssignVariableOp"\n input: "conv2d/bias"\n input: "conv2d/bias/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/bias/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv2d/bias"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/dilation_rate"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 2\n }\n }\n tensor_content: "\\001\\000\\000\\000\\001\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "conv2d/Conv2D/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv2d/kernel"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/Conv2D"\n op: "Conv2D"\n input: "truediv"\n input: "conv2d/Conv2D/ReadVariableOp"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n attr {\n key: "dilations"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "explicit_paddings"\n value {\n list {\n }\n }\n }\n attr {\n key: "padding"\n value {\n s: "SAME"\n }\n }\n attr {\n key: "strides"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "use_cudnn_on_gpu"\n value {\n b: true\n }\n }\n}\nnode {\n name: "conv2d/BiasAdd/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv2d/bias"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/BiasAdd"\n op: "BiasAdd"\n input: "conv2d/Conv2D"\n input: 
"conv2d/BiasAdd/ReadVariableOp"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n}\nnode {\n name: "conv2d/Relu"\n op: "Relu"\n input: "conv2d/BiasAdd"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "max_pooling2d/MaxPool"\n op: "MaxPool"\n input: "conv2d/Relu"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n attr {\n key: "ksize"\n value {\n list {\n i: 1\n i: 2\n i: 2\n i: 1\n }\n }\n }\n attr {\n key: "padding"\n value {\n s: "VALID"\n }\n }\n attr {\n key: "strides"\n value {\n list {\n i: 1\n i: 2\n i: 2\n i: 1\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Initializer/random_uniform/shape"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 4\n }\n }\n tensor_content: "\\003\\000\\000\\000\\003\\000\\000\\0002\\000\\000\\000\\002\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Initializer/random_uniform/min"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: -0.11322770267724991\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Initializer/random_uniform/max"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.11322770267724991\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Initializer/random_uniform/RandomUniform"\n op: "RandomUniform"\n input: 
"conv_clasf/kernel/Initializer/random_uniform/shape"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "seed"\n value {\n i: 0\n }\n }\n attr {\n key: "seed2"\n value {\n i: 0\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Initializer/random_uniform/sub"\n op: "Sub"\n input: "conv_clasf/kernel/Initializer/random_uniform/max"\n input: "conv_clasf/kernel/Initializer/random_uniform/min"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Initializer/random_uniform/mul"\n op: "Mul"\n input: "conv_clasf/kernel/Initializer/random_uniform/RandomUniform"\n input: "conv_clasf/kernel/Initializer/random_uniform/sub"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Initializer/random_uniform"\n op: "Add"\n input: "conv_clasf/kernel/Initializer/random_uniform/mul"\n input: "conv_clasf/kernel/Initializer/random_uniform/min"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 50\n }\n dim {\n size: 2\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv_clasf/kernel"\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv_clasf/kernel"\n}\nnode 
{\n name: "conv_clasf/kernel/Assign"\n op: "AssignVariableOp"\n input: "conv_clasf/kernel"\n input: "conv_clasf/kernel/Initializer/random_uniform"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv_clasf/kernel"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Initializer/zeros"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n dim {\n size: 2\n }\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv_clasf/bias"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 2\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv_clasf/bias"\n }\n }\n}\nnode {\n name: "conv_clasf/bias/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv_clasf/bias"\n}\nnode {\n name: "conv_clasf/bias/Assign"\n op: "AssignVariableOp"\n input: "conv_clasf/bias"\n input: "conv_clasf/bias/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv_clasf/bias"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: 
"conv_clasf/dilation_rate"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 2\n }\n }\n tensor_content: "\\001\\000\\000\\000\\001\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/Conv2D/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv_clasf/kernel"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/Conv2D"\n op: "Conv2D"\n input: "max_pooling2d/MaxPool"\n input: "conv_clasf/Conv2D/ReadVariableOp"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n attr {\n key: "dilations"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "explicit_paddings"\n value {\n list {\n }\n }\n }\n attr {\n key: "padding"\n value {\n s: "SAME"\n }\n }\n attr {\n key: "strides"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "use_cudnn_on_gpu"\n value {\n b: true\n }\n }\n}\nnode {\n name: "conv_clasf/BiasAdd/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv_clasf/bias"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/BiasAdd"\n op: "BiasAdd"\n input: "conv_clasf/Conv2D"\n input: "conv_clasf/BiasAdd/ReadVariableOp"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/zeros_like"\n op: "ZerosLike"\n input: "conv_clasf/BiasAdd"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/GreaterEqual"\n op: "GreaterEqual"\n input: "conv_clasf/BiasAdd"\n input: "sigmoid_cross_entropy_loss/xentropy/zeros_like"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/Select"\n op: "Select"\n input: 
"sigmoid_cross_entropy_loss/xentropy/GreaterEqual"\n input: "conv_clasf/BiasAdd"\n input: "sigmoid_cross_entropy_loss/xentropy/zeros_like"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/Neg"\n op: "Neg"\n input: "conv_clasf/BiasAdd"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/Select_1"\n op: "Select"\n input: "sigmoid_cross_entropy_loss/xentropy/GreaterEqual"\n input: "sigmoid_cross_entropy_loss/xentropy/Neg"\n input: "conv_clasf/BiasAdd"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/mul"\n op: "Mul"\n input: "conv_clasf/BiasAdd"\n input: "target"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/sub"\n op: "Sub"\n input: "sigmoid_cross_entropy_loss/xentropy/Select"\n input: "sigmoid_cross_entropy_loss/xentropy/mul"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/Exp"\n op: "Exp"\n input: "sigmoid_cross_entropy_loss/xentropy/Select_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy/Log1p"\n op: "Log1p"\n input: "sigmoid_cross_entropy_loss/xentropy/Exp"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/xentropy"\n op: "Add"\n input: "sigmoid_cross_entropy_loss/xentropy/sub"\n input: "sigmoid_cross_entropy_loss/xentropy/Log1p"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/weights/shape"\n op: "Shape"\n input: "relevant_layer"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/weights/rank"\n op: "Const"\n attr {\n key: "dtype"\n 
value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 4\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/values/shape"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/xentropy"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/values/rank"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 4\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar/x"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 0\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar"\n op: "Equal"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar/x"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/weights/rank"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/switch_t"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Switch:1"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/switch_f"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Switch"\n attr {\n key: "T"\n value {\n type: 
DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/pred_id"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n op: "Equal"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank/Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank/Switch_1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/values/rank"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/values/rank"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/weights/rank"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list 
{\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/weights/rank"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch:1"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_f"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/pred_id"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/dim"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: -1\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims"\n op: "ExpandDims"\n input: 
"sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/Switch_1:1"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/dim"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "Tdim"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/values/shape"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/values/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/values/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like/Shape"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 2\n }\n }\n tensor_content: "\\004\\000\\000\\000\\001\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: 
"sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like/Const"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 1\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like"\n op: "Fill"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like/Shape"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like/Const"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "index_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/concat/axis"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 1\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/concat"\n op: "ConcatV2"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/concat/axis"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: 
DT_INT32\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/dim"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: -1\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1"\n op: "ExpandDims"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/Switch_1:1"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/dim"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "Tdim"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/weights/shape"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/weights/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/pred_id"\n attr {\n key: "T"\n value 
{\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/weights/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/DenseToDenseSetOperation"\n op: "DenseToDenseSetOperation"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/concat"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "set_operation"\n value {\n s: "a-b"\n }\n }\n attr {\n key: "validate_indices"\n value {\n b: true\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/num_invalid_dims"\n op: "Size"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/DenseToDenseSetOperation:1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/x"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 0\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims"\n op: "Equal"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/x"\n input: 
"sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/num_invalid_dims"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Merge"\n op: "Merge"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch_1"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Merge"\n op: "Merge"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Merge"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Switch_1:1"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/Const"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "weights can not be broadcast to values."\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/Const_1"\n op: 
"Const"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "weights.shape="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/Const_2"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "relevant_layer:0"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/Const_3"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "values.shape="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/Const_4"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "sigmoid_cross_entropy_loss/xentropy:0"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/Const_5"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "is_scalar="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Merge"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Merge"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_t"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Switch:1"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n op: "Identity"\n 
input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Switch"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/pred_id"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Merge"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/NoOp"\n op: "NoOp"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_t"\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/control_dependency"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_t"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/NoOp"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_t"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_0"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "weights can not be broadcast to values."\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_1"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "weights.shape="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_2"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n attr {\n 
key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "relevant_layer:0"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_4"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "values.shape="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_5"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "sigmoid_cross_entropy_loss/xentropy:0"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_7"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "is_scalar="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert"\n op: "Assert"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_0"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_1"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_2"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/Switch_1"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_4"\n input: 
"sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_5"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/Switch_2"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/data_7"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/Switch_3"\n attr {\n key: "T"\n value {\n list {\n type: DT_STRING\n type: DT_STRING\n type: DT_STRING\n type: DT_INT32\n type: DT_STRING\n type: DT_STRING\n type: DT_INT32\n type: DT_STRING\n type: DT_BOOL\n }\n }\n }\n attr {\n key: "summarize"\n value {\n i: 3\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Merge"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/pred_id"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/is_valid_shape/Merge"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/weights/shape"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/weights/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/Switch_2"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/values/shape"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/values/shape"\n }\n }\n }\n}\nnode {\n name: 
"sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert/Switch_3"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/pred_id"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/is_scalar"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/control_dependency_1"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Assert"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/switch_f"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n op: "Merge"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/control_dependency_1"\n input: "sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/control_dependency"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/Mul"\n op: "Mul"\n input: "sigmoid_cross_entropy_loss/xentropy"\n input: "relevant_layer"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/Const"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 4\n }\n }\n tensor_content: "\\000\\000\\000\\000\\001\\000\\000\\000\\002\\000\\000\\000\\003\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: 
"sigmoid_cross_entropy_loss/Sum"\n op: "Sum"\n input: "sigmoid_cross_entropy_loss/Mul"\n input: "sigmoid_cross_entropy_loss/Const"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/Equal/y"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/Equal"\n op: "Equal"\n input: "relevant_layer"\n input: "sigmoid_cross_entropy_loss/num_present/Equal/y"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/zeros_like"\n op: "ZerosLike"\n input: "relevant_layer"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/ones_like/Shape"\n op: "Shape"\n input: "relevant_layer"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/ones_like/Const"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 1.0\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/ones_like"\n op: "Fill"\n input: "sigmoid_cross_entropy_loss/num_present/ones_like/Shape"\n input: "sigmoid_cross_entropy_loss/num_present/ones_like/Const"\n attr {\n key: "T"\n value {\n 
type: DT_FLOAT\n }\n }\n attr {\n key: "index_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/Select"\n op: "Select"\n input: "sigmoid_cross_entropy_loss/num_present/Equal"\n input: "sigmoid_cross_entropy_loss/num_present/zeros_like"\n input: "sigmoid_cross_entropy_loss/num_present/ones_like"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/shape"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/num_present/Select"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/rank"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 4\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/shape"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/xentropy"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/rank"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 4\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar/x"\n op: "Const"\n input: 
"^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 0\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar"\n op: "Equal"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar/x"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/rank"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/switch_t"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Switch:1"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/switch_f"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Switch"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/pred_id"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n op: "Equal"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank/Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank/Switch_1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/rank"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/rank"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/rank"\n input: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/rank"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch:1"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_f"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/pred_id"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/dim"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: -1\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims"\n op: "ExpandDims"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/Switch_1:1"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/dim"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "Tdim"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/shape"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/Switch_1"\n op: "Switch"\n 
input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims/Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like/Shape"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 2\n }\n }\n tensor_content: "\\004\\000\\000\\000\\001\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like/Const"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 1\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like"\n op: "Fill"\n input: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like/Shape"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like/Const"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "index_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/concat/axis"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 1\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/concat"\n op: "ConcatV2"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ones_like"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/concat/axis"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/dim"\n op: 
"Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: -1\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1"\n op: "ExpandDims"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/Switch_1:1"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/dim"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "Tdim"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/shape"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/Switch_1"\n op: "Switch"\n input: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1/Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/DenseToDenseSetOperation"\n op: "DenseToDenseSetOperation"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/ExpandDims_1"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/concat"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "set_operation"\n value {\n s: "a-b"\n }\n }\n attr {\n key: "validate_indices"\n value {\n b: true\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/num_invalid_dims"\n op: "Size"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/DenseToDenseSetOperation:1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/x"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: 
"^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/switch_t"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n }\n int_val: 0\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims"\n op: "Equal"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/x"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims/num_invalid_dims"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/pred_id"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/is_same_rank"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Merge"\n op: "Merge"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Switch_1"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/has_invalid_dims"\n attr {\n key: 
"N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Merge"\n op: "Merge"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/has_valid_nonscalar_shape/Merge"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Switch_1:1"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/Const"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "weights can not be broadcast to values."\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/Const_1"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "weights.shape="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/Const_2"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "sigmoid_cross_entropy_loss/num_present/Select:0"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/Const_3"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: 
"dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "values.shape="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/Const_4"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "sigmoid_cross_entropy_loss/xentropy:0"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/Const_5"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "is_scalar="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Merge"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Merge"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_t"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Switch:1"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Switch"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/pred_id"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Merge"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/NoOp"\n op: "NoOp"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_t"\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/control_dependency"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_t"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/NoOp"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_t"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_0"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "weights can not be broadcast to values."\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_1"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: 
"^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "weights.shape="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_2"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "sigmoid_cross_entropy_loss/num_present/Select:0"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_4"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "values.shape="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_5"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "sigmoid_cross_entropy_loss/xentropy:0"\n }\n }\n }\n}\nnode {\n name: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_7"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "is_scalar="\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert"\n op: "Assert"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_0"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_1"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_2"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/Switch_1"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_4"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_5"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/Switch_2"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/data_7"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/Switch_3"\n attr {\n key: "T"\n value {\n list {\n type: DT_STRING\n type: DT_STRING\n type: DT_STRING\n type: DT_INT32\n type: DT_STRING\n type: DT_STRING\n type: DT_INT32\n type: DT_STRING\n type: DT_BOOL\n }\n }\n }\n attr 
{\n key: "summarize"\n value {\n i: 3\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/Switch"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Merge"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/pred_id"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_valid_shape/Merge"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/Switch_1"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/shape"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/weights/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/Switch_2"\n op: "Switch"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/shape"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/pred_id"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/values/shape"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert/Switch_3"\n op: "Switch"\n input: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/pred_id"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/is_scalar"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/control_dependency_1"\n op: "Identity"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Assert"\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/switch_f"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Merge"\n op: "Merge"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/control_dependency_1"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/control_dependency"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_BOOL\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/ones_like/Shape"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/xentropy"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: 
"sigmoid_cross_entropy_loss/num_present/broadcast_weights/ones_like/Const"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n input: "^sigmoid_cross_entropy_loss/num_present/broadcast_weights/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 1.0\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/ones_like"\n op: "Fill"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/ones_like/Shape"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/ones_like/Const"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "index_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/broadcast_weights"\n op: "Mul"\n input: "sigmoid_cross_entropy_loss/num_present/Select"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights/ones_like"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present/Const"\n op: "Const"\n input: "^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 4\n }\n }\n tensor_content: "\\000\\000\\000\\000\\001\\000\\000\\000\\002\\000\\000\\000\\003\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/num_present"\n op: "Sum"\n input: "sigmoid_cross_entropy_loss/num_present/broadcast_weights"\n input: "sigmoid_cross_entropy_loss/num_present/Const"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/Const_1"\n op: "Const"\n input: 
"^sigmoid_cross_entropy_loss/assert_broadcastable/AssertGuard/Merge"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n }\n }\n }\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/Sum_1"\n op: "Sum"\n input: "sigmoid_cross_entropy_loss/Sum"\n input: "sigmoid_cross_entropy_loss/Const_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "sigmoid_cross_entropy_loss/value"\n op: "DivNoNan"\n input: "sigmoid_cross_entropy_loss/Sum_1"\n input: "sigmoid_cross_entropy_loss/num_present"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "activation/Sigmoid"\n op: "Sigmoid"\n input: "conv_clasf/BiasAdd"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/Shape"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n }\n }\n }\n }\n }\n}\nnode {\n name: "gradients/grad_ys_0"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 1.0\n }\n }\n }\n}\nnode {\n name: "gradients/Fill"\n op: "Fill"\n input: "gradients/Shape"\n input: "gradients/grad_ys_0"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "index_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/Shape"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n }\n }\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/Shape_1"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n 
attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n }\n }\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/BroadcastGradientArgs"\n op: "BroadcastGradientArgs"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/Shape"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/div_no_nan"\n op: "DivNoNan"\n input: "gradients/Fill"\n input: "sigmoid_cross_entropy_loss/num_present"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/Sum"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/div_no_nan"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/BroadcastGradientArgs"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/Reshape"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/Sum"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/Shape"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/Neg"\n op: "Neg"\n input: "sigmoid_cross_entropy_loss/Sum_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/div_no_nan_1"\n op: "DivNoNan"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/Neg"\n input: "sigmoid_cross_entropy_loss/num_present"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/div_no_nan_2"\n op: "DivNoNan"\n input: 
"gradients/sigmoid_cross_entropy_loss/value_grad/div_no_nan_1"\n input: "sigmoid_cross_entropy_loss/num_present"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/mul"\n op: "Mul"\n input: "gradients/Fill"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/div_no_nan_2"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/Sum_1"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/mul"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/BroadcastGradientArgs:1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/Reshape_1"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/Sum_1"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/sigmoid_cross_entropy_loss/value_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/value_grad/Reshape_1"\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/value_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/value_grad/Reshape"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/value_grad/tuple/control_dependency_1"\n op: "Identity"\n input: 
"gradients/sigmoid_cross_entropy_loss/value_grad/Reshape_1"\n input: "^gradients/sigmoid_cross_entropy_loss/value_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/value_grad/Reshape_1"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Sum_1_grad/Reshape/shape"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n }\n }\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Sum_1_grad/Reshape"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/value_grad/tuple/control_dependency"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_1_grad/Reshape/shape"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Sum_1_grad/Const"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n }\n }\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Sum_1_grad/Tile"\n op: "Tile"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_1_grad/Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_1_grad/Const"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tmultiples"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Reshape/shape"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 4\n }\n }\n tensor_content: "\\001\\000\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Reshape"\n op: 
"Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_1_grad/Tile"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Reshape/shape"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Shape"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/Mul"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Tile"\n op: "Tile"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Shape"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tmultiples"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Shape"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/xentropy"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Shape_1"\n op: "Shape"\n input: "relevant_layer"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/BroadcastGradientArgs"\n op: "BroadcastGradientArgs"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Shape"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Mul"\n op: "Mul"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Tile"\n input: "relevant_layer"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Sum"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Mul"\n 
input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/BroadcastGradientArgs"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Reshape"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Sum"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Shape"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Mul_1"\n op: "Mul"\n input: "sigmoid_cross_entropy_loss/xentropy"\n input: "gradients/sigmoid_cross_entropy_loss/Sum_grad/Tile"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Sum_1"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Mul_1"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/BroadcastGradientArgs:1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Reshape_1"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Sum_1"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/sigmoid_cross_entropy_loss/Mul_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/Mul_grad/Reshape_1"\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Reshape"\n input: 
"^gradients/sigmoid_cross_entropy_loss/Mul_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/Mul_grad/Reshape"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/Mul_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/Reshape_1"\n input: "^gradients/sigmoid_cross_entropy_loss/Mul_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/Mul_grad/Reshape_1"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Shape"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/xentropy/sub"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Shape_1"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/xentropy/Log1p"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/BroadcastGradientArgs"\n op: "BroadcastGradientArgs"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Shape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Sum"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/tuple/control_dependency"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/BroadcastGradientArgs"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: 
"gradients/sigmoid_cross_entropy_loss/xentropy_grad/Reshape"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Sum"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Shape"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Sum_1"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/Mul_grad/tuple/control_dependency"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/BroadcastGradientArgs:1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Reshape_1"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Sum_1"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy_grad/Reshape_1"\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy_grad/Reshape"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/Reshape_1"\n input: 
"^gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy_grad/Reshape_1"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Shape"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/xentropy/Select"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Shape_1"\n op: "Shape"\n input: "sigmoid_cross_entropy_loss/xentropy/mul"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/BroadcastGradientArgs"\n op: "BroadcastGradientArgs"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Shape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Sum"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/control_dependency"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/BroadcastGradientArgs"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Reshape"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Sum"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Shape"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Sum_1"\n op: 
"Sum"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/control_dependency"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/BroadcastGradientArgs:1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Neg"\n op: "Neg"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Sum_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Reshape_1"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Neg"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Reshape_1"\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Reshape"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Reshape_1"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n 
key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/Reshape_1"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Log1p_grad/add/x"\n op: "Const"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/control_dependency_1"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 1.0\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Log1p_grad/add"\n op: "Add"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Log1p_grad/add/x"\n input: "sigmoid_cross_entropy_loss/xentropy/Exp"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Log1p_grad/Reciprocal"\n op: "Reciprocal"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Log1p_grad/add"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Log1p_grad/mul"\n op: "Mul"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy_grad/tuple/control_dependency_1"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Log1p_grad/Reciprocal"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/zeros_like"\n op: "ZerosLike"\n input: "conv_clasf/BiasAdd"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select"\n op: "Select"\n input: "sigmoid_cross_entropy_loss/xentropy/GreaterEqual"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/control_dependency"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/zeros_like"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select_1"\n op: "Select"\n input: 
"sigmoid_cross_entropy_loss/xentropy/GreaterEqual"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/zeros_like"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/control_dependency"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select_1"\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select_1"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select_1"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Shape"\n op: "Shape"\n input: "conv_clasf/BiasAdd"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Shape_1"\n op: "Shape"\n input: "target"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: 
"gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/BroadcastGradientArgs"\n op: "BroadcastGradientArgs"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Shape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Mul"\n op: "Mul"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/control_dependency_1"\n input: "target"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Sum"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Mul"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/BroadcastGradientArgs"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Reshape"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Sum"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Shape"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Mul_1"\n op: "Mul"\n input: "conv_clasf/BiasAdd"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/sub_grad/tuple/control_dependency_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Sum_1"\n op: "Sum"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Mul_1"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/BroadcastGradientArgs:1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tidx"\n value {\n type: DT_INT32\n }\n }\n attr {\n 
key: "keep_dims"\n value {\n b: false\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Reshape_1"\n op: "Reshape"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Sum_1"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Shape_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "Tshape"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Reshape_1"\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Reshape"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Reshape"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Reshape_1"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/Reshape_1"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Exp_grad/mul"\n op: "Mul"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Log1p_grad/mul"\n input: "sigmoid_cross_entropy_loss/xentropy/Exp"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/zeros_like"\n op: "ZerosLike"\n input: 
"sigmoid_cross_entropy_loss/xentropy/Neg"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/Select"\n op: "Select"\n input: "sigmoid_cross_entropy_loss/xentropy/GreaterEqual"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Exp_grad/mul"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/zeros_like"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/Select_1"\n op: "Select"\n input: "sigmoid_cross_entropy_loss/xentropy/GreaterEqual"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/zeros_like"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Exp_grad/mul"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/Select"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/Select_1"\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/Select"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/Select"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/Select_1"\n input: "^gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: 
"loc:@gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/Select_1"\n }\n }\n }\n}\nnode {\n name: "gradients/sigmoid_cross_entropy_loss/xentropy/Neg_grad/Neg"\n op: "Neg"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/tuple/control_dependency"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/AddN"\n op: "AddN"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/tuple/control_dependency"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/mul_grad/tuple/control_dependency"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Select_1_grad/tuple/control_dependency_1"\n input: "gradients/sigmoid_cross_entropy_loss/xentropy/Neg_grad/Neg"\n attr {\n key: "N"\n value {\n i: 4\n }\n }\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select"\n }\n }\n }\n}\nnode {\n name: "gradients/conv_clasf/BiasAdd_grad/BiasAddGrad"\n op: "BiasAddGrad"\n input: "gradients/AddN"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n}\nnode {\n name: "gradients/conv_clasf/BiasAdd_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/AddN"\n input: "^gradients/conv_clasf/BiasAdd_grad/BiasAddGrad"\n}\nnode {\n name: "gradients/conv_clasf/BiasAdd_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/AddN"\n input: "^gradients/conv_clasf/BiasAdd_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/sigmoid_cross_entropy_loss/xentropy/Select_grad/Select"\n }\n }\n }\n}\nnode {\n name: "gradients/conv_clasf/BiasAdd_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/conv_clasf/BiasAdd_grad/BiasAddGrad"\n input: "^gradients/conv_clasf/BiasAdd_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: 
DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/conv_clasf/BiasAdd_grad/BiasAddGrad"\n }\n }\n }\n}\nnode {\n name: "gradients/conv_clasf/Conv2D_grad/ShapeN"\n op: "ShapeN"\n input: "max_pooling2d/MaxPool"\n input: "conv_clasf/Conv2D/ReadVariableOp"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/conv_clasf/Conv2D_grad/Conv2DBackpropInput"\n op: "Conv2DBackpropInput"\n input: "gradients/conv_clasf/Conv2D_grad/ShapeN"\n input: "conv_clasf/Conv2D/ReadVariableOp"\n input: "gradients/conv_clasf/BiasAdd_grad/tuple/control_dependency"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n attr {\n key: "dilations"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "explicit_paddings"\n value {\n list {\n }\n }\n }\n attr {\n key: "padding"\n value {\n s: "SAME"\n }\n }\n attr {\n key: "strides"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "use_cudnn_on_gpu"\n value {\n b: true\n }\n }\n}\nnode {\n name: "gradients/conv_clasf/Conv2D_grad/Conv2DBackpropFilter"\n op: "Conv2DBackpropFilter"\n input: "max_pooling2d/MaxPool"\n input: "gradients/conv_clasf/Conv2D_grad/ShapeN:1"\n input: "gradients/conv_clasf/BiasAdd_grad/tuple/control_dependency"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n attr {\n key: "dilations"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "explicit_paddings"\n value {\n list {\n }\n }\n }\n attr {\n key: "padding"\n value {\n s: "SAME"\n }\n }\n attr {\n key: "strides"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "use_cudnn_on_gpu"\n value {\n b: true\n }\n }\n}\nnode {\n name: "gradients/conv_clasf/Conv2D_grad/tuple/group_deps"\n op: 
"NoOp"\n input: "^gradients/conv_clasf/Conv2D_grad/Conv2DBackpropFilter"\n input: "^gradients/conv_clasf/Conv2D_grad/Conv2DBackpropInput"\n}\nnode {\n name: "gradients/conv_clasf/Conv2D_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/conv_clasf/Conv2D_grad/Conv2DBackpropInput"\n input: "^gradients/conv_clasf/Conv2D_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/conv_clasf/Conv2D_grad/Conv2DBackpropInput"\n }\n }\n }\n}\nnode {\n name: "gradients/conv_clasf/Conv2D_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/conv_clasf/Conv2D_grad/Conv2DBackpropFilter"\n input: "^gradients/conv_clasf/Conv2D_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/conv_clasf/Conv2D_grad/Conv2DBackpropFilter"\n }\n }\n }\n}\nnode {\n name: "gradients/max_pooling2d/MaxPool_grad/MaxPoolGrad"\n op: "MaxPoolGrad"\n input: "conv2d/Relu"\n input: "max_pooling2d/MaxPool"\n input: "gradients/conv_clasf/Conv2D_grad/tuple/control_dependency"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n attr {\n key: "ksize"\n value {\n list {\n i: 1\n i: 2\n i: 2\n i: 1\n }\n }\n }\n attr {\n key: "padding"\n value {\n s: "VALID"\n }\n }\n attr {\n key: "strides"\n value {\n list {\n i: 1\n i: 2\n i: 2\n i: 1\n }\n }\n }\n}\nnode {\n name: "gradients/conv2d/Relu_grad/ReluGrad"\n op: "ReluGrad"\n input: "gradients/max_pooling2d/MaxPool_grad/MaxPoolGrad"\n input: "conv2d/Relu"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "gradients/conv2d/BiasAdd_grad/BiasAddGrad"\n op: "BiasAddGrad"\n input: "gradients/conv2d/Relu_grad/ReluGrad"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n}\nnode {\n name: 
"gradients/conv2d/BiasAdd_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/conv2d/BiasAdd_grad/BiasAddGrad"\n input: "^gradients/conv2d/Relu_grad/ReluGrad"\n}\nnode {\n name: "gradients/conv2d/BiasAdd_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/conv2d/Relu_grad/ReluGrad"\n input: "^gradients/conv2d/BiasAdd_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/conv2d/Relu_grad/ReluGrad"\n }\n }\n }\n}\nnode {\n name: "gradients/conv2d/BiasAdd_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/conv2d/BiasAdd_grad/BiasAddGrad"\n input: "^gradients/conv2d/BiasAdd_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/conv2d/BiasAdd_grad/BiasAddGrad"\n }\n }\n }\n}\nnode {\n name: "gradients/conv2d/Conv2D_grad/ShapeN"\n op: "ShapeN"\n input: "truediv"\n input: "conv2d/Conv2D/ReadVariableOp"\n attr {\n key: "N"\n value {\n i: 2\n }\n }\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "out_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "gradients/conv2d/Conv2D_grad/Conv2DBackpropInput"\n op: "Conv2DBackpropInput"\n input: "gradients/conv2d/Conv2D_grad/ShapeN"\n input: "conv2d/Conv2D/ReadVariableOp"\n input: "gradients/conv2d/BiasAdd_grad/tuple/control_dependency"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n attr {\n key: "dilations"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "explicit_paddings"\n value {\n list {\n }\n }\n }\n attr {\n key: "padding"\n value {\n s: "SAME"\n }\n }\n attr {\n key: "strides"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "use_cudnn_on_gpu"\n value {\n b: true\n }\n }\n}\nnode {\n name: "gradients/conv2d/Conv2D_grad/Conv2DBackpropFilter"\n op: 
"Conv2DBackpropFilter"\n input: "truediv"\n input: "gradients/conv2d/Conv2D_grad/ShapeN:1"\n input: "gradients/conv2d/BiasAdd_grad/tuple/control_dependency"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "data_format"\n value {\n s: "NHWC"\n }\n }\n attr {\n key: "dilations"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "explicit_paddings"\n value {\n list {\n }\n }\n }\n attr {\n key: "padding"\n value {\n s: "SAME"\n }\n }\n attr {\n key: "strides"\n value {\n list {\n i: 1\n i: 1\n i: 1\n i: 1\n }\n }\n }\n attr {\n key: "use_cudnn_on_gpu"\n value {\n b: true\n }\n }\n}\nnode {\n name: "gradients/conv2d/Conv2D_grad/tuple/group_deps"\n op: "NoOp"\n input: "^gradients/conv2d/Conv2D_grad/Conv2DBackpropFilter"\n input: "^gradients/conv2d/Conv2D_grad/Conv2DBackpropInput"\n}\nnode {\n name: "gradients/conv2d/Conv2D_grad/tuple/control_dependency"\n op: "Identity"\n input: "gradients/conv2d/Conv2D_grad/Conv2DBackpropInput"\n input: "^gradients/conv2d/Conv2D_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/conv2d/Conv2D_grad/Conv2DBackpropInput"\n }\n }\n }\n}\nnode {\n name: "gradients/conv2d/Conv2D_grad/tuple/control_dependency_1"\n op: "Identity"\n input: "gradients/conv2d/Conv2D_grad/Conv2DBackpropFilter"\n input: "^gradients/conv2d/Conv2D_grad/tuple/group_deps"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@gradients/conv2d/Conv2D_grad/Conv2DBackpropFilter"\n }\n }\n }\n}\nnode {\n name: "beta1_power/Initializer/initial_value"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.8999999761581421\n }\n }\n }\n}\nnode {\n name: "beta1_power"\n op: "VarHandleOp"\n 
attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "beta1_power"\n }\n }\n}\nnode {\n name: "beta1_power/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "beta1_power"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n}\nnode {\n name: "beta1_power/Assign"\n op: "AssignVariableOp"\n input: "beta1_power"\n input: "beta1_power/Initializer/initial_value"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "beta1_power/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "beta1_power"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "beta2_power/Initializer/initial_value"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.9990000128746033\n }\n }\n }\n}\nnode {\n name: "beta2_power"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "beta2_power"\n }\n }\n}\nnode {\n name: "beta2_power/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "beta2_power"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n}\nnode {\n name: "beta2_power/Assign"\n op: "AssignVariableOp"\n 
input: "beta2_power"\n input: "beta2_power/Initializer/initial_value"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "beta2_power/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "beta2_power"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam/Initializer/zeros/shape_as_tensor"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 4\n }\n }\n tensor_content: "\\003\\000\\000\\000\\003\\000\\000\\000\\003\\000\\000\\0002\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam/Initializer/zeros/Const"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam/Initializer/zeros"\n op: "Fill"\n input: "conv2d/kernel/Adam/Initializer/zeros/shape_as_tensor"\n input: "conv2d/kernel/Adam/Initializer/zeros/Const"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "index_type"\n value {\n type: DT_INT32\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 50\n 
}\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv2d/kernel/Adam"\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv2d/kernel/Adam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam/Assign"\n op: "AssignVariableOp"\n input: "conv2d/kernel/Adam"\n input: "conv2d/kernel/Adam/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv2d/kernel/Adam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam_1/Initializer/zeros/shape_as_tensor"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_INT32\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_INT32\n tensor_shape {\n dim {\n size: 4\n }\n }\n tensor_content: "\\003\\000\\000\\000\\003\\000\\000\\000\\003\\000\\000\\0002\\000\\000\\000"\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam_1/Initializer/zeros/Const"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam_1/Initializer/zeros"\n op: "Fill"\n input: "conv2d/kernel/Adam_1/Initializer/zeros/shape_as_tensor"\n input: "conv2d/kernel/Adam_1/Initializer/zeros/Const"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "index_type"\n value {\n 
type: DT_INT32\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam_1"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 50\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv2d/kernel/Adam_1"\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam_1/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv2d/kernel/Adam_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam_1/Assign"\n op: "AssignVariableOp"\n input: "conv2d/kernel/Adam_1"\n input: "conv2d/kernel/Adam_1/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/kernel/Adam_1/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv2d/kernel/Adam_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam/Initializer/zeros"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n dim {\n size: 50\n }\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 50\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: 
"conv2d/bias/Adam"\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv2d/bias/Adam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam/Assign"\n op: "AssignVariableOp"\n input: "conv2d/bias/Adam"\n input: "conv2d/bias/Adam/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv2d/bias/Adam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam_1/Initializer/zeros"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n dim {\n size: 50\n }\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam_1"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 50\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv2d/bias/Adam_1"\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam_1/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv2d/bias/Adam_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam_1/Assign"\n op: "AssignVariableOp"\n input: "conv2d/bias/Adam_1"\n input: "conv2d/bias/Adam_1/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: 
DT_FLOAT\n }\n }\n}\nnode {\n name: "conv2d/bias/Adam_1/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv2d/bias/Adam_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam/Initializer/zeros"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 50\n }\n dim {\n size: 2\n }\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 50\n }\n dim {\n size: 2\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv_clasf/kernel/Adam"\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv_clasf/kernel/Adam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam/Assign"\n op: "AssignVariableOp"\n input: "conv_clasf/kernel/Adam"\n input: "conv_clasf/kernel/Adam/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv_clasf/kernel/Adam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: 
"conv_clasf/kernel/Adam_1/Initializer/zeros"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 50\n }\n dim {\n size: 2\n }\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam_1"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 3\n }\n dim {\n size: 3\n }\n dim {\n size: 50\n }\n dim {\n size: 2\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv_clasf/kernel/Adam_1"\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam_1/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv_clasf/kernel/Adam_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam_1/Assign"\n op: "AssignVariableOp"\n input: "conv_clasf/kernel/Adam_1"\n input: "conv_clasf/kernel/Adam_1/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/kernel/Adam_1/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv_clasf/kernel/Adam_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam/Initializer/zeros"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n 
tensor_shape {\n dim {\n size: 2\n }\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 2\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: "conv_clasf/bias/Adam"\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv_clasf/bias/Adam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam/Assign"\n op: "AssignVariableOp"\n input: "conv_clasf/bias/Adam"\n input: "conv_clasf/bias/Adam/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv_clasf/bias/Adam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam_1/Initializer/zeros"\n op: "Const"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n dim {\n size: 2\n }\n }\n float_val: 0.0\n }\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam_1"\n op: "VarHandleOp"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "container"\n value {\n s: ""\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n dim {\n size: 2\n }\n }\n }\n }\n attr {\n key: "shared_name"\n value {\n s: 
"conv_clasf/bias/Adam_1"\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam_1/IsInitialized/VarIsInitializedOp"\n op: "VarIsInitializedOp"\n input: "conv_clasf/bias/Adam_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam_1/Assign"\n op: "AssignVariableOp"\n input: "conv_clasf/bias/Adam_1"\n input: "conv_clasf/bias/Adam_1/Initializer/zeros"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "conv_clasf/bias/Adam_1/Read/ReadVariableOp"\n op: "ReadVariableOp"\n input: "conv_clasf/bias/Adam_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/learning_rate"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.0010000000474974513\n }\n }\n }\n}\nnode {\n name: "Adam/beta1"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.8999999761581421\n }\n }\n }\n}\nnode {\n name: "Adam/beta2"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 0.9990000128746033\n }\n }\n }\n}\nnode {\n name: "Adam/epsilon"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_FLOAT\n tensor_shape {\n }\n float_val: 9.99999993922529e-09\n }\n }\n }\n}\nnode {\n name: "Adam/update_conv2d/kernel/ResourceApplyAdam/ReadVariableOp"\n op: "ReadVariableOp"\n input: "beta1_power"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: 
"Adam/update_conv2d/kernel/ResourceApplyAdam/ReadVariableOp_1"\n op: "ReadVariableOp"\n input: "beta2_power"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/update_conv2d/kernel/ResourceApplyAdam"\n op: "ResourceApplyAdam"\n input: "conv2d/kernel"\n input: "conv2d/kernel/Adam"\n input: "conv2d/kernel/Adam_1"\n input: "Adam/update_conv2d/kernel/ResourceApplyAdam/ReadVariableOp"\n input: "Adam/update_conv2d/kernel/ResourceApplyAdam/ReadVariableOp_1"\n input: "Adam/learning_rate"\n input: "Adam/beta1"\n input: "Adam/beta2"\n input: "Adam/epsilon"\n input: "gradients/conv2d/Conv2D_grad/tuple/control_dependency_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/kernel"\n }\n }\n }\n attr {\n key: "use_locking"\n value {\n b: false\n }\n }\n attr {\n key: "use_nesterov"\n value {\n b: false\n }\n }\n}\nnode {\n name: "Adam/update_conv2d/bias/ResourceApplyAdam/ReadVariableOp"\n op: "ReadVariableOp"\n input: "beta1_power"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/update_conv2d/bias/ResourceApplyAdam/ReadVariableOp_1"\n op: "ReadVariableOp"\n input: "beta2_power"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/update_conv2d/bias/ResourceApplyAdam"\n op: "ResourceApplyAdam"\n input: "conv2d/bias"\n input: "conv2d/bias/Adam"\n input: "conv2d/bias/Adam_1"\n input: "Adam/update_conv2d/bias/ResourceApplyAdam/ReadVariableOp"\n input: "Adam/update_conv2d/bias/ResourceApplyAdam/ReadVariableOp_1"\n input: "Adam/learning_rate"\n input: "Adam/beta1"\n input: "Adam/beta2"\n input: "Adam/epsilon"\n input: "gradients/conv2d/BiasAdd_grad/tuple/control_dependency_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "use_locking"\n value {\n b: false\n }\n }\n attr {\n key: "use_nesterov"\n value {\n b: false\n 
}\n }\n}\nnode {\n name: "Adam/update_conv_clasf/kernel/ResourceApplyAdam/ReadVariableOp"\n op: "ReadVariableOp"\n input: "beta1_power"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/update_conv_clasf/kernel/ResourceApplyAdam/ReadVariableOp_1"\n op: "ReadVariableOp"\n input: "beta2_power"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/update_conv_clasf/kernel/ResourceApplyAdam"\n op: "ResourceApplyAdam"\n input: "conv_clasf/kernel"\n input: "conv_clasf/kernel/Adam"\n input: "conv_clasf/kernel/Adam_1"\n input: "Adam/update_conv_clasf/kernel/ResourceApplyAdam/ReadVariableOp"\n input: "Adam/update_conv_clasf/kernel/ResourceApplyAdam/ReadVariableOp_1"\n input: "Adam/learning_rate"\n input: "Adam/beta1"\n input: "Adam/beta2"\n input: "Adam/epsilon"\n input: "gradients/conv_clasf/Conv2D_grad/tuple/control_dependency_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/kernel"\n }\n }\n }\n attr {\n key: "use_locking"\n value {\n b: false\n }\n }\n attr {\n key: "use_nesterov"\n value {\n b: false\n }\n }\n}\nnode {\n name: "Adam/update_conv_clasf/bias/ResourceApplyAdam/ReadVariableOp"\n op: "ReadVariableOp"\n input: "beta1_power"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/update_conv_clasf/bias/ResourceApplyAdam/ReadVariableOp_1"\n op: "ReadVariableOp"\n input: "beta2_power"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/update_conv_clasf/bias/ResourceApplyAdam"\n op: "ResourceApplyAdam"\n input: "conv_clasf/bias"\n input: "conv_clasf/bias/Adam"\n input: "conv_clasf/bias/Adam_1"\n input: "Adam/update_conv_clasf/bias/ResourceApplyAdam/ReadVariableOp"\n input: "Adam/update_conv_clasf/bias/ResourceApplyAdam/ReadVariableOp_1"\n input: "Adam/learning_rate"\n input: "Adam/beta1"\n input: "Adam/beta2"\n input: "Adam/epsilon"\n input: 
"gradients/conv_clasf/BiasAdd_grad/tuple/control_dependency_1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv_clasf/bias"\n }\n }\n }\n attr {\n key: "use_locking"\n value {\n b: false\n }\n }\n attr {\n key: "use_nesterov"\n value {\n b: false\n }\n }\n}\nnode {\n name: "Adam/ReadVariableOp"\n op: "ReadVariableOp"\n input: "beta1_power"\n input: "^Adam/update_conv2d/bias/ResourceApplyAdam"\n input: "^Adam/update_conv2d/kernel/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/bias/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/kernel/ResourceApplyAdam"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/mul"\n op: "Mul"\n input: "Adam/ReadVariableOp"\n input: "Adam/beta1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n}\nnode {\n name: "Adam/AssignVariableOp"\n op: "AssignVariableOp"\n input: "beta1_power"\n input: "Adam/mul"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/ReadVariableOp_1"\n op: "ReadVariableOp"\n input: "beta1_power"\n input: "^Adam/AssignVariableOp"\n input: "^Adam/update_conv2d/bias/ResourceApplyAdam"\n input: "^Adam/update_conv2d/kernel/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/bias/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/kernel/ResourceApplyAdam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/ReadVariableOp_2"\n op: "ReadVariableOp"\n input: "beta2_power"\n input: "^Adam/update_conv2d/bias/ResourceApplyAdam"\n input: "^Adam/update_conv2d/kernel/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/bias/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/kernel/ResourceApplyAdam"\n attr {\n key: 
"dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/mul_1"\n op: "Mul"\n input: "Adam/ReadVariableOp_2"\n input: "Adam/beta2"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n}\nnode {\n name: "Adam/AssignVariableOp_1"\n op: "AssignVariableOp"\n input: "beta2_power"\n input: "Adam/mul_1"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam/ReadVariableOp_3"\n op: "ReadVariableOp"\n input: "beta2_power"\n input: "^Adam/AssignVariableOp_1"\n input: "^Adam/update_conv2d/bias/ResourceApplyAdam"\n input: "^Adam/update_conv2d/kernel/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/bias/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/kernel/ResourceApplyAdam"\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@conv2d/bias"\n }\n }\n }\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "Adam"\n op: "NoOp"\n input: "^Adam/AssignVariableOp"\n input: "^Adam/AssignVariableOp_1"\n input: "^Adam/update_conv2d/bias/ResourceApplyAdam"\n input: "^Adam/update_conv2d/kernel/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/bias/ResourceApplyAdam"\n input: "^Adam/update_conv_clasf/kernel/ResourceApplyAdam"\n}\nnode {\n name: "init"\n op: "NoOp"\n input: "^beta1_power/Assign"\n input: "^beta2_power/Assign"\n input: "^conv2d/bias/Adam/Assign"\n input: "^conv2d/bias/Adam_1/Assign"\n input: "^conv2d/bias/Assign"\n input: "^conv2d/kernel/Adam/Assign"\n input: "^conv2d/kernel/Adam_1/Assign"\n input: "^conv2d/kernel/Assign"\n input: "^conv_clasf/bias/Adam/Assign"\n input: "^conv_clasf/bias/Adam_1/Assign"\n input: "^conv_clasf/bias/Assign"\n input: "^conv_clasf/kernel/Adam/Assign"\n input: "^conv_clasf/kernel/Adam_1/Assign"\n input: "^conv_clasf/kernel/Assign"\n}\nnode {\n name: "save/filename/input"\n op: "Const"\n attr {\n key: "dtype"\n 
value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n }\n string_val: "model"\n }\n }\n }\n}\nnode {\n name: "save/filename"\n op: "PlaceholderWithDefault"\n input: "save/filename/input"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n }\n }\n }\n}\nnode {\n name: "save/Const"\n op: "PlaceholderWithDefault"\n input: "save/filename"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "shape"\n value {\n shape {\n }\n }\n }\n}\nnode {\n name: "save/SaveV2/tensor_names"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n dim {\n size: 14\n }\n }\n string_val: "beta1_power"\n string_val: "beta2_power"\n string_val: "conv2d/bias"\n string_val: "conv2d/bias/Adam"\n string_val: "conv2d/bias/Adam_1"\n string_val: "conv2d/kernel"\n string_val: "conv2d/kernel/Adam"\n string_val: "conv2d/kernel/Adam_1"\n string_val: "conv_clasf/bias"\n string_val: "conv_clasf/bias/Adam"\n string_val: "conv_clasf/bias/Adam_1"\n string_val: "conv_clasf/kernel"\n string_val: "conv_clasf/kernel/Adam"\n string_val: "conv_clasf/kernel/Adam_1"\n }\n }\n }\n}\nnode {\n name: "save/SaveV2/shape_and_slices"\n op: "Const"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n dim {\n size: 14\n }\n }\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n }\n }\n }\n}\nnode {\n name: "save/SaveV2"\n op: "SaveV2"\n input: "save/Const"\n input: "save/SaveV2/tensor_names"\n input: "save/SaveV2/shape_and_slices"\n input: "beta1_power/Read/ReadVariableOp"\n input: 
"beta2_power/Read/ReadVariableOp"\n input: "conv2d/bias/Read/ReadVariableOp"\n input: "conv2d/bias/Adam/Read/ReadVariableOp"\n input: "conv2d/bias/Adam_1/Read/ReadVariableOp"\n input: "conv2d/kernel/Read/ReadVariableOp"\n input: "conv2d/kernel/Adam/Read/ReadVariableOp"\n input: "conv2d/kernel/Adam_1/Read/ReadVariableOp"\n input: "conv_clasf/bias/Read/ReadVariableOp"\n input: "conv_clasf/bias/Adam/Read/ReadVariableOp"\n input: "conv_clasf/bias/Adam_1/Read/ReadVariableOp"\n input: "conv_clasf/kernel/Read/ReadVariableOp"\n input: "conv_clasf/kernel/Adam/Read/ReadVariableOp"\n input: "conv_clasf/kernel/Adam_1/Read/ReadVariableOp"\n attr {\n key: "dtypes"\n value {\n list {\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n }\n }\n }\n}\nnode {\n name: "save/control_dependency"\n op: "Identity"\n input: "save/Const"\n input: "^save/SaveV2"\n attr {\n key: "T"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "_class"\n value {\n list {\n s: "loc:@save/Const"\n }\n }\n }\n}\nnode {\n name: "save/RestoreV2/tensor_names"\n op: "Const"\n device: "/device:CPU:0"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n dim {\n size: 14\n }\n }\n string_val: "beta1_power"\n string_val: "beta2_power"\n string_val: "conv2d/bias"\n string_val: "conv2d/bias/Adam"\n string_val: "conv2d/bias/Adam_1"\n string_val: "conv2d/kernel"\n string_val: "conv2d/kernel/Adam"\n string_val: "conv2d/kernel/Adam_1"\n string_val: "conv_clasf/bias"\n string_val: "conv_clasf/bias/Adam"\n string_val: "conv_clasf/bias/Adam_1"\n string_val: "conv_clasf/kernel"\n string_val: "conv_clasf/kernel/Adam"\n string_val: "conv_clasf/kernel/Adam_1"\n }\n }\n }\n}\nnode {\n name: "save/RestoreV2/shape_and_slices"\n op: "Const"\n device: 
"/device:CPU:0"\n attr {\n key: "dtype"\n value {\n type: DT_STRING\n }\n }\n attr {\n key: "value"\n value {\n tensor {\n dtype: DT_STRING\n tensor_shape {\n dim {\n size: 14\n }\n }\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n string_val: ""\n }\n }\n }\n}\nnode {\n name: "save/RestoreV2"\n op: "RestoreV2"\n input: "save/Const"\n input: "save/RestoreV2/tensor_names"\n input: "save/RestoreV2/shape_and_slices"\n device: "/device:CPU:0"\n attr {\n key: "dtypes"\n value {\n list {\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n type: DT_FLOAT\n }\n }\n }\n}\nnode {\n name: "save/Identity"\n op: "Identity"\n input: "save/RestoreV2"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp"\n op: "AssignVariableOp"\n input: "beta1_power"\n input: "save/Identity"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_1"\n op: "Identity"\n input: "save/RestoreV2:1"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_1"\n op: "AssignVariableOp"\n input: "beta2_power"\n input: "save/Identity_1"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_2"\n op: "Identity"\n input: "save/RestoreV2:2"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_2"\n op: "AssignVariableOp"\n input: "conv2d/bias"\n input: "save/Identity_2"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_3"\n op: "Identity"\n input: "save/RestoreV2:3"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode 
{\n name: "save/AssignVariableOp_3"\n op: "AssignVariableOp"\n input: "conv2d/bias/Adam"\n input: "save/Identity_3"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_4"\n op: "Identity"\n input: "save/RestoreV2:4"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_4"\n op: "AssignVariableOp"\n input: "conv2d/bias/Adam_1"\n input: "save/Identity_4"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_5"\n op: "Identity"\n input: "save/RestoreV2:5"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_5"\n op: "AssignVariableOp"\n input: "conv2d/kernel"\n input: "save/Identity_5"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_6"\n op: "Identity"\n input: "save/RestoreV2:6"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_6"\n op: "AssignVariableOp"\n input: "conv2d/kernel/Adam"\n input: "save/Identity_6"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_7"\n op: "Identity"\n input: "save/RestoreV2:7"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_7"\n op: "AssignVariableOp"\n input: "conv2d/kernel/Adam_1"\n input: "save/Identity_7"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_8"\n op: "Identity"\n input: "save/RestoreV2:8"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_8"\n op: "AssignVariableOp"\n input: "conv_clasf/bias"\n input: "save/Identity_8"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_9"\n op: "Identity"\n input: "save/RestoreV2:9"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_9"\n op: "AssignVariableOp"\n input: 
"conv_clasf/bias/Adam"\n input: "save/Identity_9"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_10"\n op: "Identity"\n input: "save/RestoreV2:10"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_10"\n op: "AssignVariableOp"\n input: "conv_clasf/bias/Adam_1"\n input: "save/Identity_10"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_11"\n op: "Identity"\n input: "save/RestoreV2:11"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_11"\n op: "AssignVariableOp"\n input: "conv_clasf/kernel"\n input: "save/Identity_11"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_12"\n op: "Identity"\n input: "save/RestoreV2:12"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_12"\n op: "AssignVariableOp"\n input: "conv_clasf/kernel/Adam"\n input: "save/Identity_12"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/Identity_13"\n op: "Identity"\n input: "save/RestoreV2:13"\n attr {\n key: "T"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/AssignVariableOp_13"\n op: "AssignVariableOp"\n input: "conv_clasf/kernel/Adam_1"\n input: "save/Identity_13"\n attr {\n key: "dtype"\n value {\n type: DT_FLOAT\n }\n }\n}\nnode {\n name: "save/restore_all"\n op: "NoOp"\n input: "^save/AssignVariableOp"\n input: "^save/AssignVariableOp_1"\n input: "^save/AssignVariableOp_10"\n input: "^save/AssignVariableOp_11"\n input: "^save/AssignVariableOp_12"\n input: "^save/AssignVariableOp_13"\n input: "^save/AssignVariableOp_2"\n input: "^save/AssignVariableOp_3"\n input: "^save/AssignVariableOp_4"\n input: "^save/AssignVariableOp_5"\n input: "^save/AssignVariableOp_6"\n input: "^save/AssignVariableOp_7"\n input: "^save/AssignVariableOp_8"\n input: "^save/AssignVariableOp_9"\n}\n';
}
</script>
<!-- Legacy HTML Import of the TensorBoard graph component; when it finishes
     loading, the onload handler runs load() to inject the serialized GraphDef
     pbtxt into the tf-graph-basic element below.
     NOTE(review): HTML Imports (rel=import) are deprecated and need a polyfill
     in modern browsers; kept to preserve the generated snippet's behavior. -->
<link rel="import" href="https://tensorboard.appspot.com/tf-graph-basic.build.html" onload="load()">
<!-- Fixed-height container for the TensorBoard graph renderer. The element id
     must match the getElementById call in the script above byte-for-byte: that
     call assigns the pbtxt graph definition to this component once the
     tf-graph-basic import has loaded. -->
<div style="height:600px">
<tf-graph-basic id="graph0.8855588178068329"></tf-graph-basic>
</div>
"></iframe>