How can I obtain the middle layer results in tf2?

57 Views Asked by At

I wrote a model using the Keras subclassing API:

''' class block(tf.keras.Model):

def __init__(self, index, is_train_bn, channel_axis):
    """Build three identical activation -> separable-conv -> batch-norm stages.

    index: used only to derive the layer-name prefix ('block' + str(index + 5)).
    is_train_bn: value later forwarded to each batch norm as its `training` flag.
    channel_axis: axis normalized by BatchNormalization.
    """
    super().__init__()
    prefix = 'block' + str(index + 5)
    self.is_train_bn = is_train_bn
    # The three stages are structurally identical, so create them in a loop.
    # setattr goes through Keras' __setattr__ hook, so sublayers are tracked
    # exactly as with plain attribute assignment; the explicit `name=` strings
    # are identical to the stage-by-stage version.
    for n in (1, 2, 3):
        setattr(self, 'sepconv%d_act' % n,
                layers.Activation('relu', name=prefix + '_sepconv%d_act' % n))
        setattr(self, 'sepconv%d' % n,
                layers.SeparableConv2D(728, (3, 3), padding='same',
                                       use_bias=False,
                                       name=prefix + '_sepconv%d' % n))
        setattr(self, 'sepconv%d_bn' % n,
                layers.BatchNormalization(axis=channel_axis,
                                          name=prefix + '_sepconv%d_bn' % n))

def call(self, x, training=False):
    """Forward pass: three sepconv stages plus an identity (residual) skip.

    Fix: a tf.keras.Model subclass must override `call`, not `__call__`.
    Overriding `__call__` bypasses Keras' own `Model.__call__` wrapper
    (building, name scoping, functional tracing), which is what breaks
    inspection of intermediate tensors. Callers are unaffected: invoking
    the instance (`block_instance(x)`) still dispatches to `call`.

    x: input tensor; assumed to already have 728 channels so the residual
       addition lines up with the sepconv outputs — TODO confirm with caller.
    training: standard Keras flag. NOTE(review): batch norms are driven by
       self.is_train_bn rather than this argument — confirm that is intended.
    """
    residual = x
    x = self.sepconv1_act(x)
    x = self.sepconv1(x)
    # `training` passed by keyword for clarity; value comes from the ctor flag.
    x = self.sepconv1_bn(x, training=self.is_train_bn)
    x = self.sepconv2_act(x)
    x = self.sepconv2(x)
    x = self.sepconv2_bn(x, training=self.is_train_bn)
    x = self.sepconv3_act(x)
    x = self.sepconv3(x)
    x = self.sepconv3_bn(x, training=self.is_train_bn)
    # Identity shortcut around the whole stack.
    return x + residual

''' When I want to print x, I get this error:

' Cannot convert a symbolic Tensor (block1_conv1_act_1/Relu:0) to a numpy array'.

1

There is 1 solution below

1
On

To print out "x" from the middle of the model, you can apply the approach exemplified below (code modified from your example). Once you have created that kind of "monitoring model", you simply get "x_to_probe" out with a call like:

`x_value = model_for_monitoring_selected_x.predict(tf.random.uniform((1, 1, 16, 16)))` followed by `print(x_value)`

...where in this example the input of the model is exemplified by a random tensor.

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

channel_axis = 1
prefix = 'hmmm...'

# Value fed to each BatchNormalization call as its `training` flag.
is_train_bn = 1


def _make_stage(n):
    # One relu -> separable-conv -> batch-norm stage, with the same explicit
    # layer names as the layer-by-layer version of this script.
    act = layers.Activation('relu', name=prefix + '_sepconv%d_act' % n)
    conv = layers.SeparableConv2D(728, (3, 3), padding='same', use_bias=False,
                                  name=prefix + '_sepconv%d' % n)
    bn = layers.BatchNormalization(axis=channel_axis,
                                   name=prefix + '_sepconv%d_bn' % n)
    return act, conv, bn


# Wire the three stages with the functional API, remembering the tensor we
# want to inspect: the output of the third separable conv, taken BEFORE its
# batch norm (exactly the point probed in the original answer).
inputs = keras.Input(shape=(1, 16, 16))
x = inputs
x_to_probe = None
for n in (1, 2, 3):
    act, conv, bn = _make_stage(n)
    x = conv(act(x))
    if n == 3:
        x_to_probe = x
    x = bn(x, is_train_bn)

model = keras.Model(inputs=inputs, outputs=x, name="example for Wayne")
model.summary()

# A second Model over the same graph whose output IS the probed tensor;
# predicting with it returns the intermediate activation directly.
model_for_monitoring_selected_x = keras.Model(
    inputs=inputs, outputs=x_to_probe, name="example for Wayne to print x")
model_for_monitoring_selected_x.summary()