You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Hi
I'm partitioning feature maps at different scales and then concatenating them all, but I'm getting the error below. Please help me resolve it.
node = layer._inbound_nodes[node_index]
AttributeError: 'NoneType' object has no attribute '_inbound_nodes'
Hi
I'm partitioning feature maps at different scales and then concatenating them all, but I'm getting the error below. Please help me resolve it.
node = layer._inbound_nodes[node_index]
AttributeError: 'NoneType' object has no attribute '_inbound_nodes'
Here is my code:
# Multi-scale descriptor on top of ResNet50: three conv branches at different
# strides, plus horizontal partitions of the scale-2 and scale-3 feature maps,
# all flattened and concatenated into a single vector fed to a softmax head.
model_resnet = ResNet50(include_top=False, weights="imagenet", input_shape=(386, 124, 3))

# Reshape flattens each branch; Lambda is needed to slice Keras tensors (see below).
from keras.layers import Reshape, Lambda


def _partition_rows(tensor, delta, index):
    """Return rows [(index-1)*delta, index*delta) of `tensor` as a Keras layer output.

    Indexing a Keras symbolic tensor directly (e.g. `pool2[:, a:b, :]`) yields a
    plain TF tensor with no Keras layer history; building a Model from it fails
    with "AttributeError: 'NoneType' object has no attribute '_inbound_nodes'".
    Wrapping the slice in a Lambda layer keeps the graph traceable by Keras.
    The bounds are bound as lambda defaults to avoid Python's late-binding
    closure pitfall when this is called inside a loop.
    """
    lo = (index - 1) * delta
    hi = index * delta
    return Lambda(lambda x, lo=lo, hi=hi: x[:, lo:hi, :, :])(tensor)


# ----- scale 1 -----
conv1 = Conv2D(128, (2, 2), activation='relu', strides=(3, 3))(model_resnet.output)
conv1 = BatchNormalization()(conv1)
pool1 = MaxPooling2D((2, 2), padding='same')(conv1)
relu1 = Activation('relu')(pool1)
drop1 = Dropout(rate=0.5)(relu1)
# NOTE(review): assumes drop1 holds exactly 256 elements per sample — confirm
# against the actual feature-map shape for a (386, 124, 3) input.
flattened1 = Reshape((256,))(drop1)

# ----- scale 2 -----
conv2 = Conv2D(256, (2, 2), activation='relu', strides=(2, 2))(model_resnet.output)
conv2 = BatchNormalization()(conv2)
pool2 = MaxPooling2D((2, 2), padding='same')(conv2)
relu2 = Activation('relu')(pool2)
drop2 = Dropout(rate=0.5)(relu2)
flattened2 = Reshape((768,))(drop2)  # NOTE(review): assumes 768 elements — confirm

# ---------- partitions, scale 2 ----------
b = pool2.shape[1]
n_partitions = 3
delta = b // n_partitions
# Original code only consumed the first two partitions at this scale,
# so only two are built (range(1, 3)).
partitions = [_partition_rows(pool2, delta, i) for i in range(1, 3)]

p1 = partitions[0]
p1 = BatchNormalization()(p1)
p1 = MaxPooling2D((2, 2), padding='same')(p1)
p1 = Activation('relu')(p1)
p1 = Dropout(rate=0.5)(p1)
p1 = Reshape((256,))(p1)  # NOTE(review): hard-coded size — confirm

p2 = partitions[1]
p2 = BatchNormalization()(p2)
p2 = MaxPooling2D((2, 2), padding='same')(p2)
p2 = Activation('relu')(p2)
p2 = Dropout(rate=0.5)(p2)
p2 = Reshape((256,))(p2)
# ---------- end scale 2 ----------

# ----- scale 3 -----
conv3 = Conv2D(256, (2, 2), activation='relu', strides=(1, 1))(model_resnet.output)
conv3 = BatchNormalization()(conv3)
pool3 = MaxPooling2D((2, 2), padding='same')(conv3)
relu3 = Activation('relu')(pool3)
drop3 = Dropout(rate=0.5)(relu3)
flattened3 = Reshape((3072,))(drop3)  # NOTE(review): assumes 3072 elements — confirm

# ---------- partitions, scale 3 ----------
b = pool3.shape[1]
n_partitions = 3
delta = b // n_partitions
partitions = [_partition_rows(pool3, delta, i) for i in range(1, 4)]

p3 = partitions[0]
p3 = BatchNormalization()(p3)
p3 = MaxPooling2D((2, 2), padding='same')(p3)
p3 = Activation('relu')(p3)
p3 = Dropout(rate=0.2)(p3)
p3 = Reshape((256,))(p3)

p4 = partitions[1]
p4 = BatchNormalization()(p4)
p4 = MaxPooling2D((2, 2), padding='same')(p4)
p4 = Activation('relu')(p4)
p4 = Dropout(rate=0.2)(p4)
p4 = Reshape((256,))(p4)

p5 = partitions[2]
p5 = BatchNormalization()(p5)
p5 = MaxPooling2D((2, 2), padding='same')(p5)
p5 = Activation('relu')(p5)
p5 = Dropout(rate=0.2)(p5)
p5 = Reshape((256,))(p5)
# ---------- end scale 3 ----------

# Concatenate every flattened branch along the feature axis and classify.
F = concatenate([flattened1, flattened2, flattened3, p1, p2, p3, p4, p5], 1)
x4 = Dense(units=751, activation='softmax', name='fc8',
           kernel_initializer=RandomNormal(mean=0.0, stddev=0.001))(F)
model = Model(inputs=model_resnet.input, outputs=x4)
The text was updated successfully, but these errors were encountered: