def build(input_shape, num_outputs, block_fn, repetitions, branches=False):
    """Builds a custom ResNet-like architecture.

    Args:
        input_shape: The input shape in the form (nb_channels, nb_rows, nb_cols)
        num_outputs: The number of outputs at the final softmax layer
        block_fn: The block function to use, either `basic_block` or `bottleneck`.
            The original paper used `basic_block` for networks with fewer than 50 layers.
        repetitions: Number of repetitions of the various block units. At each block
            unit the number of filters is doubled and the input size is halved.
    """
    _handle_dim_ordering()
    if len(input_shape) != 3:
        raise Exception("Input shape should be a tuple (nb_channels, nb_rows, nb_cols)")

    # Permute dimension order if necessary
    if K.image_dim_ordering() == 'tf':
        input_shape = (input_shape[1], input_shape[2], input_shape[0])

    input = Input(shape=input_shape)
    conv1 = _conv_bn_relu(filters=64, kernel_size=(7, 7), strides=(2, 2))(input)
    pool1 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding="same")(conv1)

    # Stack residual stages, doubling the filter count at each stage
    block = pool1
    filters = 64
    for i, r in enumerate(repetitions):
        block = _residual_block(block_fn, filters=filters, repetitions=r,
                                is_first_layer=(i == 0))(block)
        filters *= 2

    # Classifier block: average-pool the final feature map down to a vector
    block_shape = K.int_shape(block)
    pool2 = AveragePooling2D(pool_size=(block_shape[ROW_AXIS], block_shape[COL_AXIS]),
                             strides=(1, 1))(block)
    flatten1 = Flatten()(pool2)
    if branches:  # assumption: `branches` selects the feature-only model (gap in the original listing here)
        model = Model(inputs=input, outputs=flatten1)
        return model

    dense = Dense(units=num_outputs, kernel_initializer="he_normal",
                  activation="softmax")(flatten1)
    model = Model(inputs=input, outputs=dense)
    return model
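A minimal usage sketch, assuming the module-level helpers (`basic_block`, `bottleneck`, the Keras imports, etc.) are in scope; the input shape and class count below are placeholders, not values from this module:

# Hypothetical example: an 18-layer-style classifier for 10 classes on 224x224 RGB input.
# `basic_block` with repetitions [2, 2, 2, 2] mirrors the 18-layer configuration of the paper.
model = build(input_shape=(3, 224, 224), num_outputs=10,
              block_fn=basic_block, repetitions=[2, 2, 2, 2])
model.compile(optimizer="sgd", loss="categorical_crossentropy", metrics=["accuracy"])
model.summary()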
References: _conv_bn_relu(), _get_block(), _residual_block(), _handle_dim_ordering()
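The standard depths from the original paper map onto `block_fn`/`repetitions` pairs; the wrapper names below are illustrative assumptions, not necessarily part of this module:

# Illustrative convenience wrappers (names are assumptions):
# 18/34-layer variants use `basic_block`; 50-layer and deeper variants use `bottleneck`.
def build_resnet_18(input_shape, num_outputs):
    return build(input_shape, num_outputs, basic_block, [2, 2, 2, 2])

def build_resnet_50(input_shape, num_outputs):
    return build(input_shape, num_outputs, bottleneck, [3, 4, 6, 3])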