resnet Namespace Reference

Classes

class  ResnetBuilder
 

Functions

def _bn_relu (input)
def _conv_bn_relu (**conv_params)
def _bn_relu_conv (**conv_params)
def _shortcut (input, residual)
def _residual_block (block_function, filters, repetitions, is_first_layer=False)
def basic_block (filters, init_strides=(1, 1), is_first_block_of_first_layer=False)
def bottleneck (filters, init_strides=(1, 1), is_first_block_of_first_layer=False)
def _handle_dim_ordering ()
def _get_block (identifier)
 

Detailed Description

Based on https://github.com/raghakot/keras-resnet/blob/master/resnet.py

Function Documentation

def resnet._bn_relu (input)
private
Helper to build a BN -> relu block

Definition at line 33 of file resnet.py.

def _bn_relu(input):
    """Helper to build a BN -> relu block
    """
    norm = BatchNormalization(axis=CHANNEL_AXIS)(input)
    return Activation("relu")(norm)
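
Example (illustrative, not from resnet.py): a minimal sketch assuming resnet.py is importable as resnet, a Keras version it supports is installed, and the backend uses channels-last ('tf') image dim ordering; the input shape is an arbitrary example.

from keras.layers import Input
import resnet

resnet._handle_dim_ordering()   # sets the CHANNEL_AXIS global that _bn_relu reads
x = Input(shape=(32, 32, 16))   # channels-last example tensor
y = resnet._bn_relu(x)          # BatchNormalization followed by a ReLU activation
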
def resnet._bn_relu_conv (**conv_params)
private
Helper to build a BN -> relu -> conv block.
This is an improved scheme proposed in http://arxiv.org/pdf/1603.05027v2.pdf

Definition at line 60 of file resnet.py.

def _bn_relu_conv(**conv_params):
    """Helper to build a BN -> relu -> conv block.
    This is an improved scheme proposed in http://arxiv.org/pdf/1603.05027v2.pdf
    """
    filters = conv_params["filters"]
    kernel_size = conv_params["kernel_size"]
    strides = conv_params.setdefault("strides", (1, 1))
    kernel_initializer = conv_params.setdefault("kernel_initializer", "he_normal")
    padding = conv_params.setdefault("padding", "same")
    kernel_regularizer = conv_params.setdefault("kernel_regularizer", l2(1.e-4))

    def f(input):
        activation = _bn_relu(input)
        return Conv2D(filters=filters, kernel_size=kernel_size,
                      strides=strides, padding=padding,
                      kernel_initializer=kernel_initializer,
                      kernel_regularizer=kernel_regularizer)(activation)

    return f
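
Example (illustrative, not from resnet.py): a sketch of the returned-closure pattern, assuming resnet.py is importable and a channels-last backend; the filters, kernel size, stride and input shape are arbitrary examples.

from keras.layers import Input
import resnet

resnet._handle_dim_ordering()
x = Input(shape=(32, 32, 16))
block = resnet._bn_relu_conv(filters=32, kernel_size=(3, 3), strides=(2, 2))
y = block(x)   # BN -> ReLU -> 3x3 Conv2D with stride 2
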
def resnet._conv_bn_relu (**conv_params)
private
Helper to build a conv -> BN -> relu block

Definition at line 40 of file resnet.py.

def _conv_bn_relu(**conv_params):
    """Helper to build a conv -> BN -> relu block
    """
    filters = conv_params["filters"]
    kernel_size = conv_params["kernel_size"]
    strides = conv_params.setdefault("strides", (1, 1))
    kernel_initializer = conv_params.setdefault("kernel_initializer", "he_normal")
    padding = conv_params.setdefault("padding", "same")
    kernel_regularizer = conv_params.setdefault("kernel_regularizer", l2(1.e-4))

    def f(input):
        conv = Conv2D(filters=filters, kernel_size=kernel_size,
                      strides=strides, padding=padding,
                      kernel_initializer=kernel_initializer,
                      kernel_regularizer=kernel_regularizer)(input)
        return _bn_relu(conv)

    return f
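
Example (illustrative, not from resnet.py): a sketch assuming resnet.py is importable and a channels-last backend; the 7x7/stride-2 parameters are just an example of a stem-style convolution, not a prescription.

from keras.layers import Input
import resnet

resnet._handle_dim_ordering()
x = Input(shape=(64, 64, 3))
y = resnet._conv_bn_relu(filters=64, kernel_size=(7, 7), strides=(2, 2))(x)   # Conv2D -> BN -> ReLU
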
def resnet._get_block (identifier)
private

Definition at line 185 of file resnet.py.

def _get_block(identifier):
    if isinstance(identifier, six.string_types):
        res = globals().get(identifier)
        if not res:
            raise ValueError('Invalid {}'.format(identifier))
        return res
    return identifier
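
Example (illustrative, not from resnet.py): a sketch assuming resnet.py is importable. Strings are resolved against the module globals; anything else is returned unchanged.

import resnet

block_fn = resnet._get_block('basic_block')      # returns the basic_block function
same_fn = resnet._get_block(resnet.bottleneck)   # non-string identifiers pass through unchanged
# resnet._get_block('no_such_block') would raise ValueError
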
def resnet._handle_dim_ordering ()
private

Definition at line 171 of file resnet.py.

def _handle_dim_ordering():
    global ROW_AXIS
    global COL_AXIS
    global CHANNEL_AXIS
    if K.image_dim_ordering() == 'tf':
        ROW_AXIS = 1
        COL_AXIS = 2
        CHANNEL_AXIS = 3
    else:
        CHANNEL_AXIS = 1
        ROW_AXIS = 2
        COL_AXIS = 3
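
Example (illustrative, not from resnet.py): a sketch assuming resnet.py is importable and the Keras backend reports 'tf' image dim ordering.

import resnet

resnet._handle_dim_ordering()
# with the 'tf' ordering: ROW_AXIS == 1, COL_AXIS == 2, CHANNEL_AXIS == 3
print(resnet.CHANNEL_AXIS)
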
def resnet._residual_block (block_function, filters, repetitions, is_first_layer=False)
private
Builds a residual block with repeating bottleneck blocks.

Definition at line 106 of file resnet.py.

def _residual_block(block_function, filters, repetitions, is_first_layer=False):
    """Builds a residual block with repeating bottleneck blocks.
    """
    def f(input):
        for i in range(repetitions):
            init_strides = (1, 1)
            if i == 0 and not is_first_layer:
                init_strides = (2, 2)
            input = block_function(filters=filters, init_strides=init_strides,
                                   is_first_block_of_first_layer=(is_first_layer and i == 0))(input)
        return input

    return f
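
Example (illustrative, not from resnet.py): a sketch assuming resnet.py is importable and a channels-last backend; the stage configuration (three basic blocks, 64 filters, first layer) is an arbitrary example.

from keras.layers import Input
import resnet

resnet._handle_dim_ordering()
x = Input(shape=(32, 32, 64))
stage = resnet._residual_block(resnet.basic_block, filters=64,
                               repetitions=3, is_first_layer=True)
y = stage(x)   # three stacked basic blocks; no stride-2 downsampling in the first stage
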
def resnet._shortcut (input, residual)
private
Adds a shortcut between input and residual block and merges them with "sum"

Definition at line 81 of file resnet.py.

def _shortcut(input, residual):
    """Adds a shortcut between input and residual block and merges them with "sum"
    """
    # Expand channels of shortcut to match residual.
    # Stride appropriately to match residual (width, height)
    # Should be int if network architecture is correctly configured.
    input_shape = K.int_shape(input)
    residual_shape = K.int_shape(residual)
    stride_width = int(round(input_shape[ROW_AXIS] / residual_shape[ROW_AXIS]))
    stride_height = int(round(input_shape[COL_AXIS] / residual_shape[COL_AXIS]))
    equal_channels = input_shape[CHANNEL_AXIS] == residual_shape[CHANNEL_AXIS]

    shortcut = input
    # 1 X 1 conv if shape is different. Else identity.
    if stride_width > 1 or stride_height > 1 or not equal_channels:
        shortcut = Conv2D(filters=residual_shape[CHANNEL_AXIS],
                          kernel_size=(1, 1),
                          strides=(stride_width, stride_height),
                          padding="valid",
                          kernel_initializer="he_normal",
                          kernel_regularizer=l2(0.0001))(input)

    return add([shortcut, residual])
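
Example (illustrative, not from resnet.py): a sketch assuming resnet.py is importable and a channels-last backend; the shapes are chosen so the 1x1 projection branch is triggered.

from keras.layers import Input
import resnet

resnet._handle_dim_ordering()
x = Input(shape=(32, 32, 64))
residual = resnet._bn_relu_conv(filters=128, kernel_size=(3, 3), strides=(2, 2))(x)
y = resnet._shortcut(x, residual)   # a 1x1 stride-2 conv projects x before the element-wise add
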
def resnet.basic_block (filters, init_strides=(1, 1), is_first_block_of_first_layer=False)
Basic 3 X 3 convolution blocks for use on resnets with layers <= 34.
Follows improved proposed scheme in http://arxiv.org/pdf/1603.05027v2.pdf

Definition at line 121 of file resnet.py.

def basic_block(filters, init_strides=(1, 1), is_first_block_of_first_layer=False):
    """Basic 3 X 3 convolution blocks for use on resnets with layers <= 34.
    Follows improved proposed scheme in http://arxiv.org/pdf/1603.05027v2.pdf
    """
    def f(input):

        if is_first_block_of_first_layer:
            # don't repeat bn->relu since we just did bn->relu->maxpool
            conv1 = Conv2D(filters=filters, kernel_size=(3, 3),
                           strides=init_strides,
                           padding="same",
                           kernel_initializer="he_normal",
                           kernel_regularizer=l2(1e-4))(input)
        else:
            conv1 = _bn_relu_conv(filters=filters, kernel_size=(3, 3),
                                  strides=init_strides)(input)

        residual = _bn_relu_conv(filters=filters, kernel_size=(3, 3))(conv1)
        return _shortcut(input, residual)

    return f
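
Example (illustrative, not from resnet.py): a sketch assuming resnet.py is importable and a channels-last backend; the filter count and stride are arbitrary examples.

from keras.layers import Input
import resnet

resnet._handle_dim_ordering()
x = Input(shape=(32, 32, 64))
y = resnet.basic_block(filters=128, init_strides=(2, 2))(x)   # two 3x3 convs plus a projected shortcut
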
def resnet.bottleneck (filters, init_strides=(1, 1), is_first_block_of_first_layer=False)
Bottleneck architecture for > 34 layer resnet.
Follows improved proposed scheme in http://arxiv.org/pdf/1603.05027v2.pdf

Returns:
    A final conv layer of filters * 4

Definition at line 144 of file resnet.py.

def bottleneck(filters, init_strides=(1, 1), is_first_block_of_first_layer=False):
    """Bottleneck architecture for > 34 layer resnet.
    Follows improved proposed scheme in http://arxiv.org/pdf/1603.05027v2.pdf

    Returns:
        A final conv layer of filters * 4
    """
    def f(input):

        if is_first_block_of_first_layer:
            # don't repeat bn->relu since we just did bn->relu->maxpool
            conv_1_1 = Conv2D(filters=filters, kernel_size=(1, 1),
                              strides=init_strides,
                              padding="same",
                              kernel_initializer="he_normal",
                              kernel_regularizer=l2(1e-4))(input)
        else:
            conv_1_1 = _bn_relu_conv(filters=filters, kernel_size=(1, 1),
                                     strides=init_strides)(input)

        conv_3_3 = _bn_relu_conv(filters=filters, kernel_size=(3, 3))(conv_1_1)
        residual = _bn_relu_conv(filters=filters * 4, kernel_size=(1, 1))(conv_3_3)
        return _shortcut(input, residual)

    return f
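
Example (illustrative, not from resnet.py): a sketch assuming resnet.py is importable and a channels-last backend; the shapes are arbitrary examples.

from keras.layers import Input
import resnet

resnet._handle_dim_ordering()
x = Input(shape=(56, 56, 64))
y = resnet.bottleneck(filters=64)(x)   # 1x1 -> 3x3 -> 1x1 convs; output has filters * 4 = 256 channels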