Skip to content

Commit

Permalink
detailed doc about global pool layers in Gluon (apache#11832)
Browse files Browse the repository at this point in the history
  • Loading branch information
haojin2 authored and szha committed Jul 20, 2018
1 parent 3390095 commit 3ac7091
Showing 1 changed file with 116 additions and 6 deletions.
122 changes: 116 additions & 6 deletions python/mxnet/gluon/nn/conv_layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -993,47 +993,157 @@ def __init__(self, pool_size=(2, 2, 2), strides=None, padding=0,


class GlobalMaxPool1D(_Pooling):
    """Global max pooling operation for one dimensional (temporal) data.

    Parameters
    ----------
    layout : str, default 'NCW'
        Dimension ordering of data and weight. Only supports 'NCW' layout for now.
        'N', 'C', 'W' stands for batch, channel, and width (time) dimensions
        respectively. Pooling is applied on the W dimension.


    Inputs:
        - **data**: 3D input tensor with shape `(batch_size, in_channels, width)`
          when `layout` is `NCW`. For other layouts shape is permuted accordingly.

    Outputs:
        - **out**: 3D output tensor with shape `(batch_size, channels, 1)`
          when `layout` is `NCW`.
    """
    def __init__(self, layout='NCW', **kwargs):
        assert layout == 'NCW', "Only supports 'NCW' layout for now"
        # global_pool=True makes pool_size (1,) irrelevant: the whole W axis is reduced.
        super(GlobalMaxPool1D, self).__init__(
            (1,), None, 0, True, True, 'max', **kwargs)


class GlobalMaxPool2D(_Pooling):
    """Global max pooling operation for two dimensional (spatial) data.

    Parameters
    ----------
    layout : str, default 'NCHW'
        Dimension ordering of data and weight. Only supports 'NCHW' layout for now.
        'N', 'C', 'H', 'W' stands for batch, channel, height, and width
        dimensions respectively. Pooling is applied on the H and W dimensions.


    Inputs:
        - **data**: 4D input tensor with shape
          `(batch_size, in_channels, height, width)` when `layout` is `NCHW`.
          For other layouts shape is permuted accordingly.

    Outputs:
        - **out**: 4D output tensor with shape
          `(batch_size, channels, 1, 1)` when `layout` is `NCHW`.
    """
    def __init__(self, layout='NCHW', **kwargs):
        assert layout == 'NCHW', "Only supports 'NCHW' layout for now"
        # global_pool=True makes pool_size (1, 1) irrelevant: H and W are fully reduced.
        super(GlobalMaxPool2D, self).__init__(
            (1, 1), None, 0, True, True, 'max', **kwargs)


class GlobalMaxPool3D(_Pooling):
    """Global max pooling operation for 3D data (spatial or spatio-temporal).

    Parameters
    ----------
    layout : str, default 'NCDHW'
        Dimension ordering of data and weight. Only supports 'NCDHW' layout for now.
        'N', 'C', 'D', 'H', 'W' stands for batch, channel, depth, height and
        width dimensions respectively. Pooling is applied on the D, H and W
        dimensions.


    Inputs:
        - **data**: 5D input tensor with shape
          `(batch_size, in_channels, depth, height, width)` when `layout` is `NCDHW`.
          For other layouts shape is permuted accordingly.

    Outputs:
        - **out**: 5D output tensor with shape
          `(batch_size, channels, 1, 1, 1)` when `layout` is `NCDHW`.
    """
    def __init__(self, layout='NCDHW', **kwargs):
        assert layout == 'NCDHW', "Only supports 'NCDHW' layout for now"
        # global_pool=True makes pool_size (1, 1, 1) irrelevant: D, H, W are fully reduced.
        super(GlobalMaxPool3D, self).__init__(
            (1, 1, 1), None, 0, True, True, 'max', **kwargs)


class GlobalAvgPool1D(_Pooling):
    """Global average pooling operation for temporal data.

    Parameters
    ----------
    layout : str, default 'NCW'
        Dimension ordering of data and weight. Only supports 'NCW' layout for now.
        'N', 'C', 'W' stands for batch, channel, and width (time) dimensions
        respectively. Pooling is applied on the W dimension.


    Inputs:
        - **data**: 3D input tensor with shape `(batch_size, in_channels, width)`
          when `layout` is `NCW`. For other layouts shape is permuted accordingly.

    Outputs:
        - **out**: 3D output tensor with shape `(batch_size, channels, 1)`.
    """
    def __init__(self, layout='NCW', **kwargs):
        assert layout == 'NCW', "Only supports 'NCW' layout for now"
        # global_pool=True makes pool_size (1,) irrelevant: the whole W axis is averaged.
        super(GlobalAvgPool1D, self).__init__(
            (1,), None, 0, True, True, 'avg', **kwargs)


class GlobalAvgPool2D(_Pooling):
    """Global average pooling operation for spatial data.

    Parameters
    ----------
    layout : str, default 'NCHW'
        Dimension ordering of data and weight. Only supports 'NCHW' layout for now.
        'N', 'C', 'H', 'W' stands for batch, channel, height, and width
        dimensions respectively. Pooling is applied on the H and W dimensions.


    Inputs:
        - **data**: 4D input tensor with shape
          `(batch_size, in_channels, height, width)` when `layout` is `NCHW`.
          For other layouts shape is permuted accordingly.

    Outputs:
        - **out**: 4D output tensor with shape
          `(batch_size, channels, 1, 1)` when `layout` is `NCHW`.
    """
    def __init__(self, layout='NCHW', **kwargs):
        assert layout == 'NCHW', "Only supports 'NCHW' layout for now"
        # global_pool=True makes pool_size (1, 1) irrelevant: H and W are fully averaged.
        super(GlobalAvgPool2D, self).__init__(
            (1, 1), None, 0, True, True, 'avg', **kwargs)


class GlobalAvgPool3D(_Pooling):
"""Global max pooling operation for 3D data."""
"""Global average pooling operation for 3D data (spatial or spatio-temporal).
Parameters
----------
layout : str, default 'NCDHW'
Dimension ordering of data and weight. Can be 'NCDHW', 'NDHWC', etc.
'N', 'C', 'H', 'W', 'D' stands for batch, channel, height, width and
depth dimensions respectively. padding is applied on 'D', 'H' and 'W'
dimension.
Inputs:
- **data**: 5D input tensor with shape
`(batch_size, in_channels, depth, height, width)` when `layout` is `NCDHW`.
For other layouts shape is permuted accordingly.
Outputs:
- **out**: 5D output tensor with shape
`(batch_size, channels, 1, 1, 1)` when `layout` is `NCDHW`.
"""
def __init__(self, layout='NCDHW', **kwargs):
assert layout == 'NCDHW', "Only supports 'NCDHW' layout for now"
super(GlobalAvgPool3D, self).__init__(
Expand Down

0 comments on commit 3ac7091

Please sign in to comment.