···4646> randomMyNet = randomNetwork
47474848The function `randomMyNet` witnesses the `CreatableNetwork`
4949-constraint of the neural network, that is, it ensures the network
4949+constraint of the neural network, and in doing so, ensures the network
5050can be built, and hence, that the architecture is sound.
5151-}
5252
+7
src/Grenade/Layers/Elu.hs
···22{-# LANGUAGE TypeOperators #-}
33{-# LANGUAGE TypeFamilies #-}
44{-# LANGUAGE MultiParamTypeClasses #-}
55+{-|
66+Module : Grenade.Layers.Elu
77+Description : Exponential linear unit layer
88+Copyright : (c) Huw Campbell, 2016-2017
99+License : BSD2
1010+Stability : experimental
1111+-}
512module Grenade.Layers.Elu (
613 Elu (..)
714 ) where
+8
src/Grenade/Layers/Logit.hs
···33{-# LANGUAGE TypeFamilies #-}
44{-# LANGUAGE FlexibleInstances #-}
55{-# LANGUAGE MultiParamTypeClasses #-}
66+{-|
77+Module : Grenade.Layers.Logit
88+Description : Sigmoid nonlinear layer
99+Copyright : (c) Huw Campbell, 2016-2017
1010+License : BSD2
1111+Stability : experimental
1212+-}
613module Grenade.Layers.Logit (
714 Logit (..)
815 ) where
···1421import Grenade.Core
15221623-- | A Logit layer.
2424+--
1725-- A layer which can act between any shape of the same dimension, performing a sigmoid function.
1826-- This layer should be used as the output layer of a network for logistic regression (classification)
1927-- problems.
+9
src/Grenade/Layers/Pad.hs
···55{-# LANGUAGE TypeFamilies #-}
66{-# LANGUAGE MultiParamTypeClasses #-}
77{-# LANGUAGE FlexibleContexts #-}
88+{-|
99+Module : Grenade.Layers.Pad
1010+Description : Padding layer for 2D and 3D images
1111+Copyright : (c) Huw Campbell, 2016-2017
1212+License : BSD2
1313+Stability : experimental
1414+-}
815module Grenade.Layers.Pad (
916 Pad (..)
1017 ) where
···2229import Numeric.LinearAlgebra.Static (extract, create)
23302431-- | A padding layer for a neural network.
3232+--
3333+-- Pads on the X and Y dimension of an image.
2534data Pad :: Nat
2635 -> Nat
2736 -> Nat
+8
src/Grenade/Layers/Pooling.hs
···66{-# LANGUAGE TypeFamilies #-}
77{-# LANGUAGE MultiParamTypeClasses #-}
88{-# LANGUAGE FlexibleContexts #-}
99+{-|
1010+Module : Grenade.Layers.Pooling
1111+Description : Max Pooling layer for 2D and 3D images
1212+Copyright : (c) Huw Campbell, 2016-2017
1313+License : BSD2
1414+Stability : experimental
1515+-}
916module Grenade.Layers.Pooling (
1017 Pooling (..)
1118 ) where
···2229import Numeric.LinearAlgebra.Static as LAS hiding ((|||), build, toRows)
23302431-- | A pooling layer for a neural network.
3232+--
2533-- Does a max pooling, looking over a kernel similarly to the convolution network, but returning
2634-- the maximum value only. This layer is often used to provide minor amounts of translational invariance.
2735--
+7
src/Grenade/Layers/Relu.hs
···22{-# LANGUAGE TypeOperators #-}
33{-# LANGUAGE TypeFamilies #-}
44{-# LANGUAGE MultiParamTypeClasses #-}
55+{-|
66+Module : Grenade.Layers.Relu
77+Description : Rectifying linear unit layer
88+Copyright : (c) Huw Campbell, 2016-2017
99+License : BSD2
1010+Stability : experimental
1111+-}
512module Grenade.Layers.Relu (
613 Relu (..)
714 ) where
+10-3
src/Grenade/Layers/Reshape.hs
···33{-# LANGUAGE TypeFamilies #-}
44{-# LANGUAGE MultiParamTypeClasses #-}
55{-# LANGUAGE FlexibleContexts #-}
66+{-|
77+Module : Grenade.Layers.Reshape
88+Description : Multipurpose reshaping layer
99+Copyright : (c) Huw Campbell, 2016-2017
1010+License : BSD2
1111+Stability : experimental
1212+-}
613module Grenade.Layers.Reshape (
714 Reshape (..)
815 ) where
···19262027-- | Reshape Layer
2128--
2222--- Flattens input down to D1 from either 2D or 3D data.
2323---
2424--- Casts input D1 up to either 2D or 3D data if the shapes are good.
2929+-- The Reshape layer can flatten any 2D or 3D image to a 1D vector with the
3030+-- same number of activations, as well as cast up from 1D to a 2D or 3D
3131+-- shape.
2532--
2633-- Can also be used to turn a 3D image with only one channel into a 2D image
2734-- or vice versa.
+11
src/Grenade/Layers/Softmax.hs
···33{-# LANGUAGE TypeFamilies #-}
44{-# LANGUAGE FlexibleInstances #-}
55{-# LANGUAGE MultiParamTypeClasses #-}
66+{-|
77+Module : Grenade.Layers.Softmax
88+Description : Softmax loss layer
99+Copyright : (c) Huw Campbell, 2016-2017
1010+License : BSD2
1111+Stability : experimental
1212+-}
613module Grenade.Layers.Softmax (
714 Softmax (..)
815 , softmax
···1724import Numeric.LinearAlgebra.Static as LAS
18251926-- | A Softmax layer
2727+--
2028-- This layer is like a logit layer, but normalises
2129-- a set of matrices to be probabilities.
3030+--
3131+-- One can use this layer as the last layer in a network
3232+-- if they need normalised probabilities.
2233data Softmax = Softmax
2334 deriving Show
2435
···33{-# LANGUAGE TypeFamilies #-}
44{-# LANGUAGE MultiParamTypeClasses #-}
55{-# LANGUAGE FlexibleInstances #-}
66+{-|
77+Module : Grenade.Layers.Trivial
88+Description : Trivial layer which performs no operations on the data
99+Copyright : (c) Huw Campbell, 2016-2017
1010+License : BSD2
1111+Stability : experimental
1212+-}
613module Grenade.Layers.Trivial (
714 Trivial (..)
815 ) where
···11181219import Grenade.Core
13201414--- | A trivial layer.
2121+-- | A Trivial layer.
2222+--
2323+-- This can be used to pass an unchanged value up one side of a
2424+-- graph, for a Residual network for example.
1525data Trivial = Trivial
1626 deriving Show
1727