💣 Machine learning which might blow up in your face 💣

Docs

+79 -5
+1 -1
src/Grenade.hs
··· 46 46 > randomMyNet = randomNetwork 47 47 48 48 The function `randomMyNet` witnesses the `CreatableNetwork` 49 - constraint of the neural network, that is, it ensures the network 49 + constraint of the neural network, and in doing so, ensures the network 50 50 can be built, and hence, that the architecture is sound. 51 51 -} 52 52
+7
src/Grenade/Layers/Elu.hs
··· 2 2 {-# LANGUAGE TypeOperators #-} 3 3 {-# LANGUAGE TypeFamilies #-} 4 4 {-# LANGUAGE MultiParamTypeClasses #-} 5 + {-| 6 + Module : Grenade.Layers.Elu 7 + Description : Exponential linear unit layer 8 + Copyright : (c) Huw Campbell, 2016-2017 9 + License : BSD2 10 + Stability : experimental 11 + -} 5 12 module Grenade.Layers.Elu ( 6 13 Elu (..) 7 14 ) where
+8
src/Grenade/Layers/Logit.hs
··· 3 3 {-# LANGUAGE TypeFamilies #-} 4 4 {-# LANGUAGE FlexibleInstances #-} 5 5 {-# LANGUAGE MultiParamTypeClasses #-} 6 + {-| 7 + Module : Grenade.Layers.Logit 8 + Description : Sigmoid nonlinear layer 9 + Copyright : (c) Huw Campbell, 2016-2017 10 + License : BSD2 11 + Stability : experimental 12 + -} 6 13 module Grenade.Layers.Logit ( 7 14 Logit (..) 8 15 ) where ··· 14 21 import Grenade.Core 15 22 16 23 -- | A Logit layer. 24 + -- 17 25 -- A layer which can act between any shape of the same dimension, performing a sigmoid function. 18 26 -- This layer should be used as the output layer of a network for logistic regression (classification) 19 27 -- problems.
+9
src/Grenade/Layers/Pad.hs
··· 5 5 {-# LANGUAGE TypeFamilies #-} 6 6 {-# LANGUAGE MultiParamTypeClasses #-} 7 7 {-# LANGUAGE FlexibleContexts #-} 8 + {-| 9 + Module : Grenade.Layers.Pad 10 + Description : Padding layer for 2D and 3D images 11 + Copyright : (c) Huw Campbell, 2016-2017 12 + License : BSD2 13 + Stability : experimental 14 + -} 8 15 module Grenade.Layers.Pad ( 9 16 Pad (..) 10 17 ) where ··· 22 29 import Numeric.LinearAlgebra.Static (extract, create) 23 30 24 31 -- | A padding layer for a neural network. 32 + -- 33 + -- Pads on the X and Y dimension of an image. 25 34 data Pad :: Nat 26 35 -> Nat 27 36 -> Nat
+8
src/Grenade/Layers/Pooling.hs
··· 6 6 {-# LANGUAGE TypeFamilies #-} 7 7 {-# LANGUAGE MultiParamTypeClasses #-} 8 8 {-# LANGUAGE FlexibleContexts #-} 9 + {-| 10 + Module : Grenade.Layers.Pooling 11 + Description : Max Pooling layer for 2D and 3D images 12 + Copyright : (c) Huw Campbell, 2016-2017 13 + License : BSD2 14 + Stability : experimental 15 + -} 9 16 module Grenade.Layers.Pooling ( 10 17 Pooling (..) 11 18 ) where ··· 22 29 import Numeric.LinearAlgebra.Static as LAS hiding ((|||), build, toRows) 23 30 24 31 -- | A pooling layer for a neural network. 32 + -- 25 33 -- Does a max pooling, looking over a kernel similarly to the convolution network, but returning 26 34 -- maxarg only. This layer is often used to provide minor amounts of translational invariance. 27 35 --
+7
src/Grenade/Layers/Relu.hs
··· 2 2 {-# LANGUAGE TypeOperators #-} 3 3 {-# LANGUAGE TypeFamilies #-} 4 4 {-# LANGUAGE MultiParamTypeClasses #-} 5 + {-| 6 + Module : Grenade.Layers.Relu 7 + Description : Rectifying linear unit layer 8 + Copyright : (c) Huw Campbell, 2016-2017 9 + License : BSD2 10 + Stability : experimental 11 + -} 5 12 module Grenade.Layers.Relu ( 6 13 Relu (..) 7 14 ) where
+10 -3
src/Grenade/Layers/Reshape.hs
··· 3 3 {-# LANGUAGE TypeFamilies #-} 4 4 {-# LANGUAGE MultiParamTypeClasses #-} 5 5 {-# LANGUAGE FlexibleContexts #-} 6 + {-| 7 + Module : Grenade.Layers.Reshape 8 + Description : Multipurpose reshaping layer 9 + Copyright : (c) Huw Campbell, 2016-2017 10 + License : BSD2 11 + Stability : experimental 12 + -} 6 13 module Grenade.Layers.Reshape ( 7 14 Reshape (..) 8 15 ) where ··· 19 26 20 27 -- | Reshape Layer 21 28 -- 22 - -- Flattens input down to D1 from either 2D or 3D data. 23 - -- 24 - -- Casts input D1 up to either 2D or 3D data if the shapes are good. 29 + -- The Reshape layer can flatten any 2D or 3D image to 1D vector with the 30 + -- same number of activations, as well as cast up from 1D to a 2D or 3D 31 + -- shape. 25 32 -- 26 33 -- Can also be used to turn a 3D image with only one channel into a 2D image 27 34 -- or vice versa.
+11
src/Grenade/Layers/Softmax.hs
··· 3 3 {-# LANGUAGE TypeFamilies #-} 4 4 {-# LANGUAGE FlexibleInstances #-} 5 5 {-# LANGUAGE MultiParamTypeClasses #-} 6 + {-| 7 + Module : Grenade.Layers.Softmax 8 + Description : Softmax loss layer 9 + Copyright : (c) Huw Campbell, 2016-2017 10 + License : BSD2 11 + Stability : experimental 12 + -} 6 13 module Grenade.Layers.Softmax ( 7 14 Softmax (..) 8 15 , softmax ··· 17 24 import Numeric.LinearAlgebra.Static as LAS 18 25 19 26 -- | A Softmax layer 27 + -- 20 28 -- This layer is like a logit layer, but normalises 21 29 -- a set of matrices to be probabilities. 30 + -- 31 + -- One can use this layer as the last layer in a network 32 + -- if they need normalised probabilities. 22 33 data Softmax = Softmax 23 34 deriving Show 24 35
+7
src/Grenade/Layers/Tanh.hs
··· 3 3 {-# LANGUAGE TypeFamilies #-} 4 4 {-# LANGUAGE FlexibleInstances #-} 5 5 {-# LANGUAGE MultiParamTypeClasses #-} 6 + {-| 7 + Module : Grenade.Layers.Tanh 8 + Description : Hyperbolic tangent nonlinear layer 9 + Copyright : (c) Huw Campbell, 2016-2017 10 + License : BSD2 11 + Stability : experimental 12 + -} 6 13 module Grenade.Layers.Tanh ( 7 14 Tanh (..) 8 15 ) where
+11 -1
src/Grenade/Layers/Trivial.hs
··· 3 3 {-# LANGUAGE TypeFamilies #-} 4 4 {-# LANGUAGE MultiParamTypeClasses #-} 5 5 {-# LANGUAGE FlexibleInstances #-} 6 + {-| 7 + Module : Grenade.Layers.Trivial 8 + Description : Trivial layer which performs no operations on the data 9 + Copyright : (c) Huw Campbell, 2016-2017 10 + License : BSD2 11 + Stability : experimental 12 + -} 6 13 module Grenade.Layers.Trivial ( 7 14 Trivial (..) 8 15 ) where ··· 11 18 12 19 import Grenade.Core 13 20 14 - -- | A trivial layer. 21 + -- | A Trivial layer. 22 + -- 23 + -- This can be used to pass an unchanged value up one side of a 24 + -- graph, for a Residual network for example. 15 25 data Trivial = Trivial 16 26 deriving Show 17 27