-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/


-- | hmatrix operations lifted for backprop
--   
--   hmatrix operations lifted for backprop, along with orphan instances.
--   
--   Meant to act as a drop-in replacement to the API of
--   Numeric.LinearAlgebra.Static. Just change your imports, and your
--   functions are automatically backpropagatable.
--   
--   See README on Github at
--   <a>https://github.com/mstksg/hmatrix-backprop#readme</a>
@package hmatrix-backprop
@version 0.1.2.3


-- | A wrapper over <a>Numeric.LinearAlgebra.Static</a> (type-safe vector
--   and matrix operations based on blas/lapack) that allows its operations
--   to work with <a>backprop</a>. Also provides orphan instances of
--   <a>Backprop</a> for types in <a>Numeric.LinearAlgebra.Static</a>.
--   
--   In short, these functions are "lifted" to work with <a>BVar</a>s.
--   
--   Using <a>evalBP</a> will run the original operation:
--   
--   <pre>
--   <a>evalBP</a> :: (forall s. <a>Reifies</a> s <a>W</a> =&gt; <a>BVar</a> s a -&gt; <a>BVar</a> s b) -&gt; a -&gt; b
--   </pre>
--   
--   But using <a>gradBP</a> or <a>backprop</a> will give you the gradient:
--   
--   <pre>
--   <a>gradBP</a> :: (forall s. <a>Reifies</a> s <a>W</a> =&gt; <a>BVar</a> s a -&gt; <a>BVar</a> s b) -&gt; a -&gt; a
--   </pre>
--   
--   These can act as a drop-in replacement to the API of
--   <a>Numeric.LinearAlgebra.Static</a>. Just change your imports, and
--   your functions are automatically backpropagatable. Useful types are
--   all re-exported.
--   
--   Also contains <a>sumElements</a> <a>BVar</a> operation.
--   
--   Formulas for gradients come from the following papers:
--   
--   <ul>
--   
--   <li><a>https://people.maths.ox.ac.uk/gilesm/files/NA-08-01.pdf</a></li>
--   <li><a>http://www.dtic.mil/dtic/tr/fulltext/u2/624426.pdf</a></li>
--   
--   <li><a>http://www.cs.cmu.edu/~zkolter/course/15-884/linalg-review.pdf</a></li>
--   <li><a>https://arxiv.org/abs/1602.07527</a></li>
--   </ul>
--   
--   Some functions are notably unlifted:
--   
--   <ul>
--   <li><a>svd</a>: I can't find any resources that allow you to
--   backpropagate if the U and V matrices are used! If you find one, let
--   me know, or feel free to submit a PR! Because of this, currently only
--   a version that returns the singular values is exported.</li>
--   <li><a>svdTall</a>, <a>svdFlat</a>: Not sure where to start for
--   these</li>
--   <li><tt>qr</tt>: Same story.
--   <a>https://github.com/tensorflow/tensorflow/issues/6504</a> might
--   yield a clue?</li>
--   <li><a>her</a>: No <a>Num</a> instance for <a>Her</a> makes this
--   impossible at the moment with the current backprop API</li>
--   <li><a>expm</a>: Definitely possible, but I haven't dug deep enough to
--   figure it out yet! There is a description here
--   <a>https://people.maths.ox.ac.uk/gilesm/files/NA-08-01.pdf</a> but it
--   requires some things I am not familiar with yet. Feel free to submit a
--   PR!</li>
--   <li><a>sqrtm</a>: Also likely possible. Maybe try to translate
--   <a>http://people.cs.umass.edu/~smaji/projects/matrix-sqrt/</a> ? PRs
--   welcomed!</li>
--   <li><a>linSolve</a>: Haven't figured out where to start!</li>
--   <li><a>&lt;/&gt;</a>: Same story</li>
--   <li>Functions returning existential types, like <a>withNullSpace</a>,
--   <a>withOrth</a>, <a>withRows</a>, etc.; not quite sure what the best
--   way to handle these are at the moment.</li>
--   <li><a>withRows</a> and <a>withColumns</a> made "type-safe", without
--   existential types, with <a>fromRows</a> and <a>fromColumns</a>.</li>
--   </ul>
module Numeric.LinearAlgebra.Static.Backprop
data R (n :: Nat)
type ℝ = Double
vec2 :: Reifies s W => BVar s ℝ -> BVar s ℝ -> BVar s (R 2)
vec3 :: Reifies s W => BVar s ℝ -> BVar s ℝ -> BVar s ℝ -> BVar s (R 3)
vec4 :: Reifies s W => BVar s ℝ -> BVar s ℝ -> BVar s ℝ -> BVar s ℝ -> BVar s (R 4)
(&) :: (KnownNat n, 1 <= n, KnownNat (n + 1), Reifies s W) => BVar s (R n) -> BVar s ℝ -> BVar s (R (n + 1))
infixl 4 &
(#) :: (KnownNat n, KnownNat m, Reifies s W) => BVar s (R n) -> BVar s (R m) -> BVar s (R (n + m))
infixl 4 #
split :: forall p n s. (KnownNat p, KnownNat n, p <= n, Reifies s W) => BVar s (R n) -> (BVar s (R p), BVar s (R (n - p)))
headTail :: (Reifies s W, KnownNat n, 1 <= n) => BVar s (R n) -> (BVar s ℝ, BVar s (R (n - 1)))

-- | Potentially extremely bad for anything but short lists!!!
vector :: forall n s. Reifies s W => Vector n (BVar s ℝ) -> BVar s (R n)
linspace :: forall n s. (KnownNat n, Reifies s W) => BVar s ℝ -> BVar s ℝ -> BVar s (R n)
range :: KnownNat n => R n
dim :: KnownNat n => R n
data L (m :: Nat) (n :: Nat)
type Sq (n :: Nat) = L n n
row :: Reifies s W => BVar s (R n) -> BVar s (L 1 n)
col :: (KnownNat n, Reifies s W) => BVar s (R n) -> BVar s (L n 1)
(|||) :: (KnownNat c, KnownNat r1, KnownNat (r1 + r2), Reifies s W) => BVar s (L c r1) -> BVar s (L c r2) -> BVar s (L c (r1 + r2))
infixl 3 |||
(===) :: (KnownNat c, KnownNat r1, KnownNat (r1 + r2), Reifies s W) => BVar s (L r1 c) -> BVar s (L r2 c) -> BVar s (L (r1 + r2) c)
infixl 2 ===
splitRows :: forall p m n s. (KnownNat p, KnownNat m, KnownNat n, p <= m, Reifies s W) => BVar s (L m n) -> (BVar s (L p n), BVar s (L (m - p) n))
splitCols :: forall p m n s. (KnownNat p, KnownNat m, KnownNat n, KnownNat (n - p), p <= n, Reifies s W) => BVar s (L m n) -> (BVar s (L m p), BVar s (L m (n - p)))
unrow :: (KnownNat n, Reifies s W) => BVar s (L 1 n) -> BVar s (R n)
uncol :: (KnownNat n, Reifies s W) => BVar s (L n 1) -> BVar s (R n)
tr :: (Transposable m mt, Transposable mt m, Backprop m, Reifies s W) => BVar s m -> BVar s mt
eye :: KnownNat n => Sq n
diag :: (KnownNat n, Reifies s W) => BVar s (R n) -> BVar s (Sq n)

-- | Potentially extremely bad for anything but short lists!!!
matrix :: forall m n s. (KnownNat m, KnownNat n, Reifies s W) => [BVar s ℝ] -> BVar s (L m n)
type ℂ = Complex Double
data C (n :: Nat)
data M (m :: Nat) (n :: Nat)
𝑖 :: Sized ℂ s c => s

-- | Matrix product
(<>) :: (KnownNat m, KnownNat k, KnownNat n, Reifies s W) => BVar s (L m k) -> BVar s (L k n) -> BVar s (L m n)
infixr 8 <>

-- | Matrix-vector product
(#>) :: (KnownNat m, KnownNat n, Reifies s W) => BVar s (L m n) -> BVar s (R n) -> BVar s (R m)
infixr 8 #>

-- | Dot product
(<.>) :: (KnownNat n, Reifies s W) => BVar s (R n) -> BVar s (R n) -> BVar s ℝ
infixr 8 <.>

-- | Can only get the singular values, for now. Let me know if you find an
--   algorithm that can compute the gradients based on differentials for
--   the other matrices!
svd :: forall m n s. (KnownNat m, KnownNat n, Reifies s W) => BVar s (L m n) -> BVar s (R n)

-- | Version of <a>svd</a> that returns the full SVD, but if you attempt to
--   find the gradient, it will fail at runtime if you ever use U or V.
svd_ :: forall m n s. (KnownNat m, KnownNat n, Reifies s W) => BVar s (L m n) -> (BVar s (L m m), BVar s (R n), BVar s (L n n))
class Eigen m l v | m -> l, m -> v

-- | <i>NOTE</i> The gradient is not necessarily symmetric! The gradient is
--   not meant to be retrieved directly; instead, <a>eigenvalues</a> is
--   meant to be used as a part of a larger computation, and the gradient
--   as an intermediate step.
eigensystem :: forall n s. (KnownNat n, Reifies s W) => BVar s (Sym n) -> (BVar s (R n), BVar s (L n n))

-- | <i>NOTE</i> The gradient is not necessarily symmetric! The gradient is
--   not meant to be retrieved directly; instead, <a>eigenvalues</a> is
--   meant to be used as a part of a larger computation, and the gradient
--   as an intermediate step.
eigenvalues :: forall n s. (KnownNat n, Reifies s W) => BVar s (Sym n) -> BVar s (R n)

-- | Algorithm from <a>https://arxiv.org/abs/1602.07527</a>
--   
--   The paper also suggests a potential imperative algorithm that might
--   help. Need to benchmark to see what is best.
--   
--   <i>NOTE</i> The gradient is not necessarily symmetric! The gradient is
--   not meant to be retrieved directly; instead, <a>eigenvalues</a> is
--   meant to be used as a part of a larger computation, and the gradient
--   as an intermediate step.
chol :: forall n s. (KnownNat n, Reifies s W) => BVar s (Sym n) -> BVar s (Sq n)

-- | p-norm for vectors, operator norm for matrices
class Normed a

-- | Number of non-zero items
norm_0 :: (Normed a, Backprop a, Reifies s W) => BVar s a -> BVar s ℝ

-- | Sum of absolute values
norm_1V :: (KnownNat n, Reifies s W) => BVar s (R n) -> BVar s ℝ

-- | Maximum <a>norm_1</a> of columns
norm_1M :: (KnownNat n, KnownNat m, Reifies s W) => BVar s (L n m) -> BVar s ℝ

-- | Square root of sum of squares
--   
--   Be aware that gradient diverges when the norm is zero
norm_2V :: (KnownNat n, Reifies s W) => BVar s (R n) -> BVar s ℝ

-- | Maximum singular value
norm_2M :: (KnownNat n, KnownNat m, Reifies s W) => BVar s (L n m) -> BVar s ℝ

-- | Maximum absolute value
norm_InfV :: (KnownNat n, Reifies s W) => BVar s (R n) -> BVar s ℝ

-- | Maximum <a>norm_1</a> of rows
norm_InfM :: (KnownNat n, KnownNat m, Reifies s W) => BVar s (L n m) -> BVar s ℝ
mean :: (KnownNat n, 1 <= n, Reifies s W) => BVar s (R n) -> BVar s ℝ

-- | Mean and covariance. If you know you only want to use one or the
--   other, use <a>meanL</a> or <a>cov</a>.
meanCov :: forall m n s. (KnownNat n, KnownNat m, 1 <= m, Reifies s W) => BVar s (L m n) -> (BVar s (R n), BVar s (Sym n))

-- | <a>meanCov</a>, but if you know you won't use the covariance.
meanL :: forall m n s. (KnownNat n, KnownNat m, 1 <= m, Reifies s W) => BVar s (L m n) -> BVar s (R n)

-- | <a>meanCov</a>, but if you know you won't use the mean.
cov :: forall m n s. (KnownNat n, KnownNat m, 1 <= m, Reifies s W) => BVar s (L m n) -> BVar s (Sym n)
class Disp t
disp :: Disp t => Int -> t -> IO ()
class Domain field (vec :: Nat -> *) (mat :: Nat -> Nat -> *) | mat -> vec field, vec -> mat field, field -> mat vec
mul :: (KnownNat m, KnownNat k, KnownNat n, Domain field vec mat, Backprop (mat m k), Backprop (mat k n), Transposable (mat m k) (mat k m), Transposable (mat k n) (mat n k), Reifies s W) => BVar s (mat m k) -> BVar s (mat k n) -> BVar s (mat m n)
app :: (KnownNat m, KnownNat n, Domain field vec mat, Transposable (mat m n) (mat n m), Backprop (mat m n), Backprop (vec n), Reifies s W) => BVar s (mat m n) -> BVar s (vec n) -> BVar s (vec m)
dot :: (KnownNat n, Domain field vec mat, Sized field (vec n) d, Num (vec n), Backprop (vec n), Reifies s W) => BVar s (vec n) -> BVar s (vec n) -> BVar s field
cross :: (Domain field vec mat, Reifies s W, Backprop (vec 3)) => BVar s (vec 3) -> BVar s (vec 3) -> BVar s (vec 3)

-- | Create matrix with diagonal, and fill with default entries
diagR :: forall m n k field vec mat s. (Domain field vec mat, Num (vec k), Num (mat m n), KnownNat m, KnownNat n, KnownNat k, Container Vector field, Sized field (mat m n) Matrix, Sized field (vec k) Vector, Backprop field, Backprop (vec k), Reifies s W) => BVar s field -> BVar s (vec k) -> BVar s (mat m n)

-- | Note: if possible, use the potentially much more performant
--   <a>vmap'</a>.
vmap :: (KnownNat n, Reifies s W) => (BVar s ℝ -> BVar s ℝ) -> BVar s (R n) -> BVar s (R n)

-- | <a>vmap</a>, but potentially more performant. Only usable if the
--   mapped function does not depend on any external <a>BVar</a>s.
vmap' :: (Num (vec n), Storable field, Sized field (vec n) Vector, Backprop (vec n), Backprop field, Reifies s W) => (forall s'. Reifies s' W => BVar s' field -> BVar s' field) -> BVar s (vec n) -> BVar s (vec n)

-- | Note: Potentially less performant than <a>vmap'</a>.
dvmap :: (KnownNat n, Domain field vec mat, Num (vec n), Backprop (vec n), Backprop field, Reifies s W) => (forall s'. Reifies s' W => BVar s' field -> BVar s' field) -> BVar s (vec n) -> BVar s (vec n)

-- | Note: if possible, use the potentially much more performant
--   <a>mmap'</a>.
mmap :: (KnownNat n, KnownNat m, Reifies s W) => (BVar s ℝ -> BVar s ℝ) -> BVar s (L n m) -> BVar s (L n m)

-- | <a>mmap</a>, but potentially more performant. Only usable if the
--   mapped function does not depend on any external <a>BVar</a>s.
mmap' :: forall n m mat field s. (KnownNat m, Num (mat n m), Backprop (mat n m), Backprop field, Sized field (mat n m) Matrix, Element field, Reifies s W) => (forall s'. Reifies s' W => BVar s' field -> BVar s' field) -> BVar s (mat n m) -> BVar s (mat n m)

-- | Note: Potentially less performant than <a>mmap'</a>.
dmmap :: (KnownNat n, KnownNat m, Domain field vec mat, Num (mat n m), Backprop (mat n m), Backprop field, Reifies s W) => (forall s'. Reifies s' W => BVar s' field -> BVar s' field) -> BVar s (mat n m) -> BVar s (mat n m)
outer :: (KnownNat m, KnownNat n, Domain field vec mat, Transposable (mat n m) (mat m n), Backprop (vec n), Backprop (vec m), Reifies s W) => BVar s (vec n) -> BVar s (vec m) -> BVar s (mat n m)

-- | Note: if possible, use the potentially much more performant
--   <a>zipWithVector'</a>.
zipWithVector :: (KnownNat n, Reifies s W) => (BVar s ℝ -> BVar s ℝ -> BVar s ℝ) -> BVar s (R n) -> BVar s (R n) -> BVar s (R n)
zipWithVector' :: (Num (vec n), Backprop (vec n), Storable field, Backprop field, Sized field (vec n) Vector, Reifies s W) => (forall s'. Reifies s' W => BVar s' field -> BVar s' field -> BVar s' field) -> BVar s (vec n) -> BVar s (vec n) -> BVar s (vec n)

-- | A version of <a>zipWithVector'</a> that is potentially less performant
--   but is based on <a>zipWithVector</a> from <a>Domain</a>.
dzipWithVector :: (KnownNat n, Domain field vec mat, Num (vec n), Backprop (vec n), Backprop field, Reifies s W) => (forall s'. Reifies s' W => BVar s' field -> BVar s' field -> BVar s' field) -> BVar s (vec n) -> BVar s (vec n) -> BVar s (vec n)
det :: (KnownNat n, Num (mat n n), Backprop (mat n n), Domain field vec mat, Sized field (mat n n) d, Transposable (mat n n) (mat n n), Reifies s W) => BVar s (mat n n) -> BVar s field

-- | The inverse and the natural log of the determinant together. If you
--   know you don't need the inverse, it is best to use <a>lndet</a>.
invlndet :: forall n mat field vec d s. (KnownNat n, Num (mat n n), Domain field vec mat, Sized field (mat n n) d, Transposable (mat n n) (mat n n), Backprop field, Backprop (mat n n), Reifies s W) => BVar s (mat n n) -> (BVar s (mat n n), (BVar s field, BVar s field))

-- | The natural log of the determinant.
lndet :: forall n mat field vec d s. (KnownNat n, Num (mat n n), Backprop (mat n n), Domain field vec mat, Sized field (mat n n) d, Transposable (mat n n) (mat n n), Reifies s W) => BVar s (mat n n) -> BVar s field
inv :: (KnownNat n, Num (mat n n), Backprop (mat n n), Domain field vec mat, Transposable (mat n n) (mat n n), Reifies s W) => BVar s (mat n n) -> BVar s (mat n n)
toRows :: forall m n s. (KnownNat m, KnownNat n, Reifies s W) => BVar s (L m n) -> Vector m (BVar s (R n))
toColumns :: forall m n s. (KnownNat m, KnownNat n, Reifies s W) => BVar s (L m n) -> Vector n (BVar s (R m))
fromRows :: forall m n s. (KnownNat m, Reifies s W) => Vector m (BVar s (R n)) -> BVar s (L m n)
fromColumns :: forall m n s. (KnownNat n, Reifies s W) => Vector n (BVar s (R m)) -> BVar s (L m n)
konst :: forall t s d q. (Sized t s d, Container d t, Backprop t, Reifies q W) => BVar q t -> BVar q s
sumElements :: forall t s d q. (Sized t s d, Container d t, Backprop s, Reifies q W) => BVar q s -> BVar q t

-- | If there are extra items in the total derivative, they are dropped. If
--   there are missing items, they are treated as zero.
extractV :: forall t s q. (Sized t s Vector, Konst t Int Vector, Container Vector t, Backprop s, Reifies q W) => BVar q s -> BVar q (Vector t)

-- | If there are extra items in the total derivative, they are dropped. If
--   there are missing items, they are treated as zero.
extractM :: forall t s q. (Sized t s Matrix, Backprop s, Konst t (Int, Int) Matrix, Container Matrix t, Num (Matrix t), Reifies q W) => BVar q s -> BVar q (Matrix t)
create :: (Sized t s d, Backprop s, Num (d t), Backprop (d t), Reifies q W) => BVar q (d t) -> Maybe (BVar q s)
class Diag m d | m -> d
takeDiag :: (KnownNat n, Diag (mat n n) (vec n), Domain field vec mat, Num field, Backprop (mat n n), Reifies s W) => BVar s (mat n n) -> BVar s (vec n)
data Sym (n :: Nat)

-- | &lt;math&gt;
sym :: (KnownNat n, Reifies s W) => BVar s (Sq n) -> BVar s (Sym n)

-- | &lt;math&gt;
mTm :: (KnownNat m, KnownNat n, Reifies s W) => BVar s (L m n) -> BVar s (Sym n)

-- | Warning: the gradient is not necessarily symmetric, and so is
--   <i>not</i> meant to be used directly. Rather, it is meant to be used
--   in the middle (or at the end) of a longer computation.
unSym :: (KnownNat n, Reifies s W) => BVar s (Sym n) -> BVar s (Sq n)

-- | Unicode synonym for <tt>&lt;.&gt;</tt>
(<·>) :: (KnownNat n, Reifies s W) => BVar s (R n) -> BVar s (R n) -> BVar s ℝ
infixr 8 <·>

-- | A <tt><a>BVar</a> s a</tt> is a value of type <tt>a</tt> that can be
--   "backpropagated".
--   
--   Functions referring to <a>BVar</a>s are tracked by the library and can
--   be automatically differentiated to get their gradients and results.
--   
--   For simple numeric values, you can use its <a>Num</a>,
--   <a>Fractional</a>, and <a>Floating</a> instances to manipulate them as
--   if they were the numbers they represent.
--   
--   If <tt>a</tt> contains items, the items can be accessed and extracted
--   using lenses. A <tt><a>Lens'</a> b a</tt> can be used to access an
--   <tt>a</tt> inside a <tt>b</tt>, using <tt>^^.</tt> (<a>viewVar</a>):
--   
--   <pre>
--   (<a>^.</a>)  ::        a -&gt; <a>Lens'</a> a b -&gt;        b
--   (<tt>^^.</tt>) :: <a>BVar</a> s a -&gt; <a>Lens'</a> a b -&gt; <a>BVar</a> s b
--   </pre>
--   
--   There is also <tt>^^?</tt> (<a>previewVar</a>), to use a
--   <tt>Prism'</tt> or <a>Traversal'</a> to extract a target that may or
--   may not be present (which can implement pattern matching),
--   <tt>^^..</tt> (<a>toListOfVar</a>) to use a <a>Traversal'</a> to
--   extract <i>all</i> targets inside a <a>BVar</a>, and <tt>.~~</tt>
--   (<a>setVar</a>) to set and update values inside a <a>BVar</a>.
--   
--   If you have control over your data type definitions, you can also use
--   <a>splitBV</a> and <a>joinBV</a> to manipulate data types by easily
--   extracting fields out of a <a>BVar</a> of data types and creating
--   <a>BVar</a>s of data types out of <a>BVar</a>s of their fields. See
--   <a>Numeric.Backprop#hkd</a> for a tutorial on this use pattern.
--   
--   For more complex operations, libraries can provide functions on
--   <a>BVar</a>s using <a>liftOp</a> and related functions. This is how
--   you can create primitive functions that users can use to manipulate
--   your library's values. See
--   <a>https://backprop.jle.im/08-equipping-your-library.html</a> for a
--   detailed guide.
--   
--   For example, the <i>hmatrix</i> library has a matrix-vector
--   multiplication function, <tt>#&gt; :: L m n -&gt; R n -&gt; R m</tt>.
--   
--   A library could instead provide a function <tt>#&gt; :: <a>BVar</a> (L
--   m n) -&gt; BVar (R n) -&gt; BVar (R m)</tt>, which the user can then
--   use to manipulate their <a>BVar</a>s of <tt>L m n</tt>s and <tt>R
--   n</tt>s, etc.
--   
--   See <a>Numeric.Backprop#liftops</a> and documentation for
--   <a>liftOp</a> for more information.
data BVar s a

-- | Class of values that can be backpropagated in general.
--   
--   For instances of <a>Num</a>, these methods can be given by
--   <a>zeroNum</a>, <a>addNum</a>, and <a>oneNum</a>. There are also
--   generic options given in <a>Numeric.Backprop.Class</a> for functors,
--   <a>IsList</a> instances, and <a>Generic</a> instances.
--   
--   <pre>
--   instance <a>Backprop</a> <a>Double</a> where
--       <a>zero</a> = <a>zeroNum</a>
--       <a>add</a> = <a>addNum</a>
--       <a>one</a> = <a>oneNum</a>
--   </pre>
--   
--   If you leave the body of an instance declaration blank, GHC Generics
--   will be used to derive instances if the type has a single constructor
--   and each field is an instance of <a>Backprop</a>.
--   
--   To ensure that backpropagation works in a sound way, instances should
--   laws:
--   
--   <ul>
--   <li><i><i>identity</i></i></li>
--   </ul>
--   
--   <ul>
--   <li><pre><a>add</a> x (<a>zero</a> y) = x</pre></li>
--   <li><pre><a>add</a> (<a>zero</a> x) y = y</pre></li>
--   </ul>
--   
--   Also implies preservation of information, making <tt><a>zipWith</a>
--   (<a>+</a>)</tt> an illegal implementation for lists and vectors.
--   
--   This is only expected to be true up to potential "extra zeroes" in
--   <tt>x</tt> and <tt>y</tt> in the result.
--   
--   <ul>
--   <li><i><i>commutativity</i></i></li>
--   </ul>
--   
--   <ul>
--   <li><pre><a>add</a> x y = <a>add</a> y x</pre></li>
--   </ul>
--   
--   <ul>
--   <li><i><i>associativity</i></i></li>
--   </ul>
--   
--   <ul>
--   <li><pre><a>add</a> x (<a>add</a> y z) = <a>add</a> (<a>add</a> x y)
--   z</pre></li>
--   </ul>
--   
--   <ul>
--   <li><i><i>idempotence</i></i></li>
--   </ul>
--   
--   <ul>
--   <li><pre><a>zero</a> <a>.</a> <a>zero</a> = <a>zero</a></pre></li>
--   <li><pre><a>one</a> <a>.</a> <a>one</a> = <a>one</a></pre></li>
--   </ul>
--   
--   <ul>
--   <li><i><i>unital</i></i></li>
--   </ul>
--   
--   <ul>
--   <li><pre><a>one</a> = <tt>gradBP</tt> <a>id</a></pre></li>
--   </ul>
--   
--   Note that not all values in the backpropagation process needs all of
--   these methods: Only the "final result" needs <a>one</a>, for example.
--   These are all grouped under one typeclass for convenience in defining
--   instances, and also to talk about sensible laws. For fine-grained
--   control, use the "explicit" versions of library functions (for
--   example, in <a>Numeric.Backprop.Explicit</a>) instead of
--   <a>Backprop</a> based ones.
--   
--   This typeclass replaces the reliance on <a>Num</a> of the previous API
--   (v0.1). <a>Num</a> is strictly more powerful than <a>Backprop</a>, and
--   is a stronger constraint on types than is necessary for proper
--   backpropagating. In particular, <a>fromInteger</a> is a problem for
--   many types, preventing useful backpropagation for lists,
--   variable-length vectors (like <a>Data.Vector</a>) and variable-size
--   matrices from linear algebra libraries like <i>hmatrix</i> and
--   <i>accelerate</i>.
class Backprop a
class Reifies (s :: k) a | s -> a

-- | An ephemeral Wengert Tape in the environment. Used internally to keep
--   track of the computational graph of variables.
--   
--   For the end user, one can just imagine <tt><a>Reifies</a> s
--   <a>W</a></tt> as a required constraint on <tt>s</tt> that allows
--   backpropagation to work.
data W
instance Numeric.Backprop.Class.Backprop (Internal.Static.R n)
instance Numeric.Backprop.Class.Backprop (Internal.Static.C n)
instance (GHC.TypeNats.KnownNat n, GHC.TypeNats.KnownNat m) => Numeric.Backprop.Class.Backprop (Internal.Static.L n m)
instance (GHC.TypeNats.KnownNat n, GHC.TypeNats.KnownNat m) => Numeric.Backprop.Class.Backprop (Internal.Static.M n m)
instance GHC.TypeNats.KnownNat n => Numeric.Backprop.Class.Backprop (Numeric.LinearAlgebra.Static.Sym n)
