-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/


-- | LRU cache
--   
--   Please see README.md
@package lrucaching
@version 0.3.3


-- | This module contains internal datastructures. No guarantees are made
--   as to the stability of this module and violating invariants can result
--   in unspecified behavior.
module Data.LruCache.Internal

-- | LRU cache based on hashing.
data LruCache k v
LruCache :: !Int -> !Int -> !Priority -> !(HashPSQ k Priority v) -> LruCache k v

-- | The maximum number of elements in the queue
[lruCapacity] :: LruCache k v -> !Int

-- | The current number of elements in the queue
[lruSize] :: LruCache k v -> !Int

-- | The next logical time
[lruTick] :: LruCache k v -> !Priority

-- | Underlying priority queue
[lruQueue] :: LruCache k v -> !(HashPSQ k Priority v)

-- | Logical time at which an element was last accessed.
type Priority = Int64
instance Data.Traversable.Traversable (Data.LruCache.Internal.LruCache k)
instance Data.Foldable.Foldable (Data.LruCache.Internal.LruCache k)
instance GHC.Base.Functor (Data.LruCache.Internal.LruCache k)
instance (GHC.Show.Show v, GHC.Show.Show k) => GHC.Show.Show (Data.LruCache.Internal.LruCache k v)
instance (GHC.Classes.Ord k, Data.Hashable.Class.Hashable k, GHC.Classes.Eq v) => GHC.Classes.Eq (Data.LruCache.Internal.LruCache k v)
instance (Control.DeepSeq.NFData k, Control.DeepSeq.NFData v) => Control.DeepSeq.NFData (Data.LruCache.Internal.LruCache k v)


-- | Pure API to an LRU cache.
module Data.LruCache

-- | LRU cache based on hashing.
data LruCache k v

-- | Logical time at which an element was last accessed.
type Priority = Int64

-- | Create an empty <a>LruCache</a> of the given size.
empty :: Int -> LruCache k v

-- | Insert an element into the <a>LruCache</a>.
insert :: (Hashable k, Ord k) => k -> v -> LruCache k v -> LruCache k v

-- | Insert an element into the <a>LruCache</a> returning the evicted
--   element if any.
--   
--   When the logical clock reaches its maximum value and all values are
--   evicted, <a>Nothing</a> is returned.
insertView :: (Hashable k, Ord k) => k -> v -> LruCache k v -> (Maybe (k, v), LruCache k v)

-- | Lookup an element in an <a>LruCache</a> and mark it as the most
--   recently accessed.
lookup :: (Hashable k, Ord k) => k -> LruCache k v -> Maybe (v, LruCache k v)


-- | Convenience module for the common case of caching results of IO
--   actions when finalizers have to be run when cache entries are evicted.
module Data.LruCache.IO.Finalizer

-- | Store an LRU cache in an <a>IORef</a> to be able to conveniently
--   update it.
newtype LruHandle k v
LruHandle :: (IORef (LruCache k (v, v -> IO ()))) -> LruHandle k v

-- | Create a new LRU cache of the given size.
newLruHandle :: Int -> IO (LruHandle k v)

-- | Return the cached result of the action or, in the case of a cache
--   miss, execute the action and insert it in the cache.
cached :: (Hashable k, Ord k) => LruHandle k v -> k -> IO v -> (v -> IO ()) -> IO v

-- | Using a stripe of multiple handles can improve the performance in the
--   case of concurrent accesses since several handles can be accessed in
--   parallel.
newtype StripedLruHandle k v
StripedLruHandle :: (Vector (LruHandle k v)) -> StripedLruHandle k v

-- | Create a new <tt>StripedLruHandle</tt> with the given number of
--   stripes and the given capacity for each stripe.
newStripedLruHandle :: Int -> Int -> IO (StripedLruHandle k v)

-- | Striped version of <a>cached</a>.
stripedCached :: (Hashable k, Ord k) => StripedLruHandle k v -> k -> IO v -> (v -> IO ()) -> IO v


-- | Convenience module for the common case of caching results of IO
--   actions. See <a>Finalizer</a> if you want to run finalizers
--   automatically when cache entries are evicted.
module Data.LruCache.IO

-- | Store an LRU cache in an <a>IORef</a> to be able to conveniently
--   update it.
newtype LruHandle k v
LruHandle :: (IORef (LruCache k v)) -> LruHandle k v

-- | Return the cached result of the action or, in the case of a cache
--   miss, execute the action and insert it in the cache.
cached :: (Hashable k, Ord k) => LruHandle k v -> k -> IO v -> IO v

-- | Create a new LRU cache of the given size.
newLruHandle :: Int -> IO (LruHandle k v)

-- | Using a stripe of multiple handles can improve the performance in the
--   case of concurrent accesses since several handles can be accessed in
--   parallel.
newtype StripedLruHandle k v
StripedLruHandle :: (Vector (LruHandle k v)) -> StripedLruHandle k v

-- | Striped version of <a>cached</a>.
stripedCached :: (Hashable k, Ord k) => StripedLruHandle k v -> k -> IO v -> IO v

-- | Create a new <tt>StripedLruHandle</tt> with the given number of
--   stripes and the given capacity for each stripe.
newStripedLruHandle :: Int -> Int -> IO (StripedLruHandle k v)
