
Commit

Merge pull request #526 from IntersectMBO/jdral/newline-type-sigs
Do not put `::` on a newline
jorisdral authored Jan 14, 2025
2 parents 4cfbf9b + 2882d50 commit abb934d
Showing 21 changed files with 96 additions and 96 deletions.
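Every hunk below applies the same mechanical change to a type signature. A minimal, compilable sketch of the two styles (made-up function name `addBatch`, indentation approximated to match the diff):

module NewlineSigsExample where

-- Old style, removed by this PR: the signature's `::` opens the second line.
addBatchOld
  :: Int -- ^ batch size
  -> Int -- ^ batch number
  -> Int
addBatchOld size n = size * n

-- New style, introduced by this PR: the `::` stays on the line with the name.
addBatchNew ::
     Int -- ^ batch size
  -> Int -- ^ batch number
  -> Int
addBatchNew size n = size * n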
8 changes: 4 additions & 4 deletions bench/macro/lsm-tree-bench-lookups.hs
@@ -425,8 +425,8 @@ benchBloomQueries !bs !keyRng !n
 
 -- | This gives us the combined cost of calculating batches of keys, performing
 -- bloom queries for each batch, and performing index searches for each batch.
-benchIndexSearches
-  :: ArenaManager RealWorld
+benchIndexSearches ::
+     ArenaManager RealWorld
   -> V.Vector (Bloom SerialisedKey)
   -> V.Vector IndexCompact
   -> V.Vector (FS.Handle h)
@@ -443,8 +443,8 @@ benchIndexSearches !arenaManager !bs !ics !hs !keyRng !n
 
 -- | This gives us the combined cost of calculating batches of keys, and
 -- preparing lookups for each batch.
-benchPrepLookups
-  :: ArenaManager RealWorld
+benchPrepLookups ::
+     ArenaManager RealWorld
   -> V.Vector (Bloom SerialisedKey)
   -> V.Vector IndexCompact
   -> V.Vector (FS.Handle h)

8 changes: 4 additions & 4 deletions bench/macro/lsm-tree-bench-wp8.hs
@@ -498,8 +498,8 @@ initGen initialSize batchSize batchCount seed =
 -- Batch generation
 -------------------------------------------------------------------------------
 
-generateBatch
-  :: Int -- ^ initial size of the collection
+generateBatch ::
+     Int -- ^ initial size of the collection
   -> Int -- ^ batch size
   -> MCG.MCG -- ^ generator
   -> Int -- ^ batch number
@@ -522,8 +522,8 @@ We could also make it exact, but then we'll need to carry some state around
 -}
 {-# INLINE generateBatch' #-}
-generateBatch'
-  :: Int -- ^ initial size of the collection
+generateBatch' ::
+     Int -- ^ initial size of the collection
   -> Int -- ^ batch size
   -> MCG.MCG -- ^ generator
   -> Int -- ^ batch number

4 changes: 2 additions & 2 deletions bench/macro/rocksdb-bench-wp8.hs
@@ -295,8 +295,8 @@ We could also make it exact, but then we'll need to carry some state around
 (at least the difference).
 -}
-generateBatch
-  :: Int -- ^ initial size of the collection
+generateBatch ::
+     Int -- ^ initial size of the collection
   -> Int -- ^ batch size
   -> MCG.MCG -- ^ generator
   -> Int -- ^ batch number

8 changes: 4 additions & 4 deletions bloomfilter/src/Data/BloomFilter/Calc.hs
@@ -17,8 +17,8 @@ import Numeric (expm1)
 -- >>> [ showFFloat (Just 5) (falsePositiveProb 10_000 100_000 k) "" | k <- [1..5] ]
 -- ["0.09516","0.03286","0.01741","0.01181","0.00943"]
 --
-falsePositiveProb
-  :: Double -- ^ /n/, number of elements
+falsePositiveProb ::
+     Double -- ^ /n/, number of elements
   -> Double -- ^ /m/, size of bloom filter
   -> Double -- ^ /k/, number of hash functions
   -> Double
@@ -28,8 +28,8 @@ falsePositiveProb n m k =
 
 -- | Filter size for given number of elements, false positive rate and
 -- number of hash functions.
-filterSize
-  :: Double -- ^ /n/, number of elements
+filterSize ::
+     Double -- ^ /n/, number of elements
   -> Double -- ^ /e/, false positive rate
   -> Double -- ^ /k/, number of hash functions
   -> Double

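The doctest values above agree with the textbook Bloom-filter approximation \((1 - e^{-kn/m})^k\). A standalone sketch of that formula (my own names, not the library code, which imports expm1 and presumably evaluates it more carefully):

-- Textbook approximation of the false positive probability of a Bloom filter
-- with n elements, m bits and k hash functions. For n = 10000, m = 100000,
-- k = 1 this gives ~0.09516, matching the first doctest value above.
approxFalsePositiveProb :: Double -> Double -> Double -> Double
approxFalsePositiveProb n m k = (1 - exp (negate (k * n / m))) ** k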
4 changes: 2 additions & 2 deletions bloomfilter/src/Data/BloomFilter/Easy.hs
@@ -86,8 +86,8 @@ easyNew errRate capacity = MB.new numHashes numBits
 -- >>> safeSuggestSizing 10000 0.01
 -- Right (99317,7)
 --
-safeSuggestSizing
-  :: Int -- ^ expected maximum capacity
+safeSuggestSizing ::
+     Int -- ^ expected maximum capacity
   -> Double -- ^ desired false positive rate (0 < /e/ < 1)
   -> Either String (Word64, Int)
 safeSuggestSizing (fromIntegral -> capacity) errRate

4 changes: 2 additions & 2 deletions bloomfilter/src/Data/BloomFilter/Hash.hs
@@ -50,8 +50,8 @@ type Hash = Word64
 --
 class Hashable a where
     -- | Compute a 64-bit hash of a value.
-    hashSalt64
-      :: Word64 -- ^ seed
+    hashSalt64 ::
+         Word64 -- ^ seed
       -> a -- ^ value to hash
      -> Word64
 

4 changes: 2 additions & 2 deletions src-mcg/MCG.hs
@@ -30,8 +30,8 @@ data MCG = MCG { m :: !Word64, a :: !Word64, x :: !Word64 }
 -- >>> make 101_000_000 20240429
 -- MCG {m = 101000023, a = 197265, x = 20240430}
 --
-make
-  :: Word64 -- ^ a lower bound for the period
+make ::
+     Word64 -- ^ a lower bound for the period
   -> Word64 -- ^ initial seed.
   -> MCG
 make (max 4 -> period_) seed = MCG m a (mod (seed + 1) m)

32 changes: 16 additions & 16 deletions src-monkey/Monkey.hs
@@ -89,8 +89,8 @@ import Numeric.AD (Mode, Scalar, auto, conjugateGradientDescent)
 -- >>> nonZeroResultCost t ps
 -- 1.06551
 --
-constantBits
-  :: Integer -- ^ total memory (in bits)
+constantBits ::
+     Integer -- ^ total memory (in bits)
   -> Integer -- ^ total elements
   -> Integer -- ^ T: size ratio
   -> Integer -- ^ L: level count
@@ -159,8 +159,8 @@ constantBits m_max n t l =
 -- >>> nonZeroResultCost t ps
 -- 1.03575
 --
-monkeyBits
-  :: Integer -- ^ total memory (in bits)
+monkeyBits ::
+     Integer -- ^ total memory (in bits)
   -> Integer -- ^ total elements
   -> Integer -- ^ T: size ratio
   -> Integer -- ^ L: level count
@@ -181,8 +181,8 @@ monkeyBits m_max n t l
 -- For now we use conjugate gradient descent instead of an analytical solution.
 -- Iterative version is easier to tweak.
 --
-monkeyImpl
-  :: Double
+monkeyImpl ::
+     Double
   -> Double
   -> [(Double,Double)]
   -> [Double]
@@ -199,8 +199,8 @@ monkeyImpl m_max t ((entries0, _bits0) : initial) =
     ]
 
 -- | Calculate how many buffers we can fit given size ratio T and level count L.
-runsMultiplies
-  :: Integer -- ^ T: size ratio
+runsMultiplies ::
+     Integer -- ^ T: size ratio
   -> Integer -- ^ L: level count
   -> Integer
 runsMultiplies t l = t ^ l -1
@@ -241,8 +241,8 @@ numLevels n m t
 -- Assumes that the bloom filter uses 'numHashFunctions' hash functions.
 --
 -- Equation 2.
-falsePositiveRate
-  :: Floating a
+falsePositiveRate ::
+     Floating a
   => a -- ^ entries
   -> a -- ^ bits
   -> a
@@ -278,8 +278,8 @@ numHashFunctions nbits nentries = truncate @Double $ max 1 $
     (fromIntegral nbits / fromIntegral nentries) * log 2
 
 -- | Worst-Case Zero-result Lookup Cost (equation 3).
-zeroResultCost
-  :: Floating t
+zeroResultCost ::
+     Floating t
   => t -- ^ T: Size ratio
   -> [t] -- ^ \(p_i)\
   -> t
@@ -292,8 +292,8 @@ zeroResultCost t ps =
 -- In monoidal setting we might need to read more than one run, if initial lookups are upserts.
 -- (We need another parameter: a ratio of inserts and upserts).
 --
-nonZeroResultCost
-  :: Floating t
+nonZeroResultCost ::
+     Floating t
   => t -- ^ T: Size ratio
   -> [t] -- ^ \(p_i\)
   -> t
@@ -302,8 +302,8 @@ nonZeroResultCost t ps =
 
 -- | Main memory footprint (equation 4).
 --
-totalMemory
-  :: Floating t
+totalMemory ::
+     Floating t
   => t -- ^ N: total entries
   -> t -- ^ T: size ratio
   -> [t] -- ^ \(p_i)\

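For reference, the numHashFunctions context lines above encode the usual optimum k ≈ (m/n) · ln 2, truncated and clamped to at least 1. A self-contained restatement of just that line (names are mine, not the module's):

-- k = max 1 (truncate ((m/n) * ln 2)), mirroring the numHashFunctions hunk
-- above; e.g. 96000 bits over 10000 entries gives k = 6.
optimalHashFunctions :: Integer -> Integer -> Integer
optimalHashFunctions nbits nentries =
    truncate (max 1 ((fromIntegral nbits / fromIntegral nentries) * log 2) :: Double)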
4 changes: 2 additions & 2 deletions src/Data/Map/Range.hs
@@ -25,8 +25,8 @@ pattern BoundInclusive k = Bound k Inclusive
 -- | Find all the keys in the given range and return the corresponding
 -- (key, value) pairs (in ascending order).
 --
-rangeLookup
-  :: forall k v. Ord k
+rangeLookup ::
+     forall k v. Ord k
   => Bound k -- ^ lower bound
   -> Bound k -- ^ upper bound
   -> Map k v

16 changes: 8 additions & 8 deletions src/Database/LSMTree/Internal/BloomFilterQuery2.hs
@@ -219,8 +219,8 @@ prepKeyHashes keys =
     P.generatePrimArray (V.length keys) $ \i ->
       Bloom.makeCheapHashes (V.unsafeIndex keys i)
 
-prepInitialCandidateProbes
-  :: P.StrictArray (Bloom SerialisedKey)
+prepInitialCandidateProbes ::
+     P.StrictArray (Bloom SerialisedKey)
   -> P.PrimArray (Bloom.CheapHashes SerialisedKey)
      -- ^ The pre-computed \"cheap hashes\" of the keys.
   -> P.MutablePrimArray s CandidateProbe
@@ -262,8 +262,8 @@ prepInitialCandidateProbes
 
 
 {-# NOINLINE bloomQueriesBody #-}
-bloomQueriesBody
-  :: forall s.
+bloomQueriesBody ::
+     forall s.
      P.StrictArray (Bloom SerialisedKey)
   -> P.PrimArray (Bloom.CheapHashes SerialisedKey)
      -- ^ The pre-computed \"cheap hashes\" of the keys.
@@ -286,8 +286,8 @@ bloomQueriesBody !filters !keyhashes !candidateProbes =
 {-# INLINE prepGivenCandidateProbe #-}
 
 -- assume buff size of 0x20, so mask of 0x1f
-testCandidateProbe, prepNextCandidateProbe
-  :: Int -> Int
+testCandidateProbe, prepNextCandidateProbe ::
+     Int -> Int
      -- ^ The read and write indexes within the rolling buffer. These wrap
      -- around.
   -> RunIxKeyIx
@@ -374,8 +374,8 @@ bloomQueriesBody !filters !keyhashes !candidateProbes =
   | otherwise =
       P.resizeMutablePrimArray output outputix
 
-prepGivenCandidateProbe
-  :: Int -> Int
+prepGivenCandidateProbe ::
+     Int -> Int
   -> RunIxKeyIx
   -> P.MutablePrimArray s RunIxKeyIx
   -> Int

4 changes: 2 additions & 2 deletions src/Database/LSMTree/Internal/Lookup.hs
@@ -88,8 +88,8 @@ type RunIx = Int
 -- each search result. The resulting vector has the same length as the
 -- @VP.Vector RunIxKeyIx@ argument, because index searching always returns a
 -- positive search result.
-indexSearches
-  :: Arena s
+indexSearches ::
+     Arena s
   -> V.Vector IndexCompact
   -> V.Vector (Handle h)
   -> V.Vector SerialisedKey

4 changes: 2 additions & 2 deletions src/Database/LSMTree/Internal/PageAcc.hs
@@ -227,8 +227,8 @@ resetPageAccN PageAcc {..} !n = do
 
 -- | Add an entry to 'PageAcc'.
 --
-pageAccAddElem
-  :: PageAcc s
+pageAccAddElem ::
+     PageAcc s
   -> SerialisedKey
   -> Entry SerialisedValue BlobSpan
   -> ST s Bool -- ^ 'True' if value was successfully added.

4 changes: 2 additions & 2 deletions src/Database/LSMTree/Internal/PageAcc1.hs
@@ -20,8 +20,8 @@ pageSize = 4096
 {-# INLINE pageSize #-}
 
 -- | Create a singleton page, also returning the overflow value bytes.
-singletonPage
-  :: SerialisedKey
+singletonPage ::
+     SerialisedKey
   -> Entry SerialisedValue BlobSpan
   -> (RawPage, [RawOverflowPage])
 singletonPage k (Insert v) = runST $ do

12 changes: 6 additions & 6 deletions src/Database/LSMTree/Internal/RawOverflowPage.hs
@@ -77,8 +77,8 @@ rawOverflowPageToByteString =
 -- This function will copy data if the byte array is not pinned, or the length
 -- is strictly less than 4096.
 --
-makeRawOverflowPage
-  :: ByteArray -- ^ bytearray
+makeRawOverflowPage ::
+     ByteArray -- ^ bytearray
   -> Int -- ^ offset in bytes into the bytearray
   -> Int -- ^ length in bytes, must be @>= 0 && <= 4096@
   -> RawOverflowPage
@@ -91,8 +91,8 @@ makeRawOverflowPage ba off len
     | otherwise
     = makeRawOverflowPageCopy ba off len
 
-makeRawOverflowPageCopy
-  :: ByteArray -- ^ bytearray
+makeRawOverflowPageCopy ::
+     ByteArray -- ^ bytearray
   -> Int -- ^ offset in bytes into the bytearray
   -> Int -- ^ length in bytes
   -> RawOverflowPage
@@ -109,8 +109,8 @@ makeRawOverflowPageCopy ba off len =
 -- | Create a 'RawOverflowPage' without copying. The byte array and offset must
 -- satisfy the invariant for 'RawOverflowPage'.
 --
-unsafeMakeRawOverflowPage
-  :: ByteArray -- ^ bytearray, must be pinned and contain 4096 bytes (after offset)
+unsafeMakeRawOverflowPage ::
+     ByteArray -- ^ bytearray, must be pinned and contain 4096 bytes (after offset)
   -> Int -- ^ offset in bytes
   -> RawOverflowPage
 unsafeMakeRawOverflowPage ba off =

28 changes: 14 additions & 14 deletions src/Database/LSMTree/Internal/RawPage.hs
@@ -90,8 +90,8 @@ instance Eq RawPage where
 --
 -- This function may copy data to satisfy internal 'RawPage' invariants.
 -- Use 'unsafeMakeRawPage' if you don't want copy.
-makeRawPage
-  :: ByteArray -- ^ bytearray, must contain 4096 bytes (after offset)
+makeRawPage ::
+     ByteArray -- ^ bytearray, must contain 4096 bytes (after offset)
   -> Int -- ^ offset in bytes, must be 8 byte aligned.
   -> RawPage
 makeRawPage ba off
@@ -105,8 +105,8 @@ makeRawPage ba off
     page = RawPage (div2 off) ba
     clamp l u x = max l (min u x)
 
-unsafeMakeRawPage
-  :: ByteArray -- ^ bytearray, must be pinned and contain 4096 bytes (after offset)
+unsafeMakeRawPage ::
+     ByteArray -- ^ bytearray, must be pinned and contain 4096 bytes (after offset)
   -> Int -- ^ offset in bytes, must be 8 byte aligned.
   -> RawPage
 unsafeMakeRawPage ba off = assert (invariant page) page
@@ -142,8 +142,8 @@ data RawPageLookup entry =
 --
 -- Return the 'Entry' corresponding to the supplied 'SerialisedKey' if it exists
 -- within the 'RawPage'.
-rawPageLookup
-  :: RawPage
+rawPageLookup ::
+     RawPage
   -> SerialisedKey
   -> RawPageLookup (Entry SerialisedValue BlobSpan)
 rawPageLookup !page !key
@@ -178,8 +178,8 @@ rawPageLookup !page !key
 -- * maybe True (key > ) (getRawPageIndexKey . rawPageIndex page . pred =<< rawPageFindKey page key)
 --
 -- * maybe (maximum (rawPageKeys page) < key) (rawPageFindKey page key)
-rawPageFindKey
-  :: RawPage
+rawPageFindKey ::
+     RawPage
   -> SerialisedKey
   -> Maybe Word16 -- ^ entry number of first entry greater or equal to the key
 rawPageFindKey !page !key
@@ -205,8 +205,8 @@ data BinarySearchResult
 
 -- | Binary search procedure shared between 'rawPageLookup' and 'rawPageFindKey'.
 {-# INLINE bisectPageToKey #-}
-bisectPageToKey
-  :: Int
+bisectPageToKey ::
+     Int
   -> RawPage
   -> SerialisedKey
   -> BinarySearchResult
@@ -255,8 +255,8 @@ getRawPageIndexKey = \case
 
 
 {-# INLINE rawPageIndex #-}
-rawPageIndex
-  :: RawPage
+rawPageIndex ::
+     RawPage
   -> Word16
   -> RawPageIndex (Entry SerialisedValue BlobSpan)
 rawPageIndex !page !ix
@@ -460,8 +460,8 @@ rawPageBlobSpanIndex page@(RawPage off ba) i = BlobSpan
     off1 = div4 off + 1 + ceilDiv64 (fromIntegral dirNumKeys) + ceilDiv64 (fromIntegral (mul2 dirNumKeys))
     off2 = mul2 (off1 + fromIntegral dirNumBlobs)
 
-rawPageCalculateBlobIndex
-  :: RawPage
+rawPageCalculateBlobIndex ::
+     RawPage
   -> Int -- ^ key index
   -> Int -- ^ blobspan index
 rawPageCalculateBlobIndex (RawPage off ba) i = do


0 comments on commit abb934d
