From 8d913e30b2d1d7f1e35d36cf04dde48c5e5e680d Mon Sep 17 00:00:00 2001
From: Adithya Kumar
Date: Tue, 17 Dec 2024 12:32:27 +0530
Subject: [PATCH] Deprecate chunksOf from Data.Stream module

---
 benchmark/Streamly/Benchmark/Data/Array/Stream.hs |  4 ++--
 benchmark/Streamly/Benchmark/Data/Parser.hs       |  2 +-
 benchmark/Streamly/Benchmark/Data/ParserK.hs      |  3 ++-
 .../Streamly/Benchmark/FileSystem/Handle/Read.hs  |  9 ++++-----
 core/src/Streamly/Data/Stream.hs                  | 15 ++++++++++-----
 core/src/Streamly/Internal/Data/Fold/Chunked.hs   |  2 +-
 core/src/Streamly/Internal/FileSystem/Handle.hs   |  2 +-
 test/Streamly/Test/Data/Parser.hs                 |  2 +-
 test/Streamly/Test/Unicode/Stream.hs              |  3 ++-
 9 files changed, 24 insertions(+), 18 deletions(-)

diff --git a/benchmark/Streamly/Benchmark/Data/Array/Stream.hs b/benchmark/Streamly/Benchmark/Data/Array/Stream.hs
index 812d29f37f..87b466cd75 100644
--- a/benchmark/Streamly/Benchmark/Data/Array/Stream.hs
+++ b/benchmark/Streamly/Benchmark/Data/Array/Stream.hs
@@ -295,8 +295,8 @@ main = do
         then return (undefined, undefined)
         else do
-            small <- Stream.toList $ Stream.chunksOf 100 $ sourceUnfoldrM value 0
-            big <- Stream.toList $ Stream.chunksOf value $ sourceUnfoldrM value 0
+            small <- Stream.toList $ Array.chunksOf 100 $ sourceUnfoldrM value 0
+            big <- Stream.toList $ Array.chunksOf value $ sourceUnfoldrM value 0
             return (small, big)
 
     allBenchmarks env arrays value =
diff --git a/benchmark/Streamly/Benchmark/Data/Parser.hs b/benchmark/Streamly/Benchmark/Data/Parser.hs
index f4ed6da2a5..988c4f8ebe 100644
--- a/benchmark/Streamly/Benchmark/Data/Parser.hs
+++ b/benchmark/Streamly/Benchmark/Data/Parser.hs
@@ -856,7 +856,7 @@ main = do
 
     where
 
-    alloc value = Stream.fold Fold.toList $ Stream.chunksOf 100 $ sourceUnfoldrM value 0
+    alloc value = Stream.fold Fold.toList $ Array.chunksOf 100 $ sourceUnfoldrM value 0
 
     allBenchmarks env arrays value =
         [ bgroup (o_1_space_prefix moduleName) (o_1_space_serial value)
diff --git a/benchmark/Streamly/Benchmark/Data/ParserK.hs b/benchmark/Streamly/Benchmark/Data/ParserK.hs
index cc44c7c75e..8ad6d5f291 100644
--- a/benchmark/Streamly/Benchmark/Data/ParserK.hs
+++ b/benchmark/Streamly/Benchmark/Data/ParserK.hs
@@ -24,6 +24,7 @@ import Control.Monad.IO.Class (MonadIO)
 import Data.Foldable (asum)
 #ifdef BENCH_CHUNKED
 import Streamly.Data.Array (Array, Unbox)
+import qualified Streamly.Internal.Data.Array as Array (chunksOf)
 #endif
 #ifdef BENCH_CHUNKED_GENERIC
 import Streamly.Data.Array.Generic (Array)
@@ -116,7 +117,7 @@ benchIOSink value name f =
         >>= f
         . StreamK.fromStream
 #ifdef BENCH_CHUNKED
-        . Stream.chunksOf 4000
+        . Array.chunksOf 4000
 #endif
 #ifdef BENCH_CHUNKED_GENERIC
         . GenArr.chunksOf 4000
diff --git a/benchmark/Streamly/Benchmark/FileSystem/Handle/Read.hs b/benchmark/Streamly/Benchmark/FileSystem/Handle/Read.hs
index 880fc08da3..0d347fa118 100644
--- a/benchmark/Streamly/Benchmark/FileSystem/Handle/Read.hs
+++ b/benchmark/Streamly/Benchmark/FileSystem/Handle/Read.hs
@@ -29,7 +29,6 @@ import GHC.Magic (inline)
 import GHC.Magic (noinline)
 import System.IO (Handle)
 
-import qualified Streamly.Data.Stream as Stream
 import qualified Streamly.Data.Fold as Fold
 import qualified Streamly.FileSystem.Handle as FH
 import qualified Streamly.Internal.Data.Array as A
@@ -249,7 +248,7 @@ inspect $ 'groupsOf `hasNoType` ''IUF.ConcatState
 -- FH.read/UF.many
 {-# INLINE chunksOf #-}
 chunksOf :: Int -> Handle -> IO Int
 chunksOf n inh =
-    S.fold Fold.length $ Stream.chunksOf n (S.unfold FH.reader inh)
+    S.fold Fold.length $ A.chunksOf n (S.unfold FH.reader inh)
 o_1_space_reduce_read_grouped :: BenchEnv -> [Benchmark]
 o_1_space_reduce_read_grouped env =
@@ -288,11 +287,11 @@ o_1_space_reduce_read_grouped env =
             groupsOf 1000 inh
 
         -- chunksOf may use a different impl than groupsOf
-        , mkBenchSmall "S.chunksOf 1" env $ \inh _ ->
+        , mkBenchSmall "A.chunksOf 1" env $ \inh _ ->
             chunksOf 1 inh
-        , mkBench "S.chunksOf 10" env $ \inh _ ->
+        , mkBench "A.chunksOf 10" env $ \inh _ ->
             chunksOf 10 inh
-        , mkBench "S.chunksOf 1000" env $ \inh _ ->
+        , mkBench "A.chunksOf 1000" env $ \inh _ ->
             chunksOf 1000 inh
         ]
     ]
diff --git a/core/src/Streamly/Data/Stream.hs b/core/src/Streamly/Data/Stream.hs
index c18849dc61..aed4c2de21 100644
--- a/core/src/Streamly/Data/Stream.hs
+++ b/core/src/Streamly/Data/Stream.hs
@@ -665,11 +665,6 @@ module Streamly.Data.Stream
     , runReaderT
     , runStateT
 
-    -- XXX Arrays could be different types, therefore, this should be in
-    -- specific array module. Or maybe we should abstract over array types.
-    -- * Stream of Arrays
-    , Array.chunksOf
-
     -- * Deprecated
     , scan
     , scanMaybe
@@ -679,6 +674,7 @@
     , unfoldMany
     , intercalate
     , intercalateSuffix
+    , chunksOf
     )
 where
 
@@ -688,6 +684,15 @@ import Prelude
        mapM, scanl, sequence, reverse, iterate, foldr1, repeat,
        replicate, concatMap)
 
+import Streamly.Internal.Data.Unbox (Unbox(..))
+import Control.Monad.IO.Class (MonadIO(..))
+
 import qualified Streamly.Internal.Data.Array.Type as Array
 
 #include "DocTestDataStream.hs"
+
+{-# DEPRECATED chunksOf "Please use chunksOf from the Array module instead." #-}
+{-# INLINE chunksOf #-}
+chunksOf :: forall m a. (MonadIO m, Unbox a)
+    => Int -> Stream m a -> Stream m (Array.Array a)
+chunksOf = Array.chunksOf
diff --git a/core/src/Streamly/Internal/Data/Fold/Chunked.hs b/core/src/Streamly/Internal/Data/Fold/Chunked.hs
index 7097c90e65..541a2d41e4 100644
--- a/core/src/Streamly/Internal/Data/Fold/Chunked.hs
+++ b/core/src/Streamly/Internal/Data/Fold/Chunked.hs
@@ -26,7 +26,7 @@
 -- >>> import qualified Streamly.Data.StreamK as StreamK
 --
 -- >>> f = ChunkFold.fromFold (Fold.take 7 Fold.toList)
--- >>> s = Stream.chunksOf 5 $ Stream.fromList "hello world"
+-- >>> s = Array.chunksOf 5 $ Stream.fromList "hello world"
 -- >>> ArrayStream.runArrayFold f (StreamK.fromStream s)
 -- Right "hello w"
 --
diff --git a/core/src/Streamly/Internal/FileSystem/Handle.hs b/core/src/Streamly/Internal/FileSystem/Handle.hs
index 2533689ca9..cf457367a9 100644
--- a/core/src/Streamly/Internal/FileSystem/Handle.hs
+++ b/core/src/Streamly/Internal/FileSystem/Handle.hs
@@ -477,7 +477,7 @@ writeChunksWithBufferOf = writeChunksWith
 -- do not want buffering to occur at GHC level as well. Same thing applies to
 -- writes as well.
 
--- XXX Maybe we should have a Fold.chunksOf like we have Stream.chunksOf
+-- XXX Maybe we should have a Fold.chunksOf like we have Array.chunksOf
 
 -- | @writeWith reqSize handle@ writes the input stream to @handle@.
 -- Bytes in the input stream are collected into a buffer until we have a chunk
diff --git a/test/Streamly/Test/Data/Parser.hs b/test/Streamly/Test/Data/Parser.hs
index 9d7589470b..439f7b056f 100644
--- a/test/Streamly/Test/Data/Parser.hs
+++ b/test/Streamly/Test/Data/Parser.hs
@@ -865,7 +865,7 @@ parseUnfold = do
             <*> chooseInt (1, len)
             <*> chooseInt (1, len)) $ \(ls, clen, tlen) -> monadicIO $ do
-        arrays <- S.toList $ S.chunksOf clen (S.fromList ls)
+        arrays <- S.toList $ A.chunksOf clen (S.fromList ls)
         let src = Producer.source (Just (Producer.OuterLoop arrays))
         let parser = P.fromFold (FL.take tlen FL.toList)
         let readSrc =
diff --git a/test/Streamly/Test/Unicode/Stream.hs b/test/Streamly/Test/Unicode/Stream.hs
index 2abcf8d3da..c36de806de 100644
--- a/test/Streamly/Test/Unicode/Stream.hs
+++ b/test/Streamly/Test/Unicode/Stream.hs
@@ -21,6 +21,7 @@ import Test.QuickCheck.Monadic (run, monadicIO, assert, PropertyM)
 import Streamly.Data.Stream (Stream)
 
 import qualified Streamly.Data.Array as A
+import qualified Streamly.Internal.Data.Array as Array
 import qualified Streamly.Data.Stream as Stream
 import qualified Streamly.Internal.Data.Array.Stream as AS
 import qualified Streamly.Internal.Data.Stream as Stream
@@ -106,7 +107,7 @@ propDecodeEncodeIdArrays :: Property
 propDecodeEncodeIdArrays =
     forAll genUnicode $ \list ->
         monadicIO $ do
-            let wrds = Stream.chunksOf 8 $ SS.encodeUtf8' $ Stream.fromList list
+            let wrds = Array.chunksOf 8 $ SS.encodeUtf8' $ Stream.fromList list
             chrs <- Stream.toList $ IUS.decodeUtf8Chunks wrds
             assertEq chrs list
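
For downstream code the migration is mechanical: import the Array module qualified, as the hunks above do for ParserK.hs and the Unicode test, and call chunksOf from there. A minimal sketch, assuming a user module outside this repository; the countChunks name is illustrative only:

    import qualified Streamly.Data.Fold as Fold
    import qualified Streamly.Data.Stream as Stream
    import qualified Streamly.Internal.Data.Array as Array

    -- Group a stream of Ints into arrays of at most 3 elements and count the
    -- resulting chunks. Array.chunksOf takes the same arguments as the
    -- deprecated Stream.chunksOf, so only the qualifier changes.
    countChunks :: IO Int
    countChunks =
        Stream.fold Fold.length         -- count the arrays produced
            $ Array.chunksOf 3          -- was: Stream.chunksOf 3
            $ Stream.fromList ([1 .. 10] :: [Int])

    main :: IO ()
    main = countChunks >>= print        -- 10 elements in chunks of 3 -> 4

Until callers are updated, the DEPRECATED pragma above keeps Stream.chunksOf compiling and only emits a GHC deprecation warning.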