Dmitry Zuikov 2023-01-18 19:28:33 +03:00
parent 0bc07eb912
commit 33052af876
4 changed files with 36 additions and 37 deletions

View File

@@ -1,5 +1,3 @@
- {-# Language RankNTypes #-}
- {-# Language TemplateHaskell #-}
module HBS2.Actors.ChunkWriter
( ChunkWriter
, ChunkId
@@ -26,7 +24,6 @@ import Data.ByteString.Lazy qualified as B
import Data.Hashable (hash)
import Data.Maybe
import Data.Word
- import Lens.Micro.Platform
import Prettyprinter
import System.Directory
import System.FilePath
@@ -42,40 +39,46 @@ newtype ChunkId = ChunkId FilePath
deriving newtype (IsString)
deriving stock (Eq,Ord,Show)
- data ChunkWriter h m =
+ data ChunkWriter h m = forall a . (Storage a h ByteString m) =>
ChunkWriter
- { _pipeline :: Pipeline IO ()
- , _dir :: FilePath
- , storage :: forall a . (Key h ~ Hash h, Storage a h ByteString m) => a
+ { pipeline :: Pipeline IO ()
+ , dir :: FilePath
+ , storage :: a
}
- makeLenses 'ChunkWriter
runChunkWriter :: MonadIO m => ChunkWriter h m -> m ()
runChunkWriter w = do
- liftIO $ createDirectoryIfMissing True ( w ^. dir )
- liftIO $ runPipeline ( w ^. pipeline)
+ liftIO $ createDirectoryIfMissing True ( dir w )
+ liftIO $ runPipeline (pipeline w)
stopChunkWriter :: MonadIO m => ChunkWriter h m -> m ()
- stopChunkWriter w = liftIO $ stopPipeline ( w ^. pipeline )
+ stopChunkWriter w = liftIO $ stopPipeline ( pipeline w )
- newChunkWriterIO :: Maybe FilePath -> IO (ChunkWriter h m)
- newChunkWriterIO tmp = do
+ newChunkWriterIO :: forall h a m . ( Key h ~ Hash h
+ , Storage a h ByteString m
+ , Monad m
+ )
+ => a
+ -> Maybe FilePath
+ -> IO (ChunkWriter h m)
+ newChunkWriterIO s tmp = do
pip <- newPipeline defChunkWriterQ
def <- getXdgDirectory XdgData (defStorePath </> "temp-chunks")
let d = fromMaybe def tmp
pure $
ChunkWriter
- { _pipeline = pip
- , _dir = d
- , storage = undefined
+ { pipeline = pip
+ , dir = d
+ , storage = s
}
makeFileName :: (Hashable salt, Pretty (Hash h)) => ChunkWriter h m -> salt -> Hash h -> FilePath
- makeFileName w salt h = (w ^. dir) </> suff
+ makeFileName w salt h = dir w </> suff
where
suff = show $ pretty (fromIntegral (hash salt) :: Word32) <> "@" <> pretty h
@@ -109,7 +112,7 @@ writeChunk :: (Hashable salt, MonadIO m, Pretty (Hash h))
-> Offset
-> ByteString -> m ()
- writeChunk w salt h o bs = addJob (w ^. pipeline) $ liftIO do
+ writeChunk w salt h o bs = addJob (pipeline w) $ liftIO do
withBinaryFile fn ReadWriteMode $ \fh -> do
hSeek fh AbsoluteSeek (fromIntegral o)
B.hPutStr fh bs
@@ -137,7 +140,7 @@ getHash w salt h = liftIO do
q <- Q.newTBQueueIO 1
- addJob (w ^. pipeline) do
+ addJob (pipeline w) do
h1 <- hashObject @h <$> B.readFile fn
atomically $ Q.writeTBQueue q h1
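
Taken together, the hunks above replace the lens-generated accessors with plain record fields and capture the storage backend existentially when the writer is built, instead of leaving the storage field as an undefined placeholder. A minimal lifecycle sketch under those assumptions follows; withChunkWriter is a hypothetical helper, not repository code, imports of the Storage and Hash types are omitted, and the constraints are approximated from the new signature of newChunkWriterIO.

import Control.Concurrent.Async (async, cancel)
import HBS2.Actors.ChunkWriter

-- Hypothetical helper: build a writer around an existing storage backend,
-- run its pipeline in the background, and shut everything down afterwards.
withChunkWriter :: (Key h ~ Hash h, Storage a h ByteString IO)
                => a -> (ChunkWriter h IO -> IO r) -> IO r
withChunkWriter s action = do
  cw <- newChunkWriterIO s Nothing   -- Nothing: default XDG "temp-chunks" directory
  w  <- async (runChunkWriter cw)    -- the pipeline runs until it is stopped
  r  <- action cw
  stopChunkWriter cw                 -- stop accepting and processing jobs
  cancel w
  pure r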

View File

@@ -48,9 +48,8 @@ import HBS2.Storage
newtype Raw a = Raw { fromRaw :: a }
- type instance Block (Raw LBS.ByteString) = LBS.ByteString
+ type instance Block LBS.ByteString = LBS.ByteString
newtype StorageQueueSize = StorageQueueSize { fromQueueSize :: Int }
@@ -283,10 +282,10 @@ simpleGetChunkLazy s key off size = do
simplePutBlockLazy :: (IsKey h, Hashed h LBS.ByteString)
=> Bool -- | wait
-> SimpleStorage h
- -> (Raw LBS.ByteString)
+ -> LBS.ByteString
-> IO (Maybe (Hash h))
- simplePutBlockLazy doWait s (Raw lbs) = do
+ simplePutBlockLazy doWait s lbs = do
let hash = hashObject lbs
let fn = simpleBlockFileName s hash
@@ -339,10 +338,10 @@ simpleWriteLinkRaw :: forall h . ( IsKey h
)
=> SimpleStorage h
-> Hash h
- -> Raw LBS.ByteString
+ -> LBS.ByteString
-> IO (Maybe (Hash h))
- simpleWriteLinkRaw ss h (Raw lbs) = do
+ simpleWriteLinkRaw ss h lbs = do
let fnr = simpleRefFileName ss h
runMaybeT $ do
@@ -366,18 +365,18 @@ simpleReadLinkRaw ss hash = do
pure $ fromMaybe Nothing rs
instance Hashed hash LBS.ByteString => Hashed hash (Raw LBS.ByteString) where
hashObject (Raw s) = hashObject s
-- instance Hashed hash LBS.ByteString => Hashed hash LBS.ByteString where
-- hashObject s = hashObject s
instance ( MonadIO m, IsKey hash
, Hashed hash LBS.ByteString
, Key hash ~ Hash hash
)
- => Storage (SimpleStorage hash) hash (Raw LBS.ByteString) m where
+ => Storage (SimpleStorage hash) hash LBS.ByteString m where
- putBlock s lbs = liftIO $ simplePutBlockLazy True s (Raw lbs)
+ putBlock s lbs = liftIO $ simplePutBlockLazy True s lbs
- enqueueBlock s lbs = liftIO $ simplePutBlockLazy False s (Raw lbs)
+ enqueueBlock s lbs = liftIO $ simplePutBlockLazy False s lbs
getBlock s key = liftIO $ simpleGetBlockLazy s key
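
With the instance head switched from Raw LBS.ByteString to plain LBS.ByteString (and Block LBS.ByteString defined to match), callers hand lazy ByteStrings straight to the storage API. A small round-trip sketch under that assumption; roundTripBlock is an illustrative name, and the result types of putBlock and getBlock are inferred from how they are used in this file rather than taken from the class definition.

-- Sketch only: store a plain lazy ByteString and read it back by its hash.
roundTripBlock :: SimpleStorage HbSync -> LBS.ByteString -> IO (Maybe LBS.ByteString)
roundTripBlock ss payload = do
  mh <- putBlock ss payload       -- no Raw wrapper any more
  case mh of
    Nothing -> pure Nothing       -- the write did not go through
    Just h  -> getBlock ss h      -- Block LBS.ByteString is LBS.ByteString, so this yields the payload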

View File

@@ -225,7 +225,7 @@ runFakePeer se env = do
w <- async $ simpleStorageWorker storage
- cww <- newChunkWriterIO (Just chDir)
+ cww <- newChunkWriterIO storage (Just chDir)
cw <- async $ runChunkWriter cww
@@ -291,7 +291,7 @@ runFakePeer se env = do
h1 <- liftIO $ getHash cww chuKey h
when ( h1 == h ) $ do
debug $ "THIS BLOCK IS DEFINETLY DONE" <+> pretty h1
debug $ "THIS BLOCK IS DEFINITLY DONE" <+> pretty h1
-- COMPUTE THE HASH
-- IF IT MATCHES, FINALIZE THE BLOCK
@@ -302,9 +302,6 @@ runFakePeer se env = do
-- THIS WON'T WORK
-- SO WE WAIT HERE FOR, SAY, 2*mbSize AND THEN GIVE UP
- -- WHERE DO WE GET THE BLOCK SIZE FROM?
- -- FROM BLOCKINFO, PERHAPS?
- -- AND IF THERE IS NOTHING? => BLOCK_LOST
debug $ "got chunk" <+> pretty p
<+> pretty h
<+> pretty n
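
At the call sites above, the fake peer now passes its storage to newChunkWriterIO and keeps the same accumulate-then-verify flow: each chunk is written at its offset, then the accumulated data is re-hashed and compared with the expected block hash. A condensed sketch of that flow; acceptChunk is an illustrative name, the type signature is omitted, when comes from Control.Monad, and debug, pretty and <+> are the project's own logging and pretty-printing helpers seen elsewhere in this diff.

-- Sketch: queue a chunk write, then check whether the block is complete.
acceptChunk cww chuKey expectedHash offset bytes = do
  writeChunk cww chuKey expectedHash offset bytes   -- queued on the writer's pipeline
  h1 <- getHash cww chuKey expectedHash             -- hash of everything written so far
  when (h1 == expectedHash) $
    debug $ "block" <+> pretty h1 <+> "is complete" -- safe to finalize at this point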

View File

@@ -120,7 +120,7 @@ runNewRef opts mhash ss = do
let href = HashRef (fromMerkleHash mhash)
let mref = HashRefMerkle (HashRefObject href Nothing)
let ref = AnnotatedHashRef Nothing mref
- res <- simpleWriteLinkRaw ss uuid (Raw (serialise ref))
+ res <- simpleWriteLinkRaw ss uuid (serialise ref)
print (pretty res)
withStore :: Data opts => opts -> ( SimpleStorage HbSync -> IO () ) -> IO ()
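
The same simplification applies at this call site: serialise already yields a lazy ByteString, so it goes to simpleWriteLinkRaw directly. A short sketch of writing a ref and decoding it on read-back; roundTripRef is an illustrative name, simpleReadLinkRaw appears earlier in this diff, deserialiseOrFail comes from Codec.Serialise, and the Maybe LBS.ByteString result of the read is an assumption.

-- Sketch: store a serialised reference under a key and decode it when reading back.
roundTripRef ss key ref = do
  _   <- simpleWriteLinkRaw ss key (serialise ref)   -- plain lazy ByteString, no Raw wrapper
  raw <- simpleReadLinkRaw ss key                    -- assumed: IO (Maybe LBS.ByteString)
  pure (fmap deserialiseOrFail raw)                  -- Maybe (Either DeserialiseFailure a)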