diff --git a/flake.nix b/flake.nix index c840d080..98609cc2 100644 --- a/flake.nix +++ b/flake.nix @@ -44,7 +44,6 @@ outputs = { self, nixpkgs, haskell-flake-utils, ... }@inputs: "hbs2-git" "hbs2-qblf" "hbs2-keyman" - "hbs2-share" "hbs2-fixer" "hbs2-cli" "hbs2-sync" @@ -70,7 +69,6 @@ outputs = { self, nixpkgs, haskell-flake-utils, ... }@inputs: "hbs2-storage-simple" = "./hbs2-storage-simple"; "hbs2-peer" = "./hbs2-peer"; "hbs2-keyman" = "./hbs2-keyman"; - "hbs2-share" = "./hbs2-share"; "hbs2-git" = "./hbs2-git"; "hbs2-fixer" = "./hbs2-fixer"; "hbs2-cli" = "./hbs2-cli"; diff --git a/hbs2-share/CHANGELOG.md b/hbs2-share/CHANGELOG.md deleted file mode 100644 index 5afa450c..00000000 --- a/hbs2-share/CHANGELOG.md +++ /dev/null @@ -1,5 +0,0 @@ -# Revision history for hbs2-share - -## 0.1.0.0 -- YYYY-mm-dd - -* First version. Released on an unsuspecting world. diff --git a/hbs2-share/LICENSE b/hbs2-share/LICENSE deleted file mode 100644 index 3086ee5d..00000000 --- a/hbs2-share/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright (c) 2023, Dmitry Zuikov - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Dmitry Zuikov nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/hbs2-share/app/Main.hs b/hbs2-share/app/Main.hs
deleted file mode 100644
index 1f31f166..00000000
--- a/hbs2-share/app/Main.hs
+++ /dev/null
@@ -1,43 +0,0 @@
-module Main where
-
-import HBS2.Share.App
-
-import Options.Applicative as O
-
--- Parser for the global options
-globalOptions :: Parser [AppOption]
-globalOptions = do
-  dry <- optional (flag' True (long "dry" <> short 'n' <> help "dont post anything"))
-           <&> maybe mempty (const [AppDontPostOpt])
-
-  debug <- optional (flag' True (long "debug" <> short 'v' <> help "allow debug output"))
-           <&> maybe mempty (const [AppDebugOpt])
-
-  trace <- optional (flag' True (long "trace" <> help "allow more debug output"))
-           <&> maybe mempty (const [AppTraceOpt])
-
-
-  replica <- optional (flag' True (long "replica" <> help "replica (slave) mode"))
-           <&> maybe mempty (const [AppReplicaOpt])
-
-  pure (replica <> debug <> dry <> trace )
-
--- Parser for the commands
-commands :: AppPerks m => Parser (ShareCLI m ())
-commands = defCmd
-
-defCmd :: AppPerks m => Parser (ShareCLI m ())
-defCmd = pure $ runSync
-
-opts :: AppPerks m => ParserInfo ([AppOption], ShareCLI m ())
-opts = O.info (liftA2 (,) globalOptions commands <**> helper)
-  ( fullDesc
-  -- <> progDesc "An application with global options and subcommands"
-  <> header "hbs2-share" )
-
-main :: IO ()
-main = do
-  (o, action) <- execParser opts
-  runApp o action
-
-
diff --git a/hbs2-share/hbs2-share.cabal b/hbs2-share/hbs2-share.cabal
deleted file mode 100644
index 2ab9d413..00000000
--- a/hbs2-share/hbs2-share.cabal
+++ /dev/null
@@ -1,116 +0,0 @@
-cabal-version: 3.0
-name: hbs2-share
-version: 0.24.1.2
--- synopsis:
--- description:
-license: BSD-3-Clause
-license-file: LICENSE
-author: Dmitry Zuikov
-maintainer: dzuikov@gmail.com
--- copyright:
-category: System
-build-type: Simple
--- extra-doc-files: CHANGELOG.md
--- extra-source-files:
-
-common shared-properties
-  ghc-options:
-    -Wall
-    -fno-warn-type-defaults
-    -threaded
-    -rtsopts
-    -O2
-    "-with-rtsopts=-N4 -A64m -AL256m -I0"
-
-  default-language: GHC2021
-
-  default-extensions:
-      ApplicativeDo
-    , BangPatterns
-    , BlockArguments
-    , ConstraintKinds
-    , DataKinds
-    , DeriveDataTypeable
-    , DeriveGeneric
-    , DerivingStrategies
-    , DerivingVia
-    , ExtendedDefaultRules
-    , FlexibleContexts
-    , FlexibleInstances
-    , GADTs
-    , GeneralizedNewtypeDeriving
-    , ImportQualifiedPost
-    , LambdaCase
-    , MultiParamTypeClasses
-    , OverloadedStrings
-    , QuasiQuotes
-    , RecordWildCards
-    , ScopedTypeVariables
-    , StandaloneDeriving
-    , TupleSections
-    , TypeApplications
-    , TypeFamilies
-
-
-  build-depends:
-      hbs2-core
-    , hbs2-peer
-    , hbs2-storage-simple
-    , hbs2-keyman
-    , db-pipe
-    , suckless-conf
-
-    , atomic-write
-    , bytestring
-    , containers
-    , directory
-    , filepath
-    , filepattern
-    , interpolatedstring-perl6
-    , memory
-    , microlens-platform
-    , mtl
-    , serialise
-    , streaming
-    , stm
-    , text
-    , time
-    , timeit
-    , transformers
-    , typed-process
-    , unordered-containers
-    , unliftio
-    , zlib
-
-
-library
-  import: shared-properties
-  exposed-modules:
-    HBS2.Share.App
-    HBS2.Share.App.Types
-    HBS2.Share.Config
-    HBS2.Share.State
-    HBS2.Share.Files
-    HBS2.Share.Keys
-    HBS2.Share.LocalHash
-    HBS2.Share.MetaData
-
-  other-modules:
-
-  -- other-modules:
-  -- other-extensions:
-  build-depends: base, hbs2-peer
-  hs-source-dirs: src
-
-executable hbs2-share
-  import: shared-properties
-  main-is: Main.hs
-  -- other-modules:
-  -- other-extensions:
-  build-depends:
-      base, hbs2-share, hbs2-peer
-    , optparse-applicative
-
-  hs-source-dirs:
app - default-language: GHC2021 - diff --git a/hbs2-share/src/HBS2/Share/App.hs b/hbs2-share/src/HBS2/Share/App.hs deleted file mode 100644 index 4b646eb2..00000000 --- a/hbs2-share/src/HBS2/Share/App.hs +++ /dev/null @@ -1,850 +0,0 @@ -{-# Language MultiWayIf #-} -module HBS2.Share.App - ( module HBS2.Share.App.Types - , AppOption(..) - , Command - , AppPerks - , runApp - , runSync - ) where - -import HBS2.Prelude.Plated -import HBS2.Base58 -import HBS2.Merkle -import HBS2.Data.Detect -import HBS2.Defaults (defBlockSize) -import HBS2.Hash -import HBS2.Clock -import HBS2.OrDie -import HBS2.Peer.Proto.RefChan.Types -import HBS2.Net.Auth.Credentials -import HBS2.Net.Auth.Credentials.Sigil -import HBS2.Data.Types.SignedBox -import HBS2.Net.Auth.GroupKeySymm -import HBS2.Net.Auth.GroupKeySymm qualified as Symm -import HBS2.Peer.Proto.RefChan - -import HBS2.Net.Messaging.Unix -import HBS2.Net.Proto.Service -import HBS2.Storage -import HBS2.Storage.Operations.ByteString -import HBS2.Storage.Operations.Missed (findMissedBlocks,findMissedBlocks2) - -import HBS2.Peer.CLI.Detect (detectRPC) -import HBS2.Peer.RPC.Client.StorageClient - -import HBS2.KeyMan.Keys.Direct - -import HBS2.Share.App.Types -import HBS2.Share.Config hiding (key) -import HBS2.Share.State -import HBS2.Share.Files qualified as F -import HBS2.Share.Keys -import HBS2.Share.MetaData -import HBS2.Share.LocalHash - -import HBS2.System.Logger.Simple.ANSI -import DBPipe.SQLite - -import Control.Applicative -import Control.Concurrent.STM (flushTQueue) -import Control.Monad.Except (runExceptT) -import Control.Monad.Trans.Maybe -import Data.ByteArray.Hash qualified as BA -import Data.ByteArray.Hash (SipHash(..), SipKey(..)) -import Data.ByteString.Lazy qualified as LBS -import Data.ByteString qualified as BS -import Data.HashSet qualified as HashSet -import Data.HashMap.Strict qualified as HashMap -import Data.List qualified as List -import Data.Maybe -import Data.Set qualified as Set -import Data.Set (Set) -import Data.Either -import System.Directory -import System.FilePath - -import Codec.Serialise -import Codec.Compression.GZip as GZip -import System.AtomicWrite.Writer.LazyByteString qualified as AwL - -import System.TimeIt - -import Streaming.Prelude qualified as S - - -type Command m = m () - - -runApp :: MonadUnliftIO m => [AppOption] -> ShareCLI m () -> m () -runApp opts action = do - - getLocalConfigDir' >>= - liftIO . createDirectoryIfMissing True - - getLocalConfigFile >>= \fn -> do - here <- liftIO $ doesFileExist fn - - unless here do - liftIO $ appendFile fn "" - - env <- liftIO (newAppEnv opts) - let db = view appDb env - - setLogging @INFO defLog - setLogging @ERROR (logPrefix "" . toStderr) - setLogging @WARN (logPrefix "" . toStdout) - setLogging @NOTICE (logPrefix "" . toStdout) - - when ( AppDebugOpt `elem` opts || AppTraceOpt `elem` opts) do - setLogging @DEBUG (logPrefix "" . toStderr) - - when (AppTraceOpt `elem` opts) do - setLogging @TRACE (logPrefix "" . toStderr) - - flip runContT pure $ do - void $ ContT $ bracket (async (runPipe db)) cancel - - lift $ withAppEnv env do - withState populateState - loadAllEncryptionStuff - action - - setLoggingOff @INFO - setLoggingOff @ERROR - setLoggingOff @WARN - setLoggingOff @NOTICE - setLoggingOff @DEBUG - setLoggingOff @TRACE - - -withAppEnv :: MonadIO m => AppEnv -> ShareCLI m a -> m a -withAppEnv env action = do - runReaderT (fromShareCLI action) env - - -newAppEnv :: forall m . 
MonadUnliftIO m => [AppOption] -> m AppEnv -newAppEnv opts = do - let dbOpts = dbPipeOptsDef - - w <- getWorkingDir - - conf <- readConfig - - let sonameOpt = runReader (cfgValue @RpcUnixOpt @(Maybe String) @(Reader [Syntax C])) conf - - rchan <- orThrowUser "refchan not set" (runReader (cfgValue @RefChanOpt @(Maybe RChan)) conf) - - sonameDetect <- detectRPC - - soname <- orThrowUser "rpc not detected" (sonameOpt <|> sonameDetect) - - AppEnv opts conf rchan - <$> (getLocalStatePath >>= newDBPipeEnv dbOpts) - <*> pure w - <*> pure soname - <*> newIORef Nothing - -withState :: (MonadReader AppEnv m, MonadIO m) - => DBPipeM m b - -> m b - -withState m = do - d <- asks (view appDb) - withDB d m - - -makeGK0Key :: forall e s m . ( AppPerks m - , HasProtocol e (ServiceProto StorageAPI e) - , s ~ Encryption L4Proto - ) - => RpcEndpoints e - -> ShareCLI m (Maybe GK0Key) - -makeGK0Key rpc = runMaybeT do - lift (getOwnRefChanHeadRef rpc) - >>= toMPlus - <&> GK0Key - - -getGK0 :: forall e s m . ( AppPerks m - , HasProtocol e (ServiceProto StorageAPI e) - , ForGroupKeySymm 'HBS2Basic - , s ~ 'HBS2Basic - ) - => RpcEndpoints e - -> ShareCLI m (GK0 s) -getGK0 rpc = do - - rchan <- asks (view appRefChan) - - let sto = AnyStorage (StorageClient (rpcStorage rpc)) - - gk0key <- makeGK0Key @e rpc - >>= orThrowUser "makeGK0Key(1): refchan not available" - - mgk <- runMaybeT do - gkh <- toMPlus =<< lift (withState $ selectGK0 gk0key) - - debug $ "found gk!" <+> pretty gkh - - runExceptT (readFromMerkle sto (SimpleKey (fromHashRef gkh))) - >>= toMPlus - <&> deserialiseOrFail @(GK0 s) - >>= toMPlus - - case mgk of - Just x -> do - pure x - - Nothing -> do - hd <- getRefChanHead @L4Proto sto (RefChanHeadKey (toRefChanId rchan)) - >>= orThrowUser "makeGK0Key(2): refchan not available" - - let readers = view refChanHeadReaders hd & HashSet.toList - gk <- generateGroupKey @s Nothing readers - href <- writeAsMerkle sto (serialise gk) <&> HashRef - - withState (insertGK0 gk0key href >> commitAll) - - debug $ "generated gk0!" <+> pretty href - - pure gk - -getOwnRefChanHeadRef :: forall e s m . ( AppPerks m - , HasProtocol e (ServiceProto StorageAPI e) - , s ~ Encryption L4Proto - ) - => RpcEndpoints e - -> ShareCLI m (Maybe HashRef) -getOwnRefChanHeadRef rpc = do - - let sto = AnyStorage (StorageClient (rpcStorage rpc)) - - runMaybeT do - rchan <- toMPlus =<< lift (cfgValue @RefChanOpt @(Maybe RChan)) - let puk = toRefChanId rchan - getRef sto (RefChanHeadKey @s puk) - >>= toMPlus - <&> HashRef - -withRpcClientUnix :: forall a e m . ( MonadUnliftIO m - , HasProtocol e (ServiceProto PeerAPI e) - , HasProtocol e (ServiceProto StorageAPI e) - , HasProtocol e (ServiceProto RefChanAPI e) - , e ~ UNIX - , MonadReader AppEnv m - ) - => ( RpcEndpoints e -> m a ) - -> m a - -withRpcClientUnix action = do - - -- FIXME: use-ContT - - soname <- asks (view appRpcSock) - - client <- race ( pause @'Seconds 1) (newMessagingUnix False 1.0 soname) `orDie` "hbs2-peer rpc timeout!" 
-  messaging <- async $ runMessagingUnix client
-  link messaging
-
-  rpcPeer <- makeServiceCaller @PeerAPI @e (fromString soname)
-  rpcStorage <- makeServiceCaller @StorageAPI @e (fromString soname)
-  rpcRefChan <- makeServiceCaller @RefChanAPI @e (fromString soname)
-
-  let endpoints = [ Endpoint @e rpcPeer
-                  , Endpoint @e rpcStorage
-                  , Endpoint @e rpcRefChan
-                  ]
-
-  c1 <- async $ liftIO $ runReaderT (runServiceClientMulti endpoints) client
-
-  link c1
-
-  r <- action $ RpcEndpoints rpcPeer rpcStorage rpcRefChan
-
-  pause @'Seconds 0.1
-
-  cancel c1
-
-  void $ waitAnyCatchCancel [c1, messaging]
-
-  pure r
-
-
-loadSigil :: forall s m . ( ForSigil s
-                          , AppPerks m
-                          ) => ShareCLI m (PubKey 'Sign s, SigilData s)
-loadSigil = do
-
-  dir <- getLocalConfigDir
-
-  path' <- cfgValue @SigilPathOpt @(Maybe String)
-             >>= orThrowUser "sigil not set"
-
-  let nonLocalPath = List.isPrefixOf "./" path' || List.isPrefixOf "/" path'
-
-  path <- if not nonLocalPath then do
-            pure $ dir </> path'
-          else do
-            pure path'
-
-  trace $ "SIGIL PATH" <+> pretty path
-
-  sigil <- liftIO $ (BS.readFile path <&> parseSerialisableFromBase58 @(Sigil s))
-             >>= orThrowUser ("invalid sigil format" <+> pretty path)
-
-  w@(_,sd) <- orThrowUser "malformed sigil" (unboxSignedBox0 @(SigilData s) (sigilData sigil))
-
-  pure w
-
-loadAllEncryptionStuff :: AppPerks m => ShareCLI m ()
-loadAllEncryptionStuff = do
-
-  -- 1. load the sigil
-  (pk, sd) <- loadSigil @'HBS2Basic
-
-  trace $ "sigil loaded" <+> pretty (AsBase58 pk)
-
-  enc <- runKeymanClient do
-    cr <- loadCredentials pk
-            >>= orThrowUser "can't find credentials"
-
-    enc <- loadKeyRingEntry (sigilDataEncKey sd)
-             >>= orThrowUser "can't find keyring entry"
-
-    pure $ EncryptionStuff cr enc
-
-  encIO <- asks (view appEnc)
-
-  writeIORef encIO (Just enc)
-  debug "encryption data loaded ok"
-
-
-data UpdateFileMethod = UpdateFileForce
-                      | UpdateFileSync
-
-updateFile :: (AppPerks m, HasProtocol e (ServiceProto StorageAPI e))
-           => RpcEndpoints e
-           -> RemoteFile
-           -> ShareCLI m ()
-updateFile rpc fe = do
-  dir <- asks (view appWorkDir)
-  replica <- isReplica
-  if replica then do
-    updateFileMethod UpdateFileForce rpc fe
-  else do
-    updateFileMethod UpdateFileSync rpc fe
-
-updateFileMethod :: (AppPerks m, HasProtocol e (ServiceProto StorageAPI e))
-                 => UpdateFileMethod
-                 -> RpcEndpoints e
-                 -> RemoteFile
-                 -> ShareCLI m ()
-updateFileMethod UpdateFileForce rpc fe = do
-
-  dir <- asks (view appWorkDir)
-
-  let key = _remoteFileKey fe
-
-  let fn = dir </> toFilePath key
-
-  let sto = AnyStorage (StorageClient (rpcStorage rpc))
-
-  encStuff <- asks (view appEnc)
-                >>= readIORef
-                >>= orThrowUser "credentials not available"
-
-  let kr = [view kre encStuff]
-
-  for_ (getDirs key) $ \d -> do
-    let fpath = dir </> d
-    here <- liftIO $ doesFileExist fpath
-    when here do
-      liftIO (removeFile fpath)
-    liftIO $ createDirectoryIfMissing True fpath
-
-  here <- liftIO $ doesFileExist fn
-
-  l <- withState (selectLocalFile key)
-
-  let lh = view localFileHash <$> l
-
-  when (lh /= Just (_remoteLocalHash fe) || not here) do
-    info $ "update file" <+> pretty key
-
-    let h = view remoteTree fe & fromHashRef
-
-    lbs <- runExceptT (readFromMerkle sto (ToDecryptBS kr h))
-             >>= orThrowUser ("can't read file" <+> pretty h <+> pretty key)
-
-    liftIO $ AwL.atomicWriteFile fn lbs
-
-updateFileMethod UpdateFileSync rpc fe = do
-  w <- asks (view appWorkDir)
-  let sto = AnyStorage (StorageClient (rpcStorage rpc))
-
-  encStuff <- asks (view appEnc)
-                >>= readIORef
-                >>= orThrowUser "credentials not available"
-
-  let kr = [view kre encStuff]
-
-  let key = _remoteFileKey fe
-
-  (doUpdate, mt) <- withState do
-    let fn = _remoteFileKey fe
-    lf <- selectLocalFile (_remoteFileKey fe)
-    -- floc <- selectLocalFile (_remoteFileKey fe)
-    let tLoc = _localFileModTime <$> lf
-    let tRem = Just (_remoteFileTime fe)
-
-    let rhash = Just $ _remoteLocalHash fe
-    let lhash = _localFileHash <$> lf
-
-    pure (tRem > tLoc && rhash /= lhash, tRem)
-
-  dont <- dontPost
-
-  when (doUpdate && not dont) do
-
-    let dirs = getDirs key
-
-    info $ "U" <+> pretty key <+> pretty (_remoteTree fe)
-
-    for_ dirs $ \d -> do
-      let fpath = w </> d
-      isFile <- liftIO $ doesFileExist fpath
-
-      when isFile do
-        -- TODO: unique-rename?
-        fnew <- renameFileUniq fpath
-        info $ "renamed" <+> pretty fpath <+> pretty fnew
-
-      debug $ "create dir" <+> pretty fpath
-      liftIO $ createDirectoryIfMissing True fpath
-
-    let h = view remoteTree fe & fromHashRef
-
-    lbs <- runExceptT (readFromMerkle sto (ToDecryptBS kr h))
-             >>= orThrowUser ("can't read file" <+> pretty h <+> pretty key)
-
-    let fn = w </> toFilePath key
-
-    liftIO $ AwL.atomicWriteFile fn lbs
-    forM_ mt (liftIO . setModificationTime fn)
-
-renameFileUniq :: MonadUnliftIO m => FilePath -> m FilePath
-renameFileUniq fs = do
-
-  fnew' <- S.head_ do
-    for_ [1..] $ \i -> do
-      let new = fs <> "~" <> show i
-      here <- liftIO (doesFileExist new)
-      unless here do
-        S.yield new
-
-  fnew <- orThrowUser ("can't rename file" <> pretty fs) fnew'
-
-  liftIO $ renameFile fs fnew
-
-  pure fnew
-
-isMissed :: (AppPerks m, MonadReader AppEnv m)
-         => AnyStorage
-         -> HashRef
-         -> m Bool
-
-isMissed sto h = do
-  miss <- withState (selectMissed h)
-  case miss of
-    Just False -> pure False
-    _ -> do
-      missed <- S.head_ (findMissedBlocks2 sto h) <&> isJust
-      withState (insertMissed h missed)
-      pure missed
-
-scanState :: forall e m . ( AppPerks m
-                          , HasProtocol e (ServiceProto StorageAPI e)
-                          , HasProtocol e (ServiceProto RefChanAPI e)
-                          )
-          => RpcEndpoints e
-          -> ShareCLI m HashRef
-
-scanState rpc = do
-
-  debug "scanState"
-
-  encStuff <- asks (view appEnc)
-                >>= readIORef
-                >>= orThrowUser "credentials not available"
-
-  let kr = view kre encStuff
-
-  let sto = AnyStorage (StorageClient (rpcStorage rpc))
-  refchan <- asks (toRefChanId .
view appRefChan) - - debug $ "scan state for" <+> pretty (AsBase58 refchan) - - rv <- callService @RpcRefChanGet (rpcRefChan rpc) refchan - >>= orThrowUser "getRefchan: rpc failure" - >>= orThrowUser "refchan not found" - - debug $ "refchan value" <+> pretty rv - - withState do - seen <- selectSeen rv - unless seen do - scanTx sto rv - commitAll - - props <- withState selectProposes - - -- FIXME: cache-somehow - ((px,e), meta) <- findGoodNewBlock kr sto props - >>= orThrowUser "no meta block found" - - withState do - for_ (mdFiles meta) $ \fe -> do - insertRemoteFile px (realToFrac e) meta fe - commitAll - - rfs <- withState $ selectRemoteFiles px - - for_ rfs $ \rf -> do - updateFile rpc rf - - withState $ insertSeen rv - - pure px - - where - - findGoodNewBlock kr sto props = do - runMaybeT (go props) - - where - - go [] = mzero - go (p:ps) = do - - let btx = fst p - missed <- lift $ isMissed sto btx - if missed then - go ps - else do - - what <- S.head_ do - walkMerkle (fromHashRef btx) (getBlock sto) $ \case - Right ( (hx:_) :: [HashRef] ) -> do - S.yield hx - - _ -> pure () - - hmeta <- toMPlus what - - meta <- runExceptT (readFromMerkle sto (ToDecryptBS [kr] (fromHashRef hmeta))) - >>= toMPlus - <&> GZip.decompress - <&> deserialiseOrFail @MetaData - >>= toMPlus - - if List.null (mdFiles meta) then do - go ps - else - pure (p,meta) - - scanTx sto rv = - -- FIXME: dont-process-twice - walkMerkle (fromHashRef rv) (getBlock sto) $ \case - Left h -> warn $ "missed block" <+> pretty h - - Right (hs ::[HashRef]) -> void $ runMaybeT do - trace $ "got some" <+> pretty (length hs) - - for_ hs $ \htx -> void $ runMaybeT do - - seen <- lift $ lift $ selectSeen htx - - -- debug $ "SEEN" <+> pretty seen <+> pretty htx - guard (not seen) - - bs <- toMPlus =<< getBlock sto (fromHashRef htx) - tx <- toMPlus $ deserialiseOrFail @(RefChanUpdate L4Proto) bs - - case tx of - Accept _ box -> do - (_, txx@(AcceptTran mt _ hp)) <- toMPlus $ unboxSignedBox0 box - trace $ "tx accept" <+> pretty htx <+> pretty hp <+> pretty mt - t <- toMPlus mt - lift $ lift $ insertAccept htx hp (fromIntegral t) - - Propose _ box -> do - (_, ProposeTran _ pbox :: ProposeTran L4Proto) <- toMPlus $ unboxSignedBox0 box - (_, bs2) <- toMPlus $ unboxSignedBox0 pbox - - let wtf = [ tryDetect (hashObject bs) (LBS.fromStrict bs2) ] - - mytx <- [ ha | AnnotatedHashRef _ ha <- universeBi wtf ] & listToMaybe & toMPlus - - trace $ "tx propose" <+> pretty htx <+> pretty mytx - lift $ lift $ insertPropose htx mytx - - lift $ lift $ insertSeen htx - -dontPost :: AppPerks m => ShareCLI m Bool -dontPost = do - opts <- asks ( view appOpts ) - replica <- isReplica - pure $ replica || or [ True | AppDontPostOpt <- opts ] - -isReplica :: AppPerks m => ShareCLI m Bool -isReplica = do - re <- asks _appOpts <&> (AppReplicaOpt `elem`) - conf <- getConf - pure $ re || or [ True | ListVal [SymbolVal "replica"] <- conf ] - -updateLocalState :: AppPerks m => ShareCLI m () -updateLocalState = do - - debug "updateLocalState" - - skip <- cfgValue @IgnoreOpt @(Set String) <&> Set.toList - - dir <- asks (view appWorkDir) - - let d = makeEntryKey mempty dir - - q <- newTQueueIO - - es <- liftIO (F.listFiles skip dir (atomically . writeTQueue q . 
makeEntryKey d))
-          >> atomically (flushTQueue q)
-
-  withState do
-    for_ es $ \e -> do
-      let fn = toFilePath e
-      t <- liftIO $ getModificationTime fn
-
-      lf <- selectLocalFile e
-
-      let newF = isNothing lf || (view localFileModTime <$> lf) /= Just t
-
-      when newF do
-        h <- localHash (toFilePath e)
-        insertLocalFile e t h
-
-    commitAll
-
-postState :: forall e s m . ( AppPerks m
-                            , HasProtocol e (ServiceProto RefChanAPI e)
-                            , HasProtocol e (ServiceProto StorageAPI e)
-                            , s ~ 'HBS2Basic
-                            )
-
-          => RpcEndpoints e
-          -> HashRef -- ^ current state
-          -> ShareCLI m ()
-postState rpc px = do
-
-  debug "postState"
-
-  encStuff <- asks (view appEnc)
-                >>= readIORef
-                >>= orThrowUser "credentials not available"
-
-  let kr = view kre encStuff
-
-  let (KeyringKeys pk sk) = view kre encStuff
-
-  let sto = AnyStorage (StorageClient (rpcStorage rpc))
-  refchan <- asks (toRefChanId . view appRefChan)
-
-  -- generate gk0 if there is none yet:
-  gk0key <- makeGK0Key rpc
-              >>= orThrowUser "can't make gk0key (perhaps refchan is not available)"
-
-  debug $ "gk0 key" <+> pretty gk0key
-
-  gk0 <- getGK0 rpc
-  gkh <- writeAsMerkle sto (serialise gk0)
-
-  debug $ "got GK0, okay"
-
-  gks <- Symm.lookupGroupKey sk pk gk0
-           & orThrow (userError $ show ("*** Can't decrypt group key" <+> pretty gkh))
-
-  w <- asks (view appWorkDir)
-  locals <- withState selectLocalFiles
-
-  withState do
-    fee <- S.toList_ $ for_ locals $ \l -> do
-      let key = _localFileKey l
-      let fpath = w </> toFilePath key
-      r <- lift $ selectRemoteFile px key
-
-      let rhash = _remoteLocalHash <$> r
-      let rtree = _remoteTree <$> r
-      let lhash = _localFileHash l
-
-      here <- liftIO $ doesFileExist fpath
-
-      when here do
-        if Just lhash == rhash && isJust r then do
-
-          -- FIXME: only-if-readers-are-changed
-          -- do this only if the set of readers has changed,
-          -- otherwise it will be slow on a large number of files
-          override <- genTreeOverride sto encStuff gk0 (fromJust rtree)
-
-          case override of
-            Just (Left{}) -> do
-              -- nothing happened, no action required
-              S.yield $ Left $ FileEntry key lhash (fromJust rtree)
-
-            Just (Right new) -> do
-              -- tree is overridden with new gk0
-              S.yield $ Right $ FileEntry key lhash new
-
-            Nothing -> do
-              -- errors during tree overriding, post new file
-              warn $ "errors while overriding tree" <+> pretty rtree
-              tree <- writeEncryptedFile gks gk0 sto fpath lhash
-              S.yield $ Right $ FileEntry key lhash tree
-
-        else do
-          tree <- writeEncryptedFile gks gk0 sto fpath lhash
-          S.yield $ Right $ FileEntry key lhash tree
-
-    let fe = List.sortOn (view feKey) (lefts fee <> rights fee)
-
-    let updated = not $ List.null (rights fee)
-
-    when updated do
-
-      let gk1 = mempty
-
-      let base = Just px
-
-      let md = MetaData base gk1 fe
-
-      -- we could take only the Rights here
-      let hashes = [ t | FileEntry _ _ t <- fe ]
-
-      for_ (rights fee) $ \f -> do
-        info $ "M" <+> pretty (_feTree f) <+> pretty (_feKey f)
-
-      let metabs = serialise md
-                     & GZip.compressWith (defaultCompressParams { compressLevel = bestCompression })
-
-      manifest <- getLocalConfigDir <&> (</> "manifest")
-      liftIO $ AwL.atomicWriteFile manifest metabs
-
-      lh <- localHash manifest
-      mfhash <- writeEncryptedFile gks gk0 sto manifest lh
-
-      let pt = toPTree (MaxSize 1024) (MaxNum 1024) (mfhash : hashes) -- FIXME: settings
-
-      metaHash <- makeMerkle 0 pt $ \(_,_,bss) -> do
-        void $ liftIO (putBlock sto bss)
-
-      info $ "entries:" <+> pretty (length hashes) <+> pretty metaHash
-
-      let tx = AnnotatedHashRef Nothing (HashRef metaHash)
-      let ssk = view (creds . peerSignSk) encStuff
-      let spk = view (creds .
peerSignPk) encStuff - - let box = makeSignedBox spk ssk (LBS.toStrict $ serialise tx) - - dont <- lift dontPost - - unless dont do - debug "POST TX" - r <- callService @RpcRefChanPropose (rpcRefChan rpc) (refchan, box) - pure () - - where - -- genTreeOverride :: AnyStorage -> EncryptionStuff -> GK0 'HBS2Basic -> HashRef -> m () - genTreeOverride sto enc gk0 tree = do - let (KeyringKeys pk sk) = view kre enc - runMaybeT do - obj <- MaybeT $ getBlock sto (fromHashRef tree) - case tryDetect (fromHashRef tree) obj of - MerkleAnn ann@(MTreeAnn {_mtaCrypt = EncryptGroupNaClSymm2 o gkh0 nonce}) -> do - - gk0old <- runExceptT (readFromMerkle sto (SimpleKey gkh0)) - >>= toMPlus - <&> deserialiseOrFail @(GroupKey 'Symm s) - >>= toMPlus - - let rcptOld = HashMap.keysSet (recipients gk0old) - let rcptNew = HashMap.keysSet (recipients gk0) - - if rcptOld == rcptNew then do - pure (Left tree) - else do - - gksOld <- toMPlus $ Symm.lookupGroupKey sk pk gk0old - - gk1 <- generateGroupKey @s (Just gksOld) (HashSet.toList rcptNew) - - gk1h <- writeAsMerkle sto (serialise gk1) - - let newCrypt = EncryptGroupNaClSymm2 o gk1h nonce - let newTreeBlock = ann { _mtaCrypt = newCrypt } - - newTree <- enqueueBlock sto (serialise newTreeBlock) - >>= toMPlus - <&> HashRef - - pure (Right newTree) - - _ -> mzero - - -runSync :: AppPerks m => ShareCLI m () -runSync = do - - replica <- isReplica - info $ "replica:" <+> pretty replica - - flip runContT pure $ do - - rpc <- ContT $ withRpcClientUnix - - lift do - updateLocalState - px <- scanState rpc - updateLocalState - postState rpc px - -writeEncryptedFile :: forall m s nonce . (MonadIO m, Serialise nonce, s ~ 'HBS2Basic) - => GroupSecret - -> GroupKey 'Symm s - -> AnyStorage - -> FilePath - -> nonce - -> m HashRef -writeEncryptedFile gks gk0 sto fn h = do - let nonce = LBS.drop 1 (serialise h) & LBS.toStrict - - let sk1 = SipKey 2716310006254639645 507093936407764973 - let sk2 = SipKey 9209724780415729085 2720900864410773155 - let (SipHash a) = BA.sipHash sk1 nonce - let (SipHash b) = BA.sipHash sk2 nonce - - let bsStream = flip readChunkedBS defBlockSize =<< liftIO (LBS.readFile fn) - - -- TODO: fix-metadata - let source = ToEncryptSymmBS @s gks - (Right gk0) - nonce - bsStream - NoMetaData - (Just (EncryptGroupNaClSymmBlockSIP (a,b))) - - th <- runExceptT (writeAsMerkle sto source) - >>= orThrowUser "can't encrypt data" - - pure $ HashRef th - diff --git a/hbs2-share/src/HBS2/Share/App/Types.hs b/hbs2-share/src/HBS2/Share/App/Types.hs deleted file mode 100644 index 777c65f2..00000000 --- a/hbs2-share/src/HBS2/Share/App/Types.hs +++ /dev/null @@ -1,105 +0,0 @@ -{-# Language UndecidableInstances #-} -{-# Language TemplateHaskell #-} -module HBS2.Share.App.Types - ( module HBS2.Share.App.Types - , module HBS2.Data.Types.Refs - , module Data.Config.Suckless - , module HBS2.Peer.RPC.API.Peer - , module HBS2.Peer.RPC.API.Storage - , module HBS2.Peer.RPC.API.RefChan - , module UnliftIO - , module Control.Monad.Trans.Cont - , module Control.Monad.Reader - , module Lens.Micro.Platform - ) where - -import HBS2.Prelude.Plated -import HBS2.Base58 -import HBS2.Data.Types.Refs -import HBS2.Peer.Proto.RefChan -import HBS2.Net.Proto.Types -import HBS2.Net.Proto.Service -import HBS2.Net.Auth.Credentials - -import HBS2.Peer.RPC.API.Peer -import HBS2.Peer.RPC.API.Storage -import HBS2.Peer.RPC.API.RefChan - -import Data.Config.Suckless -import DBPipe.SQLite - -import Control.Monad.Trans.Cont -import Control.Monad.Reader -import Data.Maybe -import Lens.Micro.Platform -import UnliftIO - 
-newtype RChan = RChan { toRefChanId :: RefChanId L4Proto } - -deriving newtype instance FromStringMaybe RChan - -instance Pretty RChan where - pretty (RChan x) = pretty (AsBase58 x) - -instance IsString RChan where - fromString s = fromMaybe (error "invalid refchan") $ fromStringMay s - -data RpcEndpoints e = - RpcEndpoints - { rpcPeer :: ServiceCaller PeerAPI e - , rpcStorage :: ServiceCaller StorageAPI e - , rpcRefChan :: ServiceCaller RefChanAPI e - } - - -data EncryptionStuff = - EncryptionStuff - { _creds :: PeerCredentials 'HBS2Basic - , _kre :: KeyringEntry 'HBS2Basic - } - -makeLenses ''EncryptionStuff - - -data AppOption = AppDontPostOpt - | AppDebugOpt - | AppTraceOpt - | AppReplicaOpt - deriving stock (Eq,Ord,Show,Data,Generic) - -data AppEnv = - AppEnv - { _appOpts :: [AppOption] - , _appConf :: [Syntax C] - , _appRefChan :: RChan - , _appDb :: DBPipeEnv - , _appWorkDir :: FilePath - , _appRpcSock :: FilePath - , _appEnc :: IORef (Maybe EncryptionStuff) - } - -makeLenses ''AppEnv - - -newtype ShareCLI m a = ShareCLI { fromShareCLI :: ReaderT AppEnv m a } - deriving newtype - ( Applicative - , Functor - , Monad - , MonadIO - , MonadUnliftIO - , MonadReader AppEnv - ) - -type AppPerks m = MonadUnliftIO m - -instance (Monad m) => HasConf (ShareCLI m) where - getConf = asks (view appConf) - -instance Monad m => HasConf (ContT a (ShareCLI m)) where - getConf = lift getConf - - --- instance FromField HashRef - - diff --git a/hbs2-share/src/HBS2/Share/Config.hs b/hbs2-share/src/HBS2/Share/Config.hs deleted file mode 100644 index 1a13bd8d..00000000 --- a/hbs2-share/src/HBS2/Share/Config.hs +++ /dev/null @@ -1,109 +0,0 @@ -module HBS2.Share.Config - ( module Data.Config.Suckless.KeyValue - , appName - , confDirName - , getWorkingDir - , getLocalConfigDir' - , getLocalConfigDir - , getLocalStatePath - , getLocalConfigDir' - , getLocalConfigFile' - , getLocalConfigFile - , readConfig - , IgnoreOpt - , RefChanOpt - , RpcUnixOpt - , SigilPathOpt - ) where - -import HBS2.Prelude.Plated -import HBS2.OrDie - -import HBS2.Share.App.Types - -import Data.Config.Suckless -import Data.Config.Suckless.KeyValue - -import System.Directory -import System.FilePath -import Data.Either -import Data.Set (Set) -import UnliftIO - - -data IgnoreOpt - -data RefChanOpt - -data RpcUnixOpt - -data SigilPathOpt - -instance HasCfgKey IgnoreOpt (Set String) where - key = "ignore" - -instance HasCfgKey RefChanOpt (Maybe RChan) where - key = "refchan" - -instance HasCfgKey RpcUnixOpt (Maybe String) where - key = "rpc.unix" - -instance HasCfgKey SigilPathOpt (Maybe String) where - key = "sigil" - -appName :: FilePath -appName = "hbs2-share" - -confDirName :: FilePath -confDirName = "." 
<> appName
-
-getWorkingDir :: MonadUnliftIO m => m FilePath
-getWorkingDir = getLocalConfigDir <&> takeDirectory
-
-getLocalConfigDir' :: MonadIO m => m FilePath
-getLocalConfigDir' = pure confDirName
-
-
-getLocalConfigDir :: MonadIO m => m FilePath
-getLocalConfigDir = findLocalConfDir confDirName
-                      >>= orThrowUser "config not found"
-
-getLocalConfigFile' :: MonadIO m => m FilePath
-getLocalConfigFile' = getLocalConfigDir' <&> (</> "config")
-
-getLocalConfigFile :: MonadIO m => m FilePath
-getLocalConfigFile = do
-  dir <- findLocalConfDir confDirName
-           >>= orThrowUser "config not found"
-  pure $ dir </> "config"
-
-getLocalStatePath :: MonadIO m => m FilePath
-getLocalStatePath = do
-  path <- findLocalConfDir confDirName
-            >>= orThrowUser "config not found"
-  pure ( path </> "state.db" )
-
-readConfig :: MonadIO m => m [Syntax C]
-readConfig = do
-  liftIO $ try @_ @IOError (getLocalConfigFile >>= readFile)
-    <&> fromRight ""
-    <&> parseTop
-    <&> fromRight mempty
-
-
-findLocalConfDir :: MonadIO m => FilePath -> m (Maybe FilePath)
-findLocalConfDir filename = liftIO $ do
-  homeDir <- getHomeDirectory
-  currentDir <- getCurrentDirectory
-  findRecursively (</> filename) currentDir homeDir
-  where
-    findRecursively _ currentDir homeDir
-      | currentDir == homeDir = return Nothing
-      | otherwise = do
-          let searchDir = currentDir </> filename
-          dirExists <- doesDirectoryExist searchDir
-          if dirExists
-            then return $ Just searchDir
-            else findRecursively (</> filename) (takeDirectory currentDir) homeDir
-
-
diff --git a/hbs2-share/src/HBS2/Share/Files.hs b/hbs2-share/src/HBS2/Share/Files.hs
deleted file mode 100644
index 0cd61827..00000000
--- a/hbs2-share/src/HBS2/Share/Files.hs
+++ /dev/null
@@ -1,33 +0,0 @@
-module HBS2.Share.Files where
-
-import HBS2.Prelude.Plated
-
-import System.Directory
-import System.FilePath
-import Data.List qualified as List
-import System.FilePattern
-import Data.Function
-import UnliftIO
-
-
-listFiles :: MonadUnliftIO m => [FilePattern] -> FilePath -> (FilePath -> m ()) -> m ()
-listFiles ignore dir action = go dir
-  where
-    matches p f = or [ i ?== f | i <- p ]
-
-    go fn = do
-
-      let skip = or [ i ?== fn | i <- ignore ]
-
-      unless skip do
-        isF <- liftIO $ doesFileExist fn
-        if isF then do
-          action fn
-        else do
-          isD <- liftIO $ doesDirectoryExist fn
-          when isD do
-            content <- liftIO $ listDirectory fn
-            forConcurrently_ [ fn </> x | x <- content, not (matches ignore x) ] $ \e -> do
-              go e
-
-
diff --git a/hbs2-share/src/HBS2/Share/Keys.hs b/hbs2-share/src/HBS2/Share/Keys.hs
deleted file mode 100644
index d6183b5b..00000000
--- a/hbs2-share/src/HBS2/Share/Keys.hs
+++ /dev/null
@@ -1,14 +0,0 @@
-module HBS2.Share.Keys where
-
-import HBS2.Prelude.Plated
-import HBS2.Hash
-import HBS2.Data.Types.Refs
-import HBS2.Net.Proto.Types
-
-type GK0 s = GroupKey 'Symm s
-
-newtype GK0Key = GK0Key HashRef
-  deriving stock (Generic,Data)
-  deriving newtype (Pretty, Hashed HbSync)
-
-
diff --git a/hbs2-share/src/HBS2/Share/LocalHash.hs b/hbs2-share/src/HBS2/Share/LocalHash.hs
deleted file mode 100644
index 652c9aa6..00000000
--- a/hbs2-share/src/HBS2/Share/LocalHash.hs
+++ /dev/null
@@ -1,38 +0,0 @@
-module HBS2.Share.LocalHash where
-
-import HBS2.Prelude.Plated
-import HBS2.Defaults (defBlockSize)
-import HBS2.Hash
-import HBS2.Data.Types.Refs
-import HBS2.Storage.Operations.ByteString
-
-import HBS2.Share.App.Types
-
-import Data.ByteArray.Hash (SipHash(..), SipKey(..))
-import Data.ByteArray.Hash qualified as BA
-import Streaming.Prelude qualified as S
-import Data.ByteString.Lazy qualified as
LBS -import Codec.Serialise - -newtype LocalHash = LocalHash { fromLocalHash :: Hash HbSync } - deriving stock (Eq,Ord,Data,Generic,Show) - -instance Serialise LocalHash - -instance Pretty LocalHash where - pretty (LocalHash h) = pretty h - -localHash :: MonadUnliftIO m => FilePath -> m LocalHash -localHash fp = do - liftIO $ withBinaryFile fp ReadMode $ \h -> do - lbs <- LBS.hGetContents h - readChunkedBS lbs defBlockSize - & S.map LBS.toStrict - & S.map (\z -> let (SipHash w) = BA.sipHash sk0 z in w) - & S.toList_ - <&> serialise - <&> LocalHash . hashObject @HbSync - where - sk0 = SipKey 5401424299739428297 3116460833428128256 - - diff --git a/hbs2-share/src/HBS2/Share/MetaData.hs b/hbs2-share/src/HBS2/Share/MetaData.hs deleted file mode 100644 index f64dc64d..00000000 --- a/hbs2-share/src/HBS2/Share/MetaData.hs +++ /dev/null @@ -1,71 +0,0 @@ -{-# Language TemplateHaskell #-} -module HBS2.Share.MetaData where - -import HBS2.Prelude.Plated -import HBS2.Data.Types.Refs - -import HBS2.Share.LocalHash - -import Data.HashMap.Strict qualified as HashMap -import Data.HashMap.Strict (HashMap) -import Codec.Serialise -import System.FilePath -import Data.List qualified as List -import Data.Maybe -import Data.Text qualified as Text -import Lens.Micro.Platform - -newtype PathEntry = PathEntry Text - deriving stock (Eq,Ord,Data,Generic,Show) - deriving newtype (Hashable,Pretty) - -newtype EntryKey = EntryKey { entryKey :: [PathEntry] } - deriving stock (Eq,Ord,Data,Generic,Show) - deriving newtype (Hashable,Semigroup,Monoid) - - -data FileEntry = - FileEntry - { _feKey :: EntryKey - , _feLocalHash :: LocalHash - , _feTree :: HashRef - } - deriving stock (Show,Data,Generic) - -makeLenses ''FileEntry - -instance IsString EntryKey where - fromString p = EntryKey [ PathEntry (fromString s) | s <- splitDirectories p ] - -instance Pretty EntryKey where - pretty (EntryKey ps) = pretty $ joinPath [ Text.unpack p | PathEntry p <- ps ] - - -toFilePath :: EntryKey -> FilePath -toFilePath = show . pretty - -data MetaData = - MetaData - { mdBase :: Maybe HashRef -- ^ reference to state TX - , mdGK1 :: HashMap HashRef HashRef - , mdFiles :: [FileEntry] - } - deriving stock (Show,Generic) - -instance Serialise PathEntry -instance Serialise EntryKey -instance Serialise FileEntry -instance Serialise MetaData - - -makeEntryKey :: EntryKey -> FilePath -> EntryKey -makeEntryKey (EntryKey prefix) path = EntryKey pnew - where - pp = entryKey $ fromString path - pnew = List.stripPrefix prefix pp & fromMaybe pp - -getDirs :: EntryKey -> [FilePath] -getDirs ek = fmap (joinPath . 
fmap unPathEntry) $ init $ tailSafe $ List.inits $ entryKey ek - where - unPathEntry (PathEntry p) = Text.unpack p - diff --git a/hbs2-share/src/HBS2/Share/State.hs b/hbs2-share/src/HBS2/Share/State.hs deleted file mode 100644 index f6bbcb97..00000000 --- a/hbs2-share/src/HBS2/Share/State.hs +++ /dev/null @@ -1,379 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} -{-# Language TemplateHaskell #-} -module HBS2.Share.State where - -import HBS2.Prelude -import HBS2.Hash -import HBS2.Share.App.Types -import HBS2.Share.Keys -import HBS2.Share.LocalHash -import HBS2.Share.MetaData - -import DBPipe.SQLite - -import Text.InterpolatedString.Perl6 (qc) -import Data.Maybe -import Data.Time (UTCTime) -import Data.Time.Clock.POSIX (posixSecondsToUTCTime) -import Data.List qualified as List - -data LocalFile = - LocalFile - { _localFileKey :: EntryKey - , _localFileModTime :: UTCTime - , _localFileHash :: LocalHash - } - deriving stock (Generic) - -makeLenses 'LocalFile - -data RemoteFile = - RemoteFile - { _remoteFileKey :: EntryKey - , _remoteFileTime :: UTCTime - , _remoteLocalHash :: LocalHash - , _remoteTree :: HashRef - } - deriving stock (Generic) - -makeLenses 'RemoteFile - -instance FromRow LocalFile - -instance FromRow RemoteFile - -class HasHash a where - toHash :: a -> Hash HbSync - -instance HasHash (Hash HbSync) where - toHash = id - -instance HasHash HashRef where - toHash = fromHashRef - -newtype HashVal = HashVal { fromHashVal :: HashRef } - deriving newtype (IsString) - -wrapHash :: HasHash hx => hx -> HashVal -wrapHash hx = HashVal (HashRef (toHash hx)) - -instance ToField GK0Key where - toField (GK0Key hs) = toField (show (pretty hs)) - -instance ToField HashVal where - toField (HashVal v) = toField (show (pretty v)) - -instance FromField HashVal where - fromField = fmap fromString . fromField @String - -instance ToField EntryKey where - toField p = toField (show $ pretty p) - -instance FromField EntryKey where - fromField = fmap (makeEntryKey mempty) . fromField @String - -instance ToField LocalHash where - toField (LocalHash l) = toField (HashVal (HashRef l)) - -instance FromField LocalHash where - fromField = fmap (LocalHash . fromHashRef . fromHashVal) . fromField @HashVal - -instance FromField HashRef where - fromField = fmap fromHashVal . fromField @HashVal - -populateState :: MonadUnliftIO m => DBPipeM m () -populateState = do - ddl [qc|create table if not exists gk0 - ( hash text not null - , gk0 text not null - , ts datetime default current_timestamp - , primary key (hash) - ) - |] - - ddl [qc|create table if not exists localfile - ( key text not null - , modtime datetime not null - , localhash text not null - , primary key (key) - ) - |] - - ddl [qc|create table if not exists localtree - ( key text not null - , tree text not null - , primary key (key) - ) - |] - - ddl [qc|create table if not exists accept - ( accept text not null - , propose text not null - , epoch int not null - , primary key (accept) - ) - |] - - ddl [qc|create table if not exists propose - ( propose text not null - , tx text not null - , primary key (propose) - ) - |] - - - ddl [qc|create table if not exists missed - ( hash text not null - , missed bool not null - , primary key (hash) - ) - |] - - createRemoteFileTable - createSeenTable - - commitAll - - -insertGK0 :: MonadUnliftIO m => GK0Key -> HashRef -> DBPipeM m () -insertGK0 gk0 val = do - insert [qc| - insert into gk0 (hash, gk0) values (?,?) 
- on conflict do update set gk0 = excluded.gk0 - |] (gk0, HashVal val) - - -selectGK0 :: MonadUnliftIO m => GK0Key -> DBPipeM m (Maybe HashRef) -selectGK0 gk0 = do - -- FIXME: time-hardcode - select [qc| - select gk0 from gk0 - where hash = ? and ts > datetime('now', '-30 days'); - limit 1 - |] (Only gk0) - <&> listToMaybe . fmap (fromHashVal . fromOnly) - -insertLocalFile :: MonadUnliftIO m - => EntryKey - -> UTCTime - -> LocalHash - -> DBPipeM m () - -insertLocalFile fkey modtime localhash = do - insert [qc| - insert into localfile (key, modtime, localhash) values (?,?,?) - on conflict (key) do update set modtime = excluded.modtime - , localhash = excluded.localhash - |] (fkey, modtime, localhash) - - -selectLocalFile :: MonadUnliftIO m => EntryKey -> DBPipeM m (Maybe LocalFile) -selectLocalFile fkey = do - select [qc| - select key - , modtime - , localhash - from localfile - where key = ?; - limit 1 - |] (Only fkey) - <&> listToMaybe - -selectLocalFiles :: MonadUnliftIO m => DBPipeM m [LocalFile] -selectLocalFiles = do - select_ [qc| - select key, modtime, localhash - from localfile - |] - -insertLocalTree :: forall hx m . (MonadUnliftIO m, HasHash hx) - => EntryKey - -> hx - -> DBPipeM m () -insertLocalTree fkey tree = do - insert [qc| - insert into localtree (key, tree) values (?,?) - on conflict (key) do update set tree = excluded.tree - |] (fkey, HashVal (HashRef (toHash tree))) - - -selectLocalTrees :: forall m . ( MonadUnliftIO m ) - => DBPipeM m [(EntryKey, LocalHash, HashRef)] -selectLocalTrees = do - select_ [qc| select t.key - , f.localhash - , t.tree - from localtree t join localfile f on t.key = f.key|] - <&> fmap (over _3 fromHashVal) - - -insertAccept :: forall hx m . ( MonadUnliftIO m, HasHash hx ) - => hx - -> hx - -> Integer - -> DBPipeM m () - -insertAccept k p t = do - insert [qc| - insert into accept (accept,propose,epoch) values (?,?,?) - on conflict (accept) do nothing - |] (HashVal (HashRef $ toHash k), HashVal (HashRef $ toHash p), t) - -insertPropose :: forall hx m . ( MonadUnliftIO m, HasHash hx ) - => hx - -> hx - -> DBPipeM m () - -insertPropose k tx = do - insert [qc| - insert into propose (propose,tx) values (?,?) - on conflict (propose) do nothing - |] (HashVal (HashRef $ toHash k), HashVal (HashRef $ toHash tx)) - - -selectProposes :: forall m . MonadUnliftIO m => DBPipeM m [(HashRef, Integer)] -selectProposes = do - let q = [qc| -WITH RankedAccept AS ( - SELECT a.propose, - a.epoch, - ROW_NUMBER() OVER (PARTITION BY a.propose ORDER BY a.epoch) AS rn, - COUNT(*) OVER (PARTITION BY a.propose) AS cnt - FROM accept a -), -T0 AS ( -SELECT p.propose, - p.tx, - cast(AVG(a.epoch) as int) AS epoch -FROM propose p -JOIN RankedAccept a ON p.propose = a.propose -WHERE a.rn IN ((a.cnt + 1) / 2, (a.cnt / 2) + 1) -GROUP BY p.propose, p.tx ) - -SELECT T0.tx, T0.epoch -FROM T0 -ORDER BY T0.epoch DESC|] - - select_ q <&> fmap (over _1 fromHashVal) - -selectMissed :: MonadUnliftIO m => HashRef -> DBPipeM m (Maybe Bool) -selectMissed hash = do - select [qc| - select missed from missed where hash = ? limit 1 - |] (Only (HashVal hash)) <&> fmap fromOnly . listToMaybe - -insertMissed :: MonadUnliftIO m => HashRef -> Bool -> DBPipeM m () -insertMissed hash miss = do - insert [qc| - insert into missed (hash,missed) values (?,?) - on conflict (hash) do update set missed = excluded.missed - |] (HashVal hash, miss) - -deleteMissed :: MonadUnliftIO m => HashRef -> DBPipeM m () -deleteMissed hash = do - insert [qc| - delete from missed where hash = ? 
- |] (Only (HashVal hash)) - - -createRemoteFileTable :: MonadUnliftIO m => DBPipeM m () -createRemoteFileTable = do - ddl [qc|create table if not exists remotefile - ( propose text not null - , key text not null - , localhash text not null - , tree text not null - , time datetime not null - , primary key (propose,key) - ) - |] - -insertRemoteFile :: ( MonadUnliftIO m - , Real epoch - , Fractional epoch - ) - => HashRef - -> epoch - -> MetaData - -> FileEntry - -> DBPipeM m () -insertRemoteFile px epoch _ fe = do - insert [qc| - insert into remotefile - ( propose - , key - , localhash - , tree - , time - ) - values (?,?,?,?,?) - on conflict (propose,key) - do update - set localhash = excluded.localhash - , tree = excluded.tree - , time = excluded.time - - |] ( HashVal px - , _feKey fe - , _feLocalHash fe - , HashVal (_feTree fe) - , posixSecondsToUTCTime $ realToFrac epoch - ) - -selectRemoteFiles :: (MonadUnliftIO m) - => HashRef - -> DBPipeM m [RemoteFile] -selectRemoteFiles px = do - select [qc| - select key - , time - , localhash - , tree - from remotefile where propose = ? - |] (Only (HashVal px)) - - -selectRemoteFile :: (MonadUnliftIO m) - => HashRef - -> EntryKey - -> DBPipeM m (Maybe RemoteFile) -selectRemoteFile px k = do - select [qc| - select key - , time - , localhash - , tree - from remotefile where propose = ? and key = ? - limit 1 - |] (HashVal px, k) <&> listToMaybe - - -createSeenTable :: MonadUnliftIO m => DBPipeM m () -createSeenTable = do - ddl [qc|create table if not exists seen - ( hash text not null - , primary key (hash) - ) - |] - - -insertSeen :: (MonadUnliftIO m, HasHash hx) - => hx - -> DBPipeM m () -insertSeen hx = do - insert [qc| - insert into seen (hash) - values (?) - on conflict (hash) - do nothing - |] (Only $ wrapHash hx) - -selectSeen :: (MonadUnliftIO m, HasHash hx) - => hx - -> DBPipeM m Bool -selectSeen hx = do - select [qc| - select 1 from seen where hash = ? limit 1 - |] (Only $ wrapHash hx) - <&> (maybe False fromOnly . listToMaybe) -