mirror of https://github.com/voidlizard/hbs2

commit da6bb6bcb4 ("0.24.1"), parent a3b5822243
@@ -0,0 +1 @@
./state.db
@@ -0,0 +1,5 @@
title: "hbs2 project repo"
author: "Dmitry Zuikov"
public: yes

Project description TBD
Makefile | 4

@@ -10,9 +10,9 @@ BIN_DIR := ./bin
BINS := \
	hbs2 \
	hbs2-peer \
	hbs2-reposync \
	hbs2-keyman \
	hbs2-git-reposync \
	hbs2-fixer \
	hbs2-git-subscribe \
	git-remote-hbs2 \
	git-hbs2 \
README.md | 44

@@ -1,3 +1,37 @@
- [ABOUT](#about){#toc-about}
- [Status update 2024-03-20](#status-update-2024-03-20){#toc-status-update-2024-03-20}
- [Status update 2024-03-17](#status-update-2024-03-17){#toc-status-update-2024-03-17}
- [What is it](#what-is-it){#toc-what-is-it}
- [Current status](#current-status){#toc-current-status}
- [HOWTO](#howto){#toc-howto}
- [How to install](#how-to-install){#toc-how-to-install}
- [How to generate peer's key?](#how-to-generate-peers-key){#toc-how-to-generate-peers-key}
- [How to run hbs2-peer](#how-to-run-hbs2-peer){#toc-how-to-run-hbs2-peer}
- [How to configure hbs2-peer](#how-to-configure-hbs2-peer){#toc-how-to-configure-hbs2-peer}
- [How to create a new own repo](#how-to-create-a-new-own-repo){#toc-how-to-create-a-new-own-repo}
- [How to launch a peer](#how-to-launch-a-peer){#toc-how-to-launch-a-peer}
- [How to save an encrypted file (TBD)](#how-to-save-an-encrypted-file-tbd){#toc-how-to-save-an-encrypted-file-tbd}
- [FAQ](#faq){#toc-faq}
- [Why DVCS are not actually distributed](#why-dvcs-are-not-actually-distributed){#toc-why-dvcs-are-not-actually-distributed}
- [Okay, if centralized services are bad, why are you here?](#okay-if-centralized-services-are-bad-why-are-you-here){#toc-okay-if-centralized-services-are-bad-why-are-you-here}
- [What platforms are supported yet?](#what-platforms-are-supported-yet){#toc-what-platforms-are-supported-yet}
- [What is a "reflog"](#what-is-a-reflog){#toc-what-is-a-reflog}
- [What is the fixme?](#what-is-the-fixme){#toc-what-is-the-fixme}
- [Contact](#contact){#toc-contact}
- [Download](#download){#toc-download}
- [Support](#support){#toc-support}

- [ABOUT](#about)
- [What is it](#what-is-it)
- [Current status](#current-status)
@@ -34,6 +68,15 @@ This solution facilitates decentralized P2P git repository
synchronization with automatic peer discovery, requiring no server or
service.

## Status update 2024-03-20

hbs2-git 0.24.1 is in master. Status => beta. The old hbs2-git is
discontinued; use the new one.

Data structures are incompatible between the old and the new versions;
however, the migration is safe and all references remain the same (only
the type of the references has changed).

## Status update 2024-03-17

We have been using hbs2 and hbs2-git for approximately 13 months.

@@ -171,7 +214,6 @@ Typically hbs2-peer config is located at

$HOME/.config/hbs2-peer/config

; ip/port for UDP
listen "0.0.0.0:7351"
@@ -1,15 +1,33 @@
all: hbs2-git-problem hbs2-git-new-repo
REV:=$(shell git rev-parse --short HEAD)

define make_target
$(basename $(1))-$(REV)$(suffix $(1))
endef


all: hbs2-git-problem hbs2-git-new-repo hbs2-git-doc

.PHONY: all clean

%.pdf: %.tex
	xelatex $<
	xelatex $<
	cp $@ $(call make_target,$@)

hbs2-git-problem: hbs2-git-problem.pdf

hbs2-git-new-repo: hbs2-git-new-repo.pdf

hbs2-git-doc: hbs2-git-doc-0.24.1.pdf

publish-hbs2-git-doc: hbs2-git-doc-0.24.1.pdf
	$(eval TARGET := $(call make_target,$<))
	$(eval HASH := $(shell hbs2 metadata create --hash --auto $(TARGET)))
	@echo Updating $(HBS2GITDOCLWW) $(HASH)
	hbs2-peer lwwref update -v $(HASH) $(HBS2GITDOCLWW)

publish: publish-hbs2-git-doc

clean:
	rm -f *.aux *.log *.nav *.out *.snm *.vrb *.toc *.pdf
File diff suppressed because it is too large
@@ -0,0 +1,5 @@
FIXME: git-fetch-push-slow
It seems slow because of scanning the merkle trees for
missed blocks.

We should add some kind of cache/filter to speed this up.
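A possible shape for that cache, sketched here for illustration only (it is not part of this commit): remember hashes whose merkle trees have already been confirmed complete and skip re-walking them on the next fetch/push. It reuses the cache package (Data.Cache) that the code base already depends on; the module and function names below are made up.

{-# LANGUAGE ImportQualifiedPost #-}
module MissedBlockCache where

import Data.Cache (Cache)
import Data.Cache qualified as Cache
import Data.Hashable (Hashable)
import System.Clock (TimeSpec(..))

-- Cache of hashes whose merkle trees were already found complete.
newtype MissedBlockCache h = MissedBlockCache (Cache h ())

newMissedBlockCache :: IO (MissedBlockCache h)
newMissedBlockCache = MissedBlockCache <$> Cache.newCache (Just (TimeSpec 600 0)) -- 10 min TTL

-- Run the expensive scan only if this hash was not verified recently,
-- then remember it so the next run can skip the walk.
scanUnlessCached :: (Eq h, Hashable h) => MissedBlockCache h -> h -> IO () -> IO ()
scanUnlessCached (MissedBlockCache c) h scan = do
  seen <- Cache.lookup c h
  case seen of
    Just () -> pure ()
    Nothing -> scan >> Cache.insert c h ()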
@@ -33,10 +33,10 @@ outputs = { self, nixpkgs, haskell-flake-utils, ... }@inputs:
"hbs2-core"
"hbs2-storage-simple"
"hbs2-git"
"hbs2-git-reposync"
"hbs2-qblf"
"hbs2-keyman"
"hbs2-share"
"hbs2-fixer"
];
in
haskell-flake-utils.lib.simpleCabalProject2flake {

@@ -60,7 +60,7 @@ outputs = { self, nixpkgs, haskell-flake-utils, ... }@inputs:
"hbs2-keyman" = "./hbs2-keyman";
"hbs2-share" = "./hbs2-share";
"hbs2-git" = "./hbs2-git";
"hbs2-git-reposync" = "./hbs2-git-reposync";
"hbs2-fixer" = "./hbs2-fixer";
};

hpPreOverrides = {pkgs, ...}: final: prev: with pkgs; {

@@ -101,6 +101,7 @@ outputs = { self, nixpkgs, haskell-flake-utils, ... }@inputs:
hoogle
htags
text-icu
magic
pkgs.icu72
pkgs.openssl
weeder

@@ -117,8 +118,6 @@ outputs = { self, nixpkgs, haskell-flake-utils, ... }@inputs:
'';

};

};


}
@@ -1,6 +1,6 @@
cabal-version: 3.0
name: hbs2-core
version: 0.1.1.0
version: 0.24.1.0
-- synopsis:
-- description:
license: BSD-3-Clause
@@ -405,7 +405,7 @@ runPeerM :: forall e m . ( MonadIO m
runPeerM env f = do

  let de = view envDeferred env
  as <- liftIO $ replicateM 8 $ async $ runPipeline de
  as <- liftIO $ replicateM 16 $ async $ runPipeline de

  sw <- liftIO $ async $ forever $ withPeerM env $ do
    pause defSweepTimeout
@@ -1,14 +1,23 @@
module HBS2.Base58 where

import Data.ByteString.Base58 (encodeBase58, bitcoinAlphabet, decodeBase58,Alphabet(..))
import Data.ByteString qualified as BS
import Data.ByteString.Char8 qualified as BS8
import Data.ByteString.Char8 (ByteString)
import Data.ByteString.Lazy.Char8 qualified as LBS
import Data.ByteString.Lazy.Char8 qualified as LBS8
import Data.ByteString.Lazy qualified as LBS
import Data.Word
import Data.Char (ord)
import Numeric

import Prettyprinter

newtype AsBase58 a = AsBase58 { unAsBase58 :: a }

newtype AsHex a = AsHex { unAsHex :: a }

newtype AsHexSparse a = AsHexSparse { unAsHexSparse :: a }

alphabet :: Alphabet
alphabet = bitcoinAlphabet

@@ -32,3 +41,29 @@ instance Pretty (AsBase58 LBS.ByteString) where
instance Show (AsBase58 ByteString) where
  show (AsBase58 bs) = BS8.unpack $ toBase58 bs


byteToHex :: Word8 -> String
byteToHex byte = pad $ showHex byte ""
  where pad s = if length s < 2 then '0':s else s

byteStringToHex :: BS.ByteString -> String
byteStringToHex bs = concatMap (byteToHex . fromIntegral) (BS.unpack bs)

instance Pretty (AsHexSparse ByteString) where
  pretty (AsHexSparse bs) = pretty $ unwords $ byteToHex <$> BS.unpack bs

instance Pretty (AsHexSparse LBS.ByteString) where
  pretty (AsHexSparse bs) = pretty $ unwords $ byteToHex <$> LBS.unpack bs

instance Pretty (AsHex ByteString) where
  pretty (AsHex bs) = pretty $ byteStringToHex bs

instance Pretty (AsHex LBS.ByteString) where
  pretty (AsHex bs) = pretty $ byteStringToHex (LBS.toStrict bs)

instance Show (AsHex ByteString) where
  show (AsHex bs) = byteStringToHex bs

instance Show (AsHex LBS.ByteString) where
  show (AsHex bs) = byteStringToHex (LBS.toStrict bs)
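A small usage sketch for the hex/base58 wrappers added above (illustrative only, not part of the commit; assumes the module is importable as shown):

{-# LANGUAGE ImportQualifiedPost #-}
module HexDemo where

import HBS2.Base58
import Data.ByteString.Char8 qualified as BS8
import Prettyprinter (pretty)

main :: IO ()
main = do
  let bs = BS8.pack "hbs2"
  putStrLn $ show (AsBase58 bs)     -- base58 rendering via the Show instance
  print $ pretty (AsHex bs)         -- packed hex: 68627332
  print $ pretty (AsHexSparse bs)   -- byte-per-byte hex: 68 62 73 32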
@@ -1,6 +1,9 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# Language UndecidableInstances #-}
module HBS2.Net.Auth.Schema where
module HBS2.Net.Auth.Schema
  ( module HBS2.Net.Auth.Schema
  , module HBS2.Net.Proto.Types
  ) where

import HBS2.Prelude
import HBS2.OrDie
@@ -23,6 +23,7 @@ module HBS2.Prelude
  , Text.Text
  , (&), (<&>), for_, for
  , HasErrorStatus(..), ErrorStatus(..), SomeError(..), WithSomeError(..), mayE, someE
  , ByFirst(..)
  ) where

import HBS2.Clock

@@ -153,5 +154,14 @@ asyncLinked m = do
  pure l


data ByFirst a b = ByFirst a b

instance Eq a => Eq (ByFirst a b) where
  (==) (ByFirst a _) (ByFirst b _) = a == b

instance Hashable a => Hashable (ByFirst a b) where
  hashWithSalt s (ByFirst a _) = hashWithSalt s a


-- asyncLinked :: forall m . MonadUnliftIO m =>
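A tiny illustration (hypothetical, not in the commit) of why Eq/Hashable for ByFirst look only at the first component: a HashMap keyed by ByFirst keeps one entry per first component, whatever payload the second carries.

{-# LANGUAGE ImportQualifiedPost #-}
module ByFirstDemo where

import HBS2.Prelude (ByFirst(..))
import Data.HashMap.Strict qualified as HM

-- One entry per first component; the payload does not affect the key.
demo :: HM.HashMap (ByFirst String Int) Char
demo = HM.fromList
  [ (ByFirst "ref-a" 1, 'x')
  , (ByFirst "ref-a" 2, 'y')   -- same key as the line above, replaces it
  , (ByFirst "ref-b" 3, 'z')
  ]
-- HM.size demo == 2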
@@ -7,9 +7,11 @@ module HBS2.System.Dir

import System.FilePath
import System.FilePattern
import System.Directory as D
import UnliftIO hiding (try)

import System.Directory qualified as D
import Data.ByteString.Lazy qualified as LBS
import UnliftIO
import Control.Exception qualified as E
import Control.Monad

data MkDirOpt = MkDirOptNone

@@ -27,7 +29,32 @@ instance ToFilePath FilePath where

mkdir :: (MonadIO m, ToFilePath a) => a -> m ()
mkdir a = do
  liftIO $ createDirectoryIfMissing True (toFilePath a)
  void $ liftIO $ E.try @SomeException (D.createDirectoryIfMissing True (toFilePath a))

data TouchOpt = TouchEasy | TouchHard
  deriving stock (Eq,Ord,Show)

class ToFilePath a => HasTouchOpts a where
  touchOpts :: a -> [TouchOpt]

instance HasTouchOpts FilePath where
  touchOpts = const [TouchEasy]

touch :: (MonadIO m, HasTouchOpts a) => a -> m ()
touch what = do
  here <- doesPathExist fn
  dir <- doesDirectoryExist fn

  when (not here || hard) do
    mkdir (takeDirectory fn)
    liftIO $ print (takeDirectory fn)
    unless dir do
      liftIO $ print fn
      liftIO $ LBS.appendFile fn mempty

  where
    hard = TouchHard `elem` touchOpts what
    fn = toFilePath what

pwd :: MonadIO m => m FilePath
pwd = liftIO D.getCurrentDirectory
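For reference, a minimal use of the new helpers (a sketch only, assuming the module exports them; the paths are made up):

module DirDemo where

import HBS2.System.Dir

main :: IO ()
main = do
  mkdir "/tmp/hbs2-demo"            -- failures are swallowed, as in mkdir above
  touch "/tmp/hbs2-demo/state.db"   -- the FilePath instance uses TouchEasy
  pwd >>= putStrLn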
@ -0,0 +1,705 @@
|
|||
{-# OPTIONS_GHC -fno-warn-orphans #-}
|
||||
{-# LANGUAGE TemplateHaskell #-}
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
{-# LANGUAGE UndecidableInstances #-}
|
||||
module Main where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Actors.Peer
|
||||
import HBS2.Base58
|
||||
import HBS2.OrDie
|
||||
import HBS2.Hash
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.Polling
|
||||
import HBS2.Misc.PrettyStuff
|
||||
import HBS2.System.Dir
|
||||
import HBS2.System.Logger.Simple.ANSI hiding (info)
|
||||
import HBS2.Net.Messaging.Unix
|
||||
|
||||
import HBS2.Git.Data.LWWBlock
|
||||
|
||||
import HBS2.Net.Proto.Notify
|
||||
import HBS2.Net.Proto.Service
|
||||
import HBS2.Peer.Notify
|
||||
import HBS2.Peer.RPC.API.Peer
|
||||
import HBS2.Peer.RPC.API.RefLog
|
||||
import HBS2.Peer.RPC.API.LWWRef
|
||||
import HBS2.Peer.RPC.API.Storage
|
||||
import HBS2.Peer.RPC.Client.StorageClient
|
||||
|
||||
import HBS2.Peer.CLI.Detect
|
||||
import HBS2.Peer.Proto.RefLog
|
||||
|
||||
import Data.Config.Suckless
|
||||
|
||||
import Data.Time.Clock
|
||||
import Data.Coerce
|
||||
import Control.Monad.Reader
|
||||
import Lens.Micro.Platform
|
||||
import System.Directory
|
||||
import Options.Applicative
|
||||
import Data.Maybe
|
||||
import Data.Either
|
||||
import Data.HashMap.Strict (HashMap)
|
||||
import Data.HashMap.Strict qualified as HM
|
||||
import Control.Monad.Trans.Cont
|
||||
import Control.Monad.Trans.Maybe
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Data.Text qualified as Text
|
||||
import Data.Hashable
|
||||
import Control.Exception qualified as E
|
||||
import System.Process.Typed
|
||||
import System.Environment qualified as Env
|
||||
import System.Exit qualified as Exit
|
||||
import Data.Cache qualified as Cache
|
||||
import Data.Cache (Cache)
|
||||
|
||||
{- HLINT ignore "Functor law" -}
|
||||
|
||||
|
||||
type Config = [Syntax C]
|
||||
|
||||
|
||||
type RLWW = LWWRefKey HBS2Basic
|
||||
type RRefLog = RefLogKey HBS2Basic
|
||||
|
||||
newtype Watcher =
|
||||
Watcher [Syntax C]
|
||||
deriving newtype (Semigroup,Monoid)
|
||||
|
||||
data Ref =
|
||||
RefRefLog RRefLog
|
||||
| RefLWW RLWW
|
||||
deriving stock (Eq,Generic)
|
||||
|
||||
instance Pretty Ref where
|
||||
pretty (RefRefLog r) = parens $ "reflog" <+> dquotes (pretty r)
|
||||
pretty (RefLWW r) = parens $ "lwwref" <+> dquotes (pretty r)
|
||||
|
||||
newtype AnyPolledRef =
|
||||
AnyPolledRef (PubKey 'Sign HBS2Basic)
|
||||
deriving (Eq,Generic)
|
||||
|
||||
instance Hashable AnyPolledRef
|
||||
|
||||
-- FIXME: move-to-suckless-conf
|
||||
deriving newtype instance Hashable Id
|
||||
|
||||
instance Pretty AnyPolledRef where
|
||||
pretty (AnyPolledRef r) = pretty (AsBase58 r)
|
||||
-- deriving newtype instance Pretty (PubKey 'Sign HBS2Basic) => Pretty AnyPolledRef
|
||||
|
||||
instance FromStringMaybe AnyPolledRef where
|
||||
fromStringMay = fmap AnyPolledRef . fromStringMay
|
||||
|
||||
newtype PolledRef =
|
||||
PolledRef (Ref, NominalDiffTime)
|
||||
deriving stock (Eq,Generic)
|
||||
deriving newtype (Pretty)
|
||||
|
||||
instance Hashable Ref
|
||||
|
||||
instance Hashable PolledRef where
|
||||
hashWithSalt salt (PolledRef (r,_)) = hashWithSalt salt r
|
||||
|
||||
data FixerEnv = FixerEnv
|
||||
{ _configFile :: Maybe FilePath
|
||||
, _lwwAPI :: ServiceCaller LWWRefAPI UNIX
|
||||
, _refLogAPI :: ServiceCaller RefLogAPI UNIX
|
||||
, _refLogSink :: NotifySink (RefLogEvents L4Proto) UNIX
|
||||
, _peerAPI :: ServiceCaller PeerAPI UNIX
|
||||
, _sto :: AnyStorage
|
||||
, _config :: TVar Config
|
||||
, _configPoll :: TVar Int
|
||||
, _watchers :: TVar (HashMap PolledRef Watcher)
|
||||
, _listeners :: TVar (HashMap RRefLog (Async ()))
|
||||
, _result :: TVar (HashMap Ref (Maybe HashRef, Maybe HashRef))
|
||||
, _runNum :: TVar Int
|
||||
, _locals :: TVar (HashMap Id (Syntax C))
|
||||
, _pipeline :: TQueue (IO ())
|
||||
}
|
||||
|
||||
makeLenses ''FixerEnv
|
||||
|
||||
|
||||
newtype FixerM m a = FixerM { runFixerM :: ReaderT FixerEnv m a }
|
||||
deriving newtype (Applicative, Functor, Monad, MonadIO, MonadReader FixerEnv, MonadUnliftIO)
|
||||
|
||||
instance MonadIO m => HasConf (FixerM m) where
|
||||
getConf = asks _config >>= readTVarIO
|
||||
|
||||
|
||||
debugPrefix = toStdout . logPrefix "[debug] "
|
||||
|
||||
readConf :: MonadIO m => FilePath -> m [Syntax MegaParsec]
|
||||
readConf fn = liftIO (readFile fn) <&> parseTop <&> fromRight mempty
|
||||
|
||||
withConfig :: MonadUnliftIO m => Maybe FilePath -> FixerM m () -> FixerM m ()
|
||||
withConfig cfgPath m = do
|
||||
defConfDir <- liftIO $ getXdgDirectory XdgConfig "hbs2-fixer"
|
||||
|
||||
let configPath = fromMaybe (defConfDir </> "config") cfgPath
|
||||
unless (isJust cfgPath) do
|
||||
debug $ pretty configPath
|
||||
touch configPath
|
||||
|
||||
syn <- readConf configPath
|
||||
tsyn <- newTVarIO syn
|
||||
|
||||
local (set config tsyn . set configFile (Just configPath)) (void m)
|
||||
|
||||
withApp :: Maybe FilePath -> FixerM IO () -> IO ()
|
||||
withApp cfgPath action = do
|
||||
setLogging @DEBUG debugPrefix
|
||||
setLogging @INFO defLog
|
||||
setLogging @ERROR errorPrefix
|
||||
setLogging @WARN warnPrefix
|
||||
setLogging @NOTICE noticePrefix
|
||||
|
||||
soname <- detectRPC
|
||||
`orDie` "can't detect RPC"
|
||||
|
||||
flip runContT pure do
|
||||
|
||||
client <- lift $ race (pause @'Seconds 1) (newMessagingUnix False 1.0 soname)
|
||||
>>= orThrowUser ("can't connect to" <+> pretty soname)
|
||||
|
||||
void $ ContT $ withAsync $ runMessagingUnix client
|
||||
|
||||
peerAPI <- makeServiceCaller @PeerAPI (fromString soname)
|
||||
refLogAPI <- makeServiceCaller @RefLogAPI (fromString soname)
|
||||
storageAPI <- makeServiceCaller @StorageAPI (fromString soname)
|
||||
lwwAPI <- makeServiceCaller @LWWRefAPI (fromString soname)
|
||||
|
||||
let endpoints = [ Endpoint @UNIX peerAPI
|
||||
, Endpoint @UNIX refLogAPI
|
||||
, Endpoint @UNIX lwwAPI
|
||||
, Endpoint @UNIX storageAPI
|
||||
]
|
||||
|
||||
void $ ContT $ withAsync $ liftIO $ runReaderT (runServiceClientMulti endpoints) client
|
||||
|
||||
let o = [MUWatchdog 20, MUDontRetry]
|
||||
clientN <- newMessagingUnixOpts o False 1.0 soname
|
||||
|
||||
void $ ContT $ withAsync $ runMessagingUnix clientN
|
||||
|
||||
sink <- newNotifySink
|
||||
|
||||
void $ ContT $ withAsync $ flip runReaderT clientN $ do
|
||||
debug $ red "notify restarted!"
|
||||
runNotifyWorkerClient sink
|
||||
|
||||
void $ ContT $ withAsync $ flip runReaderT clientN $ do
|
||||
runProto @UNIX
|
||||
[ makeResponse (makeNotifyClient @(RefLogEvents L4Proto) sink)
|
||||
]
|
||||
|
||||
env <- FixerEnv Nothing
|
||||
lwwAPI
|
||||
refLogAPI
|
||||
sink
|
||||
peerAPI
|
||||
(AnyStorage (StorageClient storageAPI))
|
||||
<$> newTVarIO mempty
|
||||
<*> newTVarIO 30
|
||||
<*> newTVarIO mempty
|
||||
<*> newTVarIO mempty
|
||||
<*> newTVarIO mempty
|
||||
<*> newTVarIO 0
|
||||
<*> newTVarIO mempty
|
||||
<*> newTQueueIO
|
||||
|
||||
lift $ runReaderT (runFixerM $ withConfig cfgPath action) env
|
||||
`finally` do
|
||||
setLoggingOff @DEBUG
|
||||
setLoggingOff @INFO
|
||||
setLoggingOff @ERROR
|
||||
setLoggingOff @WARN
|
||||
setLoggingOff @NOTICE
|
||||
|
||||
where
|
||||
errorPrefix = toStdout . logPrefix "[error] "
|
||||
warnPrefix = toStdout . logPrefix "[warn] "
|
||||
noticePrefix = toStdout
|
||||
|
||||
|
||||
data ConfWatch =
|
||||
ConfWatch
|
||||
| ConfRead
|
||||
| ConfUpdate [Syntax C]
|
||||
|
||||
mainLoop :: FixerM IO ()
|
||||
mainLoop = forever $ do
|
||||
debug "hbs2-fixer. do stuff since 2024"
|
||||
conf <- getConf
|
||||
-- debug $ line <> vcat (fmap pretty conf)
|
||||
|
||||
flip runContT pure do
|
||||
|
||||
lift $ updateFromConfig conf
|
||||
|
||||
void $ ContT $ withAsync $ do
|
||||
cfg <- asks _configFile `orDie` "config file not specified"
|
||||
|
||||
flip fix ConfRead $ \next -> \case
|
||||
ConfRead -> do
|
||||
debug $ yellow "read config" <+> pretty cfg
|
||||
newConf <- readConf cfg
|
||||
oldConf <- getConf
|
||||
|
||||
let a = hashObject @HbSync (LBS.pack $ show $ pretty newConf)
|
||||
let b = hashObject @HbSync (LBS.pack $ show $ pretty oldConf)
|
||||
|
||||
let changed = a /= b
|
||||
|
||||
if not changed then
|
||||
next ConfWatch
|
||||
else
|
||||
next (ConfUpdate newConf)
|
||||
|
||||
ConfUpdate new -> do
|
||||
debug $ yellow "read config / update state"
|
||||
updateFromConfig new
|
||||
next ConfWatch
|
||||
|
||||
ConfWatch{} -> do
|
||||
w <- asks _configPoll >>= readTVarIO
|
||||
pause (TimeoutSec (realToFrac w))
|
||||
next ConfRead
|
||||
|
||||
-- poll reflogs
|
||||
void $ ContT $ withAsync do
|
||||
|
||||
let w = asks _watchers
|
||||
>>= readTVarIO
|
||||
<&> HM.toList
|
||||
<&> \wtf -> [ (ByFirst r wa, t) | (PolledRef (r,t), wa) <- wtf ]
|
||||
|
||||
polling (Polling 1 1) w $ \case
|
||||
ByFirst ref wa -> do
|
||||
new <- getRefRpc ref
|
||||
re <- asks _result
|
||||
old <- readTVarIO re
|
||||
<&> (snd <=< HM.lookup ref)
|
||||
|
||||
when (new /= old) do
|
||||
atomically $ modifyTVar re (HM.insert ref (old, new))
|
||||
-- bindId
|
||||
forM_ new (runWatcher wa ref)
|
||||
|
||||
pure ()
|
||||
|
||||
|
||||
jobs <- asks _pipeline
|
||||
void $ ContT $ withAsync $ forever do
|
||||
liftIO $ E.try @SomeException (join $ atomically $ readTQueue jobs)
|
||||
>>= \case
|
||||
Left e -> err (viaShow e)
|
||||
_ -> pure ()
|
||||
|
||||
forever $ pause @'Seconds 60
|
||||
|
||||
oneSec :: MonadUnliftIO m => m b -> m (Either () b)
|
||||
oneSec = race (pause @'Seconds 1)
|
||||
|
||||
|
||||
fromStrLitMay :: forall s c . FromStringMaybe s => Syntax c -> Maybe s
|
||||
fromStrLitMay = \case
|
||||
LitStrVal s -> fromStringMay (Text.unpack s)
|
||||
_ -> Nothing
|
||||
|
||||
pattern PTop :: forall {c}. Id -> [Syntax c] -> Syntax c
|
||||
pattern PTop ctor rest <- ListVal (SymbolVal ctor : rest)
|
||||
|
||||
pattern PPolledRef :: forall {c}. Id -> AnyPolledRef -> Syntax c
|
||||
pattern PPolledRef t r <- ListVal [ SymbolVal t, fromStrLitMay @AnyPolledRef -> Just r ]
|
||||
|
||||
pattern PWatchRef :: forall {c}. Integer -> Id -> AnyPolledRef -> [Syntax c] -> [Syntax c]
|
||||
pattern PWatchRef n t r w <- (LitIntVal n : PPolledRef t r : w)
|
||||
|
||||
pattern PListenRef :: forall {c}. Id -> AnyPolledRef -> [Syntax c] -> [Syntax c]
|
||||
pattern PListenRef t r w <- (PPolledRef t r : w)
|
||||
|
||||
-- pattern PDisplay :: Syntax c
|
||||
pattern PDisplay :: forall {c}. Syntax c -> Syntax c
|
||||
pattern PDisplay w <- ListVal [ SymbolVal "display", w ]
|
||||
|
||||
pattern PApply :: Id -> [Syntax C] -> Syntax C
|
||||
pattern PApply f a <- ListVal ( SymbolVal f : a )
|
||||
|
||||
fetchRef :: forall m . MonadIO m => Ref -> FixerM m ()
|
||||
fetchRef r = do
|
||||
case r of
|
||||
RefRefLog ref -> do
|
||||
api <- asks _refLogAPI
|
||||
void $ liftIO $ oneSec $ void $ callService @RpcRefLogFetch api (fromRefLogKey ref)
|
||||
RefLWW ref -> do
|
||||
api <- asks _lwwAPI
|
||||
void $ liftIO $ oneSec $ void $ callService @RpcLWWRefFetch api ref
|
||||
|
||||
|
||||
getRefRpc :: forall m . MonadIO m => Ref -> FixerM m (Maybe HashRef)
|
||||
getRefRpc r = do
|
||||
case r of
|
||||
RefRefLog ref -> do
|
||||
api <- asks _refLogAPI
|
||||
liftIO (oneSec $ callService @RpcRefLogGet api (fromRefLogKey ref))
|
||||
>>= \case
|
||||
Right (Right x) -> pure x
|
||||
_ -> pure Nothing
|
||||
|
||||
RefLWW ref -> do
|
||||
api <- asks _lwwAPI
|
||||
liftIO (oneSec $ callService @RpcLWWRefGet api ref) >>= \case
|
||||
Right (Right x) -> pure (lwwValue <$> x)
|
||||
_ -> pure Nothing
|
||||
|
||||
subscribeRef :: forall m . MonadIO m => Integer -> Ref -> FixerM m ()
|
||||
subscribeRef n r = do
|
||||
debug $ "subscribeRef" <+> pretty n <+> pretty r
|
||||
let (puk,t) = case r of
|
||||
RefRefLog k -> (coerce k, "reflog")
|
||||
RefLWW k -> (coerce k, "lwwref")
|
||||
|
||||
let minutes = fromIntegral $ max 1 (n `div` 60)
|
||||
|
||||
api <- asks _peerAPI
|
||||
void $ liftIO $ oneSec $ callService @RpcPollAdd api (puk, t, minutes)
|
||||
|
||||
asRef :: Id -> AnyPolledRef -> Maybe Ref
|
||||
asRef t r = case t of
|
||||
"lwwref" -> Just $ RefLWW (coerce r)
|
||||
"reflog" -> Just $ RefRefLog (coerce r)
|
||||
_ -> Nothing
|
||||
|
||||
|
||||
runWatcher :: forall m . MonadUnliftIO m => Watcher -> Ref -> HashRef -> FixerM m ()
|
||||
runWatcher (Watcher code) ref new = do
|
||||
debug $ yellow "CHANGED" <+> pretty ref <+> pretty new
|
||||
|
||||
sto <- asks _sto
|
||||
|
||||
newCode <- flip transformBiM code $ \case
|
||||
PApply "lwwref:get-hbs2-git-reflog" _ -> do
|
||||
v <- case ref of
|
||||
RefLWW k -> readLWWBlock sto k
|
||||
_ -> pure Nothing
|
||||
|
||||
-- FIXME: wrappers-for-syntax-ctors
|
||||
let vv = maybe1 v (List (noContext @C) mempty) $
|
||||
\(_, LWWBlockData{..}) ->
|
||||
List (noContext @C) [ Symbol (noContext @C) "reflog"
|
||||
, Literal (noContext @C)
|
||||
(mkLit @Text (fromString $ show $ pretty (AsBase58 lwwRefLogPubKey)))
|
||||
]
|
||||
pure vv
|
||||
|
||||
w -> pure w
|
||||
|
||||
debug (pretty newCode)
|
||||
runConfig newCode
|
||||
|
||||
|
||||
|
||||
display :: forall m . MonadUnliftIO m => Syntax C -> FixerM m ()
|
||||
display what = do
|
||||
case what of
|
||||
LitStrVal s -> notice (pretty s)
|
||||
ast -> notice (pretty ast)
|
||||
|
||||
nil :: Syntax C
|
||||
nil = List (noContext @C) []
|
||||
|
||||
list_ :: [Syntax C] -> Syntax C
|
||||
list_ = List (noContext @C)
|
||||
|
||||
symbol_ :: Id -> Syntax C
|
||||
symbol_ = Symbol (noContext @C)
|
||||
|
||||
str_ :: Text -> Syntax C
|
||||
str_ s = Literal (noContext @C) (LitStr s)
|
||||
|
||||
int_ :: Integer -> Syntax C
|
||||
int_ s = Literal (noContext @C) (LitInt s)
|
||||
|
||||
bool_ :: Bool -> Syntax C
|
||||
bool_ s = Literal (noContext @C) (LitBool s)
|
||||
|
||||
-- FIXME: to-suckless-conf
|
||||
class AsString s where
|
||||
asString :: s -> String
|
||||
|
||||
instance AsString Literal where
|
||||
asString (LitStr s) = Text.unpack s
|
||||
asString other = show $ pretty other
|
||||
|
||||
instance AsString (Syntax c) where
|
||||
asString (Literal _ x) = asString x
|
||||
asString x = show $ pretty x
|
||||
|
||||
data RunOpts =
|
||||
RunCWD FilePath
|
||||
|
||||
instance Pretty RunOpts where
|
||||
pretty = \case
|
||||
RunCWD f -> parens ("cwd" <+> pretty f)
|
||||
|
||||
eval :: forall m . MonadUnliftIO m => Syntax C -> FixerM m (Syntax C)
|
||||
eval = eval'
|
||||
-- debug $ "EVAL" <+> pretty syn <+> pretty r
|
||||
-- pure r
|
||||
|
||||
eval' :: forall m . MonadUnliftIO m => Syntax C -> FixerM m (Syntax C)
|
||||
eval' syn = do
|
||||
|
||||
case syn of
|
||||
|
||||
x@(Literal{}) -> pure x
|
||||
|
||||
(SymbolVal n) -> lookupLocal n
|
||||
|
||||
w@(PApply "list" code') -> do
|
||||
code <- mapM unquote code'
|
||||
pure $ list_ (symbol_ "list" : code)
|
||||
|
||||
PApply "local" [SymbolVal n, what] -> do
|
||||
bindLocal n =<< eval what
|
||||
pure nil
|
||||
|
||||
PApply "eval" [e] -> do
|
||||
eval e >>= \case
|
||||
(ListVal ( SymbolVal "list" : es ) ) -> do
|
||||
lastDef nil <$> mapM eval es
|
||||
|
||||
_ -> pure nil
|
||||
|
||||
PApply "listen" (what' : code) -> do
|
||||
what <- eval what'
|
||||
case what of
|
||||
PPolledRef "reflog" ref -> do
|
||||
setReflogListener (coerce ref) =<< mapM unquote code
|
||||
|
||||
PPolledRef tp r -> do
|
||||
warn $ yellow "not supported listener type" <+> pretty tp
|
||||
|
||||
_ -> pure ()
|
||||
|
||||
pure nil
|
||||
|
||||
PApply "watch" (p' : what' : watcher') -> do
|
||||
p <- eval p'
|
||||
what <- eval what'
|
||||
watcher <- mapM unquote watcher'
|
||||
|
||||
case (p, what) of
|
||||
(LitIntVal n, PPolledRef tp ref) -> do
|
||||
|
||||
let re = asRef tp ref
|
||||
|
||||
forM_ re (subscribeRef n)
|
||||
void $ async (pause @'Seconds 5 >> forM_ re fetchRef)
|
||||
|
||||
void $ runMaybeT do
|
||||
|
||||
-- FIXME: more-diagnostics
|
||||
pref <- toMPlus $ case tp of
|
||||
"lwwref" -> Just $ PolledRef (RefLWW (coerce ref), fromIntegral n)
|
||||
"reflog" -> Just $ PolledRef (RefRefLog (coerce ref), fromIntegral n)
|
||||
_ -> Nothing
|
||||
|
||||
debug $ blue "watch" <+> pretty n <+> pretty tp <+> pretty ref
|
||||
w <- asks _watchers
|
||||
atomically $ modifyTVar w (HM.insert pref (Watcher watcher))
|
||||
|
||||
_ -> pure ()
|
||||
|
||||
pure nil
|
||||
|
||||
PApply "on-start" wtf -> do
|
||||
|
||||
rn <- asks _runNum
|
||||
rnn <- atomically do
|
||||
x <- readTVar rn
|
||||
modifyTVar rn succ
|
||||
pure x
|
||||
|
||||
when (rnn == 0) do
|
||||
mapM_ eval wtf
|
||||
|
||||
pure nil
|
||||
|
||||
PApply fn args' -> do
|
||||
args <- mapM eval args'
|
||||
case fn of
|
||||
|
||||
"reflog" -> do
|
||||
pure $ list_ (symbol_ "reflog" : args)
|
||||
|
||||
"lwwref" -> do
|
||||
pure $ list_ (symbol_ "lwwref" : args)
|
||||
|
||||
"watch-config" -> do
|
||||
case headDef (int_ 30) args of
|
||||
LitIntVal n -> do
|
||||
debug $ "watch-config" <+> pretty n
|
||||
asks _configPoll >>= atomically . flip writeTVar (fromIntegral n)
|
||||
_ -> do
|
||||
pure ()
|
||||
|
||||
pure nil
|
||||
|
||||
"debug" -> do
|
||||
let onOff = headDef (bool_ False) args
|
||||
case onOff of
|
||||
LitBoolVal True -> do
|
||||
setLogging @DEBUG debugPrefix
|
||||
_ -> do
|
||||
setLoggingOff @DEBUG
|
||||
|
||||
pure nil
|
||||
|
||||
"string-append" -> do
|
||||
pieces <- for args $ \case
|
||||
LitStrVal s -> pure s
|
||||
other -> pure (Text.pack $ show $ pretty other)
|
||||
|
||||
pure $ str_ $ mconcat pieces
|
||||
|
||||
"display" -> do
|
||||
first <- headDef nil <$> mapM eval args
|
||||
case first of
|
||||
LitStrVal s -> notice (pretty s)
|
||||
ast -> notice (pretty ast)
|
||||
|
||||
pure nil
|
||||
|
||||
"getenv" -> do
|
||||
let name = asString $ headDef nil args
|
||||
liftIO $ Env.lookupEnv name
|
||||
>>= \case
|
||||
Nothing -> pure nil
|
||||
Just s -> pure $ str_ (fromString s)
|
||||
|
||||
"mkdir" -> do
|
||||
debug $ "mkdir" <+> pretty args
|
||||
mapM_ mkdir [ Text.unpack s | (LitStrVal s) <- args ]
|
||||
pure nil
|
||||
|
||||
"exit" -> do
|
||||
case headDef (int_ 0) args of
|
||||
LitIntVal 0 -> liftIO Exit.exitSuccess
|
||||
LitIntVal w -> liftIO $ Exit.exitWith (ExitFailure $ fromIntegral w)
|
||||
_ -> liftIO Exit.exitFailure
|
||||
|
||||
pure nil
|
||||
|
||||
"run" -> do
|
||||
debug $ red "RUN-ARGS" <+> pretty args
|
||||
(o,cargs) <- case args of
|
||||
(ListVal (SymbolVal "list" : SymbolVal "opts" : opts) : rest) -> do
|
||||
let pairs = [ (opt, e) | ListVal [SymbolVal opt, e] <- opts ]
|
||||
oo <- for pairs $ \(o, e) -> (o,) <$> eval e
|
||||
let cwd = lastMay [ RunCWD (Text.unpack f )
|
||||
| ("cwd", LitStrVal f) <- oo
|
||||
]
|
||||
pure (maybeToList cwd, rest)
|
||||
|
||||
rest -> do
|
||||
pure (mempty, rest)
|
||||
|
||||
let what = unwords $ [Text.unpack s | LitStrVal s <- cargs]
|
||||
|
||||
let cwd = case headMay [ p | c@(RunCWD p) <- o ] of
|
||||
Just c -> setWorkingDir c
|
||||
_ -> id
|
||||
|
||||
debug $ red "RUN" <+> pretty what <+> pretty o
|
||||
|
||||
let job = void $ runProcess_ (shell what & cwd)
|
||||
pip <- asks _pipeline
|
||||
atomically $ writeTQueue pip job
|
||||
|
||||
pure nil
|
||||
|
||||
_ -> pure nil
|
||||
|
||||
|
||||
_ -> pure nil
|
||||
|
||||
|
||||
unquote :: forall code m . (code ~ Syntax C, MonadUnliftIO m) => code -> FixerM m code
|
||||
unquote code = flip transformBiM code $ \case
|
||||
x@(ListVal [SymbolVal "unquoted", rest] :: Syntax C) -> do
|
||||
eval rest
|
||||
|
||||
x -> pure x
|
||||
|
||||
setReflogListener :: forall m . MonadUnliftIO m => RRefLog -> [Syntax C] -> FixerM m ()
|
||||
setReflogListener reflog code = do
|
||||
debug $ green "setReflogListener" <+> pretty reflog <> line <> pretty code
|
||||
|
||||
dudes <- asks _listeners
|
||||
|
||||
a <- atomically do
|
||||
x <- readTVar dudes <&> HM.lookup reflog
|
||||
modifyTVar dudes (HM.delete reflog)
|
||||
pure x
|
||||
|
||||
maybe1 a none cancel
|
||||
|
||||
sink <- asks _refLogSink
|
||||
|
||||
debug $ "subscribe to" <+> pretty reflog
|
||||
|
||||
new <- async do
|
||||
cache <- liftIO $ Cache.newCache (Just (toTimeSpec (TimeoutSec 10)))
|
||||
|
||||
runNotifySink sink (RefLogNotifyKey reflog) $ \(RefLogUpdateNotifyData _ h) -> do
|
||||
debug $ "Got notification" <+> pretty reflog <+> pretty h
|
||||
here <- liftIO (Cache.lookup cache (reflog, h)) <&> isJust
|
||||
unless here do
|
||||
liftIO $ Cache.insert cache (reflog,h) ()
|
||||
runConfig code
|
||||
|
||||
atomically $ modifyTVar dudes (HM.insert reflog new)
|
||||
|
||||
bindLocal :: forall m . MonadUnliftIO m => Id -> Syntax C -> FixerM m ()
|
||||
bindLocal l e = do
|
||||
-- debug $ "bindLocal" <+> pretty l
|
||||
asks _locals >>= atomically . flip modifyTVar (HM.insert l e)
|
||||
|
||||
lookupLocal :: forall m . MonadUnliftIO m => Id ->FixerM m (Syntax C)
|
||||
lookupLocal name = do
|
||||
-- debug $ "lookupLocal" <+> pretty name
|
||||
asks _locals >>= readTVarIO <&> fromMaybe nil . HM.lookup name
|
||||
|
||||
runConfig :: forall m . MonadUnliftIO m => Config -> FixerM m ()
|
||||
runConfig conf = do
|
||||
debug $ green "runConfig"
|
||||
bindLocal "off" (bool_ False)
|
||||
bindLocal "on" (bool_ True)
|
||||
|
||||
mapM_ eval conf
|
||||
|
||||
updateFromConfig :: MonadUnliftIO m => Config -> FixerM m ()
|
||||
updateFromConfig conf = do
|
||||
asks _config >>= atomically . flip writeTVar conf
|
||||
runConfig conf
|
||||
|
||||
main :: IO ()
|
||||
main = do
|
||||
runMe =<< customExecParser (prefs showHelpOnError)
|
||||
( info (helper <*> opts)
|
||||
( fullDesc
|
||||
<> header "hbs2-fixer"
|
||||
<> progDesc "Intermediary between hbs2-peer and external applications. Listen events / do stuff"
|
||||
))
|
||||
|
||||
where
|
||||
opts = optional $ strOption (short 'c' <> long "config" <> metavar "FILE" <> help "Specify configuration file")
|
||||
|
||||
runMe opt = withApp opt mainLoop
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
;; hbs2-fixer config example
|
||||
|
||||
(local home (getenv "HOME"))
|
||||
|
||||
(local root (string-append home "/.local/share/hbs2-git-repos/0.24.1"))
|
||||
|
||||
(local hbs2-repo "BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP" )
|
||||
(local hbs2-repo-path (string-append root "/" hbs2-repo))
|
||||
|
||||
(watch-config 30)
|
||||
|
||||
(debug off)
|
||||
|
||||
(display (string-append "repo1" " " hbs2-repo-path))
|
||||
|
||||
(eval (list (display "OKAY11 FROM EVAL")))
|
||||
|
||||
(on-start
|
||||
(display (string-append "on-start" " " "mkdir" " " hbs2-repo-path))
|
||||
|
||||
(mkdir hbs2-repo-path)
|
||||
|
||||
(run (string-append "git init --bare " hbs2-repo-path))
|
||||
(display update-hbs2-repo)
|
||||
|
||||
(run (list opts (cwd hbs2-repo-path))
|
||||
(string-append "git hbs2 import" " " hbs2-repo))
|
||||
|
||||
(run (list opts (cwd hbs2-repo-path))
|
||||
(string-append "git gc" ) )
|
||||
)
|
||||
|
||||
(watch 60 (lwwref "BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP")
|
||||
(run-config
|
||||
(watch 300 (lwwref:get-hbs2-git-reflog)
|
||||
(display "GIT REFLOG CHANGED BY WATCH")
|
||||
|
||||
(run (list opts (cwd hbs2-repo-path))
|
||||
(string-append "git hbs2 import" " " hbs2-repo ))
|
||||
|
||||
(display (string-append "Updated " hbs2-repo " OK"))
|
||||
|
||||
)
|
||||
|
||||
(listen (lwwref:get-hbs2-git-reflog)
|
||||
|
||||
(display "GIT REFLOG CHANGED BY LISTENER")
|
||||
|
||||
(run (list opts (cwd hbs2-repo-path))
|
||||
(string-append "git hbs2 import" " " hbs2-repo ))
|
||||
|
||||
(display (string-append "Updated " hbs2-repo " OK"))
|
||||
)
|
||||
|
||||
)
|
||||
(display (string-append "Updated " hbs2-repo))
|
||||
)
|
||||
|
||||
; (watch 30 (lwwref "Byc3XUeSbJBXVFueumkNkVJMPHbGoUdxYEJBgzJPf8io")
|
||||
; (run "./on-my-ref4.sh")
|
||||
; )
|
||||
|
||||
; (watch 30 (lwwref "DTmSb3Au7apDTMctQn6yqs9GJ8mFW7YQXzgVqZpmkTtf")
|
||||
; (run "./on-my-ref4.sh")
|
||||
; )
|
||||
|
||||
; (watch 30 (reflog "BKtvRLispCM9UuQqHaNxu4SEUzpQNQ3PeRNknecKGPZ6")
|
||||
; (run "./on-my-ref4.sh")
|
||||
; )
|
||||
|
||||
; (display "JOPAKITA 111")
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
|
||||
(display (getenv 1234))
|
||||
|
||||
(display (getenv "HOME"))
|
|
@ -0,0 +1,10 @@
|
|||
|
||||
(local home (getenv "HOME"))
|
||||
|
||||
(local root (string-append home "/.local/share/hbs2-git-repos/0.24.1"))
|
||||
|
||||
(local hbs2-repo "BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP" )
|
||||
(local hbs2-repo-path (string-append root "/" hbs2-repo))
|
||||
|
||||
(display root)
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
;; hbs2-fixer config example
|
||||
|
||||
; (debug off)
|
||||
|
||||
(watch-config 30)
|
||||
|
||||
(local home (getenv "HOME"))
|
||||
|
||||
(local root (string-append home "/.local/share/hbs2-git-repos/0.24.1"))
|
||||
|
||||
(local hbs2-repo "BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP" )
|
||||
(local hbs2-repo-path (string-append root "/" hbs2-repo))
|
||||
|
||||
|
||||
(local myref "BKtvRLispCM9UuQqHaNxu4SEUzpQNQ3PeRNknecKGPZ6" )
|
||||
|
||||
(listen (reflog myref)
|
||||
(display (string-append "HELLO FROM REFLOG " (unquoted myref)))
|
||||
)
|
||||
|
||||
(listen (lwwref myref)
|
||||
(display "WON'T HAPPEN")
|
||||
)
|
||||
|
||||
(display "FUUBAR!")
|
|
@ -0,0 +1,5 @@
|
|||
|
||||
(local code (list (display "HELLO")))
|
||||
|
||||
(eval code)
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
|
||||
(watch 30 (lwwref "BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP")
|
||||
(display "PREVED")
|
||||
)
|
|
@ -1,6 +1,6 @@
|
|||
cabal-version: 3.0
|
||||
name: hbs2-git-reposync
|
||||
version: 0.1.0.0
|
||||
name: hbs2-fixer
|
||||
version: 0.24.1.0
|
||||
-- synopsis:
|
||||
-- description:
|
||||
license: BSD-3-Clause
|
||||
|
@ -57,7 +57,7 @@ common shared-properties
|
|||
, TemplateHaskell
|
||||
|
||||
|
||||
build-depends: hbs2-core, hbs2-peer
|
||||
build-depends: hbs2-core, hbs2-peer, hbs2-git
|
||||
, attoparsec
|
||||
, aeson
|
||||
, async
|
||||
|
@ -106,9 +106,9 @@ common shared-properties
|
|||
, wai-app-file-cgi
|
||||
, wai-extra
|
||||
|
||||
executable hbs2-git-reposync
|
||||
executable hbs2-fixer
|
||||
import: shared-properties
|
||||
main-is: ReposyncMain.hs
|
||||
main-is: Main.hs
|
||||
|
||||
ghc-options:
|
||||
-threaded
|
||||
|
@ -122,14 +122,9 @@ executable hbs2-git-reposync
|
|||
base, hbs2-core, hbs2-peer
|
||||
, optparse-applicative
|
||||
, unliftio
|
||||
, terminal-progress-bar
|
||||
, http-types
|
||||
, scotty
|
||||
, wai
|
||||
, wai-middleware-static
|
||||
, wai-extra
|
||||
|
||||
hs-source-dirs: .
|
||||
default-language: Haskell2010
|
||||
hs-source-dirs: app
|
||||
default-language: GHC2021
|
||||
|
||||
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
Copyright (c) 2023, Dmitry Zuikov
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
|
||||
* Neither the name of Dmitry Zuikov nor the names of other
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -1,463 +0,0 @@
|
|||
{-# Language TemplateHaskell #-}
|
||||
module Main where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.OrDie
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Actors.Peer
|
||||
import HBS2.Net.Proto.Notify
|
||||
import HBS2.Peer.Proto
|
||||
import HBS2.Peer.RPC.Client.Unix hiding (Cookie)
|
||||
import HBS2.Peer.RPC.API.RefLog
|
||||
import HBS2.Peer.Notify
|
||||
|
||||
import HBS2.System.Logger.Simple hiding (info)
|
||||
|
||||
import Data.Config.Suckless
|
||||
|
||||
import Data.Char qualified as Char
|
||||
import Control.Monad.Catch (MonadThrow(..))
|
||||
import Control.Monad.Except (runExceptT,throwError)
|
||||
import Control.Monad.Cont
|
||||
import Control.Monad.Reader
|
||||
import Data.ByteString.Builder hiding (writeFile)
|
||||
import Data.ByteString.Char8 qualified as BS8
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Data.Either
|
||||
import Data.List qualified as List
|
||||
import Data.Maybe
|
||||
import Data.Text qualified as Text
|
||||
import Lens.Micro.Platform
|
||||
import Network.Wai.Middleware.Static (staticPolicy, addBase)
|
||||
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
|
||||
import Options.Applicative
|
||||
import qualified Data.Text.Encoding as TE
|
||||
import System.Directory
|
||||
import System.FilePath
|
||||
import System.Process.Typed
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Control.Concurrent.STM (flushTQueue)
|
||||
import UnliftIO
|
||||
import Web.Scotty hiding (header,next)
|
||||
|
||||
import Network.HTTP.Types
|
||||
import Network.Wai
|
||||
|
||||
import System.Exit qualified as Exit
|
||||
import System.IO.Unsafe (unsafePerformIO)
|
||||
|
||||
import Streaming.Prelude qualified as S
|
||||
|
||||
-- TODO: support-encrypted-repoes
|
||||
|
||||
die :: (MonadIO m, Show msg) => msg -> m a
|
||||
die msg = liftIO $ Exit.die [qc|{msg}|]
|
||||
|
||||
data RepoInitException = RepoInitException FilePath deriving (Show, Typeable)
|
||||
instance Exception RepoInitException
|
||||
|
||||
debugPrefix :: SetLoggerEntry
|
||||
debugPrefix = toStdout . logPrefix "[debug] "
|
||||
|
||||
errorPrefix :: SetLoggerEntry
|
||||
errorPrefix = toStdout . logPrefix "[error] "
|
||||
|
||||
warnPrefix :: SetLoggerEntry
|
||||
warnPrefix = toStdout . logPrefix "[warn] "
|
||||
|
||||
noticePrefix :: SetLoggerEntry
|
||||
noticePrefix = toStdout . logPrefix "[notice] "
|
||||
|
||||
data ReposyncRootKey
|
||||
data ReposyncHttpPort
|
||||
|
||||
instance Monad m => HasCfgKey ReposyncRootKey (Maybe String) m where
|
||||
key = "root"
|
||||
|
||||
instance Monad m => HasCfgKey ReposyncHttpPort (Maybe Int) m where
|
||||
key = "http-port"
|
||||
|
||||
data RepoEntry =
|
||||
RepoEntry
|
||||
{ repoPath :: FilePath
|
||||
, repoRef :: RefLogKey HBS2Basic
|
||||
, repoKeys :: [FilePath]
|
||||
, repoHash :: TVar (Maybe HashRef)
|
||||
}
|
||||
deriving stock (Eq)
|
||||
|
||||
|
||||
data ReposyncState =
|
||||
|
||||
ReposyncState
|
||||
{ _rpcSoname :: FilePath
|
||||
, _rpcRefLog :: ServiceCaller RefLogAPI UNIX
|
||||
, _rpcNotifySink :: NotifySink (RefLogEvents L4Proto) UNIX
|
||||
, _reposyncBaseDir :: FilePath
|
||||
, _reposyncPort :: Int
|
||||
, _reposyncEntries :: TVar [RepoEntry]
|
||||
}
|
||||
|
||||
makeLenses 'ReposyncState
|
||||
|
||||
newtype ReposyncM m a =
|
||||
App { unReposyncM :: ReaderT ReposyncState m a }
|
||||
deriving newtype ( Applicative
|
||||
, Functor
|
||||
, Monad
|
||||
, MonadIO
|
||||
, MonadThrow
|
||||
, MonadReader ReposyncState
|
||||
, MonadUnliftIO
|
||||
, MonadTrans
|
||||
)
|
||||
|
||||
|
||||
myName :: FilePath
|
||||
myName = "hbs2-git-reposync"
|
||||
|
||||
reposyncDefaultDir :: FilePath
|
||||
reposyncDefaultDir = unsafePerformIO do
|
||||
getXdgDirectory XdgData (myName </> "repo")
|
||||
{-# NOINLINE reposyncDefaultDir #-}
|
||||
|
||||
newState :: MonadUnliftIO m
|
||||
=> FilePath
|
||||
-> ServiceCaller RefLogAPI UNIX
|
||||
-> NotifySink (RefLogEvents L4Proto) UNIX
|
||||
-> m ReposyncState
|
||||
|
||||
newState so refLog sink =
|
||||
ReposyncState so refLog sink reposyncDefaultDir 4017 <$> newTVarIO mempty
|
||||
|
||||
{- HLINT ignore "Functor law" -}
|
||||
withConfig :: forall a m . (MonadUnliftIO m) => Maybe FilePath -> ReposyncM m a -> ReposyncM m ()
|
||||
withConfig cfg m = do
|
||||
|
||||
let defDir = reposyncDefaultDir
|
||||
|
||||
defConfDir <- liftIO $ getXdgDirectory XdgConfig myName
|
||||
|
||||
realCfg <- case cfg of
|
||||
Just f -> pure f
|
||||
Nothing -> do
|
||||
liftIO do
|
||||
let conf = defConfDir </> "config"
|
||||
void $ try @_ @IOException $ createDirectoryIfMissing True defConfDir
|
||||
debug $ "config-dir" <+> pretty defConfDir
|
||||
void $ try @_ @IOException $ appendFile conf ""
|
||||
pure conf
|
||||
|
||||
syn <- liftIO (readFile realCfg) <&> parseTop
|
||||
<&> fromRight mempty
|
||||
|
||||
debug $ "config" <+> pretty realCfg <> line <> pretty syn
|
||||
|
||||
ev <- asks (view reposyncEntries)
|
||||
|
||||
let root = runReader (cfgValue @ReposyncRootKey) syn
|
||||
& fromMaybe defDir
|
||||
|
||||
let port = runReader (cfgValue @ReposyncHttpPort) syn
|
||||
& fromMaybe 4017
|
||||
|
||||
es <- entries root syn
|
||||
atomically $ modifyTVar ev (\x -> List.nub ( x <> es))
|
||||
|
||||
local ( set reposyncBaseDir root .
|
||||
set reposyncPort port
|
||||
) (void m)
|
||||
|
||||
where
|
||||
entries root syn = do
|
||||
|
||||
let findKeys w = [ Text.unpack p
|
||||
| ListVal (Key "decrypt" [LitStrVal p]) <- w
|
||||
]
|
||||
|
||||
let reflogs = catMaybes [ (,) <$> fromStringMay @(RefLogKey HBS2Basic) (Text.unpack o)
|
||||
<*> pure (findKeys args)
|
||||
| ListVal (Key "reflog" (LitStrVal o : args)) <- syn
|
||||
]
|
||||
|
||||
forM reflogs $ \(repo, keys) -> do
|
||||
let path = show $ pretty repo
|
||||
mt <- newTVarIO Nothing
|
||||
pure $ RepoEntry (root </> path) repo keys mt
|
||||
|
||||
|
||||
|
||||
|
||||
runSync :: (MonadUnliftIO m, MonadThrow m) => ReposyncM m ()
|
||||
runSync = do
|
||||
es <- asks (view reposyncEntries) >>= readTVarIO
|
||||
so <- asks (view rpcSoname)
|
||||
|
||||
refLogRPC <- asks (view rpcRefLog)
|
||||
sink <- asks (view rpcNotifySink)
|
||||
|
||||
root <- asks (view reposyncBaseDir)
|
||||
port <- asks (fromIntegral . view reposyncPort)
|
||||
|
||||
http <- async $ liftIO $ scotty port $ do
|
||||
middleware $ staticPolicy (addBase root)
|
||||
middleware logStdoutDev
|
||||
get "/" $ do
|
||||
text "This is hbs2-git-reposync"
|
||||
|
||||
r <- forM es $ \entry -> async $ void $ flip runContT pure do
|
||||
let ref = repoRef entry
|
||||
let rk = fromRefLogKey ref
|
||||
tv <- newTVarIO Nothing
|
||||
|
||||
upd <- newTQueueIO
|
||||
|
||||
debug $ "STARTED WITH" <+> pretty (repoPath entry)
|
||||
|
||||
let notif =
|
||||
liftIO $ async do
|
||||
debug $ "Subscribed" <+> pretty ref
|
||||
runNotifySink sink (RefLogNotifyKey ref) $ \(RefLogUpdateNotifyData _ h) -> do
|
||||
debug $ "Got notification" <+> pretty ref <+> pretty h
|
||||
atomically $ writeTQueue upd ()
|
||||
|
||||
void $ ContT $ bracket notif cancel
|
||||
|
||||
lift $ initRepo entry
|
||||
|
||||
lift $ syncRepo entry
|
||||
|
||||
|
||||
fix \next -> do
|
||||
|
||||
void $ liftIO $ race (pause @'Seconds 60) (atomically (peekTQueue upd))
|
||||
pause @'Seconds 5
|
||||
liftIO $ atomically $ flushTQueue upd
|
||||
|
||||
rr' <- liftIO $ race (pause @'Seconds 1) do
|
||||
callService @RpcRefLogGet refLogRPC rk
|
||||
<&> fromRight Nothing
|
||||
|
||||
rr <- either (const $ pause @'Seconds 1 >> warn "rpc call timeout" >> next) pure rr'
|
||||
|
||||
debug $ "REFLOG VALUE:" <+> pretty rr
|
||||
|
||||
r0 <- readTVarIO tv
|
||||
|
||||
unless ( rr == r0 ) do
|
||||
debug $ "Syncronize repo!" <+> pretty (repoPath entry)
|
||||
fix \again -> do
|
||||
lift (syncRepo entry) >>= \case
|
||||
Left{} -> do
|
||||
debug $ "Failed to update:" <+> pretty (repoPath entry)
|
||||
pause @'Seconds 5
|
||||
again
|
||||
|
||||
Right{} -> do
|
||||
atomically $ writeTVar tv rr
|
||||
|
||||
next
|
||||
|
||||
void $ waitAnyCatchCancel (http : r)
|
||||
|
||||
data SyncError = SyncError
|
||||
|
||||
syncRepo :: (MonadUnliftIO m, MonadThrow m) => RepoEntry -> m (Either SyncError ())
|
||||
syncRepo (RepoEntry{..}) = runExceptT do
|
||||
|
||||
-- let cfg = shell [qc|git fetch origin && git remote update origin|] & setWorkingDir repoPath
|
||||
let cfg = shell [qc|git remote update origin && git remote prune origin|] & setWorkingDir repoPath
|
||||
code <- runProcess cfg
|
||||
|
||||
case code of
|
||||
ExitFailure{} -> do
|
||||
err $ "Unable to sync repo" <+> pretty repoPath
|
||||
throwError SyncError
|
||||
|
||||
_ -> debug $ "synced" <+> pretty repoPath
|
||||
|
||||
|
||||
let readLocalBranches = shell [qc|git for-each-ref refs/heads|]
|
||||
& setWorkingDir repoPath
|
||||
|
||||
let readBranches = shell [qc|git ls-remote origin|]
|
||||
& setWorkingDir repoPath
|
||||
|
||||
(_, o, _) <- readProcess readBranches
|
||||
|
||||
let txt = TE.decodeUtf8 (LBS.toStrict o)
|
||||
|
||||
let ls = Text.lines txt & fmap Text.words
|
||||
|
||||
let refs = [ (b,a) | [a,b] <- ls ]
|
||||
|
||||
-- TODO: remove-only-vanished-refs
|
||||
unless (null refs) do
|
||||
|
||||
(_, o, _) <- readProcess readLocalBranches
|
||||
let out = TE.decodeUtf8 (LBS.toStrict o)
|
||||
& Text.lines
|
||||
& fmap Text.words
|
||||
|
||||
let refs = [ r | [_,_,r] <- out ]
|
||||
forM_ refs $ \r -> do
|
||||
-- debug $ "REMOVING REF" <+> pretty r
|
||||
let cmd = shell [qc|git update-ref -d {r}|] & setWorkingDir repoPath
|
||||
void $ runProcess cmd
|
||||
|
||||
forM_ refs $ \(ref, val) -> do
|
||||
-- debug $ "SET REFERENCE" <+> pretty ref <+> pretty val
|
||||
let updateBranch = shell [qc|git update-ref {ref} {val}|]
|
||||
& setWorkingDir repoPath
|
||||
& setStdout closed
|
||||
& setStderr closed
|
||||
|
||||
void $ readProcess updateBranch
|
||||
|
||||
void $ runProcess (shell "git update-server-info" & setWorkingDir repoPath)
|
||||
|
||||
-- let gc = shell [qc|git gc|] & setWorkingDir repoPath
|
||||
-- void $ runProcess gc
|
||||
|
||||
regenConfig :: MonadUnliftIO m => RepoEntry -> ReposyncM m ()
|
||||
regenConfig RepoEntry{..} = do
|
||||
|
||||
let hbs2conf = repoPath </> ".hbs2/config"
|
||||
rpc <- asks (view rpcSoname)
|
||||
|
||||
let config = ";; generated by hbs2-reposync" <> line
|
||||
<> "rpc" <+> "unix" <+> viaShow rpc <> line
|
||||
<> line
|
||||
<> vcat (fmap (("decrypt"<+>) . dquotes.pretty) repoKeys)
|
||||
|
||||
liftIO $ writeFile hbs2conf (show config)
|
||||
|
||||
initRepo :: (MonadUnliftIO m, MonadThrow m) => RepoEntry -> ReposyncM m ()
|
||||
initRepo e@(RepoEntry{..}) = do
|
||||
debug $ "initRepo" <+> pretty repoPath
|
||||
|
||||
let gitDir = repoPath
|
||||
gitHere <- liftIO $ doesDirectoryExist gitDir
|
||||
|
||||
liftIO $ createDirectoryIfMissing True gitDir
|
||||
debug $ "create dir" <+> pretty gitDir
|
||||
|
||||
let hbs2 = gitDir </> ".hbs2"
|
||||
liftIO $ createDirectoryIfMissing True hbs2
|
||||
|
||||
regenConfig e
|
||||
|
||||
unless gitHere do
|
||||
|
||||
let cfg = shell [qc|git init --bare && git remote add origin hbs2://{pretty repoRef}|]
|
||||
& setWorkingDir repoPath
|
||||
|
||||
code <- runProcess cfg
|
||||
|
||||
case code of
|
||||
ExitFailure{} -> do
|
||||
err $ "Unable to init git repository:" <+> pretty gitDir
|
||||
throwM $ RepoInitException gitDir
|
||||
|
||||
_ -> pure ()
|
||||
|
||||
|
||||
detectRPC :: (MonadUnliftIO m) => m (Maybe FilePath)
|
||||
detectRPC = do
|
||||
|
||||
(_, o, _) <- readProcess (shell [qc|hbs2-peer poke|])
|
||||
let answ = parseTop (LBS.unpack o) & fromRight mempty
|
||||
|
||||
pure (headMay [ Text.unpack r | ListVal (Key "rpc:" [LitStrVal r]) <- answ ])
|
||||
|
||||
withApp :: forall a m . MonadUnliftIO m
|
||||
=> Maybe FilePath
|
||||
-> ReposyncM m a
|
||||
-> m ()
|
||||
|
||||
withApp cfg m = do
|
||||
|
||||
setLogging @DEBUG debugPrefix
|
||||
setLogging @INFO defLog
|
||||
setLogging @ERROR errorPrefix
|
||||
setLogging @WARN warnPrefix
|
||||
setLogging @NOTICE noticePrefix
|
||||
|
||||
-- lrpc =
|
||||
|
||||
forever $ handleAny cleanup $ do
|
||||
|
||||
soname <- detectRPC `orDie` "RPC not found"
|
||||
|
||||
let o = [MUWatchdog 20, MUDontRetry]
|
||||
|
||||
client <- race ( pause @'Seconds 1) (newMessagingUnixOpts o False 1.0 soname)
|
||||
`orDie` "hbs2-peer rpc timeout!"
|
||||
|
||||
clientN <- newMessagingUnixOpts o False 1.0 soname
|
||||
|
||||
rpc <- makeServiceCaller (fromString soname)
|
||||
|
||||
messaging <- async $ runMessagingUnix client
|
||||
|
||||
mnotify <- async $ runMessagingUnix clientN
|
||||
|
||||
sink <- newNotifySink
|
||||
|
||||
wNotify <- liftIO $ async $ flip runReaderT clientN $ do
|
||||
debug "notify restarted!"
|
||||
runNotifyWorkerClient sink
|
||||
|
||||
nProto <- liftIO $ async $ flip runReaderT clientN $ do
|
||||
runProto @UNIX
|
||||
[ makeResponse (makeNotifyClient @(RefLogEvents L4Proto) sink)
|
||||
]
|
||||
|
||||
let endpoints = [ Endpoint @UNIX rpc
|
||||
]
|
||||
|
||||
c1 <- async $ liftIO $ runReaderT (runServiceClientMulti endpoints) client
|
||||
|
||||
state <- newState soname rpc sink
|
||||
|
||||
r <- async $ void $ runReaderT (unReposyncM $ withConfig cfg m) state
|
||||
|
||||
void $ waitAnyCatchCancel [c1, messaging, mnotify, nProto, wNotify, r]
|
||||
|
||||
notice "exiting"
|
||||
|
||||
setLoggingOff @DEBUG
|
||||
setLoggingOff @INFO
|
||||
setLoggingOff @ERROR
|
||||
setLoggingOff @WARN
|
||||
setLoggingOff @NOTICE
|
||||
|
||||
|
||||
where
|
||||
cleanup e = do
|
||||
err (viaShow e)
|
||||
warn "Something bad happened. Retrying..."
|
||||
pause @'Seconds 2.5
|
||||
|
||||
main :: IO ()
|
||||
main = runMe . customExecParser (prefs showHelpOnError) $
|
||||
info (helper <*> ((,) <$> opts <*> parser))
|
||||
( fullDesc
|
||||
<> header "hbs2-reposync"
|
||||
<> progDesc "syncronizes hbs2-git repositories"
|
||||
)
|
||||
where
|
||||
-- parser :: Parser (IO ())
|
||||
parser = hsubparser ( command "run" (info pRun (progDesc "run syncronization"))
|
||||
)
|
||||
|
||||
runMe x = do
|
||||
(o, run) <- x
|
||||
withApp o run
|
||||
|
||||
opts = optional $ strOption (short 'c' <> long "config")
|
||||
|
||||
pRun = do
|
||||
pure runSync
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
|
||||
rpc unix "/tmp/hbs2-rpc.socket"
|
||||
|
||||
; http-port 4017
|
||||
|
||||
; root "/home/dmz/.local/share/hbs2-reposync/repo"
|
||||
|
||||
;; single reflog
|
||||
|
||||
[ reflog "BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP"
|
||||
;; options may go here if any
|
||||
]
|
||||
|
||||
[ reflog "JCVvyFfj1C21QfFkcjrFN6CoarykfAf6jLFpCNNKjP7E"
|
||||
(decrypt "/home/dmz/w/hbs2/owner.key")
|
||||
]
|
||||
|
||||
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
# Revision history for hbs2-git
|
||||
|
||||
## 0.1.0.0 -- YYYY-mm-dd
|
||||
|
||||
* First version. Released on an unsuspecting world.
|
|
@ -1,30 +0,0 @@
|
|||
Copyright (c) 2023, Dmitry Zuikov
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
|
||||
* Neither the name of Dmitry Zuikov nor the names of other
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -1,18 +0,0 @@
|
|||
rpc unix "/tmp/hbs2-rpc.socket"
|
||||
|
||||
keyring "/home/dmz/dmz-data/hbs2/HBcSZnjpEcA88S19S5QwC5N4yyKZY4SvAuBWqHQhK6wV.key"
|
||||
|
||||
keyring "/home/dmz/w/hbs2/test1.key"
|
||||
keyring "/home/dmz/w/hbs2/test2.key"
|
||||
keyring "/home/dmz/w/hbs2/test3.key"
|
||||
keyring "/home/dmz/w/hbs2/test4.key"
|
||||
keyring "/home/dmz/w/hbs2/test5.key"
|
||||
|
||||
[ encrypted "EDRuSaFmWbCnyUNtFbgCtqfiCrYPJvnY9pZB81AbSTbr"
|
||||
(ttl 86400)
|
||||
(owner "21MU19xcRbhhPakPJCG6Jyrf7Xv6JMCyMDEfCk16hR5G")
|
||||
(member "21MU19xcRbhhPakPJCG6Jyrf7Xv6JMCyMDEfCk16hR5G")
|
||||
(member "GcTjPEDSTCKNKnwPZWBjudeTqSie2fvYfsoSAzUKTRZ5")
|
||||
]
|
||||
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
rpc unix "/tmp/hbs2-rpc.socket"
|
||||
|
||||
branch "master"
|
||||
branch "hbs2-git"
|
||||
|
||||
keyring "/home/dmz/dmz-data/hbs2/BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP.key"
|
||||
keyring "/home/dmz/dmz-data/hbs2/HBcSZnjpEcA88S19S5QwC5N4yyKZY4SvAuBWqHQhK6wV.key"
|
||||
|
||||
keyring "/home/dmz/w/hbs2/k5.key"
|
||||
|
||||
;;keyring "/home/dmz/w/hbs2/test1.key"
|
||||
;;keyring "/home/dmz/w/hbs2/test2.key"
|
||||
;;keyring "/home/dmz/w/hbs2/test6.key"
|
||||
;; keyring "/home/dmz/w/hbs2/test3.key"
|
||||
|
||||
decrypt "/home/dmz/w/hbs2/au11.key"
|
||||
decrypt "/home/dmz/w/hbs2/owner.key"
|
||||
decrypt "/home/dmz/w/hbs2/k5.key"
|
||||
|
||||
[ encrypted "HFKuPTyaQLLmfgfVveu5GA4spt4c6oQBMUo1aeQ4abXG"
|
||||
(ttl 86400)
|
||||
(owner "21MU19xcRbhhPakPJCG6Jyrf7Xv6JMCyMDEfCk16hR5G")
|
||||
(member "H9miZgHYg84wZM8Hu93t7iLHcKnZytAEgcB26LGbLTz4")
|
||||
(member "2jsaezeu8iCRYBqMVBauCxnkHXvP3CkEFLeVxE8bRfvH")
|
||||
(member "FNGD1oNh9AVXw1v7ZFpC5V2P2GGYRoUwnP6qwTw9JGpn")
|
||||
(member "J2FWG3uib7TpZsu1k8sz8cekC3VH1ggNBhZKJxtUce4Q")
|
||||
(member "E9WGzRzmD5G5SHbz9u7n3WKCz1eaVNPvT5f1NEKUQ6FU")
|
||||
(keyring "/home/dmz/w/hbs2/owner.key")
|
||||
]
|
||||
|
|
@ -0,0 +1,111 @@
|
|||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE UndecidableInstances #-}
|
||||
module Main where
|
||||
|
||||
import HBS2.Git.Client.Prelude hiding (info)
|
||||
import HBS2.Git.Client.App hiding (_progress, _storage, _peerAPI, _lwwAPI, _refLogAPI)
|
||||
import HBS2.Git.Client.Progress
|
||||
import HBS2.Git.Client.Import
|
||||
import HBS2.Git.Client.RefLog
|
||||
import HBS2.Peer.CLI.Detect
|
||||
|
||||
import Options.Applicative
|
||||
import Data.Semigroup ((<>))
|
||||
|
||||
main :: IO ()
|
||||
main = do
|
||||
let parser = subscribe
|
||||
<$> optional (strOption
|
||||
( long "socket"
|
||||
<> short 's'
|
||||
<> metavar "SOCKET"
|
||||
<> help "Socket file path"))
|
||||
<*> argument pLww (metavar "LWWREF")
|
||||
join $ execParser (info (parser <**> helper)
|
||||
( fullDesc
|
||||
<> progDesc "subscribe to an hbs2-git repository reference"
|
||||
<> header "hbs2-git-subscribe"))
|
||||
|
||||
|
||||
where
|
||||
pLww :: ReadM (LWWRefKey HBS2Basic)
|
||||
pLww = maybeReader fromStringMay
|
||||
|
||||
|
||||
data MyStuff =
|
||||
MyStuff
|
||||
{ _peerAPI :: ServiceCaller PeerAPI UNIX
|
||||
, _lwwAPI :: ServiceCaller LWWRefAPI UNIX
|
||||
, _refLogAPI :: ServiceCaller RefLogAPI UNIX
|
||||
, _storage :: AnyStorage
|
||||
, _progress :: AnyProgress
|
||||
}
|
||||
|
||||
newtype MyApp m a = MyApp { fromMyApp :: ReaderT MyStuff m a }
|
||||
deriving newtype ( Functor
|
||||
, Applicative
|
||||
, Monad
|
||||
, MonadIO
|
||||
, MonadUnliftIO
|
||||
, MonadThrow
|
||||
, MonadReader MyStuff
|
||||
)
|
||||
|
||||
instance Monad m => HasProgressIndicator (MyApp m) where
|
||||
getProgressIndicator = asks _progress
|
||||
|
||||
instance Monad m => HasStorage (MyApp m) where
|
||||
getStorage = asks _storage
|
||||
|
||||
instance Monad m => HasAPI PeerAPI UNIX (MyApp m) where
|
||||
getAPI = asks _peerAPI
|
||||
|
||||
instance Monad m => HasAPI LWWRefAPI UNIX (MyApp m) where
|
||||
getAPI = asks _lwwAPI
|
||||
|
||||
instance Monad m => HasAPI RefLogAPI UNIX (MyApp m) where
|
||||
getAPI = asks _refLogAPI
|
||||
|
||||
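-- Connect to the running hbs2-peer over its UNIX RPC socket (auto-detected
-- unless a socket path is given), start the messaging and progress threads,
-- wire up the Peer/RefLog/LWWRef/Storage service callers and then subscribe
-- to the given LWW reference.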
subscribe :: forall m . MonadUnliftIO m => Maybe String -> LWWRefKey HBS2Basic -> m ()
|
||||
subscribe soname' ref = do
|
||||
|
||||
soname <- maybe1 soname' detectRPC (pure.Just) `orDie` "can't locate rpc"
|
||||
|
||||
flip runContT pure do
|
||||
|
||||
client <- lift $ race (pause @'Seconds 1) (newMessagingUnix False 1.0 soname)
|
||||
>>= orThrowUser ("can't connect to" <+> pretty soname)
|
||||
|
||||
q <- lift newProgressQ
|
||||
let ip = AnyProgress q
|
||||
|
||||
void $ ContT $ withAsync $ runMessagingUnix client
|
||||
void $ ContT $ withAsync $ drawProgress q
|
||||
|
||||
peerAPI <- makeServiceCaller @PeerAPI (fromString soname)
|
||||
refLogAPI <- makeServiceCaller @RefLogAPI (fromString soname)
|
||||
storageAPI <- makeServiceCaller @StorageAPI (fromString soname)
|
||||
lwwAPI <- makeServiceCaller @LWWRefAPI (fromString soname)
|
||||
|
||||
let sto = AnyStorage (StorageClient storageAPI)
|
||||
|
||||
let endpoints = [ Endpoint @UNIX peerAPI
|
||||
, Endpoint @UNIX refLogAPI
|
||||
, Endpoint @UNIX lwwAPI
|
||||
, Endpoint @UNIX storageAPI
|
||||
]
|
||||
|
||||
void $ ContT $ withAsync $ liftIO $ runReaderT (runServiceClientMulti endpoints) client
|
||||
|
||||
let app = MyStuff peerAPI lwwAPI refLogAPI sto ip
|
||||
|
||||
lift $ flip runReaderT app $ fromMyApp do
|
||||
merelySubscribeRepo ref
|
||||
|
||||
onProgress ip ImportAllDone
|
||||
|
||||
hFlush stdout
|
||||
hFlush stderr
|
||||
|
||||
pure ()
|
||||
|
|
@ -1,267 +0,0 @@
|
|||
module Main where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Base58
|
||||
import HBS2.OrDie
|
||||
import HBS2.Git.Types
|
||||
|
||||
import HBS2.System.Logger.Simple
|
||||
|
||||
import HBS2Git.App
|
||||
import HBS2Git.State
|
||||
import HBS2Git.Import
|
||||
import HBS2Git.Evolve
|
||||
import HBS2.Git.Local.CLI
|
||||
|
||||
import HBS2Git.Export (runExport)
|
||||
|
||||
import HBS2Git.Config as Config
|
||||
import GitRemoteTypes
|
||||
import GitRemotePush
|
||||
|
||||
|
||||
import Control.Concurrent.STM
|
||||
import Control.Monad.Reader
|
||||
import Data.Attoparsec.Text hiding (try)
|
||||
import Data.Attoparsec.Text qualified as Atto
|
||||
import Data.ByteString.Char8 qualified as BS
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Data.Foldable
|
||||
import Data.Functor
|
||||
import Data.Function ((&))
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.Maybe
|
||||
import Data.Text qualified as Text
|
||||
import Data.List qualified as List
|
||||
import System.Environment
|
||||
import System.Posix.Signals
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import UnliftIO.IO as UIO
|
||||
import Control.Monad.Catch
|
||||
import Control.Monad.Trans.Resource
|
||||
import Lens.Micro.Platform
|
||||
|
||||
|
||||
send :: MonadIO m => BS.ByteString -> m ()
|
||||
send = liftIO . BS.hPutStr stdout
|
||||
|
||||
sendLn :: MonadIO m => BS.ByteString -> m ()
|
||||
sendLn s = do
|
||||
trace $ "sendLn" <+> pretty (show s)
|
||||
liftIO $ BS.hPutStrLn stdout s
|
||||
|
||||
sendEol :: MonadIO m => m ()
|
||||
sendEol = liftIO $ BS.hPutStrLn stdout "" >> hFlush stdout
|
||||
|
||||
receive :: MonadIO m => m BS.ByteString
|
||||
receive = liftIO $ BS.hGetLine stdin
|
||||
|
||||
done :: MonadIO m => m Bool
|
||||
done = UIO.hIsEOF stdin
|
||||
|
||||
parseRepoURL :: String -> Maybe HashRef
|
||||
parseRepoURL url' = either (const Nothing) Just (parseOnly p url)
|
||||
where
|
||||
url = Text.pack url'
|
||||
p = do
|
||||
_ <- string "hbs2://"
|
||||
topic' <- Atto.manyTill' anyChar endOfInput
|
||||
let topic = BS.unpack <$> fromBase58 (BS.pack topic')
|
||||
maybe (fail "invalid url") (pure . fromString) topic
|
||||
|
||||
|
||||
capabilities :: BS.ByteString
|
||||
capabilities = BS.unlines ["push","fetch"]
|
||||
|
||||
|
||||
getGlobalOptionFromURL :: HasGlobalOptions m => [String] -> m ()
|
||||
getGlobalOptionFromURL args = do
|
||||
|
||||
case args of
|
||||
[_, ss] -> do
|
||||
let (_, suff) = Text.breakOn "?" (Text.pack ss)
|
||||
& over _2 (Text.dropWhile (== '?'))
|
||||
& over _2 (Text.splitOn "&")
|
||||
& over _2 (fmap (over _2 (Text.dropWhile (=='=')) . Text.break (== '=')))
|
||||
& over _2 (filter (\(k,_) -> k /= ""))
|
||||
|
||||
forM_ suff $ \(k,v) -> do
|
||||
addGlobalOption (Text.unpack k) (Text.unpack v)
|
||||
|
||||
_ -> pure ()
|
||||
|
||||
loop :: forall m . ( MonadIO m
|
||||
, MonadCatch m
|
||||
, MonadUnliftIO m
|
||||
, MonadMask m
|
||||
, HasProgress m
|
||||
, HasConf m
|
||||
, HasStorage m
|
||||
, HasRPC m
|
||||
, HasRefCredentials m
|
||||
, HasEncryptionKeys m
|
||||
, HasGlobalOptions m
|
||||
) => [String] -> m ()
|
||||
loop args = do
|
||||
|
||||
trace $ "args:" <+> pretty args
|
||||
|
||||
ref <- case args of
|
||||
[_, ss] -> do
|
||||
let (s, _) = Text.breakOn "?" (Text.pack ss)
|
||||
|
||||
let r = Text.stripPrefix "hbs2://" s <&> fromString @RepoRef . Text.unpack
|
||||
|
||||
pure r `orDie` [qc|bad reference {args}||]
|
||||
|
||||
_ -> do
|
||||
die [qc|bad reference: {args}|]
|
||||
|
||||
trace $ "ref:" <+> pretty ref
|
||||
|
||||
dbPath <- makeDbPath ref
|
||||
|
||||
trace $ "dbPath:" <+> pretty dbPath
|
||||
|
||||
db <- dbEnv dbPath
|
||||
|
||||
-- TODO: hbs2-peer-fetch-reference-and-wait
|
||||
|
||||
checkRef <- readRef ref <&> isJust
|
||||
|
||||
let getHeads upd = do
|
||||
when upd do importRefLogNew False ref
|
||||
refsNew <- withDB db stateGetActualRefs
|
||||
let possibleHead = listToMaybe $ List.take 1 $ List.sortOn guessHead (fmap fst refsNew)
|
||||
|
||||
let hd = refsNew & LBS.pack . show
|
||||
. pretty
|
||||
. AsGitRefsFile
|
||||
. RepoHead possibleHead
|
||||
. HashMap.fromList
|
||||
pure hd
|
||||
|
||||
|
||||
hd <- getHeads True
|
||||
|
||||
refs <- withDB db stateGetActualRefs
|
||||
|
||||
let heads = [ h | h@GitHash{} <- universeBi refs ]
|
||||
|
||||
missed <- try (mapM (gitReadObject Nothing) heads) <&> either (\(_::SomeException) -> True) (const False)
|
||||
|
||||
let force = missed || List.null heads
|
||||
|
||||
when force do
|
||||
-- sync state first
|
||||
traceTime "TIMING: importRefLogNew" $ importRefLogNew True ref
|
||||
|
||||
batch <- liftIO $ newTVarIO False
|
||||
|
||||
fix \next -> do
|
||||
|
||||
eof <- done
|
||||
|
||||
when eof do
|
||||
exitFailure
|
||||
|
||||
s <- receive
|
||||
|
||||
let str = BS.unwords (BS.words s)
|
||||
let cmd = BS.words str
|
||||
|
||||
isBatch <- liftIO $ readTVarIO batch
|
||||
|
||||
case cmd of
|
||||
[] -> do
|
||||
liftIO $ atomically $ writeTVar batch False
|
||||
sendEol
|
||||
when isBatch next
|
||||
-- unless isBatch do
|
||||
|
||||
["capabilities"] -> do
|
||||
trace $ "send capabilities" <+> pretty (BS.unpack capabilities)
|
||||
send capabilities >> sendEol
|
||||
next
|
||||
|
||||
["list"] -> do
|
||||
for_ (LBS.lines hd) (sendLn . LBS.toStrict)
|
||||
sendEol
|
||||
next
|
||||
|
||||
["list","for-push"] -> do
|
||||
for_ (LBS.lines hd) (sendLn . LBS.toStrict)
|
||||
sendEol
|
||||
next
|
||||
|
||||
["fetch", sha1, x] -> do
|
||||
trace $ "fetch" <+> pretty (BS.unpack sha1) <+> pretty (BS.unpack x)
|
||||
liftIO $ atomically $ writeTVar batch True
|
||||
-- sendEol
|
||||
next
|
||||
|
||||
["push", rr] -> do
|
||||
let bra = BS.split ':' rr
|
||||
let pu = fmap (fromString' . BS.unpack) bra
|
||||
liftIO $ atomically $ writeTVar batch True
|
||||
-- debug $ "FUCKING PUSH" <> viaShow rr <+> pretty pu
|
||||
-- shutUp
|
||||
pushed <- push ref pu
|
||||
case pushed of
|
||||
Nothing -> hPrint stderr "oopsie!" >> sendEol >> shutUp
|
||||
Just re -> sendLn [qc|ok {pretty re}|]
|
||||
next
|
||||
|
||||
other -> die $ show other
|
||||
|
||||
|
||||
shutUp
|
||||
|
||||
where
|
||||
fromString' "" = Nothing
|
||||
fromString' x = Just $ fromString x
|
||||
|
||||
main :: IO ()
|
||||
main = do
|
||||
|
||||
hSetBuffering stdin NoBuffering
|
||||
hSetBuffering stdout LineBuffering
|
||||
|
||||
doTrace <- lookupEnv "HBS2TRACE" <&> isJust
|
||||
|
||||
when doTrace do
|
||||
setLogging @DEBUG debugPrefix
|
||||
setLogging @TRACE tracePrefix
|
||||
|
||||
setLogging @NOTICE noticePrefix
|
||||
setLogging @ERROR errorPrefix
|
||||
setLogging @WARN warnPrefix
|
||||
setLogging @INFO infoPrefix
|
||||
|
||||
args <- getArgs
|
||||
|
||||
void $ installHandler sigPIPE Ignore Nothing
|
||||
|
||||
evolve
|
||||
|
||||
(_, syn) <- Config.configInit
|
||||
|
||||
runWithRPC $ \rpc -> do
|
||||
env <- RemoteEnv <$> liftIO (newTVarIO mempty)
|
||||
<*> liftIO (newTVarIO mempty)
|
||||
<*> liftIO (newTVarIO mempty)
|
||||
<*> pure rpc
|
||||
|
||||
runRemoteM env do
|
||||
runWithConfig syn $ do
|
||||
getGlobalOptionFromURL args
|
||||
loadCredentials mempty
|
||||
loadKeys
|
||||
loop args
|
||||
|
||||
shutUp
|
||||
|
||||
hPutStrLn stdout ""
|
||||
hPutStrLn stderr ""
|
||||
|
|
@ -1,106 +0,0 @@
|
|||
{-# Language AllowAmbiguousTypes #-}
|
||||
module GitRemotePush where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.OrDie
|
||||
import HBS2.System.Logger.Simple
|
||||
import HBS2.Net.Auth.Credentials hiding (getCredentials)
|
||||
|
||||
import HBS2.Git.Local
|
||||
import HBS2.Git.Local.CLI
|
||||
|
||||
import HBS2Git.Config as Config
|
||||
import HBS2Git.Types
|
||||
import HBS2Git.State
|
||||
import HBS2Git.App
|
||||
import HBS2Git.Export (exportRefOnly,exportRefDeleted)
|
||||
import HBS2Git.Import (importRefLogNew)
|
||||
|
||||
import GitRemoteTypes
|
||||
|
||||
import Control.Monad.Reader
|
||||
import Data.Functor
|
||||
import Data.Set (Set)
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Control.Monad.Catch
|
||||
import Control.Monad.Trans.Resource
|
||||
|
||||
newtype RunWithConfig m a =
|
||||
WithConfig { fromWithConf :: ReaderT [Syntax C] m a }
|
||||
deriving newtype ( Applicative
|
||||
, Functor
|
||||
, Monad
|
||||
, MonadIO
|
||||
, MonadReader [Syntax C]
|
||||
, MonadTrans
|
||||
, MonadThrow
|
||||
, MonadCatch
|
||||
, MonadMask
|
||||
, MonadUnliftIO
|
||||
)
|
||||
|
||||
|
||||
runWithConfig :: MonadIO m => [Syntax C] -> RunWithConfig m a -> m a
|
||||
runWithConfig conf m = runReaderT (fromWithConf m) conf
|
||||
|
||||
|
||||
instance (Monad m, HasGlobalOptions m) => HasGlobalOptions (RunWithConfig m) where
|
||||
addGlobalOption k v = lift $ addGlobalOption k v
|
||||
getGlobalOption k = lift $ getGlobalOption k
|
||||
|
||||
instance (Monad m, HasStorage m) => HasStorage (RunWithConfig m) where
|
||||
getStorage = lift getStorage
|
||||
|
||||
instance (Monad m, HasRPC m) => HasRPC (RunWithConfig m) where
|
||||
getRPC = lift getRPC
|
||||
|
||||
instance MonadIO m => HasConf (RunWithConfig (GitRemoteApp m)) where
|
||||
getConf = ask
|
||||
|
||||
instance MonadIO m => HasRefCredentials (RunWithConfig (GitRemoteApp m)) where
|
||||
getCredentials = lift . getCredentials
|
||||
setCredentials r c = lift $ setCredentials r c
|
||||
|
||||
|
||||
instance MonadIO m => HasEncryptionKeys (RunWithConfig (GitRemoteApp m)) where
|
||||
addEncryptionKey = lift . addEncryptionKey
|
||||
findEncryptionKey = lift . findEncryptionKey
|
||||
enumEncryptionKeys = lift enumEncryptionKeys
|
||||
|
||||
push :: forall m . ( MonadIO m
|
||||
, MonadCatch m
|
||||
, HasConf m
|
||||
, HasRefCredentials m
|
||||
, HasEncryptionKeys m
|
||||
, HasGlobalOptions m
|
||||
, HasStorage m
|
||||
, HasRPC m
|
||||
, MonadUnliftIO m
|
||||
, MonadMask m
|
||||
)
|
||||
|
||||
=> RepoRef -> [Maybe GitRef] -> m (Maybe GitRef)
|
||||
|
||||
|
||||
push remote what@[Just bFrom , Just br] = do
|
||||
|
||||
_ <- cfgValue @ConfBranch @(Set GitRef) <&> transformBi normalizeRef
|
||||
trace $ "PUSH PARAMS" <+> pretty what
|
||||
gh <- gitGetHash (normalizeRef bFrom) `orDie` [qc|can't read hash for ref {pretty br}|]
|
||||
_ <- traceTime "TIME: exportRefOnly" $ exportRefOnly () remote (Just bFrom) br gh
|
||||
importRefLogNew False remote
|
||||
pure (Just br)
|
||||
|
||||
push remote [Nothing, Just br] = do
|
||||
|
||||
_ <- cfgValue @ConfBranch @(Set GitRef) <&> transformBi normalizeRef
|
||||
trace $ "deleting remote reference" <+> pretty br
|
||||
exportRefDeleted () remote br
|
||||
importRefLogNew False remote
|
||||
pure (Just br)
|
||||
|
||||
push r w = do
|
||||
warn $ "ignoring weird push" <+> pretty w <+> pretty r
|
||||
pure Nothing
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
{-# Language TemplateHaskell #-}
|
||||
{-# Language UndecidableInstances #-}
|
||||
module GitRemoteTypes where
|
||||
|
||||
import HBS2.Prelude
|
||||
import HBS2.OrDie
|
||||
import HBS2.Net.Proto
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.Peer.RPC.Client.StorageClient
|
||||
|
||||
import HBS2Git.Types
|
||||
import Control.Monad.Reader
|
||||
import Lens.Micro.Platform
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.HashMap.Strict (HashMap)
|
||||
import Control.Concurrent.STM
|
||||
import Control.Monad.Catch
|
||||
import Control.Monad.Trans.Resource
|
||||
|
||||
data RemoteEnv =
|
||||
RemoteEnv
|
||||
{ _reCreds :: TVar (HashMap RepoRef (PeerCredentials Schema))
|
||||
, _reKeys :: TVar (HashMap (PubKey 'Encrypt Schema) (PrivKey 'Encrypt Schema))
|
||||
, _reOpts :: TVar (HashMap String String)
|
||||
, _reRpc :: RPCEndpoints
|
||||
}
|
||||
|
||||
makeLenses 'RemoteEnv
|
||||
|
||||
newtype GitRemoteApp m a =
|
||||
GitRemoteApp { fromRemoteApp :: ReaderT RemoteEnv m a }
|
||||
deriving newtype ( Applicative
|
||||
, Functor
|
||||
, Monad
|
||||
, MonadIO
|
||||
, MonadReader RemoteEnv
|
||||
, MonadThrow
|
||||
, MonadCatch
|
||||
, MonadUnliftIO
|
||||
, MonadMask
|
||||
, MonadTrans
|
||||
)
|
||||
|
||||
instance Monad m => HasStorage (GitRemoteApp m) where
|
||||
getStorage = asks (rpcStorage . view reRpc) <&> AnyStorage . StorageClient
|
||||
|
||||
instance Monad m => HasRPC (GitRemoteApp m) where
|
||||
getRPC = asks (view reRpc)
|
||||
|
||||
runRemoteM :: MonadIO m => RemoteEnv -> GitRemoteApp m a -> m a
|
||||
runRemoteM env m = runReaderT (fromRemoteApp m) env
|
||||
|
||||
|
||||
instance MonadIO m => HasGlobalOptions (GitRemoteApp m) where
|
||||
addGlobalOption k v =
|
||||
asks (view reOpts ) >>= \t -> liftIO $ atomically $
|
||||
modifyTVar' t (HashMap.insert k v)
|
||||
|
||||
getGlobalOption k = do
|
||||
hm <- asks (view reOpts) >>= liftIO . readTVarIO
|
||||
pure (HashMap.lookup k hm)
|
||||
|
||||
instance MonadIO m => HasRefCredentials (GitRemoteApp m) where
|
||||
|
||||
setCredentials ref cred = do
|
||||
asks (view reCreds) >>= \t -> liftIO $ atomically $
|
||||
modifyTVar' t (HashMap.insert ref cred)
|
||||
|
||||
getCredentials ref = do
|
||||
hm <- asks (view reCreds) >>= liftIO . readTVarIO
|
||||
pure (HashMap.lookup ref hm) `orDie` "keyring not set (3)"
|
||||
|
||||
instance MonadIO m => HasEncryptionKeys (GitRemoteApp m) where
|
||||
addEncryptionKey ke = do
|
||||
asks (view reKeys) >>= \t -> liftIO $ atomically do
|
||||
modifyTVar' t (HashMap.insert (view krPk ke) (view krSk ke))
|
||||
|
||||
findEncryptionKey puk = (asks (view reKeys) >>= \t -> liftIO $ readTVarIO t) <&> HashMap.lookup puk
|
||||
|
||||
enumEncryptionKeys = do
|
||||
them <- (asks (view reKeys) >>= \t -> liftIO $ readTVarIO t) <&> HashMap.toList
|
||||
pure $ [KeyringEntry k s Nothing | (k,s) <- them ]
|
||||
|
|
@ -1,122 +1,219 @@
|
|||
{-# Language UndecidableInstances #-}
|
||||
module Main where
|
||||
|
||||
import HBS2.Prelude
|
||||
import HBS2.OrDie
|
||||
import HBS2.Git.Client.Prelude hiding (info)
|
||||
import HBS2.Git.Client.App
|
||||
import HBS2.Git.Client.Export
|
||||
import HBS2.Git.Client.Import
|
||||
import HBS2.Git.Client.State
|
||||
|
||||
import HBS2Git.App
|
||||
import HBS2Git.Export
|
||||
import HBS2Git.Tools
|
||||
import HBS2Git.KeysCommand
|
||||
import HBS2.Version
|
||||
import HBS2.Git.Data.RefLog
|
||||
import HBS2.Git.Local.CLI qualified as Git
|
||||
import HBS2.Git.Data.Tx qualified as TX
|
||||
import HBS2.Git.Data.Tx (RepoHead(..))
|
||||
import HBS2.Git.Data.LWWBlock
|
||||
import HBS2.Git.Data.GK
|
||||
|
||||
import RunShow
|
||||
import HBS2.Storage.Operations.ByteString
|
||||
|
||||
import Options.Applicative as O
|
||||
import Control.Monad
|
||||
import Data.Aeson qualified as Aeson
|
||||
import Data.ByteString.Lazy qualified as LBS
|
||||
|
||||
import Paths_hbs2_git qualified as Pkg
|
||||
import System.Exit
|
||||
|
||||
globalOptions :: Parser [GitOption]
|
||||
globalOptions = do
|
||||
|
||||
t <- flag [] [GitTrace]
|
||||
( long "trace" <> short 't' <> help "allow trace"
|
||||
)
|
||||
|
||||
d <- flag [] [GitDebug]
|
||||
( long "debug" <> short 'd' <> help "allow debug"
|
||||
)
|
||||
|
||||
pure (t <> d)
|
||||
|
||||
commands :: GitPerks m => Parser (GitCLI m ())
|
||||
commands =
|
||||
hsubparser ( command "export" (info pExport (progDesc "export repo to hbs2-git"))
|
||||
<> command "import" (info pImport (progDesc "import repo from reflog"))
|
||||
<> command "key" (info pKey (progDesc "key management"))
|
||||
<> command "tools" (info pTools (progDesc "misc tools"))
|
||||
)
|
||||
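-- A rough sketch of the resulting command-line surface, derived from the
-- sub-parsers defined below (flags and metavars as in those parsers):
--
--   git-hbs2 export [--new] (--public | --encrypted GROUP-KEY-FILE) REFLOG-KEY
--   git-hbs2 import LWWREF
--   git-hbs2 key [show [--full] | update REFLOG-KEY GROUP-KEY-FILE]
--   git-hbs2 tools (dump-pack | show-ref | show-remotes)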
|
||||
|
||||
pRefLogId :: ReadM RefLogId
|
||||
pRefLogId = maybeReader (fromStringMay @RefLogId)
|
||||
|
||||
|
||||
pLwwKey :: ReadM (LWWRefKey HBS2Basic)
|
||||
pLwwKey = maybeReader fromStringMay
|
||||
|
||||
pHashRef :: ReadM HashRef
|
||||
pHashRef = maybeReader (fromStringMay @HashRef)
|
||||
|
||||
pInit :: GitPerks m => Parser (GitCLI m ())
|
||||
pInit = do
|
||||
pure runDefault
|
||||
|
||||
|
||||
pExport :: GitPerks m => Parser (GitCLI m ())
|
||||
pExport = do
|
||||
|
||||
puk <- argument pLwwKey (metavar "REFLOG-KEY")
|
||||
|
||||
et <- flag ExportInc ExportNew
|
||||
( long "new" <> help "use --new to export to a new, empty reflog"
|
||||
)
|
||||
|
||||
enc <- flag' ExportPublic (long "public" <> help "create unencrypted reflog")
|
||||
<|>
|
||||
( ExportPrivate <$>
|
||||
strOption (long "encrypted" <> help "create encrypted reflog"
|
||||
<> metavar "GROUP-KEY-FILE")
|
||||
)
|
||||
|
||||
pure do
|
||||
git <- Git.gitDir >>= orThrowUser "not a git dir"
|
||||
notice (green "git dir" <+> pretty git <+> pretty (AsBase58 puk))
|
||||
|
||||
env <- ask
|
||||
|
||||
withGitEnv ( env & set gitApplyHeads False & set gitExportType et & set gitExportEnc enc) do
|
||||
unless (et == ExportNew) do
|
||||
importRepoWait puk
|
||||
|
||||
export puk mempty
|
||||
|
||||
pImport :: GitPerks m => Parser (GitCLI m ())
|
||||
pImport = do
|
||||
puk <- argument pLwwKey (metavar "LWWREF")
|
||||
|
||||
pure do
|
||||
git <- Git.gitDir >>= orThrowUser "not a git dir"
|
||||
importRepoWait puk
|
||||
|
||||
pTools :: GitPerks m => Parser (GitCLI m ())
|
||||
pTools = hsubparser ( command "dump-pack" (info pDumpPack (progDesc "dump hbs2 git pack"))
|
||||
<> command "show-ref" (info pShowRef (progDesc "show current references"))
|
||||
<> command "show-remotes" (info pShowLww (progDesc "show current remotes (hbs2 references)"))
|
||||
)
|
||||
|
||||
|
||||
data DumpOpt = DumpInfoOnly | DumpObjects | DumpPack
|
||||
|
||||
pDumpPack :: GitPerks m => Parser (GitCLI m ())
|
||||
pDumpPack = do
|
||||
what <- dumpInfoOnly <|> dumpObjects <|> dumpPack
|
||||
pure do
|
||||
co <- liftIO LBS.getContents
|
||||
|
||||
(idSize,idVer,sidx,pack) <- TX.unpackPackMay co
|
||||
& orThrowUser "can't unpack the bundle"
|
||||
|
||||
case what of
|
||||
DumpInfoOnly -> do
|
||||
liftIO $ print $ pretty "version:" <+> pretty idVer <> line
|
||||
<> "index size:" <+> pretty idSize <> line
|
||||
<> "objects:" <+> pretty (length sidx)
|
||||
DumpObjects -> do
|
||||
liftIO $ print $ vcat (fmap pretty sidx)
|
||||
|
||||
DumpPack -> do
|
||||
liftIO $ LBS.putStr pack
|
||||
|
||||
where
|
||||
dumpInfoOnly = flag DumpInfoOnly DumpInfoOnly
|
||||
( long "info-only" )
|
||||
|
||||
dumpObjects = flag DumpObjects DumpObjects
|
||||
( long "objects" )
|
||||
|
||||
dumpPack = flag DumpPack DumpPack
|
||||
( long "pack" )
|
||||
|
||||
|
||||
pShowLww :: GitPerks m => Parser (GitCLI m ())
|
||||
pShowLww = pure do
|
||||
items <- withState selectAllLww
|
||||
liftIO $ print $ vcat (fmap fmt items)
|
||||
where
|
||||
fmt (l,n,k) = fill 4 (pretty n) <+> fill 32 (pretty l) <+> fill 32 (pretty (AsBase58 k))
|
||||
|
||||
pShowRef :: GitPerks m => Parser (GitCLI m ())
|
||||
pShowRef = do
|
||||
pure do
|
||||
sto <- asks _storage
|
||||
void $ runMaybeT do
|
||||
|
||||
tx <- withState do
|
||||
selectMaxAppliedTx >>= lift . toMPlus <&> fst
|
||||
|
||||
rh <- TX.readRepoHeadFromTx sto tx >>= toMPlus
|
||||
|
||||
liftIO $ print $ vcat (fmap formatRef (_repoHeadRefs rh))
|
||||
|
||||
|
||||
pKey :: GitPerks m => Parser (GitCLI m ())
|
||||
pKey = hsubparser ( command "show" (info pKeyShow (progDesc "show current key"))
|
||||
<> command "update" (info pKeyUpdate (progDesc "update current key"))
|
||||
)
|
||||
<|> pKeyShow
|
||||
|
||||
pKeyShow :: GitPerks m => Parser (GitCLI m ())
|
||||
pKeyShow = do
|
||||
full <- flag False True (long "full" <> help "show full key info")
|
||||
pure do
|
||||
sto <- asks _storage
|
||||
void $ runMaybeT do
|
||||
|
||||
tx <- withState do
|
||||
selectMaxAppliedTx >>= lift . toMPlus <&> fst
|
||||
|
||||
rh <- TX.readRepoHeadFromTx sto tx
|
||||
>>= toMPlus
|
||||
|
||||
gkh <- toMPlus (_repoHeadGK0 rh)
|
||||
|
||||
if not full then do
|
||||
liftIO $ print $ pretty gkh
|
||||
else do
|
||||
gk <- runExceptT (readGK0 sto gkh) >>= toMPlus
|
||||
liftIO $ print $ ";; group key" <+> pretty gkh <> line <> line <> pretty gk
|
||||
|
||||
pKeyUpdate :: GitPerks m => Parser (GitCLI m ())
|
||||
pKeyUpdate = do
|
||||
rlog <- argument pRefLogId (metavar "REFLOG-KEY")
|
||||
fn <- strArgument (metavar "GROUP-KEY-FILE")
|
||||
pure do
|
||||
gk <- loadGK0FromFile fn
|
||||
`orDie` "can not load group key or invalid format"
|
||||
|
||||
sto <- asks _storage
|
||||
|
||||
gh <- writeAsMerkle sto (serialise gk) <&> HashRef
|
||||
|
||||
added <- withState $ runMaybeT do
|
||||
(tx,_) <- lift selectMaxAppliedTx >>= toMPlus
|
||||
lift do
|
||||
insertNewGK0 rlog tx gh
|
||||
commitAll
|
||||
pure gh
|
||||
|
||||
case added of
|
||||
Nothing -> liftIO $ putStrLn "not added" >> exitFailure
|
||||
Just x -> liftIO $ print $ pretty x
|
||||
|
||||
main :: IO ()
|
||||
main = join . customExecParser (prefs showHelpOnError) $
|
||||
info (helper <*> parser)
|
||||
main = do
|
||||
(o, action) <- customExecParser (prefs showHelpOnError) $
|
||||
O.info (liftA2 (,) globalOptions commands <**> helper)
|
||||
( fullDesc
|
||||
<> header "git-hbs2"
|
||||
<> progDesc "helper tool for hbs2-git"
|
||||
)
|
||||
where
|
||||
parser :: Parser (IO ())
|
||||
parser = hsubparser ( command "init" (info pInit (progDesc "init new hbs2 repo"))
|
||||
<> command "list-refs" (info pListRefs (progDesc "list refs"))
|
||||
<> command "show" (info pShow (progDesc "show various types of objects"))
|
||||
<> command "tools" (info pTools (progDesc "misc tools"))
|
||||
<> command "key" (info pKeys (progDesc "manage keys"))
|
||||
<> command "version" (info pVersion (progDesc "show program version"))
|
||||
<> header "hbs2-git"
|
||||
<> progDesc "hbs2-git"
|
||||
)
|
||||
|
||||
pVersion = pure do
|
||||
LBS.putStr $ Aeson.encode $(inlineBuildVersion Pkg.version)
|
||||
|
||||
pExport = do
|
||||
keyfile <- strArgument (metavar "KEYRING-FILE")
|
||||
pure $ runApp WithLog do
|
||||
runExport' keyfile
|
||||
|
||||
pListRefs = do
|
||||
pure $ runApp NoLog runListRefs
|
||||
|
||||
showReader s = if s == "config"
|
||||
then Just ShowConfig
|
||||
else ShowRef <$> fromStringMay s
|
||||
|
||||
pShow = do
|
||||
object <- optional $
|
||||
argument (maybeReader showReader) (metavar "object" <> help "<HASH-REF> | config")
|
||||
|
||||
pure $ runApp NoLog (runShow object)
|
||||
|
||||
pTools = hsubparser ( command "scan" (info pToolsScan (progDesc "scan reference"))
|
||||
<> command "export" (info pExport (progDesc "export repo"))
|
||||
<> command "refs" (info pToolsGetRefs (progDesc "list references"))
|
||||
|
||||
)
|
||||
|
||||
pToolsScan = do
|
||||
ref <- strArgument (metavar "HASH-REF")
|
||||
pure $ runApp WithLog (runToolsScan ref)
|
||||
|
||||
pToolsGetRefs = do
|
||||
ref <- strArgument (metavar "HASH-REF")
|
||||
pure $ runApp WithLog (runToolsGetRefs ref)
|
||||
runGitCLI o action
|
||||
|
||||
|
||||
pKeys = hsubparser ( command "list" (info pKeysList (progDesc "list keys for refs"))
|
||||
<> command "refs" (info pKeyRefsList (progDesc "list encrypted refs"))
|
||||
<> command "update" (info pKeyUpdate (progDesc "update key for the ref"))
|
||||
)
|
||||
|
||||
|
||||
pKeyUpdate = do
|
||||
ref <- strArgument (metavar "REF-KEY")
|
||||
pure $ do
|
||||
rk <- pure (fromStringMay ref) `orDie` "invalid REF-KEY"
|
||||
runApp WithLog (runKeysUpdate rk)
|
||||
|
||||
pKeyRefsList = do
|
||||
pure $ do
|
||||
runApp WithLog runKeyRefsList
|
||||
|
||||
pKeysList = do
|
||||
ref <- strArgument (metavar "REF-KEY")
|
||||
pure $ do
|
||||
rk <- pure (fromStringMay ref) `orDie` "invalid REF-KEY"
|
||||
runApp WithLog (runKeysList rk)
|
||||
|
||||
pInit = do
|
||||
opts <- pOpts
|
||||
pure do
|
||||
runInit (runInitRepo opts)
|
||||
|
||||
where
|
||||
pOpts = pInteractive
|
||||
|
||||
pInteractive = NewRepoOpts <$> optional pKeyring
|
||||
<*> pEncryption
|
||||
|
||||
|
||||
pEncryption = pEncryptionHere <|> pure Nothing
|
||||
|
||||
pEncryptionHere = do
|
||||
puk <- option pEncPk ( short 'p' <> long "encryption-pk" <> help "public key for encryption")
|
||||
fn <- strOption ( short 'e' <> long "keyring-enc" <> help "keyring for encryption" )
|
||||
pure $ Just (puk, fn)
|
||||
|
||||
|
||||
pEncPk :: ReadM (PubKey 'Encrypt (Encryption L4Proto))
|
||||
pEncPk = eitherReader $
|
||||
maybe (Left "invalid encryption public key") pure . fromStringMay
|
||||
|
||||
pKeyring = do
|
||||
strOption (short 'k' <> long "keyring" <> help "reference keyring file")
|
||||
|
||||
|
|
|
@ -1,53 +0,0 @@
|
|||
module RunShow where
|
||||
|
||||
import HBS2.Prelude
|
||||
import HBS2.Base58
|
||||
|
||||
import HBS2Git.App
|
||||
import HBS2Git.State
|
||||
import HBS2Git.Config
|
||||
import HBS2Git.Tools
|
||||
import HBS2Git.PrettyStuff
|
||||
|
||||
import Control.Monad.Catch (MonadMask)
|
||||
import Data.Foldable
|
||||
import Prettyprinter.Render.Terminal
|
||||
|
||||
data ShowObject = ShowRef RepoRef | ShowConfig
|
||||
|
||||
showRef :: (MonadIO m, MonadMask m) => RepoRef -> App m ()
|
||||
showRef h = do
|
||||
db <- makeDbPath h >>= dbEnv
|
||||
-- FIXME: re-implement-showRef
|
||||
pure ()
|
||||
-- withDB db do
|
||||
-- hd <- stateGetHead
|
||||
-- imported <- stateGetLastImported 10
|
||||
-- liftIO $ do
|
||||
-- print $ "current state for" <+> pretty (AsBase58 h)
|
||||
-- print $ "head:" <+> pretty hd
|
||||
-- print $ pretty "last operations:"
|
||||
-- for_ imported (\(t,h1,h2) -> print $ pretty t <+> pretty h1 <+> pretty h2)
|
||||
|
||||
showRefs :: (MonadIO m, MonadMask m) => App m ()
|
||||
showRefs = do
|
||||
liftIO $ putDoc $ line <> green "References:" <> section
|
||||
runListRefs
|
||||
|
||||
showConfig :: (MonadIO m, MonadMask m) => App m ()
|
||||
showConfig = liftIO do
|
||||
ConfigPathInfo{..} <- getConfigPathInfo
|
||||
cfg <- readFile configFilePath
|
||||
putDoc $ green "Config file location:" <> section <> pretty configFilePath <> section
|
||||
putDoc $ green "Config contents:" <> line <> pretty cfg
|
||||
|
||||
showSummary :: (MonadIO m, MonadMask m) => App m ()
|
||||
showSummary = do
|
||||
showRefs
|
||||
liftIO $ putDoc section
|
||||
showConfig
|
||||
|
||||
runShow :: (MonadIO m, MonadMask m) => Maybe ShowObject -> App m ()
|
||||
runShow (Just (ShowRef h)) = showRef h
|
||||
runShow (Just ShowConfig) = showConfig
|
||||
runShow Nothing = showSummary
|
|
@ -0,0 +1,215 @@
|
|||
module Main where
|
||||
|
||||
import Prelude hiding (getLine)
|
||||
|
||||
import HBS2.Git.Client.Prelude
|
||||
import HBS2.Git.Client.App
|
||||
import HBS2.Git.Client.Import
|
||||
import HBS2.Git.Client.Export
|
||||
import HBS2.Git.Client.State
|
||||
import HBS2.Git.Client.Progress
|
||||
import HBS2.Git.Client.Config
|
||||
import HBS2.Git.Data.RefLog
|
||||
import HBS2.Git.Data.Tx qualified as TX
|
||||
import HBS2.Git.Data.Tx (RepoHead(..))
|
||||
import HBS2.Git.Data.LWWBlock
|
||||
|
||||
import HBS2.System.Dir
|
||||
|
||||
import Control.Concurrent.STM qualified as STM
|
||||
import System.Posix.Signals
|
||||
import System.Environment
|
||||
import System.IO (hPutStrLn)
|
||||
import System.IO qualified as IO
|
||||
import System.Exit qualified as Exit
|
||||
|
||||
import Data.ByteString.Char8 qualified as BS8
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS8
|
||||
import Data.Attoparsec.ByteString.Char8 hiding (try)
|
||||
import Data.Attoparsec.ByteString.Char8 qualified as Atto
|
||||
import Data.Maybe
|
||||
import Data.HashMap.Strict qualified as HM
|
||||
import Data.List qualified as L
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import System.Exit hiding (die)
|
||||
|
||||
{- HLINT ignore "Use isEOF" -}
|
||||
{- HLINT ignore "Use putStrLn" -}
|
||||
|
||||
done :: MonadIO m => m Bool
|
||||
done = hIsEOF stdin
|
||||
|
||||
getLine :: MonadIO m => m String
|
||||
getLine = liftIO IO.getLine
|
||||
|
||||
sendLine :: MonadIO m => String -> m ()
|
||||
sendLine = liftIO . IO.putStrLn
|
||||
|
||||
die :: (MonadIO m, Pretty a) => a -> m b
|
||||
die s = liftIO $ Exit.die (show $ pretty s)
|
||||
|
||||
parseURL :: String -> Maybe (LWWRefKey HBS2Basic)
|
||||
parseURL s = eitherToMaybe $ Atto.parseOnly p (BS8.pack s)
|
||||
where
|
||||
p = do
|
||||
void $ string "hbs21://" <|> string "hbs2://"
|
||||
|
||||
Atto.takeWhile1 (`elem` getAlphabet)
|
||||
<&> BS8.unpack
|
||||
<&> fromStringMay @(LWWRefKey HBS2Basic)
|
||||
>>= maybe (fail "invalid reflog key") pure
|
||||
|
||||
parsePush :: String -> Maybe (Maybe GitRef, GitRef)
|
||||
parsePush s = eitherToMaybe $ Atto.parseOnly p (BS8.pack s)
|
||||
where
|
||||
gitref = fromString @GitRef . BS8.unpack
|
||||
p = do
|
||||
a <- optional (Atto.takeWhile1 (/= ':')) <&> fmap gitref
|
||||
char ':'
|
||||
b <- Atto.takeWhile1 (const True) <&> gitref
|
||||
pure (a,b)
|
||||
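-- A minimal sketch of what these parsers accept; the base58 key below is
-- only an example value borrowed from configs elsewhere in this repo:
--
--   parseURL  "hbs2://BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP"
--     ~> Just <lww ref key>
--   parsePush "refs/heads/master:refs/heads/master"
--     ~> Just (Just "refs/heads/master", "refs/heads/master")
--   parsePush ":refs/heads/master"          -- deletion of the remote ref
--     ~> Just (Nothing, "refs/heads/master")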
|
||||
data S =
|
||||
Plain
|
||||
| Push
|
||||
deriving stock (Eq,Ord,Show,Enum)
|
||||
|
||||
|
||||
{- HLINT ignore "Functor law" -}
|
||||
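-- The main loop implements the git remote-helper protocol over stdin/stdout:
-- it answers "capabilities" and "list", queues "push <src>:<dst>" requests,
-- and exports the queued refs in one batch when the push block ends with an
-- empty line.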
main :: IO ()
|
||||
main = do
|
||||
hSetBuffering stdin LineBuffering
|
||||
hSetBuffering stdout LineBuffering
|
||||
|
||||
void $ installHandler sigPIPE Ignore Nothing
|
||||
|
||||
args <- getArgs
|
||||
|
||||
(remote, puk) <- case args of
|
||||
[s, u] ->
|
||||
(s,) <$> pure (parseURL u)
|
||||
`orDie` show ("invalid reflog" <+> pretty u)
|
||||
|
||||
_ -> die "bad args"
|
||||
|
||||
runGitCLI mempty $ do
|
||||
|
||||
env <- ask
|
||||
|
||||
flip runContT pure do
|
||||
|
||||
lift $ withGitEnv (env & set gitApplyHeads False) do
|
||||
|
||||
debug $ red "run" <+> pretty args
|
||||
|
||||
sto <- asks _storage
|
||||
ip <- asks _progress
|
||||
|
||||
importRepoWait puk
|
||||
`catch` (\(_ :: ImportRefLogNotFound) -> do
|
||||
onProgress ip ImportAllDone
|
||||
let url = headMay (catMaybes [ parseURL a | a <- args]) <&> AsBase58
|
||||
pause @'Seconds 0.25
|
||||
liftIO $ hFlush stderr
|
||||
liftIO $ hPutDoc stderr $ ""
|
||||
<> ul (yellow "Reference" <+> pretty url <+> yellow "is not available yet.") <> line
|
||||
<> "If you sure it's a new one -- make sure you've added the key to hbs2-keyman and then run"
|
||||
<> line <> line
|
||||
<> "hbs2-keyman update" <> line <> line
|
||||
<> "git" <+> pretty hbs2Name <+> "export --new" <+> pretty url <> line <> line
|
||||
<> "to init the reflog first." <> line
|
||||
<> "Pushing to an existing reflog as a new one may cause unwanted data duplication." <> line
|
||||
<> line
|
||||
<> "Note: what ever pushed -- can not be unpushed" <> line
|
||||
<> "If it's not a new reflog --- just wait until it became available"
|
||||
liftIO exitFailure
|
||||
)
|
||||
`catch` ( \(ImportTxApplyError h) -> do
|
||||
onProgress ip ImportAllDone
|
||||
pause @'Seconds 0.25
|
||||
liftIO $ hFlush stderr
|
||||
liftIO $ hPutDoc stderr $ red "Can not apply tx" <+> pretty h <> line <> line
|
||||
<> "It means you don't have a key do decrypt this tx or the data is not completely downloaded yet"
|
||||
<> line
|
||||
|
||||
liftIO exitFailure
|
||||
)
|
||||
|
||||
void $ runExceptT do
|
||||
|
||||
tpush <- newTQueueIO -- @(GitRef, Maybe GitHash)
|
||||
|
||||
flip fix Plain $ \next s -> do
|
||||
|
||||
eof <- done
|
||||
|
||||
when eof $ pure ()
|
||||
|
||||
cmd <- ExceptT (try @_ @IOError (getLine <&> words))
|
||||
|
||||
debug $ "C:" <+> pretty cmd
|
||||
|
||||
case cmd of
|
||||
|
||||
[] | s == Plain -> do
|
||||
onProgress ip (ImportSetQuiet True)
|
||||
pure ()
|
||||
|
||||
[] | s == Push -> do
|
||||
refs <- atomically (STM.flushTQueue tpush)
|
||||
<&> HM.toList . HM.fromList
|
||||
|
||||
importRepoWait puk
|
||||
export puk refs
|
||||
sendLine ""
|
||||
next Plain
|
||||
|
||||
["capabilities"] -> do
|
||||
debug $ "send capabilities"
|
||||
sendLine "push"
|
||||
sendLine "fetch"
|
||||
sendLine ""
|
||||
next Plain
|
||||
|
||||
("list" : _) -> do
|
||||
|
||||
|
||||
r' <- runMaybeT $ withState do
|
||||
tx <- selectMaxAppliedTx >>= lift . toMPlus <&> fst
|
||||
|
||||
rh <- TX.readRepoHeadFromTx sto tx >>= lift . toMPlus
|
||||
pure (_repoHeadRefs rh)
|
||||
|
||||
let r = fromMaybe mempty r'
|
||||
|
||||
forM_ (fmap (show . formatRef) r) sendLine
|
||||
|
||||
sendLine ""
|
||||
|
||||
next Plain
|
||||
|
||||
("push" : pargs : _ ) -> do
|
||||
(fromRef, toRef) <- orThrowUser "can't parse push" (parsePush pargs)
|
||||
|
||||
r <- readProcess (setStderr closed $ shell [qc|git rev-parse {pretty $ fromRef}|])
|
||||
<&> headDef "" . LBS8.words . view _2
|
||||
<&> fromStringMay @GitHash . LBS8.unpack
|
||||
|
||||
let val = const r =<< fromRef
|
||||
|
||||
atomically $ writeTQueue tpush (toRef, val)
|
||||
|
||||
sendLine [qc|ok {pretty toRef}|]
|
||||
next Push
|
||||
|
||||
_ -> next Plain
|
||||
|
||||
pure ()
|
||||
|
||||
`finally` liftIO do
|
||||
hPutStrLn stdout "" >> hFlush stdout
|
||||
-- notice $ red "BYE"
|
||||
hPutStrLn stderr ""
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,203 @@
|
|||
module HBS2.Git.Client.App
|
||||
( module HBS2.Git.Client.App
|
||||
, module HBS2.Git.Client.App.Types
|
||||
) where
|
||||
|
||||
import HBS2.Git.Client.Prelude hiding (info)
|
||||
import HBS2.Git.Client.App.Types
|
||||
import HBS2.Git.Client.Config
|
||||
import HBS2.Git.Client.Progress
|
||||
import HBS2.Git.Client.State
|
||||
|
||||
import HBS2.Git.Data.Tx
|
||||
|
||||
import HBS2.Git.Local.CLI
|
||||
|
||||
import HBS2.System.Dir
|
||||
|
||||
import Data.Maybe
|
||||
import System.Environment
|
||||
import System.IO (hPutStr)
|
||||
import Data.Vector qualified as V
|
||||
import Data.Vector ((!))
|
||||
|
||||
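-- Render progress events from the queue as a single status line on stderr,
-- throttling frequent updates and showing a small spinner while waiting for
-- the reflog or transaction data.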
drawProgress :: MonadUnliftIO m => ProgressQ -> m ()
|
||||
drawProgress (ProgressQ q) = do
|
||||
|
||||
let spin = V.fromList ["--","\\","|","/"]
|
||||
let l = V.length spin
|
||||
i <- newTVarIO 0
|
||||
|
||||
tl <- newTVarIO =<< getTimeCoarse
|
||||
|
||||
let updateSpinner = do
|
||||
atomically $ modifyTVar i succ
|
||||
|
||||
let getSpinner = do
|
||||
j <- readTVarIO i <&> (`mod` l)
|
||||
pure $ spin ! j
|
||||
|
||||
let
|
||||
limit :: MonadIO m => Timeout 'Seconds -> m () -> m ()
|
||||
limit dt m = do
|
||||
t0 <- readTVarIO tl
|
||||
now <- getTimeCoarse
|
||||
when (expired dt (now - t0)) do
|
||||
atomically $ writeTVar tl now
|
||||
m
|
||||
|
||||
let loop = do
|
||||
flip fix False \next quiet -> do
|
||||
|
||||
let put s | quiet = pure ()
|
||||
| otherwise = putStatus s
|
||||
|
||||
ev <- atomically $ readTQueue q
|
||||
|
||||
case ev of
|
||||
ImportIdle -> do
|
||||
next quiet
|
||||
|
||||
ImportSetQuiet qq -> do
|
||||
put ""
|
||||
next qq
|
||||
|
||||
ImportWaitLWW n lww -> do
|
||||
limit 0.25 $ put ("wait lwwref" <+> pretty lww <+> pretty n)
|
||||
next quiet
|
||||
|
||||
ImportRefLogStart puk -> do
|
||||
put ("wait reflog" <+> pretty (AsBase58 puk))
|
||||
next quiet
|
||||
|
||||
ImportRefLogDone puk Nothing -> do
|
||||
updateSpinner
|
||||
c <- getSpinner
|
||||
put ("wait reflog" <+> pretty (AsBase58 puk) <+> pretty c)
|
||||
next quiet
|
||||
|
||||
ImportRefLogDone _ (Just h) -> do
|
||||
put ("reflog value" <+> pretty h)
|
||||
next quiet
|
||||
|
||||
ImportWaitTx h -> do
|
||||
updateSpinner
|
||||
c <- getSpinner
|
||||
put ("wait tx data" <+> pretty h <+> pretty c)
|
||||
next quiet
|
||||
|
||||
ImportScanTx h -> do
|
||||
put ("scan tx" <+> pretty h)
|
||||
next quiet
|
||||
|
||||
ImportApplyTx h -> do
|
||||
put ("apply tx" <+> pretty h)
|
||||
next quiet
|
||||
|
||||
ImportApplyTxError h s -> do
|
||||
limit 0.25 $ put $ red ("failed" <+> pretty s) <+> pretty h
|
||||
next quiet
|
||||
|
||||
ImportReadBundleChunk meta (Progress s _) -> do
|
||||
let h = bundleHash meta
|
||||
let e = if bundleEncrypted meta then yellow "@" else ""
|
||||
limit 0.5 $ put $ "read pack" <+> e <> pretty h <+> pretty s
|
||||
next quiet
|
||||
|
||||
ExportWriteObject (Progress s _) -> do
|
||||
limit 0.5 $ put $ "write object" <+> pretty s
|
||||
next quiet
|
||||
|
||||
ImportAllDone -> do
|
||||
put "\n"
|
||||
|
||||
loop
|
||||
`finally` do
|
||||
putStatus ""
|
||||
|
||||
where
|
||||
putStatus :: MonadUnliftIO m => Doc AnsiStyle -> m ()
|
||||
putStatus s = do
|
||||
liftIO $ hPutStr stderr $ toStringANSI $ "\r" <> fill 80 "" <> "\r" <> pretty (take 74 (toStringANSI s))
|
||||
liftIO $ hFlush stderr
|
||||
|
||||
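-- Common entry point for the hbs2-git executables: locate the peer RPC
-- socket, open the UNIX messaging channel and service endpoints, read the
-- per-repository config, build the GitEnv and run the given action with
-- progress reporting and logging, shutting everything down afterwards.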
runGitCLI :: (GitPerks m) => [GitOption] -> GitCLI m a -> m a
|
||||
runGitCLI o m = do
|
||||
|
||||
soname <- runExceptT getSocketName
|
||||
>>= orThrowUser "no rpc socket"
|
||||
|
||||
flip runContT pure do
|
||||
|
||||
client <- lift $ race (pause @'Seconds 1) (newMessagingUnix False 1.0 soname)
|
||||
>>= orThrowUser ("can't connect to" <+> pretty soname)
|
||||
|
||||
void $ ContT $ withAsync $ runMessagingUnix client
|
||||
|
||||
peerAPI <- makeServiceCaller @PeerAPI (fromString soname)
|
||||
refLogAPI <- makeServiceCaller @RefLogAPI (fromString soname)
|
||||
storageAPI <- makeServiceCaller @StorageAPI (fromString soname)
|
||||
lwwAPI <- makeServiceCaller @LWWRefAPI (fromString soname)
|
||||
|
||||
let endpoints = [ Endpoint @UNIX peerAPI
|
||||
, Endpoint @UNIX refLogAPI
|
||||
, Endpoint @UNIX lwwAPI
|
||||
, Endpoint @UNIX storageAPI
|
||||
]
|
||||
|
||||
void $ ContT $ withAsync $ liftIO $ runReaderT (runServiceClientMulti endpoints) client
|
||||
|
||||
conf <- lift $ readConfig True
|
||||
|
||||
git <- gitDir
|
||||
>>= orThrowUser "git dir not set"
|
||||
>>= canonicalizePath
|
||||
|
||||
q <- lift newProgressQ
|
||||
let ip = AnyProgress q
|
||||
|
||||
cpath <- lift getConfigDir
|
||||
|
||||
progress <- ContT $ withAsync (drawProgress q)
|
||||
|
||||
env <- lift $ newGitEnv ip o git cpath conf peerAPI refLogAPI lwwAPI storageAPI
|
||||
lift $ runReaderT setupLogging env
|
||||
lift $ withGitEnv env (evolveDB >> m)
|
||||
`finally` do
|
||||
onProgress ip ImportAllDone
|
||||
cancel progress
|
||||
shutDownLogging
|
||||
|
||||
runDefault :: GitPerks m => GitCLI m ()
|
||||
runDefault = do
|
||||
pure ()
|
||||
|
||||
setupLogging :: (GitPerks m, HasGitOpts m) => m ()
|
||||
setupLogging = do
|
||||
|
||||
traceEnv <- liftIO $ lookupEnv "HBS2TRACE" <&> isJust
|
||||
|
||||
setLogging @INFO defLog
|
||||
setLogging @ERROR (logPrefix "" . toStderr)
|
||||
setLogging @WARN (logPrefix "" . toStderr)
|
||||
setLogging @NOTICE (logPrefix "" . toStderr)
|
||||
|
||||
dbg <- debugEnabled
|
||||
|
||||
when (dbg || traceEnv) do
|
||||
setLogging @DEBUG (logPrefix "" . toStderr)
|
||||
|
||||
trc <- traceEnabled
|
||||
|
||||
when (trc || traceEnv) do
|
||||
setLogging @TRACE (logPrefix "" . toStderr)
|
||||
|
||||
shutDownLogging :: MonadUnliftIO m => m ()
|
||||
shutDownLogging = do
|
||||
setLoggingOff @INFO
|
||||
setLoggingOff @ERROR
|
||||
setLoggingOff @WARN
|
||||
setLoggingOff @NOTICE
|
||||
setLoggingOff @DEBUG
|
||||
setLoggingOff @TRACE
|
||||
|
|
@ -0,0 +1,168 @@
|
|||
{-# OPTIONS_GHC -fno-warn-orphans #-}
|
||||
{-# Language UndecidableInstances #-}
|
||||
module HBS2.Git.Client.App.Types
|
||||
( module HBS2.Git.Client.App.Types
|
||||
, module HBS2.Git.Client.App.Types.GitEnv
|
||||
, module HBS2.Git.Local
|
||||
, module Data.Config.Suckless
|
||||
, module Control.Monad.Catch
|
||||
) where
|
||||
|
||||
import HBS2.Git.Client.Prelude hiding (info)
|
||||
import HBS2.Git.Client.Progress
|
||||
import HBS2.Git.Local
|
||||
import HBS2.Git.Client.App.Types.GitEnv
|
||||
|
||||
import HBS2.Git.Data.Tx
|
||||
import HBS2.Git.Data.GK
|
||||
|
||||
import HBS2.KeyMan.Keys.Direct
|
||||
import HBS2.Net.Auth.GroupKeySymm
|
||||
import HBS2.Storage.Operations.ByteString
|
||||
import HBS2.System.Dir
|
||||
|
||||
import Data.Config.Suckless
|
||||
import Control.Monad.Catch (MonadThrow(..))
|
||||
import DBPipe.SQLite
|
||||
import Data.HashMap.Strict qualified as HM
|
||||
import Data.Maybe
|
||||
import Data.Word
|
||||
|
||||
type Epoch = Word64
|
||||
|
||||
data GitOption = GitTrace
|
||||
| GitDebug
|
||||
| GitExport ExportType
|
||||
| GitEnc ExportEncryption
|
||||
| GitDontApplyHeads
|
||||
deriving stock (Eq,Ord)
|
||||
|
||||
|
||||
|
||||
newtype GitCLI m a = GitCLI { fromGitCLI :: ReaderT GitEnv m a }
|
||||
deriving newtype ( Applicative
|
||||
, Functor
|
||||
, Monad
|
||||
, MonadIO
|
||||
, MonadUnliftIO
|
||||
, MonadTrans
|
||||
, MonadReader GitEnv
|
||||
, MonadThrow
|
||||
)
|
||||
|
||||
-- type GitPerks m = ( MonadUnliftIO m, MonadThrow m )
|
||||
type GitPerks m = ( MonadUnliftIO m )
|
||||
|
||||
instance Monad m => HasProgressIndicator (GitCLI m) where
|
||||
getProgressIndicator = asks _progress
|
||||
|
||||
instance Monad m => HasStorage (GitCLI m) where
|
||||
getStorage = asks _storage
|
||||
|
||||
instance Monad m => HasAPI PeerAPI UNIX (GitCLI m) where
|
||||
getAPI = asks _peerAPI
|
||||
|
||||
instance Monad m => HasAPI LWWRefAPI UNIX (GitCLI m) where
|
||||
getAPI = asks _lwwRefAPI
|
||||
|
||||
instance Monad m => HasAPI RefLogAPI UNIX (GitCLI m) where
|
||||
getAPI = asks _refLogAPI
|
||||
|
||||
instance MonadReader GitEnv m => HasAPI RefLogAPI UNIX (ExceptT e m) where
|
||||
getAPI = asks _refLogAPI
|
||||
|
||||
instance MonadReader GitEnv m => HasAPI LWWRefAPI UNIX (ExceptT e m) where
|
||||
getAPI = asks _lwwRefAPI
|
||||
|
||||
instance MonadReader GitEnv m => HasAPI PeerAPI UNIX (ExceptT e m) where
|
||||
getAPI = asks _peerAPI
|
||||
|
||||
newGitEnv :: GitPerks m
|
||||
=> AnyProgress
|
||||
-> [GitOption]
|
||||
-> FilePath
|
||||
-> FilePath
|
||||
-> Config
|
||||
-> ServiceCaller PeerAPI UNIX
|
||||
-> ServiceCaller RefLogAPI UNIX
|
||||
-> ServiceCaller LWWRefAPI UNIX
|
||||
-> ServiceCaller StorageAPI UNIX
|
||||
-> m GitEnv
|
||||
|
||||
newGitEnv p opts path cpath conf peer reflog lww sto = do
|
||||
let dbfile = cpath </> "state.db"
|
||||
let dOpt = dbPipeOptsDef { dbLogger = \x -> debug ("state:" <+> pretty x) }
|
||||
db <- newDBPipeEnv dOpt dbfile
|
||||
cache <- newTVarIO mempty
|
||||
pure $ GitEnv
|
||||
traceOpt
|
||||
debugOpt
|
||||
applyHeadsOpt
|
||||
exportType
|
||||
exportEnc
|
||||
path
|
||||
cpath
|
||||
conf
|
||||
peer
|
||||
reflog
|
||||
lww
|
||||
(AnyStorage (StorageClient sto))
|
||||
db
|
||||
p
|
||||
cache
|
||||
where
|
||||
traceOpt = GitTrace `elem` opts
|
||||
debugOpt = GitDebug `elem` opts
|
||||
applyHeadsOpt = GitDontApplyHeads `notElem` opts
|
||||
-- FIXME: from-options
|
||||
exportType = lastDef ExportInc [ t | GitExport t <- opts ]
|
||||
exportEnc = lastDef ExportPublic [ t | GitEnc t <- opts ]
|
||||
|
||||
withGitEnv :: GitPerks m => GitEnv -> GitCLI m a -> m a
|
||||
withGitEnv env m = runReaderT (fromGitCLI m) env
|
||||
|
||||
instance (GitPerks m, MonadReader GitEnv m) => GroupKeyOperations m where
|
||||
|
||||
-- FIXME: may-be-faster
|
||||
loadKeyrings gkh = do
|
||||
|
||||
sto <- asks _storage
|
||||
cache <- asks _keyringCache
|
||||
|
||||
let k = gkh
|
||||
|
||||
ke <- readTVarIO cache <&> HM.lookup k
|
||||
|
||||
case ke of
|
||||
Just es -> pure es
|
||||
Nothing -> do
|
||||
|
||||
rcpt <- fromMaybe mempty <$> runMaybeT do
|
||||
runExceptT (readGK0 sto gkh)
|
||||
>>= toMPlus
|
||||
<&> HM.keys . recipients
|
||||
|
||||
es <- runKeymanClient $ do
|
||||
loadKeyRingEntries rcpt
|
||||
<&> fmap snd
|
||||
|
||||
atomically $ modifyTVar cache (HM.insert k es)
|
||||
pure es
|
||||
|
||||
openGroupKey gk = runMaybeT do
|
||||
ke' <- lift $ runKeymanClient do
|
||||
loadKeyRingEntries (HM.keys $ recipients gk)
|
||||
<&> headMay
|
||||
|
||||
(_, KeyringEntry{..}) <- toMPlus ke'
|
||||
|
||||
toMPlus $ lookupGroupKey _krSk _krPk gk
|
||||
|
||||
class HasGitOpts m where
|
||||
debugEnabled :: m Bool
|
||||
traceEnabled :: m Bool
|
||||
|
||||
instance MonadReader GitEnv m => HasGitOpts m where
|
||||
debugEnabled = asks _gitDebugEnabled
|
||||
traceEnabled = asks _gitTraceEnabled
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
{-# Language TemplateHaskell #-}
|
||||
{-# Language UndecidableInstances #-}
|
||||
module HBS2.Git.Client.App.Types.GitEnv where
|
||||
|
||||
import HBS2.Git.Client.Prelude hiding (info)
|
||||
|
||||
import HBS2.Git.Client.Progress
|
||||
|
||||
import HBS2.Net.Auth.GroupKeySymm
|
||||
|
||||
import Data.Config.Suckless
|
||||
import DBPipe.SQLite
|
||||
import Data.HashMap.Strict (HashMap)
|
||||
|
||||
data ExportType = ExportNew
|
||||
| ExportFork HashRef
|
||||
| ExportInc
|
||||
deriving stock (Eq,Ord,Generic,Show)
|
||||
|
||||
data ExportEncryption =
|
||||
ExportPublic
|
||||
| ExportPrivate FilePath
|
||||
deriving stock (Eq,Ord,Generic,Show)
|
||||
|
||||
type Config = [Syntax C]
|
||||
|
||||
class Monad m => HasProgressIndicator m where
|
||||
getProgressIndicator :: m AnyProgress
|
||||
|
||||
class HasAPI api proto m where
|
||||
getAPI :: m (ServiceCaller api proto)
|
||||
|
||||
data GitEnv =
|
||||
GitEnv
|
||||
{ _gitTraceEnabled :: Bool
|
||||
, _gitDebugEnabled :: Bool
|
||||
, _gitApplyHeads :: Bool
|
||||
, _gitExportType :: ExportType
|
||||
, _gitExportEnc :: ExportEncryption
|
||||
, _gitPath :: FilePath
|
||||
, _configPath :: FilePath
|
||||
, _config :: Config
|
||||
, _peerAPI :: ServiceCaller PeerAPI UNIX
|
||||
, _refLogAPI :: ServiceCaller RefLogAPI UNIX
|
||||
, _lwwRefAPI :: ServiceCaller LWWRefAPI UNIX
|
||||
, _storage :: AnyStorage -- ServiceCaller StorageAPI UNIX
|
||||
, _db :: DBPipeEnv
|
||||
, _progress :: AnyProgress
|
||||
, _keyringCache :: TVar (HashMap HashRef [KeyringEntry HBS2Basic])
|
||||
}
|
||||
|
||||
|
||||
makeLenses 'GitEnv
|
|
@ -0,0 +1,89 @@
|
|||
module HBS2.Git.Client.Config (getConfigDir, readConfig, getManifest, hbs2Name) where
|
||||
|
||||
import HBS2.Git.Client.Prelude
|
||||
import HBS2.Git.Client.App.Types
|
||||
|
||||
import HBS2.System.Dir
|
||||
import HBS2.Git.Local.CLI
|
||||
|
||||
import Data.List qualified as L
|
||||
import Data.Text qualified as Text
|
||||
import Data.Either
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
|
||||
|
||||
data ConfigDirNotFound = ConfigDirNotFound
|
||||
deriving stock (Show,Typeable,Generic)
|
||||
|
||||
instance HasErrorStatus ConfigDirNotFound where
|
||||
getStatus = const Failed
|
||||
|
||||
instance Exception ConfigDirNotFound
|
||||
|
||||
hbs2Name :: String
|
||||
hbs2Name = "hbs21"
|
||||
|
||||
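-- Resolve the per-repository hbs2-git config directory: for a normal
-- checkout it is the ".hbs2-git" directory next to ".git", for a bare
-- repository it is ".hbs2-git" inside the git directory itself.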
getConfigDir :: GitPerks m => m FilePath
|
||||
getConfigDir = do
|
||||
git <- gitDir >>= orThrow ConfigDirNotFound
|
||||
|
||||
let p = splitDirectories git & reverse
|
||||
|
||||
if headMay p == Just ".git" then
|
||||
pure $ joinPath $ reverse (".hbs2-git" : drop 1 p)
|
||||
else do
|
||||
pure $ git </> ".hbs2-git"
|
||||
|
||||
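-- Read the optional "manifest" file from the config directory. A minimal
-- sketch of a header this parser would pick up (values are illustrative):
--
--   name: "my-repo"
--   brief: "short description"
--
--   <free-form text follows after the blank line>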
getManifest :: GitPerks m => m (Text, Text, Maybe Text)
|
||||
getManifest = do
|
||||
dir <- getConfigDir
|
||||
let mf = dir </> "manifest"
|
||||
|
||||
let defname = takeFileName (takeDirectory dir) & Text.pack
|
||||
let defbrief = "n/a"
|
||||
|
||||
content <- liftIO (try @_ @IOException $ readFile mf)
|
||||
<&> fromRight ""
|
||||
|
||||
let txt = if L.null content then Nothing else Just (Text.pack content)
|
||||
|
||||
-- FIXME: size-hardcode
|
||||
let header = lines (take 1024 content)
|
||||
& takeWhile ( not . L.null )
|
||||
& unlines
|
||||
& parseTop
|
||||
& fromRight mempty
|
||||
|
||||
let name = lastDef defname [ n | ListVal [ SymbolVal "name:", LitStrVal n ] <- header ]
|
||||
let brief = lastDef defbrief [ n | ListVal [ SymbolVal "brief:", LitStrVal n ] <- header ]
|
||||
|
||||
pure (name,brief,txt)
|
||||
|
||||
readConfig :: (GitPerks m) => Bool -> m Config
|
||||
readConfig canTouch = do
|
||||
{- HLINT ignore "Functor law" -}
|
||||
confPath <- getConfigDir
|
||||
let confRoot = confPath </> "config"
|
||||
|
||||
when canTouch do
|
||||
|
||||
here <- doesPathExist confRoot
|
||||
|
||||
unless here do
|
||||
mkdir confPath
|
||||
liftIO $ writeFile confRoot defConf
|
||||
|
||||
try @_ @SomeException (liftIO (readFile confRoot))
|
||||
<&> fromRight mempty
|
||||
<&> parseTop
|
||||
<&> fromRight mempty
|
||||
|
||||
|
||||
defConf :: String
|
||||
defConf = [qc|;; hbs2-git config file
|
||||
; those branches will be replicated by default
|
||||
export include "refs/heads/master"
|
||||
export include "refs/heads/main"
|
||||
export exclude "refs/heads/*"
|
||||
export tags
|
||||
|]
|
|
@ -0,0 +1,342 @@
|
|||
module HBS2.Git.Client.Export (export) where
|
||||
|
||||
|
||||
import HBS2.Git.Client.Prelude hiding (info)
|
||||
import HBS2.Git.Client.App.Types
|
||||
import HBS2.Git.Client.Config
|
||||
import HBS2.Git.Client.RefLog
|
||||
import HBS2.Git.Client.State
|
||||
import HBS2.Git.Client.Progress
|
||||
|
||||
import HBS2.Git.Data.RefLog
|
||||
import HBS2.Git.Data.Tx
|
||||
import HBS2.Git.Data.LWWBlock
|
||||
import HBS2.Git.Data.GK
|
||||
|
||||
import HBS2.Git.Local.CLI
|
||||
|
||||
import HBS2.KeyMan.Keys.Direct
|
||||
|
||||
import HBS2.OrDie
|
||||
import HBS2.Storage.Operations.ByteString
|
||||
import HBS2.System.Dir
|
||||
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Data.Text qualified as Text
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS8
|
||||
import Data.ByteString.Char8 qualified as BS8
|
||||
import Data.ByteString.Lazy qualified as LBS
|
||||
import Data.ByteString.Builder as B
|
||||
import Data.HashSet qualified as HashSet
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.Maybe
|
||||
import Data.List qualified as L
|
||||
import Data.Ord (comparing)
|
||||
import Data.Either
|
||||
|
||||
data ExportError = ExportUnsupportedOperation
|
||||
| ExportBundleCreateError
|
||||
deriving stock (Show,Typeable)
|
||||
|
||||
instance Exception ExportError
|
||||
|
||||
instance HasErrorStatus ExportError where
|
||||
getStatus = \case
|
||||
ExportUnsupportedOperation -> Failed
|
||||
ExportBundleCreateError -> Failed
|
||||
|
||||
instance ToFilePath (GitRef, GitHash) where
|
||||
toFilePath (g, r) = show (pretty g)
|
||||
|
||||
{-# ANN module "HLint: ignore Eta reduce" #-}
|
||||
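-- Keep a ref when it matches any include pattern or matches no exclude
-- pattern; with the default config ("refs/heads/master" and "refs/heads/main"
-- included, "refs/heads/*" excluded) other local branches are dropped while
-- tags and HEAD pass through.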
filterPat :: ToFilePath a => [FilePattern] -> [FilePattern] -> [a] -> [a]
|
||||
filterPat inc excl refs = filter check refs
|
||||
where
|
||||
check r = i || not e
|
||||
where
|
||||
e = not $ L.null $ catMaybes [ match p (toFilePath r) | p <- excl ]
|
||||
i = not $ L.null $ catMaybes [ match p (toFilePath r) | p <- inc ]
|
||||
|
||||
refsForExport :: (MonadReader GitEnv m, MonadIO m) => [(GitRef, Maybe GitHash)] -> m [(GitRef,GitHash)]
|
||||
|
||||
refsForExport forPushL = do
|
||||
{- HLINT ignore "Functor law" -}
|
||||
|
||||
conf <- asks _config
|
||||
path <- asks _gitPath
|
||||
|
||||
let tags = headDef mempty [ "--tags" :: String | (ListVal [SymbolVal "export", SymbolVal "tags"] ) <- conf]
|
||||
|
||||
let incl = [ Text.unpack p
|
||||
| (ListVal [SymbolVal "export", SymbolVal "include", LitStrVal p]) <- conf
|
||||
]
|
||||
|
||||
let excl = [ Text.unpack p
|
||||
| (ListVal [SymbolVal "export", SymbolVal "exclude", LitStrVal p]) <- conf
|
||||
]
|
||||
|
||||
let forPush = [ (k,v) | (k, Just v) <- forPushL ] & HashMap.fromList
|
||||
|
||||
let deleted = [ k | (k, Nothing) <- forPushL ] & HashSet.fromList
|
||||
|
||||
debug $ red "CONF" <> pretty path <> line <> indent 2 (vcat (fmap pretty conf))
|
||||
|
||||
let cmd = [qc|git --git-dir={path} show-ref {tags} --heads --head|]
|
||||
|
||||
debug $ red "CMD" <+> pretty cmd
|
||||
debug $ "FILTERS" <+> pretty (incl, excl)
|
||||
debug $ red "DELETED" <+> pretty (HashSet.toList deleted)
|
||||
debug $ red "FOR-PUSH" <+> pretty (HashMap.toList forPush)
|
||||
|
||||
-- we always export HEAD so that git clone works correctly,
|
||||
-- so we also export the current branch,
|
||||
-- even if it is excluded by the filters
|
||||
|
||||
currentBranch <- gitRunCommand [qc|git --git-dir={path} symbolic-ref HEAD|]
|
||||
>>= orThrowUser "can't read HEAD 1"
|
||||
<&> GitRef . BS8.strip . LBS8.toStrict
|
||||
|
||||
currentVal <- gitRunCommand [qc|git --git-dir={path} rev-parse {pretty currentBranch}|]
|
||||
>>= orThrowUser "can't read HEAD 2"
|
||||
<&> (BS8.unpack . BS8.strip . LBS8.toStrict)
|
||||
<&> fromStringMay @GitHash
|
||||
>>= orThrowUser "invalid git hash for HEAD"
|
||||
|
||||
gitRunCommand cmd
|
||||
>>= orThrowUser ("can't read git repo" <+> pretty path)
|
||||
<&> LBS8.lines
|
||||
<&> fmap LBS8.words
|
||||
<&> mapMaybe \case
|
||||
[val,name] -> (GitRef (LBS8.toStrict name),) <$> fromStringMay @GitHash (LBS8.unpack val)
|
||||
_ -> Nothing
|
||||
<&> filterPat incl excl
|
||||
<&> HashMap.fromList
|
||||
<&> HashMap.filterWithKey (\k _ -> not (HashSet.member k deleted))
|
||||
<&> mappend forPush
|
||||
<&> mappend (HashMap.singleton currentBranch currentVal)
|
||||
<&> HashMap.toList
|
||||
<&> L.sortBy orderRefs
|
||||
|
||||
where
|
||||
orderRefs (GitRef "HEAD", _) _ = LT
|
||||
orderRefs _ (GitRef "HEAD", _) = GT
|
||||
orderRefs x y = comparing fst x y
|
||||
|
||||
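-- | Loads a "new" GK0 (group key) for the reflog. A GK0 read from the
--   export key file is written to storage first; if a previously applied
--   tx is given, the key is recorded against it in the local state and the
--   newest recorded GK0 is returned, otherwise the freshly stored key
--   (if any) is returned directly.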
loadNewGK0 :: (MonadIO m, MonadReader GitEnv m)
|
||||
=> RefLogId
|
||||
-> Maybe HashRef
|
||||
-> m (Maybe (HashRef,Epoch))
|
||||
|
||||
loadNewGK0 r = \case
|
||||
Nothing -> storeNewGK0
|
||||
|
||||
Just tx0 -> do
|
||||
href <- storeNewGK0
|
||||
withState do
|
||||
for_ href (insertNewGK0 r tx0 . fst)
|
||||
commitAll
|
||||
|
||||
withState $ selectNewGK0 r
|
||||
|
||||
storeNewGK0 :: (MonadIO m, MonadReader GitEnv m) => m (Maybe (HashRef,Epoch))
|
||||
storeNewGK0 = do
|
||||
sto <- asks _storage
|
||||
enc <- asks _gitExportEnc
|
||||
runMaybeT do
|
||||
gkf <- headMay [ f | ExportPrivate f <- [enc] ] & toMPlus
|
||||
gk <- loadGK0FromFile gkf >>= toMPlus
|
||||
epoch <- getEpoch
|
||||
writeAsMerkle sto (serialise gk) <&> HashRef <&> (,epoch)
|
||||
|
||||
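-- | Exports the given refs to the repository behind the LWW ref key:
--   subscribes to the LWW ref and its derived reflog, computes the refs
--   and git objects not covered by the last applied tx, packs them with
--   `git pack-objects`, wraps the pack into a bundle together with the
--   new repo head, and posts the resulting transaction via the hbs2-peer
--   reflog RPC.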
export :: ( GitPerks m
|
||||
, MonadReader GitEnv m
|
||||
, GroupKeyOperations m
|
||||
, HasAPI PeerAPI UNIX m
|
||||
)
|
||||
=> LWWRefKey HBS2Basic
|
||||
-> [(GitRef,Maybe GitHash)]
|
||||
-> m ()
|
||||
export key refs = do
|
||||
|
||||
git <- asks _gitPath
|
||||
sto <- asks _storage
|
||||
new <- asks _gitExportType <&> (== ExportNew)
|
||||
reflog <- asks _refLogAPI
|
||||
ip <- asks _progress
|
||||
|
||||
subscribeLWWRef key
|
||||
|
||||
(lww, LWWBlockData{..}) <- waitOrInitLWWRef
|
||||
|
||||
let puk0 = fromLwwRefKey key
|
||||
|
||||
debug $ red $ pretty $ AsBase58 lwwRefLogPubKey
|
||||
|
||||
(sk0,pk0) <- liftIO $ runKeymanClient do
|
||||
creds <- loadCredentials puk0
|
||||
>>= orThrowUser ("can't load credentials" <+> pretty (AsBase58 puk0))
|
||||
pure ( view peerSignSk creds, view peerSignPk creds )
|
||||
|
||||
(puk,sk) <- derivedKey @HBS2Basic @'Sign lwwRefSeed sk0
|
||||
|
||||
subscribeRefLog puk
|
||||
|
||||
myrefs <- refsForExport refs
|
||||
|
||||
let myrefsKey = L.sortOn fst myrefs & serialise & hashObject @HbSync & HashRef
|
||||
|
||||
flip runContT pure do
|
||||
callCC \exit -> do
|
||||
|
||||
|
||||
tx0 <- getLastAppliedTx
|
||||
|
||||
rh0 <- runMaybeT ( toMPlus tx0 >>= readRepoHeadFromTx sto >>= toMPlus )
|
||||
|
||||
(name,brief,mf) <- lift getManifest
|
||||
|
||||
gk0new0 <- loadNewGK0 puk tx0
|
||||
|
||||
let gk0old = _repoHeadGK0 =<< rh0
|
||||
|
||||
mbTxTime0 <- runMaybeT $ toMPlus tx0
|
||||
>>= withState .selectTxForRefLog puk
|
||||
>>= toMPlus
|
||||
|
||||
-- check the timestamp of the key recorded for this particular reflog:
|
||||
-- the repo head may be shared while the reflogs differ.
|
||||
-- if our key is newer than the last applied tx, apply our key;
|
||||
-- otherwise we will have to repeat this on the next export
|
||||
let gk0new = if (snd <$> gk0new0) > (snd <$> mbTxTime0) then
|
||||
fst <$> gk0new0
|
||||
else
|
||||
gk0old
|
||||
|
||||
let gk0 = gk0new <|> gk0old
|
||||
|
||||
repohead <- makeRepoHeadSimple name brief mf gk0 myrefs
|
||||
|
||||
let oldRefs = maybe mempty _repoHeadRefs rh0
|
||||
|
||||
trace $ "TX0" <+> pretty tx0
|
||||
|
||||
bss <- maybe (pure mempty) txBundles tx0
|
||||
|
||||
objs <- lift enumAllGitObjects
|
||||
>>= withState . filterM (notInTx tx0)
|
||||
|
||||
when (null objs && not new && oldRefs == myrefs) do
|
||||
exit ()
|
||||
|
||||
debug $ red "REFS-FOR-EXPORT:" <+> pretty myrefs
|
||||
|
||||
done <- withState (selectBundleByKey puk myrefsKey)
|
||||
|
||||
out <-
|
||||
if isJust done && not new then do
|
||||
pure []
|
||||
|
||||
else do
|
||||
|
||||
p <- ContT $ withGitPack
|
||||
|
||||
for_ (zip [1..] objs) $ \(n,o) -> do
|
||||
onProgress ip (ExportWriteObject (Progress n Nothing))
|
||||
liftIO $ LBS8.hPutStrLn (getStdin p) (LBS8.pack $ show $ pretty o)
|
||||
|
||||
code <- hFlush (getStdin p) >> hClose (getStdin p) >> getExitCode p
|
||||
|
||||
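-- Bundle payload layout (see unpackPackMay): 4-byte big-endian length of
-- the serialised index, 4-byte big-endian version tag (1), the
-- CBOR-serialised object index, then the raw `git pack-objects` stream.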
let idx = serialise objs
|
||||
let size = B.word32BE (fromIntegral $ LBS.length idx)
|
||||
let hdr = B.word32BE 1
|
||||
pack <- liftIO $ LBS.hGetContents (getStdout p)
|
||||
let out = B.toLazyByteString ( size <> hdr <> B.lazyByteString idx <> B.lazyByteString pack )
|
||||
pure [out]
|
||||
|
||||
rank <- getEpoch <&> fromIntegral
|
||||
|
||||
let rw = gk0new /= gk0old
|
||||
|
||||
debug $ red "MAKE TX" <+> pretty rw <+> pretty gk0old <+> "->" <+> pretty gk0new
|
||||
|
||||
tx <- lift $ makeTx sto rw rank puk (const $ pure (Just sk)) repohead bss out
|
||||
|
||||
r <- lift $ race (pause @'Seconds 1) (callService @RpcRefLogPost reflog tx)
|
||||
>>= orThrowUser "hbs2-peer rpc timeout"
|
||||
|
||||
when (isLeft r) $ exit ()
|
||||
|
||||
void $ runMaybeT do
|
||||
(_,_,bh) <- unpackTx tx
|
||||
withState (insertBundleKey puk myrefsKey bh)
|
||||
|
||||
where
|
||||
|
||||
findSK pk = liftIO $ runKeymanClient $ runMaybeT do
|
||||
creds <- lift (loadCredentials pk) >>= toMPlus
|
||||
pure (view peerSignSk creds)
|
||||
|
||||
waitOrInitLWWRef = do
|
||||
sto <- asks _storage
|
||||
new <- asks _gitExportType <&> (== ExportNew)
|
||||
|
||||
flip fix 3 $ \next n -> do
|
||||
blk <- readLWWBlock sto key
|
||||
|
||||
case blk of
|
||||
Just x -> pure x
|
||||
|
||||
Nothing | new && n > 0 -> do
|
||||
_ <- runExceptT (initLWWRef sto Nothing findSK key)
|
||||
>>= either ( throwIO . userError . show ) pure
|
||||
|
||||
next (pred n)
|
||||
|
||||
| otherwise -> do
|
||||
-- FIXME: detailed-error-description
|
||||
orThrowUser "lwwref not available" Nothing
|
||||
|
||||
|
||||
notInTx Nothing _ = pure True
|
||||
notInTx (Just tx0) obj = not <$> isObjectInTx tx0 obj
|
||||
|
||||
getLastAppliedTx = runMaybeT do
|
||||
(tx0,_) <- withState selectMaxAppliedTx
|
||||
>>= toMPlus
|
||||
pure tx0
|
||||
|
||||
txBundles tx0 = withDef =<< runMaybeT do
|
||||
|
||||
new <- asks _gitExportType <&> (== ExportNew)
|
||||
sto <- asks _storage
|
||||
|
||||
txbody <- runExceptT (readTx sto tx0)
|
||||
>>= orThrowUser ("missed blocks for tx" <+> pretty tx0)
|
||||
|
||||
let bref = view _4 txbody
|
||||
|
||||
readBundleRefs sto bref
|
||||
>>= orThrowUser ("missed blocks for tx" <+> pretty tx0)
|
||||
|
||||
where
|
||||
withDef Nothing = pure mempty
|
||||
withDef (Just x) = pure x
|
||||
|
||||
enumAllGitObjects :: (GitPerks m, MonadReader GitEnv m) => m [GitHash]
|
||||
enumAllGitObjects = do
|
||||
path <- asks _gitPath
|
||||
let rcmd = [qc|git --git-dir {path} cat-file --batch-check='%(objectname)' --batch-all-objects|]
|
||||
(_, out, _) <- liftIO $ readProcess (shell rcmd)
|
||||
pure $ LBS8.lines out & mapMaybe (fromStringMay @GitHash . LBS8.unpack)
|
||||
|
||||
|
||||
withGitPack :: (GitPerks m, MonadReader GitEnv m) => (Process Handle Handle () -> m a) -> m a
|
||||
withGitPack action = do
|
||||
fp <- asks _gitPath
|
||||
let cmd = "git"
|
||||
let args = ["--git-dir", fp, "pack-objects", "--stdout", "-q"]
|
||||
let config = setStdin createPipe $ setStdout createPipe $ setStderr closed $ proc cmd args
|
||||
p <- startProcess config
|
||||
action p
|
||||
|
||||
|
|
@ -0,0 +1,394 @@
|
|||
module HBS2.Git.Client.Import where
|
||||
|
||||
import HBS2.Git.Client.Prelude hiding (info)
|
||||
import HBS2.Git.Client.App.Types
|
||||
import HBS2.Git.Client.State
|
||||
import HBS2.Git.Client.RefLog
|
||||
import HBS2.Git.Client.Progress
|
||||
|
||||
import HBS2.Git.Data.RefLog
|
||||
import HBS2.Git.Data.Tx
|
||||
import HBS2.Git.Data.LWWBlock
|
||||
|
||||
import Data.ByteString.Lazy qualified as LBS
|
||||
|
||||
import Data.HashMap.Strict (HashMap)
|
||||
import Data.HashMap.Strict qualified as HM
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Streaming.Prelude qualified as S
|
||||
import System.IO (hPrint)
|
||||
import Data.Maybe
|
||||
|
||||
data ImportRefLogNotFound = ImportRefLogNotFound
|
||||
deriving stock (Typeable,Show)
|
||||
|
||||
instance Exception ImportRefLogNotFound
|
||||
|
||||
|
||||
data ImportTxApplyError = ImportTxApplyError HashRef
|
||||
deriving stock (Typeable,Show)
|
||||
|
||||
|
||||
instance Exception ImportTxApplyError
|
||||
|
||||
|
||||
data ImportTxError =
|
||||
ImportTxReadError HashRef
|
||||
| ImportOpError OperationError
|
||||
| ImportUnbundleError HashRef
|
||||
| ImportMissed HashRef
|
||||
deriving stock (Typeable)
|
||||
|
||||
instance Show ImportTxError where
|
||||
show (ImportTxReadError h) = [qc|ImportTxError {pretty h}|]
|
||||
show (ImportOpError o) = show o
|
||||
show (ImportUnbundleError h) = [qc|ImportUnbundleError {pretty h}|]
|
||||
show (ImportMissed h) = [qc|ImportMissed {pretty h}|]
|
||||
|
||||
instance Exception ImportTxError
|
||||
|
||||
data IState =
|
||||
IWaitLWWBlock Int
|
||||
| IWaitRefLog Int RefLogId
|
||||
| IScanRefLog RefLogId HashRef
|
||||
| IApplyTx HashRef
|
||||
| IExit
|
||||
|
||||
|
||||
-- class
|
||||
|
||||
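-- | Subscribes the peer to the LWW ref and to the reflog derived from it
--   without importing anything; returns the reflog public key once the
--   LWW block becomes readable, throwing ImportRefLogNotFound if it does
--   not show up in time.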
merelySubscribeRepo :: forall e s m . ( GitPerks m
|
||||
, HasStorage m
|
||||
, HasProgressIndicator m
|
||||
, HasAPI PeerAPI UNIX m
|
||||
, HasAPI LWWRefAPI UNIX m
|
||||
, HasAPI RefLogAPI UNIX m
|
||||
, e ~ L4Proto
|
||||
, s ~ Encryption e
|
||||
)
|
||||
=> LWWRefKey HBS2Basic
|
||||
-> m (Maybe (PubKey 'Sign s))
|
||||
merelySubscribeRepo lwwKey = do
|
||||
|
||||
ip <- getProgressIndicator
|
||||
sto <- getStorage
|
||||
|
||||
subscribeLWWRef lwwKey
|
||||
fetchLWWRef lwwKey
|
||||
|
||||
r <- flip fix (IWaitLWWBlock 10) $ \next -> \case
|
||||
|
||||
IWaitLWWBlock w | w <= 0 -> do
|
||||
throwIO ImportRefLogNotFound
|
||||
|
||||
IWaitLWWBlock w -> do
|
||||
onProgress ip (ImportWaitLWW w lwwKey)
|
||||
lww <- readLWWBlock sto lwwKey
|
||||
|
||||
case lww of
|
||||
Nothing -> do
|
||||
pause @'Seconds 2
|
||||
fetchLWWRef lwwKey
|
||||
next (IWaitLWWBlock (pred w))
|
||||
|
||||
Just (_, LWWBlockData{..}) -> do
|
||||
void $ try @_ @SomeException (getRefLogMerkle lwwRefLogPubKey)
|
||||
subscribeRefLog lwwRefLogPubKey
|
||||
pause @'Seconds 0.25
|
||||
pure $ Just lwwRefLogPubKey
|
||||
|
||||
_ -> pure Nothing
|
||||
|
||||
onProgress ip ImportAllDone
|
||||
pure r
|
||||
|
||||
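-- | Imports the repository behind the LWW ref, waiting for the data to
--   arrive: waits for the LWW block, then for the reflog, scans the
--   reflog for unknown transactions and applies the newest applicable
--   one, stepping through the IState machine
--   (IWaitLWWBlock -> IWaitRefLog -> IScanRefLog -> IApplyTx -> IExit).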
importRepoWait :: ( GitPerks m
|
||||
, MonadReader GitEnv m
|
||||
, HasAPI PeerAPI UNIX m
|
||||
, HasAPI LWWRefAPI UNIX m
|
||||
, HasAPI RefLogAPI UNIX m
|
||||
)
|
||||
=> LWWRefKey HBS2Basic
|
||||
-> m ()
|
||||
|
||||
importRepoWait lwwKey = do
|
||||
|
||||
env <- ask
|
||||
|
||||
ip <- asks _progress
|
||||
sto <- asks _storage
|
||||
|
||||
meet <- newTVarIO (mempty :: HashMap HashRef Int)
|
||||
|
||||
subscribeLWWRef lwwKey
|
||||
|
||||
fetchLWWRef lwwKey
|
||||
|
||||
flip fix (IWaitLWWBlock 20) $ \next -> \case
|
||||
|
||||
IWaitLWWBlock w | w <= 0 -> do
|
||||
throwIO ImportRefLogNotFound
|
||||
|
||||
IWaitLWWBlock w -> do
|
||||
onProgress ip (ImportWaitLWW w lwwKey)
|
||||
lww <- readLWWBlock sto lwwKey
|
||||
|
||||
case lww of
|
||||
Nothing -> do
|
||||
pause @'Seconds 2
|
||||
fetchLWWRef lwwKey
|
||||
next (IWaitLWWBlock (pred w))
|
||||
|
||||
Just (LWWRef{..}, LWWBlockData{..}) -> do
|
||||
|
||||
withState do
|
||||
insertLww lwwKey lwwSeq lwwRefLogPubKey
|
||||
|
||||
void $ try @_ @SomeException (getRefLogMerkle lwwRefLogPubKey)
|
||||
subscribeRefLog lwwRefLogPubKey
|
||||
pause @'Seconds 0.25
|
||||
getRefLogMerkle lwwRefLogPubKey
|
||||
next (IWaitRefLog 20 lwwRefLogPubKey)
|
||||
|
||||
IWaitRefLog w puk | w <= 0 -> do
|
||||
throwIO ImportRefLogNotFound
|
||||
|
||||
IWaitRefLog w puk -> do
|
||||
onProgress ip (ImportRefLogStart puk)
|
||||
try @_ @SomeException (getRefLogMerkle puk) >>= \case
|
||||
Left _ -> do
|
||||
onProgress ip (ImportRefLogDone puk Nothing)
|
||||
pause @'Seconds 2
|
||||
next (IWaitRefLog (pred w) puk)
|
||||
|
||||
Right Nothing -> do
|
||||
onProgress ip (ImportRefLogDone puk Nothing)
|
||||
pause @'Seconds 2
|
||||
next (IWaitRefLog (pred w) puk)
|
||||
|
||||
Right (Just h) -> do
|
||||
onProgress ip (ImportRefLogDone puk (Just h))
|
||||
next (IScanRefLog puk h)
|
||||
|
||||
IScanRefLog puk h -> do
|
||||
scanRefLog puk h
|
||||
withState (selectMaxSeqTxNotDone puk) >>= \case
|
||||
Just tx -> next (IApplyTx tx)
|
||||
Nothing -> do
|
||||
hasAnyTx <- withState existsAnyTxDone
|
||||
|
||||
if hasAnyTx then -- existing repo, it's a fetch
|
||||
next IExit
|
||||
else do
|
||||
void $ race (pause @'Seconds 10) do
|
||||
forever do
|
||||
onProgress ip (ImportWaitTx h)
|
||||
pause @'Seconds 0.25
|
||||
|
||||
next (IScanRefLog puk h)
|
||||
|
||||
IApplyTx h -> do
|
||||
onProgress ip (ImportApplyTx h)
|
||||
|
||||
r <- runExceptT (applyTx h)
|
||||
`catch` \case
|
||||
ImportUnbundleError{} -> pure (Left IncompleteData)
|
||||
_ -> throwIO (userError "tx apply / state read error")
|
||||
|
||||
|
||||
case r of
|
||||
|
||||
Left MissedBlockError -> do
|
||||
next =<< repeatOrExit
|
||||
|
||||
Left IncompleteData -> do
|
||||
atomically $ modifyTVar meet (HM.insertWith (+) h 1)
|
||||
onProgress ip (ImportApplyTxError h (Just "read/decrypt"))
|
||||
attempts <- readTVarIO meet <&> fromMaybe 0 . HM.lookup h
|
||||
|
||||
when (attempts >= 10 ) do
|
||||
throwIO (ImportTxApplyError h)
|
||||
|
||||
next =<< repeatOrExit
|
||||
|
||||
Left e -> do
|
||||
err (line <> red (viaShow e))
|
||||
throwIO (userError "tx apply / state read error")
|
||||
|
||||
Right{} -> next IExit
|
||||
|
||||
IExit -> do
|
||||
onProgress ip (ImportSetQuiet True)
|
||||
onProgress ip ImportAllDone
|
||||
|
||||
|
||||
where
|
||||
repeatOrExit = do
|
||||
hasAnyTx <- withState existsAnyTxDone
|
||||
if hasAnyTx then do
|
||||
pure IExit
|
||||
else do
|
||||
pause @'Seconds 2
|
||||
pure (IWaitLWWBlock 5)
|
||||
|
||||
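-- | Walks the reflog merkle tree, picks the transactions not yet known
--   to the local state, reads them and records their
--   (reflog, tx, seq, head, bundle) rows in a single database transaction.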
scanRefLog :: (GitPerks m, MonadReader GitEnv m)
|
||||
=> RefLogId
|
||||
-> HashRef
|
||||
-> m ()
|
||||
|
||||
scanRefLog puk rv = do
|
||||
sto <- asks _storage
|
||||
ip <- asks _progress
|
||||
env <- ask
|
||||
|
||||
txs <- S.toList_ $ do
|
||||
walkMerkle @[HashRef] (fromHashRef rv) (getBlock sto) $ \case
|
||||
Left he -> do
|
||||
err $ red "missed block" <+> pretty he
|
||||
|
||||
Right hxs -> do
|
||||
for_ hxs $ \htx -> do
|
||||
here <- lift (withState (existsTx htx))
|
||||
unless here (S.yield htx)
|
||||
|
||||
tx <- liftIO $ S.toList_ $ do
|
||||
for_ txs $ \tx -> do
|
||||
onProgress ip (ImportScanTx tx)
|
||||
runExceptT (readTx sto tx <&> (tx,))
|
||||
>>= either (const none) S.yield
|
||||
|
||||
withState $ transactional do
|
||||
for_ tx $ \(th,(n,rhh,rh,bundleh)) -> do
|
||||
-- notice $ red "TX" <+> pretty th <+> pretty n
|
||||
insertTx puk th n rhh bundleh
|
||||
|
||||
|
||||
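-- | Applies a single reflog transaction: reads its repo head and bundle
--   list, unpacks every not-yet-applied bundle into the local repository
--   via `git unpack-objects`, optionally resets the local refs with
--   `git fast-import`, and finally marks the transaction as done.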
applyTx :: (GitPerks m, MonadReader GitEnv m, MonadError OperationError m)
|
||||
=> HashRef
|
||||
-> m ()
|
||||
|
||||
applyTx h = do
|
||||
sto <- asks _storage
|
||||
(n,rhh,r,bunh) <- readTx sto h
|
||||
|
||||
bundles <- readBundleRefs sto bunh
|
||||
>>= orThrowError IncompleteData
|
||||
|
||||
trace $ red "applyTx" <+> pretty h <+> pretty h <+> pretty bundles
|
||||
|
||||
withState $ transactional do
|
||||
|
||||
applyBundles r bundles
|
||||
|
||||
app <- lift $ asks (view gitApplyHeads)
|
||||
|
||||
when app do
|
||||
lift $ applyHeads r
|
||||
|
||||
insertTxDone h
|
||||
|
||||
where
|
||||
|
||||
applyHeads rh = do
|
||||
|
||||
let refs = _repoHeadRefs rh
|
||||
|
||||
withGitFastImport $ \ps -> do
|
||||
let psin = getStdin ps
|
||||
|
||||
for_ refs $ \(r,v) -> do
|
||||
unless (r == GitRef "HEAD") do
|
||||
liftIO $ hPrint psin $
|
||||
"reset" <+> pretty r <> line <> "from" <+> pretty v <> line
|
||||
|
||||
hClose psin
|
||||
code <- waitExitCode ps
|
||||
|
||||
trace $ red "git fast-import status" <+> viaShow code
|
||||
pure ()
|
||||
|
||||
applyBundles r bundles = do
|
||||
env <- lift ask
|
||||
sto <- lift $ asks _storage
|
||||
ip <- lift $ asks _progress
|
||||
|
||||
-- withState $ do
|
||||
for_ (zip [0..] bundles) $ \(n,bu) -> do
|
||||
|
||||
insertTxBundle h n bu
|
||||
|
||||
here <- existsBundleDone bu
|
||||
|
||||
unless here do
|
||||
|
||||
BundleWithMeta meta bytes <- lift (runExceptT $ readBundle sto r bu)
|
||||
>>= orThrow (ImportUnbundleError bu)
|
||||
|
||||
(_,_,idx,lbs) <- unpackPackMay bytes
|
||||
& orThrow (ImportUnbundleError bu)
|
||||
|
||||
trace $ red "reading bundle" <+> pretty bu -- <+> pretty (LBS.length lbs)
|
||||
|
||||
for_ idx $ \i -> do
|
||||
insertBundleObject bu i
|
||||
|
||||
let chunks = LBS.toChunks lbs
|
||||
|
||||
void $ liftIO $ withGitEnv env $ withGitUnpack $ \p -> do
|
||||
let pstdin = getStdin p
|
||||
for_ (zip [1..] chunks) $ \(i,chu) -> do
|
||||
onProgress ip (ImportReadBundleChunk meta (Progress i Nothing))
|
||||
liftIO $ LBS.hPutStr pstdin (LBS.fromStrict chu)
|
||||
|
||||
hFlush pstdin >> hClose pstdin
|
||||
|
||||
code <- waitExitCode p
|
||||
|
||||
trace $ "unpack objects done:" <+> viaShow code
|
||||
|
||||
insertBundleDone bu
|
||||
|
||||
|
||||
withGitFastImport :: (MonadUnliftIO m, MonadReader GitEnv m)
|
||||
=> (Process Handle Handle () -> m a)
|
||||
-> m ()
|
||||
withGitFastImport action = do
|
||||
fp <- asks _gitPath
|
||||
let cmd = "git"
|
||||
let args = ["--git-dir", fp, "fast-import"]
|
||||
-- let config = setStdin createPipe $ setStdout createPipe $ setStderr closed $ proc cmd args
|
||||
|
||||
trc <- asks traceEnabled >>= \case
|
||||
True -> pure id
|
||||
False -> pure $ setStdout closed . setStderr closed
|
||||
|
||||
let pconfig = setStdin createPipe $ setStdout createPipe $ trc $ proc cmd args
|
||||
p <- startProcess pconfig
|
||||
void $ action p
|
||||
stopProcess p
|
||||
|
||||
withGitUnpack :: (MonadUnliftIO m, MonadReader GitEnv m)
|
||||
=> (Process Handle Handle () -> m a) -> m a
|
||||
withGitUnpack action = do
|
||||
fp <- asks _gitPath
|
||||
let cmd = "git"
|
||||
let args = ["--git-dir", fp, "unpack-objects", "-q"]
|
||||
|
||||
trc <- asks traceEnabled >>= \case
|
||||
True -> pure id
|
||||
False -> pure $ setStdout closed . setStderr closed
|
||||
|
||||
let pconfig = setStdin createPipe $ setStdout createPipe $ trc $ proc cmd args
|
||||
p <- startProcess pconfig
|
||||
action p
|
||||
|
||||
|
||||
gitPrune :: (MonadUnliftIO m, MonadReader GitEnv m)
|
||||
=> m ()
|
||||
gitPrune = do
|
||||
fp <- asks _gitPath
|
||||
let cmd = [qc|git --git-dir={fp} prune|]
|
||||
runProcess_ (shell cmd & setStderr closed & setStdout closed)
|
||||
pure ()
|
||||
|
||||
|
|
@ -0,0 +1,92 @@
|
|||
module HBS2.Git.Client.Prelude
|
||||
( module HBS2.Prelude.Plated
|
||||
, module HBS2.Base58
|
||||
, module HBS2.Clock
|
||||
, module HBS2.Hash
|
||||
, module HBS2.Data.Types.Refs
|
||||
, module HBS2.Net.Auth.Credentials
|
||||
, module HBS2.Merkle
|
||||
, module HBS2.Storage
|
||||
, module HBS2.Net.Messaging.Unix
|
||||
, module HBS2.OrDie
|
||||
, module HBS2.Misc.PrettyStuff
|
||||
, module HBS2.System.Logger.Simple.ANSI
|
||||
|
||||
-- peer
|
||||
, module HBS2.Net.Proto.Service
|
||||
, module HBS2.Peer.Proto.LWWRef
|
||||
, module HBS2.Peer.RPC.API.Peer
|
||||
, module HBS2.Peer.RPC.API.RefLog
|
||||
, module HBS2.Peer.RPC.API.LWWRef
|
||||
, module HBS2.Peer.RPC.API.Storage
|
||||
, module HBS2.Peer.RPC.Client.StorageClient
|
||||
|
||||
, module Control.Applicative
|
||||
, module Control.Monad.Reader
|
||||
, module Control.Monad.Trans.Cont
|
||||
, module Control.Monad.Trans.Maybe
|
||||
, module System.Process.Typed
|
||||
, module Control.Monad.Except
|
||||
, module Lens.Micro.Platform
|
||||
, module UnliftIO
|
||||
|
||||
, getSocketName
|
||||
, formatRef
|
||||
, deserialiseOrFail
|
||||
) where
|
||||
|
||||
import HBS2.Prelude.Plated hiding (at)
|
||||
import HBS2.Base58
|
||||
import HBS2.Clock
|
||||
|
||||
import HBS2.Peer.Proto
|
||||
|
||||
import HBS2.Hash
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.Merkle
|
||||
import HBS2.Storage
|
||||
import HBS2.OrDie
|
||||
import HBS2.Misc.PrettyStuff
|
||||
import HBS2.System.Logger.Simple.ANSI
|
||||
|
||||
import HBS2.Net.Messaging.Unix
|
||||
import HBS2.Net.Proto.Service
|
||||
|
||||
import HBS2.Peer.Proto.LWWRef
|
||||
import HBS2.Peer.RPC.API.Peer
|
||||
import HBS2.Peer.RPC.API.RefLog
|
||||
import HBS2.Peer.RPC.API.LWWRef
|
||||
import HBS2.Peer.RPC.API.Storage
|
||||
import HBS2.Peer.RPC.Client.StorageClient
|
||||
|
||||
import HBS2.Peer.CLI.Detect
|
||||
|
||||
import Control.Applicative
|
||||
import Control.Monad.Trans.Cont
|
||||
import Control.Monad.Reader
|
||||
import Control.Monad.Except
|
||||
import Control.Exception
|
||||
import Control.Monad.Trans.Maybe
|
||||
import UnliftIO
|
||||
import System.Process.Typed
|
||||
import Lens.Micro.Platform
|
||||
import Codec.Serialise
|
||||
|
||||
data RPCNotFoundError = RPCNotFoundError
|
||||
deriving stock (Show,Typeable)
|
||||
|
||||
|
||||
instance Exception RPCNotFoundError
|
||||
|
||||
instance HasErrorStatus RPCNotFoundError where
|
||||
getStatus = const Failed
|
||||
|
||||
getSocketName :: forall m . (MonadUnliftIO m, MonadError RPCNotFoundError m) => m FilePath
|
||||
getSocketName = do
|
||||
detectRPC >>= maybe (throwError RPCNotFoundError) pure
|
||||
|
||||
|
||||
formatRef :: (Pretty a1, Pretty a2) => (a1, a2) -> Doc ann
|
||||
formatRef (r,h) = pretty h <+> pretty r
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
{-# Language TemplateHaskell #-}
|
||||
{-# Language AllowAmbiguousTypes #-}
|
||||
module HBS2.Git.Client.Progress where
|
||||
|
||||
import HBS2.Git.Client.Prelude
|
||||
import HBS2.Git.Data.RefLog
|
||||
import HBS2.Git.Data.LWWBlock
|
||||
|
||||
import HBS2.Git.Data.Tx
|
||||
|
||||
data Progress a =
|
||||
Progress
|
||||
{ _progressState :: a
|
||||
, _progressTotal :: Maybe a
|
||||
}
|
||||
deriving (Eq,Generic)
|
||||
|
||||
makeLenses 'Progress
|
||||
|
||||
class HasProgress a where
|
||||
onProgress :: MonadIO m => a -> ProgressEvent -> m ()
|
||||
|
||||
data ProgressEvent =
|
||||
ImportIdle
|
||||
| ImportWaitLWW Int (LWWRefKey HBS2Basic)
|
||||
| ImportRefLogStart RefLogId
|
||||
| ImportRefLogDone RefLogId (Maybe HashRef)
|
||||
| ImportWaitTx HashRef
|
||||
| ImportScanTx HashRef
|
||||
| ImportApplyTx HashRef
|
||||
| ImportApplyTxError HashRef (Maybe String)
|
||||
| ImportReadBundleChunk BundleMeta (Progress Int)
|
||||
| ImportSetQuiet Bool
|
||||
| ImportAllDone
|
||||
| ExportWriteObject (Progress Int)
|
||||
|
||||
|
||||
data AnyProgress = forall a . HasProgress a => AnyProgress a
|
||||
|
||||
instance HasProgress AnyProgress where
|
||||
onProgress (AnyProgress e) = onProgress e
|
||||
|
||||
instance HasProgress () where
|
||||
onProgress _ _ = pure ()
|
||||
|
||||
newtype ProgressQ = ProgressQ (TQueue ProgressEvent)
|
||||
|
||||
instance HasProgress ProgressQ where
|
||||
onProgress (ProgressQ q) ev = atomically (writeTQueue q ev)
|
||||
|
||||
newProgressQ :: MonadUnliftIO m => m ProgressQ
|
||||
newProgressQ = ProgressQ <$> newTQueueIO
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
module HBS2.Git.Client.RefLog
|
||||
( module HBS2.Git.Client.RefLog
|
||||
, module HBS2.Peer.Proto.RefLog
|
||||
) where
|
||||
|
||||
import HBS2.Git.Client.Prelude
|
||||
import HBS2.Git.Client.App.Types
|
||||
import HBS2.Git.Data.RefLog
|
||||
import HBS2.Git.Data.LWWBlock
|
||||
import HBS2.Peer.Proto.RefLog
|
||||
|
||||
data RefLogRequestTimeout = RefLogRequestTimeout
|
||||
deriving (Show,Typeable)
|
||||
|
||||
data RefLogRequestError = RefLogRequestError
|
||||
deriving (Show,Typeable)
|
||||
|
||||
instance Exception RefLogRequestTimeout
|
||||
|
||||
instance Exception RefLogRequestError
|
||||
|
||||
doSomeRandomShit :: HasAPI PeerAPI UNIX m => m ()
|
||||
doSomeRandomShit = error "FUCK"
|
||||
|
||||
subscribeRefLog :: forall m .(GitPerks m, HasAPI PeerAPI UNIX m) => RefLogId -> m ()
|
||||
subscribeRefLog puk = do
|
||||
api <- getAPI @PeerAPI @UNIX
|
||||
void $ callService @RpcPollAdd api (puk, "reflog", 13)
|
||||
|
||||
subscribeLWWRef :: forall m . (GitPerks m, HasAPI PeerAPI UNIX m) => LWWRefKey HBS2Basic -> m ()
|
||||
subscribeLWWRef puk = do
|
||||
api <- getAPI @PeerAPI @UNIX
|
||||
void $ callService @RpcPollAdd api (fromLwwRefKey puk, "lwwref", 17)
|
||||
|
||||
fetchLWWRef :: forall m . (GitPerks m, HasAPI LWWRefAPI UNIX m) => LWWRefKey HBS2Basic -> m ()
|
||||
fetchLWWRef key = do
|
||||
api <- getAPI @LWWRefAPI @UNIX
|
||||
void $ race (pause @'Seconds 1) (callService @RpcLWWRefFetch api key)
|
||||
|
||||
getRefLogMerkle :: forall m . (GitPerks m, HasAPI RefLogAPI UNIX m) => RefLogId -> m (Maybe HashRef)
|
||||
getRefLogMerkle puk = do
|
||||
|
||||
api <- getAPI @RefLogAPI @UNIX
|
||||
|
||||
void $ race (pause @'Seconds 1) (callService @RpcRefLogFetch api puk)
|
||||
>>= orThrow RefLogRequestTimeout
|
||||
>>= orThrow RefLogRequestError
|
||||
|
||||
race (pause @'Seconds 1) (callService @RpcRefLogGet api puk)
|
||||
>>= orThrow RefLogRequestTimeout
|
||||
>>= orThrow RefLogRequestError
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,382 @@
|
|||
{-# Language UndecidableInstances #-}
|
||||
{-# OPTIONS_GHC -fno-warn-orphans #-}
|
||||
module HBS2.Git.Client.State
|
||||
( module HBS2.Git.Client.State
|
||||
, transactional
|
||||
, commitAll
|
||||
) where
|
||||
|
||||
import HBS2.Git.Client.Prelude
|
||||
import HBS2.Git.Client.App.Types
|
||||
import HBS2.Git.Client.Config
|
||||
|
||||
import HBS2.Peer.Proto.RefLog
|
||||
|
||||
import HBS2.Git.Data.RefLog
|
||||
import HBS2.Git.Data.LWWBlock
|
||||
|
||||
import DBPipe.SQLite
|
||||
import Data.Maybe
|
||||
import Data.List qualified as List
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Data.Word
|
||||
|
||||
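-- | Wrapper for storing keys and similar values in SQLite as their
--   base58 text representation.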
newtype Base58Field a = Base58Field { fromBase58Field :: a }
|
||||
deriving stock (Eq,Ord,Generic)
|
||||
|
||||
instance Pretty (AsBase58 a) => ToField (Base58Field a) where
|
||||
toField (Base58Field x) = toField @String (show $ pretty (AsBase58 x))
|
||||
|
||||
instance IsString a => FromField (Base58Field a) where
|
||||
fromField = fmap (Base58Field . fromString) . fromField @String
|
||||
|
||||
instance FromField (RefLogKey HBS2Basic) where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
instance ToField HashRef where
|
||||
toField h = toField @String (show $ pretty h)
|
||||
|
||||
instance FromField HashRef where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
instance ToField GitHash where
|
||||
toField h = toField (show $ pretty h)
|
||||
|
||||
instance ToField GitRef where
|
||||
toField h = toField (show $ pretty h)
|
||||
|
||||
instance FromField GitRef where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
instance FromField GitHash where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
instance FromField (LWWRefKey HBS2Basic) where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
createStateDir :: (GitPerks m, MonadReader GitEnv m) => m ()
|
||||
createStateDir = do
|
||||
void $ readConfig True
|
||||
|
||||
initState :: (GitPerks m, MonadReader GitEnv m) => m ()
|
||||
initState = do
|
||||
createStateDir
|
||||
evolveDB
|
||||
|
||||
class WithState m a where
|
||||
withState :: DBPipeM m a -> m a
|
||||
|
||||
instance (MonadIO m, MonadReader GitEnv m) => WithState m a where
|
||||
withState action = do
|
||||
env <- asks _db
|
||||
withDB env action
|
||||
|
||||
|
||||
evolveDB :: (GitPerks m, MonadReader GitEnv m) => m ()
|
||||
evolveDB = withState do
|
||||
createTxTable
|
||||
createTxDoneTable
|
||||
createTxBundleTable
|
||||
createBundleDoneTable
|
||||
createBundleKeyTable
|
||||
createBundleObjectTable
|
||||
createNewGK0Table
|
||||
createLwwTable
|
||||
commitAll
|
||||
|
||||
createTxTable :: MonadIO m => DBPipeM m ()
|
||||
createTxTable = do
|
||||
ddl [qc|
|
||||
create table if not exists tx
|
||||
( reflog text not null
|
||||
, tx text not null
|
||||
, seq int not null
|
||||
, head text not null
|
||||
, bundle text not null
|
||||
, primary key (reflog,tx)
|
||||
)
|
||||
|]
|
||||
|
||||
ddl [qc|
|
||||
CREATE INDEX IF NOT EXISTS idx_tx_seq ON tx(seq)
|
||||
|]
|
||||
|
||||
|
||||
createTxDoneTable :: MonadIO m => DBPipeM m ()
|
||||
createTxDoneTable = do
|
||||
ddl [qc|
|
||||
create table if not exists txdone
|
||||
( tx text not null primary key
|
||||
)
|
||||
|]
|
||||
|
||||
createBundleDoneTable :: MonadIO m => DBPipeM m ()
|
||||
createBundleDoneTable = do
|
||||
ddl [qc|
|
||||
create table if not exists bundledone
|
||||
( hash text primary key
|
||||
)
|
||||
|]
|
||||
|
||||
createBundleKeyTable :: MonadIO m => DBPipeM m ()
|
||||
|
||||
createBundleKeyTable = do
|
||||
ddl [qc|
|
||||
create table if not exists bundlekey
|
||||
( reflog text not null
|
||||
, key text not null
|
||||
, bundle text not null
|
||||
, primary key (reflog, key)
|
||||
)
|
||||
|]
|
||||
|
||||
|
||||
createTxBundleTable :: MonadIO m => DBPipeM m ()
|
||||
createTxBundleTable = do
|
||||
ddl [qc|
|
||||
create table if not exists txbundle
|
||||
( tx text not null
|
||||
, num integer not null
|
||||
, bundle text not null
|
||||
, primary key (tx, num)
|
||||
)
|
||||
|]
|
||||
|
||||
createBundleObjectTable :: MonadIO m => DBPipeM m ()
|
||||
createBundleObjectTable = do
|
||||
ddl [qc|
|
||||
create table if not exists bundleobject
|
||||
( bundle text not null
|
||||
, object text not null
|
||||
, primary key (bundle, object)
|
||||
)
|
||||
|]
|
||||
|
||||
createNewGK0Table :: MonadIO m => DBPipeM m ()
|
||||
createNewGK0Table = do
|
||||
ddl [qc|
|
||||
create table if not exists newgk0
|
||||
( reflog text not null
|
||||
, tx text not null
|
||||
, ts int not null default (strftime('%s','now'))
|
||||
, gk0 text not null
|
||||
, primary key (reflog,tx)
|
||||
)
|
||||
|]
|
||||
|
||||
|
||||
createLwwTable :: MonadIO m => DBPipeM m ()
|
||||
createLwwTable = do
|
||||
ddl [qc|
|
||||
create table if not exists lww
|
||||
( hash text not null
|
||||
, seq int not null
|
||||
, reflog text not null
|
||||
, primary key (hash,seq,reflog)
|
||||
)
|
||||
|]
|
||||
|
||||
|
||||
existsTx :: MonadIO m => HashRef -> DBPipeM m Bool
|
||||
existsTx txHash = do
|
||||
select @(Only Bool) [qc|
|
||||
SELECT true FROM tx WHERE tx = ? LIMIT 1
|
||||
|] (Only txHash)
|
||||
<&> not . List.null
|
||||
|
||||
insertTx :: MonadIO m
|
||||
=> RefLogId
|
||||
-> HashRef
|
||||
-> Integer
|
||||
-> HashRef
|
||||
-> HashRef
|
||||
-> DBPipeM m ()
|
||||
insertTx puk tx sn h bundle = do
|
||||
insert [qc|
|
||||
insert into tx (reflog,tx,seq,head,bundle)
|
||||
values (?,?,?,?,?)
|
||||
on conflict (reflog,tx) do nothing
|
||||
|] (Base58Field puk,tx,sn,h,bundle)
|
||||
|
||||
|
||||
selectTxForRefLog :: MonadIO m
|
||||
=> RefLogId
|
||||
-> HashRef
|
||||
-> DBPipeM m (Maybe (HashRef, Epoch))
|
||||
selectTxForRefLog puk tx = do
|
||||
select [qc|
|
||||
select head,seq
|
||||
from tx where reflog = ? and tx = ?
|
||||
limit 1
|
||||
|] (Base58Field puk, tx) <&> listToMaybe
|
||||
|
||||
selectTxHead :: MonadIO m => HashRef -> DBPipeM m (Maybe HashRef)
|
||||
selectTxHead txHash = do
|
||||
result <- select [qc|
|
||||
select head from tx where TX = ? limit 1
|
||||
|] (Only txHash)
|
||||
pure $ listToMaybe $ fmap fromOnly result
|
||||
|
||||
selectMaxTxSeq :: MonadIO m => RefLogId -> DBPipeM m Integer
|
||||
selectMaxTxSeq puk = do
|
||||
select [qc|
|
||||
select max(seq) as seq from tx where reflog = ?
|
||||
|] (Only (Base58Field puk))
|
||||
<&> maybe 0 fromOnly . listToMaybe
|
||||
|
||||
insertTxDone :: MonadIO m => HashRef -> DBPipeM m ()
|
||||
insertTxDone txHash = do
|
||||
insert [qc|
|
||||
INSERT INTO txdone (tx) VALUES (?)
|
||||
ON CONFLICT (tx) DO NOTHING
|
||||
|] (Only txHash)
|
||||
|
||||
|
||||
existsTxDone :: MonadIO m => HashRef -> DBPipeM m Bool
|
||||
existsTxDone txHash = do
|
||||
select @(Only Bool) [qc|
|
||||
SELECT true FROM txdone WHERE tx = ? LIMIT 1
|
||||
|] (Only txHash)
|
||||
<&> not . null
|
||||
|
||||
existsAnyTxDone :: MonadIO m => DBPipeM m Bool
|
||||
existsAnyTxDone = do
|
||||
select_ @_ @(Only (Maybe Bool)) [qc|
|
||||
SELECT true FROM txdone LIMIT 1
|
||||
|] <&> not . null
|
||||
|
||||
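-- | Selects the pending (not yet applied) transaction with the highest
--   sequence number for the given reflog, provided it is newer than the
--   newest transaction that has already been applied.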
selectMaxSeqTxNotDone :: MonadIO m => RefLogId -> DBPipeM m (Maybe HashRef)
|
||||
selectMaxSeqTxNotDone puk = do
|
||||
select [qc|
|
||||
WITH MaxDoneSeq AS (
|
||||
SELECT MAX(tx.seq) as maxSeq
|
||||
FROM tx
|
||||
JOIN txdone ON tx.tx = txdone.tx
|
||||
WHERE tx.reflog = ?
|
||||
),
|
||||
FilteredTx AS (
|
||||
SELECT tx.tx, tx.seq
|
||||
FROM tx
|
||||
LEFT JOIN txdone ON tx.tx = txdone.tx
|
||||
WHERE tx.reflog = ? AND txdone.tx IS NULL
|
||||
)
|
||||
SELECT ft.tx FROM FilteredTx ft
|
||||
JOIN MaxDoneSeq mds ON ft.seq > COALESCE(mds.maxSeq, 0)
|
||||
ORDER BY ft.seq DESC
|
||||
LIMIT 1
|
||||
|] (Base58Field puk, Base58Field puk)
|
||||
<&> listToMaybe . fmap fromOnly
|
||||
|
||||
|
||||
selectMaxAppliedTx :: MonadIO m => DBPipeM m (Maybe (HashRef, Integer))
|
||||
selectMaxAppliedTx = do
|
||||
select [qc|
|
||||
SELECT t.tx, t.seq FROM txdone d JOIN tx t ON d.tx = t.tx ORDER BY t.seq DESC LIMIT 1
|
||||
|] ()
|
||||
<&> listToMaybe
|
||||
|
||||
insertBundleDone :: MonadIO m => HashRef -> DBPipeM m ()
|
||||
insertBundleDone hashRef = do
|
||||
insert [qc|
|
||||
INSERT INTO bundledone (hash) VALUES (?)
|
||||
ON CONFLICT (hash) DO NOTHING
|
||||
|] (Only hashRef)
|
||||
|
||||
existsBundleDone :: MonadIO m => HashRef -> DBPipeM m Bool
|
||||
existsBundleDone hashRef = do
|
||||
select @(Only Bool) [qc|
|
||||
SELECT true FROM bundledone WHERE hash = ? LIMIT 1
|
||||
|] (Only hashRef)
|
||||
<&> not . null
|
||||
|
||||
|
||||
insertBundleKey :: MonadIO m => RefLogId -> HashRef -> HashRef -> DBPipeM m ()
|
||||
insertBundleKey reflogId keyHash bundleHash = do
|
||||
insert [qc|
|
||||
INSERT INTO bundlekey (reflog, key, bundle) VALUES (?, ?, ?)
|
||||
ON CONFLICT (reflog,key) DO NOTHING
|
||||
|] (Base58Field reflogId, keyHash, bundleHash)
|
||||
|
||||
selectBundleByKey :: MonadIO m => RefLogId -> HashRef -> DBPipeM m (Maybe HashRef)
|
||||
selectBundleByKey reflogId keyHash = do
|
||||
select [qc|
|
||||
SELECT bundle FROM bundlekey WHERE reflog = ? AND key = ? LIMIT 1
|
||||
|] (Base58Field reflogId, keyHash)
|
||||
<&> listToMaybe . fmap fromOnly
|
||||
|
||||
insertTxBundle :: MonadIO m => HashRef -> Int -> HashRef -> DBPipeM m ()
|
||||
insertTxBundle tx num bundleHash = do
|
||||
insert [qc|
|
||||
INSERT INTO txbundle (tx, num, bundle) VALUES (?, ?, ?)
|
||||
ON CONFLICT (tx, num) DO UPDATE SET bundle = EXCLUDED.bundle
|
||||
|] (tx, num, bundleHash)
|
||||
|
||||
insertBundleObject :: MonadIO m => HashRef -> GitHash -> DBPipeM m ()
|
||||
insertBundleObject bundle object = do
|
||||
insert [qc|
|
||||
insert into bundleobject (bundle, object) values (?, ?)
|
||||
on conflict (bundle, object) do nothing
|
||||
|] (bundle, object)
|
||||
|
||||
|
||||
selectBundleObjects :: MonadIO m => HashRef -> DBPipeM m [GitHash]
|
||||
selectBundleObjects bundle = do
|
||||
select [qc|
|
||||
select object from bundleobject where bundle = ?
|
||||
|] (Only bundle)
|
||||
<&> fmap fromOnly
|
||||
|
||||
|
||||
selectObjectsForTx:: MonadIO m => HashRef -> DBPipeM m [GitHash]
|
||||
selectObjectsForTx txHash = do
|
||||
select [qc|
|
||||
select distinct bundleobject.object
|
||||
from txbundle
|
||||
join bundleobject on txbundle.bundle = bundleobject.bundle
|
||||
where txbundle.tx = ?
|
||||
|] (Only txHash) <&> fmap fromOnly
|
||||
|
||||
|
||||
isObjectInTx :: MonadIO m => HashRef -> GitHash -> DBPipeM m Bool
|
||||
isObjectInTx txHash objectHash = do
|
||||
result <- select @(Only Int) [qc|
|
||||
select 1
|
||||
from txbundle
|
||||
join bundleobject on txbundle.bundle = bundleobject.bundle
|
||||
where txbundle.tx = ? and bundleobject.object = ?
|
||||
limit 1
|
||||
|] (txHash, objectHash)
|
||||
pure $ not (null result)
|
||||
|
||||
|
||||
insertNewGK0 :: MonadIO m => RefLogId -> HashRef -> HashRef -> DBPipeM m ()
|
||||
insertNewGK0 reflog tx gk0 = do
|
||||
insert [qc|
|
||||
insert into newgk0 (reflog, tx, gk0) values (?, ?, ?)
|
||||
on conflict (reflog,tx) do update set gk0 = excluded.gk0
|
||||
|] (Base58Field reflog, tx, gk0)
|
||||
|
||||
selectNewGK0 :: MonadIO m => RefLogId -> DBPipeM m (Maybe (HashRef,Epoch))
|
||||
selectNewGK0 reflog = do
|
||||
select [qc|
|
||||
select gk0, ts
|
||||
from newgk0 g
|
||||
where g.reflog = ?
|
||||
order by ts desc
|
||||
limit 1
|
||||
|] (Only (Base58Field reflog)) <&> listToMaybe
|
||||
|
||||
|
||||
insertLww :: MonadIO m => LWWRefKey HBS2Basic -> Word64 -> RefLogId -> DBPipeM m ()
|
||||
insertLww lww snum reflog = do
|
||||
insert [qc|
|
||||
INSERT INTO lww (hash, seq, reflog) VALUES (?, ?, ?)
|
||||
ON CONFLICT (hash,seq,reflog) DO NOTHING
|
||||
|] (Base58Field lww, snum, Base58Field reflog)
|
||||
|
||||
selectAllLww :: MonadIO m => DBPipeM m [(LWWRefKey HBS2Basic, Word64, RefLogId)]
|
||||
selectAllLww = do
|
||||
select_ [qc|
|
||||
SELECT hash, seq, reflog FROM lww
|
||||
|] <&> fmap (over _3 (fromRefLogKey @HBS2Basic))
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
module HBS2.Git.Data.GK where
|
||||
|
||||
import HBS2.Git.Client.Prelude
|
||||
|
||||
import HBS2.Net.Auth.GroupKeySymm
|
||||
import HBS2.Storage.Operations.ByteString
|
||||
|
||||
import Data.ByteString.Lazy qualified as LBS
|
||||
|
||||
type GK0 = GroupKey 'Symm HBS2Basic
|
||||
|
||||
readGK0 :: (MonadIO m, MonadError OperationError m) => AnyStorage -> HashRef -> m GK0
|
||||
readGK0 sto h = do
|
||||
runExceptT (readFromMerkle sto (SimpleKey (fromHashRef h)))
|
||||
>>= orThrowError MissedBlockError
|
||||
<&> deserialiseOrFail @GK0
|
||||
>>= orThrowError UnsupportedFormat
|
||||
|
||||
loadGK0FromFile :: MonadIO m => FilePath -> m (Maybe GK0)
|
||||
loadGK0FromFile fp = runMaybeT do
|
||||
|
||||
content <- liftIO (try @_ @IOError (LBS.readFile fp))
|
||||
>>= toMPlus
|
||||
|
||||
toMPlus $ parseGroupKey @HBS2Basic (AsGroupKeyFile content)
|
||||
|
|
@ -0,0 +1,142 @@
|
|||
{-# Language AllowAmbiguousTypes #-}
|
||||
{-# Language UndecidableInstances #-}
|
||||
module HBS2.Git.Data.LWWBlock
|
||||
( module HBS2.Git.Data.LWWBlock
|
||||
, module HBS2.Peer.Proto.LWWRef
|
||||
, HBS2Basic
|
||||
) where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.OrDie
|
||||
import HBS2.Net.Proto.Types
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Data.Types.SignedBox
|
||||
import HBS2.Net.Auth.Schema()
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.Storage
|
||||
import HBS2.Peer.Proto.LWWRef
|
||||
|
||||
import Data.Word
|
||||
import Codec.Serialise
|
||||
import System.Random
|
||||
|
||||
import Control.Exception
|
||||
import Control.Monad.Except
|
||||
import Control.Monad.Trans.Maybe
|
||||
|
||||
-- NOTE: on-lww-block-data
|
||||
-- HKDF ( SK(LWWRef) , lwwRefNonce ) ==> SK( RefLog )
|
||||
-- lwwRefLogPubKey == PK ( SK (RefLog ) )
|
||||
--
|
||||
-- LWWBlock is required to make the repo reference "stable",
|
||||
-- i.e. it should remain the same even if the structure
|
||||
-- of the repository has been changed or it was, say, "trimmed".
|
||||
--
|
||||
-- Therefore, there is a root key and the LWWRef, pointing
|
||||
-- to a block which contains the actual seed data for the "current"
|
||||
-- repo. This makes it possible to keep a permanent reference (LWWRef)
|
||||
-- to a repo while its actual structure may change
|
||||
-- (hbs2-git repo structure changes, garbage collection removing old
|
||||
-- transactions, etc.).
|
||||
--
|
||||
-- (LWWRef PK) -> (LWWBlockData) -> (RefLog : [TX])
|
||||
--
|
||||
|
||||
data LWWBlockData e =
|
||||
LWWBlockData
|
||||
{ lwwRefSeed :: Word64
|
||||
, lwwRefLogPubKey :: PubKey 'Sign (Encryption e)
|
||||
}
|
||||
deriving stock Generic
|
||||
|
||||
data LWWBlock e =
|
||||
LWWBlock1 { lwwBlockData :: LWWBlockData e }
|
||||
deriving stock Generic
|
||||
|
||||
instance Serialise (PubKey 'Sign (Encryption e)) => Serialise (LWWBlockData e)
|
||||
instance Serialise (PubKey 'Sign (Encryption e)) => Serialise (LWWBlock e)
|
||||
|
||||
|
||||
data LWWBlockOpError =
|
||||
LWWBlockOpSkNotAvail
|
||||
| LWWBlockOpStorageError
|
||||
| LWWBlockOpSomeError
|
||||
deriving stock (Show,Typeable,Generic)
|
||||
|
||||
instance Exception LWWBlockOpError
|
||||
|
||||
{- HLINT ignore "Functor law" -}
|
||||
|
||||
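-- | Reads the LWW reference and the LWWBlockData block it points to, if
--   both are present in storage and well-formed.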
readLWWBlock :: forall e s m . ( MonadIO m
|
||||
, Signatures s
|
||||
, s ~ Encryption e
|
||||
, ForLWWRefProto e
|
||||
, IsRefPubKey s
|
||||
, e ~ L4Proto
|
||||
)
|
||||
=> AnyStorage
|
||||
-> LWWRefKey s
|
||||
-> m (Maybe (LWWRef e, LWWBlockData e))
|
||||
|
||||
readLWWBlock sto k = runMaybeT do
|
||||
|
||||
w@LWWRef{..} <- runExceptT (readLWWRef @e sto k)
|
||||
>>= toMPlus
|
||||
>>= toMPlus
|
||||
|
||||
getBlock sto (fromHashRef lwwValue)
|
||||
>>= toMPlus
|
||||
<&> deserialiseOrFail @(LWWBlock e)
|
||||
>>= toMPlus
|
||||
<&> lwwBlockData
|
||||
<&> (w,)
|
||||
|
||||
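-- | Initialises (or bumps) an LWW reference: picks a seed (random unless
--   supplied), derives the reflog key pair from the seed and the owner's
--   signing key, stores a new LWWBlock and updates the LWW ref with an
--   incremented sequence number.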
initLWWRef :: forall e s m . ( MonadIO m
|
||||
, MonadError LWWBlockOpError m
|
||||
, IsRefPubKey s
|
||||
, ForSignedBox e
|
||||
, HasDerivedKey s 'Sign Word64 m
|
||||
, s ~ Encryption e
|
||||
, Signatures s
|
||||
, e ~ L4Proto
|
||||
)
|
||||
=> AnyStorage
|
||||
-> Maybe Word64
|
||||
-> ( PubKey 'Sign s -> m (Maybe (PrivKey 'Sign s) ) )
|
||||
-> LWWRefKey s
|
||||
-> m HashRef
|
||||
initLWWRef sto seed' findSk lwwKey = do
|
||||
-- let k0 = fromLwwRefKey lww
|
||||
seed <- maybe1 seed' randomIO pure
|
||||
|
||||
let pk0 = fromLwwRefKey lwwKey
|
||||
sk0 <- findSk pk0
|
||||
>>= orThrowError LWWBlockOpSkNotAvail
|
||||
|
||||
lww0 <- runMaybeT do
|
||||
getRef sto lwwKey >>= toMPlus
|
||||
>>= getBlock sto >>= toMPlus
|
||||
<&> deserialiseOrFail @(SignedBox (LWWRef e) e)
|
||||
>>= toMPlus
|
||||
<&> unboxSignedBox0
|
||||
>>= toMPlus
|
||||
<&> snd
|
||||
|
||||
(pk1, _) <- derivedKey @s @'Sign seed sk0
|
||||
|
||||
let newLwwData = LWWBlock1 (LWWBlockData @e seed pk1)
|
||||
|
||||
hx <- putBlock sto (serialise newLwwData)
|
||||
>>= orThrowError LWWBlockOpStorageError
|
||||
<&> HashRef
|
||||
|
||||
let lww :: LWWRef e
|
||||
lww = LWWRef { lwwSeq = succ (maybe 0 lwwSeq lww0)
|
||||
, lwwValue = hx
|
||||
, lwwProof = Nothing
|
||||
}
|
||||
|
||||
updateLWWRef @s sto lwwKey sk0 lww
|
||||
>>= orThrowError LWWBlockOpStorageError
|
||||
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
module HBS2.Git.Data.RefLog where
|
||||
|
||||
import HBS2.Git.Client.Prelude
|
||||
|
||||
type RefLogId = PubKey 'Sign HBS2Basic
|
||||
|
||||
|
|
@ -0,0 +1,381 @@
|
|||
module HBS2.Git.Data.Tx
|
||||
( module HBS2.Git.Data.Tx
|
||||
, OperationError(..)
|
||||
) where
|
||||
|
||||
import HBS2.Git.Client.Prelude
|
||||
import HBS2.Git.Data.RefLog
|
||||
|
||||
import HBS2.Defaults
|
||||
import HBS2.Data.Detect
|
||||
import HBS2.KeyMan.Keys.Direct
|
||||
import HBS2.Peer.Proto
|
||||
import HBS2.Net.Auth.GroupKeySymm
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.Storage.Operations.ByteString
|
||||
import HBS2.Storage.Operations.Missed
|
||||
|
||||
import HBS2.Git.Data.GK
|
||||
|
||||
import HBS2.Git.Local
|
||||
|
||||
|
||||
import Data.Maybe
|
||||
import Data.Either
|
||||
import Data.Word
|
||||
import Data.ByteString.Lazy qualified as LBS
|
||||
import Data.ByteString (ByteString)
|
||||
import Streaming.Prelude qualified as S
|
||||
import Data.Binary.Get
|
||||
import Data.ByteArray.Hash (SipHash(..), SipKey(..))
|
||||
import Data.ByteArray.Hash qualified as BA
|
||||
import Data.HashMap.Strict qualified as HM
|
||||
|
||||
type Rank = Integer
|
||||
|
||||
|
||||
type LBS = LBS.ByteString
|
||||
|
||||
type RepoTx = RefLogUpdate L4Proto
|
||||
|
||||
data RepoHeadType = RepoHeadType1
|
||||
deriving stock (Enum,Generic)
|
||||
|
||||
data RepoHeadExt = RepoHeadExt
|
||||
deriving stock Generic
|
||||
|
||||
data RepoHead =
|
||||
RepoHeadSimple
|
||||
{ _repoHeadType :: RepoHeadType
|
||||
, _repoHeadTime :: Word64
|
||||
, _repoHeadGK0 :: Maybe HashRef
|
||||
, _repoHeadName :: Text
|
||||
, _repoHeadBrief :: Text
|
||||
, _repoManifest :: Maybe Text
|
||||
, _repoHeadRefs :: [(GitRef, GitHash)]
|
||||
, _repoHeadExt :: [RepoHeadExt]
|
||||
}
|
||||
deriving stock (Generic)
|
||||
|
||||
|
||||
instance Serialise RepoHeadType
|
||||
instance Serialise RepoHeadExt
|
||||
instance Serialise RepoHead
|
||||
|
||||
data TxKeyringNotFound = TxKeyringNotFound
|
||||
deriving stock (Show, Typeable, Generic)
|
||||
|
||||
instance Exception TxKeyringNotFound
|
||||
|
||||
class GroupKeyOperations m where
|
||||
openGroupKey :: GK0 -> m (Maybe GroupSecret)
|
||||
loadKeyrings :: HashRef -> m [KeyringEntry HBS2Basic]
|
||||
|
||||
makeRepoHeadSimple :: MonadIO m
|
||||
=> Text
|
||||
-> Text
|
||||
-> Maybe Text
|
||||
-> Maybe HashRef
|
||||
-> [(GitRef, GitHash)]
|
||||
-> m RepoHead
|
||||
makeRepoHeadSimple name brief manifest gk refs = do
|
||||
t <- getEpoch
|
||||
pure $ RepoHeadSimple RepoHeadType1 t gk name brief manifest refs mempty
|
||||
|
||||
writeRepoHead :: MonadUnliftIO m => AnyStorage -> RepoHead -> m HashRef
|
||||
writeRepoHead sto rh = writeAsMerkle sto (serialise rh) <&> HashRef
|
||||
|
||||
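-- | Builds and signs a reflog-update transaction: writes the repo head,
--   writes the new bundles, optionally re-encrypts previously written
--   bundles under the current GK0 when `rewrite` is set, collects all
--   bundle references into a merkle tree, and wraps (rank, head, tree)
--   into a signed SequentialRef update.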
makeTx :: forall s m . (MonadUnliftIO m, GroupKeyOperations m, s ~ HBS2Basic)
|
||||
=> AnyStorage
|
||||
-> Bool -- ^ rewrite bundle merkle tree with new gk0
|
||||
-> Rank -- ^ tx rank
|
||||
-> RefLogId
|
||||
-> ( PubKey 'Sign s -> m (Maybe (PrivKey 'Sign s) ) )
|
||||
-> RepoHead
|
||||
-> [HashRef]
|
||||
-> [LBS]
|
||||
-> m RepoTx
|
||||
|
||||
makeTx sto rewrite r puk findSk rh prev lbss = do
|
||||
|
||||
let rfk = RefLogKey @HBS2Basic puk
|
||||
|
||||
privk <- findSk puk
|
||||
>>= orThrow TxKeyringNotFound
|
||||
|
||||
-- FIXME: delete-on-fail
|
||||
headRef <- writeRepoHead sto rh
|
||||
|
||||
writeEnv <- newWriteBundleEnv sto rh
|
||||
|
||||
cRefs <- for lbss (writeBundle writeEnv)
|
||||
|
||||
let newBundles0 = prev <> cRefs
|
||||
|
||||
newBundles <- do
|
||||
if not rewrite then do
|
||||
pure newBundles0
|
||||
else do
|
||||
for newBundles0 \bh -> do
|
||||
|
||||
blk <- getBlock sto (fromHashRef bh)
|
||||
>>= orThrow StorageError
|
||||
|
||||
case tryDetect (fromHashRef bh) blk of
|
||||
|
||||
Merkle{} -> do
|
||||
bs <- runExceptT (readFromMerkle sto (SimpleKey (fromHashRef bh)))
|
||||
>>= either throwIO pure
|
||||
|
||||
trace $ "encrypt existed block" <+> pretty bh
|
||||
writeBundle writeEnv bs
|
||||
|
||||
MerkleAnn ann@(MTreeAnn {_mtaCrypt = EncryptGroupNaClSymm2 o gkh nonce}) -> do
|
||||
|
||||
gk <- runExceptT (readGK0 sto (HashRef gkh))
|
||||
>>= orThrow (GroupKeyNotFound 4)
|
||||
|
||||
gks <- openGroupKey gk
|
||||
>>= orThrow (GroupKeyNotFound 5)
|
||||
|
||||
debug $ "update GK0 for existed block" <+> pretty bh
|
||||
let rcpt = HM.keys (recipients (wbeGk0 writeEnv))
|
||||
gk1 <- generateGroupKey @HBS2Basic (Just gks) rcpt
|
||||
|
||||
gk1h <- writeAsMerkle sto (serialise gk1)
|
||||
|
||||
let newCrypt = EncryptGroupNaClSymm2 o gk1h nonce
|
||||
let newTreeBlock = ann { _mtaCrypt = newCrypt }
|
||||
|
||||
newTree <- enqueueBlock sto (serialise newTreeBlock)
|
||||
>>= orThrow StorageError
|
||||
|
||||
pure (HashRef newTree)
|
||||
|
||||
_ -> throwIO UnsupportedFormat
|
||||
|
||||
let pt = toPTree (MaxSize defHashListChunk) (MaxNum 256) newBundles
|
||||
|
||||
me <- makeMerkle 0 pt $ \(_,_,bss) -> do
|
||||
void $ putBlock sto bss
|
||||
|
||||
let meRef = HashRef me
|
||||
|
||||
-- TODO: post-real-rank-for-tx
|
||||
let tx = SequentialRef r (AnnotatedHashRef (Just headRef) meRef)
|
||||
& serialise
|
||||
& LBS.toStrict
|
||||
|
||||
makeRefLogUpdate @L4Proto @HBS2Basic puk privk tx
|
||||
|
||||
|
||||
unpackTx :: MonadIO m
|
||||
=> RefLogUpdate L4Proto
|
||||
-> m (Integer, HashRef, HashRef)
|
||||
|
||||
unpackTx tx = do
|
||||
|
||||
sr <- deserialiseOrFail @SequentialRef (LBS.fromStrict (view refLogUpdData tx))
|
||||
& orThrow UnsupportedFormat
|
||||
|
||||
case sr of
|
||||
SequentialRef n (AnnotatedHashRef (Just rhh) blkh) -> pure (n,rhh,blkh)
|
||||
_ -> throwIO UnsupportedFormat
|
||||
|
||||
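-- | Reads a reflog transaction, returning its rank, the repo-head hash,
--   the decoded RepoHead and the bundle-list merkle root, failing when
--   the transaction block or any block it references is still missing
--   from storage.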
readTx :: (MonadIO m, MonadError OperationError m)
|
||||
=> AnyStorage
|
||||
-> HashRef
|
||||
-> m (Integer, HashRef, RepoHead, HashRef)
|
||||
|
||||
readTx sto href = do
|
||||
|
||||
tx <- getBlock sto (fromHashRef href)
|
||||
>>= orThrowError MissedBlockError
|
||||
<&> deserialiseOrFail @(RefLogUpdate L4Proto)
|
||||
>>= orThrowError UnsupportedFormat
|
||||
|
||||
(n,rhh,blkh) <- unpackTx tx
|
||||
|
||||
rh <- runExceptT (readFromMerkle sto (SimpleKey (fromHashRef rhh)))
|
||||
>>= orThrowError IncompleteData
|
||||
<&> deserialiseOrFail @RepoHead
|
||||
>>= orThrowError UnsupportedFormat
|
||||
|
||||
missed <- S.head_ (findMissedBlocks2 sto blkh) <&> isJust
|
||||
|
||||
when missed do
|
||||
throwError IncompleteData
|
||||
|
||||
pure (n, rhh, rh, blkh)
|
||||
|
||||
|
||||
readRepoHeadFromTx :: MonadIO m
|
||||
=> AnyStorage
|
||||
-> HashRef
|
||||
-> m (Maybe RepoHead)
|
||||
|
||||
readRepoHeadFromTx sto href = runMaybeT do
|
||||
|
||||
tx <- getBlock sto (fromHashRef href) >>= toMPlus
|
||||
<&> deserialiseOrFail @(RefLogUpdate L4Proto)
|
||||
>>= toMPlus
|
||||
|
||||
(n,rhh,_) <- unpackTx tx
|
||||
|
||||
runExceptT (readFromMerkle sto (SimpleKey (fromHashRef rhh)))
|
||||
>>= toMPlus
|
||||
<&> deserialiseOrFail @RepoHead
|
||||
>>= toMPlus
|
||||
|
||||
|
||||
data BundleMeta =
|
||||
BundleMeta
|
||||
{ bundleHash :: HashRef
|
||||
, bundleEncrypted :: Bool
|
||||
}
|
||||
deriving stock (Show,Generic)
|
||||
|
||||
data BundleWithMeta =
|
||||
BundleWithMeta
|
||||
{ bundleMeta :: BundleMeta
|
||||
, bundlebBytes :: LBS
|
||||
}
|
||||
deriving stock (Generic)
|
||||
|
||||
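-- | Reads a bundle from storage, transparently decrypting it with the
--   keyrings resolved for its group key when the underlying merkle tree
--   is encrypted.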
readBundle :: (MonadIO m, MonadError OperationError m, GroupKeyOperations m)
|
||||
=> AnyStorage
|
||||
-> RepoHead
|
||||
-> HashRef
|
||||
-> m BundleWithMeta
|
||||
readBundle sto rh ref = do
|
||||
|
||||
obj <- getBlock sto (fromHashRef ref)
|
||||
>>= orThrow MissedBlockError
|
||||
|
||||
let q = tryDetect (fromHashRef ref) obj
|
||||
|
||||
case q of
|
||||
Merkle t -> do
|
||||
let meta = BundleMeta ref False
|
||||
BundleWithMeta meta <$>
|
||||
readFromMerkle sto (SimpleKey key)
|
||||
|
||||
MerkleAnn (MTreeAnn {_mtaCrypt = EncryptGroupNaClSymm gkh _}) -> do
|
||||
ke <- loadKeyrings (HashRef gkh)
|
||||
let meta = BundleMeta ref True
|
||||
BundleWithMeta meta <$> readFromMerkle sto (ToDecryptBS ke key)
|
||||
|
||||
_ -> throwError UnsupportedFormat
|
||||
|
||||
where
|
||||
key = fromHashRef ref
|
||||
|
||||
readBundleRefs :: (MonadIO m)
|
||||
=> AnyStorage
|
||||
-> HashRef
|
||||
-> m (Either [HashRef] [HashRef])
|
||||
|
||||
readBundleRefs sto bunh = do
|
||||
r <- S.toList_ $
|
||||
walkMerkle @[HashRef] (fromHashRef bunh) (getBlock sto) $ \case
|
||||
Left h -> S.yield (Left h)
|
||||
Right ( bundles :: [HashRef] ) -> do
|
||||
mapM_ (S.yield . Right) bundles
|
||||
|
||||
let missed = lefts r
|
||||
|
||||
if not (null missed) then do
|
||||
pure (Left (fmap HashRef missed))
|
||||
else do
|
||||
pure (Right $ rights r)
|
||||
|
||||
|
||||
type GitPack = LBS.ByteString
|
||||
type UnpackedBundle = (Word32, Word32, [GitHash], GitPack)
|
||||
|
||||
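-- | Parses a bundle payload: a 4-byte big-endian length of the serialised
--   index, a 4-byte big-endian version tag, the CBOR-serialised [GitHash]
--   index of that length, and the remaining bytes as the raw git pack.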
unpackPackMay :: LBS.ByteString -> Maybe UnpackedBundle
|
||||
unpackPackMay co = result $ flip runGetOrFail co do
|
||||
w <- getWord32be
|
||||
v <- getWord32be
|
||||
idx <- lookAheadE (getLazyByteString (fromIntegral w) <&> deserialiseOrFail @[GitHash])
|
||||
>>= either (fail.show) pure
|
||||
pack <- getRemainingLazyByteString
|
||||
pure (w,v,idx,pack)
|
||||
|
||||
where
|
||||
result = \case
|
||||
Left{} -> Nothing
|
||||
Right (_,_,r) -> Just r
|
||||
|
||||
|
||||
|
||||
data WriteBundleEnv =
|
||||
WriteBundleEnvPlain
|
||||
{ wbeHead :: RepoHead
|
||||
, wbeStorage :: AnyStorage
|
||||
}
|
||||
| WriteBundleEnvEnc
|
||||
{ wbeSk1 :: SipKey
|
||||
, wbeSk2 :: SipKey
|
||||
, wbeHead :: RepoHead
|
||||
, wbeGk0 :: GK0
|
||||
, wbeGks :: GroupSecret
|
||||
, wbeStorage :: AnyStorage
|
||||
}
|
||||
|
||||
newWriteBundleEnv :: (MonadIO m, GroupKeyOperations m) => AnyStorage -> RepoHead -> m WriteBundleEnv
|
||||
newWriteBundleEnv sto rh = case _repoHeadGK0 rh of
|
||||
Nothing -> do
|
||||
pure $ WriteBundleEnvPlain rh sto
|
||||
|
||||
Just gk0h -> do
|
||||
|
||||
gk0 <- runExceptT (readGK0 sto gk0h)
|
||||
>>= either throwIO pure
|
||||
|
||||
gks <- openGroupKey gk0
|
||||
>>= orThrow (GroupKeyNotFound 3)
|
||||
|
||||
pure $ WriteBundleEnvEnc
|
||||
{ wbeSk1 = SipKey 2716370006254639645 507093936407764973
|
||||
, wbeSk2 = SipKey 9209704780415729085 272090086441077315
|
||||
, wbeHead = rh
|
||||
, wbeGk0 = gk0
|
||||
, wbeGks = gks
|
||||
, wbeStorage = sto
|
||||
}
|
||||
|
||||
makeNonceForBundle :: Monad m => WriteBundleEnv -> LBS.ByteString -> m ByteString
|
||||
makeNonceForBundle env lbs = do
|
||||
let piece = ( LBS.take (fromIntegral defBlockSize * 2) lbs
|
||||
<> serialise (wbeHead env)
|
||||
) & hashObject @HbSync & serialise & LBS.drop 1 & LBS.toStrict
|
||||
pure piece
|
||||
|
||||
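-- | Writes a bundle to storage: as a plain merkle tree when the repo head
--   carries no GK0, otherwise as a symmetrically encrypted tree whose
--   block nonces are derived via SipHash from a bundle-specific nonce.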
writeBundle :: MonadIO m => WriteBundleEnv -> LBS.ByteString -> m HashRef
|
||||
writeBundle env lbs = do
|
||||
|
||||
case env of
|
||||
WriteBundleEnvPlain{..} -> do
|
||||
writeAsMerkle wbeStorage lbs <&> HashRef
|
||||
|
||||
WriteBundleEnvEnc{..} -> do
|
||||
let bsStream = readChunkedBS lbs defBlockSize
|
||||
|
||||
nonce <- makeNonceForBundle env lbs
|
||||
|
||||
let (SipHash a) = BA.sipHash wbeSk1 nonce
|
||||
let (SipHash b) = BA.sipHash wbeSk2 nonce
|
||||
|
||||
let source = ToEncryptSymmBS wbeGks
|
||||
(Right wbeGk0)
|
||||
nonce
|
||||
bsStream
|
||||
NoMetaData
|
||||
(Just (EncryptGroupNaClSymmBlockSIP (a,b)))
|
||||
|
||||
th <- runExceptT (writeAsMerkle wbeStorage source)
|
||||
>>= orThrow StorageError
|
||||
|
||||
pure $ HashRef th
|
||||
|
|
@ -0,0 +1,68 @@
|
|||
module HBS2.Git.Local where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
|
||||
import Data.ByteString.Base16 qualified as B16
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Data.ByteString.Char8 qualified as BS
|
||||
import Data.ByteString.Char8 (ByteString)
|
||||
import Codec.Serialise
|
||||
|
||||
|
||||
data SHA1 = SHA1
|
||||
deriving stock(Eq,Ord,Data,Generic)
|
||||
|
||||
newtype GitHash = GitHash ByteString
|
||||
deriving stock (Eq,Ord,Data,Generic,Show)
|
||||
deriving newtype Hashable
|
||||
|
||||
instance Serialise GitHash
|
||||
|
||||
instance IsString GitHash where
|
||||
fromString s = GitHash (B16.decodeLenient (BS.pack s))
|
||||
|
||||
instance FromStringMaybe GitHash where
|
||||
fromStringMay s = either (const Nothing) (pure . GitHash) (B16.decode bs)
|
||||
where
|
||||
bs = BS.pack s
|
||||
|
||||
instance Pretty GitHash where
|
||||
pretty (GitHash s) = pretty @String [qc|{B16.encode s}|]
|
||||
|
||||
|
||||
newtype GitRef = GitRef { unGitRef :: ByteString }
|
||||
deriving stock (Eq,Ord,Data,Generic,Show)
|
||||
deriving newtype (IsString,Monoid,Semigroup,Hashable)
|
||||
|
||||
instance Serialise GitRef
|
||||
|
||||
mkGitRef :: ByteString -> GitRef
|
||||
mkGitRef = GitRef
|
||||
|
||||
instance Pretty GitRef where
|
||||
pretty (GitRef x) = pretty @String [qc|{x}|]
|
||||
|
||||
data GitObjectType = Commit | Tree | Blob
|
||||
deriving stock (Eq,Ord,Show,Generic)
|
||||
|
||||
instance Serialise GitObjectType
|
||||
|
||||
instance IsString GitObjectType where
|
||||
fromString = \case
|
||||
"commit" -> Commit
|
||||
"tree" -> Tree
|
||||
"blob" -> Blob
|
||||
x -> error [qc|invalid git object type {x}|]
|
||||
|
||||
instance FromStringMaybe GitObjectType where
|
||||
fromStringMay = \case
|
||||
"commit" -> Just Commit
|
||||
"tree" -> Just Tree
|
||||
"blob" -> Just Blob
|
||||
_ -> Nothing
|
||||
|
||||
instance Pretty GitObjectType where
|
||||
pretty = \case
|
||||
Commit -> pretty @String "commit"
|
||||
Tree -> pretty @String "tree"
|
||||
Blob -> pretty @String "blob"
|
|
@@ -0,0 +1,66 @@
module HBS2.Git.Local.CLI where

import HBS2.Prelude

import System.FilePath
import HBS2.System.Dir

import System.Environment hiding (setEnv)

import Control.Monad.Trans.Maybe
import Control.Applicative
import System.Process.Typed
import Data.ByteString.Lazy (ByteString)
import Data.ByteString.Lazy.Char8 qualified as LBS8
import Text.InterpolatedString.Perl6 (qc)

findGitDir :: MonadIO m => m (Maybe FilePath)
findGitDir = findGitDir' =<< pwd
  where
    findGitDir' dir = do
      let gd = dir </> ".git"
      exists <- liftIO $ doesDirectoryExist gd
      if exists
        then return $ Just gd
        else let parentDir = takeDirectory dir
             in if parentDir == dir -- we've reached the root directory
                  then return Nothing
                  else findGitDir' parentDir

checkIsBare :: MonadIO m => Maybe FilePath -> m Bool
checkIsBare fp = do

  let wd = maybe id setWorkingDir fp

  (code,s,_) <- readProcess ( shell [qc|git config --local core.bare|]
                                & setStderr closed & wd
                            )

  case (code, LBS8.words s) of
    (ExitSuccess, "true" : _) -> pure True
    _ -> pure False

gitDir :: MonadIO m => m (Maybe FilePath)
gitDir = runMaybeT do
  byEnv <- liftIO $ lookupEnv "GIT_DIR"
  byDir <- findGitDir

  byBare <- checkIsBare Nothing >>= \case
    True  -> pwd >>= expandPath <&> Just
    False -> pure Nothing

  toMPlus (byEnv <|> byDir <|> byBare)


gitRunCommand :: MonadIO m
              => String
              -> m (Either ExitCode ByteString)

gitRunCommand cmd = do
  let procCfg = setStdin closed $ setStderr closed $ shell cmd
  (code, out, _) <- readProcess procCfg
  case code of
    ExitSuccess -> pure (Right out)
    e -> pure (Left e)

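A small illustrative driver for the helpers above (hypothetical, not in the commit): it locates the enclosing `.git` directory and runs one git invocation through `gitRunCommand`, which returns either the failing `ExitCode` or the captured stdout.

```haskell
-- Hypothetical driver for HBS2.Git.Local.CLI (illustration only).
import HBS2.Git.Local.CLI (findGitDir, gitRunCommand)
import qualified Data.ByteString.Lazy.Char8 as LBS8

main :: IO ()
main = do
  -- walk upwards from the current directory until a .git directory is found
  mdir <- findGitDir
  putStrLn $ "git dir: " <> show mdir

  -- any git command: Left on non-zero exit, Right stdout otherwise
  r <- gitRunCommand "git rev-parse HEAD"
  case r of
    Left code -> putStrLn ("git failed: " <> show code)
    Right out -> LBS8.putStr out
```
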
@@ -1,6 +1,6 @@
cabal-version: 3.0
name: hbs2-git
version: 0.1.0.0
version: 0.24.1.0
-- synopsis:
-- description:
license: BSD-3-Clause
@@ -8,24 +8,21 @@ license-file: LICENSE
author: Dmitry Zuikov
maintainer: dzuikov@gmail.com
-- copyright:
category: Development
category: System
build-type: Simple
extra-doc-files: CHANGELOG.md
-- extra-doc-files: CHANGELOG.md
-- extra-source-files:

common shared-properties
  ghc-options:
    -Wall
    -Wno-type-defaults
    -fprint-potential-instances
    -- -fno-warn-unused-matches
    -- -fno-warn-unused-do-bind
    -- -Werror=missing-methods
    -- -Werror=incomplete-patterns
    -- -fno-warn-unused-binds
    -fno-warn-type-defaults
    -threaded
    -rtsopts
    -O2
    "-with-rtsopts=-N4 -A64m -AL256m -I0"

  default-language: Haskell2010
  default-language: GHC2021

  default-extensions:
    ApplicativeDo
@@ -52,147 +49,122 @@ common shared-properties
    , StandaloneDeriving
    , TupleSections
    , TypeApplications
    , TypeOperators
    , TypeFamilies
    , TemplateHaskell


  build-depends: hbs2-core, hbs2-peer
  build-depends:
      hbs2-core
    , hbs2-peer
    , hbs2-storage-simple
    , hbs2-keyman
    , db-pipe
    , suckless-conf

    , attoparsec
    , aeson
    , async
    , base16-bytestring
    , atomic-write
    , bytestring
    , cache
    , binary
    , containers
    , streaming
    , streaming-bytestring
    , streaming-commons
    , streaming-utils
    , cryptonite
    , directory
    , exceptions
    , filelock
    , filepath
    , filepattern
    , generic-lens
    , hashable
    , http-conduit
    , interpolatedstring-perl6
    , memory
    , microlens-platform
    , mtl
    , prettyprinter
    , prettyprinter-ansi-terminal
    , random
    , resourcet
    , safe
    , saltine
    , serialise
    , split
    , sqlite-simple
    , streaming
    , stm
    , suckless-conf
    , temporary
    , text
    , time
    , timeit
    , transformers
    , typed-process
    , uniplate
    , unordered-containers
    , unliftio
    , unliftio-core
    , unordered-containers
    , wai-app-file-cgi
    , wai-extra
    , zlib
    , prettyprinter
    , prettyprinter-ansi-terminal
    , random
    , vector
    , unix


library
  import: shared-properties

  exposed-modules:
    HBS2.Git.Types
    HBS2Git.Prelude
    HBS2Git.Alerts
    HBS2Git.Annotations
    HBS2Git.App
    HBS2Git.KeysMetaData
    HBS2Git.Config
    HBS2Git.Evolve
    HBS2Git.Export
    HBS2Git.Encryption
    HBS2Git.Encryption.KeyInfo
    HBS2Git.GitRepoLog
    HBS2Git.Import
    HBS2Git.KeysCommand
    HBS2Git.Tools
    HBS2.Git.Local
    HBS2.Git.Local.CLI
    HBS2Git.PrettyStuff
    HBS2Git.State
    HBS2Git.Types

    HBS2.Git.Data.Tx
    HBS2.Git.Data.GK
    HBS2.Git.Data.RefLog
    HBS2.Git.Data.LWWBlock

    HBS2.Git.Client.Prelude
    HBS2.Git.Client.App.Types
    HBS2.Git.Client.App.Types.GitEnv
    HBS2.Git.Client.App
    HBS2.Git.Client.Config
    HBS2.Git.Client.State
    HBS2.Git.Client.RefLog
    HBS2.Git.Client.Export
    HBS2.Git.Client.Import
    HBS2.Git.Client.Progress

  build-depends: base
    , base16-bytestring
    , binary
    , unix

  hs-source-dirs: hbs2-git-client-lib


executable hbs2-git-subscribe
  import: shared-properties
  main-is: Main.hs
  -- other-modules:
  -- other-extensions:
  build-depends: base
    , exceptions
    , terminal-progress-bar
    , http-types
    , uuid
    , zlib
  build-depends:
      base, hbs2-git
    , binary
    , vector
    , optparse-applicative

  hs-source-dirs: lib
  default-language: Haskell2010
  hs-source-dirs: git-hbs2-subscribe
  default-language: GHC2021

executable git-hbs2
  import: shared-properties
  main-is: Main.hs

  ghc-options:
    -threaded
    -rtsopts
    "-with-rtsopts=-N4 -A64m -AL256m -I0"

  other-modules:
    RunShow
    Paths_hbs2_git

  -- other-modules:
  -- other-extensions:
  build-depends:
      base, hbs2-git
    , binary
    , vector
    , optparse-applicative
    , http-types
    , template-haskell

  hs-source-dirs: git-hbs2
  default-language: Haskell2010
  default-language: GHC2021


executable git-remote-hbs2
  import: shared-properties
  main-is: GitRemoteMain.hs

  ghc-options:
    -threaded
    -rtsopts
    "-with-rtsopts=-N4 -A64m -AL256m -I0"

  other-modules:
    GitRemoteTypes
    GitRemotePush

  main-is: Main.hs
  -- other-modules:
  -- other-extensions:
  build-depends:
      base, hbs2-git
    , async
    , attoparsec
    , binary
    , vector
    , optparse-applicative
    , unix
    , unliftio
    , terminal-progress-bar
    , http-types

  hs-source-dirs: git-hbs2
  default-language: Haskell2010

  hs-source-dirs: git-remote-hbs2
  default-language: GHC2021

@@ -1,2 +0,0 @@
cradle:
  cabal:

@@ -1,31 +0,0 @@
module HBS2.Git.Local
  ( module HBS2.Git.Types
  , module HBS2.Git.Local
  ) where

import HBS2.Git.Types

import Data.Functor
import Data.String
import Control.Monad
import Control.Monad.IO.Class
import Data.Set (Set)
import Data.Set qualified as Set
import System.Directory
import System.FilePath

gitReadRefs :: MonadIO m => FilePath -> Set String -> m [(GitRef, GitHash)]
gitReadRefs p m = do

  xs <- forM (Set.toList m) $ \br -> do
    let fn = p </> "refs/heads" </> br
    here <- liftIO $ doesFileExist fn
    if here then do
      s <- liftIO $ readFile fn <&> (fromString br,) . fromString
      pure [s]
    else do
      pure mempty

  pure $ mconcat xs

@ -1,515 +0,0 @@
|
|||
{-# Language AllowAmbiguousTypes #-}
|
||||
module HBS2.Git.Local.CLI
|
||||
( module HBS2.Git.Local.CLI
|
||||
, getStdin
|
||||
, getStdout
|
||||
, stopProcess
|
||||
) where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Git.Types
|
||||
|
||||
import HBS2.System.Logger.Simple
|
||||
|
||||
import Control.Concurrent.Async
|
||||
import Control.Concurrent.STM
|
||||
import Control.Monad.Writer
|
||||
import Data.HashSet (HashSet)
|
||||
import Data.HashSet qualified as HashSet
|
||||
import Data.HashMap.Strict (HashMap)
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.ByteString.Char8 qualified as BS8
|
||||
import Data.ByteString.Lazy.Char8 (ByteString)
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Data.Functor
|
||||
import Data.Function
|
||||
import Data.Maybe
|
||||
import Data.Set qualified as Set
|
||||
import Data.Set (Set)
|
||||
import Data.List qualified as List
|
||||
import Data.Text.Encoding qualified as Text
|
||||
import Data.Text.Encoding (decodeLatin1)
|
||||
import Data.Text qualified as Text
|
||||
import System.Process.Typed
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Lens.Micro.Platform
|
||||
import Control.Monad.Trans.Maybe
|
||||
import System.IO
|
||||
|
||||
-- FIXME: specify-git-dir
|
||||
|
||||
parseHash :: BS8.ByteString -> GitHash
|
||||
parseHash = fromString . BS8.unpack
|
||||
|
||||
parseHashLazy :: LBS.ByteString -> GitHash
|
||||
parseHashLazy = fromString . BS8.unpack . LBS.toStrict
|
||||
|
||||
gitGetDepsPure :: GitObject -> Set GitHash
|
||||
|
||||
gitGetDepsPure (GitObject Tree bs) = Set.fromList $ execWriter (go bs)
|
||||
where
|
||||
go :: ByteString -> Writer [GitHash] ()
|
||||
go s = case LBS.uncons s of
|
||||
Nothing -> pure ()
|
||||
Just ('\x00', rest) -> do
|
||||
let (hash, rest') = LBS.splitAt 20 rest
|
||||
tell [GitHash (LBS.toStrict hash)]
|
||||
go rest'
|
||||
|
||||
Just (_, rest) -> go rest
|
||||
|
||||
gitGetDepsPure (GitObject Commit bs) = Set.fromList (recurse ls)
|
||||
where
|
||||
ls = LBS.lines bs
|
||||
recurse :: [LBS.ByteString] -> [GitHash]
|
||||
recurse [] = []
|
||||
recurse ("":_) = []
|
||||
recurse (x:xs) =
|
||||
case LBS.words x of
|
||||
["tree", s] -> fromString (LBS.unpack s) : recurse xs
|
||||
["parent", s] -> fromString (LBS.unpack s) : recurse xs
|
||||
_ -> recurse xs
|
||||
|
||||
|
||||
gitGetDepsPure _ = mempty
|
||||
|
||||
gitCommitGetParentsPure :: LBS.ByteString -> [GitHash]
|
||||
gitCommitGetParentsPure bs = foldMap seek pairs
|
||||
where
|
||||
pairs = take 2 . LBS.words <$> LBS.lines bs
|
||||
seek = \case
|
||||
["parent", x] -> [fromString (LBS.unpack x)]
|
||||
_ -> mempty
|
||||
|
||||
data GitParsedRef = GitCommitRef GitHash
|
||||
| GitTreeRef GitHash
|
||||
deriving stock (Data,Eq,Ord)
|
||||
|
||||
gitGetParsedCommit :: MonadIO m => GitObject -> m [GitParsedRef]
|
||||
gitGetParsedCommit (GitObject Commit bs) = do
|
||||
let ws = fmap LBS.words (LBS.lines bs)
|
||||
oo <- forM ws $ \case
|
||||
["tree", s] -> pure [GitTreeRef (fromString (LBS.unpack s))]
|
||||
["commit", s] -> pure [GitCommitRef (fromString (LBS.unpack s))]
|
||||
_ -> pure mempty
|
||||
|
||||
pure $ mconcat oo
|
||||
|
||||
gitGetParsedCommit _ = pure mempty
|
||||
|
||||
-- FIXME: use-fromStringMay
|
||||
gitGetObjectType :: MonadIO m => GitHash -> m (Maybe GitObjectType)
|
||||
gitGetObjectType hash = do
|
||||
(_, out, _) <- readProcess (shell [qc|git cat-file -t {pretty hash}|])
|
||||
case headMay (LBS.words out) of
|
||||
Just "commit" -> pure (Just Commit)
|
||||
Just "tree" -> pure (Just Tree)
|
||||
Just "blob" -> pure (Just Blob)
|
||||
_ -> pure Nothing
|
||||
|
||||
|
||||
|
||||
gitGetCommitDeps :: MonadIO m => GitHash -> m [GitHash]
|
||||
gitGetCommitDeps hash = do
|
||||
(_, out, _) <- readProcess (shell [qc|git cat-file commit {pretty hash}|])
|
||||
pure $ Set.toList (gitGetDepsPure (GitObject Commit out))
|
||||
|
||||
gitGetTreeDeps :: MonadIO m => GitHash -> m [GitHash]
|
||||
gitGetTreeDeps hash = do
|
||||
(_, out, _) <- readProcess (shell [qc|git ls-tree {pretty hash}|])
|
||||
let ls = fmap parseHash . getHash <$> BS8.lines (LBS.toStrict out)
|
||||
pure (catMaybes ls)
|
||||
where
|
||||
getHash = flip atMay 2 . BS8.words
|
||||
|
||||
|
||||
gitGetDependencies :: MonadIO m => GitHash -> m [GitHash]
|
||||
gitGetDependencies hash = do
|
||||
ot <- gitGetObjectType hash
|
||||
case ot of
|
||||
Just Commit -> gitGetCommitDeps hash
|
||||
Just Tree -> gitGetTreeDeps hash
|
||||
_ -> pure mempty
|
||||
|
||||
|
||||
-- | calculates all dependencies of given list
|
||||
-- of git objects
|
||||
gitGetAllDependencies :: MonadIO m
|
||||
=> Int -- ^ number of threads
|
||||
-> [ GitHash ] -- ^ initial list of objects to calculate deps
|
||||
-> ( GitHash -> IO [GitHash] ) -- ^ lookup function
|
||||
-> ( GitHash -> IO () ) -- ^ progress update function
|
||||
-> m [(GitHash, GitHash)]
|
||||
|
||||
gitGetAllDependencies n objects lookup progress = liftIO do
|
||||
input <- newTQueueIO
|
||||
output <- newTQueueIO
|
||||
|
||||
memo <- newTVarIO ( mempty :: HashSet GitHash )
|
||||
work <- newTVarIO ( mempty :: HashMap Int Int )
|
||||
num <- newTVarIO 1
|
||||
|
||||
atomically $ mapM_ (writeTQueue input) objects
|
||||
|
||||
replicateConcurrently_ n $ do
|
||||
|
||||
i <- atomically $ stateTVar num ( \x -> (x, succ x) )
|
||||
|
||||
fix \next -> do
|
||||
o <- atomically $ tryReadTQueue input
|
||||
case o of
|
||||
Nothing -> do
|
||||
todo <- atomically $ do
|
||||
modifyTVar work (HashMap.delete i)
|
||||
readTVar work <&> HashMap.elems <&> sum
|
||||
|
||||
when (todo > 0) next
|
||||
|
||||
Just h -> do
|
||||
|
||||
progress h
|
||||
|
||||
done <- atomically $ do
|
||||
here <- readTVar memo <&> HashSet.member h
|
||||
modifyTVar memo (HashSet.insert h)
|
||||
pure here
|
||||
|
||||
unless done do
|
||||
cached <- lookup h
|
||||
|
||||
deps <- if null cached then do
|
||||
gitGetDependencies h
|
||||
else
|
||||
pure cached
|
||||
|
||||
forM_ deps $ \d -> do
|
||||
atomically $ writeTQueue output (h,d)
|
||||
|
||||
atomically $ modifyTVar work (HashMap.insert i (length deps))
|
||||
|
||||
next
|
||||
|
||||
liftIO $ atomically $ flushTQueue output
|
||||
|
||||
|
||||
gitGetTransitiveClosure :: forall cache . (HasCache cache GitHash (Set GitHash) IO)
|
||||
=> cache
|
||||
-> Set GitHash
|
||||
-> GitHash
|
||||
-> IO (Set GitHash)
|
||||
|
||||
gitGetTransitiveClosure cache exclude hash = do
|
||||
-- trace $ "gitGetTransitiveClosure" <+> pretty hash
|
||||
r <- cacheLookup cache hash :: IO (Maybe (Set GitHash))
|
||||
case r of
|
||||
Just xs -> pure xs
|
||||
Nothing -> do
|
||||
deps <- gitGetDependencies hash
|
||||
clos <- mapM (gitGetTransitiveClosure cache exclude) deps
|
||||
let res = (Set.fromList (hash:deps) <> Set.unions clos) `Set.difference` exclude
|
||||
cacheInsert cache hash res
|
||||
pure res
|
||||
|
||||
|
||||
-- gitGetAllDepsByCommit :: GitHash -> IO [GitHash]
|
||||
-- gitGetAllDepsByCommit h = do
|
||||
-- -- FIXME: error-handling
|
||||
-- (_, out, _) <- liftIO $ readProcess (shell [qc|git rev-list {pretty h}|])
|
||||
-- let ls = LBS.lines out & fmap ( fromString . LBS.unpack )
|
||||
|
||||
-- forM ls $ \l -> do
|
||||
-- o <- liftIO $ gitReadObject (Just Commit) l
|
||||
-- let tree = gitGetDepsPure (GitObject Commit o)
|
||||
-- (_, out, _) <- liftIO $ readProcess (shell [qc|git rev-list {pretty h}|])
|
||||
|
||||
-- print tree
|
||||
|
||||
-- -- mapM_ (print.pretty) ls
|
||||
-- pure []
|
||||
-- deps <- mapM gitGetDependencies ls <&> mconcat
|
||||
-- pure $ List.nub $ ls <> deps
|
||||
|
||||
-- FIXME: inject-git-working-dir-via-typeclass
|
||||
|
||||
gitConfigGet :: MonadIO m => Text -> m (Maybe Text)
|
||||
gitConfigGet k = do
|
||||
let cmd = [qc|git config {k}|]
|
||||
(code, out, _) <- liftIO $ readProcess (shell cmd)
|
||||
|
||||
case code of
|
||||
ExitSuccess -> pure (Just $ Text.strip [qc|{LBS.unpack out}|])
|
||||
_ -> pure Nothing
|
||||
|
||||
|
||||
gitConfigSet :: MonadIO m => Text -> Text -> m ()
|
||||
gitConfigSet k v = do
|
||||
let cmd = [qc|git config --add {k} {v}|]
|
||||
liftIO $ putStrLn cmd
|
||||
runProcess_ (shell cmd)
|
||||
|
||||
gitGetRemotes :: MonadIO m => m [(Text,Text)]
|
||||
gitGetRemotes = do
|
||||
let cmd = [qc|git config --get-regexp '^remote\..*\.url$'|]
|
||||
(code, out, _) <- liftIO $ readProcess (shell cmd)
|
||||
|
||||
let txt = Text.decodeUtf8 (LBS.toStrict out)
|
||||
|
||||
let ls = Text.lines txt -- & foldMap (drop 1 . Text.words)
|
||||
|
||||
remotes <- forM ls $ \l ->
|
||||
case Text.words l of
|
||||
[r,val] | Text.isPrefixOf "remote." r -> pure $ (,val) <$> stripRemote r
|
||||
_ -> pure Nothing
|
||||
|
||||
pure $ catMaybes remotes
|
||||
|
||||
where
|
||||
stripRemote x = headMay $ take 1 $ drop 1 $ Text.splitOn "." x
|
||||
|
||||
-- FIXME: respect-git-workdir
|
||||
gitHeadFullName :: MonadIO m => GitRef -> m GitRef
|
||||
gitHeadFullName (GitRef r) = do
|
||||
let r' = Text.stripPrefix "refs/heads" r & fromMaybe r
|
||||
pure $ GitRef $ "refs/heads/" <> r'
|
||||
|
||||
-- FIXME: error handling!
|
||||
gitReadObject :: MonadIO m => Maybe GitObjectType -> GitHash -> m LBS.ByteString
|
||||
gitReadObject mbType' hash = do
|
||||
|
||||
mbType'' <- case mbType' of
|
||||
Nothing -> gitGetObjectType hash
|
||||
Just tp -> pure (Just tp)
|
||||
|
||||
oType <- maybe (error [qc|unknown type of {pretty hash}|]) pure mbType''
|
||||
|
||||
-- liftIO $ hPutStrLn stderr [qc|git cat-file {pretty oType} {pretty hash}|]
|
||||
|
||||
(_, out, _) <- readProcess (shell [qc|git cat-file {pretty oType} {pretty hash}|])
|
||||
|
||||
pure out
|
||||
|
||||
|
||||
gitRemotes :: MonadIO m => m (Set GitRef)
|
||||
gitRemotes = do
|
||||
let cmd = setStdin closed $ setStdout closed
|
||||
$ setStderr closed
|
||||
$ shell [qc|git remote|]
|
||||
|
||||
(_, out, _) <- readProcess cmd
|
||||
let txt = decodeLatin1 (LBS.toStrict out)
|
||||
pure $ Set.fromList (GitRef . Text.strip <$> Text.lines txt)
|
||||
|
||||
|
||||
gitNormalizeRemoteBranchName :: MonadIO m => GitRef -> m GitRef
|
||||
gitNormalizeRemoteBranchName orig@(GitRef ref) = do
|
||||
remotes <- gitRemotes
|
||||
stripped <- forM (Set.toList remotes) $ \(GitRef remote) -> do
|
||||
pure $ GitRef <$> (("refs/heads" <>) <$> Text.stripPrefix remote ref)
|
||||
|
||||
|
||||
let GitRef r = headDef orig (catMaybes stripped)
|
||||
|
||||
if Text.isPrefixOf "refs/heads" r
|
||||
then pure (GitRef r)
|
||||
else pure (GitRef $ "refs/heads/" <> r)
|
||||
|
||||
|
||||
gitStoreObject :: MonadIO m => GitObject -> m (Maybe GitHash)
|
||||
gitStoreObject (GitObject t s) = do
|
||||
let cmd = [qc|git hash-object -t {pretty t} -w --stdin|]
|
||||
let procCfg = setStdin (byteStringInput s) $ setStderr closed
|
||||
(shell cmd)
|
||||
(code, out, _) <- readProcess procCfg
|
||||
case code of
|
||||
ExitSuccess -> pure $ Just (parseHashLazy out)
|
||||
ExitFailure{} -> pure Nothing
|
||||
|
||||
gitCheckObject :: MonadIO m => GitHash -> m Bool
|
||||
gitCheckObject gh = do
|
||||
let cmd = [qc|git cat-file -e {pretty gh}|]
|
||||
let procCfg = setStderr closed (shell cmd)
|
||||
(code, _, _) <- readProcess procCfg
|
||||
case code of
|
||||
ExitSuccess -> pure True
|
||||
ExitFailure{} -> pure False
|
||||
|
||||
gitListAllObjects :: MonadIO m => m [(GitObjectType, GitHash)]
|
||||
gitListAllObjects = do
|
||||
let cmd = [qc|git cat-file --batch-check --batch-all-objects|]
|
||||
let procCfg = setStdin closed $ setStderr closed (shell cmd)
|
||||
(_, out, _) <- readProcess procCfg
|
||||
|
||||
pure $ LBS.lines out & foldMap (fromLine . LBS.words)
|
||||
|
||||
where
|
||||
fromLine = \case
|
||||
[ha, tp, _] -> [(fromString (LBS.unpack tp), fromString (LBS.unpack ha))]
|
||||
_ -> []
|
||||
|
||||
-- FIXME: better error handling
|
||||
gitGetHash :: MonadIO m => GitRef -> m (Maybe GitHash)
|
||||
gitGetHash ref = do
|
||||
|
||||
trace $ "gitGetHash" <+> [qc|git rev-parse {pretty ref}|]
|
||||
|
||||
(code, out, _) <- readProcess (shell [qc|git rev-parse {pretty ref}|])
|
||||
|
||||
if code == ExitSuccess then do
|
||||
let hash = fromString . LBS.unpack <$> headMay (LBS.lines out)
|
||||
pure hash
|
||||
else
|
||||
pure Nothing
|
||||
|
||||
gitGetBranchHEAD :: MonadIO m => m (Maybe GitRef)
|
||||
gitGetBranchHEAD = do
|
||||
(code, out, _) <- readProcess (shell [qc|git rev-parse --abbrev-ref HEAD|])
|
||||
|
||||
if code == ExitSuccess then do
|
||||
let hash = fromString . LBS.unpack <$> headMay (LBS.lines out)
|
||||
pure hash
|
||||
else
|
||||
pure Nothing
|
||||
|
||||
|
||||
gitListLocalBranches :: MonadIO m => m [(GitRef, GitHash)]
|
||||
gitListLocalBranches = do
|
||||
let cmd = [qc|git branch --format='%(objectname) %(refname)'|]
|
||||
let procCfg = setStdin closed $ setStderr closed (shell cmd)
|
||||
(_, out, _) <- readProcess procCfg
|
||||
|
||||
pure $ LBS.lines out & foldMap (fromLine . LBS.words)
|
||||
|
||||
where
|
||||
fromLine = \case
|
||||
[h, n] -> [(fromString (LBS.unpack n), fromString (LBS.unpack h))]
|
||||
_ -> []
|
||||
|
||||
|
||||
gitListAllCommits :: MonadIO m => m [GitHash]
|
||||
gitListAllCommits = do
|
||||
let cmd = [qc|git log --all --pretty=format:'%H'|]
|
||||
let procCfg = setStdin closed $ setStderr closed (shell cmd)
|
||||
(_, out, _) <- readProcess procCfg
|
||||
pure $ fmap (fromString . LBS.unpack) (LBS.lines out)
|
||||
|
||||
gitRunCommand :: MonadIO m => String -> m (Either ExitCode ByteString)
|
||||
gitRunCommand cmd = do
|
||||
let procCfg = setStdin closed $ setStderr closed (shell cmd)
|
||||
(code, out, _) <- readProcess procCfg
|
||||
case code of
|
||||
ExitSuccess -> pure (Right out)
|
||||
e -> pure (Left e)
|
||||
|
||||
-- | list all commits from the given one in order of date
|
||||
gitListAllCommitsExceptBy :: MonadIO m => Set GitHash -> Maybe GitHash -> GitHash -> m [GitHash]
|
||||
gitListAllCommitsExceptBy excl l h = do
|
||||
let from = maybe mempty (\r -> [qc|{pretty r}..|] ) l
|
||||
let cmd = [qc|git rev-list --reverse --date-order {from}{pretty h}|]
|
||||
let procCfg = setStdin closed $ setStderr closed (shell cmd)
|
||||
(_, out, _) <- readProcess procCfg
|
||||
let res = fmap (fromString . LBS.unpack) (LBS.lines out)
|
||||
pure $ List.reverse $ filter ( not . flip Set.member excl) res
|
||||
|
||||
-- | list all objects for the given commit range in order of date
|
||||
gitRevList :: MonadIO m => Maybe GitHash -> GitHash -> m [GitHash]
|
||||
gitRevList l h = do
|
||||
let from = maybe mempty (\r -> [qc|{pretty r}..|] ) l
|
||||
-- let cmd = [qc|git rev-list --objects --in-commit-order --reverse --date-order {from}{pretty h}|]
|
||||
-- let cmd = [qc|git rev-list --objects --reverse --in-commit-order {from}{pretty h}|]
|
||||
let cmd = [qc|git rev-list --reverse --in-commit-order --objects {from}{pretty h}|]
|
||||
let procCfg = setStdin closed $ setStderr closed (shell cmd)
|
||||
(_, out, _) <- readProcess procCfg
|
||||
pure $ mapMaybe (fmap (fromString . LBS.unpack) . headMay . LBS.words) (LBS.lines out)
|
||||
|
||||
-- TODO: handle-invalid-input-somehow
|
||||
gitGetObjectTypeMany :: MonadIO m => [GitHash] -> m [(GitHash, GitObjectType)]
|
||||
gitGetObjectTypeMany hashes = do
|
||||
let hss = LBS.unlines $ fmap (LBS.pack.show.pretty) hashes
|
||||
let cmd = [qc|git cat-file --batch-check='%(objectname) %(objecttype)'|]
|
||||
let procCfg = setStdin (byteStringInput hss) $ setStderr closed (shell cmd)
|
||||
(_, out, _) <- readProcess procCfg
|
||||
pure $ mapMaybe (parse . fmap LBS.unpack . LBS.words) (LBS.lines out)
|
||||
where
|
||||
parse [h,tp] = (,) <$> fromStringMay h <*> fromStringMay tp
|
||||
parse _ = Nothing
|
||||
|
||||
gitGetCommitImmediateDeps :: MonadIO m => GitHash -> m [GitHash]
|
||||
gitGetCommitImmediateDeps h = do
|
||||
o <- gitReadObject (Just Commit) h
|
||||
let lws = LBS.lines o & fmap LBS.words
|
||||
|
||||
t <- forM lws $ \case
|
||||
["tree", hs] -> pure (Just ( fromString @GitHash (LBS.unpack hs) ))
|
||||
_ -> pure Nothing
|
||||
|
||||
let tree = take 1 $ catMaybes t
|
||||
|
||||
deps <- gitRunCommand [qc|git rev-list --objects {pretty (headMay tree)}|]
|
||||
>>= either (const $ pure mempty)
|
||||
(pure . mapMaybe withLine . LBS.lines)
|
||||
|
||||
pure $ List.nub $ tree <> deps
|
||||
where
|
||||
withLine :: LBS.ByteString -> Maybe GitHash
|
||||
withLine l = do
|
||||
let wordsInLine = LBS.words l
|
||||
firstWord <- listToMaybe wordsInLine
|
||||
pure $ fromString @GitHash $ LBS.unpack firstWord
|
||||
|
||||
|
||||
startGitHashObject :: MonadIO m => GitObjectType -> m (Process Handle () ())
|
||||
startGitHashObject objType = do
|
||||
let cmd = "git"
|
||||
let args = ["hash-object", "-w", "-t", show (pretty objType), "--stdin-paths"]
|
||||
let config = setStdin createPipe $ setStdout closed $ setStderr inherit $ proc cmd args
|
||||
startProcess config
|
||||
|
||||
startGitCatFile :: MonadIO m => m (Process Handle Handle ())
|
||||
startGitCatFile = do
|
||||
let cmd = "git"
|
||||
let args = ["cat-file", "--batch"]
|
||||
let config = setStdin createPipe $ setStdout createPipe $ setStderr closed $ proc cmd args
|
||||
startProcess config
|
||||
|
||||
gitReadFromCatFileBatch :: MonadIO m
|
||||
=> Process Handle Handle a
|
||||
-> GitHash
|
||||
-> m (Maybe GitObject)
|
||||
|
||||
gitReadFromCatFileBatch prc gh = do
|
||||
|
||||
let ssin = getStdin prc
|
||||
let sout = getStdout prc
|
||||
|
||||
liftIO $ hPrint ssin (pretty gh) >> hFlush ssin
|
||||
|
||||
runMaybeT do
|
||||
|
||||
here <- liftIO $ hWaitForInput sout 1000
|
||||
|
||||
guard here
|
||||
|
||||
header <- liftIO $ BS8.hGetLine sout
|
||||
|
||||
case BS8.unpack <$> BS8.words header of
|
||||
[ha, t, s] -> do
|
||||
(_, tp, size) <- MaybeT $ pure $ (,,) <$> fromStringMay @GitHash ha
|
||||
<*> fromStringMay @GitObjectType t
|
||||
<*> readMay s
|
||||
|
||||
content <- liftIO $ LBS.hGet sout size
|
||||
|
||||
guard (LBS.length content == fromIntegral size)
|
||||
|
||||
void $ liftIO $ LBS.hGet sout 1
|
||||
|
||||
let object = GitObject tp content
|
||||
|
||||
-- TODO: optionally-check-hash
|
||||
-- guard (gh== gitHashObject object)
|
||||
|
||||
pure object
|
||||
|
||||
_ -> MaybeT $ pure Nothing
|
||||
|
||||
|
|
@@ -1,134 +0,0 @@
{-# Language AllowAmbiguousTypes #-}
module HBS2.Git.Types where

import HBS2.Prelude
import HBS2.System.Logger.Simple

import Crypto.Hash hiding (SHA1)
import Crypto.Hash qualified as Crypto
import Data.Aeson
import Data.ByteArray qualified as BA
import Data.ByteString.Base16 qualified as B16
import Data.ByteString (ByteString)
import Data.ByteString.Char8 qualified as BS
import Data.ByteString.Lazy.Char8 qualified as LBS
import Data.Data
import Data.Generics.Uniplate.Data()
import Data.String (IsString(..))
import Data.Text.Encoding (decodeLatin1)
import Data.Text qualified as Text
import Data.Text (Text)
import GHC.Generics
import Prettyprinter
import Text.InterpolatedString.Perl6 (qc)
import Data.Hashable
import Codec.Serialise
import Data.Maybe

class Monad m => HasCache t k v m where
  cacheLookup :: t -> k -> m (Maybe v)
  cacheInsert :: t -> k -> v -> m ()

data SHA1 = SHA1
  deriving stock(Eq,Ord,Data,Generic)

newtype GitHash = GitHash ByteString
  deriving stock (Eq,Ord,Data,Generic,Show)
  deriving newtype Hashable

instance Serialise GitHash

instance IsString GitHash where
  fromString s = GitHash (B16.decodeLenient (BS.pack s))

instance FromStringMaybe GitHash where
  fromStringMay s = either (const Nothing) pure (GitHash <$> B16.decode bs)
    where
      bs = BS.pack s

instance Pretty GitHash where
  pretty (GitHash s) = pretty @String [qc|{B16.encode s}|]


data GitObjectType = Commit | Tree | Blob
  deriving stock (Eq,Ord,Show,Generic)

instance ToJSON GitObjectType
instance FromJSON GitObjectType

instance IsString GitObjectType where
  fromString = \case
    "commit" -> Commit
    "tree" -> Tree
    "blob" -> Blob
    x -> error [qc|invalid git object type {x}|]

instance FromStringMaybe GitObjectType where
  fromStringMay = \case
    "commit" -> Just Commit
    "tree" -> Just Tree
    "blob" -> Just Blob
    _ -> Nothing

instance Pretty GitObjectType where
  pretty = \case
    Commit -> pretty @String "commit"
    Tree -> pretty @String "tree"
    Blob -> pretty @String "blob"


data GitObject = GitObject GitObjectType LBS.ByteString

newtype GitRef = GitRef { unGitRef :: Text }
  deriving stock (Eq,Ord,Data,Generic,Show)
  deriving newtype (IsString,FromJSON,ToJSON,Monoid,Semigroup,Hashable)

instance Serialise GitRef

mkGitRef :: ByteString -> GitRef
mkGitRef x = GitRef (decodeLatin1 x)

instance Pretty GitRef where
  pretty (GitRef x) = pretty @String [qc|{x}|]


instance FromJSONKey GitRef where
  fromJSONKey = FromJSONKeyText GitRef

class Monad m => HasDependecies m a where
  getDependencies :: a -> m [GitHash]

class GitHashed a where
  gitHashObject :: a -> GitHash

instance GitHashed LBS.ByteString where
  gitHashObject s = GitHash $ BA.convert digest
    where
      digest = hashlazy s :: Digest Crypto.SHA1

instance GitHashed GitObject where
  gitHashObject (GitObject t c) = gitHashObject (hd <> c)
    where
      hd = LBS.pack $ show (pretty t) <> " " <> show (LBS.length c) <> "\x0"

normalizeRef :: GitRef -> GitRef
normalizeRef (GitRef x) = GitRef "refs/heads/" <> GitRef (fromMaybe x (Text.stripPrefix "refs/heads/" (strip x)))
  where
    strip = Text.dropWhile (=='+')

guessHead :: GitRef -> Integer
guessHead = \case
  "refs/heads/master" -> 0
  "refs/heads/main" -> 0
  _ -> 1

shutUp :: MonadIO m => m ()
shutUp = do
  setLoggingOff @DEBUG
  setLoggingOff @ERROR
  setLoggingOff @NOTICE
  setLoggingOff @TRACE
  setLoggingOff @INFO
  setLoggingOff @WARN

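The `GitHashed GitObject` instance above implements the standard git object hashing scheme: SHA1 over a `"<type> <size>\0"` header followed by the payload. A standalone sketch of the same computation with cryptonite, for illustration only (names here are not from the repository):

```haskell
-- Standalone illustration of git object hashing: sha1("<type> <size>\0" <> payload).
-- This mirrors the GitHashed instance above; it is not code from the repository.
import Crypto.Hash (Digest, SHA1, hashlazy)
import qualified Data.ByteString.Lazy.Char8 as LBS

gitBlobSha1 :: LBS.ByteString -> Digest SHA1
gitBlobSha1 payload = hashlazy (header <> payload)
  where
    -- git prepends "blob <byte count>\0" before hashing a blob
    header = LBS.pack ("blob " <> show (LBS.length payload) <> "\0")

main :: IO ()
main =
  -- `git hash-object` over a file containing "hello\n" prints the same digest
  print (gitBlobSha1 (LBS.pack "hello\n"))
```
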
@@ -1,9 +0,0 @@
module HBS2Git.Alerts where

import HBS2.Prelude

import Text.InterpolatedString.Perl6 (qc)

noKeyInfoMsg :: forall a . Pretty a => a -> String
noKeyInfoMsg repo =
  [qc|*** No KeyInfo found, maybe malformed 'encryption' section for {pretty repo} in config|]

@@ -1,20 +0,0 @@
module HBS2Git.Annotations where

import HBS2Git.Prelude
import HBS2Git.Encryption

data Annotation =
  GK1 HashRef (GroupKey 'Symm HBS2Basic)
  deriving (Generic)

data Annotations =
    NoAnnotations
  | SmallAnnotations [Annotation]
  deriving (Generic)

instance Serialise Annotation
instance Serialise Annotations

@ -1,602 +0,0 @@
|
|||
{-# OPTIONS_GHC -fno-warn-orphans #-}
|
||||
{-# Language AllowAmbiguousTypes #-}
|
||||
{-# Language UndecidableInstances #-}
|
||||
module HBS2Git.App
|
||||
( module HBS2Git.App
|
||||
, module HBS2Git.Types
|
||||
, HasStorage(..)
|
||||
, HasConf(..)
|
||||
)
|
||||
where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Base58
|
||||
import HBS2.OrDie
|
||||
import HBS2.Hash
|
||||
import HBS2.Clock
|
||||
import HBS2.Storage
|
||||
import HBS2.Storage.Operations.ByteString as OP
|
||||
import HBS2.Net.Auth.GroupKeySymm qualified as Symm
|
||||
import HBS2.System.Logger.Simple
|
||||
import HBS2.Merkle
|
||||
import HBS2.Git.Types
|
||||
import HBS2.Peer.RPC.Client.StorageClient
|
||||
import HBS2.Net.Auth.Credentials hiding (getCredentials)
|
||||
import HBS2.Peer.Proto
|
||||
import HBS2.Defaults (defBlockSize)
|
||||
|
||||
import HBS2.Peer.RPC.Client.Unix
|
||||
import HBS2.Peer.RPC.API.Peer
|
||||
import HBS2.Peer.RPC.API.RefLog
|
||||
|
||||
import HBS2Git.Types
|
||||
import HBS2Git.Config as Config
|
||||
import HBS2Git.State
|
||||
import HBS2Git.KeysMetaData
|
||||
import HBS2Git.Encryption
|
||||
import HBS2Git.Evolve
|
||||
import HBS2Git.PrettyStuff
|
||||
import HBS2Git.Alerts
|
||||
|
||||
import Data.Maybe
|
||||
import Control.Monad.Trans.Maybe
|
||||
import Data.Foldable
|
||||
import Data.Either
|
||||
import Control.Monad.Reader
|
||||
import Control.Monad.Trans.Resource
|
||||
import Control.Monad.Except (runExceptT)
|
||||
import Control.Monad.Catch
|
||||
import Crypto.Saltine.Core.Sign qualified as Sign
|
||||
import Data.ByteString.Lazy.Char8 (ByteString)
|
||||
import Data.ByteString.Char8 qualified as B8
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Data.Set (Set)
|
||||
import Data.Set qualified as Set
|
||||
import Lens.Micro.Platform
|
||||
import System.Directory
|
||||
import System.FilePattern.Directory
|
||||
import System.FilePath
|
||||
import System.Process.Typed
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Control.Concurrent.STM (flushTQueue)
|
||||
import Codec.Serialise
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.HashSet qualified as HashSet
|
||||
import Data.List qualified as List
|
||||
import Data.Text qualified as Text
|
||||
import System.Environment
|
||||
|
||||
import Prettyprinter.Render.Terminal
|
||||
|
||||
import Streaming.Prelude qualified as S
|
||||
|
||||
import UnliftIO as UIO
|
||||
|
||||
data NoRPCException = NoRPCException
|
||||
deriving stock (Show, Typeable)
|
||||
|
||||
instance Exception NoRPCException
|
||||
|
||||
-- instance HasTimeLimits UNIX (ServiceProto PeerAPI UNIX) m where
|
||||
|
||||
instance MonadIO m => HasCfgKey ConfBranch (Set String) m where
|
||||
key = "branch"
|
||||
|
||||
instance MonadIO m => HasCfgKey ConfBranch (Set GitRef) m where
|
||||
key = "branch"
|
||||
|
||||
instance MonadIO m => HasCfgKey HeadBranch (Maybe GitRef) m where
|
||||
key = "head-branch"
|
||||
|
||||
instance MonadIO m => HasCfgKey KeyRingFile (Maybe FilePath) m where
|
||||
key = "keyring"
|
||||
|
||||
instance MonadIO m => HasCfgKey KeyRingFiles (Set FilePath) m where
|
||||
key = "keyring"
|
||||
|
||||
instance MonadIO m => HasCfgKey StoragePref (Maybe FilePath) m where
|
||||
key = "storage"
|
||||
|
||||
tracePrefix :: SetLoggerEntry
|
||||
tracePrefix = toStderr . logPrefix "[trace] "
|
||||
|
||||
debugPrefix :: SetLoggerEntry
|
||||
debugPrefix = toStderr . logPrefix "[debug] "
|
||||
|
||||
errorPrefix :: SetLoggerEntry
|
||||
errorPrefix = toStderr . logPrefix "[error] "
|
||||
|
||||
warnPrefix :: SetLoggerEntry
|
||||
warnPrefix = toStderr . logPrefix "[warn] "
|
||||
|
||||
noticePrefix :: SetLoggerEntry
|
||||
noticePrefix = toStderr
|
||||
|
||||
infoPrefix :: SetLoggerEntry
|
||||
infoPrefix = toStderr
|
||||
|
||||
data WithLog = NoLog | WithLog
|
||||
|
||||
|
||||
instance MonadIO m => HasGlobalOptions (App m) where
|
||||
addGlobalOption k v =
|
||||
asks (view appOpts ) >>= \t -> liftIO $ atomically $
|
||||
modifyTVar' t (HashMap.insert k v)
|
||||
|
||||
getGlobalOption k = do
|
||||
hm <- asks (view appOpts) >>= liftIO . readTVarIO
|
||||
pure (HashMap.lookup k hm)
|
||||
|
||||
instance MonadIO m => HasRefCredentials (App m) where
|
||||
setCredentials ref cred = do
|
||||
asks (view appRefCred) >>= \t -> liftIO $ atomically $
|
||||
modifyTVar' t (HashMap.insert ref cred)
|
||||
|
||||
getCredentials ref = do
|
||||
hm <- asks (view appRefCred) >>= liftIO . readTVarIO
|
||||
pure (HashMap.lookup ref hm) `orDie` "keyring not set (1)"
|
||||
|
||||
instance MonadIO m => HasEncryptionKeys (App m) where
|
||||
addEncryptionKey ke = do
|
||||
asks (view appKeys) >>= \t -> liftIO $ atomically do
|
||||
modifyTVar' t (HashMap.insert (view krPk ke) (view krSk ke))
|
||||
|
||||
findEncryptionKey puk = (asks (view appKeys) >>= \t -> liftIO $ readTVarIO t) <&> HashMap.lookup puk
|
||||
|
||||
enumEncryptionKeys = do
|
||||
them <- (asks (view appKeys) >>= \t -> liftIO $ readTVarIO t) <&> HashMap.toList
|
||||
pure $ [KeyringEntry k s Nothing | (k,s) <- them ]
|
||||
|
||||
instance (Monad m, HasStorage m) => (HasStorage (ResourceT m)) where
|
||||
getStorage = lift getStorage
|
||||
|
||||
instance MonadIO m => HasStorage (App m) where
|
||||
getStorage = asks (rpcStorage . view appRpc) <&> AnyStorage . StorageClient
|
||||
|
||||
instance MonadIO m => HasRPC (App m) where
|
||||
getRPC = asks (view appRpc)
|
||||
|
||||
withApp :: MonadIO m => AppEnv -> App m a -> m a
|
||||
withApp env m = runReaderT (fromApp m) env
|
||||
|
||||
|
||||
detectRPC :: (MonadIO m, MonadThrow m) => Bool -> m FilePath
|
||||
detectRPC noisy = do
|
||||
(_, o, _) <- readProcess (shell [qc|hbs2-peer poke|])
|
||||
|
||||
let answ = parseTop (LBS.unpack o) & fromRight mempty
|
||||
|
||||
so <- case headMay [ Text.unpack r | ListVal (Key "rpc:" [LitStrVal r]) <- answ ] of
|
||||
Nothing -> throwM NoRPCException
|
||||
Just w -> pure w
|
||||
|
||||
when noisy do
|
||||
|
||||
-- FIXME: logger-to-support-colors
|
||||
liftIO $ hPutDoc stderr $ yellow "rpc: found RPC" <+> pretty so
|
||||
<> line <>
|
||||
yellow "rpc: add option" <+> parens ("rpc unix" <+> dquotes (pretty so))
|
||||
<+> "to the config .hbs2/config"
|
||||
<> line <> line
|
||||
|
||||
|
||||
pure so
|
||||
|
||||
runWithRPC :: forall m . (MonadUnliftIO m, MonadThrow m) => (RPCEndpoints -> m ()) -> m ()
|
||||
runWithRPC action = do
|
||||
|
||||
(_, syn) <- configInit
|
||||
|
||||
let soname' = lastMay [ Text.unpack n
|
||||
| ListVal (Key "rpc" [SymbolVal "unix", LitStrVal n]) <- syn
|
||||
]
|
||||
|
||||
soname <- race ( pause @'Seconds 1) (maybe (detectRPC True) pure soname') `orDie` "hbs2-peer rpc timeout!"
|
||||
|
||||
client <- race ( pause @'Seconds 1) (newMessagingUnix False 1.0 soname) `orDie` "hbs2-peer rpc timeout!"
|
||||
|
||||
rpc <- RPCEndpoints <$> makeServiceCaller (fromString soname)
|
||||
<*> makeServiceCaller (fromString soname)
|
||||
<*> makeServiceCaller (fromString soname)
|
||||
|
||||
messaging <- async $ runMessagingUnix client
|
||||
link messaging
|
||||
|
||||
let endpoints = [ Endpoint @UNIX (rpcPeer rpc)
|
||||
, Endpoint @UNIX (rpcStorage rpc)
|
||||
, Endpoint @UNIX (rpcRefLog rpc)
|
||||
]
|
||||
|
||||
c1 <- async $ liftIO $ runReaderT (runServiceClientMulti endpoints) client
|
||||
link c1
|
||||
|
||||
test <- race ( pause @'Seconds 1) (callService @RpcPoke (rpcPeer rpc) ()) `orDie` "hbs2-peer rpc timeout!"
|
||||
|
||||
void $ pure test `orDie` "hbs2-peer rpc error!"
|
||||
|
||||
debug $ "hbs2-peer RPC ok" <+> pretty soname
|
||||
|
||||
action rpc
|
||||
|
||||
cancel messaging
|
||||
|
||||
void $ waitAnyCatchCancel [messaging, c1]
|
||||
|
||||
runInit :: (MonadUnliftIO m, MonadThrow m) => m () -> m ()
|
||||
runInit m = m
|
||||
|
||||
runApp :: (MonadUnliftIO m, MonadThrow m) => WithLog -> App m () -> m ()
|
||||
runApp l m = do
|
||||
|
||||
flip UIO.catches dealWithException do
|
||||
|
||||
case l of
|
||||
NoLog -> pure ()
|
||||
WithLog -> do
|
||||
setLogging @ERROR errorPrefix
|
||||
setLogging @NOTICE noticePrefix
|
||||
setLogging @INFO infoPrefix
|
||||
|
||||
doTrace <- liftIO $ lookupEnv "HBS2TRACE" <&> isJust
|
||||
|
||||
if doTrace then do
|
||||
setLogging @DEBUG debugPrefix
|
||||
setLogging @TRACE tracePrefix
|
||||
else do
|
||||
setLoggingOff @DEBUG
|
||||
setLoggingOff @TRACE
|
||||
|
||||
evolve
|
||||
|
||||
(pwd, syn) <- Config.configInit
|
||||
|
||||
xdgstate <- getAppStateDir
|
||||
|
||||
runWithRPC $ \rpc -> do
|
||||
mtCred <- liftIO $ newTVarIO mempty
|
||||
mtKeys <- liftIO $ newTVarIO mempty
|
||||
mtOpt <- liftIO $ newTVarIO mempty
|
||||
let env = AppEnv pwd (pwd </> ".git") syn xdgstate mtCred mtKeys mtOpt rpc
|
||||
runReaderT (fromApp (loadKeys >> m)) (set appRpc rpc env)
|
||||
|
||||
debug $ vcat (fmap pretty syn)
|
||||
|
||||
setLoggingOff @DEBUG
|
||||
setLoggingOff @ERROR
|
||||
setLoggingOff @NOTICE
|
||||
setLoggingOff @TRACE
|
||||
setLoggingOff @INFO
|
||||
|
||||
where
|
||||
dealWithException = [ noWorkDir ]
|
||||
|
||||
noWorkDir = Handler $
|
||||
\NoWorkDirException -> liftIO do
|
||||
hPutDoc stderr $ "hbs2-git:" <+> red "*** no git working directory found."
|
||||
<+> yellow "Perhaps you'd call" <+> "'git init'" <+> "first"
|
||||
<> line
|
||||
exitFailure
|
||||
|
||||
readBlock :: forall m . (MonadIO m, HasStorage m) => HashRef -> m (Maybe ByteString)
|
||||
readBlock h = do
|
||||
sto <- getStorage
|
||||
liftIO $ getBlock sto (fromHashRef h)
|
||||
|
||||
readRef :: (HasStorage m, MonadIO m) => RepoRef -> m (Maybe HashRef)
|
||||
readRef ref = do
|
||||
sto <- getStorage
|
||||
liftIO (getRef sto ref) <&> fmap HashRef
|
||||
|
||||
readHashesFromBlock :: (MonadIO m, HasStorage m) => HashRef -> m [HashRef]
|
||||
readHashesFromBlock (HashRef h) = do
|
||||
treeQ <- liftIO newTQueueIO
|
||||
walkMerkle h (readBlock . HashRef) $ \hr -> do
|
||||
case hr of
|
||||
Left{} -> pure ()
|
||||
Right (hrr :: [HashRef]) -> liftIO $ atomically $ writeTQueue treeQ hrr
|
||||
re <- liftIO $ atomically $ flushTQueue treeQ
|
||||
pure $ mconcat re
|
||||
|
||||
type ObjType = MTreeAnn [HashRef]
|
||||
|
||||
readObject :: forall m . (MonadIO m, HasStorage m) => HashRef -> m (Maybe ByteString)
|
||||
readObject h = runMaybeT do
|
||||
|
||||
q <- liftIO newTQueueIO
|
||||
|
||||
-- trace $ "readObject" <+> pretty h
|
||||
|
||||
blk <- MaybeT $ readBlock h
|
||||
|
||||
ann <- MaybeT $ pure $ deserialiseOrFail @(MTreeAnn [HashRef]) blk & either (const Nothing) Just
|
||||
|
||||
walkMerkleTree (_mtaTree ann) (lift . readBlock . HashRef) $ \(hr :: Either (Hash HbSync) [HashRef]) -> do
|
||||
case hr of
|
||||
Left{} -> mzero
|
||||
Right (hrr :: [HashRef]) -> do
|
||||
for_ hrr $ \(HashRef hx) -> do
|
||||
block <- MaybeT $ readBlock (HashRef hx)
|
||||
liftIO $ atomically $ writeTQueue q block
|
||||
|
||||
mconcat <$> liftIO (atomically $ flushTQueue q)
|
||||
|
||||
calcRank :: forall m . (MonadIO m, HasStorage m) => HashRef -> m Int
|
||||
calcRank h = fromMaybe 0 <$> runMaybeT do
|
||||
|
||||
blk <- MaybeT $ readBlock h
|
||||
|
||||
ann <- MaybeT $ pure $ deserialiseOrFail @(MTree [HashRef]) blk & either (const Nothing) Just
|
||||
|
||||
n <- S.toList_ $ do
|
||||
walkMerkleTree ann (lift . readBlock . HashRef) $ \(hr :: Either (Hash HbSync) [HashRef]) -> do
|
||||
case hr of
|
||||
Left{} -> pure ()
|
||||
Right (hrr :: [HashRef]) -> do
|
||||
S.yield (List.length hrr)
|
||||
|
||||
pure $ sum n
|
||||
|
||||
postRefUpdate :: ( MonadIO m
|
||||
, MonadMask m
|
||||
, HasStorage m
|
||||
, HasConf m
|
||||
, HasRefCredentials m
|
||||
, HasEncryptionKeys m
|
||||
, HasRPC m
|
||||
, IsRefPubKey Schema
|
||||
)
|
||||
=> RepoRef
|
||||
-> Integer
|
||||
-> HashRef
|
||||
-> m ()
|
||||
|
||||
postRefUpdate ref seqno hash = do
|
||||
|
||||
cred <- getCredentials ref
|
||||
let pubk = view peerSignPk cred
|
||||
let privk = view peerSignSk cred
|
||||
|
||||
ann <- genKeysAnnotations ref
|
||||
|
||||
-- вот прямо сюда ОЧЕНЬ удобно вставить метаданные для GK1
|
||||
let tran = SequentialRef seqno (AnnotatedHashRef ann hash)
|
||||
let bs = serialise tran & LBS.toStrict
|
||||
|
||||
msg <- makeRefLogUpdate @HBS2L4Proto pubk privk bs
|
||||
|
||||
rpc <- getRPC <&> rpcRefLog
|
||||
|
||||
callService @RpcRefLogPost rpc msg
|
||||
>>= either (err . viaShow) (const $ pure ())
|
||||
|
||||
|
||||
storeObject :: ( MonadIO m
|
||||
, MonadMask m
|
||||
, HasStorage m
|
||||
, HasConf m
|
||||
, HasRefCredentials m
|
||||
, HasEncryptionKeys m
|
||||
)
|
||||
=> RepoRef
|
||||
-> ByteString
|
||||
-> ByteString
|
||||
-> m (Maybe HashRef)
|
||||
storeObject repo meta bs = do
|
||||
encrypted <- isRefEncrypted (fromRefLogKey repo)
|
||||
info $ "encrypted" <+> pretty repo <> colon <+> if encrypted then "yes" else "no"
|
||||
storeObjectRPC encrypted repo meta bs
|
||||
|
||||
|
||||
|
||||
data WriteOp = WritePlain | WriteEncrypted B8.ByteString
|
||||
|
||||
storeObjectRPC :: ( MonadIO m
|
||||
, MonadMask m
|
||||
, HasStorage m
|
||||
, HasConf m
|
||||
, HasRefCredentials m
|
||||
, HasEncryptionKeys m
|
||||
)
|
||||
=> Bool
|
||||
-> RepoRef
|
||||
-> ByteString
|
||||
-> ByteString
|
||||
-> m (Maybe HashRef)
|
||||
|
||||
storeObjectRPC False repo meta bs = do
|
||||
sto <- getStorage
|
||||
db <- makeDbPath repo >>= dbEnv
|
||||
|
||||
runMaybeT do
|
||||
|
||||
|
||||
h <- liftIO $ writeAsMerkle sto bs
|
||||
let txt = LBS.unpack meta & Text.pack
|
||||
blk <- MaybeT $ liftIO $ getBlock sto h
|
||||
|
||||
-- FIXME: fix-excess-data-roundtrip
|
||||
mtree <- MaybeT $ deserialiseOrFail @(MTree [HashRef]) blk
|
||||
& either (const $ pure Nothing) (pure . Just)
|
||||
|
||||
-- TODO: upadte-metadata-right-here
|
||||
let ann = serialise (MTreeAnn (ShortMetadata txt) NullEncryption mtree)
|
||||
MaybeT $ liftIO $ putBlock sto ann <&> fmap HashRef
|
||||
|
||||
|
||||
storeObjectRPC True repo meta bs = do
|
||||
|
||||
sto <- getStorage
|
||||
db <- makeDbPath repo >>= dbEnv
|
||||
|
||||
runMaybeT do
|
||||
|
||||
let txt = LBS.unpack meta & Text.pack
|
||||
|
||||
ki <- lift $ getKeyInfo (fromRefLogKey repo) >>= maybe noKeyInfo pure
|
||||
gkh0 <- withDB db $ stateGetLocalKey ki >>= maybe noKeyFound pure
|
||||
|
||||
gk0 <- runExceptT (readFromMerkle sto (SimpleKey (fromHashRef gkh0)))
|
||||
>>= either (const $ noKeyFound) (pure . deserialiseOrFail @(GroupKey 'Symm HBS2Basic))
|
||||
>>= either (const $ noKeyFound) pure
|
||||
|
||||
let pk = keyInfoOwner ki
|
||||
|
||||
sk <- lift (findEncryptionKey pk) >>= maybe noKeyFound pure
|
||||
|
||||
gks <- maybe noKeyFound pure (Symm.lookupGroupKey sk pk gk0)
|
||||
|
||||
let nonce = hashObject @HbSync bs & serialise
|
||||
& LBS.drop 2
|
||||
& LBS.toStrict
|
||||
|
||||
let bsStream = readChunkedBS bs defBlockSize
|
||||
|
||||
let source = ToEncryptSymmBS gks
|
||||
(Left gkh0 :: LoadedRef (GroupKey 'Symm HBS2Basic))
|
||||
nonce
|
||||
bsStream
|
||||
(ShortMetadata txt)
|
||||
Nothing
|
||||
|
||||
h <- runExceptT (writeAsMerkle sto source) >>= either (const cantWriteMerkle) pure
|
||||
|
||||
pure (HashRef h)
|
||||
|
||||
where
|
||||
|
||||
cantWriteMerkle :: forall a m . MonadIO m => m a
|
||||
cantWriteMerkle = die "Can't write encrypted merkle tree"
|
||||
|
||||
noKeyFound :: forall a m . MonadIO m => m a
|
||||
noKeyFound = do
|
||||
liftIO $ hPutDoc stderr (red $ "No group key found for repo" <+> pretty repo <> line)
|
||||
die "*** fatal"
|
||||
|
||||
noKeyInfo = do
|
||||
liftIO $ hPutDoc stderr (red $ pretty (noKeyInfoMsg repo) <> line)
|
||||
die "*** fatal"
|
||||
|
||||
|
||||
loadCredentials :: ( MonadIO m
|
||||
, HasConf m
|
||||
, HasRefCredentials m
|
||||
) => [FilePath] -> m ()
|
||||
loadCredentials fp = do
|
||||
|
||||
debug $ "loadCredentials" <+> pretty fp
|
||||
|
||||
krOpt' <- cfgValue @KeyRingFiles @(Set FilePath) <&> Set.toList
|
||||
|
||||
let krOpt = List.nub $ fp <> krOpt'
|
||||
|
||||
void $ runMaybeT do
|
||||
|
||||
when (null krOpt) do
|
||||
debug "keyring not set (2)"
|
||||
mzero
|
||||
|
||||
for_ krOpt $ \fn -> do
|
||||
(puk, cred) <- loadKeyring fn
|
||||
trace $ "got creds for" <+> pretty (AsBase58 puk)
|
||||
lift $ setCredentials (RefLogKey puk) cred
|
||||
pure ()
|
||||
|
||||
loadCredentials' ::
|
||||
( MonadIO m
|
||||
, HasRefCredentials m
|
||||
)
|
||||
=> FilePath -> m Sign.PublicKey
|
||||
loadCredentials' fn = do
|
||||
(puk, cred) <- runMaybeT (loadKeyring fn) `orDie` [qc|Can't load credentials {fn}|]
|
||||
trace $ "got creds for" <+> pretty (AsBase58 puk)
|
||||
setCredentials (RefLogKey puk) cred
|
||||
pure puk
|
||||
|
||||
loadKeyring :: (MonadIO m, MonadPlus m) => FilePath -> m (Sign.PublicKey, PeerCredentials Schema)
|
||||
loadKeyring fn = do
|
||||
krData <- liftIO $ B8.readFile fn
|
||||
|
||||
let cred' = parseCredentials @Schema (AsCredFile krData)
|
||||
|
||||
maybe1 cred' mzero $ \cred -> do
|
||||
let puk = view peerSignPk cred
|
||||
pure (puk, cred)
|
||||
|
||||
|
||||
makeFilter :: String -> (String, [String])
|
||||
makeFilter = norm . over _1 sub1 . over _2 List.singleton . go ""
|
||||
where
|
||||
go pref ( cn : cs ) | cn `elem` "?*" = (p0, p1 <> p2)
|
||||
where
|
||||
(p0, p1) = splitFileName pref
|
||||
p2 = cn : cs
|
||||
|
||||
go pref ( '/' : cn : cs ) | cn `elem` "?*" = (pref <> ['/'], cn : cs)
|
||||
|
||||
go pref ( c : cs ) = go (pref <> [c]) cs
|
||||
|
||||
go pref [] = (pref, "")
|
||||
|
||||
sub1 "" = "."
|
||||
sub1 x = x
|
||||
|
||||
norm (xs, [""]) = (p1, [p2])
|
||||
where
|
||||
(p1, p2) = splitFileName xs
|
||||
|
||||
norm x = x
|
||||
|
||||
loadKeys :: ( MonadIO m
|
||||
, HasConf m
|
||||
, HasEncryptionKeys m
|
||||
, HasGlobalOptions m
|
||||
) => m ()
|
||||
loadKeys = do
|
||||
conf <- getConf
|
||||
|
||||
trace $ "loadKeys"
|
||||
|
||||
found1 <- findKeyFiles =<< liftIO (lookupEnv "HBS2KEYS")
|
||||
found2 <- findKeyFiles =<< getGlobalOption "key"
|
||||
|
||||
found <- liftIO $ mapM canonicalizePath (found1 <> found2)
|
||||
|
||||
let enc = [ args | (ListVal (SymbolVal "encrypted" : (LitStrVal r) : args)) <- conf ]
|
||||
|
||||
let owners = [ fromStringMay @(PubKey 'Encrypt Schema) (Text.unpack o)
|
||||
| ListVal (Key "owner" [LitStrVal o]) :: Syntax C <- universeBi enc
|
||||
] & catMaybes & HashSet.fromList
|
||||
|
||||
|
||||
let members = [ fromStringMay @(PubKey 'Encrypt Schema) (Text.unpack o)
|
||||
| ListVal (Key "member" [LitStrVal o]) :: Syntax C <- universeBi enc
|
||||
] & catMaybes & HashSet.fromList
|
||||
|
||||
let decrypt = [ Text.unpack o
|
||||
| ListVal (Key "decrypt" [LitStrVal o]) <- conf
|
||||
]
|
||||
|
||||
let keyrings = [ Text.unpack o | (ListVal (Key "keyring" [LitStrVal o]) :: Syntax C)
|
||||
<- universeBi enc
|
||||
] <> decrypt <> found
|
||||
& List.nub
|
||||
|
||||
forM_ keyrings $ \k -> void $ runMaybeT do
|
||||
trace $ "loadKeys: keyring" <+> pretty k
|
||||
(_, pc) <- loadKeyring k
|
||||
|
||||
forM_ (view peerKeyring pc) $ \ke -> do
|
||||
let pk = view krPk ke
|
||||
|
||||
trace $ "loadKeyring: key" <+> pretty (AsBase58 pk)
|
||||
lift $ addEncryptionKey ke
|
||||
|
||||
|
||||
where
|
||||
findKeyFiles w = do
|
||||
let flt = makeFilter <$> w
|
||||
maybe1 flt (pure mempty) $
|
||||
\(p, fmask) -> liftIO do
|
||||
getDirectoryFiles p fmask <&> fmap (p</>)
|
||||
|
|
@@ -1,142 +0,0 @@
module HBS2Git.Config
  ( module HBS2Git.Config
  , module Data.Config.Suckless
  ) where

import HBS2.Prelude.Plated
import HBS2.Base58
import HBS2.System.Logger.Simple
import HBS2.OrDie

import Data.Config.Suckless

import HBS2Git.Types

import Control.Applicative

import Control.Exception
import Control.Monad.Catch (MonadThrow, throwM)
import System.FilePath
import System.Directory
import Data.Maybe
import Data.Either
import Data.List (isSuffixOf)
import Control.Monad.Trans.Maybe

import System.Environment

import System.IO (stderr)

data NoWorkDirException =
  NoWorkDirException
  deriving (Show, Typeable)

instance Exception NoWorkDirException

appName :: FilePath
appName = "hbs2-git"

-- Finds .git dir inside given directory moving upwards
findGitDir :: MonadIO m => FilePath -> m (Maybe FilePath)
findGitDir dir = liftIO do
  trace "locating .git directory"
  let gitDir = dir </> ".git"
  exists <- doesDirectoryExist gitDir
  if exists
    then return $ Just gitDir
    else let parentDir = takeDirectory dir
         in if parentDir == dir -- we've reached the root directory
              then return Nothing
              else findGitDir parentDir

configPathOld :: MonadIO m => FilePath -> m FilePath
configPathOld pwd = liftIO do
  xdg <- liftIO $ getXdgDirectory XdgConfig appName
  home <- liftIO getHomeDirectory
  pure $ xdg </> makeRelative home pwd

configPath :: (MonadIO m, MonadThrow m) => FilePath -> m FilePath
configPath _ = do
  pwd <- liftIO getCurrentDirectory
  git <- findGitDir pwd
  byEnv <- liftIO $ lookupEnv "GIT_DIR"

  bare <- if isJust (git <|> byEnv) then do
            pure Nothing
          else runMaybeT do
            -- check may be it's a bare git repo
            gitConf <- toMPlus =<< liftIO ( try @IOException $
                          readFile "config"
                            <&> parseTop
                            <&> fromRight mempty )

            let core = or [True | SymbolVal @C "core" <- universeBi gitConf]
            let bare = or [True | ListVal [SymbolVal @C "bare", _, SymbolVal "true"] <- universeBi gitConf ]
            let repo = or [True | SymbolVal @C "repositoryformatversion" <- universeBi gitConf ]

            if core && bare && repo then do
              pure pwd
            else
              MaybeT $ pure Nothing

  let maybePath = dropSuffix <$> (git <|> byEnv <|> bare)

  path <- maybe (throwM NoWorkDirException)
                pure
                maybePath

  pure (path </> ".hbs2")

  where
    dropSuffix s | isSuffixOf ".git/" s = takeDirectory s
                 | isSuffixOf ".git" s = takeDirectory s
                 | otherwise = s

data ConfigPathInfo = ConfigPathInfo {
    configRepoParentDir :: FilePath,
    configDir :: FilePath,
    configFilePath :: FilePath
  } deriving (Eq, Show)

-- returns git repository parent dir, config directory and config file path
getConfigPathInfo :: (MonadIO m, MonadThrow m) => m ConfigPathInfo
getConfigPathInfo = do
  trace "getConfigPathInfo"
  confP <- configPath ""
  let pwd = takeDirectory confP
  let confFile = confP </> "config"
  trace $ "confPath:" <+> pretty confP
  pure ConfigPathInfo {
      configRepoParentDir = pwd,
      configDir = confP,
      configFilePath = confFile
    }

-- returns current directory, where found .git directory
configInit :: (MonadIO m, MonadThrow m) => m (FilePath, [Syntax C])
configInit = liftIO do
  trace "configInit"
  ConfigPathInfo{..} <- getConfigPathInfo
  here <- doesDirectoryExist configDir
  unless here do
    debug $ "create directory" <+> pretty configDir
    createDirectoryIfMissing True configDir
  confHere <- doesFileExist configFilePath
  unless confHere do
    appendFile configFilePath ""
  cfg <- readFile configFilePath <&> parseTop <&> either mempty id
  pure (configRepoParentDir, cfg)

cookieFile :: (MonadIO m, MonadThrow m) => m FilePath
cookieFile = configPath "" <&> (</> "cookie")

getAppStateDir :: forall m . MonadIO m => m FilePath
getAppStateDir = liftIO $ getXdgDirectory XdgData appName


makeDbPath :: MonadIO m => RepoRef -> m FilePath
makeDbPath h = do
  state <- getAppStateDir
  liftIO $ createDirectoryIfMissing True state
  pure $ state </> show (pretty (AsBase58 h))

@@ -1,55 +0,0 @@
module HBS2Git.Encryption
  ( module HBS2Git.Encryption
  , module HBS2Git.Encryption.KeyInfo
  , module HBS2.Net.Auth.GroupKeySymm
  ) where

import HBS2Git.Prelude

import HBS2.Net.Auth.Credentials
import HBS2.Net.Proto.Types hiding (Cookie)
import HBS2.Net.Auth.GroupKeySymm hiding (Cookie)


import HBS2Git.Encryption.KeyInfo

import Data.Config.Suckless.Syntax
import Data.Config.Suckless.KeyValue

import Data.HashSet qualified as HashSet
import Data.Maybe
import Data.Text qualified as Text
import Data.Time.Clock.POSIX

-- type ForEncryption ?

isRefEncrypted :: (MonadIO m, HasConf m) => PubKey 'Sign HBS2Basic -> m Bool
isRefEncrypted ref = do
  conf <- getConf

  let ee = [ True
           | (ListVal (SymbolVal "encrypted" : (LitStrVal r) : _)) <- conf
           , fromStringMay (Text.unpack r) == Just ref
           ]

  -- liftIO $ hPutDoc stderr $ "isRefEncrypted" <+> pretty (AsBase58 ref) <+> pretty ee <+> pretty (not (null ee)) <> line

  pure $ not $ null ee

getKeyInfo :: (MonadIO m, HasConf m) => PubKey 'Sign HBS2Basic -> m (Maybe KeyInfo)
getKeyInfo ref = do
  conf <- getConf

  now <- liftIO getPOSIXTime
  let every = [ keyInfoFrom now syn | syn <- conf
              , isJust (keyInfoFrom now syn)
              ] & catMaybes

  pure (lastMay [ x | x <- every, keyInfoRef x == ref ])


genGK0 :: (MonadIO m) => KeyInfo -> m (GroupKey 'Symm HBS2Basic)
genGK0 ki = generateGroupKey @HBS2Basic Nothing members
  where
    members = HashSet.toList ( keyInfoOwner ki `HashSet.insert` keyInfoMembers ki )

@ -1,56 +0,0 @@
{-# Language UndecidableInstances #-}
module HBS2Git.Encryption.KeyInfo where

import HBS2.Prelude.Plated
import HBS2.Hash
import HBS2.Net.Auth.Credentials

import HBS2.Net.Proto.Types hiding (Cookie)

import Data.Config.Suckless.Syntax
import Data.Config.Suckless.KeyValue

import Codec.Serialise
import Data.HashSet
import Data.HashSet qualified as HashSet
import Data.Text qualified as Text
import Data.Time.Clock.POSIX (POSIXTime)
import Data.Maybe


data KeyInfo =
  KeyInfo
  { keyInfoNonce   :: Integer
  , keyInfoRef     :: PubKey 'Sign HBS2Basic
  , keyInfoOwner   :: PubKey 'Encrypt HBS2Basic
  , keyInfoMembers :: HashSet (PubKey 'Encrypt HBS2Basic)
  }
  deriving (Eq,Ord,Show,Generic)

type ForKeys s = (Serialise (PubKey 'Sign s), Serialise (PubKey 'Encrypt s))

instance ForKeys HBS2Basic => Serialise KeyInfo

instance ForKeys HBS2Basic => Hashed HbSync KeyInfo where
  hashObject ki = hashObject (serialise ki)


keyInfoFrom :: POSIXTime -> Syntax C -> Maybe KeyInfo
keyInfoFrom t (ListVal (SymbolVal "encrypted" : (LitStrVal r) : args)) =
  KeyInfo <$> nonce
          <*> ref
          <*> owner
          <*> members

  where
    nonce   = Just $ maybe 0 (round t `div`) ttl
    ref     = fromStringMay (Text.unpack r)
    ttl     = Just $ lastDef 86400 [ x | ListVal (Key "ttl" [LitIntVal x]) <- args ]
    owner   = fromStringMay =<< lastMay [ Text.unpack o | ListVal (Key "owner" [LitStrVal o]) <- args ]
    members = Just $ HashSet.fromList
                   $ catMaybes
                     [ fromStringMay (Text.unpack o) | ListVal (Key "member" [LitStrVal o]) <- args ]

    -- keypath = lastMay [ Text.unpack p | ListVal @C (Key "keyring" [LitStrVal p]) <- args ]

keyInfoFrom _ _ = Nothing
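-- Editorial note (illustration, not part of the original module): keyInfoFrom
-- matches an `encrypted` form in the repo config; a hypothetical entry could
-- look roughly like this in suckless-conf syntax (all key strings here are
-- placeholders):
--
--   encrypted "REFLOG-SIGN-PUBKEY"
--     (ttl 86400)
--     (owner "OWNER-ENCRYPT-PUBKEY")
--     (member "MEMBER-ENCRYPT-PUBKEY")
--     (member "ANOTHER-MEMBER-PUBKEY")
--
-- ttl defaults to 86400 when omitted; the nonce is derived as the current POSIX
-- time divided by ttl, so the resulting KeyInfo changes once per ttl period.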
@ -1,108 +0,0 @@
|
|||
module HBS2Git.Evolve (evolve,makePolled) where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.System.Logger.Simple
|
||||
import HBS2.Net.Proto.Service
|
||||
|
||||
import HBS2.Peer.RPC.API.Peer
|
||||
|
||||
import HBS2Git.Types
|
||||
import HBS2Git.Config
|
||||
import HBS2Git.PrettyStuff
|
||||
|
||||
import Control.Monad.Trans.Maybe
|
||||
import Control.Monad.Catch (MonadThrow(..))
|
||||
import Data.List qualified as List
|
||||
import System.Directory
|
||||
import System.Random
|
||||
import System.FilePath
|
||||
import UnliftIO
|
||||
|
||||
-- NOTE: hbs2-git-evolve
-- performs idempotent migrations between old and
-- new versions.
-- for example, it moves the config over
|
||||
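-- (editorial note: concretely, migrateConfig below moves the old per-repo config
-- file into <repo>/.hbs2/config, and generateCookie creates an empty cookie file
-- next to it if one does not exist yet)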
|
||||
evolve :: (MonadIO m, MonadThrow m) => m ()
|
||||
evolve = void $ runMaybeT do
|
||||
|
||||
here <- liftIO getCurrentDirectory
|
||||
|
||||
debug $ "evolve: current directory:" <+> pretty here
|
||||
|
||||
cfg <- configPath ""
|
||||
|
||||
debug $ "*** GIT DIRECTORY" <+> pretty cfg
|
||||
|
||||
migrateConfig
|
||||
generateCookie
|
||||
|
||||
|
||||
makePolled :: (MonadIO m, HasRPC m) => RepoRef -> m ()
|
||||
makePolled ref = do
|
||||
rpc <- getRPC <&> rpcPeer
|
||||
n <- liftIO $ randomRIO (4,7)
|
||||
void $ callService @RpcPollAdd rpc (fromRefLogKey ref, "reflog", n)
|
||||
|
||||
generateCookie :: (MonadIO m, MonadThrow m) => m ()
|
||||
generateCookie = void $ runMaybeT do
|
||||
file <- cookieFile
|
||||
|
||||
guard =<< liftIO (not <$> doesFileExist file)
|
||||
|
||||
-- NOTE: cookie-note
-- since the cookie must be unique within a database,
-- and here we do not yet know which database we are working with,
-- we postpone cookie generation until the database is created.
-- Soon there will be only one database, but for now that is not the case
|
||||
liftIO $ writeFile file ""
|
||||
|
||||
|
||||
migrateConfig :: (MonadIO m, MonadThrow m) => m ()
|
||||
migrateConfig = void $ runMaybeT do
|
||||
here <- liftIO getCurrentDirectory
|
||||
|
||||
rootDir <- configPath "" <&> takeDirectory
|
||||
|
||||
oldPath <- configPathOld here
|
||||
let oldConf = oldPath </> "config"
|
||||
|
||||
let newConfDir = rootDir </> ".hbs2"
|
||||
let newConfFile = newConfDir </> "config"
|
||||
|
||||
guard =<< liftIO (not <$> doesFileExist newConfFile)
|
||||
|
||||
trace $ "EVOLVE: root directory" <+> pretty newConfDir
|
||||
|
||||
confFileHere <- liftIO $ doesFileExist newConfFile
|
||||
|
||||
guard (not confFileHere)
|
||||
|
||||
liftIO do
|
||||
hPutDoc stderr $ red "evolve: creating new config" <+> pretty newConfFile <> line
|
||||
createDirectoryIfMissing True newConfDir
|
||||
|
||||
appendFile newConfFile ""
|
||||
|
||||
oldHere <- doesFileExist oldConf
|
||||
|
||||
when oldHere do
|
||||
hPutDoc stderr $ red "evolve: moving config to" <+> pretty newConfFile <> line
|
||||
liftIO $ renameFile oldConf newConfFile
|
||||
|
||||
anything <- liftIO $ listDirectory oldPath
|
||||
|
||||
if List.null anything then do
|
||||
hPutDoc stderr $ red "evolve: removing"
|
||||
<+> pretty oldPath <> line
|
||||
|
||||
removeDirectory oldPath
|
||||
else do
|
||||
hPutDoc stderr $ red "evolve: not empty" <+> pretty oldPath <> line
|
||||
|
||||
hPutDoc stderr $ yellow "evolve: remove"
|
||||
<+> pretty oldPath
|
||||
<+> yellow "on your own"
|
||||
<> line
|
||||
|
||||
|
|
@ -1,540 +0,0 @@
|
|||
{-# Language AllowAmbiguousTypes #-}
|
||||
{-# Language RankNTypes #-}
|
||||
{-# Language TemplateHaskell #-}
|
||||
module HBS2Git.Export
|
||||
( exportRefDeleted
|
||||
, exportRefOnly
|
||||
, runExport
|
||||
, runExport'
|
||||
, ExportRepoOps
|
||||
) where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.OrDie
|
||||
import HBS2.System.Logger.Simple
|
||||
import HBS2.Base58
|
||||
import HBS2.Peer.Proto
|
||||
|
||||
import HBS2.Git.Local
|
||||
import HBS2.Git.Local.CLI
|
||||
|
||||
import HBS2Git.App
|
||||
import HBS2Git.State
|
||||
import HBS2Git.Config
|
||||
import HBS2Git.KeysMetaData
|
||||
import HBS2Git.GitRepoLog
|
||||
import HBS2Git.PrettyStuff
|
||||
|
||||
import Control.Applicative
|
||||
import Control.Monad.Catch
|
||||
import Control.Monad.Reader
|
||||
import Control.Concurrent.STM
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.HashSet qualified as HashSet
|
||||
import Data.HashSet (HashSet)
|
||||
import Data.Maybe
|
||||
import Data.Set qualified as Set
|
||||
import Data.Map qualified as Map
|
||||
import Data.List qualified as List
|
||||
import Lens.Micro.Platform
|
||||
import Prettyprinter.Render.Terminal
|
||||
import System.Directory
|
||||
import System.FilePath
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import UnliftIO.IO
|
||||
import System.IO hiding (hClose,hPrint,hPutStrLn,hFlush)
|
||||
import System.IO.Temp
|
||||
import Control.Monad.Trans.Resource
|
||||
import Data.List.Split (chunksOf)
|
||||
import Codec.Compression.GZip
|
||||
import Control.Monad.Trans.Maybe
|
||||
|
||||
class ExportRepoOps a where
|
||||
|
||||
instance ExportRepoOps ()
|
||||
|
||||
data ExportEnv =
|
||||
ExportEnv
|
||||
{ _exportDB :: DBEnv
|
||||
, _exportWritten :: TVar (HashSet GitHash)
|
||||
, _exportFileName :: FilePath
|
||||
, _exportDir :: FilePath
|
||||
, _exportRepo :: RepoRef
|
||||
, _exportReadObject :: GitHash -> IO (Maybe GitObject)
|
||||
}
|
||||
|
||||
makeLenses 'ExportEnv
|
||||
|
||||
|
||||
exportRefDeleted :: forall o m . ( MonadIO m
|
||||
, MonadCatch m
|
||||
, MonadMask m
|
||||
, MonadUnliftIO m
|
||||
, HasConf m
|
||||
, HasRefCredentials m
|
||||
, HasEncryptionKeys m
|
||||
, HasProgress m
|
||||
, HasStorage m
|
||||
, HasRPC m
|
||||
, ExportRepoOps o
|
||||
)
|
||||
=> o
|
||||
-> RepoRef
|
||||
-> GitRef
|
||||
-> m HashRef
|
||||
exportRefDeleted _ repo ref = do
|
||||
trace $ "exportRefDeleted" <+> pretty repo <+> pretty ref
|
||||
|
||||
dbPath <- makeDbPath repo
|
||||
db <- dbEnv dbPath
|
||||
|
||||
let opts = ()
|
||||
|
||||
-- this is an "abnormal" log, i.e. a reference deletion in the current context:
-- we delete the reference "over there", so we need "that" value of the reference;
-- locally we can delete it anyway, simply with git.
-- NOTE: empty-log-post
-- here we post an empty log (one containing no commits).
-- we will need to find its position relative to the other logs.
-- its context = the current value of the reference we are deleting,
-- but if we delete an already deleted reference, then for the 00..0 reference
-- there will be an error somewhere.
|
||||
|
||||
vals <- withDB db $ stateGetLastKnownCommits 10
|
||||
let (ctxHead, ctxBs) = makeContextEntry vals
|
||||
|
||||
trace $ "DELETING REF CONTEXT" <+> pretty vals
|
||||
|
||||
let repoHead = RepoHead Nothing (HashMap.fromList [(ref,"0000000000000000000000000000000000000000")])
|
||||
let repoHeadStr = (LBS.pack . show . pretty . AsGitRefsFile) repoHead
|
||||
let ha = gitHashObject (GitObject Blob repoHeadStr)
|
||||
let headEntry = GitLogEntry GitLogEntryHead (Just ha) ( fromIntegral $ LBS.length repoHeadStr )
|
||||
|
||||
r <- fromMaybe 0 <$> runMaybeT do
|
||||
h <- MaybeT $ readRef repo
|
||||
calcRank h
|
||||
|
||||
let rankBs = serialise (GitLogContextRank r)
|
||||
let rank = GitLogEntry GitLogContext Nothing (fromIntegral $ LBS.length rankBs)
|
||||
|
||||
let content = gitRepoLogMakeEntry opts ctxHead ctxBs
|
||||
<> gitRepoLogMakeEntry opts headEntry repoHeadStr
|
||||
<> gitRepoLogMakeEntry opts rank rankBs
|
||||
|
||||
-- FIXME: remove-code-dup
|
||||
let meta = fromString $ show
|
||||
$ "hbs2-git" <> line
|
||||
<> "type:" <+> "hbs2-git-push-log"
|
||||
<> line
|
||||
|
||||
updateGK0 repo
|
||||
|
||||
logMerkle <- storeObject repo meta content `orDie` [qc|Can't store push log|]
|
||||
postRefUpdate repo 0 logMerkle
|
||||
pure logMerkle
|
||||
|
||||
makeContextEntry :: [GitHash] -> (GitLogEntry, LBS.ByteString)
|
||||
makeContextEntry hashes = (entryHead, payload)
|
||||
where
|
||||
ha = Nothing
|
||||
payload = GitLogContextCommits (HashSet.fromList hashes) & serialise
|
||||
entryHead = GitLogEntry GitLogContext ha undefined
|
||||
|
||||
|
||||
newtype ExportT m a = ExportT { fromExportT :: ReaderT ExportEnv m a }
|
||||
deriving newtype ( Functor
|
||||
, Applicative
|
||||
, Monad
|
||||
, MonadIO
|
||||
, MonadTrans
|
||||
, MonadReader ExportEnv
|
||||
, MonadMask
|
||||
, MonadCatch
|
||||
, MonadThrow
|
||||
)
|
||||
|
||||
instance (Monad m, HasStorage m) => HasStorage (ExportT m) where
|
||||
getStorage = lift getStorage
|
||||
|
||||
instance (Monad m, HasConf m) => HasConf (ExportT m) where
|
||||
getConf = lift getConf
|
||||
|
||||
instance (Monad m, HasRPC m) => HasRPC (ExportT m) where
|
||||
getRPC = lift getRPC
|
||||
|
||||
instance (Monad m, HasEncryptionKeys m) => HasEncryptionKeys (ExportT m) where
|
||||
addEncryptionKey = lift . addEncryptionKey
|
||||
findEncryptionKey k = lift $ findEncryptionKey k
|
||||
enumEncryptionKeys = lift enumEncryptionKeys
|
||||
|
||||
withExportEnv :: MonadIO m => ExportEnv -> ExportT m a -> m a
|
||||
withExportEnv env f = runReaderT (fromExportT f) env
|
||||
|
||||
writeLogSegments :: forall m . ( MonadIO m
|
||||
, HasStorage m
|
||||
, HasRPC m
|
||||
, MonadMask m
|
||||
, HasRefCredentials m
|
||||
, HasEncryptionKeys m
|
||||
, HasConf m
|
||||
)
|
||||
=> ( Int -> m () )
|
||||
-> RepoRef
|
||||
-> GitHash
|
||||
-> [GitHash]
|
||||
-> Int
|
||||
-> [(GitLogEntry, LBS.ByteString)]
|
||||
-> ExportT m [HashRef]
|
||||
|
||||
writeLogSegments onProgress repo val objs chunkSize trailing = do
|
||||
|
||||
db <- asks $ view exportDB
|
||||
written <- asks $ view exportWritten
|
||||
fname <- asks $ view exportFileName
|
||||
dir <- asks $ view exportDir
|
||||
remote <- asks $ view exportRepo
|
||||
readGit <- asks $ view exportReadObject
|
||||
|
||||
let opts = CompressWholeLog
|
||||
|
||||
-- TODO: options-for-compression-level
-- remember that everything is immutable: once we post something,
-- it will live like that forever
|
||||
let compressOpts = defaultCompressParams { compressLevel = bestSpeed }
|
||||
|
||||
rank <- fromMaybe 0 <$> runMaybeT do
|
||||
h <- MaybeT $ readRef remote
|
||||
calcRank h <&> fromIntegral
|
||||
|
||||
-- FIXME: fix-code-dup
|
||||
let meta = fromString $ show
|
||||
$ "hbs2-git"
|
||||
<> line
|
||||
<> "type:" <+> "hbs2-git-push-log"
|
||||
<> line
|
||||
<> "flags:" <+> "gz:sgmt"
|
||||
<> line
|
||||
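-- Editorial note (not in the original source): rendered, this metadata is a
-- small plain-text header stored alongside the push-log merkle tree, roughly:
--
--   hbs2-git
--   type: hbs2-git-push-log
--   flags: gz:sgmt
--
-- the import side looks for exactly the "type:" and "flags:" keys.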
|
||||
let segments = chunksOf chunkSize objs
|
||||
let totalSegments = length segments
|
||||
|
||||
-- TODO: no-sense-in-temp-files
-- temporary files no longer make sense, because we
-- 1) slice the data into small segments
-- 2) read them back into memory anyway in order to gzip them;
-- this should be removed, it will work a bit faster
|
||||
|
||||
r <- forM (zip segments [1..]) $ \(segment, segmentIndex) -> do
|
||||
let fpath = dir </> fname <> "_" <> show segmentIndex
|
||||
bracket (liftIO $ openBinaryFile fpath AppendMode)
|
||||
(const $ pure ()) $ \fh -> do
|
||||
for_ segment $ \d -> do
|
||||
here <- liftIO $ readTVarIO written <&> HashSet.member d
|
||||
inState <- withDB db (stateIsLogObjectExists d)
|
||||
|
||||
lift $ onProgress 1
|
||||
|
||||
unless (here || inState) do
|
||||
|
||||
GitObject tp o <- liftIO $ readGit d `orDie` [qc|error reading object {pretty d}|]
|
||||
|
||||
let entry = GitLogEntry ( gitLogEntryTypeOf tp ) (Just d) ( fromIntegral $ LBS.length o )
|
||||
gitRepoLogWriteEntry opts fh entry o
|
||||
liftIO $ atomically $ modifyTVar written (HashSet.insert d)
|
||||
|
||||
-- gitRepoLogWriteEntry fh ctx ctxBs
|
||||
|
||||
trace $ "writing" <+> pretty tp <+> pretty d
|
||||
|
||||
when (segmentIndex == totalSegments) $ do
|
||||
for_ trailing $ \(e, bs) -> do
|
||||
gitRepoLogWriteEntry opts fh e bs
|
||||
|
||||
-- finalize log section
|
||||
hClose fh
|
||||
|
||||
content <- liftIO $ LBS.readFile fpath
|
||||
|
||||
let gzipped = compressWith compressOpts content
|
||||
|
||||
-- let nonce = hashObject @HbSync (serialise segments)
|
||||
logMerkle <- lift $ storeObject repo meta gzipped `orDie` [qc|Can't store push log|]
|
||||
|
||||
trace $ "PUSH LOG HASH: " <+> pretty logMerkle
|
||||
trace $ "POSTING REFERENCE UPDATE TRANSACTION" <+> pretty remote <+> pretty logMerkle
|
||||
|
||||
lift $ postRefUpdate remote rank logMerkle
|
||||
|
||||
pure logMerkle
|
||||
|
||||
|
||||
if not (null r) then do
|
||||
pure r
|
||||
else do
|
||||
let content = foldMap (uncurry (gitRepoLogMakeEntry opts)) trailing
|
||||
& compressWith compressOpts
|
||||
|
||||
logMerkle <- lift $ storeObject repo meta content `orDie` [qc|Can't store push log|]
|
||||
|
||||
lift $ postRefUpdate remote rank logMerkle
|
||||
|
||||
pure [logMerkle]
|
||||
|
||||
-- | Exports only one ref to the repo.
|
||||
-- Corresponds to a single ```git push``` operation
|
||||
exportRefOnly :: forall o m . ( MonadIO m
|
||||
, MonadCatch m
|
||||
, MonadMask m
|
||||
, MonadUnliftIO m
|
||||
, HasConf m
|
||||
, HasRefCredentials m
|
||||
, HasEncryptionKeys m
|
||||
, HasProgress m
|
||||
, HasStorage m
|
||||
, HasRPC m
|
||||
, ExportRepoOps o
|
||||
)
|
||||
=> o
|
||||
-> RepoRef
|
||||
-> Maybe GitRef
|
||||
-> GitRef
|
||||
-> GitHash
|
||||
-> m (Maybe HashRef)
|
||||
|
||||
exportRefOnly _ remote rfrom ref val = do
|
||||
|
||||
let repoHead = RepoHead Nothing (HashMap.fromList [(ref,val)])
|
||||
|
||||
let repoHeadStr = (LBS.pack . show . pretty . AsGitRefsFile) repoHead
|
||||
|
||||
dbPath <- makeDbPath remote
|
||||
db <- dbEnv dbPath
|
||||
|
||||
r <- fromMaybe 0 <$> runMaybeT do
|
||||
h <- MaybeT $ readRef remote
|
||||
calcRank h
|
||||
|
||||
updateGK0 remote
|
||||
|
||||
trace $ "exportRefOnly" <+> pretty remote <+> pretty ref <+> pretty val
|
||||
|
||||
-- 1. get max ref value for the known REMOTE branch
-- 2. if unknown - get max branch ref value for the known LOCAL branch (known from the state)
-- 3. if unknown - then Nothing
-- therefore, we export only the delta of objects for the push between the known state and the
-- current git repo state.
-- if it's a new branch push without any objects committed -- then an empty log
-- with only a HEAD section should be created
|
||||
lastKnownRev <- withDB db do
|
||||
rThat <- stateGetActualRefValue ref
|
||||
rThis <- maybe1 rfrom (pure Nothing) stateGetActualRefValue
|
||||
pure $ rThat <|> rThis
|
||||
|
||||
trace $ "LAST_KNOWN_REV" <+> braces (pretty rfrom) <+> braces (pretty ref) <+> braces (pretty lastKnownRev)
|
||||
|
||||
entries <- traceTime "gitRevList" $ gitRevList lastKnownRev val
|
||||
|
||||
let entryNum = length entries
|
||||
|
||||
-- NOTE: just-for-test-new-non-empty-push-to-another-branch-112
|
||||
|
||||
-- FIXME: may-blow-on-huge-repo-export
|
||||
types <- traceTime "gitGetObjectTypeMany" $ gitGetObjectTypeMany entries <&> Map.fromList
|
||||
|
||||
let lookupType t = Map.lookup t types
|
||||
|
||||
let onEntryType e = (fx $ lookupType e, e)
|
||||
where fx = \case
|
||||
Just Blob -> 0
|
||||
Just Tree -> 1
|
||||
Just Commit -> 2
|
||||
Nothing -> 3
|
||||
|
||||
trace $ "ENTRIES:" <+> pretty (length entries)
|
||||
|
||||
trace "MAKING OBJECTS LOG"
|
||||
|
||||
let fname = [qc|{pretty val}.data|]
|
||||
|
||||
-- TODO: investigate-on-signal-behaviour
-- it looks like an incoming signal is not handled here,
-- and the temporary directory is left behind
|
||||
runResourceT $ do
|
||||
|
||||
gitCatFile <- startGitCatFile
|
||||
|
||||
written <- liftIO $ newTVarIO (HashSet.empty :: HashSet GitHash)
|
||||
|
||||
let myTempDir = "hbs-git"
|
||||
temp <- liftIO getCanonicalTemporaryDirectory
|
||||
|
||||
(_,dir) <- allocate (createTempDirectory temp myTempDir) removeDirectoryRecursive
|
||||
|
||||
let (blobs, notBlobs) = List.partition (\e -> fst (onEntryType e) == 0) entries
|
||||
let (trees, notTrees) = List.partition (\e -> fst (onEntryType e) == 1) notBlobs
|
||||
-- FIXME: others-might-be-tags
|
||||
let (commits, others) = List.partition (\e -> fst (onEntryType e) == 2) notTrees
|
||||
|
||||
-- FIXME: hbs2-git-size-hardcode-to-args
|
||||
let batch = 20000
|
||||
let objects = blobs <> trees <> others <> commits
|
||||
mon <- newProgressMonitor "write objects" (length objects)
|
||||
|
||||
let env = ExportEnv
|
||||
{ _exportDB = db
|
||||
, _exportWritten = written
|
||||
, _exportFileName = fname
|
||||
, _exportDir = dir
|
||||
, _exportRepo = remote
|
||||
, _exportReadObject = gitReadFromCatFileBatch gitCatFile
|
||||
}
|
||||
|
||||
|
||||
let ha = gitHashObject (GitObject Blob repoHeadStr)
|
||||
let headEntry = GitLogEntry GitLogEntryHead (Just ha) ( fromIntegral $ LBS.length repoHeadStr )
|
||||
|
||||
let upd = updateProgress mon
|
||||
|
||||
vals <- withDB db $ stateGetLastKnownCommits 10
|
||||
let (ctx, ctxBs) = makeContextEntry (List.nub $ val:vals)
|
||||
|
||||
let rankBs = serialise (GitLogContextRank r)
|
||||
let rank = GitLogEntry GitLogContext Nothing (fromIntegral $ LBS.length rankBs)
|
||||
|
||||
-- we need context entries to determine the log HEAD operation sequence,
-- so only the last section needs them, along with headEntry
|
||||
logz <- lift $ withExportEnv env (writeLogSegments upd remote val objects batch [ (ctx, ctxBs)
|
||||
, (rank, rankBs)
|
||||
, (headEntry, repoHeadStr)
|
||||
])
|
||||
|
||||
-- NOTE: we return only the last log section,
-- in order to keep compatibility
|
||||
pure $ lastMay logz
|
||||
|
||||
---
|
||||
|
||||
|
||||
|
||||
runExport :: forall m . ( MonadIO m
|
||||
, MonadUnliftIO m
|
||||
, MonadCatch m
|
||||
, HasProgress (App m)
|
||||
, MonadMask (App m)
|
||||
, HasStorage (App m)
|
||||
, HasRPC (App m)
|
||||
, HasEncryptionKeys (App m)
|
||||
)
|
||||
|
||||
=> Maybe FilePath -> RepoRef -> App m ()
|
||||
runExport mfp repo = do
|
||||
loadCredentials (maybeToList mfp)
|
||||
loadKeys
|
||||
let krf = fromMaybe "keyring-file" mfp & takeFileName
|
||||
runExport'' krf repo
|
||||
|
||||
---
|
||||
|
||||
runExport' :: forall m . ( MonadIO m
|
||||
, MonadUnliftIO m
|
||||
, MonadCatch m
|
||||
, HasProgress (App m)
|
||||
, MonadMask (App m)
|
||||
, HasStorage (App m)
|
||||
, HasRPC (App m)
|
||||
, HasEncryptionKeys (App m)
|
||||
)
|
||||
|
||||
=> FilePath -> App m ()
|
||||
|
||||
runExport' fp = do
|
||||
repo <- loadCredentials' fp
|
||||
loadKeys
|
||||
runExport'' (takeFileName fp) (RefLogKey repo)
|
||||
|
||||
---
|
||||
|
||||
runExport'' :: forall m . ( MonadIO m
|
||||
, MonadUnliftIO m
|
||||
, MonadCatch m
|
||||
, HasProgress (App m)
|
||||
, MonadMask (App m)
|
||||
, HasStorage (App m)
|
||||
, HasRPC (App m)
|
||||
)
|
||||
|
||||
=> FilePath -> RepoRef -> App m ()
|
||||
runExport'' krf repo = do
|
||||
|
||||
liftIO $ putDoc $
|
||||
line
|
||||
<> green "Exporting to reflog" <+> pretty (AsBase58 repo)
|
||||
<> section
|
||||
<> "it may take some time on the first run"
|
||||
<> section
|
||||
|
||||
git <- asks (view appGitDir)
|
||||
|
||||
trace $ "git directory is" <+> pretty git
|
||||
|
||||
-- FIXME: wtf-runExport
|
||||
branchesGr <- cfgValue @ConfBranch <&> Set.map normalizeRef
|
||||
|
||||
headBranch <- gitGetBranchHEAD `orDie` "undefined HEAD for repo"
|
||||
|
||||
refs <- gitListLocalBranches
|
||||
<&> filter (\x -> Set.null branchesGr || Set.member (fst x) branchesGr)
|
||||
|
||||
trace $ "REFS" <+> pretty refs
|
||||
|
||||
fullHead <- gitHeadFullName headBranch
|
||||
|
||||
-- debug $ "HEAD" <+> pretty fullHead
|
||||
|
||||
-- let repoHead = RepoHead (Just fullHead)
|
||||
-- (HashMap.fromList refs)
|
||||
|
||||
-- trace $ "NEW REPO HEAD" <+> pretty (AsGitRefsFile repoHead)
|
||||
|
||||
val <- gitGetHash fullHead `orDie` [qc|Can't resolve ref {pretty fullHead}|]
|
||||
|
||||
-- _ <- exportRefOnly () remote br gh
|
||||
hhh <- exportRefOnly () repo Nothing fullHead val
|
||||
|
||||
-- NOTE: ???
|
||||
-- traceTime "importRefLogNew (export)" $ importRefLogNew False repo
|
||||
|
||||
shutUp
|
||||
|
||||
cwd <- liftIO getCurrentDirectory
|
||||
cfgPath <- configPath cwd
|
||||
|
||||
liftIO $ putStrLn ""
|
||||
liftIO $ putDoc $
|
||||
"exported" <+> pretty hhh
|
||||
<> section
|
||||
<> green "Repository config:" <+> pretty (cfgPath </> "config")
|
||||
<> section
|
||||
<> "Put the keyring file" <+> yellow (pretty krf) <+> "into a safe place," <> line
|
||||
<> "like encrypted directory or volume."
|
||||
<> section
|
||||
<> "You will need this keyring to push into the repository."
|
||||
<> section
|
||||
<> green "Add keyring into the repo's config:"
|
||||
<> section
|
||||
<> "keyring" <+> pretty [qc|"/my/safe/place/{krf}"|]
|
||||
<> section
|
||||
<> green "Add git remote:"
|
||||
<> section
|
||||
<> pretty [qc|git remote add remotename hbs2://{pretty repo}|]
|
||||
<> section
|
||||
<> green "Work with git as usual:"
|
||||
<> section
|
||||
<> "git pull remotename" <> line
|
||||
<> "(or git fetch remotename && git reset --hard remotename/branch)" <> line
|
||||
<> "git push remotename" <> line
|
||||
<> line
|
||||
|
||||
|
|
@ -1,211 +0,0 @@
|
|||
{-# Language TemplateHaskell #-}
|
||||
module HBS2Git.GitRepoLog where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Git.Types
|
||||
import HBS2.Data.Types.Refs
|
||||
|
||||
import HBS2.System.Logger.Simple
|
||||
|
||||
import Data.Word
|
||||
import Data.Function
|
||||
import Lens.Micro.Platform
|
||||
import Codec.Serialise
|
||||
import Data.ByteString.Lazy qualified as LBS
|
||||
import Data.ByteString.Lazy (ByteString)
|
||||
-- import System.IO
|
||||
import UnliftIO.IO
|
||||
import Control.Monad.IO.Unlift
|
||||
import Codec.Compression.GZip
|
||||
import System.Directory
|
||||
import Data.HashSet (HashSet)
|
||||
import Data.HashSet qualified as HashSet
|
||||
import Control.Concurrent.STM
|
||||
import Data.Maybe
|
||||
|
||||
class HasGitLogOptions a where
|
||||
compressEntries :: a -> Bool
|
||||
compressWholeLog :: a -> Bool
|
||||
|
||||
|
||||
-- | default GitLogOptions
|
||||
instance HasGitLogOptions () where
|
||||
compressEntries = const True
|
||||
compressWholeLog = const False
|
||||
|
||||
data CompressWholeLog = CompressWholeLog
|
||||
|
||||
instance HasGitLogOptions CompressWholeLog where
|
||||
compressEntries = const False
|
||||
compressWholeLog = const True
|
||||
|
||||
data GitLogEntryType = GitLogEntryCommit
|
||||
| GitLogEntryBlob
|
||||
| GitLogEntryTree
|
||||
| GitLogEntryHead
|
||||
| GitLogHead
|
||||
| GitLogDeps
|
||||
| GitLogHeadDel
|
||||
| GitLogContext
|
||||
deriving stock (Eq,Ord,Enum,Generic,Show)
|
||||
|
||||
|
||||
newtype GitLogTimeStamp = GitLogTimeStamp Int
|
||||
deriving stock (Eq,Ord,Show,Data,Generic)
|
||||
|
||||
instance Serialise GitLogTimeStamp
|
||||
|
||||
newtype GitLogHeadEntry =
|
||||
GitLogHeadEntry
|
||||
{ _gitLogHeadAfter :: Maybe HashRef
|
||||
}
|
||||
deriving stock (Eq,Generic)
|
||||
|
||||
instance Serialise GitLogHeadEntry
|
||||
|
||||
makeLenses ''GitLogHeadEntry
|
||||
|
||||
|
||||
newtype GitLogDepsEntry =
|
||||
GitLogDepsEntry
|
||||
{ _gitLogDeps :: [HashRef]
|
||||
}
|
||||
deriving stock (Eq,Generic)
|
||||
|
||||
makeLenses ''GitLogDepsEntry
|
||||
|
||||
instance Serialise GitLogDepsEntry
|
||||
|
||||
-- deletion is handled in a special way.
-- we need a context for WHEN the reference is deleted,
-- because it may be deleted, created again, deleted again, etc.
-- Having the current repository context, obtained by collecting all reference states,
-- we can calculate the actual current state of the reference.
-- Or we may use a special code to mark the object as deleted
|
||||
data GitLogHeadDelEntry =
|
||||
GitLogHeadDelEntry
|
||||
{ _gitHeadContext :: [(GitRef, GitHash)] -- this gives us context to order this delete operation
|
||||
, _gitHeadDeleted :: GitRef -- this is a reference to delete
|
||||
}
|
||||
deriving stock (Eq,Generic)
|
||||
|
||||
makeLenses ''GitLogHeadDelEntry
|
||||
|
||||
instance Serialise GitLogHeadDelEntry
|
||||
|
||||
data GitLogContextEntry =
|
||||
GitLogNoContext
|
||||
| GitLogContextCommits (HashSet GitHash)
|
||||
| GitLogContextRank Int
|
||||
deriving stock (Eq,Data,Generic)
|
||||
|
||||
commitsOfGitLogContextEntry :: GitLogContextEntry -> [GitHash]
|
||||
commitsOfGitLogContextEntry = \case
|
||||
GitLogContextCommits co -> HashSet.toList co
|
||||
_ -> mempty
|
||||
|
||||
instance Serialise GitLogContextEntry
|
||||
|
||||
data GitLogEntry =
|
||||
GitLogEntry
|
||||
{ _gitLogEntryType :: GitLogEntryType
|
||||
, _gitLogEntryHash :: Maybe GitHash
|
||||
, _gitLogEntrySize :: Word32
|
||||
}
|
||||
deriving stock (Eq,Ord,Generic,Show)
|
||||
|
||||
makeLenses 'GitLogEntry
|
||||
|
||||
entryHeadSize :: Integral a => a
|
||||
entryHeadSize = 64
|
||||
|
||||
instance Serialise GitLogEntryType
|
||||
instance Serialise GitLogEntry
|
||||
|
||||
gitLogEntryTypeOf :: GitObjectType -> GitLogEntryType
|
||||
gitLogEntryTypeOf = \case
|
||||
Commit -> GitLogEntryCommit
|
||||
Tree -> GitLogEntryTree
|
||||
Blob -> GitLogEntryBlob
|
||||
|
||||
-- | scans hbs2-git repo log
|
||||
gitRepoLogScan :: forall m . MonadUnliftIO m
|
||||
=> Bool -- ^ whether to read the log section content
|
||||
-> FilePath -- ^ log file path
|
||||
-> (GitLogEntry -> Maybe ByteString -> m ()) -- ^ log section callback
|
||||
-> m ()
|
||||
|
||||
gitRepoLogScan r fn cb = do
|
||||
|
||||
trace $ "gitRepoLogScan" <+> pretty fn
|
||||
withBinaryFile fn ReadMode $ \h -> do
|
||||
sz <- liftIO $ getFileSize fn
|
||||
go h sz
|
||||
|
||||
where
|
||||
go _ 0 = pure ()
|
||||
go h size = do
|
||||
ss <- liftIO $ LBS.hGet h entryHeadSize
|
||||
let es = deserialise @GitLogEntry ss
|
||||
let esize = es ^. gitLogEntrySize
|
||||
let consumed = entryHeadSize + fromIntegral esize
|
||||
if r then do
|
||||
o <- liftIO $ LBS.hGet h (fromIntegral esize) <&> decompress
|
||||
cb es (Just o)
|
||||
else do
|
||||
liftIO $ hSeek h RelativeSeek (fromIntegral esize)
|
||||
cb es Nothing
|
||||
go h ( max 0 (size - consumed) )
|
||||
|
||||
gitRepoLogWriteHead :: forall o m . (HasGitLogOptions o, MonadIO m)
|
||||
=> o
|
||||
-> Handle
|
||||
-> GitLogHeadEntry
|
||||
-> m ()
|
||||
|
||||
gitRepoLogWriteHead opt fh e = do
|
||||
let s = serialise e
|
||||
let entry = GitLogEntry GitLogHead Nothing (fromIntegral $ LBS.length s)
|
||||
gitRepoLogWriteEntry opt fh entry s
|
||||
|
||||
|
||||
|
||||
gitRepoLogMakeEntry :: forall o . (HasGitLogOptions o)
|
||||
=> o
|
||||
-> GitLogEntry
|
||||
-> ByteString
|
||||
-> ByteString
|
||||
|
||||
gitRepoLogMakeEntry opts entry' o = bs <> ss
|
||||
where
|
||||
ss = compressWith co o
|
||||
entry = entry' & set gitLogEntrySize (fromIntegral $ LBS.length ss)
|
||||
bs = LBS.take entryHeadSize $ serialise entry <> LBS.replicate entryHeadSize 0
|
||||
co | compressEntries opts = defaultCompressParams { compressLevel = bestSpeed }
|
||||
| otherwise = defaultCompressParams { compressLevel = noCompression }
|
||||
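-- Editorial note (inferred from gitRepoLogMakeEntry above, not in the original
-- source): each on-disk log entry is a fixed 64-byte frame (the serialised
-- GitLogEntry, zero-padded to entryHeadSize) followed by the payload, which is
-- gzip-compressed per entry or stored as-is depending on the HasGitLogOptions
-- instance; gitRepoLogScan above reads the 64-byte frame first to learn the
-- payload size before consuming it.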
|
||||
gitRepoLogWriteEntry :: forall o m . (MonadIO m, HasGitLogOptions o)
|
||||
=> o
|
||||
-> Handle
|
||||
-> GitLogEntry
|
||||
-> ByteString
|
||||
-> m ()
|
||||
|
||||
gitRepoLogWriteEntry opts fh entry' o = do
|
||||
let entryWithSize = gitRepoLogMakeEntry opts entry' o
|
||||
liftIO $ LBS.hPutStr fh entryWithSize
|
||||
|
||||
gitRepoMakeIndex :: FilePath -> IO (HashSet GitHash)
|
||||
gitRepoMakeIndex fp = do
|
||||
here <- doesFileExist fp
|
||||
if not here then do
|
||||
pure mempty
|
||||
else do
|
||||
out <- newTQueueIO
|
||||
|
||||
gitRepoLogScan False fp $ \e _ -> do
|
||||
atomically $ writeTQueue out ( e ^. gitLogEntryHash )
|
||||
|
||||
atomically $ flushTQueue out <&> HashSet.fromList . catMaybes
|
||||
|
||||
|
|
@ -1,409 +0,0 @@
|
|||
{-# Language TemplateHaskell #-}
|
||||
module HBS2Git.Import where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.OrDie
|
||||
import HBS2.System.Logger.Simple
|
||||
import HBS2.Merkle
|
||||
import HBS2.Hash
|
||||
import HBS2.Storage
|
||||
import HBS2.Storage.Operations.Class
|
||||
import HBS2.Storage.Operations.Missed
|
||||
import HBS2.Storage.Operations.ByteString(TreeKey(..))
|
||||
import HBS2.Net.Auth.GroupKeySymm
|
||||
import HBS2.Peer.Proto
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import HBS2.Data.Detect hiding (Blob)
|
||||
|
||||
import HBS2.Git.Local
|
||||
import HBS2Git.GitRepoLog
|
||||
import HBS2Git.App
|
||||
import HBS2Git.Config
|
||||
import HBS2Git.State
|
||||
import HBS2Git.Evolve
|
||||
import HBS2Git.KeysMetaData
|
||||
import HBS2.Git.Local.CLI
|
||||
|
||||
import Data.Fixed
|
||||
import Control.Monad.Trans.Maybe
|
||||
import Control.Concurrent.STM
|
||||
import Control.Concurrent.STM.TQueue qualified as Q
|
||||
import Control.Monad.Reader
|
||||
import Data.Maybe
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Lens.Micro.Platform
|
||||
import Data.Set qualified as Set
|
||||
import Codec.Serialise
|
||||
import Control.Monad.Except (runExceptT)
|
||||
import Control.Monad.Catch
|
||||
import Control.Monad.Trans.Resource
|
||||
import System.Directory
|
||||
import System.IO.Temp
|
||||
import UnliftIO.IO
|
||||
import System.IO (openBinaryFile)
|
||||
import System.FilePath.Posix
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.Text qualified as Text
|
||||
import Data.Either
|
||||
|
||||
import Streaming.Prelude qualified as S
|
||||
import Streaming.ByteString qualified as SB
|
||||
import Streaming.Zip qualified as SZip
|
||||
|
||||
import HBS2Git.PrettyStuff
|
||||
|
||||
data RunImportOpts =
|
||||
RunImportOpts
|
||||
{ _runImportDry :: Maybe Bool
|
||||
, _runImportRefVal :: Maybe HashRef
|
||||
}
|
||||
|
||||
makeLenses 'RunImportOpts
|
||||
|
||||
isRunImportDry :: RunImportOpts -> Bool
|
||||
isRunImportDry o = view runImportDry o == Just True
|
||||
|
||||
|
||||
|
||||
walkHashes :: (MonadIO m, HasStorage m) => TQueue HashRef -> Hash HbSync -> m ()
|
||||
walkHashes q h = walkMerkle h (readBlock . HashRef) $ \(hr :: Either (Hash HbSync) [HashRef]) -> do
|
||||
case hr of
|
||||
Left hx -> die $ show $ pretty "missed block:" <+> pretty hx
|
||||
Right (hrr :: [HashRef]) -> do
|
||||
forM_ hrr $ \hx -> do
|
||||
liftIO $ atomically $ Q.writeTQueue q hx
|
||||
|
||||
blockSource :: (MonadIO m, HasStorage m) => HashRef -> SB.ByteStream m Integer
|
||||
blockSource h = do
|
||||
tsize <- liftIO $ newTVarIO 0
|
||||
deepScan ScanDeep (const none) (fromHashRef h) (lift . readBlock . HashRef) $ \ha -> do
|
||||
sec <- lift $ readBlock (HashRef ha) `orDie` [qc|missed block {pretty ha}|]
|
||||
-- skip merkle tree head block, write only the data
|
||||
liftIO $ atomically $ modifyTVar tsize (+ LBS.length sec)
|
||||
when (h /= HashRef ha) do
|
||||
SB.fromLazy sec
|
||||
|
||||
liftIO $ readTVarIO tsize <&> fromIntegral
|
||||
|
||||
getLogFlags :: MonadIO m
|
||||
=> (HashRef -> m (Maybe LBS.ByteString))
|
||||
-> HashRef
|
||||
-> m (Maybe [Text])
|
||||
|
||||
getLogFlags doRead h = do
|
||||
|
||||
runMaybeT do
|
||||
|
||||
treeBs <- MaybeT $ doRead h
|
||||
|
||||
let something = tryDetect (fromHashRef h) treeBs
|
||||
let meta = mconcat $ rights [ parseTop (Text.unpack s) | ShortMetadata s <- universeBi something ]
|
||||
|
||||
-- TODO: check-if-it-is-hbs2-git-log
|
||||
let tp = lastMay [ "hbs2-git-push-log"
|
||||
| (ListVal (Key "type:" [SymbolVal "hbs2-git-push-log"]) ) <- meta
|
||||
]
|
||||
|
||||
guard ( tp == Just "hbs2-git-push-log" )
|
||||
|
||||
pure $ mconcat [ Text.splitOn ":" (Text.pack (show $ pretty s))
|
||||
| (ListVal (Key "flags:" [SymbolVal s]) ) <- meta
|
||||
]
|
||||
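-- Editorial note (not in the original source): for a push log written by
-- hbs2-git the short metadata carries "flags: gz:sgmt", so getLogFlags above
-- yields Just ["gz","sgmt"]; the importer only checks for the "gz" element to
-- decide whether the log stream has to be gunzipped.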
|
||||
class HasImportOpts a where
|
||||
importForce :: a -> Bool
|
||||
importDontWriteGit :: a -> Bool
|
||||
|
||||
instance HasImportOpts Bool where
|
||||
importForce f = f
|
||||
importDontWriteGit = const False
|
||||
|
||||
instance HasImportOpts (Bool, Bool) where
|
||||
importForce = fst
|
||||
importDontWriteGit = snd
|
||||
|
||||
importRefLogNew :: ( MonadIO m
|
||||
, MonadUnliftIO m
|
||||
, MonadCatch m
|
||||
, MonadMask m
|
||||
, HasStorage m
|
||||
, HasRPC m
|
||||
, HasEncryptionKeys m
|
||||
, HasImportOpts opts
|
||||
)
|
||||
=> opts -> RepoRef -> m ()
|
||||
|
||||
importRefLogNew opts ref = runResourceT do
|
||||
|
||||
let force = importForce opts
|
||||
|
||||
sto <- getStorage
|
||||
|
||||
let myTempDir = "hbs-git"
|
||||
temp <- liftIO getTemporaryDirectory
|
||||
|
||||
(_,dir) <- allocate (createTempDirectory temp myTempDir) removeDirectoryRecursive
|
||||
|
||||
lift $ makePolled ref
|
||||
|
||||
db <- makeDbPath ref >>= dbEnv
|
||||
|
||||
void $ runMaybeT do
|
||||
trace $ "importRefLogNew" <+> pretty ref
|
||||
logRoot <- toMPlus =<< readRef ref
|
||||
trace $ "ROOT" <+> pretty logRoot
|
||||
|
||||
trans <- withDB db $ stateGetAllTranImported <&> Set.fromList
|
||||
done <- withDB db $ stateGetRefImported logRoot
|
||||
|
||||
when (not done || force) do
|
||||
|
||||
logQ <- liftIO newTQueueIO
|
||||
|
||||
lift $ walkHashes logQ (fromHashRef logRoot)
|
||||
|
||||
let notSkip n = force || not (Set.member n trans)
|
||||
|
||||
entries' <- liftIO $ atomically $ flushTQueue logQ <&> filter notSkip
|
||||
|
||||
pMiss <- newProgressMonitor [qc|scan for missed blocks|] (length entries')
|
||||
|
||||
-- TODO: might-be-slow
|
||||
entries <- S.toList_ $ forM_ entries' $ \e -> do
|
||||
updateProgress pMiss 1
|
||||
missed <- lift $ findMissedBlocks sto e
|
||||
if null missed then do
|
||||
S.yield e
|
||||
else do
|
||||
S.yield e
|
||||
forM_ missed $ \m -> do
|
||||
debug $ "missed blocks in tree" <+> pretty e <+> pretty m
|
||||
|
||||
pCommit <- liftIO $ startGitHashObject Commit
|
||||
pTree <- liftIO $ startGitHashObject Tree
|
||||
pBlob <- liftIO $ startGitHashObject Blob
|
||||
|
||||
let hCommits = getStdin pCommit
|
||||
let hTrees = getStdin pTree
|
||||
let hBlobs = getStdin pBlob
|
||||
|
||||
let handles = [hCommits, hTrees, hBlobs]
|
||||
|
||||
sp0 <- withDB db savepointNew
|
||||
withDB db $ savepointBegin sp0
|
||||
|
||||
decrypt <- lift $ lift enumEncryptionKeys
|
||||
|
||||
debug $ "Decrypt" <> vcat (fmap pretty decrypt)
|
||||
|
||||
pMeta <- newProgressMonitor [qc|process metadata|] (length entries)
|
||||
|
||||
forM_ entries $ \e -> runMaybeT do
|
||||
let kDone = serialise ("processmetadata", e)
|
||||
|
||||
updateProgress pMeta 1
|
||||
|
||||
-- guard =<< withDB db (not <$> stateGetProcessed kDone)
|
||||
|
||||
rd <- toMPlus =<< parseTx e
|
||||
let (SequentialRef _ (AnnotatedHashRef ann' h)) = rd
|
||||
forM_ ann' (withDB db . importKeysAnnotations ref e)
|
||||
|
||||
-- withDB db $ statePutProcessed kDone
|
||||
|
||||
-- TODO: exclude-metadata-transactions
|
||||
forM_ entries $ \e -> do
|
||||
|
||||
missed <- lift $ readBlock e <&> isNothing
|
||||
|
||||
when missed do
|
||||
warn $ "MISSED BLOCK" <+> pretty e
|
||||
|
||||
let fname = show (pretty e)
|
||||
let fpath = dir </> fname
|
||||
|
||||
(keyFh, fh) <- allocate (openBinaryFile fpath AppendMode) hClose
|
||||
|
||||
void $ runMaybeT $ do
|
||||
|
||||
refData <- toMPlus =<< parseTx e
|
||||
-- NOTE: good-place-to-process-hash-log-update-first
|
||||
let (SequentialRef _ (AnnotatedHashRef ann' h)) = refData
|
||||
|
||||
-- forM_ ann' (withDB db . importKeysAnnotations ref e)
|
||||
|
||||
trace $ "PUSH LOG HASH" <+> pretty h
|
||||
|
||||
treeBs <- MaybeT $ lift $ readBlock h
|
||||
|
||||
let something = tryDetect (fromHashRef h) treeBs
|
||||
let meta = mconcat $ rights [ parseTop (Text.unpack s) | ShortMetadata s <- universeBi something ]
|
||||
|
||||
-- TODO: check-if-it-is-hbs2-git-log
|
||||
|
||||
let flags = mconcat [ Text.splitOn ":" (Text.pack (show $ pretty s))
|
||||
| (ListVal (Key "flags:" [SymbolVal s]) ) <- meta
|
||||
]
|
||||
|
||||
let gzipped = "gz" `elem` flags
|
||||
|
||||
debug $ "FOUND LOG METADATA " <+> pretty flags
|
||||
<+> pretty "gzipped:" <+> pretty gzipped
|
||||
|
||||
here <- withDB db $ stateGetLogImported h
|
||||
|
||||
unless (here && not force) do
|
||||
|
||||
(src, enc) <- case something of
|
||||
|
||||
MerkleAnn ann@(MTreeAnn _ sc@(EncryptGroupNaClSymm g nonce) tree) -> do
|
||||
|
||||
gk10' <- runExceptT $ readFromMerkle sto (SimpleKey g)
|
||||
|
||||
-- FIXME: nicer-error-handling
|
||||
gk10'' <- either (const $ err ("GK0 not found:" <+> pretty g) >> mzero) pure gk10'
|
||||
|
||||
gk10 <- toMPlus (deserialiseOrFail gk10'')
|
||||
|
||||
gk11 <- withDB db $ stateListGK1 (HashRef g)
|
||||
|
||||
let gk1 = mconcat $ gk10 : gk11
|
||||
|
||||
-- elbs <- runExceptT $ readFromMerkle sto (ToDecryptBS decrypt (fromHashRef h))
|
||||
elbs <- runExceptT $ readFromMerkle sto (ToDecryptBS2 gk1 nonce decrypt ann)
|
||||
|
||||
case elbs of
|
||||
Left{} -> do
|
||||
let lock = toStringANSI $ red "x"
|
||||
hPutStrLn stderr [qc|import [{lock}] {pretty e}|]
|
||||
mzero
|
||||
|
||||
Right lbs -> (,True) <$> pure do
|
||||
SB.fromLazy lbs
|
||||
pure (fromIntegral (LBS.length lbs))
|
||||
|
||||
-- FIXME: remove-debug
|
||||
MerkleAnn{} -> pure (blockSource h, False)
|
||||
|
||||
_ -> pure (blockSource h, False)
|
||||
|
||||
sz <- if gzipped then do
|
||||
SB.toHandle fh $ SZip.gunzip src
|
||||
else
|
||||
SB.toHandle fh src
|
||||
|
||||
release keyFh
|
||||
|
||||
let fpathReal = fpath
|
||||
|
||||
tnum <- liftIO $ newTVarIO 0
|
||||
liftIO $ gitRepoLogScan True fpathReal $ \_ _ -> do
|
||||
liftIO $ atomically $ modifyTVar tnum succ
|
||||
|
||||
num <- liftIO $ readTVarIO tnum
|
||||
trace $ "LOG ENTRY COUNT" <+> pretty num
|
||||
|
||||
let lock = toStringANSI $ if enc then yellow "@" else " "
|
||||
|
||||
let pref = take 16 (show (pretty e))
|
||||
let name = [qc|import [{lock}] {pref}... {realToFrac sz / (1024*1024) :: Fixed E3}|]
|
||||
|
||||
oMon <- newProgressMonitor name num
|
||||
|
||||
lift $ lift $ gitRepoLogScan True fpathReal $ \entry s -> void $ runMaybeT do
|
||||
|
||||
updateProgress oMon 1
|
||||
|
||||
lbs <- toMPlus s
|
||||
|
||||
withDB db do
|
||||
|
||||
case view gitLogEntryType entry of
|
||||
GitLogEntryCommit -> do
|
||||
bss <- lift (pure s) `orDie` [qc|git object not read from log|]
|
||||
let co = view gitLogEntryHash entry
|
||||
hx <- pure (view gitLogEntryHash entry) `orDie` [qc|empty git hash|]
|
||||
|
||||
trace $ "logobject" <+> pretty h <+> "commit" <+> pretty (view gitLogEntryHash entry)
|
||||
|
||||
writeIfNew hCommits dir hx (GitObject Commit lbs)
|
||||
statePutLogObject (h, Commit, hx)
|
||||
|
||||
let parents = gitCommitGetParentsPure bss
|
||||
|
||||
forM_ parents $ \p -> do
|
||||
trace $ "fact" <+> "commit-parent" <+> pretty co <+> pretty p
|
||||
statePutLogCommitParent (hx,p)
|
||||
|
||||
GitLogEntryBlob -> do
|
||||
trace $ "logobject" <+> pretty h <+> "blob" <+> pretty (view gitLogEntryHash entry)
|
||||
hx <- pure (view gitLogEntryHash entry) `orDie` [qc|empty git hash|]
|
||||
writeIfNew hBlobs dir hx (GitObject Blob lbs)
|
||||
statePutLogObject (h, Blob, hx)
|
||||
|
||||
GitLogEntryTree -> do
|
||||
trace $ "logobject" <+> pretty h <+> "tree" <+> pretty (view gitLogEntryHash entry)
|
||||
hx <- pure (view gitLogEntryHash entry) `orDie` [qc|empty git hash|]
|
||||
writeIfNew hTrees dir hx (GitObject Tree lbs)
|
||||
statePutLogObject (h, Tree, hx)
|
||||
|
||||
GitLogContext -> do
|
||||
trace $ "logobject" <+> pretty h <+> "context" <+> pretty (view gitLogEntryHash entry)
|
||||
|
||||
void $ runMaybeT do
|
||||
ss <- MaybeT $ pure s
|
||||
logEntry <- MaybeT $ pure $ deserialiseOrFail @GitLogContextEntry ss & either (const Nothing) Just
|
||||
|
||||
case logEntry of
|
||||
GitLogContextRank n -> do
|
||||
lift $ statePutLogContextRank h n
|
||||
|
||||
GitLogContextCommits co -> do
|
||||
lift $ forM_ co (statePutLogContextCommit h)
|
||||
|
||||
_ -> pure ()
|
||||
|
||||
GitLogEntryHead -> do
|
||||
trace $ "HEAD ENTRY" <+> viaShow s
|
||||
let mbrh = fromStringMay @RepoHead (maybe mempty LBS.unpack s)
|
||||
rh <- pure mbrh `orDie` [qc|invalid log header in {pretty h} {s}|]
|
||||
|
||||
forM_ (HashMap.toList $ view repoHeads rh) $ \(re,ha) -> do
|
||||
trace $ "logrefval" <+> pretty h <+> pretty re <+> pretty ha
|
||||
statePutLogRefVal (h,re,ha)
|
||||
|
||||
_ -> pure ()
|
||||
|
||||
-- otherwise we won't process those logs next time.
|
||||
unless (importDontWriteGit opts) do
|
||||
statePutLogImported h
|
||||
statePutTranImported e
|
||||
|
||||
mapM_ hClose handles
|
||||
|
||||
withDB db $ do
|
||||
stateUpdateCommitDepths
|
||||
-- statePutRefImported logRoot
|
||||
if (length entries == length entries') then do
|
||||
statePutRefImported logRoot
|
||||
else do
|
||||
warn "Some entries not processed!"
|
||||
|
||||
savepointRelease sp0
|
||||
|
||||
where
|
||||
|
||||
parseTx e = runMaybeT do
|
||||
bs <- MaybeT $ readBlock e
|
||||
refupd <- toMPlus $ deserialiseOrFail @(RefLogUpdate HBS2L4Proto) bs
|
||||
toMPlus $ deserialiseOrFail (LBS.fromStrict $ view refLogUpdData refupd)
|
||||
|
||||
writeIfNew gitHandle dir h (GitObject tp s) = do
|
||||
unless (importDontWriteGit opts) do
|
||||
let nf = dir </> show (pretty h)
|
||||
liftIO $ LBS.writeFile nf s
|
||||
hPutStrLn gitHandle nf
|
||||
hFlush gitHandle
|
||||
trace $ "WRITTEN OBJECT" <+> pretty tp <+> pretty h <+> pretty nf
|
||||
|
|
@ -1,79 +0,0 @@
|
|||
module HBS2Git.KeysCommand
|
||||
( module HBS2Git.KeysCommand
|
||||
, module HBS2.Net.Proto.Types
|
||||
, CryptoAction(..)
|
||||
) where
|
||||
|
||||
|
||||
import HBS2Git.Prelude
|
||||
import HBS2Git.App
|
||||
import HBS2Git.Encryption
|
||||
|
||||
import HBS2.OrDie
|
||||
import HBS2.Net.Proto.Types
|
||||
|
||||
import HBS2.System.Logger.Simple
|
||||
|
||||
import Data.Time.Clock.POSIX
|
||||
import Data.Maybe
|
||||
|
||||
|
||||
runKeyRefsList :: (MonadIO m, HasConf m) => m ()
|
||||
runKeyRefsList = do
|
||||
conf <- getConf
|
||||
|
||||
now <- liftIO getPOSIXTime
|
||||
|
||||
let every = [ keyInfoRef <$> keyInfoFrom now syn | syn <- conf
|
||||
, isJust (keyInfoFrom now syn)
|
||||
] & catMaybes
|
||||
|
||||
liftIO $ print $ vcat (fmap (pretty . AsBase58) every)
|
||||
|
||||
|
||||
|
||||
runKeysUpdate :: (MonadIO m, HasConf m) => PubKey 'Sign HBS2Basic -> m ()
|
||||
runKeysUpdate ref = do
|
||||
conf <- getConf
|
||||
|
||||
-- TODO: generate-GK0
|
||||
-- generate basic key for OWNER only
|
||||
|
||||
now <- liftIO getPOSIXTime
|
||||
let every = [ keyInfoFrom now syn | syn <- conf
|
||||
, isJust (keyInfoFrom now syn)
|
||||
] & catMaybes
|
||||
|
||||
this <- pure (lastMay [ x | x <- every, keyInfoRef x == ref ])
|
||||
`orDie` "Not found encrypted section for given ref"
|
||||
|
||||
gk0 <- generateGroupKey @HBS2Basic Nothing [keyInfoOwner this]
|
||||
|
||||
pure ()
|
||||
|
||||
-- now <- liftIO getPOSIXTime
|
||||
|
||||
-- let every = [ keyInfoFrom now syn | syn <- conf
|
||||
-- , isJust (keyInfoFrom now syn)
|
||||
-- ] & catMaybes
|
||||
|
||||
-- let keys = [ x | x <- every, keyInfoRef x == ref ]
|
||||
|
||||
-- info $ viaShow keys
|
||||
|
||||
|
||||
runKeysList :: (MonadIO m, HasConf m) => PubKey 'Sign HBS2Basic -> m ()
|
||||
runKeysList ref = do
|
||||
conf <- getConf
|
||||
|
||||
now <- liftIO getPOSIXTime
|
||||
|
||||
let every = [ keyInfoFrom now syn | syn <- conf
|
||||
, isJust (keyInfoFrom now syn)
|
||||
] & catMaybes
|
||||
|
||||
let keys = [ x | x <- every, keyInfoRef x == ref ]
|
||||
|
||||
info $ viaShow keys
|
||||
|
||||
|
|
@ -1,258 +0,0 @@
|
|||
module HBS2Git.KeysMetaData where
|
||||
|
||||
|
||||
import HBS2Git.Prelude
|
||||
import HBS2Git.Types
|
||||
import HBS2Git.Alerts
|
||||
import HBS2Git.Annotations
|
||||
import HBS2Git.Encryption
|
||||
import HBS2Git.State
|
||||
import HBS2Git.PrettyStuff
|
||||
import HBS2Git.Config
|
||||
|
||||
|
||||
import HBS2.Data.Detect
|
||||
import HBS2.Merkle
|
||||
import HBS2.Peer.Proto
|
||||
import HBS2.OrDie
|
||||
import HBS2.Storage
|
||||
import HBS2.Storage.Operations.ByteString
|
||||
import HBS2.System.Logger.Simple
|
||||
|
||||
import Control.Monad
|
||||
import Control.Monad.Catch (MonadMask)
|
||||
import Control.Monad.Except (runExceptT)
|
||||
import Control.Monad.Trans.Maybe
|
||||
import Data.ByteString.Lazy (ByteString)
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Data.Either
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.HashSet qualified as HashSet
|
||||
import Data.List qualified as List
|
||||
import Data.Maybe
|
||||
import Lens.Micro.Platform
|
||||
import Streaming.Prelude qualified as S
|
||||
import System.IO
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
|
||||
|
||||
updateGK0 :: forall m . ( MonadIO m
|
||||
-- , HasRPC m
|
||||
, MonadMask m
|
||||
, HasStorage m
|
||||
, HasConf m
|
||||
, HasEncryptionKeys m
|
||||
)
|
||||
=> RepoRef
|
||||
-> m ()
|
||||
updateGK0 repo = void $ runMaybeT do
|
||||
|
||||
guard =<< lift (isRefEncrypted (fromRefLogKey repo))
|
||||
|
||||
db <- makeDbPath repo >>= dbEnv
|
||||
-- FIXME: check-if-for-die-good-here
|
||||
ki <- lift $ getKeyInfo (fromRefLogKey repo)
|
||||
`orDie` noKeyInfoMsg repo
|
||||
|
||||
-- 2. If there is no GK0, or it has expired
|
||||
mbGk0Hash <- withDB db $ stateGetLocalKey ki
|
||||
|
||||
-- 2.1 Generate a new GK0
|
||||
gk0Hash <- lift $ maybe1 mbGk0Hash (makeNewGK0 ki) pure
|
||||
|
||||
when (isNothing mbGk0Hash) do
|
||||
liftIO $ hPutDoc stderr $ "New GK0" <+> pretty gk0Hash <> line
|
||||
|
||||
withDB db $ statePutLocalKey ki gk0Hash repo
|
||||
|
||||
debug $ "GK0" <+> pretty gk0Hash
|
||||
|
||||
where
|
||||
makeNewGK0 ki = do
|
||||
sto <- getStorage
|
||||
gk <- genGK0 ki <&> serialise
|
||||
liftIO $ writeAsMerkle sto (gk :: ByteString) <&> HashRef
|
||||
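-- Editorial note (summary of updateGK0 above, not in the original source):
-- for an encrypted reflog, updateGK0 looks up the GK0 hash cached in the state
-- database for the current KeyInfo; if none is cached, it generates a fresh
-- group key for the owner plus members, stores it as a merkle tree and records
-- its hash, so subsequent exports can reuse it.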
|
||||
genKeysAnnotations :: forall m . ( MonadIO m
|
||||
, MonadMask m
|
||||
, HasStorage m
|
||||
, HasConf m
|
||||
, HasEncryptionKeys m
|
||||
)
|
||||
=> RepoRef
|
||||
-> m (Maybe HashRef)
|
||||
|
||||
genKeysAnnotations repo = do
|
||||
sto <- getStorage
|
||||
|
||||
runMaybeT do
|
||||
|
||||
guard =<< lift (isRefEncrypted (fromRefLogKey repo))
|
||||
|
||||
db <- makeDbPath repo >>= dbEnv
|
||||
-- TODO: generate-and-update-keys-metadata
|
||||
-- 1. get GK0
|
||||
|
||||
ki <- lift $ getKeyInfo (fromRefLogKey repo)
|
||||
`orDie` noKeyInfoMsg repo
|
||||
|
||||
gk0Hash <- withDB db $ stateGetLocalKey ki
|
||||
`orDie` noKeyInfoMsg repo
|
||||
|
||||
let processedKey = serialise ("GENKEYMETADATA", gk0Hash)
|
||||
|
||||
isNewKey <- withDB db $ not <$> stateGetProcessed processedKey
|
||||
|
||||
sp0 <- withDB db savepointNew
|
||||
withDB db $ savepointBegin sp0
|
||||
|
||||
-- FIXME: excess-data-roundtrip
|
||||
gk0newBs <- (runExceptT (readFromMerkle sto (SimpleKey (fromHashRef gk0Hash))))
|
||||
`orDie` [qc|*** Can't load GK0 {pretty gk0Hash}, maybe storage failure|]
|
||||
|
||||
-- now we somehow need to find out that the key is new and that we have to process
-- the new readers.
-- Option #1: write the authors into the state; if they are not processed yet,
-- process them.
|
||||
|
||||
-- 2.2 Generate a new GK1 ∀ members
|
||||
-- FIXME: might-be-slow
|
||||
|
||||
guard isNewKey
|
||||
|
||||
-- notice $ "NEW KEY APPEARED" <+> pretty gk0Hash
|
||||
|
||||
h <- toMPlus =<< getRef sto (refAlias repo)
|
||||
|
||||
gk0hs <- HashSet.fromList <$> S.toList_ (findAllGK0 sto h)
|
||||
|
||||
let keySource = do
|
||||
forM_ gk0hs $ \gkh -> void $ runMaybeT do
|
||||
gbs <- toMPlus =<< runExceptT (readFromMerkle sto (SimpleKey gkh))
|
||||
gk0 <- toMPlus $ deserialiseOrFail @(GroupKey 'Symm HBS2Basic) gbs
|
||||
-- TODO: decrypt-secret-right-here
|
||||
lift $ S.yield (gkh, gk0)
|
||||
|
||||
allKeys <- S.toList_ keySource <&> HashMap.fromList
|
||||
|
||||
-- ∀ gk0:
-- - extract the secret (find which key can decrypt it) from the recipients
-- - take all the recipients and build a new GK1
-- for each of the recipients from allKeys

-- take all available key pairs?
|
||||
keys <- lift enumEncryptionKeys <&> fmap (\x -> (view krPk x, view krSk x))
|
||||
|
||||
new' <- forM (HashMap.toList allKeys) $ \(hx, gk0) -> do
|
||||
let gksec' = [ lookupGroupKey sk pk gk0 | (pk,sk) <- keys ] & catMaybes & headMay
|
||||
case gksec' of
|
||||
Nothing -> pure (Left hx)
|
||||
Just sec -> pure $ Right (hx, gk0, sec)
|
||||
|
||||
let missed = lefts new'
|
||||
|
||||
forM_ missed $ \miss -> do
|
||||
warn $ "new group key: unavailable keys for gk" <+> pretty miss
|
||||
|
||||
let new = rights new'
|
||||
|
||||
gk0new <- pure (deserialiseOrFail @(GroupKey 'Symm HBS2Basic) gk0newBs)
|
||||
`orDie` [qc|*** Malformed/corrupted group key {pretty gk0Hash}|]
|
||||
|
||||
let rcpt0 = recipients gk0new
|
||||
|
||||
gnew <- forM new $ \(hx, gk0, sec) -> do
|
||||
|
||||
-- TODO: test-if-key-removing-works
|
||||
let newRcpt = (recipients gk0new & HashMap.keysSet)
|
||||
`HashSet.difference`
|
||||
(recipients gk0 & HashMap.keysSet)
|
||||
|
||||
let r1 = HashMap.keys $ recipients gk0 <> recipients gk0new
|
||||
|
||||
let r11 = [ x | x <- r1, HashMap.member x rcpt0 ]
|
||||
|
||||
gk1 <- generateGroupKey @HBS2Basic (Just sec) r11
|
||||
|
||||
pure (hx, newRcpt, gk1)
|
||||
|
||||
let nr = HashSet.unions $ fmap (view _2) gnew
|
||||
|
||||
ann <- if HashSet.null nr then do
|
||||
pure mempty
|
||||
else do
|
||||
forM gnew $ \(gk0h, _, gk1) -> do
|
||||
pure (GK1 (HashRef gk0h) gk1)
|
||||
|
||||
|
||||
annHash <- if List.null ann then do
|
||||
pure Nothing
|
||||
else do
|
||||
Just . HashRef <$> writeAsMerkle sto (serialise (SmallAnnotations ann))
|
||||
|
||||
debug $ "ANNOTATIONS" <+> pretty annHash
|
||||
|
||||
withDB db do
|
||||
statePutProcessed processedKey
|
||||
savepointRelease sp0
|
||||
|
||||
toMPlus annHash
|
||||
|
||||
where
|
||||
|
||||
-- FIXME: deepScan-ScanShallow-broken
|
||||
-- TODO: deal-with-missed-blocks
|
||||
findAllGK0 sto h = do
|
||||
-- TODO: performance-memoize-possible
-- this could be memoized per h
|
||||
deepScan ScanDeep (const none) h (getBlock sto) $ \hx -> do
|
||||
void $ runMaybeT do
|
||||
blk <- toMPlus =<< getBlock sto hx
|
||||
refupd <- toMPlus $ deserialiseOrFail @(RefLogUpdate HBS2L4Proto) blk
|
||||
payload <- toMPlus $ deserialiseOrFail (LBS.fromStrict $ view refLogUpdData refupd)
|
||||
|
||||
let (SequentialRef _ (AnnotatedHashRef _ ht)) = payload
|
||||
|
||||
treeBs <- toMPlus =<< getBlock sto (fromHashRef ht)
|
||||
|
||||
enc <- toMPlus (deserialiseOrFail @(MTreeAnn [HashRef]) treeBs)
|
||||
<&> _mtaCrypt
|
||||
|
||||
case enc of
|
||||
EncryptGroupNaClSymm g _ -> do
|
||||
-- liftIO $ hPutDoc stderr $ "GK0 FOR" <+> pretty
|
||||
lift $ S.yield g
|
||||
|
||||
_ -> pure ()
|
||||
|
||||
|
||||
importKeysAnnotations :: forall m . ( MonadIO m
|
||||
, MonadMask m
|
||||
, HasStorage m
|
||||
)
|
||||
=> RepoRef
|
||||
-> HashRef
|
||||
-> HashRef
|
||||
-> DB m ()
|
||||
|
||||
importKeysAnnotations repo e href = do
|
||||
sto <- lift getStorage
|
||||
void $ runMaybeT do
|
||||
ebs <- runExceptT $ readFromMerkle sto (SimpleKey (fromHashRef href))
|
||||
|
||||
bs <- toMPlus ebs
|
||||
|
||||
anns <- toMPlus $ deserialiseOrFail @Annotations bs
|
||||
|
||||
let entries = case anns of
|
||||
SmallAnnotations e -> [ gk1 | gk1@(GK1{}) <- e ]
|
||||
_ -> mempty
|
||||
|
||||
|
||||
forM_ entries $ \(GK1 gk0h gk1) -> do
|
||||
|
||||
forM_ (HashMap.toList (recipients gk1)) $ \(pk,box) -> do
|
||||
let gk1small = GroupKeySymm @HBS2Basic (HashMap.singleton pk box)
|
||||
lift $ statePutGK1 gk0h pk gk1small
|
||||
|
||||
|
|
@ -1,15 +0,0 @@
module HBS2Git.Prelude
  ( module HBS2.Prelude.Plated
  , module HBS2.Base58
  , module HBS2.Data.Types.Refs
  , module Credentials
  , module Codec.Serialise
  ) where

import HBS2.Prelude.Plated
import HBS2.Base58
import HBS2.Data.Types.Refs
import HBS2.Net.Auth.Credentials as Credentials

import Codec.Serialise
@ -1,27 +0,0 @@
module HBS2Git.PrettyStuff
  ( module HBS2Git.PrettyStuff
  , hPutDoc
  ) where

import Data.Text qualified as Text
import Prettyprinter
import Prettyprinter.Render.Terminal

green :: Doc AnsiStyle -> Doc AnsiStyle
green = annotate (color Green)

yellow :: Doc AnsiStyle -> Doc AnsiStyle
yellow = annotate (color Yellow)

red :: Doc AnsiStyle -> Doc AnsiStyle
red = annotate (color Red)

blue :: Doc AnsiStyle -> Doc AnsiStyle
blue = annotate (color Blue)

section :: Doc ann
section = line <> line

toStringANSI :: Doc AnsiStyle -> String
toStringANSI doc = Text.unpack $ renderStrict $ layoutPretty defaultLayoutOptions doc
@ -1,656 +0,0 @@
|
|||
{-# OPTIONS_GHC -fno-warn-orphans #-}
|
||||
{-# Language UndecidableInstances #-}
|
||||
module HBS2Git.State where
|
||||
|
||||
import HBS2Git.Prelude hiding (getCredentials)
|
||||
import HBS2Git.Types
|
||||
import HBS2Git.Config (cookieFile)
|
||||
import HBS2Git.Encryption
|
||||
import HBS2.Git.Types
|
||||
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Hash
|
||||
|
||||
import HBS2.System.Logger.Simple
|
||||
|
||||
|
||||
import Control.Monad.Trans.Resource
|
||||
import Data.Functor
|
||||
import Data.Function
|
||||
import Database.SQLite.Simple
|
||||
import Database.SQLite.Simple.FromField
|
||||
import Database.SQLite.Simple.ToField
|
||||
import Control.Monad.Reader
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Data.Text.IO qualified as Text
|
||||
import Data.Text qualified as Text
|
||||
import System.Directory
|
||||
import System.FilePath
|
||||
import Data.Maybe
|
||||
import Data.UUID.V4 qualified as UUID
|
||||
import Control.Monad.Catch
|
||||
import Control.Concurrent.STM
|
||||
import Data.Graph (graphFromEdges, topSort)
|
||||
import Lens.Micro.Platform
|
||||
|
||||
-- FIXME: move-orphans-to-separate-module
|
||||
|
||||
instance ToField Cookie where
|
||||
toField (Cookie x) = toField x
|
||||
|
||||
instance FromField Cookie where
|
||||
fromField = fmap Cookie . fromField @Text.Text
|
||||
|
||||
instance ToField GitHash where
|
||||
toField h = toField (show $ pretty h)
|
||||
|
||||
instance ToField GitRef where
|
||||
toField h = toField (show $ pretty h)
|
||||
|
||||
instance FromField GitRef where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
instance FromField GitHash where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
instance FromField GitObjectType where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
instance ToField HashRef where
|
||||
toField h = toField (show $ pretty h)
|
||||
|
||||
instance ToField GitObjectType where
|
||||
toField h = toField (show $ pretty h)
|
||||
|
||||
instance FromField HashRef where
|
||||
fromField = fmap fromString . fromField @String
|
||||
|
||||
instance ToField (RefLogKey HBS2Basic) where
|
||||
toField rk = toField (show (pretty rk))
|
||||
|
||||
newtype Base58Field a = Base58Field { unBaseB8Field :: a }
|
||||
|
||||
instance Pretty (AsBase58 a) => ToField (Base58Field a) where
|
||||
toField (Base58Field a) = toField (show (pretty (AsBase58 a)))
|
||||
|
||||
instance FromStringMaybe a => FromField (Base58Field a) where
|
||||
fromField x =
|
||||
fromField @String x
|
||||
<&> fromStringMay @a
|
||||
>>= maybe (fail "can't parse base58 value") (pure . Base58Field)
|
||||
|
||||
newtype DB m a =
|
||||
DB { fromDB :: ReaderT DBEnv m a }
|
||||
deriving newtype ( Applicative
|
||||
, Functor
|
||||
, Monad
|
||||
, MonadIO
|
||||
, MonadReader DBEnv
|
||||
, MonadTrans
|
||||
, MonadThrow
|
||||
, MonadCatch
|
||||
)
|
||||
|
||||
instance (HasRefCredentials m) => HasRefCredentials (DB m) where
|
||||
getCredentials = lift . getCredentials
|
||||
setCredentials r s = lift (setCredentials r s)
|
||||
|
||||
stateConnection :: MonadIO m => DB m Connection
|
||||
stateConnection = do
|
||||
env <- ask
|
||||
initConnection env
|
||||
|
||||
initConnection :: MonadIO m => DBEnv -> m Connection
|
||||
initConnection env = do
|
||||
mco <- liftIO $ readTVarIO (view dbConn env)
|
||||
case mco of
|
||||
Just co -> pure co
|
||||
Nothing -> do
|
||||
co <- liftIO $ open (view dbFilePath env)
|
||||
liftIO $ atomically $ writeTVar (view dbConn env) (Just co)
|
||||
pure co
|
||||
|
||||
dbEnv0 :: (MonadIO m, MonadMask m) => DB m () -> FilePath -> m DBEnv
|
||||
dbEnv0 dbInit fp = do
|
||||
trace "dbEnv called"
|
||||
let dir = takeDirectory fp
|
||||
liftIO $ createDirectoryIfMissing True dir
|
||||
env0 <- DBEnv fp "" <$> liftIO (newTVarIO Nothing)
|
||||
void $ withDB env0 dbInit
|
||||
cookie <- withDB env0 $ readOrCreateCookie
|
||||
DBEnv fp cookie <$> liftIO (newTVarIO Nothing)
|
||||
|
||||
dbEnv :: (MonadIO m, MonadMask m) => FilePath -> m DBEnv
|
||||
dbEnv = dbEnv0 stateInit
|
||||
|
||||
dbEnvReadOnly :: (MonadIO m, MonadMask m) => FilePath -> m DBEnv
|
||||
dbEnvReadOnly = dbEnv0 none
|
||||
|
||||
withDB :: (MonadIO m, MonadMask m) => DBEnv -> DB m a -> m a
|
||||
withDB env action = do
|
||||
trace $ "** DB run with COOKIE" <+> viaShow (view dbCookie env)
|
||||
conn <- initConnection env
|
||||
finally (runReaderT (fromDB action) env) $ do
|
||||
-- NOTE: we must not close the connection here; it is cached in DBEnv and reused.
|
||||
pure ()
|
||||
|
||||
shutdownDB :: MonadIO m => DBEnv -> m ()
|
||||
shutdownDB env = liftIO do
|
||||
co <- atomically do
|
||||
conn <- readTVar (view dbConn env)
|
||||
writeTVar (view dbConn env) Nothing
|
||||
pure conn
|
||||
maybe1 co none close
|
||||
|
||||
stateInit :: (MonadIO m, MonadThrow m) => DB m ()
|
||||
stateInit = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute_ conn [qc|
|
||||
create table if not exists logrefval
|
||||
( loghash text not null
|
||||
, refname text not null
|
||||
, refval text not null
|
||||
, primary key (loghash, refname)
|
||||
)
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
create table if not exists logobject
|
||||
( loghash text not null
|
||||
, type text not null
|
||||
, githash text not null
|
||||
, primary key (loghash, githash)
|
||||
)
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
create table if not exists logcommitparent
|
||||
( kommit text not null
|
||||
, parent text not null
|
||||
, primary key (kommit,parent)
|
||||
)
|
||||
|]
|
||||
|
||||
forM_ ["logimported", "tranimported", "refimported"] $ \t -> do
|
||||
here <- colExists conn t "cookie"
|
||||
unless here $ liftIO do
|
||||
liftIO $ execute_ conn [qc|
|
||||
DROP TABLE IF EXISTS {t};
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
create table if not exists logimported
|
||||
( hash text not null
|
||||
, cookie text not null
|
||||
, primary key (hash, cookie)
|
||||
)
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
create table if not exists refimported
|
||||
( hash text not null
|
||||
, cookie text not null
|
||||
, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
, primary key (hash, cookie)
|
||||
)
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
create table if not exists tranimported
|
||||
( hash text not null
|
||||
, cookie text not null
|
||||
, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
, primary key (hash, cookie)
|
||||
)
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
DROP VIEW IF EXISTS v_refval_actual;
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
CREATE TABLE IF NOT EXISTS logcommitdepth
|
||||
( kommit text not null
|
||||
, depth integer not null
|
||||
, primary key (kommit)
|
||||
);
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
CREATE TABLE IF NOT EXISTS logrank
|
||||
( hash text not null
|
||||
, rank int not null
|
||||
, primary key (hash)
|
||||
);
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
CREATE TABLE IF NOT EXISTS cookie
|
||||
( cookie text not null
|
||||
, primary key (cookie)
|
||||
);
|
||||
|]
|
||||
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
CREATE TABLE IF NOT EXISTS groupkeylocal
|
||||
( keyhash text not null
|
||||
, ref text not null
|
||||
, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
, valuehash text not null
|
||||
, primary key (keyhash)
|
||||
);
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
CREATE TABLE IF NOT EXISTS gk1
|
||||
( gk0 text not null
|
||||
, pk text not null
|
||||
, gk1 text not null
|
||||
, primary key (gk0, pk)
|
||||
);
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
CREATE TABLE IF NOT EXISTS processed
|
||||
( hash text not null
|
||||
, cookie text not null
|
||||
, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
, primary key (hash)
|
||||
);
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
DROP VIEW IF EXISTS v_log_depth;
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
DROP VIEW IF EXISTS v_refval_actual;
|
||||
|]
|
||||
|
||||
liftIO $ execute_ conn [qc|
|
||||
CREATE VIEW v_refval_actual AS
|
||||
WITH ranks AS (
|
||||
SELECT rv.refname,
|
||||
MAX(COALESCE(d.depth, 0)) as max_depth,
|
||||
MAX(COALESCE(r.rank, 0)) as max_rank
|
||||
FROM logrefval rv
|
||||
LEFT JOIN logcommitdepth d ON rv.refval = d.kommit
|
||||
LEFT JOIN logrank r ON r.hash = rv.loghash
|
||||
GROUP BY rv.refname
|
||||
)
|
||||
SELECT r.refname, rv.refval, r.max_rank as r, r.max_depth as d
|
||||
FROM logrefval rv
|
||||
JOIN ranks r ON r.refname = rv.refname
|
||||
WHERE
|
||||
(
|
||||
(r.max_rank > 0 AND rv.loghash IN (SELECT hash FROM logrank WHERE rank = r.max_rank))
|
||||
OR (r.max_rank = 0 AND rv.refval IN (SELECT kommit FROM logcommitdepth WHERE depth = r.max_depth))
|
||||
)
|
||||
AND rv.refval <> '0000000000000000000000000000000000000000'
|
||||
ORDER BY r.refname;
|
||||
|]
|
||||
|
||||
void $ readOrCreateCookie
|
||||
|
||||
where
|
||||
colExists :: MonadIO m => Connection -> String -> String -> m Bool
|
||||
colExists conn table col = do
|
||||
let sql = [qc|PRAGMA table_info({table})|]
|
||||
fields <- liftIO $ query_ conn sql
|
||||
let fs = [x | ((_, x, _, _, _, _) :: (Int, String, String, Int, Maybe String, Int)) <- fields ]
|
||||
pure ( col `elem` fs )
|
||||
|
||||
readOrCreateCookie :: (MonadIO m, MonadThrow m) => DB m Cookie
|
||||
readOrCreateCookie = do
|
||||
cfn <- cookieFile
|
||||
cf <- liftIO $ readFile cfn <&> take 4096
|
||||
|
||||
if null cf then do
|
||||
cookie <- stateGenCookie
|
||||
liftIO $ Text.writeFile cfn (fromCookie cookie)
|
||||
pure cookie
|
||||
else do
|
||||
let cookie@(Cookie co) = Cookie (fromString cf)
|
||||
statePutCookie cookie
|
||||
pure cookie
|
||||
|
||||
newtype Savepoint =
|
||||
Savepoint String
|
||||
deriving newtype (IsString)
|
||||
deriving stock (Eq,Ord)
|
||||
|
||||
savepointNew :: forall m . MonadIO m => DB m Savepoint
|
||||
savepointNew = do
|
||||
uu <- liftIO UUID.nextRandom
|
||||
let s = LBS.pack (show uu) & hashObject @HbSync & pretty & show
|
||||
pure $ fromString ("sp" <> s)
|
||||
|
||||
savepointBegin :: forall m . MonadIO m => Savepoint -> DB m ()
|
||||
savepointBegin (Savepoint sp) = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute_ conn [qc|SAVEPOINT {sp}|]
|
||||
|
||||
savepointRelease :: forall m . MonadIO m => Savepoint -> DB m ()
|
||||
savepointRelease (Savepoint sp) = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute_ conn [qc|RELEASE SAVEPOINT {sp}|]
|
||||
|
||||
savepointRollback :: forall m . MonadIO m => Savepoint -> DB m ()
|
||||
savepointRollback (Savepoint sp) = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute_ conn [qc|ROLLBACK TO SAVEPOINT {sp}|]
|
||||
|
||||
transactional :: forall a m . (MonadCatch m, MonadIO m) => DB m a -> DB m a
|
||||
transactional action = do
|
||||
|
||||
sp <- savepointNew
|
||||
|
||||
savepointBegin sp
|
||||
r <- try action
|
||||
|
||||
case r of
|
||||
Left (e :: SomeException) -> do
|
||||
savepointRollback sp
|
||||
throwM e
|
||||
|
||||
Right x -> do
|
||||
savepointRelease sp
|
||||
pure x
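
-- Usage sketch (hypothetical call site, not part of the original module):
-- because `transactional` is built on named SQLite savepoints rather than
-- BEGIN/COMMIT, such blocks can be nested, and a failure inside rolls back
-- only its own savepoint before rethrowing.
importOneRefVal :: (MonadIO m, MonadCatch m) => HashRef -> GitRef -> GitHash -> DB m ()
importOneRefVal lh ref val = transactional $ do
  statePutLogRefVal (lh, ref, val)   -- record the ref value seen in this log
  statePutLogImported lh             -- mark the log as imported for this cookie
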
|
||||
|
||||
-- TODO: backlog-head-history
-- We could add a history table that keeps a journal of every head
-- update; that would make it possible to roll the repository back
-- to any previous state. A sketch follows below.

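-- A minimal sketch of that journal, reusing the sqlite-simple helpers of
-- this module; the table name `loghistory` and both functions below are
-- hypothetical and do not exist in the codebase.
stateInitHistory :: MonadIO m => DB m ()
stateInitHistory = do
  conn <- stateConnection
  liftIO $ execute_ conn [qc|
    create table if not exists loghistory
      ( refname   text not null
      , refval    text not null
      , timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
      )
  |]

statePutHistory :: MonadIO m => (GitRef, GitHash) -> DB m ()
statePutHistory row = do
  conn <- stateConnection
  liftIO $ execute conn [qc|
    insert into loghistory (refname, refval) values (?, ?)
  |] row
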
statePutLogRefVal :: MonadIO m => (HashRef, GitRef, GitHash) -> DB m ()
|
||||
statePutLogRefVal row = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute conn [qc|
|
||||
insert into logrefval (loghash,refname,refval) values(?,?,?)
|
||||
on conflict (loghash,refname) do nothing
|
||||
|] row
|
||||
|
||||
|
||||
statePutLogObject :: MonadIO m => (HashRef, GitObjectType, GitHash) -> DB m ()
|
||||
statePutLogObject row = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute conn [qc|
|
||||
insert into logobject (loghash,type,githash) values(?,?,?)
|
||||
on conflict (loghash,githash) do nothing
|
||||
|] row
|
||||
|
||||
stateIsLogObjectExists :: MonadIO m => GitHash -> DB m Bool
|
||||
stateIsLogObjectExists h = do
|
||||
conn <- stateConnection
|
||||
liftIO $ query conn [qc|
|
||||
SELECT NULL FROM logobject WHERE githash = ? LIMIT 1
|
||||
|] (Only h) <&> isJust . listToMaybe . fmap (fromOnly @(Maybe Int))
|
||||
|
||||
|
||||
stateGetGitLogObject :: MonadIO m => GitHash -> DB m (Maybe HashRef)
|
||||
stateGetGitLogObject h = do
|
||||
conn <- stateConnection
|
||||
liftIO $ query conn [qc|
|
||||
SELECT loghash FROM logobject
|
||||
WHERE githash = ? and type in ('commit', 'tree', 'blob')
|
||||
LIMIT 1
|
||||
|] (Only h) <&> listToMaybe . fmap fromOnly
|
||||
|
||||
statePutLogContextCommit :: MonadIO m => HashRef -> GitHash -> DB m ()
|
||||
statePutLogContextCommit loghash ctx = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute conn [qc|
|
||||
insert into logobject (loghash,type,githash) values(?,'context',?)
|
||||
on conflict (loghash,githash) do nothing
|
||||
|] (loghash,ctx)
|
||||
|
||||
|
||||
statePutLogContextRank :: MonadIO m => HashRef -> Int -> DB m ()
|
||||
statePutLogContextRank loghash rank = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute conn [qc|
|
||||
insert into logrank (hash,rank) values(?,?)
|
||||
on conflict (hash) do nothing
|
||||
|] (loghash,rank)
|
||||
|
||||
statePutLogCommitParent :: MonadIO m => (GitHash, GitHash) -> DB m ()
|
||||
statePutLogCommitParent row = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute conn [qc|
|
||||
insert into logcommitparent (kommit,parent) values(?,?)
|
||||
on conflict (kommit,parent) do nothing
|
||||
|] row
|
||||
|
||||
|
||||
statePutLogImported :: MonadIO m => HashRef -> DB m ()
|
||||
statePutLogImported h = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
liftIO $ execute conn [qc|
|
||||
insert into logimported (hash,cookie) values(?,?)
|
||||
on conflict (hash,cookie) do nothing
|
||||
|] (h,cookie)
|
||||
|
||||
stateGetLogImported :: MonadIO m => HashRef -> DB m Bool
|
||||
stateGetLogImported h = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
r <- liftIO $ query @_ @(Only Int) conn [qc|
|
||||
select 1 from logimported where hash = ? and cookie = ? limit 1
|
||||
|] (h, cookie)
|
||||
pure $ not $ null r
|
||||
|
||||
statePutRefImported :: MonadIO m => HashRef -> DB m ()
|
||||
statePutRefImported h = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
liftIO $ execute conn [qc|
|
||||
insert into refimported (hash,cookie) values(?,?)
|
||||
on conflict (hash,cookie) do nothing
|
||||
|] (h,cookie)
|
||||
|
||||
stateGetRefImported :: MonadIO m => HashRef -> DB m Bool
|
||||
stateGetRefImported h = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
r <- liftIO $ query @_ @(Only Int) conn [qc|
|
||||
select 1 from refimported where hash = ? and cookie = ? limit 1
|
||||
|] (h, cookie)
|
||||
pure $ not $ null r
|
||||
|
||||
statePutTranImported :: MonadIO m => HashRef -> DB m ()
|
||||
statePutTranImported h = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
debug $ "statePutTranImported" <+> pretty h <+> viaShow cookie
|
||||
liftIO $ execute conn [qc|
|
||||
insert into tranimported (hash, cookie) values(?, ?)
|
||||
on conflict (hash, cookie) do nothing
|
||||
|] (h, cookie)
|
||||
|
||||
stateGetTranImported :: MonadIO m => HashRef -> DB m Bool
|
||||
stateGetTranImported h = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
r <- liftIO $ query @_ @(Only Int) conn [qc|
|
||||
select 1 from tranimported where hash = ? and cookie = ? limit 1
|
||||
|] (h, cookie)
|
||||
pure $ not $ null r
|
||||
|
||||
stateGetAllTranImported :: MonadIO m => DB m [HashRef]
|
||||
stateGetAllTranImported = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
results <- liftIO $ query conn [qc|
|
||||
select hash from tranimported where cookie = ?
|
||||
|] (Only cookie)
|
||||
pure $ map fromOnly results
|
||||
|
||||
stateGetImportedCommits :: MonadIO m => DB m [GitHash]
|
||||
stateGetImportedCommits = do
|
||||
conn <- stateConnection
|
||||
liftIO $ query_ conn [qc|
|
||||
select distinct(githash) from logobject where type = 'commit'
|
||||
|] <&> fmap fromOnly
|
||||
|
||||
stateGetActualRefs :: MonadIO m => DB m [(GitRef, GitHash)]
|
||||
stateGetActualRefs = do
|
||||
conn <- stateConnection
|
||||
liftIO $ query_ conn [qc|
|
||||
select refname,refval from v_refval_actual
|
||||
|]
|
||||
|
||||
stateGetActualRefValue :: MonadIO m => GitRef -> DB m (Maybe GitHash)
|
||||
stateGetActualRefValue ref = do
|
||||
conn <- stateConnection
|
||||
liftIO $ query conn [qc|
|
||||
select refval from v_refval_actual
|
||||
where refname = ?
|
||||
|] (Only ref) <&> fmap fromOnly . listToMaybe
|
||||
|
||||
stateGetLastKnownCommits :: MonadIO m => Int -> DB m [GitHash]
|
||||
stateGetLastKnownCommits n = do
|
||||
conn <- stateConnection
|
||||
liftIO $ query conn [qc|
|
||||
select kommit from logcommitdepth order by depth asc limit ?;
|
||||
|] (Only n) <&> fmap fromOnly
|
||||
|
||||
stateUpdateCommitDepths :: MonadIO m => DB m ()
|
||||
stateUpdateCommitDepths = do
|
||||
conn <- stateConnection
|
||||
sp <- savepointNew
|
||||
|
||||
rows <- liftIO $ query_ @(GitHash, GitHash) conn [qc|SELECT kommit, parent FROM logcommitparent|]
|
||||
|
||||
-- TODO: check-it-works-on-huge-graphs
|
||||
let commitEdges = rows
|
||||
let (graph, nodeFromVertex, _) = graphFromEdges [(commit, commit, [parent]) | (commit, parent) <- commitEdges]
|
||||
let sortedVertices = topSort graph
|
||||
let sortedCommits = reverse [commit | vertex <- sortedVertices, let (commit, _, _) = nodeFromVertex vertex]
|
||||
let ordered = zip sortedCommits [1..]
|
||||
|
||||
savepointBegin sp
|
||||
liftIO $ execute_ conn [qc|DELETE FROM logcommitdepth|]
|
||||
forM_ ordered $ \(co, n) -> do
|
||||
liftIO $ execute conn
|
||||
[qc| INSERT INTO logcommitdepth(kommit,depth)
|
||||
VALUES(?,?)
|
||||
ON CONFLICT(kommit)
|
||||
DO UPDATE SET depth = ?
|
||||
|] (co,n,n)
|
||||
pure ()
|
||||
savepointRelease sp
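
-- A tiny worked example of the ordering above (illustration only, not part
-- of the original module). It assumes a linear history c1 <- c2 <- c3
-- recorded in logcommitparent as (kommit, parent) rows and reuses the same
-- Data.Graph calls; note that the root c1 never occurs as a kommit, so it
-- gets no depth row.
commitDepthExample :: [(String, Int)]
commitDepthExample =
  let rows = [("c3", "c2"), ("c2", "c1")]
      (graph, nodeFromVertex, _) = graphFromEdges [ (c, c, [p]) | (c, p) <- rows ]
      sorted = reverse [ c | v <- topSort graph, let (c, _, _) = nodeFromVertex v ]
  in zip sorted [1 ..]   -- == [("c2",1),("c3",2)]: parents get the smaller depths
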
|
||||
|
||||
|
||||
statePutCookie :: MonadIO m => Cookie -> DB m ()
|
||||
statePutCookie cookie = do
|
||||
conn <- stateConnection
|
||||
let sql = [qc|INSERT INTO cookie (cookie) values(?) ON CONFLICT(cookie) DO NOTHING|]
|
||||
liftIO $ execute conn sql (Only cookie)
|
||||
|
||||
stateGenCookie :: (MonadIO m) => DB m Cookie
|
||||
stateGenCookie = do
|
||||
conn <- stateConnection
|
||||
fix \next -> do
|
||||
cookie <- liftIO (UUID.nextRandom <&> (fromString @Cookie . show))
|
||||
|
||||
here <- liftIO $ query conn [qc|select 1 from cookie where cookie = ? limit 1|] (Only cookie)
|
||||
<&> listToMaybe @(Only Int)
|
||||
|
||||
if isJust here then do
|
||||
next
|
||||
else liftIO do
|
||||
void $ execute conn [qc|insert into cookie (cookie) values(?)|] (Only cookie)
|
||||
pure cookie
|
||||
|
||||
|
||||
stateListLocalKeys :: MonadIO m => DB m [HashRef]
|
||||
stateListLocalKeys = do
|
||||
undefined
|
||||
|
||||
stateGetLocalKey :: MonadIO m
|
||||
=> KeyInfo
|
||||
-> DB m (Maybe HashRef)
|
||||
stateGetLocalKey ki = do
|
||||
conn <- stateConnection
|
||||
let h = hashObject @HbSync ki & HashRef
|
||||
liftIO $ query conn [qc|select valuehash from groupkeylocal where keyhash = ? limit 1|] (Only h)
|
||||
<&> fmap fromOnly . listToMaybe
|
||||
|
||||
statePutLocalKey :: MonadIO m
|
||||
=> KeyInfo
|
||||
-> HashRef
|
||||
-> RefLogKey HBS2Basic
|
||||
-> DB m ()
|
||||
|
||||
statePutLocalKey ki gkh reflog = do
|
||||
conn <- stateConnection
|
||||
let sql = [qc|
|
||||
INSERT INTO groupkeylocal (keyhash, ref, valuehash)
|
||||
VALUES (?,?,?)
|
||||
ON CONFLICT (keyhash) DO UPDATE SET
|
||||
ref = excluded.ref, valuehash = excluded.valuehash
|
||||
|]
|
||||
|
||||
liftIO $ execute conn sql (HashRef (hashObject @HbSync ki), reflog, gkh)
|
||||
pure ()
|
||||
|
||||
|
||||
statePutProcessed :: (MonadIO m, Hashed HbSync b) => b -> DB m ()
|
||||
statePutProcessed h = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
liftIO $ execute conn [qc|
|
||||
insert into processed (hash, cookie) values (?, ?)
|
||||
on conflict (hash) do nothing
|
||||
|] (HashRef (hashObject @HbSync h), cookie)
|
||||
|
||||
stateGetProcessed :: (MonadIO m, Hashed HbSync b) => b -> DB m Bool
|
||||
stateGetProcessed h = do
|
||||
conn <- stateConnection
|
||||
cookie <- asks (view dbCookie)
|
||||
r <- liftIO $ query @_ @(Only Int) conn [qc|
|
||||
select 1 from processed where hash = ? and cookie = ? limit 1
|
||||
|] (HashRef (hashObject @HbSync h), cookie)
|
||||
pure $ not $ null r
|
||||
|
||||
|
||||
statePutGK1 :: MonadIO m => HashRef
|
||||
-> PubKey 'Encrypt HBS2Basic
|
||||
-> GroupKey 'Symm HBS2Basic
|
||||
-> DB m ()
|
||||
|
||||
statePutGK1 gk0 pk gk1 = do
|
||||
conn <- stateConnection
|
||||
liftIO $ execute conn [qc|
|
||||
insert into gk1 (gk0, pk, gk1) values (?, ?, ?)
|
||||
on conflict (gk0, pk) do nothing
|
||||
|] (gk0, Base58Field pk, Base58Field gk1)
|
||||
|
||||
stateGetGK1 :: MonadIO m
|
||||
=> HashRef
|
||||
-> PubKey 'Encrypt HBS2Basic
|
||||
-> DB m (Maybe (GroupKey 'Symm HBS2Basic))
|
||||
|
||||
stateGetGK1 gk0 pk = do
|
||||
conn <- stateConnection
|
||||
r <- liftIO $ query conn [qc|
|
||||
select gk1 from gk1 where gk0 = ? and pk = ? limit 1
|
||||
|] (gk0, Base58Field pk)
|
||||
pure $ listToMaybe $ fmap (unBaseB8Field . fromOnly) r
|
||||
|
||||
stateListGK1 :: MonadIO m
|
||||
=> HashRef
|
||||
-> DB m [GroupKey 'Symm HBS2Basic]
|
||||
|
||||
stateListGK1 gk0 = do
|
||||
conn <- stateConnection
|
||||
r <- liftIO $ query conn [qc|
|
||||
select gk1 from gk1 where gk0 = ?
|
||||
|] (Only gk0)
|
||||
pure $ fmap (unBaseB8Field . fromOnly) r
|
||||
|
|
@ -1,323 +0,0 @@
|
|||
module HBS2Git.Tools where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Base58
|
||||
import HBS2.Net.Proto.Types
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.Data.Types.Refs (HashRef)
|
||||
import HBS2.OrDie
|
||||
|
||||
import HBS2.System.Logger.Simple
|
||||
|
||||
import HBS2Git.Types
|
||||
import HBS2Git.App
|
||||
|
||||
import HBS2.Git.Local.CLI
|
||||
import HBS2.Git.Types
|
||||
import HBS2Git.Import (importRefLogNew)
|
||||
import HBS2Git.Config
|
||||
import HBS2Git.State
|
||||
import HBS2Git.PrettyStuff
|
||||
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.ByteString.Char8 qualified as BS8
|
||||
import Data.Text qualified as Text
|
||||
import Data.Traversable
|
||||
import Data.Maybe
|
||||
import Data.Either
|
||||
import Prettyprinter.Render.Terminal
|
||||
import Control.Monad.IO.Unlift
|
||||
import Control.Monad.Catch (MonadCatch,MonadThrow,MonadMask)
|
||||
import Data.Generics.Product (field)
|
||||
import Lens.Micro.Platform
|
||||
import System.FilePath
|
||||
import System.Directory
|
||||
import System.Process.Typed
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import System.IO.Temp
|
||||
import System.IO (stdout,stderr)
|
||||
|
||||
import UnliftIO
|
||||
|
||||
data EncryptionOpts =
|
||||
EncryptionOpts
|
||||
{ encryptKeyring :: FilePath
|
||||
, encryptKey :: PubKey 'Encrypt HBS2Basic
|
||||
}
|
||||
deriving stock Generic
|
||||
|
||||
data NewRepoOpts =
|
||||
NewRepoOpts
|
||||
{ newRepoKeyring :: Maybe FilePath
|
||||
, newRepoEncryption :: Maybe (PubKey 'Encrypt HBS2Basic, FilePath)
|
||||
}
|
||||
deriving stock (Generic)
|
||||
|
||||
data AsRemoteEntry = AsRemoteEntry
|
||||
{ remoteName :: Text,
|
||||
remoteURL :: Text,
|
||||
remoteRefValue :: Maybe HashRef
|
||||
}
|
||||
|
||||
remoteNameColWidth :: Int
|
||||
remoteNameColWidth = 16
|
||||
|
||||
remoteURLColWidth :: Int
|
||||
remoteURLColWidth = 51
|
||||
|
||||
remoteRefValueColWidth :: Int
|
||||
remoteRefValueColWidth = 44
|
||||
|
||||
instance Pretty AsRemoteEntry where
|
||||
pretty (AsRemoteEntry {..}) =
|
||||
fill remoteNameColWidth (pretty remoteName)
|
||||
<+> fill remoteURLColWidth (pretty remoteURL)
|
||||
<+> fill remoteRefValueColWidth (maybe "-" pretty remoteRefValue)
|
||||
|
||||
hbs2Prefix :: Text
|
||||
hbs2Prefix = "hbs2://"
|
||||
|
||||
-- TODO: backlog-list-refs-all-option
-- Add an --all option that prints every ref known to the state,
-- not only the locally configured hbs2:// remotes shown today.
-- A rough sketch follows runListRefs below.

runListRefs :: (MonadIO m, HasStorage (App m)) => App m ()
|
||||
runListRefs = do
|
||||
refs <- gitGetRemotes <&> filter isHbs2
|
||||
remoteEntries <-
|
||||
forM
|
||||
refs
|
||||
( \(name, url) -> do
|
||||
refVal <- getRefVal url
|
||||
pure $
|
||||
AsRemoteEntry
|
||||
{ remoteName = name,
|
||||
remoteURL = url,
|
||||
remoteRefValue = refVal
|
||||
}
|
||||
)
|
||||
let header =
|
||||
fill remoteNameColWidth (green "Name")
|
||||
<+> fill remoteURLColWidth (green "URL")
|
||||
<+> fill remoteRefValueColWidth (green "Reference value")
|
||||
liftIO $ putDoc $ header <> line
|
||||
liftIO $ putDoc $ vcat $ pretty <$> remoteEntries
|
||||
where
|
||||
isHbs2 (_, b) = Text.isPrefixOf hbs2Prefix b
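
-- A rough sketch of the --all behaviour mentioned above (hypothetical, not
-- part of the original module): read every (ref, value) pair straight from
-- the state of one reflog instead of walking the configured git remotes,
-- reusing makeDbPath / dbEnv / stateGetActualRefs exactly as runToolsGetRefs
-- does below.
runListRefsAll :: (MonadUnliftIO m, MonadCatch m, MonadMask m) => RepoRef -> App m ()
runListRefsAll ref = do
  db   <- makeDbPath ref >>= dbEnv
  refs <- withDB db stateGetActualRefs
  liftIO $ putDoc $ vcat [ pretty h <+> pretty r | (r, h) <- refs ] <> line
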
|
||||
|
||||
runToolsScan :: (MonadUnliftIO m,MonadCatch m,MonadMask m,HasStorage (App m)) => RepoRef -> App m ()
|
||||
runToolsScan ref = do
|
||||
trace $ "runToolsScan" <+> pretty ref
|
||||
importRefLogNew True ref
|
||||
shutUp
|
||||
pure ()
|
||||
|
||||
runToolsGetRefs :: (MonadUnliftIO m,MonadCatch m,MonadMask m) => RepoRef -> App m ()
|
||||
runToolsGetRefs ref = do
|
||||
db <- makeDbPath ref >>= dbEnv
|
||||
refs <- withDB db stateGetActualRefs
|
||||
let rh = RepoHead Nothing (HashMap.fromList refs)
|
||||
hPrint stdout $ pretty (AsGitRefsFile rh)
|
||||
shutUp
|
||||
|
||||
getRefVal :: (MonadIO m, HasStorage m) => Text -> m (Maybe HashRef)
|
||||
getRefVal url =
|
||||
case Text.stripPrefix hbs2Prefix url of
|
||||
Nothing -> do
|
||||
liftIO $ print $ pretty "wrong URL format" <+> pretty url
|
||||
pure Nothing
|
||||
Just refStr -> case fromStringMay $ Text.unpack refStr of
|
||||
Nothing -> do
|
||||
liftIO $ print $ pretty "can't parse ref" <+> pretty refStr
|
||||
pure Nothing
|
||||
Just ref -> do
|
||||
mRefVal <- readRef ref
|
||||
case mRefVal of
|
||||
Nothing -> do
|
||||
liftIO $ print $ pretty "readRef error" <+> pretty ref
|
||||
pure Nothing
|
||||
Just v -> pure $ Just v
|
||||
|
||||
|
||||
|
||||
runInitRepo :: (MonadUnliftIO m, MonadThrow m, MonadCatch m) => NewRepoOpts -> m ()
|
||||
runInitRepo = runInitInteractive
|
||||
|
||||
runInitInteractive :: (MonadUnliftIO m, MonadThrow m, MonadCatch m) => NewRepoOpts -> m ()
|
||||
runInitInteractive opts = do
|
||||
|
||||
hSetBuffering stdin NoBuffering
|
||||
hSetBuffering stdout LineBuffering
|
||||
|
||||
conf <- configPath ""
|
||||
`catch`
|
||||
(\NoWorkDirException -> do
|
||||
liftIO $ hPutDoc stderr $ red "init:"
|
||||
<+> "No git working directory."
|
||||
<+> yellow "Run" <+> "'git init'" <+> "first"
|
||||
<> line
|
||||
die "nope"
|
||||
)
|
||||
|
||||
rpc <- (Just <$> detectRPC False)
|
||||
`catch`
|
||||
(\NoRPCException -> do
|
||||
liftIO $ hPutDoc stderr $ yellow "init:"
|
||||
<+> "No RPC found."
|
||||
<+> "Perhaps, hbs2-peer is down"
|
||||
<> line
|
||||
<> "Okay, you may add it later"
|
||||
<> line
|
||||
pure Nothing
|
||||
)
|
||||
|
||||
let confFile = conf </> "config"
|
||||
|
||||
liftIO $ createDirectoryIfMissing True conf
|
||||
|
||||
confHere <- liftIO $ doesFileExist confFile
|
||||
|
||||
when confHere do
|
||||
liftIO $ hPutDoc stdout $ yellow "Config"
|
||||
<+> pretty confFile
|
||||
<+> yellow "is already here."
|
||||
<+> "Continue? [y/n]: "
|
||||
|
||||
liftIO $ hFlush stdout
|
||||
|
||||
y <- liftIO getChar
|
||||
|
||||
unless (y `elem` "'yY ") do
|
||||
exitFailure
|
||||
|
||||
liftIO $ hPutStrLn stdout ""
|
||||
|
||||
syn <- if not confHere then do
|
||||
pure (mempty :: [Syntax C])
|
||||
else do
|
||||
liftIO $ try @_ @IOException (readFile confFile)
|
||||
<&> fromRight mempty
|
||||
<&> parseTop
|
||||
<&> fromRight mempty
|
||||
|
||||
let rpcHere = or [ True | (SymbolVal "rpc" :: Syntax C) <- universeBi syn ]
|
||||
|
||||
maybe1 rpc none $ \r -> do
|
||||
unless rpcHere $ liftIO do
|
||||
appendFile confFile $ show
|
||||
$ "rpc" <+> "unix" <+> dquotes (pretty r)
|
||||
<> line
|
||||
<> line
|
||||
|
||||
puk <- case view (field @"newRepoKeyring") opts of
|
||||
Just kr -> liftIO do
|
||||
addKeyring confFile kr
|
||||
|
||||
Nothing -> do
|
||||
tmp <- liftIO $ emptyTempFile "." "reflog.key"
|
||||
|
||||
code <- runProcess (shell [qc|hbs2 keyring-new > {tmp}|])
|
||||
|
||||
unless (code == ExitSuccess) do
|
||||
liftIO $ hPutDoc stderr $ red "init:" <+> "can't generate new keyring file" <> line
|
||||
die "nope"
|
||||
|
||||
addKeyring confFile tmp
|
||||
|
||||
|
||||
encrypt <- if isJust (view (field @"newRepoEncryption") opts) then do
|
||||
pure True
|
||||
else do
|
||||
liftIO $ hPutDoc stdout $ yellow "Make reflog" <+> pretty (AsBase58 puk)
|
||||
<+> "encrypted?"
|
||||
<+> "[y/n]: "
|
||||
|
||||
liftIO $ hFlush stdout
|
||||
|
||||
y2 <- liftIO getChar
|
||||
|
||||
liftIO $ hPutStrLn stdout ""
|
||||
|
||||
pure $ y2 `elem` "'yY "
|
||||
|
||||
when encrypt do
|
||||
let enc = view (field @"newRepoEncryption") opts
|
||||
|
||||
case enc of
|
||||
Just (epuk, fp') -> do
|
||||
fp <- liftIO $ makeAbsolute fp'
|
||||
addDecrypt confFile fp
|
||||
addEncrypted confFile puk epuk
|
||||
|
||||
Nothing -> do
|
||||
tmp <- liftIO $ emptyTempFile "." "cred.key"
|
||||
|
||||
code <- runProcess (shell [qc|hbs2 keyring-new -n1 > {tmp}|])
|
||||
|
||||
fp <- liftIO $ makeAbsolute tmp
|
||||
|
||||
ke <- readPubKeyFrom fp
|
||||
addDecrypt confFile fp
|
||||
addEncrypted confFile puk ke
|
||||
|
||||
pure ()
|
||||
|
||||
pure ()
|
||||
|
||||
liftIO $ hPutDoc stderr $ green "Succeed!" <> line <> line
|
||||
liftIO $ hPutDoc stderr $ pretty confFile <> line <> line
|
||||
liftIO $ readFile confFile >>= putStrLn
|
||||
|
||||
where
|
||||
|
||||
readPubKeyFrom fp = do
|
||||
bs <- liftIO $ BS8.readFile fp
|
||||
cred <- pure (parseCredentials @HBS2Basic (AsCredFile bs))
|
||||
`orDie` [qc|invalid credentials file {fp}|]
|
||||
|
||||
pure (view krPk <$> headMay (view peerKeyring cred))
|
||||
`orDie` [qc|invalid credentials file {fp}|]
|
||||
|
||||
addEncrypted fn puk enc = liftIO do
|
||||
|
||||
appendFile fn $ show $
|
||||
line
|
||||
<> brackets ( "encrypted" <+> dquotes (pretty (AsBase58 puk))
|
||||
<> line
|
||||
<> parens ("ttl" <+> pretty 864000)
|
||||
<> line
|
||||
<> parens ("owner" <+> dquotes (pretty (AsBase58 enc)))
|
||||
<> line
|
||||
)
|
||||
<> line
|
||||
|
||||
pure ()
|
||||
|
||||
addDecrypt fn kf = liftIO do
|
||||
appendFile fn $ show
|
||||
$ ";; this keyring is a SECRET for encryption/decryption"
|
||||
<> line
|
||||
<> ";; move it to a private/safe place"
|
||||
<> line
|
||||
<> "decrypt" <+> dquotes (pretty kf)
|
||||
<> line
|
||||
|
||||
addKeyring fn kr = liftIO do
|
||||
fp <- makeAbsolute kr
|
||||
|
||||
bs <- BS8.readFile fp
|
||||
cred <- pure (parseCredentials @HBS2Basic (AsCredFile bs))
|
||||
`orDie` [qc|invalid credentials file {fp}|]
|
||||
|
||||
let puk = view peerSignPk cred
|
||||
|
||||
liftIO $ hPutDoc stdout $ yellow "Adding reflog" <+> pretty (AsBase58 puk) <> line
|
||||
appendFile fn $ show $ ";; SECRET keyring for reflog" <+> pretty (AsBase58 puk) <> line
|
||||
appendFile fn $ show $ ";; move it to a private/safe place" <> line
|
||||
appendFile fn $ show line
|
||||
appendFile fn $ show $ "keyring" <+> dquotes (pretty fp) <> line <> line
|
||||
|
||||
pure puk
|
||||
|
||||
|
|
@ -1,237 +0,0 @@
|
|||
{-# Language PatternSynonyms #-}
|
||||
{-# Language UndecidableInstances #-}
|
||||
{-# Language TemplateHaskell #-}
|
||||
{-# Language AllowAmbiguousTypes #-}
|
||||
module HBS2Git.Types
|
||||
( module HBS2Git.Types
|
||||
, module Control.Monad.IO.Class
|
||||
, HasStorage(..)
|
||||
, HasConf(..)
|
||||
, AnyStorage(..)
|
||||
, RefLogKey(..)
|
||||
)
|
||||
where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Hash
|
||||
import HBS2.Git.Types
|
||||
import HBS2.Storage
|
||||
import HBS2.Peer.RPC.Client.Unix hiding (Cookie)
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.Peer.Proto hiding (Cookie)
|
||||
|
||||
import HBS2.Peer.RPC.API.Peer
|
||||
import HBS2.Peer.RPC.API.RefLog
|
||||
import HBS2.Peer.RPC.API.Storage
|
||||
|
||||
import HBS2.System.Logger.Simple
|
||||
|
||||
import Data.Config.Suckless
|
||||
|
||||
import System.ProgressBar
|
||||
import System.Exit as Exit
|
||||
import Control.Applicative
|
||||
import Control.Monad.IO.Class
|
||||
import Control.Monad.Reader
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS
|
||||
import Database.SQLite.Simple (Connection)
|
||||
import Data.Char (isSpace)
|
||||
import Data.List qualified as List
|
||||
import Lens.Micro.Platform
|
||||
import Data.HashMap.Strict (HashMap)
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Control.Concurrent.STM
|
||||
import System.IO qualified as IO
|
||||
import System.IO (Handle)
|
||||
import Data.Kind
|
||||
import Control.Monad.Catch
|
||||
import Control.Monad.IO.Unlift
|
||||
|
||||
import System.TimeIt
|
||||
|
||||
-- FIXME: remove-udp-hardcode-asap
|
||||
type Schema = HBS2Basic
|
||||
type HBS2L4Proto = L4Proto
|
||||
|
||||
-- FIXME: introduce-API-type
|
||||
type API = String
|
||||
|
||||
newtype Cookie =
|
||||
Cookie { fromCookie :: Text }
|
||||
deriving newtype (Eq,Ord,Show)
|
||||
|
||||
instance IsString Cookie where
|
||||
fromString s = Cookie cookie
|
||||
where cookie = fromString $ take 8
|
||||
$ show
|
||||
$ pretty
|
||||
$ hashObject @HbSync (LBS.pack s)
|
||||
data DBEnv =
|
||||
DBEnv { _dbFilePath :: FilePath
|
||||
, _dbCookie :: Cookie
|
||||
, _dbConn :: TVar (Maybe Connection)
|
||||
}
|
||||
|
||||
makeLenses 'DBEnv
|
||||
|
||||
type RepoRef = RefLogKey Schema
|
||||
|
||||
data ConfBranch
|
||||
data HeadBranch
|
||||
data KeyRingFile
|
||||
data KeyRingFiles
|
||||
data StoragePref
|
||||
|
||||
data RPCEndpoints =
|
||||
RPCEndpoints
|
||||
{ rpcPeer :: ServiceCaller PeerAPI UNIX
|
||||
, rpcStorage :: ServiceCaller StorageAPI UNIX
|
||||
, rpcRefLog :: ServiceCaller RefLogAPI UNIX
|
||||
}
|
||||
|
||||
data AppEnv =
|
||||
AppEnv
|
||||
{ _appCurDir :: FilePath
|
||||
, _appGitDir :: FilePath
|
||||
, _appConf :: [Syntax C]
|
||||
, _appStateDir :: FilePath
|
||||
, _appRefCred :: TVar (HashMap RepoRef (PeerCredentials Schema))
|
||||
, _appKeys :: TVar (HashMap (PubKey 'Encrypt Schema) (PrivKey 'Encrypt Schema))
|
||||
, _appOpts :: TVar (HashMap String String)
|
||||
, _appRpc :: RPCEndpoints
|
||||
}
|
||||
|
||||
makeLenses 'AppEnv
|
||||
|
||||
newtype AsGitRefsFile a = AsGitRefsFile a
|
||||
|
||||
class HasRPC m where
|
||||
getRPC :: m RPCEndpoints
|
||||
|
||||
data RepoHead =
|
||||
RepoHead
|
||||
{ _repoHEAD :: Maybe GitRef
|
||||
, _repoHeads :: HashMap GitRef GitHash
|
||||
}
|
||||
deriving stock (Generic,Show)
|
||||
|
||||
makeLenses 'RepoHead
|
||||
|
||||
|
||||
instance Monoid RepoHead where
|
||||
mempty = RepoHead Nothing mempty
|
||||
|
||||
instance Semigroup RepoHead where
|
||||
(<>) a b = mempty & set repoHEAD ( view repoHEAD b <|> view repoHEAD a )
|
||||
& set repoHeads ( view repoHeads a <> view repoHeads b )
|
||||
|
||||
instance Pretty (AsGitRefsFile RepoHead) where
|
||||
pretty (AsGitRefsFile h) = hhead <> vcat (fmap fmt els)
|
||||
where
|
||||
hhead = case view repoHEAD h of
|
||||
Nothing -> mempty
|
||||
Just r -> "@" <> pretty r <+> "HEAD" <> line
|
||||
|
||||
els = HashMap.toList (view repoHeads h)
|
||||
fmt (r,hx) = pretty hx <+> pretty (normalizeRef r)
|
||||
|
||||
|
||||
instance Serialise RepoHead
|
||||
|
||||
-- FIXME: test-for-from-string-maybe-repohead
-- A test for this parser still needs to be written or generated;
-- a sketch follows the instance below.
instance FromStringMaybe RepoHead where
|
||||
fromStringMay "" = Nothing
|
||||
fromStringMay s =
|
||||
case traverse decodePair (take 2 . words <$> lines trimmed) of
|
||||
Right xs -> Just $ mconcat xs
|
||||
_ -> Nothing
|
||||
where
|
||||
trimmed = dropWhile isSpace s
|
||||
hbranch x = fromString <$> List.stripPrefix "@" x
|
||||
decodePair :: [String] -> Either [String] RepoHead
|
||||
decodePair [x, "HEAD"] | "@" `List.isPrefixOf` x = Right $ RepoHead (hbranch x) mempty
|
||||
|
||||
-- special case: deleted branch. should be handled somehow
|
||||
decodePair [_] = Right $ RepoHead Nothing mempty
|
||||
|
||||
decodePair [x,r] = case fromStringMay x of
|
||||
Just h -> Right $ RepoHead Nothing (HashMap.singleton (fromString r) h)
|
||||
Nothing -> Left [r,x]
|
||||
decodePair other = Left other
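
-- A minimal smoke check for the parser above (hypothetical helper, not a
-- real test suite; a proper hspec/QuickCheck test would live under a test
-- target instead). It feeds a two-line refs file into fromStringMay and
-- verifies that one branch plus a HEAD pointer come back.
repoHeadParserSmokeCheck :: Bool
repoHeadParserSmokeCheck =
  let input = unlines
        [ "@refs/heads/master HEAD"
        , "0000000000000000000000000000000000000001 refs/heads/master"
        ]
  in case fromStringMay @RepoHead input of
       Nothing -> False
       Just rh -> HashMap.size (view repoHeads rh) == 1
                    && maybe False (const True) (view repoHEAD rh)
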
|
||||
|
||||
|
||||
class HasProgress m where
|
||||
type family ProgressMonitor m :: Type
|
||||
newProgressMonitor :: String -> Int -> m (ProgressMonitor m)
|
||||
updateProgress :: ProgressMonitor m -> Int -> m ()
|
||||
|
||||
|
||||
instance {-# OVERLAPPABLE #-} MonadIO m => HasProgress m where
|
||||
type instance ProgressMonitor m = ProgressBar ()
|
||||
updateProgress bar n = liftIO (incProgress bar n)
|
||||
newProgressMonitor s total = liftIO $ newProgressBar st 10 (Progress 0 total ())
|
||||
where
|
||||
st = defStyle { stylePrefix = msg (fromString s)
|
||||
, styleWidth = ConstantWidth 60
|
||||
}
|
||||
|
||||
class MonadIO m => HasRefCredentials m where
|
||||
getCredentials :: RepoRef -> m (PeerCredentials Schema)
|
||||
setCredentials :: RepoRef -> PeerCredentials Schema -> m ()
|
||||
|
||||
class MonadIO m => HasGlobalOptions m where
|
||||
addGlobalOption :: String -> String -> m ()
|
||||
getGlobalOption :: String -> m (Maybe String)
|
||||
|
||||
class MonadIO m => HasEncryptionKeys m where
|
||||
addEncryptionKey :: KeyringEntry Schema -> m ()
|
||||
findEncryptionKey :: PubKey 'Encrypt Schema -> m (Maybe (PrivKey 'Encrypt Schema))
|
||||
enumEncryptionKeys :: m [KeyringEntry Schema]
|
||||
|
||||
newtype App m a =
|
||||
App { fromApp :: ReaderT AppEnv m a }
|
||||
deriving newtype ( Applicative
|
||||
, Functor
|
||||
, Monad
|
||||
, MonadIO
|
||||
, MonadReader AppEnv
|
||||
, MonadThrow
|
||||
, MonadCatch
|
||||
, MonadMask
|
||||
, MonadUnliftIO
|
||||
, MonadTrans
|
||||
)
|
||||
|
||||
instance MonadIO m => HasConf (App m) where
|
||||
getConf = asks (view appConf)
|
||||
|
||||
|
||||
hPrint :: (Show a, MonadIO m) => Handle -> a -> m ()
|
||||
hPrint h s = liftIO $ IO.hPrint h s
|
||||
|
||||
hPutStrLn :: (Show a, MonadIO m) => Handle -> String -> m ()
|
||||
hPutStrLn h s = liftIO $ IO.hPutStrLn h s
|
||||
|
||||
exitSuccess :: MonadIO m => m ()
|
||||
exitSuccess = do
|
||||
shutUp
|
||||
liftIO Exit.exitSuccess
|
||||
|
||||
exitFailure :: MonadIO m => m ()
|
||||
exitFailure = do
|
||||
shutUp
|
||||
liftIO Exit.exitFailure
|
||||
|
||||
die :: MonadIO m => String -> m a
|
||||
die s = do
|
||||
shutUp
|
||||
pause @'Seconds 0.1
|
||||
liftIO $ Exit.die s
|
||||
|
||||
traceTime :: MonadIO m => String -> m a -> m a
|
||||
traceTime s action = do
|
||||
(t, x) <- timeItT action
|
||||
trace $ "time" <+> pretty s <+> pretty t
|
||||
pure x
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
cabal-version: 3.0
|
||||
name: hbs2-keyman
|
||||
version: 0.1.0.0
|
||||
version: 0.24.1.0
|
||||
-- synopsis:
|
||||
-- description:
|
||||
license: BSD-3-Clause
|
||||
|
|
|
@ -279,13 +279,6 @@ updatePeerInfo onError _ pinfo = do
|
|||
trimDown n s | IntSet.size s >= n = IntSet.deleteMax s
|
||||
| otherwise = s
|
||||
|
||||
data ByFirst a b = ByFirst a b
|
||||
|
||||
instance Eq a => Eq (ByFirst a b) where
|
||||
(==) (ByFirst a _) (ByFirst b _) = a == b
|
||||
|
||||
instance Hashable a => Hashable (ByFirst a b) where
|
||||
hashWithSalt s (ByFirst a _) = hashWithSalt s a
|
||||
|
||||
|
||||
downloadOnBlockSize :: (MonadIO m, IsPeerAddr e m, MyPeer e)
|
||||
|
|
|
@ -38,6 +38,8 @@ import Data.Cache qualified as Cache
|
|||
import Data.Either
|
||||
import Data.HashMap.Strict (HashMap)
|
||||
import Data.HashMap.Strict qualified as HashMap
|
||||
import Data.HashSet (HashSet)
|
||||
import Data.HashSet qualified as HashSet
|
||||
import Data.List qualified as List
|
||||
import Data.Maybe
|
||||
import Data.Text qualified as Text
|
||||
|
@ -82,6 +84,7 @@ data BasicBrains e =
|
|||
, _brainsCommit :: TQueue CommitCmd
|
||||
, _brainsDelDownload :: TQueue (Hash HbSync)
|
||||
, _brainsSizeCache :: Cache (Peer e, Hash HbSync) Integer
|
||||
, _brainsPolled :: TVar (HashSet (PubKey 'Sign (Encryption e), String))
|
||||
}
|
||||
|
||||
makeLenses 'BasicBrains
|
||||
|
@ -96,6 +99,7 @@ cleanupPostponed b h = do
|
|||
instance ( Hashable (Peer e)
|
||||
, Pretty (Peer e), Pretty (PeerAddr e)
|
||||
, Pretty (AsBase58 (PubKey 'Sign (Encryption e)))
|
||||
, Hashable (PubKey 'Sign (Encryption e))
|
||||
, e ~ L4Proto
|
||||
, ForRefChans e
|
||||
) => HasBrains e (BasicBrains e) where
|
||||
|
@ -103,14 +107,14 @@ instance ( Hashable (Peer e)
|
|||
onClientTCPConnected br pa@(L4Address proto _) ssid = do
|
||||
debug $ "BRAINS: onClientTCPConnected" <+> pretty proto <+> pretty pa <+> pretty ssid
|
||||
updateOP br $ insertClientTCP br pa ssid
|
||||
commitNow br True
|
||||
commitNow br False
|
||||
|
||||
getClientTCP br = liftIO (selectClientTCP br)
|
||||
|
||||
setActiveTCPSessions br ssids = do
|
||||
trace $ "BRAINS: setActiveTCPSessions" <+> pretty ssids
|
||||
updateOP br $ updateTCPSessions br ssids
|
||||
commitNow br True
|
||||
commitNow br False
|
||||
|
||||
listTCPPexCandidates = liftIO . selectTCPPexCandidates
|
||||
|
||||
|
@ -134,7 +138,7 @@ instance ( Hashable (Peer e)
|
|||
forM_ ps $ \pip -> do
|
||||
pa <- toPeerAddr pip
|
||||
insertKnownPeer br pa
|
||||
commitNow br True
|
||||
commitNow br False
|
||||
|
||||
onBlockSize b p h size = do
|
||||
liftIO $ Cache.insert (_brainsSizeCache b) (p,h) size
|
||||
|
@ -217,15 +221,15 @@ instance ( Hashable (Peer e)
|
|||
|
||||
addPolledRef brains r s i = do
|
||||
|
||||
liftIO $ atomically $ modifyTVar (_brainsPolled brains) (HashSet.insert (r,s))
|
||||
|
||||
updateOP brains $ do
|
||||
let conn = view brainsDb brains
|
||||
liftIO $ execute conn sql (show $ pretty (AsBase58 r), s, i)
|
||||
|
||||
commitNow brains True
|
||||
|
||||
where
|
||||
sql = [qc|
|
||||
insert into statedb.poll (ref,type,interval)
|
||||
insert into {poll_table} (ref,type,interval)
|
||||
values (?,?,?)
|
||||
on conflict do update set interval = excluded.interval
|
||||
|]
|
||||
|
@ -236,7 +240,7 @@ instance ( Hashable (Peer e)
|
|||
liftIO $ execute conn sql (Only (show $ pretty (AsBase58 r)))
|
||||
where
|
||||
sql = [qc|
|
||||
delete from statedb.poll
|
||||
delete from {poll_table}
|
||||
where ref = ?
|
||||
|]
|
||||
|
||||
|
@ -245,23 +249,35 @@ instance ( Hashable (Peer e)
|
|||
let conn = view brainsDb brains
|
||||
case mtp of
|
||||
Nothing -> postprocess <$>
|
||||
query_ conn [qc|select ref, type, interval from statedb.poll|]
|
||||
query_ conn [qc|select ref, type, interval from {poll_table}|]
|
||||
|
||||
Just tp -> postprocess <$>
|
||||
query conn [qc|select ref, type, interval from statedb.poll where type = ?|] (Only tp)
|
||||
query conn [qc|select ref, type, interval from {poll_table} where type = ?|] (Only tp)
|
||||
where
|
||||
postprocess = mapMaybe (\(r,t,i) -> (,t,i) <$> fromStringMay r )
|
||||
|
||||
isPolledRef brains ref = do
|
||||
liftIO do
|
||||
isPolledRef brains tp ref = do
|
||||
|
||||
cached <- liftIO $ readTVarIO (_brainsPolled brains) <&> HashSet.member (ref,tp)
|
||||
|
||||
if cached then
|
||||
pure True
|
||||
else do
|
||||
|
||||
r <- liftIO do
|
||||
let conn = view brainsDb brains
|
||||
query @_ @(Only Int) conn [qc|
|
||||
select 1 from statedb.poll
|
||||
where ref = ?
|
||||
select 1 from {poll_table}
|
||||
where ref = ? and type = ?
|
||||
limit 1
|
||||
|] ( Only ( show $ pretty (AsBase58 ref) ) )
|
||||
|] ( show $ pretty (AsBase58 ref), tp )
|
||||
<&> isJust . listToMaybe
|
||||
|
||||
when r do
|
||||
liftIO $ atomically $ modifyTVar (_brainsPolled brains) (HashSet.insert (ref,tp))
|
||||
|
||||
pure r
|
||||
|
||||
setSeen brains w ts = do
|
||||
utc <- liftIO getCurrentTime <&> addUTCTime ts
|
||||
let h = show $ pretty $ hashObject @HbSync w
|
||||
|
@ -718,6 +734,8 @@ insertPexInfo br peers = liftIO do
|
|||
|] (Only (show $ pretty p))
|
||||
|
||||
|
||||
{- HLINT ignore "Functor law" -}
|
||||
|
||||
selectPexInfo :: forall e . (e ~ L4Proto)
|
||||
=> BasicBrains e
|
||||
-> IO [PeerAddr e]
|
||||
|
@ -730,8 +748,23 @@ selectPexInfo br = liftIO do
|
|||
|] <&> fmap (fromStringMay . fromOnly)
|
||||
<&> catMaybes
|
||||
|
||||
tableExists :: Connection -> Maybe String -> String -> IO Bool
|
||||
tableExists conn prefix' tableName = do
|
||||
let sql = [qc|
|
||||
SELECT name FROM {prefix}.sqlite_master WHERE type='table' AND name=?
|
||||
|]
|
||||
r <- query conn sql (Only tableName) :: IO [Only String]
|
||||
pure $ not $ null r
|
||||
|
||||
where
|
||||
prefix = fromMaybe "main" prefix'
|
||||
|
||||
|
||||
-- FIXME: eventually-close-db
|
||||
newBasicBrains :: forall e m . (Hashable (Peer e), MonadIO m)
|
||||
newBasicBrains :: forall e m . ( Hashable (Peer e)
|
||||
, Hashable (PubKey 'Sign (Encryption e))
|
||||
, MonadIO m
|
||||
)
|
||||
=> PeerConfig
|
||||
-> m (BasicBrains e)
|
||||
|
||||
|
@ -836,15 +869,27 @@ newBasicBrains cfg = liftIO do
|
|||
)
|
||||
|]
|
||||
|
||||
poll_1 <- tableExists conn (Just "statedb") "poll_1"
|
||||
poll_0 <- tableExists conn (Just "statedb") "poll"
|
||||
|
||||
unless poll_1 do
|
||||
debug $ red "BRAINS: CREATE poll_1"
|
||||
execute_ conn [qc|
|
||||
create table if not exists statedb.poll
|
||||
create table if not exists statedb.poll_1
|
||||
( ref text not null
|
||||
, type text not null
|
||||
, interval int not null
|
||||
, primary key (ref)
|
||||
, primary key (ref,type)
|
||||
)
|
||||
|]
|
||||
|
||||
when poll_0 do
|
||||
debug $ red "BRAINS: FILL poll_1"
|
||||
execute_ conn [qc|
|
||||
insert into statedb.poll_1 (ref,type,interval)
|
||||
select ref,type,interval from statedb.poll;
|
||||
|]
|
||||
|
||||
execute_ conn [qc|
|
||||
create table if not exists peer_asymmkey
|
||||
( peer text not null
|
||||
|
@ -872,13 +917,17 @@ newBasicBrains cfg = liftIO do
|
|||
<*> newTQueueIO
|
||||
<*> newTQueueIO
|
||||
<*> Cache.newCache (Just (toTimeSpec (1200:: Timeout 'Seconds)))
|
||||
|
||||
<*> newTVarIO mempty
|
||||
|
||||
data PeerDownloadsDelOnStart
|
||||
|
||||
instance Monad m => HasCfgKey PeerDownloadsDelOnStart b m where
|
||||
key = "downloads-del-on-start"
|
||||
|
||||
{- HLINT ignore "Use camelCase" -}
|
||||
poll_table :: String
|
||||
poll_table = "statedb.poll_1"
|
||||
|
||||
runBasicBrains :: forall e m . ( e ~ L4Proto
|
||||
, MonadUnliftIO m
|
||||
, ForRefChans e
|
||||
|
@ -931,7 +980,7 @@ runBasicBrains cfg brains = do
|
|||
when (delDowns == FeatureOn ) do
|
||||
debug $ yellow "CLEAN ALL DOWNLOADS"
|
||||
updateOP brains (delAllDownloads brains)
|
||||
commitNow brains True
|
||||
commitNow brains False
|
||||
|
||||
let polls = catMaybes (
|
||||
[ (tp,n,) <$> fromStringMay @(PubKey 'Sign (Encryption e)) (Text.unpack ref)
|
||||
|
@ -945,7 +994,7 @@ runBasicBrains cfg brains = do
|
|||
updateOP brains $ do
|
||||
let conn = view brainsDb brains
|
||||
liftIO $ execute conn [qc|
|
||||
insert into statedb.poll (ref,type,interval)
|
||||
insert into {poll_table} (ref,type,interval)
|
||||
values (?,?,?)
|
||||
on conflict do update set interval = excluded.interval
|
||||
|] (show $ pretty (AsBase58 x), show $ pretty t, mi)
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
{-# Language TemplateHaskell #-}
|
||||
module CLI.Common where
|
||||
|
||||
import HBS2.Prelude
|
||||
import HBS2.Clock
|
||||
import HBS2.Net.Messaging.Unix
|
||||
import HBS2.Net.Proto
|
||||
import HBS2.Net.Proto.Service
|
||||
import HBS2.Net.Auth.Schema
|
||||
|
||||
import PeerConfig
|
||||
|
||||
|
@ -58,3 +60,6 @@ pRpcCommon :: Parser RPCOpt
|
|||
pRpcCommon = do
|
||||
RPCOpt <$> optional confOpt
|
||||
<*> optional rpcOpt
|
||||
|
||||
pPubKey :: ReadM (PubKey 'Sign HBS2Basic)
|
||||
pPubKey = maybeReader fromStringMay
|
||||
|
|
|
@ -0,0 +1,76 @@
|
|||
module CLI.LWWRef where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.OrDie
|
||||
import HBS2.Net.Proto.Service
|
||||
import HBS2.Net.Auth.Credentials
|
||||
import HBS2.Data.Types.SignedBox
|
||||
import HBS2.Net.Auth.Schema
|
||||
import HBS2.Peer.Proto.LWWRef
|
||||
|
||||
import HBS2.Peer.RPC.API.LWWRef
|
||||
import HBS2.KeyMan.Keys.Direct
|
||||
|
||||
import CLI.Common
|
||||
import RPC2()
|
||||
import PeerLogger hiding (info)
|
||||
|
||||
import System.Exit
|
||||
|
||||
import Options.Applicative
|
||||
import Data.Word
|
||||
import Lens.Micro.Platform
|
||||
|
||||
pLwwRef :: Parser (IO ())
|
||||
pLwwRef = hsubparser ( command "fetch" (info pLwwRefFetch (progDesc "fetch lwwref"))
|
||||
<> command "get" (info pLwwRefGet (progDesc "get lwwref"))
|
||||
<> command "update" (info pLwwRefUpdate (progDesc "update lwwref"))
|
||||
)
|
||||
pLwwRefFetch :: Parser (IO ())
|
||||
pLwwRefFetch = do
|
||||
rpc <- pRpcCommon
|
||||
ref <- strArgument (metavar "LWWREF")
|
||||
pure $ withMyRPC @LWWRefAPI rpc $ \caller -> do
|
||||
callService @RpcLWWRefFetch caller ref >>= \case
|
||||
Left e -> err (viaShow e) >> exitFailure
|
||||
Right{} -> pure ()
|
||||
|
||||
lwwRef :: ReadM (LWWRefKey HBS2Basic)
|
||||
lwwRef = maybeReader (fromStringMay @(LWWRefKey HBS2Basic))
|
||||
|
||||
pLwwRefGet :: Parser (IO ())
|
||||
pLwwRefGet = do
|
||||
rpc <- pRpcCommon
|
||||
ref <- strArgument (metavar "LWWREF")
|
||||
pure $ withMyRPC @LWWRefAPI rpc $ \caller -> do
|
||||
callService @RpcLWWRefGet caller ref >>= \case
|
||||
Left e -> err (viaShow e) >> exitFailure
|
||||
Right r -> print $ pretty r
|
||||
|
||||
pLwwRefUpdate :: Parser (IO ())
|
||||
pLwwRefUpdate = do
|
||||
rpc <- pRpcCommon
|
||||
puk <- argument pPubKey (metavar "LWWREF")
|
||||
seq' <- optional $ option @Word64 auto (short 's' <> long "seq" <> help "seqno" <> metavar "SEQ")
|
||||
val <- option (maybeReader fromStringMay) (short 'v' <> long "value" <> help "value" <> metavar "VALUE")
|
||||
pure $ withMyRPC @LWWRefAPI rpc $ \caller -> do
|
||||
|
||||
|
||||
(sk,pk) <- liftIO $ runKeymanClient do
|
||||
creds <- loadCredentials puk >>= orThrowUser "can't load credentials"
|
||||
pure ( view peerSignSk creds, view peerSignPk creds )
|
||||
|
||||
seq <- case seq' of
|
||||
Just v -> pure v
|
||||
Nothing -> do
|
||||
let ref = LWWRefKey puk
|
||||
callService @RpcLWWRefGet caller ref >>= \case
|
||||
Left e -> err (viaShow e) >> exitFailure
|
||||
Right Nothing -> err ("not found value for" <+> pretty ref) >> exitFailure
|
||||
Right (Just r) -> pure $ succ (lwwSeq r)
|
||||
|
||||
let box = makeSignedBox @L4Proto pk sk (LWWRef @L4Proto seq val Nothing)
|
||||
callService @RpcLWWRefUpdate caller box >>= \case
|
||||
Left e -> err (viaShow e) >> exitFailure
|
||||
Right r -> print $ pretty r
|
||||
|
|
@ -2,29 +2,59 @@
|
|||
module HttpWorker where
|
||||
|
||||
import HBS2.Prelude
|
||||
import HBS2.Hash
|
||||
import HBS2.Actors.Peer
|
||||
import HBS2.Storage
|
||||
import HBS2.Data.Detect
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Merkle (AnnMetaData)
|
||||
import HBS2.Merkle
|
||||
import HBS2.Peer.Proto
|
||||
import HBS2.Peer.Proto.LWWRef
|
||||
import HBS2.Net.Auth.Schema
|
||||
import HBS2.Data.Types.SignedBox
|
||||
import HBS2.Events
|
||||
import HBS2.Storage.Operations.ByteString
|
||||
|
||||
import PeerTypes
|
||||
import PeerConfig
|
||||
import RefLog ( doRefLogBroadCast )
|
||||
|
||||
import Data.Config.Suckless
|
||||
|
||||
import Data.ByteString.Lazy qualified as LBS
|
||||
import Network.HTTP.Types.Status
|
||||
import Network.Wai.Middleware.RequestLogger
|
||||
import Text.InterpolatedString.Perl6 (qc)
|
||||
import Web.Scotty
|
||||
|
||||
import Data.ByteString.Builder (byteString, Builder)
|
||||
|
||||
import Control.Concurrent
|
||||
import Data.Either
|
||||
import Codec.Serialise (deserialiseOrFail)
|
||||
import Data.Aeson (object, (.=))
|
||||
import Data.ByteString.Lazy.Char8 qualified as LBS8
|
||||
import Control.Monad.Reader
|
||||
import Lens.Micro.Platform (view)
|
||||
import System.FilePath
|
||||
import Control.Monad.Except
|
||||
import Control.Monad.Trans.Cont
|
||||
|
||||
import UnliftIO (async)
|
||||
|
||||
{- HLINT ignore "Functor law" -}
|
||||
|
||||
-- TODO: introduce-http-of-off-feature
|
||||
|
||||
extractMetadataHash :: Hash HbSync -> LBS.ByteString -> Maybe (Hash HbSync)
|
||||
extractMetadataHash what blob =
|
||||
case tryDetect what blob of
|
||||
MerkleAnn (MTreeAnn {_mtaMeta = AnnHashRef h, _mtaCrypt = NullEncryption}) -> Just h
|
||||
_ -> Nothing
|
||||
|
||||
orElse :: m r -> Maybe a -> ContT r m a
|
||||
orElse a mb = ContT $ maybe1 mb a
|
||||
|
||||
httpWorker :: forall e s m . ( MyPeer e
|
||||
, MonadIO m
|
||||
, HasStorage m
|
||||
|
@ -32,6 +62,7 @@ httpWorker :: forall e s m . ( MyPeer e
|
|||
, s ~ Encryption e
|
||||
, m ~ PeerM e IO
|
||||
, e ~ L4Proto
|
||||
-- , ForLWWRefProto e
|
||||
) => PeerConfig -> AnnMetaData -> DownloadEnv e -> m ()
|
||||
|
||||
httpWorker (PeerConfig syn) pmeta e = do
|
||||
|
@ -45,7 +76,11 @@ httpWorker (PeerConfig syn) pmeta e = do
|
|||
scotty port $ do
|
||||
middleware logStdout
|
||||
|
||||
defaultHandler $ const do
|
||||
status status500
|
||||
|
||||
get "/size/:hash" do
|
||||
|
||||
what <- param @String "hash" <&> fromString
|
||||
size <- liftIO $ hasBlock sto what
|
||||
case size of
|
||||
|
@ -53,6 +88,73 @@ httpWorker (PeerConfig syn) pmeta e = do
|
|||
Just n -> do
|
||||
json n
|
||||
|
||||
-- TODO: key-to-disable-tree-streaming
|
||||
|
||||
get "/ref/:key" do
|
||||
|
||||
void $ flip runContT pure do
|
||||
what <- lift (param @String "key" <&> fromStringMay @(LWWRefKey HBS2Basic))
|
||||
>>= orElse (status status404)
|
||||
|
||||
rv <- getRef sto what
|
||||
>>= orElse (status status404)
|
||||
>>= getBlock sto
|
||||
>>= orElse (status status404)
|
||||
<&> either (const Nothing) Just . deserialiseOrFail @(SignedBox (LWWRef e) e)
|
||||
>>= orElse (status status404)
|
||||
<&> unboxSignedBox0 @(LWWRef e)
|
||||
>>= orElse (status status404)
|
||||
<&> lwwValue . snd
|
||||
|
||||
lift $ redirect [qc|/tree/{pretty rv}|]
|
||||
|
||||
get "/tree/:hash" do
|
||||
what <- param @String "hash" <&> fromString
|
||||
|
||||
void $ flip runContT pure do
|
||||
|
||||
callCC $ \exit -> do
|
||||
|
||||
blob <- liftIO (getBlock sto what)
|
||||
>>= orElse (status status404)
|
||||
|
||||
mh <- orElse (status status404) (extractMetadataHash what blob)
|
||||
|
||||
meta <- lift (getBlock sto mh) >>= orElse (status status404)
|
||||
<&> LBS8.unpack
|
||||
<&> fromRight mempty . parseTop
|
||||
|
||||
let tp = headDef "application/octet-stream"
|
||||
[ show (pretty w)
|
||||
| ListVal [SymbolVal "mime-type:", LitStrVal w] <- meta
|
||||
]
|
||||
|
||||
let fn = headMay
|
||||
[ show (pretty w)
|
||||
| ListVal [SymbolVal "file-name:", LitStrVal w] <- meta
|
||||
]
|
||||
|
||||
-- liftIO $ print $ pretty meta
|
||||
|
||||
case fn of
|
||||
Just x | takeExtension x == ".html" -> pure ()
|
||||
| otherwise -> lift $ do
|
||||
addHeader "content-disposition" [qc|attachment; filename="{x}"|]
|
||||
|
||||
_ -> pure ()
|
||||
|
||||
lift $ addHeader "content-type" (fromString tp)
|
||||
|
||||
elbs <- lift $ runExceptT $ readFromMerkle sto (SimpleKey what)
|
||||
|
||||
case elbs of
|
||||
Left{} -> lift $ status status404
|
||||
Right lbs -> lift do
|
||||
stream $ \write flush -> do
|
||||
for_ (LBS.toChunks lbs) $ \chunk -> do
|
||||
write $ byteString chunk
|
||||
flush
|
||||
|
||||
get "/cat/:hash" do
|
||||
what <- param @String "hash" <&> fromString
|
||||
blob <- liftIO $ getBlock sto what
|
||||
|
|
|
@ -0,0 +1,52 @@
|
|||
module LWWRef where
|
||||
|
||||
import HBS2.Prelude.Plated
|
||||
import HBS2.Actors.Peer
|
||||
import HBS2.Data.Types.Refs
|
||||
import HBS2.Net.Proto
|
||||
import HBS2.Base58
|
||||
import HBS2.Storage
|
||||
import HBS2.Storage.Operations.Missed
|
||||
import HBS2.Hash
|
||||
import HBS2.Peer.Proto
|
||||
import HBS2.Peer.Proto.LWWRef
|
||||
import HBS2.Net.Auth.Credentials
|
||||
|
||||
import HBS2.Misc.PrettyStuff
|
||||
|
||||
import Brains
|
||||
import PeerConfig
|
||||
import PeerTypes
|
||||
|
||||
import Control.Monad
|
||||
import UnliftIO
|
||||
import Lens.Micro.Platform
|
||||
|
||||
{- HLINT ignore "Functor law" -}
|
||||
|
||||
lwwRefWorker :: forall e s m . ( MonadIO m
|
||||
, MonadUnliftIO m
|
||||
, MyPeer e
|
||||
, HasStorage m
|
||||
, Sessions e (KnownPeer e) m
|
||||
, HasGossip e (LWWRefProto e) m
|
||||
, Signatures s
|
||||
, s ~ Encryption e
|
||||
, IsRefPubKey s
|
||||
)
|
||||
=> PeerConfig
|
||||
-> SomeBrains e
|
||||
-> m ()
|
||||
|
||||
lwwRefWorker conf brains = do
|
||||
|
||||
let listRefs = listPolledRefs @e brains (Just "lwwref")
|
||||
<&> fmap (\(a,_,b) -> (a,b))
|
||||
<&> fmap (over _2 ( (*60) . fromIntegral) )
|
||||
|
||||
polling (Polling 5 5) listRefs $ \ref -> do
|
||||
debug $ yellow "POLLING LWWREF" <+> pretty (AsBase58 ref)
|
||||
gossip (LWWRefProto1 @e (LWWProtoGet (LWWRefKey ref)))
|
||||
|
||||
|
||||
|
|
@ -16,6 +16,7 @@ import HBS2.Data.Types.Refs
import HBS2.Data.Types.SignedBox
import HBS2.Data.Types
import HBS2.Net.Auth.Credentials
import HBS2.Net.Auth.Schema()
import HBS2.Net.IP.Addr
import HBS2.Net.Messaging.UDP
import HBS2.Net.Messaging.TCP

@ -47,11 +48,13 @@ import Bootstrap
import CheckMetrics
import RefLog qualified
import RefLog (reflogWorker)
import LWWRef (lwwRefWorker)
import HttpWorker
import DispatchProxy
import PeerMeta
import CLI.Common
import CLI.RefChan
import CLI.LWWRef
import RefChan
import RefChanNotifyLog
import Fetch (fetchHash)

@ -65,9 +68,12 @@ import HBS2.Peer.RPC.API.Storage
import HBS2.Peer.RPC.API.Peer
import HBS2.Peer.RPC.API.RefLog
import HBS2.Peer.RPC.API.RefChan
import HBS2.Peer.RPC.API.LWWRef
import HBS2.Peer.Notify
import HBS2.Peer.RPC.Client.StorageClient

import HBS2.Peer.Proto.LWWRef.Internal

import RPC2(RPC2Context(..))

import Codec.Serialise as Serialise

@ -120,7 +126,7 @@ instance Exception GoAgainException

-- TODO: write-workers-to-config
defStorageThreads :: Integral a => a
defStorageThreads = 2
defStorageThreads = 4

defLocalMulticast :: String
defLocalMulticast = "239.192.152.145:10153"

@ -233,6 +239,7 @@ runCLI = do
                        <> command "fetch" (info pFetch (progDesc "fetch block"))
                        <> command "reflog" (info pRefLog (progDesc "reflog commands"))
                        <> command "refchan" (info pRefChan (progDesc "refchan commands"))
                        <> command "lwwref" (info pLwwRef (progDesc "lwwref commands"))
                        <> command "peers" (info pPeers (progDesc "show known peers"))
                        <> command "pexinfo" (info pPexInfo (progDesc "show pex"))
                        <> command "download" (info pDownload (progDesc "download management"))

@ -450,7 +457,6 @@ runCLI = do
              <> command "del" (info pPollDel (progDesc "del poller" ))
              )


    pPollAdd = do
      rpc <- pRpcCommon
      r <- argument refP (metavar "REF")

@ -614,6 +620,8 @@ respawn opts =
runPeer :: forall e s . ( e ~ L4Proto
                        , FromStringMaybe (PeerAddr e)
                        , s ~ Encryption e
                        -- , ForLWWRefProto e
                        -- , Serialise (PubKey 'Sign (Encryption e))
                        , HasStorage (PeerM e IO)
                        )=> PeerOpts -> IO ()

@ -812,7 +820,7 @@ runPeer opts = Exception.handle (\e -> myException e
  let refChanAdapter =
        RefChanAdapter
        { refChanOnHead = refChanOnHeadFn rce
        , refChanSubscribed = isPolledRef @e brains
        , refChanSubscribed = isPolledRef @e brains "refchan"
        , refChanWriteTran = refChanWriteTranFn rce
        , refChanValidatePropose = refChanValidateTranFn @e rce

@ -996,6 +1004,10 @@ runPeer opts = Exception.handle (\e -> myException e
          err $ red "Exception" <+> "in thread" <+> pretty t <+> viaShow e
          liftIO $ throwTo myself GoAgainException

  let lwwRefProtoA = lwwRefProto (LWWRefProtoAdapter { lwwFetchBlock = download })
        where download h = withPeerM env $ withDownload denv (addDownload Nothing h)

  flip runContT pure do

    peerThread "local multicast" $ forever $ do

@ -1029,6 +1041,8 @@ runPeer opts = Exception.handle (\e -> myException e

    peerThread "refChanNotifyLogWorker" (refChanNotifyLogWorker @e conf (SomeBrains brains))

    peerThread "lwwRefWorker" (lwwRefWorker @e conf (SomeBrains brains))

    liftIO $ withPeerM penv do
      runProto @e
        [ makeResponse (blockSizeProto blk (downloadOnBlockSize denv) onNoBlock)

@ -1043,6 +1057,8 @@ runPeer opts = Exception.handle (\e -> myException e
        , makeResponse (refChanUpdateProto False pc refChanAdapter)
        , makeResponse (refChanRequestProto False refChanAdapter)
        , makeResponse (refChanNotifyProto False refChanAdapter)
        -- TODO: change-all-to-authorized
        , makeResponse ((authorized . subscribed (SomeBrains brains)) lwwRefProtoA)
        ]

@ -1147,15 +1163,16 @@ runPeer opts = Exception.handle (\e -> myException e
      envrl <- newNotifyEnvServer @(RefLogEvents L4Proto) refLogNotifySource
      w1 <- asyncLinked $ runNotifyWorkerServer env
      w2 <- asyncLinked $ runNotifyWorkerServer envrl
      runProto @UNIX
      wws <- replicateM 1 $ async $ runProto @UNIX
        [ makeResponse (makeServer @PeerAPI)
        , makeResponse (makeServer @RefLogAPI)
        , makeResponse (makeServer @RefChanAPI)
        , makeResponse (makeServer @StorageAPI)
        , makeResponse (makeServer @LWWRefAPI)
        , makeResponse (makeNotifyServer @(RefChanEvents L4Proto) env)
        , makeResponse (makeNotifyServer @(RefLogEvents L4Proto) envrl)
        ]
      mapM_ wait [w1,w2]
      mapM_ wait (w1 : w2 : wws )

      void $ waitAnyCancel $ w <> [ loop
                                  , m1
@ -3,6 +3,7 @@
{-# Language UndecidableInstances #-}
{-# Language AllowAmbiguousTypes #-}
{-# Language MultiWayIf #-}
{-# Language FunctionalDependencies #-}
module PeerTypes
  ( module PeerTypes
  , module PeerLogger

@ -13,6 +14,8 @@ module PeerTypes
import HBS2.Polling
import HBS2.Actors.Peer
import HBS2.Clock
import HBS2.Net.Auth.Schema
import HBS2.Net.Auth.Credentials
import HBS2.Data.Types.SignedBox
import HBS2.Data.Types.Peer
import HBS2.Data.Types.Refs

@ -24,6 +27,7 @@ import HBS2.Net.IP.Addr
import HBS2.Net.Proto
import HBS2.Peer.Proto.Peer
import HBS2.Peer.Proto.BlockInfo
import HBS2.Peer.Proto.LWWRef
import HBS2.Net.Proto.Sessions
import HBS2.Prelude.Plated
import HBS2.Storage

@ -481,4 +485,40 @@ simpleBlockAnnounce size h = do
  pure $ BlockAnnounce @e no annInfo


class IsPolledKey e proto | proto -> e where
  getPolledKey :: proto -> (String, PubKey 'Sign (Encryption e))

instance IsPolledKey e (LWWRefProto e) where
  getPolledKey = \case
    LWWRefProto1 (LWWProtoGet (LWWRefKey k))   -> (tp,k)
    LWWRefProto1 (LWWProtoSet (LWWRefKey k) _) -> (tp,k)
    where tp = "lwwref"

subscribed :: forall e proto m . ( MonadIO m
                                 , IsPolledKey e proto
                                 , Request e proto m
                                 , Response e proto m
                                 )

           => SomeBrains e
           -> (proto -> m ())
           -> proto
           -> m ()

subscribed brains f req = do
  let (tp,ref) = getPolledKey req
  polled <- isPolledRef @e brains tp ref
  when polled $ f req

authorized :: forall e proto m . ( MonadIO m
                                 , Request e proto m
                                 , Response e proto m
                                 , Sessions e (KnownPeer e) m
                                 )
           => (proto -> m ()) -> proto -> m ()
authorized f req = do
  p <- thatPeer @proto
  auth <- find (KnownPeerKey p) id <&> isJust
  when auth (f req)
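The `subscribed` and `authorized` wrappers above are plain handler combinators: each runs a monadic check and only then calls the wrapped handler, which is why PeerMain can stack them as `(authorized . subscribed (SomeBrains brains)) lwwRefProtoA`. A simplified, self-contained analogue of that shape (not hbs2 code; names are illustrative):

-- Illustrative only: a guard wraps a handler and calls it only when its
-- monadic check passes, so guards compose with ordinary (.).
guardWith :: Monad m => (req -> m Bool) -> (req -> m ()) -> (req -> m ())
guardWith check handler req = do
  ok <- check req
  if ok then handler req else pure ()

-- e.g.  (guardWith isAuthorized . guardWith isSubscribed) realHandler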
@ -2,10 +2,12 @@ module RPC2
  ( module RPC2.Peer
  , module RPC2.RefLog
  , module RPC2.RefChan
  , module RPC2.LWWRef
  ) where


import RPC2.Peer
import RPC2.RefLog
import RPC2.RefChan
import RPC2.LWWRef
@ -0,0 +1,77 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# Language UndecidableInstances #-}
module RPC2.LWWRef where


import HBS2.Peer.Prelude

import HBS2.Actors.Peer
import HBS2.Data.Types.SignedBox
import HBS2.Peer.Proto
import HBS2.Peer.Proto.LWWRef
import HBS2.Peer.Proto.LWWRef.Internal
import HBS2.Storage
import HBS2.Net.Messaging.Unix
import HBS2.Misc.PrettyStuff

import PeerTypes

import HBS2.Peer.RPC.Internal.Types
import HBS2.Peer.RPC.API.LWWRef

import Lens.Micro.Platform
import Control.Monad.Reader
import Control.Monad.Trans.Maybe

type LWWRefContext m = (MonadIO m, HasRpcContext LWWRefAPI RPC2Context m)

instance (Monad m)
  => HasRpcContext LWWRefAPI RPC2Context (ResponseM UNIX (ReaderT RPC2Context m)) where
  getRpcContext = lift ask

instance (LWWRefContext m) => HandleMethod m RpcLWWRefGet where

  handleMethod key = do
    co <- getRpcContext @LWWRefAPI
    debug "rpc.LWWRefContext"

    let penv = rpcPeerEnv co
    liftIO $ withPeerM penv $ do
      sto <- getStorage
      runMaybeT do
        rv  <- getRef sto key >>= toMPlus
        val <- getBlock sto rv >>= toMPlus
                 <&> unboxSignedBox @(LWWRef L4Proto) @L4Proto
                 >>= toMPlus

        pure $ snd val

instance LWWRefContext m => HandleMethod m RpcLWWRefFetch where

  handleMethod key = do
    co <- getRpcContext @LWWRefAPI
    debug $ green "rpc.LWWRefFetch" <+> pretty key

    let penv = rpcPeerEnv co
    liftIO $ withPeerM penv $ do
      gossip (LWWRefProto1 @L4Proto (LWWProtoGet key))

instance LWWRefContext m => HandleMethod m RpcLWWRefUpdate where

  handleMethod box = do
    co <- getRpcContext @LWWRefAPI
    debug "rpc.LWWRefUpdate"

    let penv = rpcPeerEnv co

    let nada = LWWRefProtoAdapter dontHandle

    void $ runMaybeT do
      (puk, _) <- unboxSignedBox0 box & toMPlus

      liftIO $ withPeerM penv do
        me <- ownPeer @L4Proto
        runResponseM me $ do
          lwwRefProto nada (LWWRefProto1 (LWWProtoSet @L4Proto (LWWRefKey puk) box))
@ -24,6 +24,8 @@ instance (MonadIO m, HasRpcContext PeerAPI RPC2Context m) => HandleMethod m RpcP
  handleMethod (r,t,i) = do
    brains <- getRpcContext @PeerAPI <&> rpcBrains
    debug $ "rpc.pollAdd"
    polled <- isPolledRef @L4Proto brains t r
    unless polled do
      addPolledRef @L4Proto brains r t i

instance (MonadIO m, HasRpcContext PeerAPI RPC2Context m) => HandleMethod m RpcPollDel where
@ -65,7 +65,7 @@ mkRefLogRequestAdapter :: forall e s m . ( MonadIO m
  => SomeBrains e -> m (RefLogRequestI e (ResponseM e m ))
mkRefLogRequestAdapter brains = do
  sto <- getStorage
  pure $ RefLogRequestI (doOnRefLogRequest brains sto) dontHandle (isPolledRef @e brains)
  pure $ RefLogRequestI (doOnRefLogRequest brains sto) dontHandle (isPolledRef @e brains "reflog")

doOnRefLogRequest :: forall e s m . ( MonadIO m
                                    , MyPeer e

@ -78,10 +78,10 @@ doOnRefLogRequest :: forall e s m . ( MonadIO m
  -> m (Maybe (Hash HbSync))

doOnRefLogRequest brains sto (_,pk) = runMaybeT do
  isPolledRef @e brains pk >>= guard
  isPolledRef @e brains "reflog" pk >>= guard
  ref <- liftIO $ getRef sto (RefLogKey @s pk)
  when (isNothing ref) do
    warn $ "missed reflog value" <+> pretty ref
    warn $ "missed reflog value" <+> pretty (RefLogKey @s pk)
  toMPlus ref

data RefLogWorkerAdapter e =

@ -150,7 +150,7 @@ reflogWorker conf brains adapter = do
    subscribe @e RefLogUpdateEvKey $ \(RefLogUpdateEvData (reflog,v, mpip)) -> do
      trace $ "reflog worker.got refupdate" <+> pretty (AsBase58 reflog)

      polled <- isPolledRef @e brains reflog
      polled <- isPolledRef @e brains "reflog" reflog
      buddy <- maybe1 mpip (pure False) $ \pip -> do
        pa <- toPeerAddr @e pip
        acceptAnnouncesFromPeer @e conf pa
@ -1,6 +1,6 @@
cabal-version: 3.0
name: hbs2-peer
version: 0.1.0.0
version: 0.24.1.0
-- synopsis:
-- description:
license: BSD-3-Clause

@ -69,6 +69,7 @@ common common-deps
        , warp
        , http-conduit
        , http-types
        , wai
        , wai-extra
        , unliftio
        , unliftio-core

@ -157,11 +158,14 @@ library
      HBS2.Peer.Proto.RefChan.RefChanNotify
      HBS2.Peer.Proto.RefChan.RefChanUpdate
      HBS2.Peer.Proto.AnyRef
      HBS2.Peer.Proto.LWWRef
      HBS2.Peer.Proto.LWWRef.Internal

      HBS2.Peer.RPC.Class
      HBS2.Peer.RPC.API.Peer
      HBS2.Peer.RPC.API.RefLog
      HBS2.Peer.RPC.API.RefChan
      HBS2.Peer.RPC.API.LWWRef
      HBS2.Peer.RPC.API.Storage
      HBS2.Peer.RPC.Client.Unix
      HBS2.Peer.RPC.Client.StorageClient

@ -172,6 +176,54 @@ library
    other-modules:
      -- HBS2.System.Logger.Simple


test-suite test
  import: shared-properties
  default-language: Haskell2010

  other-modules:

  -- other-extensions:

  type: exitcode-stdio-1.0
  hs-source-dirs: test
  main-is: TestSuite.hs
  build-depends:
      base, hbs2-peer, hbs2-core
    , async
    , bytestring
    , cache
    , containers
    , directory
    , hashable
    , microlens-platform
    , mtl
    , prettyprinter
    , QuickCheck
    , quickcheck-instances
    , random
    , safe
    , serialise
    , stm
    , streaming
    , tasty
    , tasty-quickcheck
    , tasty-hunit
    , tasty-quickcheck
    , transformers
    , uniplate
    , vector
    , saltine
    , simple-logger
    , string-conversions
    , filepath
    , temporary
    , unliftio
    , resourcet




executable hbs2-peer
  import: shared-properties
  import: common-deps

@ -207,18 +259,21 @@ executable hbs2-peer
      , RPC2.Downloads
      , RPC2.RefLog
      , RPC2.RefChan
      , RPC2.LWWRef
      , PeerTypes
      , PeerLogger
      , PeerConfig
      , RefLog
      , RefChan
      , RefChanNotifyLog
      , LWWRef
      , CheckMetrics
      , HttpWorker
      , Brains
      , DispatchProxy
      , CLI.Common
      , CLI.RefChan
      , CLI.LWWRef

      , Paths_hbs2_peer
@ -18,8 +18,8 @@ class HasBrains e a where
  listPolledRefs :: MonadIO m => a -> Maybe String -> m [(PubKey 'Sign (Encryption e), String, Int)]
  listPolledRefs _ _ = pure mempty

  isPolledRef :: MonadIO m => a -> PubKey 'Sign (Encryption e) -> m Bool
  isPolledRef _ _ = pure False
  isPolledRef :: MonadIO m => a -> String -> PubKey 'Sign (Encryption e) -> m Bool
  isPolledRef _ _ _ = pure False

  delPolledRef :: MonadIO m => a -> PubKey 'Sign (Encryption e) -> m ()
  delPolledRef _ _ = pure ()
@ -1,4 +1,5 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# Language UndecidableInstances #-}
module HBS2.Peer.Proto
  ( module HBS2.Peer.Proto.PeerMeta
  , module HBS2.Peer.Proto.BlockAnnounce

@ -27,6 +28,7 @@ import HBS2.Peer.Proto.PeerExchange
import HBS2.Peer.Proto.RefLog
import HBS2.Peer.Proto.RefChan hiding (Notify)
import HBS2.Peer.Proto.AnyRef
import HBS2.Peer.Proto.LWWRef

import HBS2.Actors.Peer.Types
import HBS2.Net.Messaging.Unix (UNIX)

@ -146,6 +148,12 @@ instance HasProtocol L4Proto (RefChanNotify L4Proto) where
  -- take 10 seconds for now
  requestPeriodLim = NoLimit

instance ForLWWRefProto L4Proto => HasProtocol L4Proto (LWWRefProto L4Proto) where
  type instance ProtocolId (LWWRefProto L4Proto) = 12001
  type instance Encoded L4Proto = ByteString
  decode = either (const Nothing) Just . deserialiseOrFail
  encode = serialise
  requestPeriodLim = ReqLimPerMessage 1

instance Serialise (RefChanValidate UNIX) => HasProtocol UNIX (RefChanValidate UNIX) where
  type instance ProtocolId (RefChanValidate UNIX) = 0xFFFA0001
@ -0,0 +1,139 @@
{-# Language UndecidableInstances #-}
{-# Language AllowAmbiguousTypes #-}
module HBS2.Peer.Proto.LWWRef where

import HBS2.Prelude.Plated
import HBS2.OrDie
import HBS2.Base58
import HBS2.Storage
import HBS2.Hash
import HBS2.Net.Auth.Credentials
import HBS2.Net.Proto.Types
import HBS2.Data.Types.SignedBox
import HBS2.Data.Types.Refs
import HBS2.Net.Proto.Types
import HBS2.Net.Auth.Schema()

import Data.ByteString (ByteString)
import Data.Hashable hiding (Hashed)
import Data.Maybe
import Data.Word
import Control.Monad.Trans.Maybe
import Control.Monad.Except
import Codec.Serialise

data LWWRefProtoReq e =
    LWWProtoGet (LWWRefKey (Encryption e))
  | LWWProtoSet (LWWRefKey (Encryption e)) (SignedBox (LWWRef e) e)
  deriving stock Generic


data LWWRefProto e =
  LWWRefProto1 (LWWRefProtoReq e)
  deriving stock (Generic)

data LWWRef e =
  LWWRef
  { lwwSeq   :: Word64
  , lwwValue :: HashRef
  , lwwProof :: Maybe HashRef
  }
  deriving stock (Generic)


type ForLWWRefProto e = (ForSignedBox e, Serialise (LWWRefKey (Encryption e)))

instance ForLWWRefProto e => Serialise (LWWRefProtoReq e)
instance ForLWWRefProto e => Serialise (LWWRefProto e)
instance ForLWWRefProto e => Serialise (LWWRef e)

newtype LWWRefKey s =
  LWWRefKey
  { fromLwwRefKey :: PubKey 'Sign s
  }
  deriving stock (Generic)


instance RefMetaData (LWWRefKey s)

deriving stock instance IsRefPubKey s => Eq (LWWRefKey s)

instance IsRefPubKey e => Serialise (LWWRefKey e)

instance IsRefPubKey s => Hashable (LWWRefKey s) where
  hashWithSalt s k = hashWithSalt s (hashObject @HbSync k)

instance IsRefPubKey s => Hashed HbSync (LWWRefKey s) where
  hashObject (LWWRefKey pk) = hashObject ("lwwrefkey|" <> serialise pk)

instance IsRefPubKey s => FromStringMaybe (LWWRefKey s) where
  fromStringMay s = LWWRefKey <$> fromStringMay s

instance IsRefPubKey s => IsString (LWWRefKey s) where
  fromString s = fromMaybe (error "bad public key base58") (fromStringMay s)

instance Pretty (AsBase58 (PubKey 'Sign s )) => Pretty (AsBase58 (LWWRefKey s)) where
  pretty (AsBase58 (LWWRefKey k)) = pretty (AsBase58 k)

instance Pretty (AsBase58 (PubKey 'Sign s )) => Pretty (LWWRefKey s) where
  pretty (LWWRefKey k) = pretty (AsBase58 k)


instance Pretty (LWWRef e) where
  pretty (LWWRef{..}) = parens ( "lwwref" <> line
                                 <> indent 2 ( seqno <> line <> val <> line <> proof)
                               )
    where
      seqno = parens ( "seq" <+> pretty lwwSeq )
      val = parens ( "value" <+> dquotes (pretty lwwValue) )
      proof | isNothing lwwProof = mempty
            | otherwise = parens ( "proof" <+> pretty lwwProof)


data ReadLWWRefError =
    ReadLWWStorageError
  | ReadLWWFormatError
  | ReadLWWSignatureError
  deriving stock (Show,Typeable)

readLWWRef :: forall e s m . ( MonadIO m
                             , MonadError ReadLWWRefError m
                             , Encryption e ~ s
                             , ForLWWRefProto e
                             , Signatures s
                             , IsRefPubKey s
                             )
           => AnyStorage
           -> LWWRefKey s
           -> m (Maybe (LWWRef e))

readLWWRef sto key = runMaybeT do
  getRef sto key
    >>= toMPlus
    >>= getBlock sto
    >>= toMPlus
    <&> deserialiseOrFail @(SignedBox (LWWRef e) e)
    >>= orThrowError ReadLWWFormatError
    <&> unboxSignedBox0
    >>= orThrowError ReadLWWSignatureError
    <&> snd

updateLWWRef :: forall s e m . ( Encryption e ~ s
                               , ForLWWRefProto e
                               , MonadIO m
                               , Signatures s
                               , IsRefPubKey s
                               )
             => AnyStorage
             -> LWWRefKey s
             -> PrivKey 'Sign s
             -> LWWRef e
             -> m (Maybe HashRef)

updateLWWRef sto k sk v = do
  let box = makeSignedBox @e (fromLwwRefKey k) sk v
  runMaybeT do
    hx <- putBlock sto (serialise box) >>= toMPlus
    updateRef sto k hx
    pure (HashRef hx)
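A minimal usage sketch (not part of this commit) for the two helpers above: read the current value of an LWW reference, bump its sequence number, and point it at a new hash. It assumes the usual project extensions (TypeApplications, ScopedTypeVariables), `runExceptT` from Control.Monad.Except, and that the caller supplies the storage handle, the reference key and the matching signing key.

-- Sketch only: bump an LWW reference using readLWWRef / updateLWWRef.
bumpLWWRef :: forall e s m . ( MonadIO m
                             , Encryption e ~ s
                             , ForLWWRefProto e
                             , Signatures s
                             , IsRefPubKey s
                             )
           => AnyStorage
           -> LWWRefKey s
           -> PrivKey 'Sign s     -- signing key matching the reference key
           -> HashRef             -- hash the reference should point to
           -> m (Maybe HashRef)
bumpLWWRef sto key sk newVal = do
  -- previous value (if any) decides the next sequence number
  prev <- either (const Nothing) id <$> runExceptT (readLWWRef @e sto key)
  let seqno = maybe 0 (succ . lwwSeq) prev
  updateLWWRef @s @e sto key sk (LWWRef seqno newVal Nothing)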
@ -0,0 +1,117 @@
module HBS2.Peer.Proto.LWWRef.Internal
  ( module HBS2.Peer.Proto.LWWRef.Internal
  , module HBS2.Peer.Proto.LWWRef
  ) where

import HBS2.Prelude.Plated
import HBS2.Peer.Proto.LWWRef
import HBS2.Data.Types.SignedBox
import HBS2.Storage

import HBS2.Hash
import HBS2.Clock
import HBS2.Net.Proto
import HBS2.Net.Auth.Credentials
import HBS2.Base58
import HBS2.Events
import HBS2.Actors.Peer.Types
import HBS2.Peer.Proto.Peer
import HBS2.Net.Proto.Sessions
import HBS2.Data.Types.Refs
import HBS2.Misc.PrettyStuff
import HBS2.System.Logger.Simple

import Codec.Serialise
import Control.Monad
import Control.Monad.Trans.Maybe
import Data.Maybe

{- HLINT ignore "Functor law" -}


data LWWRefProtoAdapter e m =
  LWWRefProtoAdapter
  { lwwFetchBlock :: Hash HbSync -> m ()
  }

lwwRefProto :: forall e s m proto . ( MonadIO m
                                    , ForLWWRefProto e
                                    , Request e proto m
                                    , Response e proto m
                                    , HasDeferred proto e m
                                    , HasGossip e (LWWRefProto e) m
                                    , HasStorage m
                                    , IsPeerAddr e m
                                    , Pretty (Peer e)
                                    , Sessions e (KnownPeer e) m
                                    , Signatures s
                                    , Pretty (AsBase58 (PubKey 'Sign s))
                                    , s ~ Encryption e
                                    , proto ~ LWWRefProto e
                                    )
            => LWWRefProtoAdapter e m
            -> LWWRefProto e -> m ()

lwwRefProto adapter pkt@(LWWRefProto1 req) = do
  debug $ yellow "lwwRefProto"

  case req of
    LWWProtoGet key -> deferred @proto $ void $ runMaybeT do
      sto <- getStorage

      ref <- getRef sto key >>= toMPlus

      box <- getBlock sto ref
               >>= toMPlus
               <&> deserialiseOrFail
               >>= toMPlus

      lift $ response (LWWRefProto1 (LWWProtoSet @e key box))

    LWWProtoSet key box -> void $ runMaybeT do

      (puk, lww) <- MaybeT $ pure $ unboxSignedBox0 box

      guard ( puk == fromLwwRefKey key )

      deferred @proto do

        sto <- getStorage

        let bs = serialise box
        let h0 = hashObject @HbSync bs

        new <- hasBlock sto h0 <&> isNothing

        when new do
          lift $ gossip pkt

        lift $ lwwFetchBlock adapter (fromHashRef (lwwValue lww))

        getRef sto key >>= \case
          Nothing -> do
            h <- enqueueBlock sto bs >>= toMPlus
            updateRef sto key h

          Just rv -> do
            blk' <- getBlock sto rv
            maybe1 blk' (forcedUpdateLwwRef sto key bs) $ \blk -> do

              let lww0 = deserialiseOrFail @(SignedBox (LWWRef e) e) blk
                           & either (const Nothing) Just
                           >>= unboxSignedBox0
                           <&> snd

              let seq0 = lwwSeq <$> lww0
              let lwwv0 = lwwValue <$> lww0

              when ( Just (lwwSeq lww) > seq0
                     || (Just (lwwSeq lww) == seq0 && Just (lwwValue lww) > lwwv0)
                   ) do
                forcedUpdateLwwRef sto key (serialise box)

  where
    forcedUpdateLwwRef sto key bs = do
      h' <- enqueueBlock sto bs
      forM_ h' $ updateRef sto key
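For reference, the ordering check in the LWWProtoSet branch above is the whole conflict-resolution rule: a higher `lwwSeq` wins, and a tie on `lwwSeq` falls back to comparing `lwwValue`. A hedged restatement of that check as a pure predicate (illustrative, not in the commit):

-- Illustrative only: True when an incoming LWWRef should replace the stored one.
-- A missing or unreadable stored value (Nothing) always loses to the incoming update.
shouldReplace :: Maybe (LWWRef e) -> LWWRef e -> Bool
shouldReplace stored incoming =
     Just (lwwSeq incoming) > (lwwSeq <$> stored)
  || (   Just (lwwSeq incoming)   == (lwwSeq   <$> stored)
      && Just (lwwValue incoming) >  (lwwValue <$> stored) )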
@ -24,6 +24,8 @@ import Data.ByteString (ByteString)
import Type.Reflection (someTypeRep)
import Lens.Micro.Platform



newtype RefLogKey s = RefLogKey { fromRefLogKey :: PubKey 'Sign s }
  deriving stock Generic
@ -0,0 +1,40 @@
module HBS2.Peer.RPC.API.LWWRef where

import HBS2.Peer.Prelude
import HBS2.Peer.Proto.LWWRef
import HBS2.Data.Types.SignedBox
import HBS2.Net.Messaging.Unix
import HBS2.Data.Types.Refs (HashRef(..))
import HBS2.Net.Proto.Service
import HBS2.Peer.Proto.RefLog (RefLogUpdate)

import Data.ByteString.Lazy (ByteString)
import Codec.Serialise

data RpcLWWRefGet
data RpcLWWRefUpdate
data RpcLWWRefFetch

type LWWRefAPI = '[ RpcLWWRefGet    -- may be done via storage
                  , RpcLWWRefUpdate --
                  , RpcLWWRefFetch  --
                  ]

instance HasProtocol UNIX (ServiceProto LWWRefAPI UNIX) where
  type instance ProtocolId (ServiceProto LWWRefAPI UNIX) = 16267229472009458342
  type instance Encoded UNIX = ByteString
  decode = either (const Nothing) Just . deserialiseOrFail
  encode = serialise

type instance Input RpcLWWRefGet = LWWRefKey HBS2Basic
type instance Output RpcLWWRefGet = Maybe (LWWRef L4Proto)

type instance Input RpcLWWRefFetch = LWWRefKey HBS2Basic
type instance Output RpcLWWRefFetch = ()

type instance Input RpcLWWRefUpdate = SignedBox (LWWRef L4Proto) L4Proto
type instance Output RpcLWWRefUpdate = ()
@ -0,0 +1,107 @@
module Main where

import HBS2.Prelude.Plated
import HBS2.OrDie
import HBS2.Base58 as B58
import HBS2.Hash
import HBS2.Net.Proto.Types
import HBS2.Peer.Proto.RefLog
import HBS2.Net.Auth.Schema
import HBS2.Misc.PrettyStuff

import Test.Tasty
import Test.Tasty.HUnit

import Data.Maybe
import Data.ByteString
import Data.ByteString.Lazy qualified as LBS
import Codec.Serialise
import Crypto.Saltine.Core.Sign qualified as Sign


newtype W a = W a
  deriving stock Generic

instance Serialise a => Serialise (W a)


newtype X a = X a
  deriving stock Generic

instance Serialise a => Serialise (X a)

newtype VersionedPubKey = VersionedPubKey { versionedPubKey :: ByteString }
  deriving stock (Show,Generic)

data RefLogRequestVersioned e =
  RefLogRequestVersioned
  { refLogRequestVersioned :: VersionedPubKey
  }
  deriving stock (Show,Generic)

instance Serialise VersionedPubKey

instance Serialise (RefLogRequestVersioned e)

testVersionedKeysHashes :: IO ()
testVersionedKeysHashes = do

  keypart <- fromBase58 "BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP"
               & orThrowUser "bad base58"
               <&> LBS.fromStrict

  pk <- fromStringMay @(PubKey 'Sign HBS2Basic) "BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP"
          & orThrowUser "key decode"

  let pks = serialise pk

  pks2 <- deserialiseOrFail @(PubKey 'Sign HBS2Basic) (pks <> "12345")
            & orThrowUser "key decode error"

  let rfk = serialise (RefLogKey @HBS2Basic pk)
  let wrfk = serialise $ W (RefLogKey @HBS2Basic pk)
  let xrfk = serialise $ X (RefLogKey @HBS2Basic pk)

  print $ pretty (AsHexSparse keypart)
  print $ pretty (AsHexSparse pks)
  print $ pretty (AsHexSparse rfk)
  print $ pretty (AsHexSparse wrfk)
  print $ pretty (AsHexSparse xrfk)

  let req1 = RefLogRequest @L4Proto pk

  let req2 = RefLogRequestVersioned @L4Proto ( VersionedPubKey (LBS.toStrict keypart <> "AAA") )

  print $ yellow "okay"

  let req1s = serialise req1
  let req2s = serialise req2

  print $ pretty "---"

  print $ pretty (AsHexSparse req1s)
  print $ pretty (AsHexSparse req2s)

  rq0 <- deserialiseOrFail @(RefLogRequestVersioned L4Proto) req1s
           & orThrowUser "failed simple -> versioned"

  rq1 <- deserialiseOrFail @(RefLogRequest L4Proto) req2s
           & orThrowUser "failed versioned -> simple"

  print $ viaShow rq0
  print $ viaShow req1

  print $ viaShow rq1

  pure ()

main :: IO ()
main =
  defaultMain $
    testGroup "root"
      [
        testCase "testVersionedKeys" testVersionedKeysHashes
      ]
@ -1,6 +1,6 @@
cabal-version: 3.0
name: hbs2-share
version: 0.1.0.0
version: 0.24.1.0
-- synopsis:
-- description:
license: BSD-3-Clause
@ -1,6 +1,6 @@
cabal-version: 3.0
name: hbs2-storage-simple
version: 0.1.0.0
version: 0.24.1.0
-- synopsis:
-- description:
license: BSD-3-Clause
@ -947,47 +947,3 @@ executable test-playground
    , resourcet
    , text-icu >= 0.8.0.3


executable test-repo-export
  import: shared-properties
  default-language: Haskell2010

  -- other-extensions:

  hs-source-dirs: repo-export
  main-is: RepoExportMain.hs
  build-depends:
      base, hbs2-core, hbs2-peer, hbs2-git
    , async
    , bytestring
    , cache
    , containers
    , directory
    , exceptions
    , hashable
    , microlens-platform
    , mtl
    , prettyprinter
    , random
    , safe
    , serialise
    , stm
    , streaming
    , transformers
    , uniplate
    , vector
    , simple-logger
    , string-conversions
    , filepath
    , temporary
    , unliftio
    , unordered-containers
    , timeit
    , memory
    , deepseq
    , xxhash-ffi
    , optparse-generic
    , interpolatedstring-perl6
Some files were not shown because too many files have changed in this diff.