mirror of https://github.com/voidlizard/hbs2

boosted download

parent 4e136aa08b
commit 83b4d8bff8

Makefile | 29
@@ -5,11 +5,39 @@ SHELL := bash
 MAKEFLAGS += --warn-undefined-variables
 MAKEFLAGS += --no-builtin-rules

+GHC_VERSION := 9.4.8
+
+BIN_DIR := ./bin
+
+BINS := \
+  hbs2 \
+  hbs2-peer \
+  hbs2-reposync \
+  hbs2-keyman \
+  git-remote-hbs2 \
+  git-hbs2 \
+
 ifeq ($(origin .RECIPEPREFIX), undefined)
 $(error This Make does not support .RECIPEPREFIX. Please use GNU Make 4.0 or later)
 endif
 .RECIPEPREFIX = >

+$(BIN_DIR):
+> @mkdir -p $@
+
+.PHONY: symlinks
+symlinks: $(BIN_DIR)
+> @mkdir -p $(BIN_DIR)
+> @echo $(BIN_DIR)
+> @for bin in $(BINS); do \
+>   path=`find dist-newstyle -type f -name $$bin -path "*$(GHC_VERSION)*" | head -n 1`; \
+>   if [ -n "$$path" ]; then \
+>     echo "Creating symlink for $$bin"; \
+>     ln -sf $$PWD/$$path $(BIN_DIR)/$$bin; \
+>   else \
+>     echo "Binary $$bin for GHC $(GHC_VERSION) not found"; \
+>   fi; \
+> done
+
 .PHONY: build
 build:
 > nix develop -c cabal build all
@@ -21,3 +49,4 @@ test-core:
 .PHONY: test-raft
 test-raft:
 > nix develop -c ghcid -c 'cabal repl' raft-algo -T RaftAlgo.Proto.devTest
+
@@ -1,3 +1,8 @@
+## 2024-02-06
+
+New repository format and a rework of hbs2-git; status: wip.
+
+
 ## 2023-11-03

 PR: hbs2-file-logger-strikes-again
@@ -10,7 +10,7 @@ license-file: LICENSE
 -- copyright:
 category: Network
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common warnings
@@ -10,7 +10,7 @@ license-file: LICENSE
 -- copyright:
 category: Network
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common warnings
flake.lock | 12

@@ -192,11 +192,11 @@
     "flake-utils": "flake-utils_4"
   },
   "locked": {
-    "lastModified": 1672412555,
-    "narHash": "sha256-Kaa8F7nQFR3KuS6Y9WRUxeJeZlp6CCubyrRfmiEsW4k=",
+    "lastModified": 1698938553,
+    "narHash": "sha256-oXpTKXioqFbl2mhhvpJIAvgNd+wYyv4ekI+YnJHEJ6s=",
     "owner": "ivanovs-4",
     "repo": "haskell-flake-utils",
-    "rev": "896219e5bde6efac72198550454e9dd9b5ed9ac9",
+    "rev": "19b273b5dc401a0a565e7f75cf50a593871b80c9",
     "type": "github"
   },
   "original": {

@@ -266,11 +266,11 @@
   },
   "nixpkgs": {
     "locked": {
-      "lastModified": 1697009197,
-      "narHash": "sha256-viVRhBTFT8fPJTb1N3brQIpFZnttmwo3JVKNuWRVc3s=",
+      "lastModified": 1707451808,
+      "narHash": "sha256-UwDBUNHNRsYKFJzyTMVMTF5qS4xeJlWoeyJf+6vvamU=",
       "owner": "nixos",
       "repo": "nixpkgs",
-      "rev": "01441e14af5e29c9d27ace398e6dd0b293e25a54",
+      "rev": "442d407992384ed9c0e6d352de75b69079904e4e",
       "type": "github"
     },
     "original": {
@@ -95,7 +95,11 @@ outputs = { self, nixpkgs, haskell-flake-utils, ... }@inputs:
        ghcid
        cabal-install
        haskell-language-server
+       hoogle
        htags
+       text-icu
+       pkgs.icu72
+       pkgs.openssl
      ])
      ++
      [ pkgs.pkg-config

@@ -106,6 +110,7 @@ outputs = { self, nixpkgs, haskell-flake-utils, ... }@inputs:

        shellHook = ''
          export GIT_HASH="${self.rev or self.dirtyRev or "dirty"}"
+         export STAN_USE_DEFAULT_CONFIG=True
        '';

      };
@@ -10,7 +10,7 @@ license-file: LICENSE
 -- copyright:
 category: Network
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common warnings

@@ -143,10 +143,12 @@ library
     , HBS2.System.Logger.Simple
     , HBS2.System.Logger.Simple.ANSI
     , HBS2.System.Logger.Simple.Class
+    , HBS2.System.Dir
     , HBS2.Net.Dialog.Core
     , HBS2.Net.Dialog.Client
     , HBS2.Net.Dialog.Helpers.List
     , HBS2.Net.Dialog.Helpers.Streaming
+    , HBS2.Misc.PrettyStuff
    , HBS2.Version


@@ -35,6 +35,16 @@ class IsTimeout a where
   toTimeSpec :: Timeout a -> TimeSpec
   toTimeSpec x = fromNanoSecs (fromIntegral (toNanoSeconds x))

+class Expired timeout interval where
+  expired :: timeout -> interval -> Bool
+
+instance IsTimeout t => Expired (Timeout t) TimeSpec where
+  expired t ts = fromIntegral (toNanoSecs ts) > toNanoSeconds t
+
+-- expired :: IsTimeout t => Timeout 't -> TimeSpec -> Bool
+-- expired timeout ts = False
+
 toNominalDiffTime :: IsTimeout t => Timeout t -> NominalDiffTime
 toNominalDiffTime = fromRational . (/ (10^6)) . fromIntegral . toMicroSeconds
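
The new Expired class reduces a timeout check to one comparison between a configured Timeout and an elapsed TimeSpec; later in this commit the download loop calls it as `expired defBlockInfoTimeout (now - t0)`. A minimal sketch of that pattern, assuming only what the diff shows (getTimeCoarse and TimeSpec subtraction are used elsewhere in this commit; the helper name is illustrative, and DataKinds is assumed for the 'Seconds kind):

  -- Hypothetical helper, not part of the diff: has the timeout elapsed since t0?
  waitedTooLong :: MonadIO m => Timeout 'Seconds -> TimeSpec -> m Bool
  waitedTooLong t t0 = do
    now <- liftIO getTimeCoarse
    pure (expired t (now - t0))
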
@@ -70,7 +70,7 @@ defBlockWipTimeout :: TimeSpec
 defBlockWipTimeout = defCookieTimeout

 defBlockInfoTimeout :: Timeout 'Seconds
-defBlockInfoTimeout = 20
+defBlockInfoTimeout = 2

 defBlockInfoTimeoutSpec :: TimeSpec
 defBlockInfoTimeoutSpec = toTimeSpec defBlockInfoTimeout
@@ -81,7 +81,7 @@ defBlockWaitMax = 60 :: Timeout 'Seconds

 -- how much time wait for block from peer?
 defChunkWaitMax :: Timeout 'Seconds
-defChunkWaitMax = 30 :: Timeout 'Seconds
+defChunkWaitMax = 10 :: Timeout 'Seconds

 defSweepTimeout :: Timeout 'Seconds
 defSweepTimeout = 60 -- FIXME: only for debug!
@@ -0,0 +1,29 @@
+module HBS2.Misc.PrettyStuff
+  ( module HBS2.Misc.PrettyStuff
+  , hPutDoc
+  ) where
+
+import Data.Text qualified as Text
+import Prettyprinter
+import Prettyprinter.Render.Terminal
+
+green :: Doc AnsiStyle -> Doc AnsiStyle
+green = annotate (color Green)
+
+yellow :: Doc AnsiStyle -> Doc AnsiStyle
+yellow = annotate (color Yellow)
+
+red :: Doc AnsiStyle -> Doc AnsiStyle
+red = annotate (color Red)
+
+blue :: Doc AnsiStyle -> Doc AnsiStyle
+blue = annotate (color Blue)
+
+section :: Doc ann
+section = line <> line
+
+toStringANSI :: Doc AnsiStyle -> String
+toStringANSI doc = Text.unpack $ renderStrict $ layoutPretty defaultLayoutOptions doc
+
+
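
These helpers are thin wrappers over prettyprinter-ansi-terminal annotations. A small usage sketch (the message itself is made up; assumes OverloadedStrings, as used throughout the codebase, and import System.IO (stderr)):

  -- Print a coloured status line to stderr; hPutDoc is re-exported by the module above.
  report :: IO ()
  report = hPutDoc stderr $
    green "downloaded" <+> pretty (42 :: Int) <> section <> red "failed" <+> pretty (1 :: Int)
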
@@ -303,7 +303,7 @@ instance (ForByPass e, Messaging w e ByteString)
      -- FIXME: check-code
      guard ( code == Just heySeed )

-     debug $ "HEY CODE:" <> parens (pretty code) <+> pretty orig
+     trace $ "HEY CODE:" <> parens (pretty code) <+> pretty orig

      guard (not (LBS.null hbs))

@@ -395,7 +395,7 @@ runNotifySink sink k action = do
  -- NOTE: run-notify-sink-cleanup
  -- if we get killed, at least say goodbye to the NotifySink
  let cleanup = do
-       trace $ "CLIENT: cleanip and exit" <+> pretty ha
+       trace $ "CLIENT: cleanup and exit" <+> pretty ha
        atomically $ writeTQueue (sinkPipeline sink) (NotifySinkBye ha)
        atomically $ modifyTVar (sinkNotify sink) (HashMap.delete ha)
        cancel w
@@ -19,6 +19,7 @@ module HBS2.Prelude
  , FromByteString(..)
  , Text.Text
  , (&), (<&>), for_, for
+ , HasErrorStatus(..), ErrorStatus(..), SomeError(..)
  ) where

 import Data.Typeable as X

@@ -96,6 +97,22 @@ instance Monad m => ToMPlus (MaybeT m) (Either x a) where
  toMPlus (Right x) = MaybeT $ pure (Just x)


+data ErrorStatus = Complete
+                 | HasIssuesButOkay
+                 | Failed
+                 | SNAFU
+                 | Unknown
+                 deriving stock (Eq,Ord,Show,Enum,Generic)
+
+class HasErrorStatus e where
+  getStatus :: e -> ErrorStatus
+
+-- instance {-# OVERLAPPABLE #-} HasErrorStatus e where
+--   getStatus _ = Unknown
+
+data SomeError = forall e . (Show e, HasErrorStatus e) =>
+  SomeError e
+
 asyncLinked :: MonadUnliftIO m => m a -> m (Async a)
 asyncLinked m = do
   l <- async m
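
A quick sketch of how the new error-status pieces fit together; the DownloadError type below is illustrative, not something this commit defines (DerivingStrategies assumed, as in the rest of the codebase):

  data DownloadError = PeerTimeout | BlockMissing
    deriving stock (Show)

  instance HasErrorStatus DownloadError where
    getStatus PeerTimeout  = Failed
    getStatus BlockMissing = HasIssuesButOkay

  -- Any such error can be erased to SomeError and still report a status.
  describe :: SomeError -> String
  describe (SomeError e) = show (getStatus e) <> ": " <> show e
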
@@ -20,41 +20,8 @@ findMissedBlocks :: (MonadIO m) => AnyStorage -> HashRef -> m [HashRef]
 findMissedBlocks sto href = do
   -- TODO: limit-recursion-depth?
   -- TODO: cache-results-limit-calls-freq

   -- trace $ "findMissedBlocks" <+> pretty href
-  S.toList_ $ do
-
-    walkMerkle (fromHashRef href) (lift . getBlock sto) $ \(hr :: Either (Hash HbSync) [HashRef]) -> do
-      case hr of
-        -- FIXME: investigate-this-wtf
-        Left hx -> S.yield (HashRef hx)
-
-        Right (hrr :: [HashRef]) -> do
-          forM_ hrr $ \hx -> runMaybeT do
-            blk <- lift $ getBlock sto (fromHashRef hx)
-
-            unless (isJust blk) do
-              lift $ S.yield hx
-
-            maybe1 blk none $ \bs -> do
-              let w = tryDetect (fromHashRef hx) bs
-              r <- case w of
-                Merkle{} -> lift $ lift $ findMissedBlocks sto hx
-                MerkleAnn t -> lift $ lift do
-                  -- FIXME: make-tail-recursive
-
-                  b0 <- case _mtaMeta t of
-                    AnnHashRef hm -> findMissedBlocks sto (HashRef hm)
-                    _ -> pure mempty
-
-                  b1 <- findMissedBlocks sto hx
-                  pure (b0 <> b1)
-
-                _ -> pure mempty
-
-              lift $ mapM_ S.yield r
+  S.toList_ $ findMissedBlocks2 sto href

 findMissedBlocks2 :: (MonadIO m) => AnyStorage -> HashRef -> Stream (Of HashRef) m ()
 findMissedBlocks2 sto href = do
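
Routing findMissedBlocks through the streaming findMissedBlocks2 means a caller that only needs a yes/no answer can stop at the first missing block instead of building the whole list, which is exactly what checkDownloaded does later in this commit via S.head_. A sketch of that consumption pattern (the name anyMissing is illustrative):

  import Streaming.Prelude qualified as S

  -- True if at least one block under the given root is missing;
  -- only the first element of the stream is ever demanded.
  anyMissing :: MonadIO m => AnyStorage -> HashRef -> m Bool
  anyMissing sto href = isJust <$> S.head_ (findMissedBlocks2 sto href)
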
@@ -0,0 +1,46 @@
+module HBS2.System.Dir
+  ( module HBS2.System.Dir
+  , module System.FilePath
+  , module System.FilePattern
+  , module UnliftIO
+  ) where
+
+import System.FilePath
+import System.FilePattern
+import System.Directory as D
+import UnliftIO hiding (try)
+
+
+data MkDirOpt = MkDirOptNone
+
+class HasMkDirOptions a where
+  mkdirOpts :: a -> [MkDirOpt]
+
+instance HasMkDirOptions FilePath where
+  mkdirOpts = mempty
+
+class ToFilePath a where
+  toFilePath :: a -> FilePath
+
+instance ToFilePath FilePath where
+  toFilePath = id
+
+mkdir :: (MonadIO m, ToFilePath a) => a -> m ()
+mkdir a = do
+  liftIO $ createDirectoryIfMissing True (toFilePath a)
+
+pwd :: MonadIO m => m FilePath
+pwd = liftIO D.getCurrentDirectory
+
+doesPathExist :: MonadIO m => FilePath -> m Bool
+doesPathExist = liftIO . D.doesPathExist
+
+canonicalizePath :: MonadIO m => FilePath -> m FilePath
+canonicalizePath = liftIO . D.canonicalizePath
+
+expandPath :: MonadIO m => FilePath -> m FilePath
+expandPath = liftIO . D.canonicalizePath
+
+doesDirectoryExist :: MonadIO m => FilePath -> m Bool
+doesDirectoryExist = liftIO . D.doesDirectoryExist
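
The module is a set of MonadIO-lifted wrappers over directory/filepath plus re-exports, so call sites can drop the liftIO noise. A usage sketch (the directory name is made up):

  -- Create a working directory under the current one and return its canonical path.
  ensureWorkDir :: MonadIO m => m FilePath
  ensureWorkDir = do
    base <- pwd
    let dir = base </> ".hbs2-work"
    mkdir dir                     -- createDirectoryIfMissing True underneath
    canonicalizePath dir
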
@@ -8,10 +8,14 @@ module HBS2.System.Logger.Simple.ANSI
  , info
  , AnsiStyle
  , ToLogStr(..)
+ , INFO,NOTICE,WARN,ERROR,DEBUG,TRACE
+ , setLogging,setLoggingOff
+ , toStderr,toStdout,logPrefix,defLog
  ) where

 import Prettyprinter.Render.Terminal
 import HBS2.System.Logger.Simple qualified as Logger
+import HBS2.System.Logger.Simple (INFO,NOTICE,WARN,ERROR,DEBUG,TRACE,setLoggingOff,setLogging,toStderr,toStdout,logPrefix,defLog)
 import Control.Monad.IO.Class
 import Prettyprinter
 import System.Log.FastLogger
@@ -10,7 +10,7 @@ maintainer: dzuikov@gmail.com
 -- copyright:
 category: Data
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common warnings
@@ -28,68 +28,23 @@ import PeerInfo
 import Brains
 import DownloadMon

-import Control.Concurrent.Async
-import Control.Concurrent.STM
-import Control.Concurrent.STM.TSem
+import Control.Concurrent.STM qualified as STM
+import Control.Monad.Trans.Cont
 import Control.Monad.Reader
 import Control.Monad.Trans.Maybe
-import Data.ByteString.Lazy (ByteString)
-import Data.ByteString.Lazy qualified as LBS
 import Data.Cache qualified as Cache
-import Data.Foldable hiding (find)
+import Data.HashMap.Strict (HashMap)
 import Data.HashMap.Strict qualified as HashMap
-import Data.HashSet (HashSet)
-import Data.HashSet qualified as HashSet
-import Data.IntMap (IntMap)
 import Data.IntMap qualified as IntMap
 import Data.IntSet qualified as IntSet
-import Data.List qualified as List
 import Data.Maybe
 import Lens.Micro.Platform
-import System.Random (randomRIO)
-import System.Random.Shuffle (shuffleM)
 import Codec.Serialise
+import Data.Hashable
+import System.Random.Shuffle (shuffleM)
+import Control.Concurrent (getNumCapabilities)
+import UnliftIO

-getBlockForDownload :: forall e m . (MonadIO m, IsPeerAddr e m, MyPeer e, HasStorage m)
-                    => Peer e
-                    -> BlockDownloadM e m (Maybe (Hash HbSync))
-
-getBlockForDownload peer = do
-  pa <- lift $ toPeerAddr peer
-  tinq <- asks (view blockInQ)
-  brains <- asks (view downloadBrains)
-  prop <- asks (view blockProposed)
-
-  sto <- lift getStorage
-
-  inq <- liftIO $ readTVarIO tinq
-  -- let size = HashMap.size inq
-
-  let allBlks = HashMap.keys inq
-
-  hs' <- forM allBlks $ \blk -> do
-    here <- liftIO $ hasBlock sto blk <&> isJust
-    newOne <- shouldDownloadBlock @e brains peer blk
-
-    if not here && newOne then do
-      pure $ Just blk
-    else do
-      po <- shouldPostponeBlock @e brains blk
-
-      when po do
-        postponeBlock blk
-
-      pure Nothing
-
-  let hs = catMaybes hs'
-  let size = length hs
-
-  if size == 0 then do
-    pure Nothing
-  else do
-    i <- randomRIO (0, size - 1)
-    let blk = HashMap.keys inq !! i
-    pure $ Just blk
-
 processBlock :: forall e m . ( MonadIO m
@@ -253,9 +208,9 @@ downloadFromWithPeer peer thisBkSize h = do
  rtt <- medianPeerRTT pinfo <&> fmap ( (/1e9) . realToFrac )
                             <&> fromMaybe defChunkWaitMax

- let w = 4 * rtt * realToFrac (length bursts)
+ let w = 4 * rtt * 256 -- realToFrac (length bursts)

- let burstTime = max defChunkWaitMax $ realToFrac w :: Timeout 'Seconds
+ let burstTime = min defChunkWaitMax $ realToFrac w :: Timeout 'Seconds

  trace $ "BURST TIME" <+> pretty burstTime

@@ -272,7 +227,7 @@ downloadFromWithPeer peer thisBkSize h = do
        Just (i,chunksN) -> do
          let req = BlockGetChunks h chusz (fromIntegral i) (fromIntegral chunksN)

-         void $ liftIO $ atomically $ flushTQueue chuQ
+         void $ liftIO $ atomically $ STM.flushTQueue chuQ

          lift $ request peer (BlockChunks @e coo req)

@@ -283,7 +238,7 @@ downloadFromWithPeer peer thisBkSize h = do
              m <- readTVar r
              pure (j, IntMap.member j m)

-           let here = and $ fmap snd hc
+           let here = all snd hc
            if here then do
              pure here

@@ -291,9 +246,9 @@ downloadFromWithPeer peer thisBkSize h = do
              pause rtt
              zzz

-         void $ liftIO $ race ( pause (2 * rtt) ) $ atomically do
+         void $ liftIO $ race ( pause (8 * rtt) ) $ atomically do
            void $ peekTQueue chuQ
-           flushTQueue chuQ
+           STM.flushTQueue chuQ

          catched <- waity <&> either id id

@@ -337,6 +292,7 @@ downloadFromWithPeer peer thisBkSize h = do
            trace $ "PROCESS BLOCK" <+> pretty coo <+> pretty h
            lift $ expire @e key
            void $ liftIO $ putBlock sto block
+           deleteBlockFromQ h
            onBlockDownloaded brains peer h
            void $ processBlock h
          else do

@@ -362,6 +318,7 @@ downloadFromWithPeer peer thisBkSize h = do
  trace $ "downloadFromWithPeer EXIT" <+> pretty coo


+
 instance HasPeerLocator e m => HasPeerLocator e (BlockDownloadM e m) where
   getPeerLocator = lift getPeerLocator

@@ -426,6 +383,45 @@ updatePeerInfo onError _ pinfo = do
    trimDown n s | IntSet.size s >= n = IntSet.deleteMax s
                 | otherwise = s

+data ByFirst a b = ByFirst a b
+
+instance Eq a => Eq (ByFirst a b) where
+  (==) (ByFirst a _) (ByFirst b _) = a == b
+
+instance Hashable a => Hashable (ByFirst a b) where
+  hashWithSalt s (ByFirst a _) = hashWithSalt s a
+
+
+data DTask =
+  DTask
+  { _dtaskBlock     :: Hash HbSync
+  , _dtaskBlockSize :: Integer
+  }
+
+data DState e =
+  DState
+  { _dPeerInbox :: TVar (HashMap (Peer e) (TBQueue DTask, [Async ()]))
+  }
+
+data PState =
+    PIdle
+  | PWork DTask
+  | PCheckPeer
+
+newDState :: forall e m . (MonadUnliftIO m, MyPeer e) => m (DState e)
+newDState = DState @e <$> newTVarIO mempty
+
+downloadOnBlockSize :: (MonadIO m, IsPeerAddr e m)
+                    => DownloadEnv e
+                    -> (Peer e, Hash HbSync, Maybe Integer)
+                    -> m ()
+
+downloadOnBlockSize denv (p,h,size) = do
+  maybe1 size none $ \s -> do
+    debug $ "GOT BLOCK SIZE" <+> pretty h
+    onBlockSize (_downloadBrains denv) p h s
+    atomically $ writeTVar (_blockInDirty denv) True
+
 blockDownloadLoop :: forall e m . ( m ~ PeerM e IO
                                   , MonadIO m
                                   , Request e (BlockInfo e) m
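
ByFirst compares and hashes only its first component, so a (key, payload) pair can sit in a HashSet or HashMap keyed by the first field alone. A quick illustration with made-up values:

  import Data.HashSet qualified as HashSet

  -- The two entries with first component 1 collide by design: size is 2, not 3.
  demo :: HashSet.HashSet (ByFirst Int String)
  demo = HashSet.fromList [ByFirst 1 "keep", ByFirst 1 "dup", ByFirst 2 "other"]
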
@@ -455,21 +451,46 @@ blockDownloadLoop :: forall e m . ( m ~ PeerM e IO
                  => DownloadEnv e -> m ()
 blockDownloadLoop env0 = do

+  -- [dmz@minipig:~/w/hbs2]$ hbs2 cat 8is4yaZLi4sK3mPSS7Z9yrJK8dRXQyrcD54qe1GWi8qe | wc -c
+  -- 1278173938
+
+  -- MiB (RX Bytes/second)
+  -- 90.25 .....|.............||.....
+  -- 75.21 .....||||||..||||.|||.....
+  -- 60.17 ....||||||||||||||||||....
+  -- 45.13 ....||||||||||||||||||....
+  -- 30.08 ....|||||||||||||||||||...
+  -- 15.04 ::::|||||||||||||||||||:::
+  -- 1 15 20 25 30 35
+
+  -- MiB (RX Bytes/second)
+  -- 74.60 ......|||||..|||||||.|.|...
+  -- 62.17 ......||||||||||||||||||...
+  -- 49.74 ......||||||||||||||||||...
+  -- 37.30 ......|||||||||||||||||||..
+  -- 24.87 ......|||||||||||||||||||..
+  -- 12.43 :::::|||||||||||||||||||||:
+  -- 1 10 15 20 25 30
+
+  -- FIXME: asap-fix-async-spawning
+
   e <- ask

-  let blks = mempty
-
   pl <- getPeerLocator @e

+  sto <- getStorage
+
   pause @'Seconds 3.81

   let withAllStuff = withPeerM e . withDownload env0

+  flip runContT pure do
+
     -- FIXME: exception-handling
-  void $ liftIO $ async $ withPeerM e do
+    void $ ContT $ withAsync $ withPeerM e do
       downloadMonLoop (view downloadMon env0)

-  void $ liftIO $ async $ forever $ withPeerM e do
+    void $ ContT $ withAsync $ forever $ withPeerM e do
       pause @'Seconds 30

       pee <- knownPeers @e pl
@@ -480,7 +501,7 @@ blockDownloadLoop env0 = do
        liftIO $ atomically $ writeTVar (view peerBurstMax pinfo) Nothing


-  void $ liftIO $ async $ forever $ withPeerM e do
+    void $ ContT $ withAsync $ forever $ withPeerM e do
      pause @'Seconds 1.5

      pee <- knownPeers @e pl
@@ -491,123 +512,228 @@ blockDownloadLoop env0 = do
        updatePeerInfo False p pinfo


-  void $ liftIO $ async $ forever $ withAllStuff do
+    void $ ContT $ withAsync $ withAllStuff do
+
+      brains <- asks (view downloadBrains)
+      q <- asks (view blockInQ)
+
+      let refs = liftIO $ readTVarIO q <&> HashMap.keys <&> fmap (,2)
+
+      polling (Polling 5 2.5) refs $ \h -> do
+        here <- hasBlock sto h <&> isJust
+
+        if here then do
+          deleteBlockFromQ h
+        else do
+          po <- shouldPostponeBlock @e brains h
+          when po do
+            postponeBlock h
+
+    void $ ContT $ withAsync $ forever $ withAllStuff do
+      printDownloadStats
+
+      -- inQ <- asks (view blockInQ)
+      -- brains <- asks (view downloadBrains)
+
+      -- void $ ContT $ withAsync (withPeerM e (preRequestSizes brains rcache inQ))
+
+    state <- liftIO $ newDState @e
+
+    cores <- liftIO getNumCapabilities
+
+    -- FIXME: limit-cores-number
+    trace $ "!@!! CORES !!!!!" <+> pretty cores
+
+    let inboxCap = 200
+    sizeRq <- newTBQueueIO (10 * inboxCap)
+
+    void $ ContT $ withAsync $ withAllStuff $ forever do
+      req <- atomically (readTBQueue sizeRq)
+      withPeerM e $ broadCastMessage @e req
+
+    void $ ContT $ withAsync $ withAllStuff $ forever do
+      q <- asks (view blockInQ)
+      dirty <- asks (view blockInDirty)
+      brains <- asks (view downloadBrains)
+
+      now <- liftIO getTimeCoarse
+
+      blocks <- readTVarIO q <&> HashMap.toList
+                  >>= liftIO . shuffleM
+
+      for_ blocks $ \(block, status) -> void $ runMaybeT do
+        sst <- readTVarIO (_bsState status)
+
+        case sst of
+          BlkNew -> do
+            trace $ "GOT NEW BLOCK" <+> pretty block
+            atomically $ do
+              full <- isFullTBQueue sizeRq
+              unless full do
+                writeTVar (_bsState status) (BlkSizeAsked now)
+                writeTBQueue sizeRq (GetBlockSize @e block)
+
+          BlkSizeAsked t0 -> do
+
+            trace $ "BLOCK WAIT SIZE" <+> pretty block
+
+            ss <- readTVarIO (_dPeerInbox state)
+
+            candidates' <- for (HashMap.toList ss) $ \(peer, inbox) -> do
+              pinfo <- withPeerM e $ find (PeerInfoKey peer) id
+
+              rtt <- runMaybeT (toMPlus pinfo >>= medianPeerRTT >>= toMPlus)
+                       <&> fmap (logBase 10 . realToFrac)
+
+              bs <- blockSize brains peer block
+
+              maybe1 bs (pure Nothing) $ \size -> do
+                should <- shouldDownloadBlock @e brains peer block
+                if not should
+                  then pure Nothing
+                  else do
+                    pure (Just (peer, size, inbox))
+
+            candidate <- liftIO $ shuffleM (catMaybes candidates') <&> headMay
+            -- candidate <- pure (catMaybes candidates') <&> headMay
+
+            forM_ candidate $ \(_, size, inbox) -> do
+              -- the reader thread has gone away, most likely on a timeout; so be it.
+              -- maybe worth logging.
+              void $ liftIO $ try @_ @SomeException $ atomically do
+                full <- isFullTBQueue (fst inbox)
+                unless full do
+                  writeTVar ( _bsState status) (BlkDownloadStarted now)
+                  writeTBQueue (fst inbox) (DTask block size)
+
+            when (isNothing candidate && expired defBlockInfoTimeout (now - t0) ) do
+              -- we could actually keep count here and move such blocks aside
+              atomically $ writeTVar (_bsState status) BlkNew
+
+          BlkDownloadStarted t0 | expired (600 :: Timeout 'Seconds) (now - t0) -> do
+            here <- liftIO $ hasBlock sto block <&> isJust
+            if here then do
+              lift $ deleteBlockFromQ block
+            else do
+              trace $ "BLOCK DOWNLOAD FAIL" <+> pretty block
+              atomically $ writeTVar (_bsState status) BlkNew
+
+          _ -> none
+
+      -- FIXME: normal-waiting-for-what?
+      -- we should wait in a smarter way here
+      void $ race (pause @'Seconds 1) $ atomically do
+        readTVar dirty >>= STM.check
+        writeTVar dirty False
+
+    npi <- newPeerInfo
+
+    lift $ withAllStuff do
+      brains <- asks (view downloadBrains)
+      dirty <- asks (view blockInDirty)
+
+      let refs = withPeerM e (getKnownPeers @e <&> fmap (,60))
+
+      polling (Polling 5 60) refs $ \peer -> do
+        debug $ "SOME FUCKING PEER:" <+> pretty peer
+
+        -- STEP 1. Poll the peers; spawn workers for any peer that has no registered queue yet
+        here <- readTVarIO (_dPeerInbox state) <&> HashMap.member peer
+
+        -- STEP 2. Create the thread + inbox if it is not there yet
+        unless here do
+          q <- newTBQueueIO inboxCap
+
+          ass <- replicateM cores $ async $ flip runContT pure do
+
+            pinfo <- withPeerM e $ fetch True npi (PeerInfoKey peer) id
+
+            let downFail = view peerDownloadFail pinfo
+            let downBlk = view peerDownloadedBlk pinfo
+
+            void $ ContT $ bracket none $ const $ do
+              atomically do
+                m <- readTVar (_dPeerInbox state)
+                let v = HashMap.lookup peer m
+                forM_ v (STM.flushTBQueue . fst)
+                writeTVar (_dPeerInbox state) (HashMap.delete peer m)
+
+            -- pause @'Seconds 1
+            flip fix PIdle $ \next -> \case
+              PIdle -> do
+                what <- liftIO do
+                  r <- race (pause @'Seconds 60)
+                            (try @_ @SomeException (atomically $ readTBQueue q))
+                  case r of
+                    Left _          -> pure (Left True)
+                    Right (Left{})  -> pure (Left False)
+                    Right (Right x) -> pure (Right x)
+
+                case what of
+                  Left True  -> next PCheckPeer
+                  Left False -> pure ()
+                  Right todo -> do
+                    next (PWork todo)
+
+              PCheckPeer -> do
+                debug $ "PEER CHECK" <+> pretty peer
+                auth <- withPeerM e (find (KnownPeerKey peer) id <&> isJust)
+
+                when auth do
+                  next PIdle
+
+                debug "PEER FINISHING"
+
+              PWork (DTask{..}) -> do
+                debug $ "PEER IS WORKING" <+> pretty peer <+> pretty _dtaskBlock
+
+                let (p,h) = (peer, _dtaskBlock)
+
+                onBlockDownloadAttempt brains peer h
+
+                -- FIXME: ASAP-hardcode
+                r <- liftIO $ race ( pause ( 10 :: Timeout 'Seconds) )
+                       $ withPeerM e
+                       $ withDownload env0
+                       $ downloadFromWithPeer peer _dtaskBlockSize _dtaskBlock
+
+                withPeerM e $ withDownload env0 do
+                  case r of
+                    Left{} -> do
+                      -- liftIO $ atomically $ modifyTVar downFail succ
+                      failedDownload p h
+                      atomically $ modifyTVar downFail succ
+                      debug $ "DOWNLOAD FAILED!" <+> pretty p <+> pretty h
+                      -- addDownload Nothing h
+
+                    Right{} -> do
+                      deleteBlockFromQ h
+                      liftIO $ atomically do
+                        writeTVar downFail 0
+                        modifyTVar downBlk succ
+
+                      debug $ "DOWNLOAD SUCCEED" <+> pretty p <+> pretty h
+
+                next PIdle
+
+          atomically $ modifyTVar (_dPeerInbox state) (HashMap.insert peer (q, ass))
+
+  where
+    printDownloadStats = do
       pause @'Seconds 5 -- FIXME: put to defaults
       -- we need to show download stats

-      wipNum <- asks (view blockInQ) >>= liftIO . readTVarIO <&> HashMap.size
+      q <- asks (view blockInQ)
+
+      wipNum <- liftIO (readTVarIO q) <&> HashMap.size
       po <- postponedNum

       notice $ "maintain blocks wip" <+> pretty wipNum
                  <+> "postponed"
                  <+> pretty po

-  busyPeers <- liftIO $ newTVarIO (mempty :: HashSet (Peer e))
-  released <- liftIO newTQueueIO
-
-  npi <- newPeerInfo
-
-  liftIO $ withAllStuff do
-    brains <- asks (view downloadBrains)
-
-    fix \next -> do
-      wipNum <- asks (view blockInQ) >>= liftIO . readTVarIO <&> HashMap.size
-
-      when (wipNum == 0) do
-        pause @'Seconds 1
-        next
-
-      allPips <- lift $ getKnownPeers @e
-
-      onKnownPeers brains allPips
-
-      pips <- flip filterM allPips $
-        \p -> liftIO do
-          busy <- readTVarIO busyPeers <&> HashSet.member p
-          pure $ not busy
-
-      when (List.null pips) do
-        void $ liftIO $ race (pause @'Seconds 5) $ do
-          trace "ALL PEERS BUSY"
-          void $ liftIO $ atomically $ do
-            p <- readTQueue released
-            ps <- flushTQueue released
-            for_ (p:ps) $ \x -> do
-              modifyTVar busyPeers (HashSet.delete x)
-        next
-
-      for_ pips $ \p -> do
-        h0 <- getBlockForDownload p
-
-        -- trace $ "getBlockForDownload" <+> pretty p <+> pretty h0
-
-        -- FIXME: busyloop-when-no-block-for-peer
-        maybe1 h0 (pure ()) $ \h -> do
-
-          liftIO $ atomically $ do
-            modifyTVar busyPeers (HashSet.insert p)
-
-          void $ liftIO $ async $ withAllStuff do
-
-            -- trace $ "start downloading shit" <+> pretty p <+> pretty h
-
-            lift $ onBlockDownloadAttempt brains p h
-
-            pinfo <- lift $ fetch True npi (PeerInfoKey p) id
-            size' <- blockSize brains p h
-
-            esize <- case size' of
-              Nothing -> do
-                doBlockSizeRequest p h
-
-              Just s -> pure (Right (Just s))
-
-            case esize of
-              Left{} -> pure ()
-              Right Nothing -> do
-                let downMiss = view peerDownloadMiss pinfo
-                liftIO $ atomically $ modifyTVar downMiss succ
-
-              Right (Just size) -> do
-                -- trace $ "BLOCK SIZE" <+> pretty p <+> pretty h <+> pretty size
-                let downFail = view peerDownloadFail pinfo
-                let downBlk = view peerDownloadedBlk pinfo
-
-                r <- liftIO $ race ( pause defBlockWaitMax )
-                       $ withAllStuff
-                       $ downloadFromWithPeer p size h
-
-                liftIO $ atomically $ writeTQueue released p
-
-                case r of
-                  Left{} -> do
-                    liftIO $ atomically $ modifyTVar downFail succ
-                    failedDownload p h
-
-                  Right{} -> do
-                    onBlockDownloaded brains p h
-                    liftIO $ withAllStuff $ processBlock h
-                    liftIO $ atomically do
-                      writeTVar downFail 0
-                      modifyTVar downBlk succ
-
-                -- trace $ "exit download thread" <+> pretty p <+> pretty h
-                liftIO $ atomically $ writeTQueue released p
-
-      next
-
-  withDownload env0 do
-
-    mapM_ processBlock blks
-
-    proposed <- asks (view blockProposed)
-
-    void $ liftIO $ async $ forever do
-      pause @'Seconds 20
-      -- debug "block download loop. does not do anything"
-      liftIO $ Cache.purgeExpired proposed
-

 postponedLoop :: forall e m . ( MyPeer e
                               , Sessions e (KnownPeer e) m
                               , Request e (BlockInfo e) m
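
The per-peer worker above is a small state machine driven with the `flip fix PIdle $ \next -> \case` idiom: each branch decides what runs next by calling `next`, and simply not calling it ends the thread. A stripped-down sketch of the same idiom with illustrative states (not the real PState; requires LambdaCase):

  import Data.Function (fix)
  import Control.Concurrent.STM

  data S = Idle | Work Int | Done

  worker :: TBQueue Int -> IO ()
  worker q = flip fix Idle $ \next -> \case
    Idle   -> atomically (readTBQueue q) >>= next . Work
    Work n -> print n >> next Idle
    Done   -> pure ()          -- no call to next: the loop ends here
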
@@ -676,7 +802,7 @@ doBlockSizeRequest peer h = do
  liftIO $ race ( pause defBlockInfoTimeout )
                ( atomically $ do
                    s <- readTQueue q
-                   void $ flushTQueue q
+                   void $ STM.flushTQueue q
                    pure s
                )

@@ -23,6 +23,7 @@ import HBS2.System.Logger.Simple
 import PeerConfig

 import Control.Concurrent.STM
+import Control.Applicative
 import Control.Exception
 import Control.Monad
 import Control.Monad.Reader

@@ -77,6 +78,8 @@ data BasicBrains e =
  , _brainsDb :: Connection
  , _brainsPipeline :: TQueue (IO ())
  , _brainsCommit :: TQueue CommitCmd
+ , _brainsDelDownload :: TQueue (Hash HbSync)
+ , _brainsSizeCache :: Cache (Peer e, Hash HbSync) Integer
  }

 makeLenses 'BasicBrains

@@ -132,12 +135,8 @@ instance ( Hashable (Peer e)
    commitNow br True

  onBlockSize b p h size = do
+   liftIO $ Cache.insert (_brainsSizeCache b) (p,h) size
    updateOP b $ insertSize b p h size
-   commitNow b True
-   -- FIXME: wait-till-really-commited
-   -- sz <- liftIO $ selectBlockSize b p h
-   -- trace $ "BRAINS: onBlockSize" <+> pretty p <+> pretty h <+> pretty sz
-   pure ()

  onBlockDownloadAttempt b peer h = do
    -- trace $ "BRAINS: onBlockDownloadAttempt" <+> pretty peer <+> pretty h

@@ -151,9 +150,11 @@ instance ( Hashable (Peer e)

  onBlockDownloaded b p h = do
    -- trace $ "BRAINS: onBlockDownloaded" <+> pretty p <+> pretty h
-   cleanupPostponed b h
+   -- cleanupPostponed b h
    updateOP b do
      insertPeer b h p
+     atomically $ writeTQueue (_brainsDelDownload b) h
+     -- deleteDownload b (HashRef h)

  onBlockPostponed b h = do
    -- trace $ "BRAINS: onBlockPostponed" <+> pretty h

@@ -202,7 +203,9 @@ instance ( Hashable (Peer e)
    pure $ mapMaybe fromStringMay r

  blockSize b p h = do
-   liftIO $ selectBlockSize b p h
+   let cs = view brainsSizeCache b
+   found <- liftIO $ Cache.lookup cs (p,h)
+   maybe (liftIO $ selectBlockSize b p h) (pure . Just) found

  isReflogProcessed b h = do
    liftIO $ selectReflogProcessed b h

@@ -570,7 +573,7 @@ SAVEPOINT zzz1;

 DELETE FROM ancestors WHERE strftime('%s','now') - strftime('%s', ts) > 600;
 DELETE FROM seenby WHERE strftime('%s','now') - strftime('%s', ts) > 600;
-DELETE FROM blocksize WHERE strftime('%s','now') - strftime('%s', ts) > 300;
+DELETE FROM blocksize WHERE strftime('%s','now') - strftime('%s', ts) > 1200;
 DELETE FROM statedb.pexinfo where seen < datetime('now', '-7 days');
 DELETE FROM seen where ts < datetime('now');

@@ -851,6 +854,8 @@ newBasicBrains cfg = liftIO do
         <*> pure conn
         <*> newTQueueIO
         <*> newTQueueIO
+        <*> newTQueueIO
+        <*> Cache.newCache (Just (toTimeSpec (1200:: Timeout 'Seconds)))

 runBasicBrains :: forall e m . ( e ~ L4Proto
                                , MonadUnliftIO m

@@ -865,12 +870,13 @@ runBasicBrains cfg brains = do

  let pip = view brainsPipeline brains
  let expire = view brainsExpire brains
+ let sizes = view brainsSizeCache brains
  let commit = view brainsCommit brains

  -- FIXME: async-error-handling
  void $ liftIO $ async $ forever do

-   ewaiters <- race (pause @'Seconds 5) $ do
+   ewaiters <- race (pause @'Seconds 10) $ do
      atomically $ do
        c <- readTQueue commit
        cs <- flushTQueue commit

@@ -884,10 +890,16 @@ runBasicBrains cfg brains = do
      transactional brains (sequence_ (w:ws))
    sequence_ waiters

- void $ liftIO $ async $ forever do
+ void $ liftIO $ async do
+   del <- liftIO $ atomically $ flushTQueue (_brainsDelDownload brains)
+   forever do
      pause @'Seconds 60

      updateOP brains (cleanupHashes brains)

+     for_ del $ \d -> do
+       delDownload @e brains (HashRef d)

  trace "runBasicBrains init"

  let (PeerConfig syn) = cfg

@@ -897,7 +909,7 @@ runBasicBrains cfg brains = do
             ] )

  void $ async $ do
-   -- pause @'Seconds 5
+   pause @'Seconds 10
    forM_ polls $ \(t,mi,x) -> do
      trace $ "BRAINS: poll" <+> pretty t <+> pretty (AsBase58 x) <+> pretty mi
      updateOP brains $ do

@@ -907,14 +919,24 @@ runBasicBrains cfg brains = do
                values (?,?,?)
                on conflict do update set interval = excluded.interval
             |] (show $ pretty (AsBase58 x), show $ pretty t, mi)
-     commitNow brains True
+     -- commitNow brains True

  void $ forever do
-   pause @'Seconds 15
+   pause @'Seconds 20
    ee <- liftIO $ Cache.toList expire
    let eee = [ h | (h,_,Just{}) <- ee ]
    forM_ eee $ \h -> do
      cleanupPostponed brains h

    liftIO $ Cache.purgeExpired expire
+   liftIO $ Cache.purgeExpired sizes
+
+   del <- liftIO $ atomically $ flushTQueue (_brainsDelDownload brains)
+   for_ del $ \d -> do
+     delDownload @e brains (HashRef d)

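
blockSize now consults the new in-memory size cache first and only falls back to the SQLite query on a miss (the cache itself is populated in onBlockSize). That is a plain cache-aside lookup, roughly this shape, with an illustrative helper name:

  import Data.Cache qualified as Cache
  import Data.Hashable (Hashable)

  -- Check the cache, fall back to the slow source on a miss.
  -- Population on miss is left to the writer side, as in onBlockSize above.
  cachedLookup :: (Eq k, Hashable k) => Cache.Cache k v -> (k -> IO (Maybe v)) -> k -> IO (Maybe v)
  cachedLookup cache slow k = do
    hit <- Cache.lookup cache k
    maybe (slow k) (pure . Just) hit
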
@@ -28,9 +28,9 @@ downloadMonLoop env = do
  debug "I'm a download monitor"

  -- FIXME: timeout-hardcodes
- let refs = readTVarIO (view downloads env) <&> HashMap.keys <&> fmap (,2)
+ let refs = readTVarIO (view downloads env) <&> HashMap.keys <&> fmap (,10)

- polling (Polling 2.5 1) refs $ \ref -> do
+ polling (Polling 2.5 2) refs $ \ref -> do
    debug $ "DownloadMon. check" <+> pretty ref
    done <- checkDownloaded ref
    when done do
@@ -60,7 +60,7 @@ medianPeerRTT pinfo = do
  pure $ median rttBuffer

 rttBufferCapacity :: Int
-rttBufferCapacity = 10
+rttBufferCapacity = 1024

 -- | New values are added to the head of the list, old values are discarded when the list is full.
 insertRTT :: MonadIO m => Integer -> TVar [Integer] -> m ()
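
Bumping rttBufferCapacity to 1024 keeps a much longer RTT history per peer. The haddock on insertRTT describes a bounded head-insert; its body is not part of this diff, but the documented behaviour amounts to something like this sketch:

  -- Hypothetical reconstruction of the documented behaviour, not the actual code:
  -- push the new sample on the front, drop anything beyond the capacity.
  insertRTTSketch :: MonadIO m => Integer -> TVar [Integer] -> m ()
  insertRTTSketch v tv = liftIO $ atomically $ modifyTVar' tv (take rttBufferCapacity . (v :))
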
@@ -646,7 +646,7 @@ runPeer opts = Exception.handle (\e -> myException e
  let tcpProbeWait = runReader (cfgValue @PeerTcpProbeWaitKey) syn
                       & fromInteger @(Timeout 'Seconds) . fromMaybe 300

- let downloadThreadNum = runReader (cfgValue @PeerDownloadThreadKey) syn & fromMaybe 2
+ -- let downloadThreadNum = runReader (cfgValue @PeerDownloadThreadKey) syn & fromMaybe 1

  let useSocks5 = runReader (cfgValue @PeerTcpSOCKS5) syn

@@ -1025,7 +1025,6 @@ runPeer opts = Exception.handle (\e -> myException e

  peerThread "pexLoop" (pexLoop @e brains tcp)

- replicateM_ downloadThreadNum do
  peerThread "blockDownloadLoop" (blockDownloadLoop denv)

  peerThread "blockDownloadQ" (downloadQueue conf (SomeBrains brains) denv)

@@ -1042,7 +1041,7 @@ runPeer opts = Exception.handle (\e -> myException e

  peerThread "all protos" do
    runProto @e
-     [ makeResponse (blockSizeProto blk dontHandle onNoBlock)
+     [ makeResponse (blockSizeProto blk (downloadOnBlockSize denv) onNoBlock)
      , makeResponse (blockChunksProto adapter)
      , makeResponse blockAnnounceProto
      , makeResponse (withCredentials @e pc . peerHandShakeProto hshakeAdapter penv)
@@ -15,7 +15,6 @@ import HBS2.Clock
 import HBS2.Data.Types.SignedBox
 import HBS2.Data.Types.Peer
 import HBS2.Data.Types.Refs
-import HBS2.Data.Detect
 import HBS2.Defaults
 import HBS2.Events
 import HBS2.Hash

@@ -38,8 +37,6 @@ import Brains
 import PeerConfig

 import Prelude hiding (log)
-import Data.Foldable (for_)
-import Control.Concurrent.Async
 import Control.Monad.Reader
 import Control.Monad.Writer qualified as W
 import Data.ByteString.Lazy (ByteString)

@@ -56,15 +53,12 @@ import Data.IntMap (IntMap)
 import Data.IntSet (IntSet)
 import Data.Text qualified as Text
 import Data.Text.Encoding qualified as TE
-import Data.Heap qualified as Heap
-import Data.Heap (Entry(..))
-import Data.Time.Clock
 import Data.Word
-import Data.List qualified as List
 import Data.Set qualified as Set
 import Data.Set (Set)

 import UnliftIO.STM
+import UnliftIO

 import Streaming.Prelude qualified as S


@@ -163,6 +157,7 @@ instance Expires (EventKey e (DownloadReq e)) where

 type DownloadFromPeerStuff e m = ( MyPeer e
                                  , MonadIO m
+                                 , MonadUnliftIO m
                                  , ForSignedBox e
                                  , Request e (BlockInfo e) m
                                  , Request e (BlockChunks e) m

@@ -215,12 +210,16 @@ newtype instance SessionKey e (BlockChunks e) =
 deriving newtype instance Hashable (SessionKey L4Proto (BlockChunks L4Proto))
 deriving stock instance Eq (SessionKey L4Proto (BlockChunks L4Proto))

+data BlkS =
+    BlkNew
+  | BlkSizeAsked TimeSpec
+  | BlkDownloadStarted TimeSpec
+
 data BlockState =
   BlockState
   { _bsStart :: TimeSpec
-  , _bsReqSizeTimes :: TVar Int
-  , _bsLastSeen :: TVar TimeSpec
-  , _bsHasSize :: TVar Bool
+  , _bsWip :: Maybe TimeSpec
+  , _bsState :: TVar BlkS
   }

 makeLenses 'BlockState
@ -246,7 +245,10 @@ downloadMonAdd env h whenDone = do
|
||||||
|
|
||||||
data DownloadEnv e =
|
data DownloadEnv e =
|
||||||
DownloadEnv
|
DownloadEnv
|
||||||
{ _blockInQ :: TVar (HashMap (Hash HbSync) ())
|
{ _blockInQ :: TVar (HashMap (Hash HbSync) BlockState)
|
||||||
|
+    , _blockInDirty     :: TVar Bool
+    -- FIXME: trim!!
+    , _blockSizeCache   :: TVar (HashMap (Hash HbSync) (HashMap (Peer e) Integer))
     , _blockPostponed   :: TVar (HashMap (Hash HbSync) () )
     , _blockPostponedTo :: Cache (Hash HbSync) ()
     , _blockDelayTo     :: TQueue (Hash HbSync)

@@ -261,6 +263,8 @@ makeLenses 'DownloadEnv
 newDownloadEnv :: (MonadIO m, MyPeer e, HasBrains e brains) => brains -> m (DownloadEnv e)
 newDownloadEnv brains = liftIO do
   DownloadEnv <$> newTVarIO mempty
+              <*> newTVarIO False
+              <*> newTVarIO mempty
               <*> newTVarIO mempty
               <*> Cache.newCache (Just defBlockBanTime)
               <*> newTQueueIO

@@ -274,6 +278,7 @@ newtype BlockDownloadM e m a =
                    , Applicative
                    , Monad
                    , MonadIO
+                   , MonadUnliftIO
                    , MonadReader (DownloadEnv e)
                    , MonadTrans
                    )

@@ -308,14 +313,22 @@ addDownload :: forall e m . ( DownloadConstr e m
 addDownload mbh h = do

   tinq   <- asks (view blockInQ)
+  dirty  <- asks (view blockInDirty)
   brains <- asks (view downloadBrains)
   here   <- isBlockHereCached h

   if here then do
     removeFromWip h
   else do
+    newBlock <- BlockState
+                  <$> liftIO getTimeCoarse
+                  <*> pure Nothing
+                  <*> liftIO (newTVarIO BlkNew)
+
     claimBlockCameFrom @e brains mbh h
-    liftIO $ atomically $ modifyTVar tinq $ HashMap.insert h ()
+    liftIO $ atomically $ do
+      modifyTVar tinq $ HashMap.insert h newBlock
+      writeTVar dirty True

 postponedNum :: forall e m . (MyPeer e, MonadIO m) => BlockDownloadM e m Int
 postponedNum = do

@@ -333,6 +346,16 @@ delayLittleBit h = do
   q <- asks (view blockDelayTo)
   liftIO $ atomically $ writeTQueue q h

+deleteBlockFromQ :: MonadIO m => Hash HbSync -> BlockDownloadM e m ()
+deleteBlockFromQ h = do
+  inq <- asks (view blockInQ)
+  po  <- asks (view blockPostponed)
+  ca  <- asks (view blockSizeCache)
+  liftIO $ atomically $ modifyTVar' inq (HashMap.delete h)
+  liftIO $ atomically $ modifyTVar' po (HashMap.delete h)
+  liftIO $ atomically $ modifyTVar' po (HashMap.delete h)
+  liftIO $ atomically $ modifyTVar' ca (HashMap.delete h)
+
 postponeBlock :: forall e m . (MyPeer e, MonadIO m) => Hash HbSync -> BlockDownloadM e m ()
 postponeBlock h = do

@@ -484,7 +507,7 @@ checkDownloaded :: forall m . (MonadIO m, HasStorage m) => HashRef -> m Bool
 checkDownloaded hr = do
   sto <- getStorage

-  missed <- findMissedBlocks sto hr
+  missed <- S.head_ $ findMissedBlocks2 sto hr

   pure $ null missed
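The hunk above pairs the work-in-progress map with a new _blockInDirty flag: addDownload now inserts the block state and raises the flag in the same STM transaction, so whatever drives the download loop can sleep until new work arrives instead of rescanning the map. Below is a minimal, self-contained sketch of that pattern using only the stm, hashable and unordered-containers packages; the names WipQueue, push and waitWork are illustrative stand-ins, not hbs2 API.

module Main where

import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.STM
import Control.Monad (forever)
import Data.Hashable (Hashable)
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap

-- Illustrative stand-ins for hbs2's blockInQ / blockInDirty pair.
data WipQueue k v = WipQueue
  { wipItems :: TVar (HashMap k v)
  , wipDirty :: TVar Bool
  }

newWipQueue :: IO (WipQueue k v)
newWipQueue = WipQueue <$> newTVarIO HashMap.empty <*> newTVarIO False

-- Like addDownload: insert the item and raise the dirty flag in one
-- transaction, so a sleeping consumer wakes exactly when there is work.
push :: (Eq k, Hashable k) => WipQueue k v -> k -> v -> IO ()
push q k v = atomically $ do
  modifyTVar' (wipItems q) (HashMap.insert k v)
  writeTVar (wipDirty q) True

-- Block until somebody marked the queue dirty, then take a snapshot.
waitWork :: WipQueue k v -> IO (HashMap k v)
waitWork q = atomically $ do
  dirty <- readTVar (wipDirty q)
  check dirty                        -- STM retry until the flag flips
  writeTVar (wipDirty q) False
  readTVar (wipItems q)

main :: IO ()
main = do
  q <- newWipQueue
  _ <- forkIO $ forever $ waitWork q >>= print . HashMap.keys
  mapM_ (\h -> push q h () >> threadDelay 100000)
        (["block-1", "block-2", "block-3"] :: [String])
  threadDelay 300000

The point of check dirty is that the transaction retries until another thread flips the flag, which is what makes the combined insert-and-mark in addDownload a cheap wake-up signal rather than a busy poll.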
@@ -10,7 +10,7 @@ license-file: LICENSE
 -- copyright:
 category: Network
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common warnings
@@ -57,6 +57,7 @@ common common-deps
        , time
        , timeit
        , transformers
+       , typed-process
        , uniplate
        , unordered-containers
        , vector

@@ -89,7 +90,7 @@ common shared-properties
   -- -fno-warn-unused-binds
   -threaded
   -rtsopts
-  "-with-rtsopts=-N4 -A64m -AL256m -I0 -T"
+  "-with-rtsopts=-N -A64m -AL256m -I0 -T"


   default-language: Haskell2010

@@ -148,6 +149,7 @@ library
       HBS2.Peer.RPC.Client.StorageClient
       HBS2.Peer.RPC.Internal.Storage
       HBS2.Peer.RPC.Internal.Types
+      HBS2.Peer.CLI.Detect

   other-modules:
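The GHC options hunk swaps the hard-coded -with-rtsopts=-N4 for plain -N, so the runtime sizes itself to the machine (or to whatever +RTS -N<k> the user passes) instead of always assuming four capabilities. A program can read that value at run time; a tiny sketch using only base, nothing hbs2-specific:

module Main where

import Control.Concurrent (getNumCapabilities)

main :: IO ()
main = do
  -- with "-with-rtsopts=-N" this reports however many cores the RTS grabbed;
  -- with the old "-N4" it would always be 4 regardless of the machine
  n <- getNumCapabilities
  putStrLn $ "running on " <> show n <> " capabilities"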
@@ -10,7 +10,7 @@ maintainer: dzuikov@gmail.com
 -- copyright:
 category: Network
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common shared-properties
@@ -10,7 +10,7 @@ maintainer: dzuikov@gmail.com
 -- copyright:
 category: System
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common shared-properties

@@ -96,7 +96,6 @@ library
       HBS2.Share.MetaData

   other-modules:
-      HBS2.Peer.CLI.Detect

   -- other-modules:
   -- other-extensions:
@@ -10,7 +10,7 @@ license-file: LICENSE
 -- copyright:
 category: Database
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common shared-properties
@@ -10,7 +10,7 @@ license-file: LICENSE
 -- copyright:
 category: Network
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

 common warnings

@@ -945,6 +945,7 @@ executable test-playground
     , unliftio
     , unordered-containers
     , resourcet
+    , text-icu >= 0.8.0.3


 executable test-repo-export

@@ -981,11 +982,12 @@ executable test-repo-export
     , temporary
     , unliftio
     , unordered-containers
-    , bloomfilter >=2.0.1.2
     , timeit
     , memory
    , deepseq
    , xxhash-ffi
+    , optparse-generic
+    , interpolatedstring-perl6
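test-repo-export drops the bloomfilter dependency and gains optparse-generic and interpolatedstring-perl6, both used by the rewritten Main.hs further down (the CLI record and the [qc|git cat-file ...|] command line). A minimal sketch of just those two libraries under that assumption; the real code feeds the interpolated string to hbs2's gitRunCommand, here it is only printed.

{-# LANGUAGE DeriveGeneric     #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes       #-}
module Main where

import GHC.Generics (Generic)
import Options.Generic
import Text.InterpolatedString.Perl6 (qc)

-- Same shape as the CLI record introduced by this commit:
-- one optional --nThreads flag.
data CLI = CLI { nThreads :: Maybe Int }
  deriving (Generic, Show)

instance ParseRecord CLI

main :: IO ()
main = do
  cli <- getRecord "export git repo"
  print (cli :: CLI)

  -- [qc| ... |] interpolates the {ref} expression and keeps long
  -- command lines readable; here we just print the result.
  let ref = "HEAD" :: String
  putStrLn [qc|git rev-parse {ref}|]

optparse-generic derives the whole parser from the record, which is why the diff only needs data CLI plus instance ParseRecord CLI and a single getRecord call.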
@@ -29,34 +29,14 @@ import Data.ByteString.Lazy (ByteString)
 import Codec.Serialise
 import Data.Maybe
 import Data.HashSet qualified as HS
+import Data.Vector qualified as V
+import Data.Vector ((!))
+import Data.HashSet (HashSet)
+import Data.HashSet qualified as HashSet

-import Data.ByteArray.Hash (SipHash(..), SipKey(..))
-import Data.ByteArray.Hash qualified as BA
-
-import System.TimeIt
+import Text.InterpolatedString.Perl6 (qc)
+import Options.Generic

--- import Data.BloomFilter.Easy qualified as B
-import Data.BloomFilter qualified as B
-import Data.BloomFilter.Easy qualified as B
-import Data.BloomFilter.Hash qualified as B
-import Control.Concurrent.STM (flushTQueue)
-import Control.DeepSeq (deepseq)
-import Data.Bits
-import Data.HashMap.Strict (HashMap)
-import Data.HashMap.Strict qualified as HashMap
-import Data.Word
-import Data.Bits
-
-import Data.Vector.Mutable qualified as V
-import Data.IntMap qualified as IntMap
-import Data.IntMap (IntMap)
-
-import Data.Hashable
-import Data.Digest.XXHash.FFI
-
-import Streaming.Prelude qualified as S
-
--- import Control.Concurrent.BloomFilter qualified as U

 data RPCEndpoints =
   RPCEndpoints

@@ -102,113 +82,96 @@ runWithRPC action = do
   void $ waitAnyCatchCancel [messaging, c1]


+data CLI = CLI { nThreads :: Maybe Int }
+             deriving (Generic)
+
+instance ParseRecord CLI
+
-doAlter :: (Bits a1, Integral a2, Num a1) => a2 -> Maybe a1 -> Maybe a1
-doAlter j = \case
-  Nothing -> Just (setBit 0 (fromIntegral j))
-  Just x -> Just (setBit x (fromIntegral j))
-{-# INLINE doAlter #-}
-
 main :: IO ()
 main = do
-  ls <- LBS8.readFile "input.txt" <&> LBS8.lines
+  bs <- gitRunCommand [qc|git cat-file --batch-check='%(objectname)' --batch-all-object --unordered --buffer|]
+          >>= orThrowUser "oopsie"
+  let ls = LBS8.lines bs -- & HashSet.fromList
+  print $ length ls

-  z <- newIORef 0
-  let (sz, hn) = B.suggestSizing 5000000 0.01
-
-  let bloom = B.fromList (\s -> [ xxh32 (LBS.toStrict s) x | x <- [1 .. fromIntegral hn] ]) sz ls
-  -- let bloom = B.fromList (\s -> [ xxh32 (LBS.toStrict s) x | x <- [1 .. fromIntegral 2] ]) sz ls
-  -- let bloom = B.fromList (\s -> [ fromIntegral (hashWithSalt x ls) | x <- [1 .. hn] ]) sz ls
-
-  print $ B.length bloom
-
-  -- v <- V.new @_ @Word8 (sz `div` 8)
-
-  -- thm <- newIORef (HashMap.empty @Word64 @Word64)
-  -- thi <- newIORef (IntMap.empty @Word64)
-  -- tvm <- newTVarIO (HashMap.empty @Word64 @Word64)
-
-  -- tq <- newTQueueIO
-
-  -- haha <- for ls $ \s -> do
-  --   let hashes = [ xxh32 s x `mod` fromIntegral sz | x <- [1 .. 7] ]
-  --   vals <- for hashes $ \h -> do
-  --     let (w,b) = h `divMod` 64
-  --     pure (w, setBit 0 (fromIntegral b) :: Word64)
-  --   pure $ HashMap.fromListWith (.|.) vals
-
-  -- let result = HashMap.unions haha
-
-  -- print $ length result
-
-  --   for_ hashes $ \i -> do
-  --     let (w,b) = i `divMod` 64
-  --     pure
-  --   pure ()
-
-  --   atomically $ mapM_ (writeTQueue tq) hashes
-
-  --   print "FUCK!"
-
-  --   pure ()
-  --     modifyIORef thm (HashMap.alter (doAlter j) (fromIntegral i))
-
-  -- w <- readIORef z
-  -- print w
-
-  -- let bloom = B.easyList 0.01 ls
-
-  -- print $ B.length bloom
-
-  --dir <- findGitDir "." >>= orThrowUser "not a git dir"
-
-  --flip runContT pure do
-
-  --   o <- gitListAllObjects
-
-  --   ep <- ContT runWithRPC
-
-  --   let sto = StorageClient (rpcStorage ep)
-
-  --   cat <- startGitCatFile
-
-  --   -- h <- gitGetHash "HEAD" >>= orThrowUser "wtf1"
-  --   -- rvl <- gitRevList Nothing h
-  --   --
-
-  --   items <- for o $ \(a,GitHash b) -> do
-  --     pure b
-
-  --   liftIO $ print $ "bloom params" <+> pretty (B.suggestSizing (length items) 0.01)
-
-  --   timeItNamed (show $ "build bloom filter" <+> pretty (length items)) do
-  --     let bloom = B.easyList 0.01 items
-
-  --     liftIO $ print $ "bloom filter size" <+> pretty (B.length bloom) <> line
-  --                        <> "data size" <+> pretty (LBS.length (serialise items))
-
-  --   timeItNamed "calc siphashes" do
-
-  --     let w = 67108864
-  --     tvm <- newTVarIO (HashMap.empty @Word64 @Bool)
-  --     -- q <- newTQueueIO
-
-  --     for_ items $ \it -> do
-  --       for_ (B.cheapHashes 7 it) $ \hx -> do
-  --         let k = fromIntegral (hx `mod` w)
-  --         atomically $ modifyTVar tvm (HashMap.insert k True)
-
-
-  -- wtf <- liftIO $ readTVarIO tvm
-  -- liftIO $ print $ length wtf
-
-  -- liftIO $ print $ LBS.length $ serialise bloom
+main1 :: IO ()
+main1 = do
+  dir <- findGitDir "." >>= orThrowUser "not a git dir"
+
+  let hrs = [ "A9Y5k28STYMg2XGUA5xwpAU3CcQg3Fh5j56E4v1QYV7A"
+            , "BZuMNqPy1vpxev4H5yKJ4TjTHrZ5HAqPzxDM1ZN74XY2"
+            , "Bs2jEFJTSQnY7z5nActjBBanCEWSYnbzUzC41xhhHwtX"
+            , "2rZxtNqi8haDEhQkVd2v1ddSfDub9bMH4BB9tgwqvxCF"
+            , "Fe1cLjj9BPqHcLowTNwZvHkwT7tAL7dywBocdN3VYeMn"
+            ] :: [HashRef]
+
+  let pt = toPTree (MaxSize 256) (MaxNum 256) hrs
+
+  root <- makeMerkle 0 pt $ \(hx,_,bss) -> do
+    liftIO $ print $ "block:" <+> pretty hx
+
+  print $ pretty root
+  error "stop"
+
+  (CLI mn) <- getRecord "export git repo"
+
+  let n = fromMaybe 1 mn
+
+  flip runContT pure do
+
+    o <- gitListAllObjects
+
+    ep <- ContT runWithRPC
+
+    let sto = StorageClient (rpcStorage ep)
+
+    ou <- newTQueueIO
+
+    qqs <- V.fromList <$> replicateM n newTQueueIO
+
+    w <- liftIO $ async do
+      for_ (zip [0..] o) $ \(i,x) -> do
+        let j = i `mod` V.length qqs
+        atomically $ writeTQueue (qqs ! j) (Just x)
+      for_ qqs $ \q -> do
+        atomically $ writeTQueue q Nothing
+
+    ws <- liftIO $ for (V.toList qqs) $ \q -> async do
+      cat <- startGitCatFile
+      fix \next -> do
+        e <- atomically $ readTQueue q
+        case e of
+          Nothing -> pure ()
+          Just (_,h) -> do
+            void $ runMaybeT do
+              GitObject t lbs <- toMPlus =<< gitReadFromCatFileBatch cat h
+              atomically $ writeTQueue ou (t, h, LBS.length lbs)
+            next
+
+    wou <- liftIO $ async do
+      fix \next -> do
+        r <- atomically $ readTQueue ou
+        print $ pretty r
+        next
+
+    mapM_ wait (w:ws)
+
+    cancel wou
+
+    -- for_ [q1, q2] -> do
+
+    -- cat2 <- startGitCatFile
+
+    -- h <- gitGetHash "HEAD" >>= orThrowUser "wtf1"
+    -- rvl <- gitRevList Nothing h

   -- liftIO do
-  --   allShit' <- for o $ \r@(o,h) -> runMaybeT do
+  --   for_ o $ \r@(o,h) -> runMaybeT do
+  --     pure ()
   --     GitObject t lbs <- toMPlus =<< gitReadFromCatFileBatch cat h
-  --     liftIO $ print $ pretty (t, h)
+  --     liftIO $ print $ pretty (t, h, LBS.length lbs)
   --     ght <- writeAsMerkle sto lbs

   --     tt <- getBlock sto ght
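The interesting part of the rewritten main is the fan-out: all git objects are dealt round-robin onto n TQueues, each queue is drained by its own worker holding its own git cat-file handle (startGitCatFile), and results are funnelled into a single output queue that one printer drains. Keeping one cat-file process per worker avoids serializing every object read through a single pipe. Below is a self-contained sketch of just that skeleton, assuming only the async and stm packages; the hbs2/git specifics from the diff (startGitCatFile, gitReadFromCatFileBatch, HashRef, the merkle probe with toPTree/makeMerkle) are replaced by a stub that merely tags each item.

{-# LANGUAGE LambdaCase #-}
module Main where

import Control.Concurrent.Async (async, cancel, wait)
import Control.Concurrent.STM
import Control.Monad (replicateM)
import Data.Foldable (for_)
import Data.Function (fix)
import Data.Traversable (for)

main :: IO ()
main = do
  let n = 4 :: Int
      objects = map (\i -> "object-" <> show i) [1 .. 100 :: Int]

  ou  <- newTQueueIO                      -- single output queue
  qqs <- replicateM n newTQueueIO         -- one input queue per worker

  -- producer: deal work round-robin, then one Nothing sentinel per queue
  prod <- async $ do
    for_ (zip [0 :: Int ..] objects) $ \(i, x) ->
      atomically $ writeTQueue (qqs !! (i `mod` n)) (Just x)
    for_ qqs $ \q -> atomically $ writeTQueue q Nothing

  -- workers: each drains its own queue until the sentinel shows up
  ws <- for qqs $ \q -> async $ fix $ \next ->
    atomically (readTQueue q) >>= \case
      Nothing -> pure ()
      Just h  -> do
        -- stand-in for gitReadFromCatFileBatch on a per-worker handle
        atomically $ writeTQueue ou ("processed " <> h)
        next

  -- printer: open-ended, like wou in the diff; cancelled at the end
  wou <- async $ fix $ \next -> do
    atomically (readTQueue ou) >>= putStrLn
    next

  mapM_ wait (prod : ws)
  cancel wou

As in the diff, each worker stops on its Nothing sentinel; the printer runs open-ended and is cancelled once all workers are done, so any items still queued at that moment are simply dropped.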
@@ -10,7 +10,7 @@ license-file: LICENSE
 -- copyright:
 category: Network
 build-type: Simple
-extra-doc-files: CHANGELOG.md
+-- extra-doc-files: CHANGELOG.md
 -- extra-source-files:

|
|
|
@ -1,16 +1,38 @@
|
||||||
{
|
{
|
||||||
"nodes": {
|
"nodes": {
|
||||||
|
"db-pipe": {
|
||||||
|
"inputs": {
|
||||||
|
"haskell-flake-utils": "haskell-flake-utils",
|
||||||
|
"nixpkgs": [
|
||||||
|
"hbs2",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1700834043,
|
||||||
|
"narHash": "sha256-VDExjkJ2maIP+Baw5V3fhmRtJ4nHpQV/Fxg1H8g69ME=",
|
||||||
|
"ref": "refs/heads/master",
|
||||||
|
"rev": "6050d7949f390c4717293d1d410123439e0fda67",
|
||||||
|
"revCount": 6,
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.hbs2.net/5xrwbTzzweS9yeJQnrrUY9gQJfhJf84pbyHhF2MMmSft"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.hbs2.net/5xrwbTzzweS9yeJQnrrUY9gQJfhJf84pbyHhF2MMmSft"
|
||||||
|
}
|
||||||
|
},
|
||||||
"extra-container": {
|
"extra-container": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"flake-utils": "flake-utils",
|
"flake-utils": "flake-utils",
|
||||||
"nixpkgs": "nixpkgs"
|
"nixpkgs": "nixpkgs"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1678566036,
|
"lastModified": 1699821751,
|
||||||
"narHash": "sha256-dq+gCYplCTkbHOH1ERCzuTnwY/RvwMyw/kijPy7C3vE=",
|
"narHash": "sha256-UlId5jvJFmkVcKpn0oZ2VTvWAc/mZy6butRZGk73xXM=",
|
||||||
"owner": "erikarvstedt",
|
"owner": "erikarvstedt",
|
||||||
"repo": "extra-container",
|
"repo": "extra-container",
|
||||||
"rev": "a4fe3227bf63bf8479938e1457ebe1c04fe51ef5",
|
"rev": "842912907bf189ef17a80ca09ba37b6bdfc76c49",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -21,7 +43,7 @@
|
||||||
},
|
},
|
||||||
"fixme": {
|
"fixme": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"haskell-flake-utils": "haskell-flake-utils",
|
"haskell-flake-utils": "haskell-flake-utils_2",
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"hbs2",
|
"hbs2",
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
|
@ -29,26 +51,29 @@
|
||||||
"suckless-conf": "suckless-conf"
|
"suckless-conf": "suckless-conf"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1679822846,
|
"lastModified": 1697356303,
|
||||||
"narHash": "sha256-bXGorR8cLCVm3Cu7EcTUGNtaxPwqZH8zLrch7r/ST5c=",
|
"narHash": "sha256-hJbJZtx7gdcXaKL8n5J8b/eVyoYe9VxM+037ZK7q8Gw=",
|
||||||
"owner": "voidlizard",
|
"ref": "refs/heads/master",
|
||||||
"repo": "fixme",
|
"rev": "e9b1dcfd78dc766a2255a8125c14b24f0d728c0e",
|
||||||
"rev": "ff3faeeee860b2ed2edf6e69cec26e9b49b167a3",
|
"revCount": 139,
|
||||||
"type": "github"
|
"type": "git",
|
||||||
|
"url": "https://git.hbs2.net/Fujv1Uy4W5d9Z7REEArMxbXSJ8nLLn4dYuvaAs8b86hr"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "voidlizard",
|
"type": "git",
|
||||||
"repo": "fixme",
|
"url": "https://git.hbs2.net/Fujv1Uy4W5d9Z7REEArMxbXSJ8nLLn4dYuvaAs8b86hr"
|
||||||
"type": "github"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"flake-utils": {
|
"flake-utils": {
|
||||||
|
"inputs": {
|
||||||
|
"systems": "systems"
|
||||||
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1667395993,
|
"lastModified": 1685518550,
|
||||||
"narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
|
"narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=",
|
||||||
"owner": "numtide",
|
"owner": "numtide",
|
||||||
"repo": "flake-utils",
|
"repo": "flake-utils",
|
||||||
"rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
|
"rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -132,16 +157,31 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"flake-utils_7": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1644229661,
|
||||||
|
"narHash": "sha256-1YdnJAsNy69bpcjuoKdOYQX0YxZBiCYZo4Twxerqv7k=",
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"rev": "3cecb5b042f7f209c56ffd8371b2711a290ec797",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
"haskell-flake-utils": {
|
"haskell-flake-utils": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"flake-utils": "flake-utils_2"
|
"flake-utils": "flake-utils_2"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1672412555,
|
"lastModified": 1698938553,
|
||||||
"narHash": "sha256-Kaa8F7nQFR3KuS6Y9WRUxeJeZlp6CCubyrRfmiEsW4k=",
|
"narHash": "sha256-oXpTKXioqFbl2mhhvpJIAvgNd+wYyv4ekI+YnJHEJ6s=",
|
||||||
"owner": "ivanovs-4",
|
"owner": "ivanovs-4",
|
||||||
"repo": "haskell-flake-utils",
|
"repo": "haskell-flake-utils",
|
||||||
"rev": "896219e5bde6efac72198550454e9dd9b5ed9ac9",
|
"rev": "19b273b5dc401a0a565e7f75cf50a593871b80c9",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -182,7 +222,6 @@
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "ivanovs-4",
|
"owner": "ivanovs-4",
|
||||||
"ref": "master",
|
|
||||||
"repo": "haskell-flake-utils",
|
"repo": "haskell-flake-utils",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
@ -192,17 +231,17 @@
|
||||||
"flake-utils": "flake-utils_5"
|
"flake-utils": "flake-utils_5"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1672412555,
|
"lastModified": 1698938553,
|
||||||
"narHash": "sha256-Kaa8F7nQFR3KuS6Y9WRUxeJeZlp6CCubyrRfmiEsW4k=",
|
"narHash": "sha256-oXpTKXioqFbl2mhhvpJIAvgNd+wYyv4ekI+YnJHEJ6s=",
|
||||||
"owner": "ivanovs-4",
|
"owner": "ivanovs-4",
|
||||||
"repo": "haskell-flake-utils",
|
"repo": "haskell-flake-utils",
|
||||||
"rev": "896219e5bde6efac72198550454e9dd9b5ed9ac9",
|
"rev": "19b273b5dc401a0a565e7f75cf50a593871b80c9",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "ivanovs-4",
|
"owner": "ivanovs-4",
|
||||||
|
"ref": "master",
|
||||||
"repo": "haskell-flake-utils",
|
"repo": "haskell-flake-utils",
|
||||||
"rev": "896219e5bde6efac72198550454e9dd9b5ed9ac9",
|
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -218,6 +257,25 @@
|
||||||
"rev": "896219e5bde6efac72198550454e9dd9b5ed9ac9",
|
"rev": "896219e5bde6efac72198550454e9dd9b5ed9ac9",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "ivanovs-4",
|
||||||
|
"repo": "haskell-flake-utils",
|
||||||
|
"rev": "896219e5bde6efac72198550454e9dd9b5ed9ac9",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"haskell-flake-utils_6": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-utils": "flake-utils_7"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1672412555,
|
||||||
|
"narHash": "sha256-Kaa8F7nQFR3KuS6Y9WRUxeJeZlp6CCubyrRfmiEsW4k=",
|
||||||
|
"owner": "ivanovs-4",
|
||||||
|
"repo": "haskell-flake-utils",
|
||||||
|
"rev": "896219e5bde6efac72198550454e9dd9b5ed9ac9",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "ivanovs-4",
|
"owner": "ivanovs-4",
|
||||||
"repo": "haskell-flake-utils",
|
"repo": "haskell-flake-utils",
|
||||||
|
@ -226,8 +284,9 @@
|
||||||
},
|
},
|
||||||
"hbs2": {
|
"hbs2": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
|
"db-pipe": "db-pipe",
|
||||||
"fixme": "fixme",
|
"fixme": "fixme",
|
||||||
"haskell-flake-utils": "haskell-flake-utils_3",
|
"haskell-flake-utils": "haskell-flake-utils_4",
|
||||||
"hspup": "hspup",
|
"hspup": "hspup",
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
|
@ -236,33 +295,31 @@
|
||||||
"suckless-conf": "suckless-conf_2"
|
"suckless-conf": "suckless-conf_2"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1681115037,
|
"lastModified": 1707584207,
|
||||||
"narHash": "sha256-CovUWmx6Pup3p/6zhIBAltJiUlh2ukFAI1P4U/vnXNw=",
|
"narHash": "sha256-y4JimRbs6Edfhh1UnJLdX1jrnfcTFmAU+h89ir23gno=",
|
||||||
"owner": "voidlizard",
|
"rev": "039d2bfefcd11f67ed957a71d650e877f8500611",
|
||||||
"repo": "hbs2",
|
"revCount": 1062,
|
||||||
"rev": "21fb2d844076f8b380847854ebbd75cac57e3424",
|
"type": "git",
|
||||||
"type": "github"
|
"url": "http://git.hbs2/BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "voidlizard",
|
"rev": "039d2bfefcd11f67ed957a71d650e877f8500611",
|
||||||
"ref": "injecting-tcp",
|
"type": "git",
|
||||||
"repo": "hbs2",
|
"url": "http://git.hbs2/BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP"
|
||||||
"type": "github"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"home-manager": {
|
"home-manager": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
],
|
]
|
||||||
"utils": "utils"
|
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1679394816,
|
"lastModified": 1707467182,
|
||||||
"narHash": "sha256-1V1esJt2YAxsKmRuGuB62RF5vhDAVFDvJXVNhtEO22A=",
|
"narHash": "sha256-/Bw/xgCXfj4nXDd8Xq+r1kaorfsYkkomMf5w5MpsDyA=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "home-manager",
|
"repo": "home-manager",
|
||||||
"rev": "e386ec640e16dc91120977285cb8c72c77078164",
|
"rev": "5b9156fa9a8b8beba917b8f9adbfd27bf63e16af",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -273,7 +330,7 @@
|
||||||
},
|
},
|
||||||
"hspup": {
|
"hspup": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"haskell-flake-utils": "haskell-flake-utils_4",
|
"haskell-flake-utils": "haskell-flake-utils_5",
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"hbs2",
|
"hbs2",
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
|
@ -295,42 +352,27 @@
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1669833724,
|
"lastModified": 1685566663,
|
||||||
"narHash": "sha256-/HEZNyGbnQecrgJnfE8d0WC5c1xuPSD2LUpB6YXlg4c=",
|
"narHash": "sha256-btHN1czJ6rzteeCuE/PNrdssqYD2nIA4w48miQAFloM=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "4d2b37a84fad1091b9de401eb450aae66f1a741e",
|
"rev": "4ecab3273592f27479a583fb6d975d4aba3486fe",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"ref": "22.11",
|
"ref": "23.05",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nixpkgs_2": {
|
"nixpkgs_2": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1675758091,
|
"lastModified": 1707451808,
|
||||||
"narHash": "sha256-7gFSQbSVAFUHtGCNHPF7mPc5CcqDk9M2+inlVPZSneg=",
|
"narHash": "sha256-UwDBUNHNRsYKFJzyTMVMTF5qS4xeJlWoeyJf+6vvamU=",
|
||||||
"owner": "NixOS",
|
|
||||||
"repo": "nixpkgs",
|
|
||||||
"rev": "747927516efcb5e31ba03b7ff32f61f6d47e7d87",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "NixOS",
|
|
||||||
"repo": "nixpkgs",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"nixpkgs_3": {
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1679319606,
|
|
||||||
"narHash": "sha256-wyEMIZB6BnsmJWInEgDZu66hXVMGJEZFl5uDsn27f9M=",
|
|
||||||
"owner": "nixos",
|
"owner": "nixos",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "8bc6945b1224a1cfa679d6801580b1054dba1a5c",
|
"rev": "442d407992384ed9c0e6d352de75b69079904e4e",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -345,7 +387,7 @@
|
||||||
"extra-container": "extra-container",
|
"extra-container": "extra-container",
|
||||||
"hbs2": "hbs2",
|
"hbs2": "hbs2",
|
||||||
"home-manager": "home-manager",
|
"home-manager": "home-manager",
|
||||||
"nixpkgs": "nixpkgs_3"
|
"nixpkgs": "nixpkgs_2"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"saltine": {
|
"saltine": {
|
||||||
|
@ -367,15 +409,19 @@
|
||||||
},
|
},
|
||||||
"suckless-conf": {
|
"suckless-conf": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"haskell-flake-utils": "haskell-flake-utils_2",
|
"haskell-flake-utils": "haskell-flake-utils_3",
|
||||||
"nixpkgs": "nixpkgs_2"
|
"nixpkgs": [
|
||||||
|
"hbs2",
|
||||||
|
"fixme",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1679815688,
|
"lastModified": 1697354514,
|
||||||
"narHash": "sha256-xLvIoeG5krM0CXfWRcwSgHGP7W8i8dsoKP5hUb182hE=",
|
"narHash": "sha256-5doedGj2QU4vPuw1VZor1GGEJTxu0zFeO/PsybFIcn8=",
|
||||||
"owner": "voidlizard",
|
"owner": "voidlizard",
|
||||||
"repo": "suckless-conf",
|
"repo": "suckless-conf",
|
||||||
"rev": "04c432681d3627f180a402674523736f409f964d",
|
"rev": "3f87278bc10ac4f14a6d9d2c75cbbed228509129",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -386,38 +432,38 @@
|
||||||
},
|
},
|
||||||
"suckless-conf_2": {
|
"suckless-conf_2": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"haskell-flake-utils": "haskell-flake-utils_5",
|
"haskell-flake-utils": "haskell-flake-utils_6",
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"hbs2",
|
"hbs2",
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1679815688,
|
"lastModified": 1704001322,
|
||||||
"narHash": "sha256-xLvIoeG5krM0CXfWRcwSgHGP7W8i8dsoKP5hUb182hE=",
|
"narHash": "sha256-D7T/8wAg5J4KkRw0uB90w3+adY11aQaX7rjmQPXkkQc=",
|
||||||
"owner": "voidlizard",
|
"ref": "refs/heads/master",
|
||||||
"repo": "suckless-conf",
|
"rev": "8cfc1272bb79ef6ad62ae6a625f21e239916d196",
|
||||||
"rev": "04c432681d3627f180a402674523736f409f964d",
|
"revCount": 28,
|
||||||
"type": "github"
|
"type": "git",
|
||||||
|
"url": "https://git.hbs2.net/JAuk1UJzZfbDGKVazSQU5yYQ3NGfk4gVeZzBCduf5TgQ"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "voidlizard",
|
"type": "git",
|
||||||
"repo": "suckless-conf",
|
"url": "https://git.hbs2.net/JAuk1UJzZfbDGKVazSQU5yYQ3NGfk4gVeZzBCduf5TgQ"
|
||||||
"type": "github"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"utils": {
|
"systems": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1676283394,
|
"lastModified": 1681028828,
|
||||||
"narHash": "sha256-XX2f9c3iySLCw54rJ/CZs+ZK6IQy7GXNY4nSOyu2QG4=",
|
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||||
"owner": "numtide",
|
"owner": "nix-systems",
|
||||||
"repo": "flake-utils",
|
"repo": "default",
|
||||||
"rev": "3db36a8b464d0c4532ba1c7dda728f4576d6d073",
|
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "numtide",
|
"owner": "nix-systems",
|
||||||
"repo": "flake-utils",
|
"repo": "default",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,11 +1,12 @@
 # See how this flake is used in ./usage.sh
+# on host: sudo sysctl -w net.ipv4.ip_forward=1
 {
   description = "hbs2-container";

   inputs = {
     extra-container.url = "github:erikarvstedt/extra-container";
     nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-unstable";
-    hbs2.url = "github:voidlizard/hbs2/injecting-tcp";
+    hbs2.url = "git+http://git.hbs2/BTThPdHKF8XnEq4m6wzbKHKA6geLFK4ydYhBXAqBdHSP?rev=039d2bfefcd11f67ed957a71d650e877f8500611";
     hbs2.inputs.nixpkgs.follows = "nixpkgs";

     home-manager.url = "github:nix-community/home-manager";

@@ -27,28 +28,68 @@
       # If unset, the nixpkgs input of extra-container flake is used
       nixpkgs = inputs.nixpkgs;

-      # home-manager.homeConfigurations = {
-      #   # FIXME replace with your username@hostname
-      #   "root@minipig" = home-manager.lib.homeManagerConfiguration {
-      #     pkgs = nixpkgs.legacyPackages.x86_64-linux; # Home-manager requires 'pkgs' instance
-      #     extraSpecialArgs = { inherit inputs; }; # Pass flake inputs to our config
-      #     # > Our main home-manager configuration file <
-      #     modules = [ ./home/home.nix ];
-      #   };
-      # };
-
       # Set this to disable `nix run` support
       # addRunner = false;

       config = {
         containers.hbs2-test = {
-          extra.addressPrefix = "10.250.0";
+          extra = {
+            addressPrefix = "10.250.0";
+            exposeLocalhost = true;
+            firewallAllowHost = true;
+            enableWAN = true;
+          };
+
           config = { pkgs, ... }: {

+            imports = [
+              home-manager.nixosModules.home-manager
+              {
+                home-manager.useGlobalPkgs = true;
+                home-manager.useUserPackages = true;
+                home-manager.users.hbs2 = {
+                  # import ./config/home.nix;
+                  home.stateVersion = "23.05";
+
+                  xdg.configFile = {
+                    ".hbs2-peer/config".text = ''
+                      ;; hbs2-peer config file
+
+                      listen "0.0.0.0:7351"
+                      listen-tcp "0.0.0.0:10351"
+
+                      known-peer "10.250.0.1:7354"
+
+                      ; edit path to a keyring file
+                      ; key "./key"
+                      key "./default.key"
+                    '';
+                  };
+                };
+                home-manager.extraSpecialArgs = {
+                  # inherit inputs;
+                };
+              }
+            ];
+
+            # settings.trusted-users = [ "root" "hbs2" ];
+
+            nix = {
+              package = pkgs.nixFlakes;
+              extraOptions = ''
+                experimental-features = nix-command flakes
+              '';
+              #settings.trusted-users = [ "root" "dmz" ];
+            };
+
             users.users.hbs2 = {
               isNormalUser = true;
               home = "/home/hbs2";
+              packages = with pkgs; [];
             };

             systemd.services.hello = {

@@ -99,10 +140,8 @@ http-port 5001
 key "./key"
 storage "/root/.local/share/hbs2"
 accept-block-announce *
-download-log "/tmp/download-log"
 bootstrap-dns "bootstrap.hbs2.net"
 known-peer "10.250.0.1:7354"
-known-peer "10.250.0.1:7351"

 ; poll reflog 1 "2YNGdnDBnciF1Kgmx1EZTjKUp1h5pvYAjrHoApbArpeX"