commit d4661959de
Merge branch 'master' into sqlite

152 changed files with 2443 additions and 462 deletions
Annex.hs | 2
@@ -114,7 +114,7 @@ data AnnexState = AnnexState
     , fast :: Bool
     , daemon :: Bool
     , branchstate :: BranchState
-    , repoqueue :: Maybe Git.Queue.Queue
+    , repoqueue :: Maybe (Git.Queue.Queue Annex)
     , catfilehandles :: M.Map FilePath CatFileHandle
     , hashobjecthandle :: Maybe HashObjectHandle
     , checkattrhandle :: Maybe CheckAttrHandle

@@ -224,8 +224,9 @@ adjustToCrippledFileSystem :: Annex ()
 adjustToCrippledFileSystem = do
     warning "Entering an adjusted branch where files are unlocked as this filesystem does not support locked files."
     checkVersionSupported
-    whenM (isNothing <$> inRepo Git.Branch.current) $
-        void $ inRepo $ Git.Branch.commitCommand Git.Branch.AutomaticCommit
+    whenM (isNothing <$> inRepo Git.Branch.current) $ do
+        cmode <- annexCommitMode <$> Annex.getGitConfig
+        void $ inRepo $ Git.Branch.commitCommand cmode
         [ Param "--quiet"
         , Param "--allow-empty"
         , Param "-m"
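
Note: the hunk above is the central refactor this merge carries through the
rest of the diff: every call site that hard-coded Git.Branch.AutomaticCommit
now reads the commit mode from git config first. A minimal sketch of the
recurring two-line shape, assuming the git-annex tree (the Annex monad,
annexCommitMode, and Git.Branch.commitCommand as used above); the helper
name commitQuietEmpty is illustrative only:

    -- Sketch: bind the configured commit mode, then thread it to the
    -- commit command instead of the old hard-coded AutomaticCommit.
    commitQuietEmpty :: String -> Annex ()
    commitQuietEmpty msg = do
        cmode <- annexCommitMode <$> Annex.getGitConfig
        void $ inRepo $ Git.Branch.commitCommand cmode
            [ Param "--quiet"
            , Param "--allow-empty"
            , Param "-m", Param msg
            ]

The same shape recurs below for commitTree, commitAlways, and commit.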
@@ -310,12 +311,16 @@ commitAdjustedTree' :: Sha -> BasisBranch -> [Ref] -> Annex Sha
 commitAdjustedTree' treesha (BasisBranch basis) parents =
     go =<< catCommit basis
   where
-    go Nothing = inRepo mkcommit
-    go (Just basiscommit) = inRepo $ commitWithMetaData
-        (commitAuthorMetaData basiscommit)
-        (commitCommitterMetaData basiscommit)
-        mkcommit
-    mkcommit = Git.Branch.commitTree Git.Branch.AutomaticCommit
+    go Nothing = do
+        cmode <- annexCommitMode <$> Annex.getGitConfig
+        inRepo $ mkcommit cmode
+    go (Just basiscommit) = do
+        cmode <- annexCommitMode <$> Annex.getGitConfig
+        inRepo $ commitWithMetaData
+            (commitAuthorMetaData basiscommit)
+            (commitCommitterMetaData basiscommit)
+            (mkcommit cmode)
+    mkcommit cmode = Git.Branch.commitTree cmode
         adjustedBranchCommitMessage parents treesha
 
 {- This message should never be changed. -}

@@ -444,7 +449,8 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm
 reparent adjtree adjmergecommit (Just currentcommit) = do
     if (commitTree currentcommit /= adjtree)
         then do
-            c <- inRepo $ Git.Branch.commitTree Git.Branch.AutomaticCommit
+            cmode <- annexCommitMode <$> Annex.getGitConfig
+            c <- inRepo $ Git.Branch.commitTree cmode
                 ("Merged " ++ fromRef tomerge) [adjmergecommit]
                 (commitTree currentcommit)
             inRepo $ Git.Branch.update "updating adjusted branch" currbranch c

@@ -534,12 +540,14 @@ reverseAdjustedCommit commitparent adj (csha, basiscommit) origbranch
     | length (commitParent basiscommit) > 1 = return $
         Left $ "unable to propigate merge commit " ++ show csha ++ " back to " ++ show origbranch
     | otherwise = do
+        cmode <- annexCommitMode <$> Annex.getGitConfig
         treesha <- reverseAdjustedTree commitparent adj csha
         revadjcommit <- inRepo $ commitWithMetaData
             (commitAuthorMetaData basiscommit)
             (commitCommitterMetaData basiscommit) $
-                Git.Branch.commitTree Git.Branch.AutomaticCommit
-                    (commitMessage basiscommit) [commitparent] treesha
+                Git.Branch.commitTree cmode
+                    (commitMessage basiscommit)
+                    [commitparent] treesha
         return (Right revadjcommit)
 
 {- Adjusts the tree of the basis, changing only the files that the

@@ -109,8 +109,9 @@ getBranch = maybe (hasOrigin >>= go >>= use) return =<< branchsha
         [Param "branch", Param $ fromRef name, Param $ fromRef originname]
         fromMaybe (error $ "failed to create " ++ fromRef name)
             <$> branchsha
-    go False = withIndex' True $
-        inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit "branch created" fullname []
+    go False = withIndex' True $ do
+        cmode <- annexCommitMode <$> Annex.getGitConfig
+        inRepo $ Git.Branch.commitAlways cmode "branch created" fullname []
     use sha = do
        setIndexSha sha
        return sha

@@ -317,7 +318,8 @@ commitIndex jl branchref message parents = do
 commitIndex' :: JournalLocked -> Git.Ref -> String -> String -> Integer -> [Git.Ref] -> Annex ()
 commitIndex' jl branchref message basemessage retrynum parents = do
     updateIndex jl branchref
-    committedref <- inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit message fullname parents
+    cmode <- annexCommitMode <$> Annex.getGitConfig
+    committedref <- inRepo $ Git.Branch.commitAlways cmode message fullname parents
     setIndexSha committedref
     parentrefs <- commitparents <$> catObject committedref
     when (racedetected branchref parentrefs) $

@@ -551,7 +553,8 @@ performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
     Annex.Queue.flush
     if neednewlocalbranch
         then do
-            committedref <- inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit message fullname transitionedrefs
+            cmode <- annexCommitMode <$> Annex.getGitConfig
+            committedref <- inRepo $ Git.Branch.commitAlways cmode message fullname transitionedrefs
             setIndexSha committedref
         else do
             ref <- getBranch

@@ -657,9 +660,10 @@ rememberTreeish treeish graftpoint = lockJournal $ \jl -> do
     origtree <- fromMaybe (giveup "unable to determine git-annex branch tree") <$>
         inRepo (Git.Ref.tree branchref)
     addedt <- inRepo $ Git.Tree.graftTree treeish graftpoint origtree
-    c <- inRepo $ Git.Branch.commitTree Git.Branch.AutomaticCommit
+    cmode <- annexCommitMode <$> Annex.getGitConfig
+    c <- inRepo $ Git.Branch.commitTree cmode
         "graft" [branchref] addedt
-    c' <- inRepo $ Git.Branch.commitTree Git.Branch.AutomaticCommit
+    c' <- inRepo $ Git.Branch.commitTree cmode
         "graft cleanup" [c] origtree
     inRepo $ Git.Branch.update' fullname c'
     -- The tree in c' is the same as the tree in branchref,

@@ -90,10 +90,20 @@ enteringStage newstage a = Annex.getState Annex.workers >>= \case
     Nothing -> a
     Just tv -> do
         mytid <- liftIO myThreadId
-        let set = changeStageTo mytid tv newstage
-        let restore = maybe noop (void . changeStageTo mytid tv)
+        let set = changeStageTo mytid tv (const newstage)
+        let restore = maybe noop (void . changeStageTo mytid tv . const)
         bracket set restore (const a)
 
+{- Transition the current thread to the initial stage.
+ - This is done once the thread is ready to begin work.
+ -}
+enteringInitialStage :: Annex ()
+enteringInitialStage = Annex.getState Annex.workers >>= \case
+    Nothing -> noop
+    Just tv -> do
+        mytid <- liftIO myThreadId
+        void $ changeStageTo mytid tv initialStage
+
 {- This needs to leave the WorkerPool with the same number of
  - idle and active threads, and with the same number of threads for each
  - WorkerStage. So, all it can do is swap the WorkerStage of our thread's

@@ -110,14 +120,15 @@ enteringStage newstage a = Annex.getState Annex.workers >>= \case
  - in the pool than spareVals. That does not prevent other threads that call
  - this from using them though, so it's fine.
  -}
-changeStageTo :: ThreadId -> TMVar (WorkerPool AnnexState) -> WorkerStage -> Annex (Maybe WorkerStage)
-changeStageTo mytid tv newstage = liftIO $
+changeStageTo :: ThreadId -> TMVar (WorkerPool AnnexState) -> (UsedStages -> WorkerStage) -> Annex (Maybe WorkerStage)
+changeStageTo mytid tv getnewstage = liftIO $
     replaceidle >>= maybe
         (return Nothing)
         (either waitidle (return . Just))
   where
     replaceidle = atomically $ do
         pool <- takeTMVar tv
+        let newstage = getnewstage (usedStages pool)
         let notchanging = do
             putTMVar tv pool
             return Nothing

@@ -128,7 +139,7 @@ changeStageTo mytid tv newstage = liftIO $
     Nothing -> do
         putTMVar tv $
             addWorkerPool (IdleWorker oldstage) pool'
-        return $ Just $ Left (myaid, oldstage)
+        return $ Just $ Left (myaid, newstage, oldstage)
     Just pool'' -> do
         -- optimisation
         putTMVar tv $

@@ -139,27 +150,26 @@ changeStageTo mytid tv newstage = liftIO $
             _ -> notchanging
         else notchanging
 
-    waitidle (myaid, oldstage) = atomically $ do
+    waitidle (myaid, newstage, oldstage) = atomically $ do
         pool <- waitIdleWorkerSlot newstage =<< takeTMVar tv
         putTMVar tv $ addWorkerPool (ActiveWorker myaid newstage) pool
         return (Just oldstage)
 
--- | Waits until there's an idle worker in the worker pool
--- for its initial stage, removes it from the pool, and returns its state.
+-- | Waits until there's an idle StartStage worker in the worker pool,
+-- removes it from the pool, and returns its state.
 --
 -- If the worker pool is not already allocated, returns Nothing.
-waitInitialWorkerSlot :: TMVar (WorkerPool Annex.AnnexState) -> STM (Maybe (Annex.AnnexState, WorkerStage))
-waitInitialWorkerSlot tv = do
+waitStartWorkerSlot :: TMVar (WorkerPool Annex.AnnexState) -> STM (Maybe (Annex.AnnexState, WorkerStage))
+waitStartWorkerSlot tv = do
     pool <- takeTMVar tv
-    let stage = initialStage (usedStages pool)
-    st <- go stage pool
-    return $ Just (st, stage)
+    st <- go pool
+    return $ Just (st, StartStage)
   where
-    go wantstage pool = case spareVals pool of
+    go pool = case spareVals pool of
         [] -> retry
         (v:vs) -> do
             let pool' = pool { spareVals = vs }
-            putTMVar tv =<< waitIdleWorkerSlot wantstage pool'
+            putTMVar tv =<< waitIdleWorkerSlot StartStage pool'
             return v
 
 waitIdleWorkerSlot :: WorkerStage -> WorkerPool Annex.AnnexState -> STM (WorkerPool Annex.AnnexState)
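
Note: the signature change above — taking a function (UsedStages ->
WorkerStage) instead of a concrete WorkerStage — lets the target stage be
chosen only after the pool has been read inside the STM transaction, since
the pool carries the UsedStages. A self-contained sketch of that shape,
with stand-in types (the real WorkerPool and stage types live in the
git-annex tree):

    import Control.Concurrent.STM

    data WorkerStage = StartStage | PerformStage deriving Show
    newtype UsedStages = UsedStages { initialStage :: WorkerStage }
    -- Stand-in pool: just the UsedStages it was allocated with.
    type Pool = UsedStages

    -- Deferring the choice: the caller passes a selector, and the
    -- concrete stage is computed from the pool inside the transaction.
    pickStage :: TMVar Pool -> (UsedStages -> WorkerStage) -> STM WorkerStage
    pickStage tv getnewstage = do
        pool <- readTMVar tv
        return (getnewstage pool)

Callers that already know the stage pass (const newstage), as the first
hunk of this file shows; enteringInitialStage passes initialStage itself.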
@@ -776,7 +776,7 @@ downloadUrl k p urls file =
     -- download command is used.
     meteredFile file (Just p) k $
         Url.withUrlOptions $ \uo ->
-            liftIO $ anyM (\u -> Url.download p u file uo) urls
+            anyM (\u -> Url.download p u file uo) urls
 
 {- Copies a key's content, when present, to a temp file.
  - This is used to speed up some rsyncs. -}

@@ -108,9 +108,9 @@ initialize' mversion = checkCanInitialize $ do
     unlessM (isJust <$> getVersion) $
         setVersion (fromMaybe defaultVersion mversion)
     configureSmudgeFilter
-    showSideAction "scanning for unlocked files"
-    scanUnlockedFiles
     unlessM isBareRepo $ do
+        showSideAction "scanning for unlocked files"
+        scanUnlockedFiles
         hookWrite postCheckoutHook
         hookWrite postMergeHook
         AdjustedBranch.checkAdjustedClone >>= \case

@@ -192,12 +192,13 @@ restagePointerFile (Restage True) f orig = withTSDelta $ \tsd -> do
     -- on all still-unmodified files, using a copy of the index file,
     -- to bypass the lock. Then replace the old index file with the new
     -- updated index file.
+    runner :: Git.Queue.InternalActionRunner Annex
     runner = Git.Queue.InternalActionRunner "restagePointerFile" $ \r l -> do
-        realindex <- Git.Index.currentIndexFile r
+        realindex <- liftIO $ Git.Index.currentIndexFile r
         let lock = Git.Index.indexFileLock realindex
-            lockindex = catchMaybeIO $ Git.LockFile.openLock' lock
-            unlockindex = maybe noop Git.LockFile.closeLock
-            showwarning = warningIO $ unableToRestage Nothing
+            lockindex = liftIO $ catchMaybeIO $ Git.LockFile.openLock' lock
+            unlockindex = liftIO . maybe noop Git.LockFile.closeLock
+            showwarning = warning $ unableToRestage Nothing
             go Nothing = showwarning
             go (Just _) = withTmpDirIn (Git.localGitDir r) "annexindex" $ \tmpdir -> do
                 let tmpindex = tmpdir </> "index"

@@ -216,7 +217,7 @@ restagePointerFile (Restage True) f orig = withTSDelta $ \tsd -> do
     let replaceindex = catchBoolIO $ do
         moveFile tmpindex realindex
         return True
-    ok <- createLinkOrCopy realindex tmpindex
+    ok <- liftIO $ createLinkOrCopy realindex tmpindex
         <&&> updatetmpindex
         <&&> replaceindex
     unless ok showwarning

@@ -28,24 +28,24 @@ import qualified Git.UpdateIndex
 addCommand :: String -> [CommandParam] -> [FilePath] -> Annex ()
 addCommand command params files = do
     q <- get
-    store <=< flushWhenFull <=< inRepo $
-        Git.Queue.addCommand command params files q
+    store =<< flushWhenFull =<<
+        (Git.Queue.addCommand command params files q =<< gitRepo)
 
-addInternalAction :: Git.Queue.InternalActionRunner -> [(FilePath, IO Bool)] -> Annex ()
+addInternalAction :: Git.Queue.InternalActionRunner Annex -> [(FilePath, IO Bool)] -> Annex ()
 addInternalAction runner files = do
     q <- get
-    store <=< flushWhenFull <=< inRepo $
-        Git.Queue.addInternalAction runner files q
+    store =<< flushWhenFull =<<
+        (Git.Queue.addInternalAction runner files q =<< gitRepo)
 
 {- Adds an update-index stream to the queue. -}
 addUpdateIndex :: Git.UpdateIndex.Streamer -> Annex ()
 addUpdateIndex streamer = do
     q <- get
-    store <=< flushWhenFull <=< inRepo $
-        Git.Queue.addUpdateIndex streamer q
+    store =<< flushWhenFull =<<
+        (Git.Queue.addUpdateIndex streamer q =<< gitRepo)
 
 {- Runs the queue if it is full. -}
-flushWhenFull :: Git.Queue.Queue -> Annex Git.Queue.Queue
+flushWhenFull :: Git.Queue.Queue Annex -> Annex (Git.Queue.Queue Annex)
 flushWhenFull q
     | Git.Queue.full q = flush' q
     | otherwise = return q

@@ -64,25 +64,25 @@ flush = do
  - But, flushing two queues at the same time could lead to failures due to
  - git locking files. So, only one queue is allowed to flush at a time.
  -}
-flush' :: Git.Queue.Queue -> Annex Git.Queue.Queue
+flush' :: Git.Queue.Queue Annex -> Annex (Git.Queue.Queue Annex)
 flush' q = withExclusiveLock gitAnnexGitQueueLock $ do
     showStoringStateAction
-    inRepo $ Git.Queue.flush q
+    Git.Queue.flush q =<< gitRepo
 
 {- Gets the size of the queue. -}
 size :: Annex Int
 size = Git.Queue.size <$> get
 
-get :: Annex Git.Queue.Queue
+get :: Annex (Git.Queue.Queue Annex)
 get = maybe new return =<< getState repoqueue
 
-new :: Annex Git.Queue.Queue
+new :: Annex (Git.Queue.Queue Annex)
 new = do
     q <- Git.Queue.new . annexQueueSize <$> getGitConfig
     store q
     return q
 
-store :: Git.Queue.Queue -> Annex ()
+store :: Git.Queue.Queue Annex -> Annex ()
 store q = changeState $ \s -> s { repoqueue = Just q }
 
 mergeFrom :: AnnexState -> Annex ()

@@ -17,6 +17,7 @@ module Annex.RemoteTrackingBranch
 
 import Annex.Common
 import Annex.CatFile
+import qualified Annex
 import Git.Types
 import qualified Git.Ref
 import qualified Git.Branch

@@ -72,9 +73,10 @@ makeRemoteTrackingBranchMergeCommit tb commitsha =
     _ -> return commitsha
 
 makeRemoteTrackingBranchMergeCommit' :: Sha -> Sha -> Sha -> Annex Sha
-makeRemoteTrackingBranchMergeCommit' commitsha importedhistory treesha =
+makeRemoteTrackingBranchMergeCommit' commitsha importedhistory treesha = do
+    cmode <- annexCommitMode <$> Annex.getGitConfig
     inRepo $ Git.Branch.commitTree
-        Git.Branch.AutomaticCommit
+        cmode
         "remote tracking branch"
         [commitsha, importedhistory]
         treesha

@@ -34,21 +34,9 @@ findExisting name = do
     t <- trustMap
     headMaybe
         . sortBy (comparing $ \(u, _, _) -> Down $ M.lookup u t)
-        . findByName name
+        . findByRemoteConfig (\c -> lookupName c == Just name)
         <$> Logs.Remote.readRemoteLog
 
-findByName :: RemoteName -> M.Map UUID RemoteConfig -> [(UUID, RemoteConfig, Maybe (ConfigFrom UUID))]
-findByName n = map sameasuuid . filter (matching . snd) . M.toList
-  where
-    matching c = case lookupName c of
-        Nothing -> False
-        Just n'
-            | n' == n -> True
-            | otherwise -> False
-    sameasuuid (u, c) = case M.lookup sameasUUIDField c of
-        Nothing -> (u, c, Nothing)
-        Just u' -> (toUUID u', c, Just (ConfigFrom u))
-
 newConfig
     :: RemoteName
     -> Maybe (Sameas UUID)

@@ -101,3 +101,11 @@ removeSameasInherited :: RemoteConfig -> RemoteConfig
 removeSameasInherited c = case M.lookup sameasUUIDField c of
     Nothing -> c
     Just _ -> M.withoutKeys c sameasInherits
+
+{- Finds remote uuids with matching RemoteConfig. -}
+findByRemoteConfig :: (RemoteConfig -> Bool) -> M.Map UUID RemoteConfig -> [(UUID, RemoteConfig, Maybe (ConfigFrom UUID))]
+findByRemoteConfig matching = map sameasuuid . filter (matching . snd) . M.toList
+  where
+    sameasuuid (u, c) = case M.lookup sameasUUIDField c of
+        Nothing -> (u, c, Nothing)
+        Just u' -> (toUUID u', c, Just (ConfigFrom u))
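
Note: findByName is not gone; it has been generalized. Its old behaviour is
recovered at the call site in findExisting by passing a name-matching
predicate, so (a sketch, using only names from the diff):

    findByName :: RemoteName -> M.Map UUID RemoteConfig -> [(UUID, RemoteConfig, Maybe (ConfigFrom UUID))]
    findByName name = findByRemoteConfig (\c -> lookupName c == Just name)

Any other predicate over RemoteConfig (for example, matching a stored url)
can now reuse the same sameas-aware lookup.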
Annex/Url.hs | 47
@@ -1,24 +1,39 @@
 {- Url downloading, with git-annex user agent and configured http
  - headers, security restrictions, etc.
  -
- - Copyright 2013-2018 Joey Hess <id@joeyh.name>
+ - Copyright 2013-2019 Joey Hess <id@joeyh.name>
  -
  - Licensed under the GNU AGPL version 3 or higher.
  -}
 
 module Annex.Url (
-    module U,
     withUrlOptions,
     getUrlOptions,
     getUserAgent,
     ipAddressesUnlimited,
+    checkBoth,
+    download,
+    exists,
+    getUrlInfo,
+    U.downloadQuiet,
+    U.URLString,
+    U.UrlOptions(..),
+    U.UrlInfo(..),
+    U.sinkResponseFile,
+    U.matchStatusCodeException,
+    U.downloadConduit,
+    U.downloadPartial,
+    U.parseURIRelaxed,
+    U.allowedScheme,
+    U.assumeUrlExists,
 ) where
 
 import Annex.Common
 import qualified Annex
-import Utility.Url as U
+import qualified Utility.Url as U
 import Utility.IPAddress
 import Utility.HttpManagerRestricted
+import Utility.Metered
 import qualified BuildInfo
 
 import Network.Socket

@@ -43,7 +58,7 @@ getUrlOptions = Annex.getState Annex.urloptions >>= \case
   where
     mk = do
         (urldownloader, manager) <- checkallowedaddr
-        mkUrlOptions
+        U.mkUrlOptions
             <$> (Just <$> getUserAgent)
             <*> headers
             <*> pure urldownloader

@@ -108,3 +123,27 @@ ipAddressesUnlimited =
 
 withUrlOptions :: (U.UrlOptions -> Annex a) -> Annex a
 withUrlOptions a = a =<< getUrlOptions
+
+checkBoth :: U.URLString -> Maybe Integer -> U.UrlOptions -> Annex Bool
+checkBoth url expected_size uo =
+    liftIO (U.checkBoth url expected_size uo) >>= \case
+        Right r -> return r
+        Left err -> warning err >> return False
+
+download :: MeterUpdate -> U.URLString -> FilePath -> U.UrlOptions -> Annex Bool
+download meterupdate url file uo =
+    liftIO (U.download meterupdate url file uo) >>= \case
+        Right () -> return True
+        Left err -> warning err >> return False
+
+exists :: U.URLString -> U.UrlOptions -> Annex Bool
+exists url uo = liftIO (U.exists url uo) >>= \case
+    Right b -> return b
+    Left err -> warning err >> return False
+
+getUrlInfo :: U.URLString -> U.UrlOptions -> Annex U.UrlInfo
+getUrlInfo url uo = liftIO (U.getUrlInfo url uo) >>= \case
+    Right i -> return i
+    Left err -> do
+        warning err
+        return $ U.UrlInfo False Nothing Nothing
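
Note: these new wrappers run in the Annex monad and convert Utility.Url's
Either-style errors into a warning plus a plain result, so most call sites
below lose a liftIO and gain error reporting for free. A sketch of a
typical caller under that API (the helper name fetchOne is illustrative):

    -- Tries one url; on failure a warning has already been emitted
    -- and False comes back.
    fetchOne :: U.URLString -> FilePath -> Annex Bool
    fetchOne url file =
        withUrlOptions $ \uo -> download nullMeterUpdate url file uo

Callers that still need the raw Either (like the assistant's distribution
download) keep using Utility.Url directly and test with isRight.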
@@ -12,6 +12,7 @@ import Annex.View.ViewedFile
 import Types.View
 import Types.MetaData
 import Annex.MetaData
+import qualified Annex
 import qualified Git
 import qualified Git.DiffTree as DiffTree
 import qualified Git.Branch

@@ -418,7 +419,8 @@ withViewIndex a = do
 genViewBranch :: View -> Annex Git.Branch
 genViewBranch view = withViewIndex $ do
     let branch = branchView view
-    void $ inRepo $ Git.Branch.commit Git.Branch.AutomaticCommit True (fromRef branch) branch []
+    cmode <- annexCommitMode <$> Annex.getGitConfig
+    void $ inRepo $ Git.Branch.commit cmode True (fromRef branch) branch []
     return branch
 
 withCurrentView :: (View -> Annex a) -> Annex a

@@ -21,6 +21,7 @@ import qualified Git.LsTree
 import qualified Git.Types
 import qualified Database.Keys
 import qualified Database.Keys.SQL
+import Config
 
 {- Looks up the key corresponding to an annexed file in the work tree,
  - by examining what the file links to.

@@ -74,7 +75,7 @@ ifAnnexed file yes no = maybe no yes =<< lookupFile file
  - as-is.
  -}
 scanUnlockedFiles :: Annex ()
-scanUnlockedFiles = whenM (inRepo Git.Ref.headExists) $ do
+scanUnlockedFiles = whenM (inRepo Git.Ref.headExists <&&> not <$> isBareRepo) $ do
     Database.Keys.runWriter $
         liftIO . Database.Keys.SQL.dropAllAssociatedFiles
     (l, cleanup) <- inRepo $ Git.LsTree.lsTree Git.LsTree.LsTreeRecursive Git.Ref.headRef
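
Note: <&&> here is the short-circuiting monadic AND from git-annex's
utility library, so isBareRepo is only consulted when HEAD exists. For
illustration, a minimal definition with the same semantics:

    -- Runs the second action only if the first returned True.
    (<&&>) :: Monad m => m Bool -> m Bool -> m Bool
    a <&&> b = a >>= \x -> if x then b else return False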
@@ -18,7 +18,6 @@ import Annex.Common
 import qualified Annex
 import Annex.Content
 import Annex.Url
-import Utility.Url (URLString)
 import Utility.DiskFree
 import Utility.HtmlDetect
 import Utility.Process.Transcript

@@ -53,8 +53,9 @@ initRepo True primary_assistant_repo dir desc mgroup = inDir dir $ do
     initRepo' desc mgroup
     {- Initialize the master branch, so things that expect
      - to have it will work, before any files are added. -}
-    unlessM (Git.Config.isBare <$> gitRepo) $
-        void $ inRepo $ Git.Branch.commitCommand Git.Branch.AutomaticCommit
+    unlessM (Git.Config.isBare <$> gitRepo) $ do
+        cmode <- annexCommitMode <$> Annex.getGitConfig
+        void $ inRepo $ Git.Branch.commitCommand cmode
         [ Param "--quiet"
         , Param "--allow-empty"
        , Param "-m"

@@ -95,7 +95,9 @@ newAssistantUrl repo = do
  - warp-tls listens to http, in order to show an error page, so this works.
  -}
 assistantListening :: URLString -> IO Bool
-assistantListening url = catchBoolIO $ exists url' =<< defUrlOptions
+assistantListening url = catchBoolIO $ do
+    uo <- defUrlOptions
+    (== Right True) <$> exists url' uo
   where
     url' = case parseURI url of
         Nothing -> url

@@ -36,7 +36,6 @@ import qualified Annex
 import Utility.InodeCache
 import qualified Database.Keys
 import qualified Command.Sync
-import qualified Git.Branch
 import Utility.Tuple
 import Utility.Metered
 
@@ -231,7 +230,8 @@ commitStaged msg = do
     case v of
         Left _ -> return False
         Right _ -> do
-            ok <- Command.Sync.commitStaged Git.Branch.AutomaticCommit msg
+            cmode <- annexCommitMode <$> Annex.getGitConfig
+            ok <- Command.Sync.commitStaged cmode msg
             when ok $
                 Command.Sync.updateBranches =<< getCurrentBranch
             return ok

@@ -14,6 +14,7 @@ import Assistant.Sync
 import Utility.DirWatcher
 import Utility.DirWatcher.Types
 import Annex.CurrentBranch
+import qualified Annex
 import qualified Annex.Branch
 import qualified Git
 import qualified Git.Branch

@@ -80,11 +81,13 @@ onChange file
         [ "merging", Git.fromRef changedbranch
         , "into", Git.fromRef b
         ]
-    void $ liftAnnex $ Command.Sync.merge
-        currbranch Command.Sync.mergeConfig
-        def
-        Git.Branch.AutomaticCommit
-        changedbranch
+    void $ liftAnnex $ do
+        cmode <- annexCommitMode <$> Annex.getGitConfig
+        Command.Sync.merge
+            currbranch Command.Sync.mergeConfig
+            def
+            cmode
+            changedbranch
     mergecurrent' _ = noop
 
 {- Is the first branch a synced branch or remote tracking branch related

@@ -183,7 +183,7 @@ runHandler :: Handler -> FilePath -> Maybe FileStatus -> Assistant ()
 runHandler handler file filestatus = void $ do
     r <- tryIO <~> handler (normalize file) filestatus
     case r of
-        Left e -> liftIO $ warningIO $ show e
+        Left e -> liftAnnex $ warning $ show e
         Right Nothing -> noop
         Right (Just change) -> recordChange change
   where

@@ -40,9 +40,10 @@ import Utility.Metered
 import qualified Utility.Lsof as Lsof
 import qualified BuildInfo
 import qualified Utility.Url as Url
-import qualified Annex.Url as Url
+import qualified Annex.Url as Url hiding (download)
 import Utility.Tuple
 
+import Data.Either
 import qualified Data.Map as M
 
 {- Upgrade without interaction in the webapp. -}

@@ -323,8 +324,8 @@ downloadDistributionInfo = do
     liftIO $ withTmpDir "git-annex.tmp" $ \tmpdir -> do
         let infof = tmpdir </> "info"
         let sigf = infof ++ ".sig"
-        ifM (Url.download nullMeterUpdate distributionInfoUrl infof uo
-            <&&> Url.download nullMeterUpdate distributionInfoSigUrl sigf uo
+        ifM (isRight <$> Url.download nullMeterUpdate distributionInfoUrl infof uo
+            <&&> (isRight <$> Url.download nullMeterUpdate distributionInfoSigUrl sigf uo)
             <&&> verifyDistributionSig gpgcmd sigf)
             ( parseInfoFile <$> readFileStrict infof
             , return Nothing

@@ -192,7 +192,7 @@ escapeHeader = escapeURIString (\c -> isUnescapedInURI c && c /= ' ')
 getRepoInfo :: RemoteConfig -> Widget
 getRepoInfo c = do
     uo <- liftAnnex Url.getUrlOptions
-    exists <- liftIO $ catchDefaultIO False $ Url.exists url uo
+    exists <- liftAnnex $ catchDefaultIO False $ Url.exists url uo
     [whamlet|
 <a href="#{url}">
   Internet Archive item

@@ -26,6 +26,7 @@ import Git.Command
 
 import Data.Time.Clock
 import Data.Char
+import Data.Either
 import System.Posix.Directory
 
 -- git-annex distribution signing key (for Joey Hess)

@@ -86,7 +87,7 @@ getbuild repodir (url, f) = do
         putStrLn $ "*** " ++ s
         return Nothing
     uo <- defUrlOptions
-    ifM (download nullMeterUpdate url tmp uo)
+    ifM (isRight <$> download nullMeterUpdate url tmp uo)
         ( ifM (liftIO $ virusFree tmp)
             ( do
                 bv2 <- getbv

@@ -50,8 +50,12 @@ installLibs appbase replacement_libs libmap = do
     let symdest = appbase </> shortlib
     -- This is a hack; libraries need to be in the same
     -- directory as the program, so also link them into the
-    -- extra directory.
-    let symdestextra = appbase </> "extra" </> shortlib
+    -- extra and git-core directories so programs in those will
+    -- find them.
+    let symdestextra =
+        [ appbase </> "extra" </> shortlib
+        , appbase </> "git-core" </> shortlib
+        ]
     ifM (doesFileExist dest)
         ( return Nothing
         , do

@@ -59,9 +63,11 @@ installLibs appbase replacement_libs libmap = do
     putStrLn $ "installing " ++ pathlib ++ " as " ++ shortlib
     unlessM (boolSystem "cp" [File pathlib, File dest]
         <&&> boolSystem "chmod" [Param "644", File dest]
-        <&&> boolSystem "ln" [Param "-s", File fulllib, File symdest]
-        <&&> boolSystem "ln" [Param "-s", File (".." </> fulllib), File symdestextra]) $
+        <&&> boolSystem "ln" [Param "-s", File fulllib, File symdest]) $
         error "library install failed"
+    forM_ symdestextra $ \d ->
+        unlessM (boolSystem "ln" [Param "-s", File (".." </> fulllib), File d]) $
+            error "library linking failed"
     return $ Just appbase
     )
 return (catMaybes libs, replacement_libs', libmap')

@@ -56,16 +56,26 @@ installGitLibs topdir = do
     if issymlink
         then do
             -- many git-core files may symlink to eg
-            -- ../../git. The link targets are put
-            -- into a subdirectory so all links to
-            -- .../git get the same binary.
+            -- ../../bin/git, which is located outside
+            -- the git-core directory. The target of
+            -- such links is installed into the progDir
+            -- (if not already there), and the links
+            -- repointed to it.
+            --
+            -- Other git-core files symlink to a file
+            -- beside them in the directory. Those
+            -- links can be copied as-is.
             linktarget <- readSymbolicLink f
-            let linktarget' = gitcoredestdir </> "bin" </> takeFileName linktarget
-            createDirectoryIfMissing True (takeDirectory linktarget')
-            L.readFile f >>= L.writeFile linktarget'
-            nukeFile destf
-            rellinktarget <- relPathDirToFile (takeDirectory destf) linktarget'
-            createSymbolicLink rellinktarget destf
+            if takeFileName linktarget == linktarget
+                then cp f destf
+                else do
+                    let linktarget' = progDir topdir </> takeFileName linktarget
+                    unlessM (doesFileExist linktarget') $ do
+                        createDirectoryIfMissing True (takeDirectory linktarget')
+                        L.readFile f >>= L.writeFile linktarget'
+                    nukeFile destf
+                    rellinktarget <- relPathDirToFile (takeDirectory destf) linktarget'
+                    createSymbolicLink rellinktarget destf
         else cp f destf
 
 -- install git's template files

CHANGELOG | 35
@@ -13,11 +13,42 @@ git-annex (8.20191107) UNRELEASED; urgency=medium
     Microbenchmarks show around 10-25% speedup of sqlite database operations.
   * Improved serialization of filenames and keys to the sqlite databases,
     avoiding encoding problems.
-  * Windows: Fix handling of changes to time zone. (Used to work but was
-    broken in version 7.20181031.)
 
  -- Joey Hess <id@joeyh.name>  Tue, 29 Oct 2019 15:13:03 -0400
 
+git-annex (7.20191115) UNRELEASED; urgency=medium
+
+  * Stop displaying rsync progress, and use git-annex's own progress display
+    for local-to-local repo transfers.
+  * git-lfs: The url provided to initremote/enableremote will now be
+    stored in the git-annex branch, allowing enableremote to be used without
+    an url. initremote --sameas can be used to add additional urls.
+  * git-lfs: When there's a git remote with an url that's known to be
+    used for git-lfs, automatically enable the special remote.
+  * sync, assistant: Pull and push from git-lfs remotes.
+  * Fix bug that made bare repos be treated as non-bare when --git-dir
+    was used.
+  * benchmark: Changed --databases to take a parameter specifiying the size
+    of the database to benchmark.
+  * benchmark --databases: Display size of the populated database.
+  * benchmark --databases: Improve the "addAssociatedFile to (new)"
+    benchmark to really add new values, not overwriting old values.
+
+ -- Joey Hess <id@joeyh.name>  Fri, 15 Nov 2019 11:57:19 -0400
+
+git-annex (7.20191114) upstream; urgency=medium
+
+  * Added annex.allowsign option.
+  * Make --json-error-messages capture more errors,
+    particularly url download errors.
+  * Fix a crash (STM deadlock) when -J is used with multiple files
+    that point to the same key.
+  * linuxstandalone: Fix a regression that broke git-remote-https.
+  * OSX git-annex.app: Fix a problem that prevented using the bundled
+    git-remote-https, git-remote-http, and git-shell.
+
+ -- Joey Hess <id@joeyh.name>  Thu, 14 Nov 2019 21:57:59 -0400
+
 git-annex (7.20191106) upstream; urgency=medium
 
  * init: Fix bug that lost modifications to unlocked files when init is

@@ -63,7 +63,7 @@ commandAction start = Annex.getState Annex.concurrency >>= \case
     runconcurrent = Annex.getState Annex.workers >>= \case
         Nothing -> runnonconcurrent
         Just tv ->
-            liftIO (atomically (waitInitialWorkerSlot tv)) >>=
+            liftIO (atomically (waitStartWorkerSlot tv)) >>=
                 maybe runnonconcurrent (runconcurrent' tv)
     runconcurrent' tv (workerst, workerstage) = do
         aid <- liftIO $ async $ snd <$> Annex.run workerst

@@ -99,12 +99,13 @@ commandAction start = Annex.getState Annex.concurrency >>= \case
     case mkActionItem startmsg' of
         OnlyActionOn k' _ | k' /= k ->
             concurrentjob' workerst startmsg' perform'
-        _ -> mkjob workerst startmsg' perform'
+        _ -> beginjob workerst startmsg' perform'
     Nothing -> noop
-    _ -> mkjob workerst startmsg perform
+    _ -> beginjob workerst startmsg perform
 
-mkjob workerst startmsg perform =
-    inOwnConsoleRegion (Annex.output workerst) $
+beginjob workerst startmsg perform =
+    inOwnConsoleRegion (Annex.output workerst) $ do
+        enteringInitialStage
         void $ accountCommandAction startmsg $
             performconcurrent startmsg perform

@@ -197,8 +197,7 @@ startWeb o urlstring = go $ fromMaybe bad $ parseURI urlstring
     pathmax <- liftIO $ fileNameLengthLimit "."
     urlinfo <- if relaxedOption (downloadOptions o)
         then pure Url.assumeUrlExists
-        else Url.withUrlOptions $
-            liftIO . Url.getUrlInfo urlstring
+        else Url.withUrlOptions $ Url.getUrlInfo urlstring
     file <- adjustFile o <$> case fileOption (downloadOptions o) of
         Just f -> pure f
         Nothing -> case Url.urlSuggestedFile urlinfo of

@@ -26,7 +26,7 @@ cmd generator = command "benchmark" SectionTesting
 
 data BenchmarkOptions
     = BenchmarkOptions CmdParams CriterionMode
-    | BenchmarkDatabases CriterionMode
+    | BenchmarkDatabases CriterionMode Integer
 
 optParser :: CmdParamsDesc -> Parser BenchmarkOptions
 optParser desc = benchmarkoptions <|> benchmarkdatabases

@@ -36,10 +36,11 @@ optParser desc = benchmarkoptions <|> benchmarkdatabases
         <*> criterionopts
     benchmarkdatabases = BenchmarkDatabases
         <$> criterionopts
-        <* flag' ()
+        <*> option auto
             ( long "databases"
+            <> metavar paramNumber
             <> help "benchmark sqlite databases"
             )
 #ifdef WITH_BENCHMARK
     criterionopts = parseWith defaultConfig
 #else
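
Note: with this change --databases is no longer a bare flag; it takes the
number of items to populate the test database with. Assuming a git-annex
built from this branch, an invocation looks like:

    git annex benchmark --databases 1000

The old behaviour of benchmarking a fixed set of sizes (1000 and 10000, per
the removed list further down) is replaced by benchmarking just the
requested size.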
@@ -51,7 +52,7 @@ seek :: BenchmarkGenerator -> BenchmarkOptions -> CommandSeek
 seek generator (BenchmarkOptions ps mode) = do
     runner <- generator ps
     liftIO $ runMode mode [ bench (unwords ps) $ nfIO runner ]
-seek _ (BenchmarkDatabases mode) = benchmarkDbs mode
+seek _ (BenchmarkDatabases mode n) = benchmarkDbs mode n
 #else
 seek _ _ = giveup "git-annex is not built with benchmarking support"
 #endif

@@ -30,7 +30,6 @@ import Utility.InodeCache
 import Logs.Location
 import Git.FilePath
 import Git.Types
-import Git.Branch
 import Types.Import
 import Utility.Metered

@@ -40,7 +39,7 @@ cmd :: Command
 cmd = notBareRepo $
     withGlobalOptions [jobsOption, jsonOptions, fileMatchingOptions] $
         command "import" SectionCommon
-            "import files from elsewhere into the repository"
+            "add a tree of files to the repository"
             (paramPaths ++ "|BRANCH[:SUBDIR]")
             (seek <$$> optParser)

@@ -266,7 +265,8 @@ seekRemote remote branch msubdir = do
     Nothing -> giveup $ "Unable to find base tree for branch " ++ fromRef branch
 
 trackingcommit <- fromtrackingbranch Git.Ref.sha
-let importcommitconfig = ImportCommitConfig trackingcommit AutomaticCommit importmessage
+cmode <- annexCommitMode <$> Annex.getGitConfig
+let importcommitconfig = ImportCommitConfig trackingcommit cmode importmessage
 let commitimport = commitRemote remote branch tb trackingcommit importtreeconfig importcommitconfig
 
 importabletvar <- liftIO $ newTVarIO Nothing

@@ -146,13 +146,12 @@ findDownloads u f = catMaybes $ map mk (feedItems f)
 downloadFeed :: URLString -> Annex (Maybe String)
 downloadFeed url
     | Url.parseURIRelaxed url == Nothing = giveup "invalid feed url"
-    | otherwise = Url.withUrlOptions $ \uo ->
-        liftIO $ withTmpFile "feed" $ \f h -> do
-            hClose h
-            ifM (Url.download nullMeterUpdate url f uo)
-                ( Just <$> readFileStrict f
-                , return Nothing
-                )
+    | otherwise = withTmpFile "feed" $ \f h -> do
+        liftIO $ hClose h
+        ifM (Url.withUrlOptions $ Url.download nullMeterUpdate url f)
+            ( Just <$> liftIO (readFileStrict f)
+            , return Nothing
+            )
 
 performDownload :: ImportFeedOptions -> Cache -> ToDownload -> Annex Bool
 performDownload opts cache todownload = case location todownload of

@@ -164,7 +163,7 @@ performDownload opts cache todownload = case location todownload of
     urlinfo <- if relaxedOption (downloadOptions opts)
         then pure Url.assumeUrlExists
         else Url.withUrlOptions $
-            liftIO . Url.getUrlInfo url
+            Url.getUrlInfo url
     let dlopts = (downloadOptions opts)
         -- force using the filename
         -- chosen here

@@ -19,6 +19,7 @@ import Database.Init
 import Utility.Tmp.Dir
 import Git.FilePath
 import Types.Key
+import Utility.DataUnits
 
 import Criterion.Main
 import Control.Monad.IO.Class (liftIO)

@@ -26,17 +27,12 @@ import qualified Data.ByteString.Char8 as B8
 import System.Random
 #endif
 
-benchmarkDbs :: CriterionMode -> Annex ()
+benchmarkDbs :: CriterionMode -> Integer -> Annex ()
 #ifdef WITH_BENCHMARK
-benchmarkDbs mode = withTmpDirIn "." "benchmark" $ \tmpdir -> do
-    -- benchmark different sizes of databases
-    dbs <- mapM (benchDb tmpdir)
-        [ 1000
-        , 10000
-        -- , 100000
-        ]
+benchmarkDbs mode n = withTmpDirIn "." "benchmark" $ \tmpdir -> do
+    db <- benchDb tmpdir n
     liftIO $ runMode mode
-        [ bgroup "keys database" $ flip concatMap dbs $ \db ->
+        [ bgroup "keys database"
             [ getAssociatedFilesHitBench db
             , getAssociatedFilesMissBench db
             , getAssociatedKeyHitBench db

@@ -78,22 +74,22 @@ addAssociatedFileOldBench (BenchDb h num) = bench ("addAssociatedFile to " ++ sh
 addAssociatedFileNewBench :: BenchDb -> Benchmark
 addAssociatedFileNewBench (BenchDb h num) = bench ("addAssociatedFile to " ++ show num ++ " (new)") $ nfIO $ do
     n <- getStdRandom (randomR (1,num))
-    SQL.addAssociatedFile (keyN n) (fileN (n+1)) (SQL.WriteHandle h)
+    SQL.addAssociatedFile (keyN n) (fileN (num+n)) (SQL.WriteHandle h)
     H.flushDbQueue h
 
-populateAssociatedFiles :: H.DbQueue -> Int -> IO ()
+populateAssociatedFiles :: H.DbQueue -> Integer -> IO ()
 populateAssociatedFiles h num = do
     forM_ [1..num] $ \n ->
         SQL.addAssociatedFile (keyN n) (fileN n) (SQL.WriteHandle h)
     H.flushDbQueue h
 
-keyN :: Int -> Key
+keyN :: Integer -> Key
 keyN n = stubKey
     { keyName = B8.pack $ "key" ++ show n
     , keyVariety = OtherKey "BENCH"
     }
 
-fileN :: Int -> TopFilePath
+fileN :: Integer -> TopFilePath
 fileN n = asTopFilePath ("file" ++ show n)
 
 keyMiss :: Key

@@ -102,14 +98,17 @@ keyMiss = keyN 0 -- 0 is never stored
 fileMiss :: TopFilePath
 fileMiss = fileN 0 -- 0 is never stored
 
-data BenchDb = BenchDb H.DbQueue Int
+data BenchDb = BenchDb H.DbQueue Integer
 
-benchDb :: FilePath -> Int -> Annex BenchDb
+benchDb :: FilePath -> Integer -> Annex BenchDb
 benchDb tmpdir num = do
-    liftIO $ putStrLn $ "setting up database with " ++ show num
+    liftIO $ putStrLn $ "setting up database with " ++ show num ++ " items"
     initDb db SQL.createTables
     h <- liftIO $ H.openDbQueue H.MultiWriter db SQL.containedTable
     liftIO $ populateAssociatedFiles h num
+    sz <- liftIO $ getFileSize db
+    liftIO $ putStrLn $ "size of database on disk: " ++
+        roughSize storageUnits False sz
     return (BenchDb h num)
   where
     db = tmpdir </> show num </> "db"

@@ -94,6 +94,14 @@ store s repo = do
 	, fullconfig = M.unionWith (++) c (fullconfig repo)
 	}

+{- Stores a single config setting in a Repo, returning the new version of
+ - the Repo. Config settings can be updated incrementally. -}
+store' :: String -> String -> Repo -> Repo
+store' k v repo = repo
+	{ config = M.singleton k v `M.union` config repo
+	, fullconfig = M.unionWith (++) (M.singleton k [v]) (fullconfig repo)
+	}
+
 {- Updates the location of a repo, based on its configuration.
  -
  - Git.Construct makes LocalUknown repos, of which only a directory is
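store' gives later code a way to record one config setting without re-reading the whole config; configKnownUrl in Remote/GitLFS.hs below relies on it. A minimal sketch of the same update-by-key idea, using plain Data.Map stand-ins for the Repo fields:

import qualified Data.Map as M

data Repo = Repo
    { config :: M.Map String String        -- latest value per key
    , fullconfig :: M.Map String [String]  -- all values per key
    } deriving Show

-- Mirrors store' above: the new value wins in config,
-- and is prepended to the value list in fullconfig.
store' :: String -> String -> Repo -> Repo
store' k v repo = repo
    { config = M.singleton k v `M.union` config repo
    , fullconfig = M.unionWith (++) (M.singleton k [v]) (fullconfig repo)
    }

main :: IO ()
main = print $ store' "remote.origin.annex-git-lfs" "true" (Repo M.empty M.empty)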
@@ -67,8 +67,12 @@ get = do
 	configure (Just d) _ = do
 		absd <- absPath d
 		curr <- getCurrentDirectory
-		Git.Config.read $ newFrom $
+		r <- Git.Config.read $ newFrom $
 			Local { gitdir = absd, worktree = Just curr }
+		return $ if Git.Config.isBare r
+			then r { location = (location r) { worktree = Nothing } }
+			else r

 	configure Nothing Nothing = giveup "Not in a git repository."

 	addworktree w r = changelocation r $
--- a/Git/Queue.hs
+++ b/Git/Queue.hs
@@ -1,6 +1,6 @@
 {- git repository command queue
  -
- - Copyright 2010-2018 Joey Hess <id@joeyh.name>
+ - Copyright 2010-2019 Joey Hess <id@joeyh.name>
  -
  - Licensed under the GNU AGPL version 3 or higher.
  -}
@@ -27,9 +27,10 @@ import Git.Command
 import qualified Git.UpdateIndex

 import qualified Data.Map.Strict as M
+import Control.Monad.IO.Class

 {- Queable actions that can be performed in a git repository. -}
-data Action
+data Action m
 	{- Updating the index file, using a list of streamers that can
 	 - be added to as the queue grows. -}
 	= UpdateIndexAction [Git.UpdateIndex.Streamer] -- in reverse order
@@ -43,21 +44,21 @@ data Action
 	{- An internal action to run, on a list of files that can be added
 	 - to as the queue grows. -}
 	| InternalAction
-		{ getRunner :: InternalActionRunner
+		{ getRunner :: InternalActionRunner m
 		, getInternalFiles :: [(FilePath, IO Bool)]
 		}

 {- The String must be unique for each internal action. -}
-data InternalActionRunner = InternalActionRunner String (Repo -> [(FilePath, IO Bool)] -> IO ())
+data InternalActionRunner m = InternalActionRunner String (Repo -> [(FilePath, IO Bool)] -> m ())

-instance Eq InternalActionRunner where
+instance Eq (InternalActionRunner m) where
 	InternalActionRunner s1 _ == InternalActionRunner s2 _ = s1 == s2

 {- A key that can uniquely represent an action in a Map. -}
 data ActionKey = UpdateIndexActionKey | CommandActionKey String | InternalActionKey String
 	deriving (Eq, Ord)

-actionKey :: Action -> ActionKey
+actionKey :: Action m -> ActionKey
 actionKey (UpdateIndexAction _) = UpdateIndexActionKey
 actionKey CommandAction { getSubcommand = s } = CommandActionKey s
 actionKey InternalAction { getRunner = InternalActionRunner s _ } = InternalActionKey s
@@ -65,10 +66,10 @@ actionKey InternalAction { getRunner = InternalActionRunner s _ } = InternalActi
 {- A queue of actions to perform (in any order) on a git repository,
  - with lists of files to perform them on. This allows coalescing
  - similar git commands. -}
-data Queue = Queue
+data Queue m = Queue
 	{ size :: Int
 	, _limit :: Int
-	, items :: M.Map ActionKey Action
+	, items :: M.Map ActionKey (Action m)
 	}

 {- A recommended maximum size for the queue, after which it should be
@@ -84,7 +85,7 @@
 defaultLimit = 10240

 {- Constructor for empty queue. -}
-new :: Maybe Int -> Queue
+new :: Maybe Int -> Queue m
 new lim = Queue 0 (fromMaybe defaultLimit lim) M.empty

 {- Adds an git command to the queue.
@@ -93,7 +94,7 @@ new lim = Queue 0 (fromMaybe defaultLimit lim) M.empty
  - assumed to be equivilant enough to perform in any order with the same
  - result.
  -}
-addCommand :: String -> [CommandParam] -> [FilePath] -> Queue -> Repo -> IO Queue
+addCommand :: MonadIO m => String -> [CommandParam] -> [FilePath] -> Queue m -> Repo -> m (Queue m)
 addCommand subcommand params files q repo =
 	updateQueue action different (length files) q repo
   where
@@ -107,7 +108,7 @@ addCommand subcommand params files q repo =
 	different _ = True

 {- Adds an internal action to the queue. -}
-addInternalAction :: InternalActionRunner -> [(FilePath, IO Bool)] -> Queue -> Repo -> IO Queue
+addInternalAction :: MonadIO m => InternalActionRunner m -> [(FilePath, IO Bool)] -> Queue m -> Repo -> m (Queue m)
 addInternalAction runner files q repo =
 	updateQueue action different (length files) q repo
   where
@@ -120,7 +121,7 @@ addInternalAction runner files q repo =
 	different _ = True

 {- Adds an update-index streamer to the queue. -}
-addUpdateIndex :: Git.UpdateIndex.Streamer -> Queue -> Repo -> IO Queue
+addUpdateIndex :: MonadIO m => Git.UpdateIndex.Streamer -> Queue m -> Repo -> m (Queue m)
 addUpdateIndex streamer q repo =
 	updateQueue action different 1 q repo
   where
@@ -133,7 +134,7 @@ addUpdateIndex streamer q repo =
 {- Updates or adds an action in the queue. If the queue already contains a
  - different action, it will be flushed; this is to ensure that conflicting
  - actions, like add and rm, are run in the right order.-}
-updateQueue :: Action -> (Action -> Bool) -> Int -> Queue -> Repo -> IO Queue
+updateQueue :: MonadIO m => Action m -> (Action m -> Bool) -> Int -> Queue m -> Repo -> m (Queue m)
 updateQueue !action different sizeincrease q repo
 	| null (filter different (M.elems (items q))) = return $ go q
 	| otherwise = go <$> flush q repo
@@ -150,7 +151,7 @@ updateQueue !action different sizeincrease q repo
 {- The new value comes first. It probably has a smaller list of files than
  - the old value. So, the list append of the new value first is more
  - efficient. -}
-combineNewOld :: Action -> Action -> Action
+combineNewOld :: Action m -> Action m -> Action m
 combineNewOld (CommandAction _sc1 _ps1 fs1) (CommandAction sc2 ps2 fs2) =
 	CommandAction sc2 ps2 (fs1++fs2)
 combineNewOld (UpdateIndexAction s1) (UpdateIndexAction s2) =
@@ -162,18 +163,18 @@ combineNewOld anew _aold = anew
 {- Merges the contents of the second queue into the first.
  - This should only be used when the two queues are known to contain
  - non-conflicting actions. -}
-merge :: Queue -> Queue -> Queue
+merge :: Queue m -> Queue m -> Queue m
 merge origq newq = origq
 	{ size = size origq + size newq
 	, items = M.unionWith combineNewOld (items newq) (items origq)
 	}

 {- Is a queue large enough that it should be flushed? -}
-full :: Queue -> Bool
+full :: Queue m -> Bool
 full (Queue cur lim _) = cur >= lim

 {- Runs a queue on a git repository. -}
-flush :: Queue -> Repo -> IO Queue
+flush :: MonadIO m => Queue m -> Repo -> m (Queue m)
 flush (Queue _ lim m) repo = do
 	forM_ (M.elems m) $ runAction repo
 	return $ Queue 0 lim M.empty
@@ -184,11 +185,11 @@ flush (Queue _ lim m) repo = do
  -
  - Intentionally runs the command even if the list of files is empty;
  - this allows queueing commands that do not need a list of files. -}
-runAction :: Repo -> Action -> IO ()
+runAction :: MonadIO m => Repo -> Action m -> m ()
 runAction repo (UpdateIndexAction streamers) =
 	-- list is stored in reverse order
-	Git.UpdateIndex.streamUpdateIndex repo $ reverse streamers
-runAction repo action@(CommandAction {}) = do
+	liftIO $ Git.UpdateIndex.streamUpdateIndex repo $ reverse streamers
+runAction repo action@(CommandAction {}) = liftIO $ do
 #ifndef mingw32_HOST_OS
 	let p = (proc "xargs" $ "-0":"git":toCommand gitparams) { env = gitEnv repo }
 	withHandle StdinHandle createProcessSuccess p $ \h -> do
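Parameterizing Queue and Action over a monad m (with MonadIO constraints on the operations) is what lets queued actions run in the Annex monad instead of bare IO. A self-contained sketch of the same generalization, assuming nothing from git-annex:

import Control.Monad.IO.Class (MonadIO, liftIO)

-- A queue of named actions in some monad m, mirroring Queue m above.
newtype Queue m = Queue [(String, m ())]

new :: Queue m
new = Queue []

add :: String -> m () -> Queue m -> Queue m
add name a (Queue as) = Queue ((name, a) : as)

-- Like flush above: needs only MonadIO, so it can run in plain IO
-- or in a reader/state stack such as git-annex's Annex monad.
flush :: MonadIO m => Queue m -> m (Queue m)
flush (Queue as) = do
    mapM_ snd (reverse as)
    liftIO $ putStrLn ("flushed " ++ show (length as) ++ " actions")
    return new

main :: IO ()
main = () <$ flush (add "hello" (putStrLn "hello") new)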
@@ -51,6 +51,7 @@ makeLegalName s = case filter legal $ replace "/" "_" s of
 	legal c = isAlphaNum c

 data RemoteLocation = RemoteUrl String | RemotePath FilePath
+	deriving (Eq)

 remoteLocationIsUrl :: RemoteLocation -> Bool
 remoteLocationIsUrl (RemoteUrl _) = True

@@ -206,7 +206,7 @@ downloadTorrentFile u = do
 		withTmpFileIn othertmp "torrent" $ \f h -> do
 			liftIO $ hClose h
 			ok <- Url.withUrlOptions $
-				liftIO . Url.download nullMeterUpdate u f
+				Url.download nullMeterUpdate u f
 			when ok $
 				liftIO $ renameFile f torrent
 			return ok

@@ -716,7 +716,7 @@ checkKeyUrl :: Git.Repo -> CheckPresent
 checkKeyUrl r k = do
 	showChecking r
 	us <- getWebUrls k
-	anyM (\u -> withUrlOptions $ liftIO . checkBoth u (keySize k)) us
+	anyM (\u -> withUrlOptions $ checkBoth u (keySize k)) us

 getWebUrls :: Key -> Annex [URLString]
 getWebUrls key = filter supported <$> getUrls key

@@ -286,7 +286,7 @@ setupRepo gcryptid r
 	{- Ask git-annex-shell to configure the repository as a gcrypt
 	 - repository. May fail if it is too old. -}
 	gitannexshellsetup = Ssh.onRemote NoConsumeStdin r
-		(boolSystem, return False)
+		(\f p -> liftIO (boolSystem f p), return False)
 		"gcryptsetup" [ Param gcryptid ] []

 	denyNonFastForwards = "receive.denyNonFastForwards"
@@ -451,7 +451,7 @@ getGCryptId fast r gc
 	| Git.repoIsLocal r || Git.repoIsLocalUnknown r = extract <$>
 		liftIO (catchMaybeIO $ Git.Config.read r)
 	| not fast = extract . liftM fst <$> getM (eitherToMaybe <$>)
-		[ Ssh.onRemote NoConsumeStdin r (Git.Config.fromPipe r, return (Left $ error "configlist failed")) "configlist" [] []
+		[ Ssh.onRemote NoConsumeStdin r (\f p -> liftIO (Git.Config.fromPipe r f p), return (Left $ error "configlist failed")) "configlist" [] []
 		, getConfigViaRsync r gc
 		]
 	| otherwise = return (Nothing, r)
@@ -143,7 +143,9 @@ configRead autoinit r = do
 		(True, _, _)
 			| remoteAnnexCheckUUID gc -> tryGitConfigRead autoinit r
 			| otherwise -> return r
-		(False, _, NoUUID) -> tryGitConfigRead autoinit r
+		(False, _, NoUUID) -> configSpecialGitRemotes r >>= \case
+			Nothing -> tryGitConfigRead autoinit r
+			Just r' -> return r'
 		_ -> return r

 gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
@@ -231,7 +233,7 @@ repoAvail r
 tryGitConfigRead :: Bool -> Git.Repo -> Annex Git.Repo
 tryGitConfigRead autoinit r
 	| haveconfig r = return r -- already read
-	| Git.repoIsSsh r = store $ do
+	| Git.repoIsSsh r = storeUpdatedRemote $ do
 		v <- Ssh.onRemote NoConsumeStdin r
 			(pipedconfig, return (Left $ giveup "configlist failed"))
 			"configlist" [] configlistfields
@@ -240,30 +242,30 @@ tryGitConfigRead autoinit r
 			| haveconfig r' -> return r'
 			| otherwise -> configlist_failed
 		Left _ -> configlist_failed
-	| Git.repoIsHttp r = store geturlconfig
+	| Git.repoIsHttp r = storeUpdatedRemote geturlconfig
 	| Git.GCrypt.isEncrypted r = handlegcrypt =<< getConfigMaybe (remoteConfig r "uuid")
 	| Git.repoIsUrl r = return r
-	| otherwise = store $ liftIO $
+	| otherwise = storeUpdatedRemote $ liftIO $
 		readlocalannexconfig `catchNonAsync` (const $ return r)
   where
 	haveconfig = not . M.null . Git.config

 	pipedconfig cmd params = do
-		v <- Git.Config.fromPipe r cmd params
+		v <- liftIO $ Git.Config.fromPipe r cmd params
 		case v of
 			Right (r', val) -> do
 				unless (isUUIDConfigured r' || null val) $ do
-					warningIO $ "Failed to get annex.uuid configuration of repository " ++ Git.repoDescribe r
-					warningIO $ "Instead, got: " ++ show val
-					warningIO $ "This is unexpected; please check the network transport!"
+					warning $ "Failed to get annex.uuid configuration of repository " ++ Git.repoDescribe r
+					warning $ "Instead, got: " ++ show val
+					warning $ "This is unexpected; please check the network transport!"
 				return $ Right r'
 			Left l -> return $ Left l

 	geturlconfig = Url.withUrlOptions $ \uo -> do
-		v <- liftIO $ withTmpFile "git-annex.tmp" $ \tmpfile h -> do
-			hClose h
+		v <- withTmpFile "git-annex.tmp" $ \tmpfile h -> do
+			liftIO $ hClose h
 			let url = Git.repoLocation r ++ "/config"
-			ifM (Url.downloadQuiet nullMeterUpdate url tmpfile uo)
+			ifM (liftIO $ Url.downloadQuiet nullMeterUpdate url tmpfile uo)
 				( Just <$> pipedconfig "git" [Param "config", Param "--null", Param "--list", Param "--file", File tmpfile]
 				, return Nothing
 				)
@@ -278,18 +280,6 @@ tryGitConfigRead autoinit r
 		set_ignore "not usable by git-annex" False
 		return r

-	store = observe $ \r' -> do
-		l <- Annex.getGitRemotes
-		let rs = exchange l r'
-		Annex.changeState $ \s -> s { Annex.gitremotes = Just rs }
-
-	exchange [] _ = []
-	exchange (old:ls) new
-		| Git.remoteName old == Git.remoteName new =
-			new : exchange ls new
-		| otherwise =
-			old : exchange ls new
-
 	{- Is this remote just not available, or does
 	 - it not have git-annex-shell?
 	 - Find out by trying to fetch from the remote. -}
@@ -319,7 +309,7 @@ tryGitConfigRead autoinit r
 		g <- gitRepo
 		case Git.GCrypt.remoteRepoId g (Git.remoteName r) of
 			Nothing -> return r
-			Just v -> store $ liftIO $ setUUID r $
+			Just v -> storeUpdatedRemote $ liftIO $ setUUID r $
 				genUUIDInNameSpace gCryptNameSpace v

 	{- The local repo may not yet be initialized, so try to initialize
@@ -337,6 +327,31 @@ tryGitConfigRead autoinit r
 		then [(Fields.autoInit, "1")]
 		else []

+{- Handles special remotes that can be enabled by the presence of
+ - regular git remotes.
+ -
+ - When a remote repo is found to be such a special remote, its
+ - UUID is cached in the git config, and the repo returned with
+ - the UUID set.
+ -}
+configSpecialGitRemotes :: Git.Repo -> Annex (Maybe Git.Repo)
+configSpecialGitRemotes r = Remote.GitLFS.configKnownUrl r >>= \case
+	Nothing -> return Nothing
+	Just r' -> Just <$> storeUpdatedRemote (return r')
+
+storeUpdatedRemote :: Annex Git.Repo -> Annex Git.Repo
+storeUpdatedRemote = observe $ \r' -> do
+	l <- Annex.getGitRemotes
+	let rs = exchange l r'
+	Annex.changeState $ \s -> s { Annex.gitremotes = Just rs }
+  where
+	exchange [] _ = []
+	exchange (old:ls) new
+		| Git.remoteName old == Git.remoteName new =
+			new : exchange ls new
+		| otherwise =
+			old : exchange ls new

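storeUpdatedRemote's exchange helper swaps the freshly-read repo into the cached remote list, matching by remote name. The same replace-by-key pattern in miniature; the Remote type here is a hypothetical stand-in, not git-annex's:

-- Replace every element whose name matches the new element's name,
-- mirroring exchange in storeUpdatedRemote above.
data Remote = Remote { name :: String, url :: String } deriving Show

exchange :: [Remote] -> Remote -> [Remote]
exchange [] _ = []
exchange (old:ls) new
    | name old == name new = new : exchange ls new
    | otherwise = old : exchange ls new

main :: IO ()
main = mapM_ print $
    exchange [Remote "origin" "old-url", Remote "backup" "b"]
             (Remote "origin" "new-url")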
 {- Checks if a given remote has the content for a key in its annex. -}
 inAnnex :: Remote -> State -> Key -> Annex Bool
 inAnnex rmt st key = do
@@ -352,11 +367,10 @@ inAnnex' repo rmt (State connpool duc _ _) key
 	checkhttp = do
 		showChecking repo
 		gc <- Annex.getGitConfig
-		ifM (Url.withUrlOptions $ \uo -> liftIO $
-			anyM (\u -> Url.checkBoth u (keySize key) uo) (keyUrls gc repo rmt key))
-			( return True
-			, giveup "not found"
-			)
+		ifM (Url.withUrlOptions $ \uo -> anyM (\u -> Url.checkBoth u (keySize key) uo) (keyUrls gc repo rmt key))
+			( return True
+			, giveup "not found"
+			)
 	checkremote =
 		let fallback = Ssh.inAnnex repo key
 		in P2PHelper.checkpresent (Ssh.runProto rmt connpool (cantCheck rmt) fallback) key
@@ -498,8 +512,9 @@ copyFromRemote'' repo forcersync r st@(State connpool _ _ _) key file dest meter
 		Just (object, checksuccess) -> do
 			copier <- mkCopier hardlink st params
 			runTransfer (Transfer Download u key)
-				file stdRetry
-				(\p -> copier object dest (combineMeterUpdate p meterupdate) checksuccess)
+				file stdRetry $ \p ->
+					metered (Just (combineMeterUpdate p meterupdate)) key $ \_ p' ->
+						copier object dest p' checksuccess
 	| Git.repoIsSsh repo = if forcersync
 		then fallback meterupdate
 		else P2PHelper.retrieve
@@ -632,15 +647,15 @@ copyToRemote' repo r st@(State connpool duc _ _) key file meterupdate
 		-- run copy from perspective of remote
 		onLocalFast repo r $ ifM (Annex.Content.inAnnex key)
 			( return True
-			, do
+			, runTransfer (Transfer Download u key) file stdRetry $ \p -> do
 				copier <- mkCopier hardlink st params
 				let verify = Annex.Content.RemoteVerify r
 				let rsp = RetrievalAllKeysSecure
-				runTransfer (Transfer Download u key) file stdRetry $ \p ->
-					let p' = combineMeterUpdate meterupdate p
-					in Annex.Content.saveState True `after`
-						Annex.Content.getViaTmp rsp verify key
-						(\dest -> copier object dest p' (liftIO checksuccessio))
+				res <- Annex.Content.getViaTmp rsp verify key $ \dest ->
+					metered (Just (combineMeterUpdate meterupdate p)) key $ \_ p' ->
+						copier object dest p' (liftIO checksuccessio)
+				Annex.Content.saveState True
+				return res
 			)
 	copyremotefallback p = Annex.Content.sendAnnex key noop $ \object -> do
 		-- This is too broad really, but recvkey normally
@@ -750,7 +765,7 @@ rsyncOrCopyFile st rsyncparams src dest p =
 	dorsync = do
 		-- dest may already exist, so make sure rsync can write to it
 		void $ liftIO $ tryIO $ allowWrite dest
-		oh <- mkOutputHandler
+		oh <- mkOutputHandlerQuiet
 		Ssh.rsyncHelper oh (Just p) $
 			rsyncparams ++ [File src, File dest]
 	docopycow = docopywith copyCoW
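Both copy paths above now route the transfer-level callback and the caller's callback through one metered wrapper. A minimal sketch of combining two progress callbacks, assuming MeterUpdate is a bytes-so-far callback as in git-annex's Utility.Metered:

-- Assumed to mirror Utility.Metered: a MeterUpdate is called with
-- the total number of bytes processed so far.
type BytesProcessed = Integer
type MeterUpdate = BytesProcessed -> IO ()

-- Feed each progress reading to both callbacks, as
-- combineMeterUpdate does in the hunks above.
combineMeterUpdate :: MeterUpdate -> MeterUpdate -> MeterUpdate
combineMeterUpdate a b = \n -> a n >> b n

main :: IO ()
main = do
    let logmeter tag n = putStrLn (tag ++ ": " ++ show n)
    let m = combineMeterUpdate (logmeter "transfer") (logmeter "caller")
    mapM_ m [0, 1024, 4096]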
@@ -5,7 +5,7 @@
  - Licensed under the GNU AGPL version 3 or higher.
  -}

-module Remote.GitLFS (remote, gen) where
+module Remote.GitLFS (remote, gen, configKnownUrl) where

 import Annex.Common
 import Types.Remote
@@ -13,9 +13,11 @@ import Annex.Url
 import Types.Key
 import Types.Creds
 import qualified Annex
+import qualified Annex.SpecialRemote.Config
 import qualified Git
 import qualified Git.Types as Git
 import qualified Git.Url
+import qualified Git.Remote
 import qualified Git.GCrypt
 import qualified Git.Credential as Git
 import Config
@@ -31,8 +33,10 @@ import Crypto
 import Backend.Hash
 import Utility.Hash
 import Utility.SshHost
+import Logs.Remote
 import Logs.RemoteState
 import qualified Utility.GitLFS as LFS
+import qualified Git.Config

 import Control.Concurrent.STM
 import Data.String
@@ -145,21 +149,46 @@ mySetup _ mu _ c gc = do
 		, "likely insecure configuration.)"
 		]

-	-- The url is not stored in the remote log, because the same
-	-- git-lfs repo can be accessed using different urls by different
-	-- people (eg over ssh or http).
-	--
-	-- Instead, set up remote.name.url to point to the repo,
+	-- Set up remote.name.url to point to the repo,
 	-- (so it's also usable by git as a non-special remote),
-	-- and set remote.name.git-lfs = true
-	let c'' = M.delete "url" c'
-	gitConfigSpecialRemote u c'' [("git-lfs", "true")]
+	-- and set remote.name.annex-git-lfs = true
+	gitConfigSpecialRemote u c' [("git-lfs", "true")]
 	setConfig (ConfigKey ("remote." ++ getRemoteName c ++ ".url")) url
-	return (c'', u)
+	return (c', u)
   where
 	url = fromMaybe (giveup "Specify url=") (M.lookup "url" c)
 	remotename = fromJust (lookupName c)

+{- Check if a remote's url is one known to belong to a git-lfs repository.
+ - If so, set the necessary configuration to enable using the remote
+ - with git-lfs. -}
+configKnownUrl :: Git.Repo -> Annex (Maybe Git.Repo)
+configKnownUrl r
+	| Git.repoIsUrl r = do
+		l <- readRemoteLog
+		g <- Annex.gitRepo
+		case Annex.SpecialRemote.Config.findByRemoteConfig (match g) l of
+			((u, _, mcu):[]) -> Just <$> go u mcu
+			_ -> return Nothing
+	| otherwise = return Nothing
+  where
+	match g c = fromMaybe False $ do
+		t <- M.lookup Annex.SpecialRemote.Config.typeField c
+		u <- M.lookup "url" c
+		let u' = Git.Remote.parseRemoteLocation u g
+		return $ Git.Remote.RemoteUrl (Git.repoLocation r) == u'
+			&& t == typename remote
+	go u mcu = do
+		r' <- set "uuid" (fromUUID u) =<< set "git-lfs" "true" r
+		case mcu of
+			Just (Annex.SpecialRemote.Config.ConfigFrom cu) ->
+				set "config-uuid" (fromUUID cu) r'
+			Nothing -> return r'
+	set k v r' = do
+		let ck@(ConfigKey k') = remoteConfig r' k
+		setConfig ck v
+		return $ Git.Config.store' k' v r'

 data LFSHandle = LFSHandle
 	{ downloadEndpoint :: Maybe LFS.Endpoint
 	, uploadEndpoint :: Maybe LFS.Endpoint
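configKnownUrl scans the remote log for exactly one git-lfs special remote whose recorded url matches this git remote, then caches its UUID in git config. A sketch of the find-by-config-predicate step, with hypothetical stand-ins for the remote log types (the real findByRemoteConfig lives in Annex.SpecialRemote.Config):

import qualified Data.Map as M
import Data.Maybe (fromMaybe)

type RemoteConfig = M.Map String String
type UUID = String

-- Hypothetical stand-in: keep the remote-log entries whose stored
-- config satisfies the predicate.
findByRemoteConfig :: (RemoteConfig -> Bool) -> [(UUID, RemoteConfig)] -> [(UUID, RemoteConfig)]
findByRemoteConfig match = filter (match . snd)

-- The match used above: same remote type and same url.
isLfsAt :: String -> RemoteConfig -> Bool
isLfsAt loc c = fromMaybe False $ do
    t <- M.lookup "type" c
    u <- M.lookup "url" c
    return (t == "git-lfs" && u == loc)

main :: IO ()
main = print $ findByRemoteConfig (isLfsAt "https://example.com/repo")
    [ ("uuid-1", M.fromList [("type","git-lfs"),("url","https://example.com/repo")])
    , ("uuid-2", M.fromList [("type","S3")])
    ]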
@@ -83,7 +83,7 @@ git_annex_shell cs r command params fields
 onRemote
 	:: ConsumeStdin
 	-> Git.Repo
-	-> (FilePath -> [CommandParam] -> IO a, Annex a)
+	-> (FilePath -> [CommandParam] -> Annex a, Annex a)
 	-> String
 	-> [CommandParam]
 	-> [(Field, String)]
@@ -91,7 +91,7 @@ onRemote
 onRemote cs r (with, errorval) command params fields = do
 	s <- git_annex_shell cs r command params fields
 	case s of
-		Just (c, ps) -> liftIO $ with c ps
+		Just (c, ps) -> with c ps
 		Nothing -> errorval

 {- Checks if a remote contains a key. -}
@@ -100,14 +100,14 @@ inAnnex r k = do
 	showChecking r
 	onRemote NoConsumeStdin r (runcheck, cantCheck r) "inannex" [Param $ serializeKey k] []
   where
-	runcheck c p = dispatch =<< safeSystem c p
+	runcheck c p = liftIO $ dispatch =<< safeSystem c p
 	dispatch ExitSuccess = return True
 	dispatch (ExitFailure 1) = return False
 	dispatch _ = cantCheck r

 {- Removes a key from a remote. -}
 dropKey :: Git.Repo -> Key -> Annex Bool
-dropKey r key = onRemote NoConsumeStdin r (boolSystem, return False) "dropkey"
+dropKey r key = onRemote NoConsumeStdin r (\f p -> liftIO (boolSystem f p), return False) "dropkey"
 	[ Param "--quiet", Param "--force"
 	, Param $ serializeKey key
 	]
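The callback passed to onRemote changes from IO a to Annex a: IO-only callers wrap with liftIO at the call site (as boolSystem and safeSystem now are), while callers like pipedconfig can emit warnings through Annex. A self-contained sketch of the same widening, with a ReaderT standing in for the Annex monad (an assumption; the real Annex is a richer stack):

import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Reader (ReaderT, runReaderT, asks)

-- Stand-in for the Annex monad: IO plus some environment.
type Annex = ReaderT String IO

-- Before: the callback had to be pure IO.
onRemoteIO :: (FilePath -> IO a) -> FilePath -> Annex a
onRemoteIO with c = liftIO (with c)

-- After: the callback runs in Annex, so it can use warning etc.;
-- IO-only callers just wrap their callback with liftIO.
onRemote :: (FilePath -> Annex a) -> FilePath -> Annex a
onRemote with c = with c

warning :: String -> Annex ()
warning msg = do
    who <- asks id
    liftIO $ putStrLn (who ++ ": " ++ msg)

main :: IO ()
main = runReaderT (onRemote (\c -> warning ("checking " ++ c)) "git-annex-shell") "annex"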
@@ -128,4 +128,8 @@ updateRemote remote = do
 {- Checks if a remote is syncable using git. -}
 gitSyncableRemote :: Remote -> Bool
 gitSyncableRemote r = remotetype r `elem`
-	[ Remote.Git.remote, Remote.GCrypt.remote, Remote.P2P.remote ]
+	[ Remote.Git.remote
+	, Remote.GCrypt.remote
+	, Remote.P2P.remote
+	, Remote.GitLFS.remote
+	]
--- a/Remote/S3.hs
+++ b/Remote/S3.hs
@@ -58,11 +58,10 @@ import Logs.Web
 import Logs.MetaData
 import Types.MetaData
 import Utility.Metered
-import qualified Annex.Url as Url
 import Utility.DataUnits
 import Annex.Content
-import Annex.Url (getUrlOptions, withUrlOptions)
-import Utility.Url (checkBoth, UrlOptions(..))
+import qualified Annex.Url as Url
+import Annex.Url (getUrlOptions, withUrlOptions, UrlOptions(..))
 import Utility.Env

 type BucketName = String
@@ -348,7 +347,7 @@ checkKey hv r rs c info k = withS3Handle hv $ \case
 	Right us -> do
 		showChecking r
 		let check u = withUrlOptions $
-			liftIO . checkBoth u (keySize k)
+			Url.checkBoth u (keySize k)
 		anyM check us

 checkKeyHelper :: S3Info -> S3Handle -> (Either S3.Object S3VersionID) -> Annex Bool
@@ -397,7 +396,7 @@ retrieveExportS3 hv r info _k loc f p =
 			warning $ needS3Creds (uuid r)
 			return False
 		Just geturl -> Url.withUrlOptions $
-			liftIO . Url.download p (geturl exportloc) f
+			Url.download p (geturl exportloc) f
 	exportloc = bucketExportLocation info loc

 removeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> Annex Bool
@@ -417,8 +416,8 @@ checkPresentExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation
 checkPresentExportS3 hv r info k loc = withS3Handle hv $ \case
 	Just h -> checkKeyHelper info h (Left (T.pack $ bucketExportLocation info loc))
 	Nothing -> case getPublicUrlMaker info of
-		Just geturl -> withUrlOptions $ liftIO .
-			checkBoth (geturl $ bucketExportLocation info loc) (keySize k)
+		Just geturl -> withUrlOptions $
+			Url.checkBoth (geturl $ bucketExportLocation info loc) (keySize k)
 		Nothing -> do
 			warning $ needS3Creds (uuid r)
 			giveup "No S3 credentials configured"
@@ -116,9 +116,8 @@ checkKey' key us = firsthit us (Right False) $ \u -> do
 	showChecking u'
 	case downloader of
 		YoutubeDownloader -> youtubeDlCheck u'
-		_ -> do
-			Url.withUrlOptions $ liftIO . catchMsgIO .
-				Url.checkBoth u' (keySize key)
+		_ -> catchMsgIO $
+			Url.withUrlOptions $ Url.checkBoth u' (keySize key)
   where
 	firsthit [] miss _ = return miss
 	firsthit (u:rest) _ a = do
@@ -170,5 +170,5 @@ updateTransportHandle :: TransportHandle -> IO TransportHandle
 updateTransportHandle h@(TransportHandle _g annexstate) = do
 	g' <- liftAnnex h $ do
 		reloadConfig
-		Annex.fromRepo id
+		Annex.gitRepo
 	return (TransportHandle (LocalRepo g') annexstate)

@@ -69,7 +69,7 @@ server ichan th@(TransportHandle (LocalRepo r) _) = go
 			)
 		unless ok $ do
 			hClose conn
-			warningIO "dropped Tor connection, too busy"
+			liftAnnex th $ warning "dropped Tor connection, too busy"

 	handlecontrol servicerunning = do
 		msg <- atomically $ readTChan ichan
--- a/Test.hs
+++ b/Test.hs
@@ -84,6 +84,7 @@ import qualified Utility.Base64
 import qualified Utility.Tmp.Dir
 import qualified Utility.FileSystemEncoding
 import qualified Utility.Aeson
+import qualified Utility.CopyFile
 #ifndef mingw32_HOST_OS
 import qualified Remote.Helper.Encryptable
 import qualified Types.Crypto
@@ -248,6 +249,7 @@ unitTests note = testGroup ("Unit Tests " ++ note)
 	, testCase "info" test_info
 	, testCase "version" test_version
 	, testCase "sync" test_sync
+	, testCase "concurrent get of dup key regression" test_concurrent_get_of_dup_key_regression
 	, testCase "union merge regression" test_union_merge_regression
 	, testCase "adjusted branch merge regression" test_adjusted_branch_merge_regression
 	, testCase "adjusted branch subtree regression" test_adjusted_branch_subtree_regression
@@ -951,6 +953,31 @@ test_sync = intmpclonerepo $ do
 	git_annex "sync" ["--content"] @? "sync failed"
 	git_annex_expectoutput "find" ["--in", "."] []

+{- Regression test for the concurrency bug fixed in
+ - 667d38a8f11c1ee8f256cdbd80e225c2bae06595 -}
+test_concurrent_get_of_dup_key_regression :: Assertion
+test_concurrent_get_of_dup_key_regression = intmpclonerepo $ do
+	makedup dupfile
+	-- This was sufficient currency to trigger the bug.
+	git_annex "get" ["-J1", annexedfile, dupfile]
+		@? "concurrent get -J1 with dup failed"
+	git_annex "drop" ["-J1"]
+		@? "drop with dup failed"
+	-- With -J2, one more dup file was needed to trigger the bug.
+	makedup dupfile2
+	git_annex "get" ["-J2", annexedfile, dupfile, dupfile2]
+		@? "concurrent get -J2 with dup failed"
+	git_annex "drop" ["-J2"]
+		@? "drop with dup failed"
+  where
+	dupfile = annexedfile ++ "2"
+	dupfile2 = annexedfile ++ "3"
+	makedup f = do
+		Utility.CopyFile.copyFileExternal Utility.CopyFile.CopyAllMetaData annexedfile f
+			@? "copying annexed file failed"
+		boolSystem "git" [Param "add", File f]
+			@? "git add failed"
+
 {- Regression test for union merge bug fixed in
  - 0214e0fb175a608a49b812d81b4632c081f63027 -}
 test_union_merge_regression :: Assertion
@@ -1,6 +1,6 @@
 {- git-annex configuration
  -
- - Copyright 2012-2015 Joey Hess <id@joeyh.name>
+ - Copyright 2012-2019 Joey Hess <id@joeyh.name>
  -
  - Licensed under the GNU AGPL version 3 or higher.
  -}
@@ -21,6 +21,7 @@ import qualified Git.Config
 import qualified Git.Construct
 import Git.Types
 import Git.ConfigTypes
+import Git.Branch (CommitMode(..))
 import Utility.DataUnits
 import Config.Cost
 import Types.UUID
@@ -105,6 +106,7 @@ data GitConfig = GitConfig
 	, annexJobs :: Concurrency
 	, annexCacheCreds :: Bool
 	, annexAutoUpgradeRepository :: Bool
+	, annexCommitMode :: CommitMode
 	, coreSymlinks :: Bool
 	, coreSharedRepository :: SharedRepository
 	, receiveDenyCurrentBranch :: DenyCurrentBranch
@@ -186,6 +188,9 @@ extractGitConfig r = GitConfig
 		parseConcurrency =<< getmaybe (annex "jobs")
 	, annexCacheCreds = getbool (annex "cachecreds") True
 	, annexAutoUpgradeRepository = getbool (annex "autoupgraderepository") True
+	, annexCommitMode = if getbool (annex "allowsign") False
+		then ManualCommit
+		else AutomaticCommit
 	, coreSymlinks = getbool "core.symlinks" True
 	, coreSharedRepository = getSharedRepository r
 	, receiveDenyCurrentBranch = getDenyCurrentBranch r
@@ -20,7 +20,14 @@ data WorkerPool t = WorkerPool
 	-- but there can temporarily be fewer values, when a thread is
 	-- changing between stages.
 	}
-	deriving (Show)
+
+instance Show (WorkerPool t) where
+	show p = unwords
+		[ "WorkerPool"
+		, show (usedStages p)
+		, show (workerList p)
+		, show (length (spareVals p))
+		]

 -- | A worker can either be idle or running an Async action.
 -- And it is used for some stage.
@@ -33,7 +40,12 @@ instance Show (Worker t) where
 	show (ActiveWorker _ s) = "ActiveWorker " ++ show s

 data WorkerStage
-	= PerformStage
+	= StartStage
+	-- ^ All threads start in this stage, and then transition away from
+	-- it to the initialStage when they begin doing work. This should
+	-- never be included in UsedStages, because transition from some
+	-- other stage back to this one could result in a deadlock.
+	| PerformStage
 	-- ^ Running a CommandPerform action.
 	| CleanupStage
 	-- ^ Running a CommandCleanup action.
@@ -95,12 +107,13 @@ workerAsync (ActiveWorker aid _) = Just aid
 allocateWorkerPool :: t -> Int -> UsedStages -> WorkerPool t
 allocateWorkerPool t n u = WorkerPool
 	{ usedStages = u
-	, workerList = take totalthreads $ map IdleWorker stages
+	, workerList = map IdleWorker $
+		take totalthreads $ concat $ repeat stages
 	, spareVals = replicate totalthreads t
 	}
   where
-	stages = concat $ repeat $ S.toList $ stageSet u
-	totalthreads = n * S.size (stageSet u)
+	stages = StartStage : S.toList (stageSet u)
+	totalthreads = n * length stages

 addWorkerPool :: Worker t -> WorkerPool t -> WorkerPool t
 addWorkerPool w pool = pool { workerList = w : workerList pool }
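allocateWorkerPool now seeds one idle worker per stage, cycling through the stage list until n workers exist per stage, and the stage list gains StartStage. A sketch that evaluates the same expression on its own:

-- Mirrors the allocation above: one idle worker per stage,
-- cycled until every stage has n workers.
data WorkerStage = StartStage | PerformStage | CleanupStage
    deriving (Show, Eq)

allocate :: Int -> [WorkerStage] -> [WorkerStage]
allocate n stages = take (n * length stages) $ concat $ repeat stages

main :: IO ()
main = mapM_ print $ allocate 2 [StartStage, PerformStage, CleanupStage]
-- prints the six stages in round-robin order:
-- StartStage, PerformStage, CleanupStage, StartStage, PerformStage, CleanupStage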
@@ -7,7 +7,9 @@
  - License: BSD-2-clause
  -}

-module Utility.Android where
+module Utility.Android (
+	osAndroid
+) where

 #ifdef linux_HOST_OS
 import Common

@@ -5,7 +5,9 @@
  - License: BSD-2-clause
  -}

-module Utility.Applicative where
+module Utility.Applicative (
+	(<$$>),
+) where

 {- Like <$> , but supports one level of currying.
  -
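This hunk and the ones that follow are a single sweep: each bare "module X where" becomes an explicit export list, so the module's public surface is documented and GHC can flag unused internals. The shape of the change, sketched on a hypothetical module (Utility.Example is not part of git-annex):

-- Before: everything is exported implicitly.
-- module Utility.Example where

-- After: the public surface is explicit; anything else becomes
-- a warnable internal.
module Utility.Example (
    publicThing,
    PublicType(..),
) where

data PublicType = PublicType Int

publicThing :: PublicType -> Int
publicThing (PublicType n) = helper n

-- not exported: only reachable through publicThing
helper :: Int -> Int
helper = (+ 1)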
@@ -7,7 +7,14 @@

 {-# LANGUAGE CPP #-}

-module Utility.Batch where
+module Utility.Batch (
+	batch,
+	BatchCommandMaker,
+	getBatchCommandMaker,
+	toBatchCommand,
+	batchCommand,
+	batchCommandEnv,
+) where

 import Common

@@ -7,7 +7,13 @@

 {-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}

-module Utility.DBus where
+module Utility.DBus (
+	ServiceName,
+	listServiceNames,
+	callDBus,
+	runClient,
+	persistentClient,
+) where

 import Utility.PartialPrelude
 import Utility.Exception

@@ -7,7 +7,12 @@

 {-# LANGUAGE CPP #-}

-module Utility.Daemon where
+module Utility.Daemon (
+	daemonize,
+	foreground,
+	checkDaemon,
+	stopDaemon,
+) where

 import Common
 import Utility.PID

@@ -7,7 +7,10 @@

 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.Data where
+module Utility.Data (
+	firstJust,
+	eitherToMaybe,
+) where

 {- First item in the list that is not Nothing. -}
 firstJust :: Eq a => [Maybe a] -> Maybe a

@@ -8,7 +8,7 @@
 {-# LANGUAGE CPP #-}
 {-# LANGUAGE ScopedTypeVariables #-}

-module Utility.DebugLocks where
+module Utility.DebugLocks (debugLocks) where

 import Control.Monad.Catch
 import Control.Monad.IO.Class

@@ -11,7 +11,15 @@

 {-# LANGUAGE CPP #-}

-module Utility.DirWatcher where
+module Utility.DirWatcher (
+	canWatch,
+	eventsCoalesce,
+	closingTracked,
+	modifyTracked,
+	DirWatcherHandle,
+	watchDir,
+	stopWatchDir,
+) where

 import Utility.DirWatcher.Types

@@ -5,7 +5,7 @@
  - License: BSD-2-clause
  -}

-module Utility.DirWatcher.FSEvents where
+module Utility.DirWatcher.FSEvents (watchDir) where

 import Common hiding (isDirectory)
 import Utility.DirWatcher.Types

@@ -5,7 +5,7 @@
  - License: BSD-2-clause
  -}

-module Utility.DirWatcher.INotify where
+module Utility.DirWatcher.INotify (watchDir) where

 import Common hiding (isDirectory)
 import Utility.ThreadLock

@@ -5,7 +5,11 @@
  - License: BSD-2-clause
  -}

-module Utility.DirWatcher.Types where
+module Utility.DirWatcher.Types (
+	Hook,
+	WatchHooks(..),
+	mkWatchHooks,
+) where

 import Common

@@ -5,7 +5,7 @@
  - License: BSD-2-clause
  -}

-module Utility.DirWatcher.Win32Notify where
+module Utility.DirWatcher.Win32Notify (watchDir) where

 import Common hiding (isDirectory)
 import Utility.DirWatcher.Types

@@ -9,11 +9,16 @@
 {-# LANGUAGE LambdaCase #-}
 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.Directory.Stream where
+module Utility.Directory.Stream (
+	DirectoryHandle,
+	openDirectory,
+	closeDirectory,
+	readDirectory,
+	isDirectoryEmpty,
+) where

 import Control.Monad
 import System.FilePath
-import System.IO.Unsafe (unsafeInterleaveIO)
 import Control.Concurrent
 import Data.Maybe
 import Prelude
@@ -100,22 +105,6 @@ readDirectory hdl@(DirectoryHandle _ h fdat mv) = do
 	return (Just filename)
 #endif

--- | Like getDirectoryContents, but rather than buffering the whole
--- directory content in memory, lazily streams.
---
--- This is like lazy readFile in that the handle to the directory remains
--- open until the whole list is consumed, or until the list is garbage
--- collected. So use with caution particularly when traversing directory
--- trees.
-streamDirectoryContents :: FilePath -> IO [FilePath]
-streamDirectoryContents d = openDirectory d >>= collect
-	where
-	collect hdl = readDirectory hdl >>= \case
-		Nothing -> return []
-		Just f -> do
-			rest <- unsafeInterleaveIO (collect hdl)
-			return (f:rest)
-
 -- | True only when directory exists and contains nothing.
 -- Throws exception if directory does not exist.
 isDirectoryEmpty :: FilePath -> IO Bool
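With streamDirectoryContents (and its unsafeInterleaveIO lazy IO) removed and the export list limiting the module to handle-based access, callers iterate the handle explicitly. A sketch of a strict traversal, assuming the retained exports have the signatures their use above suggests (readDirectory returning Maybe FilePath until the directory is exhausted); production code would also bracket the handle against exceptions:

{-# LANGUAGE LambdaCase #-}
import Utility.Directory.Stream (openDirectory, readDirectory, closeDirectory)

-- Strict, handle-scoped traversal replacing the removed lazy stream.
foldDirectory :: FilePath -> (a -> FilePath -> IO a) -> a -> IO a
foldDirectory dir step z = do
    hdl <- openDirectory dir
    let go acc = readDirectory hdl >>= \case
            Nothing -> return acc
            Just f -> step acc f >>= go
    r <- go z
    closeDirectory hdl
    return r

-- Example: count entries without holding the whole listing in memory.
countEntries :: FilePath -> IO Int
countEntries dir = foldDirectory dir (\n _ -> return (n + 1)) 0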
@ -1,11 +1,23 @@
|
||||||
{- a simple graphviz / dot(1) digraph description generator library
|
{- a simple graphviz / dot(1) digraph description generator library
|
||||||
|
-
|
||||||
|
- import qualified
|
||||||
-
|
-
|
||||||
- Copyright 2010 Joey Hess <id@joeyh.name>
|
- Copyright 2010 Joey Hess <id@joeyh.name>
|
||||||
-
|
-
|
||||||
- License: BSD-2-clause
|
- License: BSD-2-clause
|
||||||
-}
|
-}
|
||||||
|
|
||||||
module Utility.Dot where -- import qualified
|
module Utility.Dot (
|
||||||
|
graph,
|
||||||
|
graphNode,
|
||||||
|
graphEdge,
|
||||||
|
label,
|
||||||
|
attr,
|
||||||
|
fillColor,
|
||||||
|
subGraph,
|
||||||
|
indent,
|
||||||
|
quote,
|
||||||
|
) where
|
||||||
|
|
||||||
{- generates a graph description from a list of lines -}
|
{- generates a graph description from a list of lines -}
|
||||||
graph :: [String] -> String
|
graph :: [String] -> String
|
||||||
|
|
|
@ -7,7 +7,11 @@
|
||||||
|
|
||||||
{-# OPTIONS_GHC -fno-warn-tabs #-}
|
{-# OPTIONS_GHC -fno-warn-tabs #-}
|
||||||
|
|
||||||
module Utility.DottedVersion where
|
module Utility.DottedVersion (
|
||||||
|
DottedVersion,
|
||||||
|
fromDottedVersion,
|
||||||
|
normalize,
|
||||||
|
) where
|
||||||
|
|
||||||
import Common
|
import Common
|
||||||
|
|
||||||
|
|
|
@ -8,7 +8,14 @@
|
||||||
{-# LANGUAGE CPP #-}
|
{-# LANGUAGE CPP #-}
|
||||||
{-# OPTIONS_GHC -fno-warn-tabs #-}
|
{-# OPTIONS_GHC -fno-warn-tabs #-}
|
||||||
|
|
||||||
module Utility.Env where
|
module Utility.Env (
|
||||||
|
getEnv,
|
||||||
|
getEnvDefault,
|
||||||
|
getEnvironment,
|
||||||
|
addEntry,
|
||||||
|
addEntries,
|
||||||
|
delEntry,
|
||||||
|
) where
|
||||||
|
|
||||||
#ifdef mingw32_HOST_OS
|
#ifdef mingw32_HOST_OS
|
||||||
import Utility.Exception
|
import Utility.Exception
|
||||||
|
|
|
@ -7,7 +7,10 @@
|
||||||
|
|
||||||
{-# OPTIONS_GHC -fno-warn-tabs #-}
|
{-# OPTIONS_GHC -fno-warn-tabs #-}
|
||||||
|
|
||||||
module Utility.Env.Basic where
|
module Utility.Env.Basic (
|
||||||
|
getEnv,
|
||||||
|
getEnvDefault,
|
||||||
|
) where
|
||||||
|
|
||||||
import Utility.Exception
|
import Utility.Exception
|
||||||
import Control.Applicative
|
import Control.Applicative
|
||||||
|
|
|
@ -7,7 +7,10 @@
|
||||||
|
|
||||||
{-# LANGUAGE CPP #-}
|
{-# LANGUAGE CPP #-}
|
||||||
|
|
||||||
module Utility.Env.Set where
|
module Utility.Env.Set (
|
||||||
|
setEnv,
|
||||||
|
unsetEnv,
|
||||||
|
) where
|
||||||
|
|
||||||
#ifdef mingw32_HOST_OS
|
#ifdef mingw32_HOST_OS
|
||||||
import qualified System.SetEnv
|
import qualified System.SetEnv
|
||||||
|
|
|
@ -5,7 +5,11 @@
|
||||||
|
|
||||||
{-# LANGUAGE CPP #-}
|
{-# LANGUAGE CPP #-}
|
||||||
|
|
||||||
module Utility.FileSize where
|
module Utility.FileSize (
|
||||||
|
FileSize,
|
||||||
|
getFileSize,
|
||||||
|
getFileSize',
|
||||||
|
) where
|
||||||
|
|
||||||
import System.PosixCompat.Files
|
import System.PosixCompat.Files
|
||||||
#ifdef mingw32_HOST_OS
|
#ifdef mingw32_HOST_OS
|
||||||
|
|
|
Utility/Gpg.hs
@@ -7,7 +7,32 @@

 {-# LANGUAGE CPP #-}

-module Utility.Gpg where
+module Utility.Gpg (
+	KeyId,
+	KeyIds(..),
+	GpgCmd(..),
+	mkGpgCmd,
+	boolGpgCmd,
+	pkEncTo,
+	stdEncryptionParams,
+	pipeStrict,
+	feedRead,
+	pipeLazy,
+	findPubKeys,
+	UserId,
+	secretKeys,
+	KeyType(..),
+	maxRecommendedKeySize,
+	genSecretKey,
+	genRandom,
+	testKeyId,
+#ifndef mingw32_HOST_OS
+	testHarness,
+	testTestHarness,
+	checkEncryptionFile,
+	checkEncryptionStream,
+#endif
+) where

 import Common
 import qualified BuildInfo
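A note on the export list above: CPP conditionals are legal inside a Haskell export list, which is how the POSIX-only test helpers stay unexported on Windows builds. A minimal self-contained sketch of the mechanism (the module and names are illustrative, not git-annex code):

```haskell
{-# LANGUAGE CPP #-}

module Example (
	alwaysAvailable,
#ifndef mingw32_HOST_OS
	posixOnly,
#endif
) where

alwaysAvailable :: String
alwaysAvailable = "exported everywhere"

#ifndef mingw32_HOST_OS
-- Only compiled and exported on non-Windows platforms.
posixOnly :: String
posixOnly = "not available on Windows"
#endif
```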
@@ -279,6 +304,7 @@ genRandom cmd highQuality size = checksize <$> readStrict cmd params
  - It has an empty passphrase. -}
 testKeyId :: String
 testKeyId = "129D6E0AC537B9C7"
+
 testKey :: String
 testKey = keyBlock True
 	[ "mI0ETvFAZgEEAKnqwWgZqznMhi1RQExem2H8t3OyKDxaNN3rBN8T6LWGGqAYV4wT"
@@ -299,6 +325,7 @@ testKey = keyBlock True
 	, "+gQkDF9/"
 	, "=1k11"
 	]
+
 testSecretKey :: String
 testSecretKey = keyBlock False
 	[ "lQHYBE7xQGYBBACp6sFoGas5zIYtUUBMXpth/Ldzsig8WjTd6wTfE+i1hhqgGFeM"
@@ -332,6 +359,7 @@ testSecretKey = keyBlock False
 	, "IJf+/dFjxEmflWpbxw/36pEd/EReLX8b8qDIYadK6BpiWN9xgEiBv/oEJAxffw=="
 	, "=LDsg"
 	]
+
 keyBlock :: Bool -> [String] -> String
 keyBlock public ls = unlines
 	[ "-----BEGIN PGP "++t++" KEY BLOCK-----"
@@ -381,9 +409,7 @@ testTestHarness :: FilePath -> GpgCmd -> IO Bool
 testTestHarness tmpdir cmd = do
 	keys <- testHarness tmpdir cmd $ findPubKeys cmd testKeyId
 	return $ KeyIds [testKeyId] == keys
-#endif

-#ifndef mingw32_HOST_OS
 checkEncryptionFile :: GpgCmd -> FilePath -> Maybe KeyIds -> IO Bool
 checkEncryptionFile cmd filename keys =
 	checkGpgPackets cmd keys =<< readStrict cmd params
Utility/HtmlDetect.hs
@@ -5,7 +5,11 @@
  - License: BSD-2-clause
  -}

-module Utility.HtmlDetect where
+module Utility.HtmlDetect (
+	isHtml,
+	isHtmlBs,
+	htmlPrefixLength,
+) where

 import Text.HTML.TagSoup
 import Data.Char
Utility/HumanNumber.hs
@@ -5,7 +5,7 @@
  - License: BSD-2-clause
  -}

-module Utility.HumanNumber where
+module Utility.HumanNumber (showImprecise) where

 {- Displays a fractional value as a string with a limited number
  - of decimal digits. -}
Utility/IPAddress.hs
@@ -5,7 +5,12 @@
  - License: BSD-2-clause
  -}

-module Utility.IPAddress where
+module Utility.IPAddress (
+	extractIPAddress,
+	isLoopbackAddress,
+	isPrivateAddress,
+	makeAddressMatcher,
+) where

 import Utility.Exception
Utility/LinuxMkLibs.hs
@@ -5,7 +5,11 @@
  - License: BSD-2-clause
  -}

-module Utility.LinuxMkLibs where
+module Utility.LinuxMkLibs (
+	installLib,
+	parseLdd,
+	glibcLibs,
+) where

 import Utility.PartialPrelude
 import Utility.Directory
Utility/LockFile/LockStatus.hs
@@ -5,7 +5,7 @@
  - License: BSD-2-clause
  -}

-module Utility.LockFile.LockStatus where
+module Utility.LockFile.LockStatus (LockStatus(..)) where

 import System.Posix
Utility/LogFile.hs
@@ -7,7 +7,15 @@

 {-# LANGUAGE CPP #-}

-module Utility.LogFile where
+module Utility.LogFile (
+	openLog,
+	listLogs,
+	maxLogs,
+#ifndef mingw32_HOST_OS
+	redirLog,
+	redir,
+#endif
+) where

 import Common
Utility/Lsof.hs
@@ -5,7 +5,12 @@
  - License: BSD-2-clause
  -}

-module Utility.Lsof where
+module Utility.Lsof (
+	LsofOpenMode(..),
+	setup,
+	queryDir,
+	query,
+) where

 import Common
 import BuildInfo
Utility/Metered.hs
@@ -7,7 +7,40 @@

 {-# LANGUAGE TypeSynonymInstances, BangPatterns #-}

-module Utility.Metered where
+module Utility.Metered (
+	MeterUpdate,
+	nullMeterUpdate,
+	combineMeterUpdate,
+	BytesProcessed(..),
+	toBytesProcessed,
+	fromBytesProcessed,
+	addBytesProcessed,
+	zeroBytesProcessed,
+	withMeteredFile,
+	meteredWrite,
+	meteredWrite',
+	meteredWriteFile,
+	offsetMeterUpdate,
+	hGetContentsMetered,
+	hGetMetered,
+	defaultChunkSize,
+	watchFileSize,
+	OutputHandler(..),
+	ProgressParser,
+	commandMeter,
+	commandMeter',
+	demeterCommand,
+	demeterCommandEnv,
+	avoidProgress,
+	rateLimitMeterUpdate,
+	Meter,
+	mkMeter,
+	setMeterTotalSize,
+	updateMeter,
+	displayMeterHandle,
+	clearMeterHandle,
+	bandwidthMeter,
+) where

 import Common
 import Utility.Percentage
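For orientation, a hedged sketch of how the exported API above fits together: a MeterUpdate is just a callback fed the running byte count, and withMeteredFile (whose signature appears in the next hunk) invokes it as the file is read. The file path is illustrative:

```haskell
import Utility.Metered
import qualified Data.ByteString.Lazy as L

main :: IO ()
main = do
	-- withMeteredFile reads the file and calls the meter as bytes arrive
	let meter (BytesProcessed n) = putStrLn ("read " ++ show n ++ " bytes so far")
	len <- withMeteredFile "/tmp/example.bin" meter (return . L.length)
	print len
```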
@@ -80,11 +113,6 @@ withMeteredFile :: FilePath -> MeterUpdate -> (L.ByteString -> IO a) -> IO a
 withMeteredFile f meterupdate a = withBinaryFile f ReadMode $ \h ->
 	hGetContentsMetered h meterupdate >>= a

-{- Sends the content of a file to a Handle, updating the meter as it's
- - written. -}
-streamMeteredFile :: FilePath -> MeterUpdate -> Handle -> IO ()
-streamMeteredFile f meterupdate h = withMeteredFile f meterupdate $ L.hPut h
-
 {- Writes a ByteString to a Handle, updating a meter as it's written. -}
 meteredWrite :: MeterUpdate -> Handle -> L.ByteString -> IO ()
 meteredWrite meterupdate h = void . meteredWrite' meterupdate h
Utility/Misc.hs
@@ -7,7 +7,19 @@

 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.Misc where
+module Utility.Misc (
+	hGetContentsStrict,
+	readFileStrict,
+	separate,
+	firstLine,
+	segment,
+	segmentDelim,
+	massReplace,
+	hGetSomeString,
+	exitBool,
+
+	prop_segment_regressionTest,
+) where

 import System.IO
 import Control.Monad
Utility/Monad.hs
@@ -7,7 +7,19 @@

 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.Monad where
+module Utility.Monad (
+	firstM,
+	getM,
+	anyM,
+	allM,
+	untilTrue,
+	ifM,
+	(<||>),
+	(<&&>),
+	observe,
+	after,
+	noop,
+) where

 import Data.Maybe
 import Control.Monad
Utility/Network.hs
@@ -7,7 +7,7 @@

 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.Network where
+module Utility.Network (getHostname) where

 import Utility.Process
 import Utility.Exception
Utility/OSX.hs
@@ -7,7 +7,12 @@

 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.OSX where
+module Utility.OSX (
+	autoStartBase,
+	systemAutoStart,
+	userAutoStart,
+	genOSXAutoStartFile,
+) where

 import Utility.UserInfo
Utility/OptParse.hs
@@ -5,7 +5,10 @@
  - License: BSD-2-clause
  -}

-module Utility.OptParse where
+module Utility.OptParse (
+	invertableSwitch,
+	invertableSwitch',
+) where

 import Options.Applicative
 import Data.Monoid
Utility/PID.hs
@@ -7,7 +7,7 @@

 {-# LANGUAGE CPP #-}

-module Utility.PID where
+module Utility.PID (PID, getPID) where

 #ifndef mingw32_HOST_OS
 import System.Posix.Types (ProcessID)
Utility/Parallel.hs
@@ -5,7 +5,7 @@
  - License: BSD-2-clause
  -}

-module Utility.Parallel where
+module Utility.Parallel (inParallel) where

 import Common
Utility/PartialPrelude.hs
@@ -7,7 +7,18 @@

 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.PartialPrelude where
+module Utility.PartialPrelude (
+	Utility.PartialPrelude.read,
+	Utility.PartialPrelude.head,
+	Utility.PartialPrelude.tail,
+	Utility.PartialPrelude.init,
+	Utility.PartialPrelude.last,
+	Utility.PartialPrelude.readish,
+	Utility.PartialPrelude.headMaybe,
+	Utility.PartialPrelude.lastMaybe,
+	Utility.PartialPrelude.beginning,
+	Utility.PartialPrelude.end,
+) where

 import qualified Data.Maybe
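The qualified names above are how a module exports definitions that shadow Prelude names: the export item is written qualified to disambiguate, but importers still get the bare name. A tiny sketch of the mechanism (the module and function are illustrative):

```haskell
module Example (Example.head) where

-- Prelude's head is implicitly in scope too, so the export item is
-- qualified to say which head is meant; importers of Example simply
-- see a function named head.
head :: [a] -> Maybe a
head (x:_) = Just x
head [] = Nothing
```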
Utility/Path.hs
@@ -8,7 +8,29 @@
 {-# LANGUAGE CPP #-}
 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.Path where
+module Utility.Path (
+	simplifyPath,
+	absPathFrom,
+	parentDir,
+	upFrom,
+	dirContains,
+	absPath,
+	relPathCwdToFile,
+	relPathDirToFile,
+	relPathDirToFileAbs,
+	segmentPaths,
+	runSegmentPaths,
+	relHome,
+	inPath,
+	searchPath,
+	dotfile,
+	sanitizeFilePath,
+	splitShortExtensions,
+
+	prop_upFrom_basics,
+	prop_relPathDirToFile_basics,
+	prop_relPathDirToFile_regressionTest,
+) where

 import System.FilePath
 import Data.List
Utility/Path/Max.hs
@@ -8,7 +8,7 @@
 {-# LANGUAGE CPP #-}
 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.Path.Max where
+module Utility.Path.Max (fileNameLengthLimit) where

 #ifndef mingw32_HOST_OS
 import Utility.Exception
Utility/Process/Transcript.hs
@@ -8,7 +8,11 @@
 {-# LANGUAGE CPP #-}
 {-# OPTIONS_GHC -fno-warn-tabs #-}

-module Utility.Process.Transcript where
+module Utility.Process.Transcript (
+	processTranscript,
+	processTranscript',
+	processTranscript'',
+) where

 import Utility.Process
 import Utility.Misc
Utility/Rsync.hs
@@ -7,7 +7,17 @@

 {-# LANGUAGE CPP #-}

-module Utility.Rsync where
+module Utility.Rsync (
+	rsyncShell,
+	rsyncServerSend,
+	rsyncServerReceive,
+	rsyncUseDestinationPermissions,
+	rsync,
+	rsyncUrlIsShell,
+	rsyncUrlIsPath,
+	rsyncProgress,
+	filterRsyncSafeOptions,
+) where

 import Common
 import Utility.Metered
@@ -161,10 +171,8 @@ filterRsyncSafeOptions = fst3 . getOpt Permute
  - The virtual filesystem contains:
  - /c, /d, ... mount points for Windows drives
  -}
+#ifdef mingw32_HOST_OS
 toMSYS2Path :: FilePath -> FilePath
-#ifndef mingw32_HOST_OS
-toMSYS2Path = id
-#else
 toMSYS2Path p
 	| null drive = recombine parts
 	| otherwise = recombine $ "/" : driveletter drive : parts
Utility/Su.hs
@@ -7,7 +7,15 @@

 {-# LANGUAGE CPP #-}

-module Utility.Su where
+module Utility.Su (
+	WhosePassword(..),
+	PasswordPrompt(..),
+	describePasswordPrompt,
+	describePasswordPrompt',
+	SuCommand,
+	runSuCommand,
+	mkSuCommand,
+) where

 import Common
165
Utility/Url.hs
@@ -138,22 +138,14 @@ curlParams uo ps = ps ++ uaparams ++ headerparams ++ addedparams ++ schemeparams
 		]
 	schemelist = map fromScheme $ S.toList $ allowedSchemes uo

-checkPolicy :: UrlOptions -> URI -> a -> (String -> IO b) -> IO a -> IO a
-checkPolicy uo u onerr displayerror a
+checkPolicy :: UrlOptions -> URI -> IO (Either String a) -> IO (Either String a)
+checkPolicy uo u a
 	| allowedScheme uo u = a
-	| otherwise = do
-		void $ displayerror $
-			"Configuration does not allow accessing " ++ show u
-		return onerr
+	| otherwise = return $ Left $
+		"Configuration does not allow accessing " ++ show u

-unsupportedUrlScheme :: URI -> (String -> IO a) -> IO a
-unsupportedUrlScheme u displayerror =
-	displayerror $ "Unsupported url scheme " ++ show u
-
-warnError :: String -> IO ()
-warnError msg = do
-	hPutStrLn stderr msg
-	hFlush stderr
+unsupportedUrlScheme :: URI -> String
+unsupportedUrlScheme u = "Unsupported url scheme " ++ show u

 allowedScheme :: UrlOptions -> URI -> Bool
 allowedScheme uo u = uscheme `S.member` allowedSchemes uo
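This hunk sets the shape of the whole refactor below: helpers that wrote to stderr and returned a fallback value now return an `Either String` and leave presentation to the caller. A self-contained sketch of the pattern (simplified stand-ins, not the real API):

```haskell
import System.IO

-- Simplified stand-in for checkPolicy: it wraps an action that already
-- returns Either, short-circuiting with a Left instead of printing.
checkPolicy' :: Bool -> IO (Either String a) -> IO (Either String a)
checkPolicy' allowed a
	| allowed = a
	| otherwise = return (Left "Configuration does not allow accessing this url")

main :: IO ()
main = do
	r <- checkPolicy' False (return (Right "url info"))
	-- the caller, not the library, decides where errors are displayed
	either (hPutStrLn stderr) putStrLn r
```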
@@ -161,14 +153,18 @@ allowedScheme uo u = uscheme `S.member` allowedSchemes uo
 	uscheme = mkScheme $ takeWhile (/=':') (uriScheme u)

 {- Checks that an url exists and could be successfully downloaded,
- - also checking that its size, if available, matches a specified size. -}
-checkBoth :: URLString -> Maybe Integer -> UrlOptions -> IO Bool
-checkBoth url expected_size uo = do
-	v <- check url expected_size uo
-	return (fst v && snd v)
+ - also checking that its size, if available, matches a specified size.
+ -
+ - The Left error is returned if policy does not allow accessing the url
+ - or the url scheme is not supported.
+ -}
+checkBoth :: URLString -> Maybe Integer -> UrlOptions -> IO (Either String Bool)
+checkBoth url expected_size uo = fmap go <$> check url expected_size uo
+  where
+	go v = fst v && snd v

-check :: URLString -> Maybe Integer -> UrlOptions -> IO (Bool, Bool)
-check url expected_size uo = go <$> getUrlInfo url uo
+check :: URLString -> Maybe Integer -> UrlOptions -> IO (Either String (Bool, Bool))
+check url expected_size uo = fmap go <$> getUrlInfo url uo
 	where
 	go (UrlInfo False _ _) = (False, False)
 	go (UrlInfo True Nothing _) = (True, True)
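A hypothetical caller of the new `checkBoth` (the url and expected size are made up for illustration): a `Left` now means policy or scheme refused the url, while `Right False` means the check ran and failed:

```haskell
import Utility.Url

checkDemo :: UrlOptions -> IO ()
checkDemo uo = do
	r <- checkBoth "http://example.com/big.iso" (Just 1048576) uo
	case r of
		Left err -> putStrLn ("refused: " ++ err)
		Right True -> putStrLn "url exists and the size matches"
		Right False -> putStrLn "url missing or size mismatch"
```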
@@ -176,8 +172,8 @@ check url expected_size uo = go <$> getUrlInfo url uo
 		Just _ -> (True, expected_size == s)
 		Nothing -> (True, True)

-exists :: URLString -> UrlOptions -> IO Bool
-exists url uo = urlExists <$> getUrlInfo url uo
+exists :: URLString -> UrlOptions -> IO (Either String Bool)
+exists url uo = fmap urlExists <$> getUrlInfo url uo

 data UrlInfo = UrlInfo
 	{ urlExists :: Bool
@@ -190,32 +186,36 @@ assumeUrlExists :: UrlInfo
 assumeUrlExists = UrlInfo True Nothing Nothing

 {- Checks that an url exists and could be successfully downloaded,
- - also returning its size and suggested filename if available. -}
-getUrlInfo :: URLString -> UrlOptions -> IO UrlInfo
+ - also returning its size and suggested filename if available.
+ -
+ - The Left error is returned if policy does not allow accessing the url
+ - or the url scheme is not supported.
+ -}
+getUrlInfo :: URLString -> UrlOptions -> IO (Either String UrlInfo)
 getUrlInfo url uo = case parseURIRelaxed url of
-	Just u -> checkPolicy uo u dne warnError $
-		case (urlDownloader uo, parseUrlRequest (show u)) of
-			(DownloadWithConduit (DownloadWithCurlRestricted r), Just req) -> catchJust
-				-- When http redirects to a protocol which
-				-- conduit does not support, it will throw
-				-- a StatusCodeException with found302
-				-- and a Response with the redir Location.
-				(matchStatusCodeException (== found302))
-				(existsconduit req)
-				(followredir r)
-				`catchNonAsync` (const $ return dne)
-			(DownloadWithConduit (DownloadWithCurlRestricted r), Nothing)
-				| isfileurl u -> existsfile u
-				| isftpurl u -> existscurlrestricted r u url ftpport
-					`catchNonAsync` (const $ return dne)
-				| otherwise -> do
-					unsupportedUrlScheme u warnError
-					return dne
-			(DownloadWithCurl _, _)
-				| isfileurl u -> existsfile u
-				| otherwise -> existscurl u (basecurlparams url)
-	Nothing -> return dne
-  where
+	Just u -> checkPolicy uo u (go u)
+	Nothing -> return (Right dne)
+  where
+	go :: URI -> IO (Either String UrlInfo)
+	go u = case (urlDownloader uo, parseUrlRequest (show u)) of
+		(DownloadWithConduit (DownloadWithCurlRestricted r), Just req) -> catchJust
+			-- When http redirects to a protocol which
+			-- conduit does not support, it will throw
+			-- a StatusCodeException with found302
+			-- and a Response with the redir Location.
+			(matchStatusCodeException (== found302))
+			(Right <$> existsconduit req)
+			(followredir r)
+			`catchNonAsync` (const $ return $ Right dne)
+		(DownloadWithConduit (DownloadWithCurlRestricted r), Nothing)
+			| isfileurl u -> Right <$> existsfile u
+			| isftpurl u -> (Right <$> existscurlrestricted r u url ftpport)
+				`catchNonAsync` (const $ return $ Right dne)
+			| otherwise -> return $ Left $ unsupportedUrlScheme u
+		(DownloadWithCurl _, _)
+			| isfileurl u -> Right <$> existsfile u
+			| otherwise -> Right <$> existscurl u (basecurlparams url)
+
 	dne = UrlInfo False Nothing Nothing
 	found sz f = return $ UrlInfo True sz f
@@ -291,11 +291,11 @@ getUrlInfo url uo = case parseURIRelaxed url of
 			-- http to file redirect would not be secure,
 			-- and http-conduit follows http to http.
 			Just u' | isftpurl u' ->
-				checkPolicy uo u' dne warnError $
+				checkPolicy uo u' $ Right <$>
 					existscurlrestricted r u' url' ftpport
-			_ -> return dne
-		Nothing -> return dne
-	followredir _ _ = return dne
+			_ -> return (Right dne)
+		Nothing -> return (Right dne)
+	followredir _ _ = return (Right dne)

 	-- Parse eg: attachment; filename="fname.ext"
 	-- per RFC 2616
@@ -317,31 +317,32 @@ headRequest r = r

 {- Download a perhaps large file, with auto-resume of incomplete downloads.
  -
- - Displays error message on stderr when download failed.
+ - When the download fails, returns an error message.
  -}
-download :: MeterUpdate -> URLString -> FilePath -> UrlOptions -> IO Bool
+download :: MeterUpdate -> URLString -> FilePath -> UrlOptions -> IO (Either String ())
 download = download' False

-{- Avoids displaying any error message. -}
+{- Avoids displaying any error message, including silencing curl errors. -}
 downloadQuiet :: MeterUpdate -> URLString -> FilePath -> UrlOptions -> IO Bool
-downloadQuiet = download' True
+downloadQuiet meterupdate url file uo = isRight
+	<$> download' True meterupdate url file uo

-download' :: Bool -> MeterUpdate -> URLString -> FilePath -> UrlOptions -> IO Bool
-download' noerror meterupdate url file uo =
+download' :: Bool -> MeterUpdate -> URLString -> FilePath -> UrlOptions -> IO (Either String ())
+download' nocurlerror meterupdate url file uo =
 	catchJust matchHttpException go showhttpexception
 		`catchNonAsync` (dlfailed . show)
   where
 	go = case parseURIRelaxed url of
-		Just u -> checkPolicy uo u False dlfailed $
+		Just u -> checkPolicy uo u $
 			case (urlDownloader uo, parseUrlRequest (show u)) of
 				(DownloadWithConduit (DownloadWithCurlRestricted r), Just req) -> catchJust
 					(matchStatusCodeException (== found302))
-					(downloadConduit meterupdate req file uo >> return True)
+					(downloadConduit meterupdate req file uo >> return (Right ()))
 					(followredir r)
 				(DownloadWithConduit (DownloadWithCurlRestricted r), Nothing)
 					| isfileurl u -> downloadfile u
 					| isftpurl u -> downloadcurlrestricted r u url ftpport
-					| otherwise -> unsupportedUrlScheme u dlfailed
+					| otherwise -> dlfailed $ unsupportedUrlScheme u
 				(DownloadWithCurl _, _)
 					| isfileurl u -> downloadfile u
 					| otherwise -> downloadcurl url basecurlparams
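Callers that used to branch on `download`'s `Bool` now inspect the `Either`; a hedged sketch of the adaptation (the url and destination path are illustrative), using `nullMeterUpdate` from the Utility.Metered export list above:

```haskell
import Utility.Url
import Utility.Metered (nullMeterUpdate)

downloadDemo :: UrlOptions -> IO Bool
downloadDemo uo = do
	r <- download nullMeterUpdate "http://example.com/file.tar" "/tmp/file.tar" uo
	case r of
		Left err -> do
			putStrLn ("download failed: " ++ err)
			return False
		Right () -> return True
```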
@@ -354,27 +355,20 @@ download' noerror meterupdate url file uo =

 	ftpport = 21

-	showhttpexception he = do
-		let msg = case he of
-			HttpExceptionRequest _ (StatusCodeException r _) ->
-				B8.toString $ statusMessage $ responseStatus r
-			HttpExceptionRequest _ (InternalException ie) ->
-				case fromException ie of
-					Nothing -> show ie
-					Just (ConnectionRestricted why) -> why
-			HttpExceptionRequest _ other -> show other
-			_ -> show he
-		dlfailed msg
-
-	dlfailed msg
-		| noerror = return False
-		| otherwise = do
-			hPutStrLn stderr $ "download failed: " ++ msg
-			hFlush stderr
-			return False
+	showhttpexception he = dlfailed $ case he of
+		HttpExceptionRequest _ (StatusCodeException r _) ->
+			B8.toString $ statusMessage $ responseStatus r
+		HttpExceptionRequest _ (InternalException ie) ->
+			case fromException ie of
+				Nothing -> show ie
+				Just (ConnectionRestricted why) -> why
+		HttpExceptionRequest _ other -> show other
+		_ -> show he
+
+	dlfailed msg = return $ Left $ "download failed: " ++ msg

 	basecurlparams = curlParams uo
-		[ if noerror
+		[ if nocurlerror
 			then Param "-S"
 			else Param "-sS"
 		, Param "-f"
@@ -387,7 +381,10 @@ download' noerror meterupdate url file uo =
 		-- if the url happens to be empty, so pre-create.
 		unlessM (doesFileExist file) $
 			writeFile file ""
-		boolSystem "curl" (curlparams ++ [Param "-o", File file, File rawurl])
+		ifM (boolSystem "curl" (curlparams ++ [Param "-o", File file, File rawurl]))
+			( return $ Right ()
+			, return $ Left "download failed"
+			)

 	downloadcurlrestricted r u rawurl defport =
 		downloadcurl rawurl =<< curlRestrictedParams r u defport basecurlparams
@@ -396,7 +393,7 @@ download' noerror meterupdate url file uo =
 		let src = unEscapeString (uriPath u)
 		withMeteredFile src meterupdate $
 			L.writeFile file
-		return True
+		return $ Right ()

 	-- Conduit does not support ftp, so will throw an exception on a
 	-- redirect to a ftp url; fall back to curl.
@@ -404,7 +401,7 @@ download' noerror meterupdate url file uo =
 		case headMaybe $ map decodeBS $ getResponseHeader hLocation resp of
 			Just url' -> case parseURIRelaxed url' of
 				Just u' | isftpurl u' ->
-					checkPolicy uo u' False dlfailed $
+					checkPolicy uo u' $
 						downloadcurlrestricted r u' url' ftpport
 				_ -> throwIO ex
 			Nothing -> throwIO ex
@@ -448,7 +445,7 @@ downloadConduit meterupdate req file uo =
 		liftIO $ debugM "url" (show req'')
 		resp <- http req'' (httpManager uo)
 		if responseStatus resp == partialContent206
-			then store (BytesProcessed sz) AppendMode resp
+			then store (toBytesProcessed sz) AppendMode resp
 			else if responseStatus resp == ok200
 				then store zeroBytesProcessed WriteMode resp
 				else respfailure resp
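On this last hunk: `toBytesProcessed` and `fromBytesProcessed` are both in the Utility.Metered export list above, and my reading — stated as an assumption — is that the helper is Integral-polymorphic, so the swap lets `sz` keep its native numeric type instead of feeding the constructor directly. A small sketch:

```haskell
import Utility.Metered

-- A resume offset arriving as an Int, converted via the polymorphic
-- helper rather than the raw constructor (illustrative).
resumePoint :: Int -> BytesProcessed
resumePoint sz = toBytesProcessed sz

main :: IO ()
main = print (fromBytesProcessed (resumePoint 4096) :: Integer)
```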
12
doc/bugs/OSX_dmg_git-core_binaries_do_not_link.mdwn
Normal file
@@ -0,0 +1,12 @@
+The OSX .dmg contains a few binaries in git-core like git-remote-http.
+They have been adjusted by otool to link to libraries in the same directory
+as the binary. However, the libraries are not located in the git-core
+directory, but in its parent directory, and so the git-core binaries don't
+link.
+
+I don't think this is a new regression, but not entirely sure.
+
+Seems that OSXMkLibs could symlink ../lib into git-core.
+--[[Joey]]
+
+> [[fixed|done]] --[[Joey]]
@@ -0,0 +1,52 @@
+[[!comment format=mdwn
+ username="xwvvvvwx"
+ avatar="http://cdn.libravatar.org/avatar/7198160b33539b5b1b2d56ca85c562d9"
+ subject="comment 14"
+ date="2019-11-21T17:32:31Z"
+ content="""
+I just reproduced this when pushing to a gcrypt remote on rsync.net using the assistant. There is only one client pushing to the gcrypt remote.
+
+It was during the initial sync of a moderately large amount of data (~22G), perhaps this has something to do with it?
+
+I could reproduce the issue by cloning with gcrypt directly (`git clone gcrypt::ssh://....`).
+
+I was able to recover by following the steps outlined in Schnouki's comment (#12), but this is obviously quite an unsatisfactory fix.
+
+I am using annex to replicate important personal data, and I find this issue highly concerning.
+
+Foolishly, I did not keep a copy of the bad repo before I force pushed over it on the remote, so I do not have a copy available to experiment with :(
+
+---
+
+## logs
+
+`daemon.log` excerpt: [https://ipfs.io/ipfs/QmcoPuTLY2v5FWPABQLVwgyqW5WdsvkBbVS33cJh6zjzi4](https://ipfs.io/ipfs/QmcoPuTLY2v5FWPABQLVwgyqW5WdsvkBbVS33cJh6zjzi4)
+
+
+`git clone` output:
+
+```
+[annex@xwvvvvwx:~]$ git clone gcrypt::ssh://<URL> remote
+Cloning into 'remote'...
+gcrypt: Decrypting manifest
+gpg: Signature made Thu 21 Nov 2019 04:02:40 PM CET
+gpg: using RSA key 92E9F58E9F8C6845423C251AACD9A98951774194
+gpg: Good signature from \"git-annex <annex@xwvvvvwx.com>\" [ultimate]
+gcrypt: Remote ID is :id:tWrcOFKu2yX7y+jLDLxm
+gcrypt: Packfile e7b619864585f3c921b491fd041127cf0ae33c4480810610dcb2e37ec46a82be does not match digest!
+fatal: early EOF
+```
+
+`git annex version`:
+
+```
+git-annex version: 7.20191114
+build flags: Assistant Webapp Pairing S3 WebDAV Inotify DBus DesktopNotify TorrentParser MagicMime Feeds Testsuite
+dependency versions: aws-0.21.1 bloomfilter-2.0.1.0 cryptonite-0.25 DAV-1.3.3 feed-1.2.0.1 ghc-8.6.5 http-client-0.6.4 persistent-sqlite-2.9.3 torrent-10000.1.1 uuid-1.3.13 yesod-1.6.0
+key/value backends: SHA256E SHA256 SHA512E SHA512 SHA224E SHA224 SHA384E SHA384 SHA3_256E SHA3_256 SHA3_512E SHA3_512 SHA3_224E SHA3_224 SHA3_384E SHA3_384 SKEIN256E SKEIN256 SKEIN512E SKEIN512 BLAKE2B256E BLAKE2B256 BLAKE2B512E BLAKE2B512 BLAKE2B160E BLAKE2B160 BLAKE2B224E BLAKE2B224 BLAKE2B384E BLAKE2B384 BLAKE2BP512E BLAKE2BP512 BLAKE2S256E BLAKE2S256 BLAKE2S160E BLAKE2S160 BLAKE2S224E BLAKE2S224 BLAKE2SP256E BLAKE2SP256 BLAKE2SP224E BLAKE2SP224 SHA1E SHA1 MD5E MD5 WORM URL
+remote types: git gcrypt p2p S3 bup directory rsync web bittorrent webdav adb tahoe glacier ddar git-lfs hook external
+operating system: linux x86_64
+supported repository versions: 7
+upgrade supported from repository versions: 0 1 2 3 4 5 6
+```
+"""]]
@@ -0,0 +1,24 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 3"""
+ date="2019-11-13T19:29:34Z"
+ content="""
+--debug might provide some clue in its http dump.
+
+The ParseError comes from attoparsec. Seems likely that aeson/aws is what's
+using it there, and that it is failing to parse something from S3.
+
+Of course, the malloc error suggests a low-level memory problem, probably
+from C code. I don't think git-annex contains anything like that, so it
+must be from a dependency.
+
+The S3 signature being wrong again points to the aws library, or something
+lower level. And then the following double free is another low-level memory
+problem.
+
+So there's a pattern, and it seems to extend across linux and OSX.
+
+Kind of wondering if something in the library stack is somehow failing to
+be concurrency safe. If two http requests end up using the same memory,
+it would kind of explain all of this.
+"""]]
406
doc/bugs/cygwin.mdwn
Normal file
@@ -0,0 +1,406 @@
+Cygwin does not work with the git-annex Windows installed version
+
+
+### What steps will reproduce the problem?
+* Install the git-annex Windows version
+* Try running git annex test under cygwin: 65 tests failed out of 101.
+* Try running git annex test under git bash on Windows: all 101 tests passed.
+* NOTE: the git-lfs Windows installed version works fine under both cygwin and git bash on Windows.
+
+
+### What version of git-annex are you using? On what operating system?
+git-annex version: 7.20191106-ge486fd5e0
+build flags: Assistant Webapp Pairing S3 WebDAV TorrentParser Feeds Testsuite
+dependency versions: aws-0.21.1 bloomfilter-2.0.1.0 cryptonite-0.25 DAV-1.3.3 feed-1.0.1.0 ghc-8.6.5 http-client-0.5.14 persistent-sqlite-2.9.3 torrent-10000.1.1 uuid-1.3.13 yesod-1.6.0
+key/value backends: SHA256E SHA256 SHA512E SHA512 SHA224E SHA224 SHA384E SHA384 SHA3_256E SHA3_256 SHA3_512E SHA3_512 SHA3_224E SHA3_224 SHA3_384E SHA3_384 SKEIN256E SKEIN256 SKEIN512E SKEIN512 BLAKE2B256E BLAKE2B256 BLAKE2B512E BLAKE2B512 BLAKE2B160E BLAKE2B160 BLAKE2B224E BLAKE2B224 BLAKE2B384E BLAKE2B384 BLAKE2BP512E BLAKE2BP512 BLAKE2S256E BLAKE2S256 BLAKE2S160E BLAKE2S160 BLAKE2S224E BLAKE2S224 BLAKE2SP256E BLAKE2SP256 BLAKE2SP224E BLAKE2SP224 SHA1E SHA1 MD5E MD5 WORM URL
+remote types: git gcrypt p2p S3 bup directory rsync web bittorrent webdav adb tahoe glacier ddar git-lfs hook external
+operating system: mingw32 x86_64
+supported repository versions: 7
+upgrade supported from repository versions: 2 3 4 5 6
+
+### Please provide any additional information below.
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+Cygwin ~/git..t_tools/wget/cache (test)
+(506)$ git-annex test
+Tests
+  QuickCheck
+    prop_encode_decode_roundtrip: OK (0.04s)
+      +++ OK, passed 1000 tests.
+    prop_encode_c_decode_c_roundtrip: OK (0.03s)
+      +++ OK, passed 1000 tests.
+    prop_isomorphic_key_encode: OK
+      +++ OK, passed 1000 tests.
+    prop_isomorphic_shellEscape: OK (0.02s)
+      +++ OK, passed 1000 tests.
+    prop_isomorphic_shellEscape_multiword: OK (0.70s)
+      +++ OK, passed 1000 tests.
+    prop_isomorphic_configEscape: OK (0.02s)
+      +++ OK, passed 1000 tests.
+    prop_parse_show_Config: OK (0.04s)
+      +++ OK, passed 1000 tests.
+    prop_upFrom_basics: OK (0.02s)
+      +++ OK, passed 1000 tests.
+    prop_relPathDirToFile_basics: OK (0.03s)
+      +++ OK, passed 1000 tests.
+    prop_relPathDirToFile_regressionTest: OK
+      +++ OK, passed 1 test.
+    prop_cost_sane: OK
+      +++ OK, passed 1 test.
+    prop_matcher_sane: OK
+      +++ OK, passed 1 test.
+    prop_HmacSha1WithCipher_sane: OK
+      +++ OK, passed 1 test.
+    prop_VectorClock_sane: OK
+      +++ OK, passed 1 test.
+    prop_addMapLog_sane: OK
+      +++ OK, passed 1 test.
+    prop_verifiable_sane: OK (0.07s)
+      +++ OK, passed 1000 tests.
+    prop_segment_regressionTest: OK
+      +++ OK, passed 1 test.
+    prop_read_write_transferinfo: OK (0.04s)
+      +++ OK, passed 1000 tests.
+    prop_read_show_inodecache: OK (0.02s)
+      +++ OK, passed 1000 tests.
+    prop_parse_build_presence_log: OK (1.27s)
+      +++ OK, passed 1000 tests.
+    prop_parse_build_contentidentifier_log: OK (1.23s)
+      +++ OK, passed 1000 tests.
+    prop_read_show_TrustLevel: OK
+      +++ OK, passed 1 test.
+    prop_parse_build_TrustLevelLog: OK
+      +++ OK, passed 1 test.
+    prop_hashes_stable: OK
+      +++ OK, passed 1 test.
+    prop_mac_stable: OK
+      +++ OK, passed 1 test.
+    prop_schedule_roundtrips: OK (0.01s)
+      +++ OK, passed 1000 tests.
+    prop_past_sane: OK
+      +++ OK, passed 1 test.
+    prop_duration_roundtrips: OK
+      +++ OK, passed 1000 tests.
+    prop_metadata_sane: OK (0.86s)
+      +++ OK, passed 1000 tests.
+    prop_metadata_serialize: OK (0.84s)
+      +++ OK, passed 1000 tests.
+    prop_branchView_legal: OK (0.77s)
+      +++ OK, passed 1000 tests.
+    prop_viewPath_roundtrips: OK (0.03s)
+      +++ OK, passed 1000 tests.
+    prop_view_roundtrips: OK (0.52s)
+      +++ OK, passed 1000 tests.
+    prop_viewedFile_rountrips: OK (0.02s)
+      +++ OK, passed 1000 tests.
+    prop_b64_roundtrips: OK
+      +++ OK, passed 1000 tests.
+    prop_standardGroups_parse: OK
+      +++ OK, passed 1 test.
+  Unit Tests v7 adjusted unlocked branch
+    add dup: Init Tests
+      init: init test repo
+Detected a filesystem without fifo support.
+
+Disabling ssh connection caching.
+
+Detected a crippled filesystem.
+
+Disabling core.symlinks.
+(scanning for unlocked files...)
+
+Entering an adjusted branch where files are unlocked as this filesystem does not support locked files.
+not found .
+git-annex.exe: pre-commit: 1 failed
+
+Failed to enter adjusted branch!
+ok
+(recording state in git...)
+not found .
+git-annex.exe: pre-commit: 1 failed
+FAIL (6.92s)
+        .\\Test\\Framework.hs:469:
+        git commit failed
+      add: add foo
+ok
+(recording state in git...)
+add sha1foo
+ok
+(recording state in git...)
+not found .
+git-annex.exe: pre-commit: 1 failed
+FAIL (8.10s)
+        Test.hs:303:
+        git commit failed
+
+    2 out of 2 tests failed (15.02s)
+    FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    add extras: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    export_import: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    export_import_subdir: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    shared clone: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    log: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    import: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    reinject: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    unannex (no copy): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    unannex (with copy): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    drop (no remote): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    drop (with remote): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    drop (untrusted remote): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    get: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    get (ssh remote): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    move: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    move (ssh remote): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    copy: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    lock: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    lock --force: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    edit (no pre-commit): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    edit (pre-commit): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    partial commit: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    fix: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    trust: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    fsck (basics): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    fsck (bare): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    fsck (local untrusted): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    fsck (remote untrusted): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    fsck --from remote: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    migrate: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    migrate (via gitattributes): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    unused: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    describe: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    find: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    merge: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    info: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    version: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    sync: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    union merge regression: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    adjusted branch merge regression: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    adjusted branch subtree regression: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution (adjusted branch): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution movein regression: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution (mixed directory and file): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution symlink bit: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution (uncommitted local file): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution (removed file): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution (nonannexed file): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution (nonannexed symlink): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    conflict resolution (mixed locked and unlocked file): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    map: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    uninit: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    uninit (in git-annex branch): FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    upgrade: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    whereis: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    hook remote: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    directory remote: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    rsync remote: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    bup remote: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    crypto: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    preferred content: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    add subdirs: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+    addurl: FAIL
+      Exception: init tests failed! cannot continue
+      CallStack (from HasCallStack):
+        error, called at .\\Test\\Framework.hs:427:33 in main:Test.Framework
+
+65 out of 101 tests failed (21.64s)
+(Failures above could be due to a bug in git-annex, or an incompatibility
+with utilities, such as git, installed on this system.)
+# End of transcript or log.
+"""]]
@@ -22,3 +22,8 @@ If user convenience was something to strive for here, it should technically be p
 [[!meta author=yoh]]
 [[!tag projects/datalad]]
+
+> [[fixed|done]], and I also converted a number of other places
+> where an error could leak through to stderr, although there are still
+> some places where direct writes to stderr happen -- I'll probably never
+> be able to guarantee --json-error-messages catches every possible stderr
+> output. --[[Joey]]
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 1"""
+ date="2019-11-19T17:12:41Z"
+ content="""
+I think that you can accomplish what you want by making the directory
+you're importing from be a directory special remote with exporttree=yes
+importtree=yes and use the new `git annex import master --from remote`
+
+If that does not do what you want, I'd prefer to look at making it be able
+to do so. I hope to eventually remove the legacy git-annex import from
+directory, since we have this new more general interface.
+"""]]
@@ -0,0 +1,11 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 2"""
+ date="2019-11-13T19:37:16Z"
+ content="""
+The signal 11 is very significant. It points to a problem in a lower-level
+library (or ghc runtime), or perhaps a bad memory problem. git-annex does
+not itself contain any code that can segfault, afaik.
+
+Almost certainly the same as the other bug.
+"""]]
@@ -0,0 +1,73 @@
+Originally was trying to reproduce [datalad/issues/3653](https://github.com/datalad/datalad/issues/3653) assuming that multiple files pointed to the same key.
+It was not the case, and my attempt revealed another bug - annex's inability to "obtain" files in parallel when multiple of them point to the same key:
+
+<details>
+<summary>setup of original repo (click to expand)</summary>
+
+[[!format sh """
+/tmp > mkdir src; (cd src; git init; git annex init; dd if=/dev/zero of=1 count=1024 bs=1024; for f in {2..10}; do cp 1 $f; done ; git annex add *; git commit -m added; )
+Initialized empty Git repository in /tmp/src/.git/
+init (scanning for unlocked files...)
+ok
+(recording state in git...)
+1024+0 records in
+1024+0 records out
+1048576 bytes (1.0 MB, 1.0 MiB) copied, 0.00106651 s, 983 MB/s
+add 1
+ok
+add 10
+ok
+add 2
+ok
+add 3
+ok
+add 4
+ok
+add 5
+ok
+add 6
+ok
+add 7
+ok
+add 8
+ok
+add 9
+ok
+(recording state in git...)
+[master (root-commit) 63b1163] added
+ 10 files changed, 10 insertions(+)
+ create mode 120000 1
+ create mode 120000 10
+ create mode 120000 2
+ create mode 120000 3
+ create mode 120000 4
+ create mode 120000 5
+ create mode 120000 6
+ create mode 120000 7
+ create mode 120000 8
+ create mode 120000 9
+"""]]
+</details>
+
+And this is what happens when we try to get the same key in parallel:
+[[!format sh """
+
+/tmp > git clone src dst; (cd dst; git annex get -J 5 *; )
+Cloning into 'dst'...
+done.
+(merging origin/git-annex into git-annex...)
+(recording state in git...)
+(scanning for unlocked files...)
+get 2 (from origin...) (checksum...)
+git-annex: thread blocked indefinitely in an STM transaction
+failed
+git-annex: thread blocked indefinitely in an MVar operation
+
+"""]]
+
+I felt like it was an old issue, but failed to find a trace of it upon a quick lookup.
+
+[[!meta author=yoh]]
+[[!tag projects/datalad]]
+
+> [[fixed|done]] --[[Joey]]
Some files were not shown because too many files have changed in this diff.