fix some mixed space+tab indentation
This fixes all instances of " \t" in the code base. The most common case seems to be after a "where" line; vim probably copied the two-space layout of that line. Done as a background task while listening to episode 2 of the Type Theory podcast.
parent 8f69d55f03
commit 7b50b3c057
131 changed files with 242 additions and 242 deletions
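The change itself is mechanical, so it lends itself to scripting. Below is a minimal Haskell sketch of that kind of cleanup — an illustration only, not the tool actually used for this commit; the in-place rewrite via readFile/writeFile and the whole-file-at-once handling are assumptions made for brevity (it also normalizes a missing trailing newline).

```haskell
import System.Environment (getArgs)

-- Collapse any run of spaces that sits directly before a tab in a line's
-- leading whitespace, so mixed " \t" indentation becomes tabs only.
fixLine :: String -> String
fixLine l = fixws indent ++ rest
  where
    (indent, rest) = span (`elem` " \t") l
    fixws ws = case break (== '\t') ws of
        (_, '\t':more) -> '\t' : fixws more   -- drop the spaces before this tab
        _              -> ws                  -- no tab left: keep any spaces as-is

-- Rewrite each file named on the command line in place.
main :: IO ()
main = mapM_ fixFile =<< getArgs
  where
    fixFile f = do
        c <- readFile f
        -- force the whole file to be read before writing it back (lazy IO)
        length c `seq` writeFile f (unlines (map fixLine (lines c)))
```

Candidate files could be located with something like `git grep -lP ' \t'` (assuming a PCRE-enabled git); the two-space indent before a `where` with no tab after it is left alone, which matches the intent described above.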
@@ -454,7 +454,7 @@ handleTransitions jl localts refs = do
ignoreRefs untransitionedrefs
return True
where
getreftransition ref = do
ts <- parseTransitionsStrictly "remote" . decodeBS
<$> catFile ref transitionsLog
return (ref, ts)

@@ -470,7 +470,7 @@ ignoreRefs rs = do
getIgnoredRefs :: Annex (S.Set Git.Ref)
getIgnoredRefs = S.fromList . mapMaybe Git.Sha.extractSha . lines <$> content
where
content = do
f <- fromRepo gitAnnexIgnoredRefs
liftIO $ catchDefaultIO "" $ readFile f

@@ -498,7 +498,7 @@ performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
ref <- getBranch
commitIndex jl ref message (nub $ fullname:transitionedrefs)
where
message
| neednewlocalbranch && null transitionedrefs = "new branch for transition " ++ tdesc
| otherwise = "continuing transition " ++ tdesc
tdesc = show $ map describeTransition $ transitionList ts

@@ -100,10 +100,10 @@ catKey' modeguaranteed sha mode
catLink :: Bool -> Sha -> Annex String
catLink modeguaranteed sha = fromInternalGitPath . decodeBS <$> get
where
-- If the mode is not guaranteed to be correct, avoid
-- buffering the whole file content, which might be large.
-- 8192 is enough if it really is a symlink.
get
| modeguaranteed = catObject sha
| otherwise = L.take 8192 <$> catObject sha

@@ -120,7 +120,7 @@ catKeyChecked :: Bool -> Ref -> Annex (Maybe Key)
catKeyChecked needhead ref@(Ref r) =
catKey' False ref =<< findmode <$> catTree treeref
where
pathparts = split "/" r
dir = intercalate "/" $ take (length pathparts - 1) pathparts
file = fromMaybe "" $ lastMaybe pathparts
treeref = Ref $ if needhead then "HEAD" ++ dir ++ "/" else dir ++ "/"

@@ -18,7 +18,7 @@ import qualified Annex
checkIgnored :: FilePath -> Annex Bool
checkIgnored file = go =<< checkIgnoreHandle
where
go Nothing = return False
go (Just h) = liftIO $ Git.checkIgnored h file

checkIgnoreHandle :: Annex (Maybe Git.CheckIgnoreHandle)

@@ -456,7 +456,7 @@ removeAnnex (ContentLock key) = withObjectLoc key remove removedirect
secureErase :: FilePath -> Annex ()
secureErase file = maybe noop go =<< annexSecureEraseCommand <$> Annex.getGitConfig
where
go basecmd = void $ liftIO $
boolSystem "sh" [Param "-c", Param $ gencmd basecmd]
gencmd = massReplace [ ("%file", shellEscape file) ]

@@ -555,7 +555,7 @@ saveState nocommit = doSideAction $ do
downloadUrl :: [Url.URLString] -> FilePath -> Annex Bool
downloadUrl urls file = go =<< annexWebDownloadCommand <$> Annex.getGitConfig
where
go Nothing = Url.withUrlOptions $ \uo ->
anyM (\u -> Url.download u file uo) urls
go (Just basecmd) = liftIO $ anyM (downloadcmd basecmd) urls
downloadcmd basecmd url =

@@ -347,7 +347,7 @@ toDirectGen k f = do
(dloc:_) -> return $ Just $ fromdirect dloc
)
where
fromindirect loc = do
{- Move content from annex to direct file. -}
updateInodeCache k loc
void $ addAssociatedFile k f

@@ -45,7 +45,7 @@ checkEnvironmentIO =
ensureEnv "GIT_COMMITTER_NAME" username
where
#ifndef __ANDROID__
-- existing environment is not overwritten
ensureEnv var val = void $ setEnv var val False
#else
-- Environment setting is broken on Android, so this is dealt with
@@ -59,7 +59,7 @@ checkEnvironmentIO =
ensureCommit :: Annex a -> Annex a
ensureCommit a = either retry return =<< tryNonAsync a
where
retry _ = do
name <- liftIO myUserName
setConfig (ConfigKey "user.name") name
setConfig (ConfigKey "user.email") name

@@ -106,7 +106,7 @@ tokenizeMatcher = filter (not . null ) . concatMap splitparens . words
largeFilesMatcher :: Annex (FileMatcher Annex)
largeFilesMatcher = go =<< annexLargeFiles <$> Annex.getGitConfig
where
go Nothing = return matchAll
go (Just expr) = do
gm <- groupMap
rc <- readRemoteLog

@@ -33,7 +33,7 @@ replaceFileOr file action rollback = do
tmpfile <- liftIO $ setup tmpdir
go tmpfile `catchNonAsync` (const $ rollback tmpfile)
where
setup tmpdir = do
(tmpfile, h) <- openTempFileWithDefaultPermissions tmpdir "tmp"
hClose h
return tmpfile

@@ -78,10 +78,10 @@ bestSocketPath abssocketfile = do
then Just socketfile
else Nothing
where
-- ssh appends a 16 char extension to the socket when setting it
-- up, which needs to be taken into account when checking
-- that a valid socket was constructed.
sshgarbage = replicate (1+16) 'X'

sshConnectionCachingParams :: FilePath -> [CommandParam]
sshConnectionCachingParams socketfile =

@@ -69,7 +69,7 @@ runTransfer' ignorelock t file shouldretry a = do
return False
else do
ok <- retry info metervar $
bracketIO (return fd) (cleanup tfile) (const $ a meter)
unless ok $ recordFailedTransfer t info
return ok
where

@@ -102,7 +102,7 @@ refineView origview = checksize . calc Unchanged origview
let (components', viewchanges) = runWriter $
mapM (\c -> updateViewComponent c field vf) (viewComponents view)
viewchange = if field `elem` map viewField (viewComponents origview)
then maximum viewchanges
else Narrowing
in (view { viewComponents = components' }, viewchange)
| otherwise =

@@ -207,7 +207,7 @@ viewComponentMatcher :: ViewComponent -> (MetaData -> Maybe [MetaValue])
viewComponentMatcher viewcomponent = \metadata ->
matcher (currentMetaDataValues metafield metadata)
where
metafield = viewField viewcomponent
matcher = case viewFilter viewcomponent of
FilterValues s -> \values -> setmatches $
S.intersection s values

@@ -236,8 +236,8 @@ toViewPath = concatMap escapeslash . fromMetaValue
fromViewPath :: FilePath -> MetaValue
fromViewPath = toMetaValue . deescapeslash []
where
deescapeslash s [] = reverse s
deescapeslash s (c:cs)
| c == pseudoSlash = case cs of
(c':cs')
| c' == pseudoSlash -> deescapeslash (pseudoSlash:s) cs'

@@ -58,7 +58,7 @@ viewedFileReuse = takeFileName
dirFromViewedFile :: ViewedFile -> FilePath
dirFromViewedFile = joinPath . drop 1 . sep [] ""
where
sep l _ [] = reverse l
sep l curr (c:cs)
| c == '%' = sep (reverse curr:l) "" cs
| c == '\\' = case cs of

@@ -119,7 +119,7 @@ startDaemon assistant foreground startdelay cannotrun listenhost startbrowser =
)
#endif
where
desc
| assistant = "assistant"
| otherwise = "watch"
start daemonize webappwaiter = withThreadState $ \st -> do
@@ -145,7 +145,7 @@ syncResultAlert' succeeded failed = makeAlertFiller (not $ null succeeded) $
, alertHeader = Just $ tenseWords msg
}
where
msg
| null succeeded = ["Failed to sync with", showRemotes failed]
| null failed = ["Synced with", showRemotes succeeded]
| otherwise =

@@ -119,7 +119,7 @@ mergeAlert i al m = maybe updatePrune updateCombine (alertCombiner al)
where
bloat = M.size m' - maxAlerts
pruneold l =
let (f, rest) = partition (\(_, a) -> isFiller a) l
in drop bloat f ++ rest
updatePrune = pruneBloat $ M.filterWithKey pruneSame $
M.insertWith' const i al m

@@ -65,7 +65,7 @@ calcSyncRemotes = do
, syncingToCloudRemote = any iscloud syncdata
}
where
iscloud r = not (Remote.readonly r) && Remote.availability r == Remote.GloballyAvailable

{- Updates the syncRemotes list from the list of all remotes in Annex state. -}
updateSyncRemotes :: Assistant ()

@@ -62,7 +62,7 @@ removableRemote urlrenderer uuid = do
<$> liftAnnex (Remote.remoteFromUUID uuid)
mapM_ (queueremaining r) keys
where
queueremaining r k =
queueTransferWhenSmall "remaining object in unwanted remote"
Nothing (Transfer Download uuid k) r
{- Scanning for keys can take a long time; do not tie up

@@ -20,7 +20,7 @@ newUserId :: IO UserId
newUserId = do
oldkeys <- secretKeys
username <- myUserName
let basekeyname = username ++ "'s git-annex encryption key"
return $ Prelude.head $ filter (\n -> M.null $ M.filter (== n) oldkeys)
( basekeyname
: map (\n -> basekeyname ++ show n) ([2..] :: [Int])

@@ -48,7 +48,7 @@ makeRsyncRemote :: RemoteName -> String -> Annex String
makeRsyncRemote name location = makeRemote name location $ const $ void $
go =<< Command.InitRemote.findExisting name
where
go Nothing = setupSpecialRemote name Rsync.remote config Nothing
(Nothing, Command.InitRemote.newConfig name)
go (Just (u, c)) = setupSpecialRemote name Rsync.remote config Nothing
(Just u, c)

@@ -80,7 +80,7 @@ checkImportantNetMessages (storedclient, sentclient) = go <<~ netMessager
queuePushInitiation :: NetMessage -> Assistant ()
queuePushInitiation msg@(Pushing clientid stage) = do
tv <- getPushInitiationQueue side
liftIO $ atomically $ do
r <- tryTakeTMVar tv
case r of
Nothing -> putTMVar tv [msg]

@@ -88,7 +88,7 @@ queuePushInitiation msg@(Pushing clientid stage) = do
let !l' = msg : filter differentclient l
putTMVar tv l'
where
side = pushDestinationSide stage
differentclient (Pushing cid _) = cid /= clientid
differentclient _ = True
queuePushInitiation _ = noop

@@ -63,7 +63,7 @@ runRepair u mrmt destructiverepair = do
return ok
where
localrepair fsckresults = do
-- Stop the watcher from running while running repairs.
changeSyncable Nothing False

@@ -140,9 +140,9 @@ repairStaleGitLocks r = do
repairStaleLocks :: [FilePath] -> Assistant ()
repairStaleLocks lockfiles = go =<< getsizes
where
getsize lf = catchMaybeIO $
(\s -> (lf, fileSize s)) <$> getFileStatus lf
getsizes = liftIO $ catMaybes <$> mapM getsize lockfiles
go [] = return ()
go l = ifM (liftIO $ null <$> Lsof.query ("--" : map fst l))
( do
@@ -92,7 +92,7 @@ parseSshUrl u
, sshCapabilities = []
}
where
(user, host) = if '@' `elem` userhost
then separate (== '@') userhost
else ("", userhost)
fromrsync s

@@ -260,7 +260,7 @@ setupSshKeyPair sshkeypair sshdata = do
fixSshKeyPairIdentitiesOnly :: IO ()
fixSshKeyPairIdentitiesOnly = changeUserSshConfig $ unlines . go [] . lines
where
go c [] = reverse c
go c (l:[])
| all (`isInfixOf` l) indicators = go (fixedline l:l:c) []
| otherwise = go (l:c) []

@@ -268,7 +268,7 @@ fixSshKeyPairIdentitiesOnly = changeUserSshConfig $ unlines . go [] . lines
| all (`isInfixOf` l) indicators && not ("IdentitiesOnly" `isInfixOf` next) =
go (fixedline l:l:c) (next:rest)
| otherwise = go (l:c) (next:rest)
indicators = ["IdentityFile", "key.git-annex"]
fixedline tmpl = takeWhile isSpace tmpl ++ "IdentitiesOnly yes"

{- Add StrictHostKeyChecking to any ssh config stanzas that were written

@@ -164,8 +164,8 @@ waitChangeTime a = waitchanges 0
-}
aftermaxcommit oldchanges = loop (30 :: Int)
where
loop 0 = continue oldchanges
loop n = do
liftAnnex noop -- ensure Annex state is free
liftIO $ threadDelaySeconds (Seconds 1)
changes <- getAnyChanges

@@ -301,7 +301,7 @@ handleAdds havelsof delayadd cs = returnWhen (null incomplete) $ do
add change@(InProcessAddChange { keySource = ks }) =
catchDefaultIO Nothing <~> doadd
where
doadd = sanitycheck ks $ do
(mkey, mcache) <- liftAnnex $ do
showStart "add" $ keyFilename ks
Command.Add.ingest $ Just ks

@@ -87,7 +87,7 @@ cronnerThread urlrenderer = namedThreadUnchecked "Cronner" $ do
liftIO $ waitNotification h
debug ["reloading changed activities"]
go h amap' nmap'
startactivities as lastruntimes = forM as $ \activity ->
case connectActivityUUID activity of
Nothing -> do
runner <- asIO2 (sleepingActivityThread urlrenderer)

@@ -108,8 +108,8 @@ cronnerThread urlrenderer = namedThreadUnchecked "Cronner" $ do
sleepingActivityThread :: UrlRenderer -> ScheduledActivity -> Maybe LocalTime -> Assistant ()
sleepingActivityThread urlrenderer activity lasttime = go lasttime =<< getnexttime lasttime
where
getnexttime = liftIO . nextTime schedule
go _ Nothing = debug ["no scheduled events left for", desc]
go l (Just (NextTimeExactly t)) = waitrun l t Nothing
go l (Just (NextTimeWindow windowstart windowend)) =
waitrun l windowstart (Just windowend)

@@ -129,7 +129,7 @@ sleepingActivityThread urlrenderer activity lasttime = go lasttime =<< getnextti
go l =<< getnexttime l
else run nowt
where
tolate nowt tz = case mmaxt of
Just maxt -> nowt > maxt
-- allow the job to start 10 minutes late
Nothing ->diffUTCTime

@@ -258,7 +258,7 @@ checkOldUnused :: UrlRenderer -> Assistant ()
checkOldUnused urlrenderer = go =<< annexExpireUnused <$> liftAnnex Annex.getGitConfig
where
go (Just Nothing) = noop
go (Just (Just expireunused)) = expireUnused (Just expireunused)
go Nothing = maybe noop prompt =<< describeUnusedWhenBig

prompt msg =

@@ -51,9 +51,9 @@ upgradeWatcherThread urlrenderer = namedThread "UpgradeWatcher" $ do
let depth = length (splitPath dir) + 1
let nosubdirs f = length (splitPath f) == depth
void $ liftIO $ watchDir dir nosubdirs False hooks (startup mvar)
-- Ignore bogus events generated during the startup scan.
-- We ask the watcher to not generate them, but just to be safe..
startup mvar scanner = do
r <- scanner
void $ swapMVar mvar Started
return r
@@ -39,7 +39,7 @@ upgraderThread urlrenderer = namedThread "Upgrader" $
h <- liftIO . newNotificationHandle False . networkConnectedNotifier =<< getDaemonStatus
go h =<< liftIO getCurrentTime
where
{- Wait for a network connection event. Then see if it's been
- half a day since the last upgrade check. If so, proceed with
- check. -}
go h lastchecked = do

@@ -192,7 +192,7 @@ runHandler handler file filestatus = void $ do
liftAnnex Annex.Queue.flushWhenFull
recordChange change
where
normalize f
| "./" `isPrefixOf` file = drop 2 f
| otherwise = f

@@ -246,7 +246,7 @@ onAddDirect symlinkssupported matcher file fs = do
debug ["add direct", file]
add matcher file
where
{- On a filesystem without symlinks, we'll get changes for regular
- files that git uses to stand-in for symlinks. Detect when
- this happens, and stage the symlink, rather than annexing the
- file. -}

@@ -276,7 +276,7 @@ onAddSymlink isdirect file filestatus = unlessIgnored file $ do
onAddSymlink' :: Maybe String -> Maybe Key -> Bool -> Handler
onAddSymlink' linktarget mk isdirect file filestatus = go mk
where
go (Just key) = do
when isdirect $
liftAnnex $ void $ addAssociatedFile key file
link <- liftAnnex $ inRepo $ gitAnnexLink file key

@@ -97,7 +97,7 @@ webAppThread assistantdata urlrenderer noannex cannotrun postfirstrun listenhost
urlfile <- getAnnex' $ fromRepo gitAnnexUrlFile
go tlssettings addr webapp htmlshim (Just urlfile)
where
-- The webapp thread does not wait for the startupSanityCheckThread
-- to finish, so that the user interface remains responsive while
-- that's going on.
thread = namedThreadUnchecked "WebApp"

@@ -131,7 +131,7 @@ xmppClient urlrenderer d creds xmppuuid =
{- XEP-0199 says that the server will respond with either
- a ping response or an error message. Either will
- cause traffic, so good enough. -}
pingstanza = xmppPing selfjid

handlemsg selfjid (PresenceMessage p) = do
void $ inAssistant $

@@ -34,7 +34,7 @@ xmppReceivePackThread = pusherThread "XMPPReceivePack" ReceivePack
pusherThread :: String -> PushSide -> UrlRenderer -> NamedThread
pusherThread threadname side urlrenderer = namedThread threadname $ go Nothing
where
go lastpushedto = do
msg <- waitPushInitiation side $ selectNextPush lastpushedto
debug ["started running push", logNetMessage msg]

@@ -78,4 +78,4 @@ selectNextPush lastpushedto l = go [] l
(Pushing clientid _)
| Just clientid /= lastpushedto -> (m, rejected ++ ms)
_ -> go (m:rejected) ms
go [] [] = undefined

@@ -92,7 +92,7 @@ queueTransfersMatching matching reason schedule k f direction
filterM (wantSend True (Just k) f . Remote.uuid) $
filter (\r -> not (inset s r || Remote.readonly r)) rs
where
locs = S.fromList <$> Remote.keyLocations k
inset s r = S.member (Remote.uuid r) s
gentransfer r = Transfer
{ transferDirection = direction

@@ -85,7 +85,7 @@ logNetMessage (Pushing c stage) = show $ Pushing (logClientID c) $
SendPackOutput n _ -> SendPackOutput n elided
s -> s
where
elided = T.encodeUtf8 $ T.pack "<elided>"
logNetMessage (PairingNotification stage c uuid) =
show $ PairingNotification stage (logClientID c) uuid
logNetMessage m = show m
@@ -78,7 +78,7 @@ upgradedEnv = "GIT_ANNEX_UPGRADED"
startDistributionDownload :: GitAnnexDistribution -> Assistant ()
startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO oldVersionLocation
where
go Nothing = debug ["Skipping redundant upgrade"]
go (Just dest) = do
liftAnnex $ setUrlPresent k u
hook <- asIO1 $ distributionDownloadComplete d dest cleanup

@@ -207,7 +207,7 @@ makeAWSRemote maker remotetype defaultgroup (AWSCreds ak sk) name config =
setupCloudRemote defaultgroup Nothing $
maker hostname remotetype (Just creds) config
where
creds = (T.unpack ak, T.unpack sk)
{- AWS services use the remote name as the basis for a host
- name, so filter it to contain valid characters. -}
hostname = case filter isAlphaNum name of

@@ -36,7 +36,7 @@ notCurrentRepo uuid a = do
then redirect DeleteCurrentRepositoryR
else go =<< liftAnnex (Remote.remoteFromUUID uuid)
where
go Nothing = error "Unknown UUID"
go (Just _) = a

handleXMPPRemoval :: UUID -> Handler Html -> Handler Html

@@ -136,7 +136,7 @@ setRepoConfig uuid mremote oldc newc = do
when syncableChanged $
liftAssistant $ changeSyncable mremote (repoSyncable newc)
where
syncableChanged = repoSyncable oldc /= repoSyncable newc
associatedDirectoryChanged = repoAssociatedDirectory oldc /= repoAssociatedDirectory newc
groupChanged = repoGroup oldc /= repoGroup newc
nameChanged = isJust mremote && legalName oldc /= legalName newc

@@ -255,7 +255,7 @@ getGitRepoInfo r = do
getRepoEncryption :: Maybe Remote.Remote -> Maybe Remote.RemoteConfig -> Widget
getRepoEncryption (Just _) (Just c) = case extractCipher c of
Nothing ->
[whamlet|not encrypted|]
(Just (SharedCipher _)) ->
[whamlet|encrypted: encryption key stored in git repository|]

@@ -274,7 +274,7 @@ getUpgradeRepositoryR :: RepoId -> Handler ()
getUpgradeRepositoryR (RepoUUID _) = redirect DashboardR
getUpgradeRepositoryR r = go =<< liftAnnex (repoIdRemote r)
where
go Nothing = redirect DashboardR
go (Just rmt) = do
liftIO fixSshKeyPairIdentitiesOnly
liftAnnex $ setConfig

@@ -60,7 +60,7 @@ runFsckForm new activity = case activity of
ScheduledSelfFsck s d -> go s d =<< liftAnnex getUUID
ScheduledRemoteFsck ru s d -> go s d ru
where
go (Schedule r t) d ru = do
u <- liftAnnex getUUID
repolist <- liftAssistant (getrepolist ru)
runFormPostNoToken $ \msg -> do

@@ -201,7 +201,7 @@ $if (not exists)
have been uploaded, and the Internet Archive has processed them.
|]
where
bucket = fromMaybe "" $ M.lookup "bucket" c
#ifdef WITH_S3
url = S3.iaItemUrl bucket
#else

@@ -175,7 +175,7 @@ getAndroidCameraRepositoryR :: Handler ()
getAndroidCameraRepositoryR =
startFullAssistant "/sdcard/DCIM" SourceGroup $ Just addignore
where
addignore = do
liftIO $ unlessM (doesFileExist ".gitignore") $
writeFile ".gitignore" ".thumbnails"
void $ inRepo $

@@ -274,8 +274,8 @@ getConfirmAddDriveR drive = ifM (liftIO $ probeRepoExists dir)
, newrepo
)
where
dir = removableDriveRepository drive
newrepo = do
secretkeys <- sortBy (comparing snd) . M.toList
<$> liftIO secretKeys
page "Encrypt repository?" (Just Configuration) $
@@ -338,7 +338,7 @@ getFinishAddDriveR drive = go
liftAnnex $ defaultStandardGroup u TransferGroup
liftAssistant $ immediateSyncRemote r
redirect $ EditNewRepositoryR u
mountpoint = T.unpack (mountPoint drive)
dir = removableDriveRepository drive
remotename = takeFileName mountpoint

@@ -72,7 +72,7 @@ getStartXMPPPairSelfR :: Handler Html
#ifdef WITH_XMPP
getStartXMPPPairSelfR = go =<< liftAnnex getXMPPCreds
where
go Nothing = do
-- go get XMPP configured, then come back
redirect XMPPConfigForPairSelfR
go (Just creds) = do

@@ -193,7 +193,7 @@ postEnableSshGCryptR :: UUID -> Handler Html
postEnableSshGCryptR u = whenGcryptInstalled $
enableSshRemote getsshinput enableRsyncNetGCrypt enablegcrypt u
where
enablegcrypt sshdata _ = prepSsh False sshdata $ \sshdata' ->
sshConfigurator $
checkExistingGCrypt sshdata' $
error "Expected to find an encrypted git repository, but did not."

@@ -232,7 +232,7 @@ enableSshRemote getsshinput rsyncnetsetup genericsetup u = do
_ -> showform form enctype UntestedServer
_ -> redirect AddSshR
where
unmangle sshdata = sshdata
{ sshHostName = T.pack $ unMangleSshHostName $
T.unpack $ sshHostName sshdata
}

@@ -423,7 +423,7 @@ getConfirmSshR sshdata u
secretkeys <- sortBy (comparing snd) . M.toList
<$> liftIO secretKeys
$(widgetFile "configurators/ssh/confirm")
handleexisting Nothing = sshConfigurator $
-- Not a UUID we know, so prompt about combining.
$(widgetFile "configurators/ssh/combine")
handleexisting (Just _) = prepSsh False sshdata $ \sshdata' -> do

@@ -471,7 +471,7 @@ checkExistingGCrypt sshdata nope = checkGCryptRepoEncryption repourl nope nope $
combineExistingGCrypt sshdata u
Nothing -> error "The location contains a gcrypt repository that is not a git-annex special remote. This is not supported."
where
repourl = genSshUrl sshdata

{- Enables an existing gcrypt special remote. -}
enableGCrypt :: SshData -> RemoteName -> Handler Html

@@ -488,7 +488,7 @@ combineExistingGCrypt sshdata u = do
reponame <- liftAnnex $ getGCryptRemoteName u repourl
enableGCrypt sshdata reponame
where
repourl = genSshUrl sshdata

{- Sets up remote repository for ssh, or directory for rsync. -}
prepSsh :: Bool -> SshData -> (SshData -> Handler Html) -> Handler Html

@@ -579,7 +579,7 @@ postAddRsyncNetR = do
"That is not a rsync.net host name."
_ -> showform UntestedServer
where
inpage = page "Add a Rsync.net repository" (Just Configuration)
hostnamefield = textField `withExpandableNote` ("Help", help)
help = [whamlet|
<div>

@@ -150,7 +150,7 @@ getXMPPRemotes :: Assistant [(JID, Remote)]
getXMPPRemotes = catMaybes . map pair . filter Remote.isXMPPRemote . syncGitRemotes
<$> getDaemonStatus
where
pair r = maybe Nothing (\jid -> Just (jid, r)) $
parseJID $ getXMPPClientID r

data XMPPForm = XMPPForm

@@ -197,8 +197,8 @@ testXMPP creds = do
}
_ -> return $ Left $ intercalate "; " $ map formatlog bad
where
formatlog ((h, p), Left e) = "host " ++ h ++ ":" ++ showport p ++ " failed: " ++ show e
formatlog _ = ""

showport (PortNumber n) = show n
showport (Service s) = s
@@ -129,7 +129,7 @@ withExpandableNote field (toggle, note) = withNote field $ [whamlet|
^{note}
|]
where
ident = "toggle_" ++ toggle

{- Adds a check box to an AForm to control encryption. -}
#if MIN_VERSION_yesod(1,2,0)

@@ -196,7 +196,7 @@ repoList reposelector
_ -> Nothing
_ -> Nothing
where
getconfig k = M.lookup k =<< M.lookup u m
val iscloud r = Just (iscloud, (RepoUUID u, DisabledRepoActions $ r u))
list l = do
cc <- currentlyConnectedRemotes <$> liftAssistant getDaemonStatus

@@ -232,13 +232,13 @@ getRepositoriesReorderR = do
liftAssistant updateSyncRemotes
where
go _ Nothing = noop
go list (Just remote) = do
rs <- catMaybes <$> mapM repoIdRemote list
forM_ (reorderCosts remote rs) $ \(r, newcost) ->
when (Remote.cost r /= newcost) $
setRemoteCost (Remote.repo r) newcost
void remoteListRefresh
fromjs = fromMaybe (RepoUUID NoUUID) . readish . T.unpack

reorderCosts :: Remote -> [Remote] -> [(Remote, Cost)]
reorderCosts remote rs = zip rs'' (insertCostAfter costs i)

@@ -195,7 +195,7 @@ decodeMessage m = decode =<< gitAnnexTagInfo m
<*> a i
gen c i = c . toUUID <$> headMaybe (words (T.unpack (tagValue i)))
seqgen c i = do
packet <- decodeTagContent $ tagElement i
let seqnum = fromMaybe 0 $ readish $ T.unpack $ tagValue i
return $ c seqnum packet
shasgen c i = do

@@ -152,7 +152,7 @@ xmppPush cid gitpush = do
fromxmpp outh controlh = withPushMessagesInSequence cid SendPack handlemsg
where
handlemsg (Just (Pushing _ (ReceivePackOutput _ b))) =
liftIO $ writeChunk outh b
handlemsg (Just (Pushing _ (ReceivePackDone exitcode))) =
liftIO $ do

@@ -266,7 +266,7 @@ xmppReceivePack cid = do
relaytoxmpp seqnum' outh
relayfromxmpp inh = withPushMessagesInSequence cid ReceivePack handlemsg
where
handlemsg (Just (Pushing _ (SendPackOutput _ b))) =
liftIO $ writeChunk inh b
handlemsg (Just _) = noop
handlemsg Nothing = do

@@ -337,7 +337,7 @@ handlePushNotice (Pushing cid (CanPush theiruuid shas)) =
, go
)
where
go = do
u <- liftAnnex getUUID
sendNetMessage $ Pushing cid (PushRequest u)
haveall l = liftAnnex $ not <$> anyM donthave l

@@ -359,9 +359,9 @@ writeChunk h b = do
withPushMessagesInSequence :: ClientID -> PushSide -> (Maybe NetMessage -> Assistant ()) -> Assistant ()
withPushMessagesInSequence cid side a = loop 0
where
loop seqnum = do
m <- timeout xmppTimeout <~> waitInbox cid side
let go s = a m >> loop s
let next = seqnum + 1
case extractSequence =<< m of
Just seqnum'

@@ -144,7 +144,7 @@ trivialMigrate oldkey newbackend
hashFile :: Hash -> FilePath -> Integer -> Annex String
hashFile hash file filesize = liftIO $ go hash
where
go (SHAHash hashsize) = case shaHasher hashsize filesize of
Left sha -> sha <$> L.readFile file
Right command ->
either error return

@@ -58,13 +58,13 @@ parseGccLink = do
collect2params <- restOfLine
return $ CmdParams (path ++ collectcmd) (escapeDosPaths collect2params) cenv
where
collectcmd = "collect2.exe"
collectgccenv = "COLLECT_GCC"
collectltoenv = "COLLECT_LTO_WRAPPER"
pathenv = "COMPILER_PATH"
libpathenv = "LIBRARY_PATH"
optenv = "COLLECT_GCC_OPTIONS"
collectenv = do
void $ many1 $ do
notFollowedBy $ string collectgccenv
restOfLine
@@ -148,7 +148,7 @@ runAtFile p s f extraparams = do
removeFile f
return out
where
c = case parse p "" s of
Left e -> error $
(show e) ++
"\n<<<\n" ++ s ++ "\n>>>"

@@ -86,7 +86,7 @@ number = read <$> many1 digit
coordsParser :: Parser (Coord, Coord)
coordsParser = (try singleline <|> try weird <|> multiline) <?> "Coords"
where
singleline = do
line <- number
void $ char ':'
startcol <- number

@@ -151,7 +151,7 @@ spliceParser = do
(unlines codelines)
splicetype
where
tosplicetype "declarations" = SpliceDeclaration
tosplicetype "expression" = SpliceExpression
tosplicetype s = error $ "unknown splice type: " ++ s

@@ -177,7 +177,7 @@ spliceParser = do
splicesExtractor :: Parser [Splice]
splicesExtractor = rights <$> many extract
where
extract = try (Right <$> spliceParser) <|> (Left <$> compilerJunkLine)
compilerJunkLine = restOfLine

{- Modifies the source file, expanding the splices, which all must

@@ -214,8 +214,8 @@ applySplices destdir imports splices@(first:_) = do
hPutStr h newcontent
hClose h
where
expand lls [] = lls
expand lls (s:rest)
| isExpressionSplice s = expand (expandExpressionSplice s lls) rest
| otherwise = expand (expandDeclarationSplice s lls) rest

@@ -291,12 +291,12 @@ expandExpressionSplice sp lls = concat [before, spliced:padding, end]
-- ie: bar $(splice)
| otherwise = s ++ " $ "
where
s' = filter (not . isSpace) s

findindent = length . takeWhile isSpace
addindent n = unlines . map (i ++) . lines
where
i = take n $ repeat ' '

{- Tweaks code output by GHC in splices to actually build. Yipes. -}
mangleCode :: String -> String

@@ -315,7 +315,7 @@ mangleCode = flip_colon
. remove_package_version
. emptylambda
where
{- Lambdas are often output without parens around them.
- This breaks when the lambda is immediately applied to a
- parameter.
-

@@ -409,7 +409,7 @@ mangleCode = flip_colon
restofline = manyTill (noneOf "\n") newline

{- For some reason, GHC sometimes doesn't like the multiline
- strings it creates. It seems to get hung up on \{ at the
- start of a new line sometimes, wanting it to not be escaped.
-

@@ -646,7 +646,7 @@ parsecAndReplace p s = case parse find "" s of
Left _e -> s
Right l -> concatMap (either return id) l
where
find :: Parser [Either Char String]
find = many $ try (Right <$> p) <|> (Left <$> anyChar)

main :: IO ()

@@ -654,7 +654,7 @@ main = go =<< getArgs
where
go (destdir:log:header:[]) = run destdir log (Just header)
go (destdir:log:[]) = run destdir log Nothing
go _ = error "usage: EvilSplicer destdir logfile [headerfile]"

run destdir log mheader = do
r <- parseFromFile splicesExtractor log
@@ -103,7 +103,7 @@ makeInstaller gitannex license extrabins launchers = nsis $ do
name "git-annex"
outFile $ str installer
{- Installing into the same directory as git avoids needing to modify
- path myself, since the git installer already does it. -}
installDir gitInstallDir
requestExecutionLevel Admin

@@ -112,7 +112,7 @@ expand_rpath libs replacement_libs cmd
return $ map (replacem m) libs
| otherwise = return libs
where
probe c = "DYLD_PRINT_RPATHS=1 " ++ c ++ " --getting-rpath-dummy-option 2>&1 | grep RPATH"
parse s = case words s of
("RPATH":"successful":"expansion":"of":old:"to:":new:[]) ->
Just (old, new)
@@ -35,7 +35,7 @@ noDaemonRunning :: Command -> Command
noDaemonRunning = addCheck $ whenM (isJust <$> daemonpid) $
error "You cannot run this command while git-annex watch or git-annex assistant is running."
where
daemonpid = liftIO . checkDaemon =<< fromRepo gitAnnexPidFile

dontCheck :: CommandCheck -> Command -> Command
dontCheck check cmd = mutateCheck cmd $ \c -> filter (/= check) c
@@ -58,7 +58,7 @@ dispatch fuzzyok allargs allcmds commonoptions fields header getgitrepo = do
shutdown $ cmdnocommit cmd
go _flags params (Left e) = do
when fuzzy $
autocorrect =<< Git.Config.global
maybe (throw e) (\a -> a params) (cmdnorepo cmd)
err msg = msg ++ "\n\n" ++ usage header allcmds
cmd = Prelude.head cmds
@@ -66,7 +66,7 @@ options = commonOptions ++
check u = unexpectedUUID expected u
checkGCryptUUID expected = check =<< getGCryptUUID True =<< gitRepo
where
check (Just u) | u == toUUID expected = noop
check Nothing = unexpected expected "uninitialized repository"
check (Just u) = unexpectedUUID expected u
unexpectedUUID expected u = unexpected expected $ "UUID " ++ fromUUID u
@@ -107,7 +107,7 @@ withFilesUnlocked' :: ([FilePath] -> Git.Repo -> IO ([FilePath], IO Bool)) -> (F
withFilesUnlocked' typechanged a params = seekActions $
prepFiltered a unlockedfiles
where
check f = liftIO (notSymlink f) <&&>
(isJust <$> catKeyFile f <||> isJust <$> catKeyFileHEAD f)
unlockedfiles = filterM check =<< seekHelper typechanged params

@@ -165,7 +165,7 @@ withKeyOptions keyop fallbackop params = do
Just k -> go auto $ return [k]
_ -> error "Can only specify one of file names, --all, --unused, or --key"
where
go True _ = error "Cannot use --auto with --all or --unused or --key"
go False a = do
matcher <- Limit.getMatcher
seekActions $ map (process matcher) <$> a
@@ -125,7 +125,7 @@ lockDown' file = ifM crippledFileSystem
- This is not done in direct mode, because files there need to
- remain writable at all times.
-}
go tmp = do
unlessM isDirect $
freezeContent file
withTSDelta $ \delta -> liftIO $ do
@@ -134,7 +134,7 @@ lockDown' file = ifM crippledFileSystem
hClose h
nukeFile tmpfile
withhardlink delta tmpfile `catchIO` const (nohardlink delta)
nohardlink delta = do
cache <- genInodeCache file delta
return KeySource
{ keyFilename = file
@@ -207,7 +207,7 @@ finishIngestDirect key source = do
perform :: FilePath -> CommandPerform
perform file = lockDown file >>= ingest >>= go
where
go (Just key, cache) = next $ cleanup file key cache True
go (Nothing, _) = stop

{- On error, put the file back so it doesn't seem to have vanished.
@@ -56,7 +56,7 @@ seek ps = do
start :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
where
(s', downloader) = getDownloader s
bad = fromMaybe (error $ "bad url " ++ s') $
parseURI $ escapeURIString isUnescapedInURI s'
choosefile = flip fromMaybe optfile
@@ -95,8 +95,8 @@ start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
performQuvi :: Bool -> URLString -> URLString -> FilePath -> CommandPerform
performQuvi relaxed pageurl videourl file = ifAnnexed file addurl geturl
where
quviurl = setDownloader pageurl QuviDownloader
addurl key = next $ cleanup quviurl file key Nothing
geturl = next $ isJust <$> addUrlFileQuvi relaxed quviurl videourl file
#endif

@@ -189,7 +189,7 @@ download url file = do
, return Nothing
)
where
runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
Transfer.download webUUID dummykey (Just file) Transfer.forwardRetry $ const $ do
liftIO $ createDirectoryIfMissing True (parentDir tmp)
downloadUrl [url] tmp
@@ -29,7 +29,7 @@ start = do
showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
stop
where
showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v

{- The repository may not yet have a UUID; automatically initialize it
- when there's a git-annex branch available. -}
@@ -23,7 +23,7 @@ seek ps = do
to <- getOptionField toOption Remote.byNameWithUUID
from <- getOptionField fromOption Remote.byNameWithUUID
withKeyOptions
(Command.Move.startKey to from False)
(withFilesInGit $ whenAnnexed $ start to from)
ps

@@ -29,7 +29,7 @@ start (name:ws) = go =<< InitRemote.findExisting name
where
config = Logs.Remote.keyValToConfig ws

go Nothing = unknownNameError "Unknown special remote name."
go (Just (u, c)) = do
let fullconfig = config `M.union` c
t <- InitRemote.findType fullconfig
@@ -329,7 +329,7 @@ checkKeySizeOr bad key file = case Types.Key.keySize key of
checkBackend :: Backend -> Key -> Maybe FilePath -> Annex Bool
checkBackend backend key mfile = go =<< isDirect
where
go False = do
content <- calcRepo $ gitAnnexLocation key
checkBackendOr badContent backend key content
go True = maybe nocheck checkdirect mfile
@@ -47,7 +47,7 @@ guardTest = unlessM (fromMaybe False . Git.Config.isTrue <$> getConfig key "") $
, "Refusing to run fuzz tests, since " ++ keyname ++ " is not set!"
]
where
key = annexConfig "eat-my-repository"
(ConfigKey keyname) = key


@@ -257,7 +257,7 @@ existingDir = do
newFile :: IO (Maybe FuzzFile)
newFile = go (100 :: Int)
where
go 0 = return Nothing
go n = do
f <- genFuzzFile
ifM (doesnotexist (toFilePath f))
@@ -268,7 +268,7 @@ newFile = go (100 :: Int)
newDir :: FilePath -> IO (Maybe FuzzDir)
newDir parent = go (100 :: Int)
where
go 0 = return Nothing
go n = do
(FuzzDir d) <- genFuzzDir
ifM (doesnotexist (parent </> d))
@@ -48,7 +48,7 @@ start' expensivecheck from key afile = stopUnless (not <$> inAnnex key) $
stopUnless (Command.Move.fromOk src key) $
go $ Command.Move.fromPerform src False key afile
where
go a = do
showStart' "get" key afile
next a

@@ -50,8 +50,8 @@ getDuplicateMode = gen
<*> getflag cleanDuplicatesOption
<*> getflag skipDuplicatesOption
where
getflag = Annex.getFlag . optionName
gen False False False False = Default
gen True False False False = Duplicate
gen False True False False = DeDuplicate
gen False False True False = CleanDuplicates
@@ -153,7 +153,7 @@ performDownload relaxed cache todownload = case location todownload of
rundownload videourl ("." ++ Quvi.linkSuffix link) $
addUrlFileQuvi relaxed quviurl videourl
where
forced = Annex.getState Annex.force

{- Avoids downloading any urls that are already known to be
- associated with a file in the annex, unless forced. -}
@@ -192,7 +192,7 @@ performDownload relaxed cache todownload = case location todownload of
, return $ Just f
)
where
f = if n < 2
then file
else
let (d, base) = splitFileName file
@@ -94,7 +94,7 @@ perform = do
warnlocked
showEndOk

warnlocked :: SomeException -> Annex ()
warnlocked e = do
warning $ show e
warning "leaving this file as-is; correct this problem and run git annex add on it"
@@ -100,7 +100,7 @@ localInfo dir = showCustom (unwords ["info", dir]) $ do
evalStateT (mapM_ showStat stats) =<< getLocalStatInfo dir
return True
where
tostats = map (\s -> s dir)

selStats :: [Stat] -> [Stat] -> Annex [Stat]
selStats fast_stats slow_stats = do
@@ -65,7 +65,7 @@ upgradableKey backend key = isNothing (Types.Key.keySize key) || backendupgradab
perform :: FilePath -> Key -> Backend -> Backend -> CommandPerform
perform file oldkey oldbackend newbackend = go =<< genkey
where
go Nothing = stop
go (Just (newkey, knowngoodcontent))
| knowngoodcontent = finish newkey
| otherwise = stopUnless checkcontent $ finish newkey
@@ -63,7 +63,7 @@ start key = fieldTransfer Download key $ \_p ->
Nothing -> return True
Just size -> do
size' <- fromIntegral . fileSize
<$> liftIO (getFileStatus tmp)
return $ size == size'
if oksize
then case Backend.maybeLookupBackendName (Types.Key.keyBackendName key) of
@@ -76,7 +76,7 @@ start key = fieldTransfer Download key $ \_p ->
warning "recvkey: received key with wrong size; discarding"
return False
where
runfsck check = ifM (check key tmp)
( return True
, do
warning "recvkey: received key from direct mode repository seems to have changed as it was transferred; discarding"
@@ -27,7 +27,7 @@ seek = withWords start
start :: [String] -> CommandStart
start = parse
where
parse (name:[]) = go name performGet
parse (name:expr:[]) = go name $ \uuid -> do
showStart "schedile" name
performSet expr uuid
@@ -356,7 +356,7 @@ syncFile rs f k = do
handleDropsFrom locs' rs "unwanted" True k (Just f)
Nothing callCommandAction
where
wantget have = allM id
[ pure (not $ null have)
, not <$> inAnnex k
, wantGet True (Just k) (Just f)
@@ -57,7 +57,7 @@ runRequests readh writeh a = do
fileEncoding writeh
go =<< readrequests
where
go (d:rn:k:f:rest) = do
case (deserialize d, deserialize rn, deserialize k, deserialize f) of
(Just direction, Just remotename, Just key, Just file) -> do
mremote <- Remote.byName' remotename
@@ -100,7 +100,7 @@ prepareRemoveAnnexDir annexdir =
removeUnannexed :: [Key] -> Annex [Key]
removeUnannexed = go []
where
go c [] = return c
go c (k:ks) = ifM (inAnnexCheck k $ liftIO . enoughlinks)
( do
lockContent k removeAnnex
@@ -136,7 +136,7 @@ genCfg cfg descs = unlines $ intercalate [""]
(\(s, u) -> line "group" u $ unwords $ S.toList s)
(\u -> lcom $ line "group" u "")
where
grouplist = unwords $ map fromStandardGroup [minBound..]

preferredcontent = settings cfg descs cfgPreferredContentMap
[ com "Repository preferred contents"
@@ -157,7 +157,7 @@ genCfg cfg descs = unlines $ intercalate [""]
(\(s, g) -> gline g s)
(\g -> gline g "")
where
gline g value = [ unwords ["groupwanted", g, "=", value] ]
allgroups = S.unions $ stdgroups : M.elems (cfgGroupMap cfg)
stdgroups = S.fromList $ map fromStandardGroup [minBound..maxBound]

@@ -26,7 +26,7 @@ seek = withWords start
start :: [String] -> CommandStart
start = parse
where
parse (name:[]) = go name performGet
parse (name:expr:[]) = go name $ \uuid -> do
showStart "wanted" name
performSet expr uuid
@@ -52,7 +52,7 @@ insertCostAfter l pos
| otherwise =
firstsegment ++ [costBetween item nextitem ] ++ lastsegment
where
nextpos = pos + 1
maxpos = length l - 1

item = l !! pos
@@ -66,4 +66,4 @@ readProgramFile = do
)
)
where
cmd = "git-annex"
@@ -94,7 +94,7 @@ catTree :: CatFileHandle -> Ref -> IO [(FilePath, FileMode)]
catTree h treeref = go <$> catObjectDetails h treeref
where
go (Just (b, _, TreeObject)) = parsetree [] b
go _ = []

parsetree c b = case L.break (== 0) b of
(modefile, rest)
@@ -79,7 +79,7 @@ pipeWriteRead params writer repo = assertLocal repo $
writeReadProcessEnv "git" (toCommand $ gitCommandLine params repo)
(gitEnv repo) writer (Just adjusthandle)
where
adjusthandle h = do
fileEncoding h
hSetNewlineMode h noNewlineTranslation

@@ -117,7 +117,7 @@ gitCoProcessStart restartable params repo = CoProcess.start numrestarts "git"
(toCommand $ gitCommandLine params repo)
(gitEnv repo)
where
{- If a long-running git command like cat-file --batch
- crashes, it will likely start up again ok. If it keeps crashing
- 10 times, something is badly wrong. -}
numrestarts = if restartable then 10 else 0
|
||||||
fromPipe :: Repo -> String -> [CommandParam] -> IO (Either SomeException (Repo, String))
|
fromPipe :: Repo -> String -> [CommandParam] -> IO (Either SomeException (Repo, String))
|
||||||
fromPipe r cmd params = try $
|
fromPipe r cmd params = try $
|
||||||
withHandle StdoutHandle createProcessSuccess p $ \h -> do
|
withHandle StdoutHandle createProcessSuccess p $ \h -> do
|
||||||
fileEncoding h
|
fileEncoding h
|
||||||
val <- hGetContentsStrict h
|
val <- hGetContentsStrict h
|
||||||
r' <- store val r
|
r' <- store val r
|
||||||
return (r', val)
|
return (r', val)
|
||||||
|
|
|
@ -38,12 +38,12 @@ isEncrypted _ = False
|
||||||
encryptedRemote :: Repo -> Repo -> IO Repo
|
encryptedRemote :: Repo -> Repo -> IO Repo
|
||||||
encryptedRemote baserepo = go
|
encryptedRemote baserepo = go
|
||||||
where
|
where
|
||||||
go Repo { location = Url url }
|
go Repo { location = Url url }
|
||||||
| urlPrefix `isPrefixOf` u =
|
| urlPrefix `isPrefixOf` u =
|
||||||
fromRemoteLocation (drop plen u) baserepo
|
fromRemoteLocation (drop plen u) baserepo
|
||||||
| otherwise = notencrypted
|
| otherwise = notencrypted
|
||||||
where
|
where
|
||||||
u = show url
|
u = show url
|
||||||
plen = length urlPrefix
|
plen = length urlPrefix
|
||||||
go _ = notencrypted
|
go _ = notencrypted
|
||||||
notencrypted = error "not a gcrypt encrypted repository"
|
notencrypted = error "not a gcrypt encrypted repository"
|
||||||
|
@ -92,7 +92,7 @@ getParticiantList globalconfigrepo repo remotename = KeyIds $ parse $ firstJust
|
||||||
]
|
]
|
||||||
where
|
where
|
||||||
defaultkey = "gcrypt.participants"
|
defaultkey = "gcrypt.participants"
|
||||||
parse (Just "simple") = []
|
parse (Just "simple") = []
|
||||||
parse (Just l) = words l
|
parse (Just l) = words l
|
||||||
parse Nothing = []
|
parse Nothing = []
|
||||||
|
|
||||||
|
|
|
@ -44,7 +44,7 @@ lsTreeParams t = [ Params "ls-tree --full-tree -z -r --", File $ fromRef t ]
|
||||||
lsTreeFiles :: Ref -> [FilePath] -> Repo -> IO [TreeItem]
|
lsTreeFiles :: Ref -> [FilePath] -> Repo -> IO [TreeItem]
|
||||||
lsTreeFiles t fs repo = map parseLsTree <$> pipeNullSplitStrict ps repo
|
lsTreeFiles t fs repo = map parseLsTree <$> pipeNullSplitStrict ps repo
|
||||||
where
|
where
|
||||||
ps = [Params "ls-tree --full-tree -z --", File $ fromRef t] ++ map File fs
|
ps = [Params "ls-tree --full-tree -z --", File $ fromRef t] ++ map File fs
|
||||||
|
|
||||||
{- Parses a line of ls-tree output.
|
{- Parses a line of ls-tree output.
|
||||||
- (The --long format is not currently supported.) -}
|
- (The --long format is not currently supported.) -}
|
||||||
|
|
|
@ -70,7 +70,7 @@ remoteLocationIsSshUrl _ = False
|
||||||
parseRemoteLocation :: String -> Repo -> RemoteLocation
|
parseRemoteLocation :: String -> Repo -> RemoteLocation
|
||||||
parseRemoteLocation s repo = ret $ calcloc s
|
parseRemoteLocation s repo = ret $ calcloc s
|
||||||
where
|
where
|
||||||
ret v
|
ret v
|
||||||
#ifdef mingw32_HOST_OS
|
#ifdef mingw32_HOST_OS
|
||||||
| dosstyle v = RemotePath (dospath v)
|
| dosstyle v = RemotePath (dospath v)
|
||||||
#endif
|
#endif
|
||||||
|
|
|
@ -222,7 +222,7 @@ badBranches missing r = filterM isbad =<< getAllRefs r
|
||||||
getAllRefs :: Repo -> IO [Ref]
|
getAllRefs :: Repo -> IO [Ref]
|
||||||
getAllRefs r = map toref <$> dirContentsRecursive refdir
|
getAllRefs r = map toref <$> dirContentsRecursive refdir
|
||||||
where
|
where
|
||||||
refdir = localGitDir r </> "refs"
|
refdir = localGitDir r </> "refs"
|
||||||
toref = Ref . relPathDirToFile (localGitDir r)
|
toref = Ref . relPathDirToFile (localGitDir r)
|
||||||
|
|
||||||
explodePackedRefsFile :: Repo -> IO ()
|
explodePackedRefsFile :: Repo -> IO ()
|
||||||
|
@ -411,7 +411,7 @@ displayList items header
|
||||||
putStrLn header
|
putStrLn header
|
||||||
putStr $ unlines $ map (\i -> "\t" ++ i) truncateditems
|
putStr $ unlines $ map (\i -> "\t" ++ i) truncateditems
|
||||||
where
|
where
|
||||||
numitems = length items
|
numitems = length items
|
||||||
truncateditems
|
truncateditems
|
||||||
| numitems > 10 = take 10 items ++ ["(and " ++ show (numitems - 10) ++ " more)"]
|
| numitems > 10 = take 10 items ++ ["(and " ++ show (numitems - 10) ++ " more)"]
|
||||||
| otherwise = items
|
| otherwise = items
|
||||||
|
|
|
@ -21,7 +21,7 @@ instance Show GitVersion where
|
||||||
installed :: IO GitVersion
|
installed :: IO GitVersion
|
||||||
installed = normalize . extract <$> readProcess "git" ["--version"]
|
installed = normalize . extract <$> readProcess "git" ["--version"]
|
||||||
where
|
where
|
||||||
extract s = case lines s of
|
extract s = case lines s of
|
||||||
[] -> ""
|
[] -> ""
|
||||||
(l:_) -> unwords $ drop 2 $ words l
|
(l:_) -> unwords $ drop 2 $ words l
|
||||||
|
|
||||||
|
|
4
Limit.hs
4
Limit.hs
|
@@ -234,7 +234,7 @@ limitSize vs s = case readSize dataUnits s of
Nothing -> Left "bad size"
Just sz -> Right $ go sz
where
go sz _ (MatchingFile fi) = lookupFileKey fi >>= check fi sz
go sz _ (MatchingKey key) = checkkey sz key
checkkey sz key = return $ keySize key `vs` Just sz
check _ sz (Just key) = checkkey sz key
@@ -254,7 +254,7 @@ limitMetaData s = case parseMetaData s of
let cglob = compileGlob (fromMetaValue v) CaseInsensative
in Right $ const $ checkKey (check f cglob)
where
check f cglob k = not . S.null
. S.filter (matchGlob cglob . fromMetaValue)
. metaDataValues f <$> getCurrentMetaData k

@@ -148,7 +148,7 @@ gitAnnexLink file key r = do
loc <- gitAnnexLocation' key r False
return $ relPathDirToFile (parentDir absfile) loc
where
whoops = error $ "unable to normalize " ++ file

{- File used to lock a key's content. -}
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO FilePath
@@ -356,7 +356,7 @@ isLinkToAnnex s = (pathSeparator:objectDir) `isInfixOf` s
preSanitizeKeyName :: String -> String
preSanitizeKeyName = concatMap escape
where
escape c
| isAsciiUpper c || isAsciiLower c || isDigit c = [c]
| c `elem` ".-_ " = [c] -- common, assumed safe
| c `elem` "/%:" = [c] -- handled by keyFile

Logs.hs (8)

@@ -117,7 +117,7 @@ urlLogFileKey path
| ext == urlLogExt = fileKey base
| otherwise = Nothing
where
file = takeFileName path
(base, ext) = splitAt (length file - extlen) file
extlen = length urlLogExt

@@ -144,7 +144,7 @@ chunkLogFileKey path
| ext == chunkLogExt = fileKey base
| otherwise = Nothing
where
file = takeFileName path
(base, ext) = splitAt (length file - extlen) file
extlen = length chunkLogExt

@@ -176,10 +176,10 @@ prop_logs_sane dummykey = and
, expect gotOtherLog (getLogVariety $ numcopiesLog)
]
where
expect = maybe False
gotUUIDBasedLog UUIDBasedLog = True
gotUUIDBasedLog _ = False
gotNewUUIDBasedLog NewUUIDBasedLog = True
gotNewUUIDBasedLog _ = False
gotChunkLog (ChunkLog k) = k == dummykey
gotChunkLog _ = False
@@ -28,7 +28,7 @@ writeFsckResults u fsckresults = do
| S.null s -> nukeFile logfile
| otherwise -> store s t logfile
where
store s t logfile = do
createDirectoryIfMissing True (parentDir logfile)
liftIO $ viaTmp writeFile logfile $ serialize s t
serialize s t =
@@ -67,7 +67,7 @@ getCurrentMetaData k = do
return $ currentMetaData $ unionMetaData loggedmeta
(lastchanged ls loggedmeta)
where
lastchanged [] _ = emptyMetaData
lastchanged ls (MetaData currentlyset) =
let m = foldl' (flip M.union) M.empty (map genlastchanged ls)
in MetaData $
@@ -35,7 +35,7 @@ scheduleSet uuid@(UUID _) activities = do
Annex.Branch.change scheduleLog $
showLog id . changeLog ts uuid val . parseLog Just
where
val = fromScheduledActivities activities
scheduleSet NoUUID _ = error "unknown UUID; cannot modify"

scheduleMap :: Annex (M.Map UUID [ScheduledActivity])
@@ -53,7 +53,7 @@ showTransitions = unlines . map showTransitionLine . S.elems
parseTransitions :: String -> Maybe Transitions
parseTransitions = check . map parseTransitionLine . lines
where
check l
| all isJust l = Just $ S.fromList $ catMaybes l
| otherwise = Nothing

@@ -68,8 +68,8 @@ showTransitionLine (TransitionLine ts t) = unwords [show t, show ts]
parseTransitionLine :: String -> Maybe TransitionLine
parseTransitionLine s = TransitionLine <$> pdate ds <*> readish ts
where
ws = words s
ts = Prelude.head ws
ds = unwords $ Prelude.tail ws
pdate = utcTimeToPOSIXSeconds <$$> parseTime defaultTimeLocale "%s%Qs"

@@ -76,7 +76,7 @@ knownUrls = do
return $ concat r
where
geturls Nothing = return []
geturls (Just logsha) = getLog . L.unpack <$> catObject logsha

data Downloader = DefaultDownloader | QuviDownloader

@@ -101,7 +101,7 @@ byName (Just n) = either error Just <$> byName' n
byNameWithUUID :: Maybe RemoteName -> Annex (Maybe Remote)
byNameWithUUID = checkuuid <=< byName
where
checkuuid Nothing = return Nothing
checkuuid (Just r)
| uuid r == NoUUID =
if remoteAnnexIgnore (gitconfig r)
@@ -169,7 +169,7 @@ handleRequest' lck external req mp responsehandler
go
| otherwise = go
where
go = do
sendMessage lck external req
loop
loop = receiveMessage lck external responsehandler
@@ -147,7 +147,7 @@ rsyncTransport r
| ":" `isInfixOf` loc = sshtransport $ separate (== ':') loc
| otherwise = othertransport
where
loc = Git.repoLocation r
sshtransport (host, path) = do
let rsyncpath = if "/~/" `isPrefixOf` path
then drop 3 path
@@ -166,7 +166,7 @@ gCryptSetup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConf
gCryptSetup mu _ c = go $ M.lookup "gitrepo" c
where
remotename = fromJust (M.lookup "name" c)
go Nothing = error "Specify gitrepo="
go (Just gitrepo) = do
(c', _encsetup) <- encryptionSetup c
inRepo $ Git.Command.run
@@ -234,7 +234,7 @@ setupRepo gcryptid r
- create the objectDir on the remote,
- which is needed for direct rsync of objects to work.
-}
rsyncsetup = Remote.Rsync.withRsyncScratchDir $ \tmp -> do
liftIO $ createDirectoryIfMissing True $ tmp </> objectDir
(rsynctransport, rsyncurl, _) <- rsyncTransport r
let tmpconfig = tmp </> "config"
@@ -266,7 +266,7 @@ isShell r = case method of
AccessShell -> True
_ -> False
where
method = toAccessMethod $ fromMaybe "" $
remoteAnnexGCrypt $ gitconfig r

shellOrRsync :: Remote -> Annex a -> Annex a -> Annex a
@@ -352,7 +352,7 @@ checkKey r rsyncopts k
| Git.repoIsSsh (repo r) = shellOrRsync r checkshell checkrsync
| otherwise = unsupportedUrl
where
checkrsync = Remote.Rsync.checkKey (repo r) rsyncopts k
checkshell = Ssh.inAnnex (repo r) k

{- Annexed objects are hashed using lower-case directories for max
@@ -305,7 +305,7 @@ inAnnex rmt key
| Git.repoIsUrl r = checkremote
| otherwise = checklocal
where
r = repo rmt
checkhttp = do
showChecking r
ifM (Url.withUrlOptions $ \uo -> anyM (\u -> Url.checkBoth u (keySize key) uo) (keyUrls rmt key))
@@ -123,7 +123,7 @@ storeChunks u chunkconfig k f p storer checker =

loop bytesprocessed (chunk, bs) chunkkeys
| L.null chunk && numchunks > 0 = do
-- Once all chunks are successfully
-- stored, update the chunk log.
chunksStored u k (FixedSizeChunks chunksize) numchunks
return True
@@ -138,7 +138,7 @@ storeChunks u chunkconfig k f p storer checker =
)
where
numchunks = numChunks chunkkeys
{- The MeterUpdate that is passed to the action
- storing a chunk is offset, so that it reflects
- the total bytes that have already been stored
- in previous chunks. -}
@@ -290,7 +290,7 @@ retrieveChunks retriever u chunkconfig encryptor basek dest basep sink
hSeek h AbsoluteSeek startpoint
return h

{- Progress meter updating is a bit tricky: If the Retriever
- populates a file, it is responsible for updating progress
- as the file is being retrieved.
-
@@ -196,7 +196,7 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
retrieveKeyFileGen k dest p enc =
safely $ prepareretriever k $ safely . go
where
go (Just retriever) = displayprogress p k $ \p' ->
retrieveChunks retriever (uuid baser) chunkconfig
enck k dest p' (sink dest enc)
go Nothing = return False
@@ -210,7 +210,7 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp

checkPresentGen k enc = preparecheckpresent k go
where
go (Just checker) = checkPresentChunks checker (uuid baser) chunkconfig enck k
go Nothing = cantCheck baser
enck = maybe id snd enc
Some files were not shown because too many files have changed in this diff.