fix some mixed space+tab indentation
This fixes all instances of " \t" in the code base. The most common case seems to be after a "where" line; probably vim copied the two-space layout of that line. Done as a background task while listening to episode 2 of the Type Theory podcast.
parent 8f69d55f03
commit 7b50b3c057
131 changed files with 242 additions and 242 deletions
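The cleanup is mechanical: every " \t" sequence (a space immediately followed by a tab) gets collapsed to a plain tab. As an illustration only (not the tool actually used for this commit, and assuming the fix is simply to drop the stray space), a small Haskell pass over the affected files could look like this sketch:

    import System.Environment (getArgs)

    -- Collapse every " \t" (space immediately followed by a tab) into a plain
    -- tab, mirroring the substitution described in the commit message.
    fixWS :: String -> String
    fixWS (' ' : '\t' : rest) = fixWS ('\t' : rest)
    fixWS (c : rest) = c : fixWS rest
    fixWS [] = []

    -- Rewrite one file in place. The contents are forced with `length` before
    -- writing, so the lazy readFile does not clash with writeFile on the same path.
    fixFile :: FilePath -> IO ()
    fixFile f = do
        s <- readFile f
        length s `seq` writeFile f (fixWS s)

    main :: IO ()
    main = mapM_ fixFile =<< getArgs

It would be run with the offending files as arguments; finding those files (for example with git grep) is left out of the sketch.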
|
@ -454,7 +454,7 @@ handleTransitions jl localts refs = do
|
|||
ignoreRefs untransitionedrefs
|
||||
return True
|
||||
where
|
||||
getreftransition ref = do
|
||||
getreftransition ref = do
|
||||
ts <- parseTransitionsStrictly "remote" . decodeBS
|
||||
<$> catFile ref transitionsLog
|
||||
return (ref, ts)
|
||||
|
@ -470,7 +470,7 @@ ignoreRefs rs = do
|
|||
getIgnoredRefs :: Annex (S.Set Git.Ref)
|
||||
getIgnoredRefs = S.fromList . mapMaybe Git.Sha.extractSha . lines <$> content
|
||||
where
|
||||
content = do
|
||||
content = do
|
||||
f <- fromRepo gitAnnexIgnoredRefs
|
||||
liftIO $ catchDefaultIO "" $ readFile f
|
||||
|
||||
|
@ -498,7 +498,7 @@ performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
|
|||
ref <- getBranch
|
||||
commitIndex jl ref message (nub $ fullname:transitionedrefs)
|
||||
where
|
||||
message
|
||||
message
|
||||
| neednewlocalbranch && null transitionedrefs = "new branch for transition " ++ tdesc
|
||||
| otherwise = "continuing transition " ++ tdesc
|
||||
tdesc = show $ map describeTransition $ transitionList ts
|
||||
|
|
|
@ -100,10 +100,10 @@ catKey' modeguaranteed sha mode
|
|||
catLink :: Bool -> Sha -> Annex String
|
||||
catLink modeguaranteed sha = fromInternalGitPath . decodeBS <$> get
|
||||
where
|
||||
-- If the mode is not guaranteed to be correct, avoid
|
||||
-- If the mode is not guaranteed to be correct, avoid
|
||||
-- buffering the whole file content, which might be large.
|
||||
-- 8192 is enough if it really is a symlink.
|
||||
get
|
||||
get
|
||||
| modeguaranteed = catObject sha
|
||||
| otherwise = L.take 8192 <$> catObject sha
|
||||
|
||||
|
@ -120,7 +120,7 @@ catKeyChecked :: Bool -> Ref -> Annex (Maybe Key)
|
|||
catKeyChecked needhead ref@(Ref r) =
|
||||
catKey' False ref =<< findmode <$> catTree treeref
|
||||
where
|
||||
pathparts = split "/" r
|
||||
pathparts = split "/" r
|
||||
dir = intercalate "/" $ take (length pathparts - 1) pathparts
|
||||
file = fromMaybe "" $ lastMaybe pathparts
|
||||
treeref = Ref $ if needhead then "HEAD" ++ dir ++ "/" else dir ++ "/"
|
||||
|
|
|
@ -18,7 +18,7 @@ import qualified Annex
|
|||
checkIgnored :: FilePath -> Annex Bool
|
||||
checkIgnored file = go =<< checkIgnoreHandle
|
||||
where
|
||||
go Nothing = return False
|
||||
go Nothing = return False
|
||||
go (Just h) = liftIO $ Git.checkIgnored h file
|
||||
|
||||
checkIgnoreHandle :: Annex (Maybe Git.CheckIgnoreHandle)
|
||||
|
|
|
@ -456,7 +456,7 @@ removeAnnex (ContentLock key) = withObjectLoc key remove removedirect
|
|||
secureErase :: FilePath -> Annex ()
|
||||
secureErase file = maybe noop go =<< annexSecureEraseCommand <$> Annex.getGitConfig
|
||||
where
|
||||
go basecmd = void $ liftIO $
|
||||
go basecmd = void $ liftIO $
|
||||
boolSystem "sh" [Param "-c", Param $ gencmd basecmd]
|
||||
gencmd = massReplace [ ("%file", shellEscape file) ]
|
||||
|
||||
|
@ -555,7 +555,7 @@ saveState nocommit = doSideAction $ do
|
|||
downloadUrl :: [Url.URLString] -> FilePath -> Annex Bool
|
||||
downloadUrl urls file = go =<< annexWebDownloadCommand <$> Annex.getGitConfig
|
||||
where
|
||||
go Nothing = Url.withUrlOptions $ \uo ->
|
||||
go Nothing = Url.withUrlOptions $ \uo ->
|
||||
anyM (\u -> Url.download u file uo) urls
|
||||
go (Just basecmd) = liftIO $ anyM (downloadcmd basecmd) urls
|
||||
downloadcmd basecmd url =
|
||||
|
|
|
@ -347,7 +347,7 @@ toDirectGen k f = do
|
|||
(dloc:_) -> return $ Just $ fromdirect dloc
|
||||
)
|
||||
where
|
||||
fromindirect loc = do
|
||||
fromindirect loc = do
|
||||
{- Move content from annex to direct file. -}
|
||||
updateInodeCache k loc
|
||||
void $ addAssociatedFile k f
|
||||
|
|
|
@ -45,7 +45,7 @@ checkEnvironmentIO =
|
|||
ensureEnv "GIT_COMMITTER_NAME" username
|
||||
where
|
||||
#ifndef __ANDROID__
|
||||
-- existing environment is not overwritten
|
||||
-- existing environment is not overwritten
|
||||
ensureEnv var val = void $ setEnv var val False
|
||||
#else
|
||||
-- Environment setting is broken on Android, so this is dealt with
|
||||
|
@ -59,7 +59,7 @@ checkEnvironmentIO =
|
|||
ensureCommit :: Annex a -> Annex a
|
||||
ensureCommit a = either retry return =<< tryNonAsync a
|
||||
where
|
||||
retry _ = do
|
||||
retry _ = do
|
||||
name <- liftIO myUserName
|
||||
setConfig (ConfigKey "user.name") name
|
||||
setConfig (ConfigKey "user.email") name
|
||||
|
|
|
@ -106,7 +106,7 @@ tokenizeMatcher = filter (not . null ) . concatMap splitparens . words
|
|||
largeFilesMatcher :: Annex (FileMatcher Annex)
|
||||
largeFilesMatcher = go =<< annexLargeFiles <$> Annex.getGitConfig
|
||||
where
|
||||
go Nothing = return matchAll
|
||||
go Nothing = return matchAll
|
||||
go (Just expr) = do
|
||||
gm <- groupMap
|
||||
rc <- readRemoteLog
|
||||
|
|
|
@ -33,7 +33,7 @@ replaceFileOr file action rollback = do
|
|||
tmpfile <- liftIO $ setup tmpdir
|
||||
go tmpfile `catchNonAsync` (const $ rollback tmpfile)
|
||||
where
|
||||
setup tmpdir = do
|
||||
setup tmpdir = do
|
||||
(tmpfile, h) <- openTempFileWithDefaultPermissions tmpdir "tmp"
|
||||
hClose h
|
||||
return tmpfile
|
||||
|
|
|
@ -78,10 +78,10 @@ bestSocketPath abssocketfile = do
|
|||
then Just socketfile
|
||||
else Nothing
|
||||
where
|
||||
-- ssh appends a 16 char extension to the socket when setting it
|
||||
-- ssh appends a 16 char extension to the socket when setting it
|
||||
-- up, which needs to be taken into account when checking
|
||||
-- that a valid socket was constructed.
|
||||
sshgarbage = replicate (1+16) 'X'
|
||||
sshgarbage = replicate (1+16) 'X'
|
||||
|
||||
sshConnectionCachingParams :: FilePath -> [CommandParam]
|
||||
sshConnectionCachingParams socketfile =
|
||||
|
|
|
@ -69,7 +69,7 @@ runTransfer' ignorelock t file shouldretry a = do
|
|||
return False
|
||||
else do
|
||||
ok <- retry info metervar $
|
||||
bracketIO (return fd) (cleanup tfile) (const $ a meter)
|
||||
bracketIO (return fd) (cleanup tfile) (const $ a meter)
|
||||
unless ok $ recordFailedTransfer t info
|
||||
return ok
|
||||
where
|
||||
|
|
|
@ -102,7 +102,7 @@ refineView origview = checksize . calc Unchanged origview
|
|||
let (components', viewchanges) = runWriter $
|
||||
mapM (\c -> updateViewComponent c field vf) (viewComponents view)
|
||||
viewchange = if field `elem` map viewField (viewComponents origview)
|
||||
then maximum viewchanges
|
||||
then maximum viewchanges
|
||||
else Narrowing
|
||||
in (view { viewComponents = components' }, viewchange)
|
||||
| otherwise =
|
||||
|
@ -207,7 +207,7 @@ viewComponentMatcher :: ViewComponent -> (MetaData -> Maybe [MetaValue])
|
|||
viewComponentMatcher viewcomponent = \metadata ->
|
||||
matcher (currentMetaDataValues metafield metadata)
|
||||
where
|
||||
metafield = viewField viewcomponent
|
||||
metafield = viewField viewcomponent
|
||||
matcher = case viewFilter viewcomponent of
|
||||
FilterValues s -> \values -> setmatches $
|
||||
S.intersection s values
|
||||
|
@ -236,8 +236,8 @@ toViewPath = concatMap escapeslash . fromMetaValue
|
|||
fromViewPath :: FilePath -> MetaValue
|
||||
fromViewPath = toMetaValue . deescapeslash []
|
||||
where
|
||||
deescapeslash s [] = reverse s
|
||||
deescapeslash s (c:cs)
|
||||
deescapeslash s [] = reverse s
|
||||
deescapeslash s (c:cs)
|
||||
| c == pseudoSlash = case cs of
|
||||
(c':cs')
|
||||
| c' == pseudoSlash -> deescapeslash (pseudoSlash:s) cs'
|
||||
|
|
|
@ -58,7 +58,7 @@ viewedFileReuse = takeFileName
|
|||
dirFromViewedFile :: ViewedFile -> FilePath
|
||||
dirFromViewedFile = joinPath . drop 1 . sep [] ""
|
||||
where
|
||||
sep l _ [] = reverse l
|
||||
sep l _ [] = reverse l
|
||||
sep l curr (c:cs)
|
||||
| c == '%' = sep (reverse curr:l) "" cs
|
||||
| c == '\\' = case cs of
|
||||
|
|
|
@ -119,7 +119,7 @@ startDaemon assistant foreground startdelay cannotrun listenhost startbrowser =
|
|||
)
|
||||
#endif
|
||||
where
|
||||
desc
|
||||
desc
|
||||
| assistant = "assistant"
|
||||
| otherwise = "watch"
|
||||
start daemonize webappwaiter = withThreadState $ \st -> do
|
||||
|
|
|
@ -145,7 +145,7 @@ syncResultAlert' succeeded failed = makeAlertFiller (not $ null succeeded) $
|
|||
, alertHeader = Just $ tenseWords msg
|
||||
}
|
||||
where
|
||||
msg
|
||||
msg
|
||||
| null succeeded = ["Failed to sync with", showRemotes failed]
|
||||
| null failed = ["Synced with", showRemotes succeeded]
|
||||
| otherwise =
|
||||
|
|
|
@ -119,7 +119,7 @@ mergeAlert i al m = maybe updatePrune updateCombine (alertCombiner al)
|
|||
where
|
||||
bloat = M.size m' - maxAlerts
|
||||
pruneold l =
|
||||
let (f, rest) = partition (\(_, a) -> isFiller a) l
|
||||
let (f, rest) = partition (\(_, a) -> isFiller a) l
|
||||
in drop bloat f ++ rest
|
||||
updatePrune = pruneBloat $ M.filterWithKey pruneSame $
|
||||
M.insertWith' const i al m
|
||||
|
|
|
@ -65,7 +65,7 @@ calcSyncRemotes = do
|
|||
, syncingToCloudRemote = any iscloud syncdata
|
||||
}
|
||||
where
|
||||
iscloud r = not (Remote.readonly r) && Remote.availability r == Remote.GloballyAvailable
|
||||
iscloud r = not (Remote.readonly r) && Remote.availability r == Remote.GloballyAvailable
|
||||
|
||||
{- Updates the syncRemotes list from the list of all remotes in Annex state. -}
|
||||
updateSyncRemotes :: Assistant ()
|
||||
|
|
|
@ -62,7 +62,7 @@ removableRemote urlrenderer uuid = do
|
|||
<$> liftAnnex (Remote.remoteFromUUID uuid)
|
||||
mapM_ (queueremaining r) keys
|
||||
where
|
||||
queueremaining r k =
|
||||
queueremaining r k =
|
||||
queueTransferWhenSmall "remaining object in unwanted remote"
|
||||
Nothing (Transfer Download uuid k) r
|
||||
{- Scanning for keys can take a long time; do not tie up
|
||||
|
|
|
@ -20,7 +20,7 @@ newUserId :: IO UserId
|
|||
newUserId = do
|
||||
oldkeys <- secretKeys
|
||||
username <- myUserName
|
||||
let basekeyname = username ++ "'s git-annex encryption key"
|
||||
let basekeyname = username ++ "'s git-annex encryption key"
|
||||
return $ Prelude.head $ filter (\n -> M.null $ M.filter (== n) oldkeys)
|
||||
( basekeyname
|
||||
: map (\n -> basekeyname ++ show n) ([2..] :: [Int])
|
||||
|
|
|
@ -48,7 +48,7 @@ makeRsyncRemote :: RemoteName -> String -> Annex String
|
|||
makeRsyncRemote name location = makeRemote name location $ const $ void $
|
||||
go =<< Command.InitRemote.findExisting name
|
||||
where
|
||||
go Nothing = setupSpecialRemote name Rsync.remote config Nothing
|
||||
go Nothing = setupSpecialRemote name Rsync.remote config Nothing
|
||||
(Nothing, Command.InitRemote.newConfig name)
|
||||
go (Just (u, c)) = setupSpecialRemote name Rsync.remote config Nothing
|
||||
(Just u, c)
|
||||
|
|
|
@ -80,7 +80,7 @@ checkImportantNetMessages (storedclient, sentclient) = go <<~ netMessager
|
|||
queuePushInitiation :: NetMessage -> Assistant ()
|
||||
queuePushInitiation msg@(Pushing clientid stage) = do
|
||||
tv <- getPushInitiationQueue side
|
||||
liftIO $ atomically $ do
|
||||
liftIO $ atomically $ do
|
||||
r <- tryTakeTMVar tv
|
||||
case r of
|
||||
Nothing -> putTMVar tv [msg]
|
||||
|
@ -88,7 +88,7 @@ queuePushInitiation msg@(Pushing clientid stage) = do
|
|||
let !l' = msg : filter differentclient l
|
||||
putTMVar tv l'
|
||||
where
|
||||
side = pushDestinationSide stage
|
||||
side = pushDestinationSide stage
|
||||
differentclient (Pushing cid _) = cid /= clientid
|
||||
differentclient _ = True
|
||||
queuePushInitiation _ = noop
|
||||
|
|
|
@ -63,7 +63,7 @@ runRepair u mrmt destructiverepair = do
|
|||
|
||||
return ok
|
||||
where
|
||||
localrepair fsckresults = do
|
||||
localrepair fsckresults = do
|
||||
-- Stop the watcher from running while running repairs.
|
||||
changeSyncable Nothing False
|
||||
|
||||
|
@ -140,9 +140,9 @@ repairStaleGitLocks r = do
|
|||
repairStaleLocks :: [FilePath] -> Assistant ()
|
||||
repairStaleLocks lockfiles = go =<< getsizes
|
||||
where
|
||||
getsize lf = catchMaybeIO $
|
||||
getsize lf = catchMaybeIO $
|
||||
(\s -> (lf, fileSize s)) <$> getFileStatus lf
|
||||
getsizes = liftIO $ catMaybes <$> mapM getsize lockfiles
|
||||
getsizes = liftIO $ catMaybes <$> mapM getsize lockfiles
|
||||
go [] = return ()
|
||||
go l = ifM (liftIO $ null <$> Lsof.query ("--" : map fst l))
|
||||
( do
|
||||
|
|
|
@ -92,7 +92,7 @@ parseSshUrl u
|
|||
, sshCapabilities = []
|
||||
}
|
||||
where
|
||||
(user, host) = if '@' `elem` userhost
|
||||
(user, host) = if '@' `elem` userhost
|
||||
then separate (== '@') userhost
|
||||
else ("", userhost)
|
||||
fromrsync s
|
||||
|
@ -260,7 +260,7 @@ setupSshKeyPair sshkeypair sshdata = do
|
|||
fixSshKeyPairIdentitiesOnly :: IO ()
|
||||
fixSshKeyPairIdentitiesOnly = changeUserSshConfig $ unlines . go [] . lines
|
||||
where
|
||||
go c [] = reverse c
|
||||
go c [] = reverse c
|
||||
go c (l:[])
|
||||
| all (`isInfixOf` l) indicators = go (fixedline l:l:c) []
|
||||
| otherwise = go (l:c) []
|
||||
|
@ -268,7 +268,7 @@ fixSshKeyPairIdentitiesOnly = changeUserSshConfig $ unlines . go [] . lines
|
|||
| all (`isInfixOf` l) indicators && not ("IdentitiesOnly" `isInfixOf` next) =
|
||||
go (fixedline l:l:c) (next:rest)
|
||||
| otherwise = go (l:c) (next:rest)
|
||||
indicators = ["IdentityFile", "key.git-annex"]
|
||||
indicators = ["IdentityFile", "key.git-annex"]
|
||||
fixedline tmpl = takeWhile isSpace tmpl ++ "IdentitiesOnly yes"
|
||||
|
||||
{- Add StrictHostKeyChecking to any ssh config stanzas that were written
|
||||
|
|
|
@ -164,8 +164,8 @@ waitChangeTime a = waitchanges 0
|
|||
-}
|
||||
aftermaxcommit oldchanges = loop (30 :: Int)
|
||||
where
|
||||
loop 0 = continue oldchanges
|
||||
loop n = do
|
||||
loop 0 = continue oldchanges
|
||||
loop n = do
|
||||
liftAnnex noop -- ensure Annex state is free
|
||||
liftIO $ threadDelaySeconds (Seconds 1)
|
||||
changes <- getAnyChanges
|
||||
|
@ -301,7 +301,7 @@ handleAdds havelsof delayadd cs = returnWhen (null incomplete) $ do
|
|||
add change@(InProcessAddChange { keySource = ks }) =
|
||||
catchDefaultIO Nothing <~> doadd
|
||||
where
|
||||
doadd = sanitycheck ks $ do
|
||||
doadd = sanitycheck ks $ do
|
||||
(mkey, mcache) <- liftAnnex $ do
|
||||
showStart "add" $ keyFilename ks
|
||||
Command.Add.ingest $ Just ks
|
||||
|
|
|
@ -87,7 +87,7 @@ cronnerThread urlrenderer = namedThreadUnchecked "Cronner" $ do
|
|||
liftIO $ waitNotification h
|
||||
debug ["reloading changed activities"]
|
||||
go h amap' nmap'
|
||||
startactivities as lastruntimes = forM as $ \activity ->
|
||||
startactivities as lastruntimes = forM as $ \activity ->
|
||||
case connectActivityUUID activity of
|
||||
Nothing -> do
|
||||
runner <- asIO2 (sleepingActivityThread urlrenderer)
|
||||
|
@ -108,8 +108,8 @@ cronnerThread urlrenderer = namedThreadUnchecked "Cronner" $ do
|
|||
sleepingActivityThread :: UrlRenderer -> ScheduledActivity -> Maybe LocalTime -> Assistant ()
|
||||
sleepingActivityThread urlrenderer activity lasttime = go lasttime =<< getnexttime lasttime
|
||||
where
|
||||
getnexttime = liftIO . nextTime schedule
|
||||
go _ Nothing = debug ["no scheduled events left for", desc]
|
||||
getnexttime = liftIO . nextTime schedule
|
||||
go _ Nothing = debug ["no scheduled events left for", desc]
|
||||
go l (Just (NextTimeExactly t)) = waitrun l t Nothing
|
||||
go l (Just (NextTimeWindow windowstart windowend)) =
|
||||
waitrun l windowstart (Just windowend)
|
||||
|
@ -129,7 +129,7 @@ sleepingActivityThread urlrenderer activity lasttime = go lasttime =<< getnextti
|
|||
go l =<< getnexttime l
|
||||
else run nowt
|
||||
where
|
||||
tolate nowt tz = case mmaxt of
|
||||
tolate nowt tz = case mmaxt of
|
||||
Just maxt -> nowt > maxt
|
||||
-- allow the job to start 10 minutes late
|
||||
Nothing ->diffUTCTime
|
||||
|
|
|
@ -258,7 +258,7 @@ checkOldUnused :: UrlRenderer -> Assistant ()
|
|||
checkOldUnused urlrenderer = go =<< annexExpireUnused <$> liftAnnex Annex.getGitConfig
|
||||
where
|
||||
go (Just Nothing) = noop
|
||||
go (Just (Just expireunused)) = expireUnused (Just expireunused)
|
||||
go (Just (Just expireunused)) = expireUnused (Just expireunused)
|
||||
go Nothing = maybe noop prompt =<< describeUnusedWhenBig
|
||||
|
||||
prompt msg =
|
||||
|
|
|
@ -51,9 +51,9 @@ upgradeWatcherThread urlrenderer = namedThread "UpgradeWatcher" $ do
|
|||
let depth = length (splitPath dir) + 1
|
||||
let nosubdirs f = length (splitPath f) == depth
|
||||
void $ liftIO $ watchDir dir nosubdirs False hooks (startup mvar)
|
||||
-- Ignore bogus events generated during the startup scan.
|
||||
-- Ignore bogus events generated during the startup scan.
|
||||
-- We ask the watcher to not generate them, but just to be safe..
|
||||
startup mvar scanner = do
|
||||
startup mvar scanner = do
|
||||
r <- scanner
|
||||
void $ swapMVar mvar Started
|
||||
return r
|
||||
|
|
|
@ -39,7 +39,7 @@ upgraderThread urlrenderer = namedThread "Upgrader" $
|
|||
h <- liftIO . newNotificationHandle False . networkConnectedNotifier =<< getDaemonStatus
|
||||
go h =<< liftIO getCurrentTime
|
||||
where
|
||||
{- Wait for a network connection event. Then see if it's been
|
||||
{- Wait for a network connection event. Then see if it's been
|
||||
- half a day since the last upgrade check. If so, proceed with
|
||||
- check. -}
|
||||
go h lastchecked = do
|
||||
|
|
|
@ -192,7 +192,7 @@ runHandler handler file filestatus = void $ do
|
|||
liftAnnex Annex.Queue.flushWhenFull
|
||||
recordChange change
|
||||
where
|
||||
normalize f
|
||||
normalize f
|
||||
| "./" `isPrefixOf` file = drop 2 f
|
||||
| otherwise = f
|
||||
|
||||
|
@ -246,7 +246,7 @@ onAddDirect symlinkssupported matcher file fs = do
|
|||
debug ["add direct", file]
|
||||
add matcher file
|
||||
where
|
||||
{- On a filesystem without symlinks, we'll get changes for regular
|
||||
{- On a filesystem without symlinks, we'll get changes for regular
|
||||
- files that git uses to stand-in for symlinks. Detect when
|
||||
- this happens, and stage the symlink, rather than annexing the
|
||||
- file. -}
|
||||
|
@ -276,7 +276,7 @@ onAddSymlink isdirect file filestatus = unlessIgnored file $ do
|
|||
onAddSymlink' :: Maybe String -> Maybe Key -> Bool -> Handler
|
||||
onAddSymlink' linktarget mk isdirect file filestatus = go mk
|
||||
where
|
||||
go (Just key) = do
|
||||
go (Just key) = do
|
||||
when isdirect $
|
||||
liftAnnex $ void $ addAssociatedFile key file
|
||||
link <- liftAnnex $ inRepo $ gitAnnexLink file key
|
||||
|
|
|
@ -97,7 +97,7 @@ webAppThread assistantdata urlrenderer noannex cannotrun postfirstrun listenhost
|
|||
urlfile <- getAnnex' $ fromRepo gitAnnexUrlFile
|
||||
go tlssettings addr webapp htmlshim (Just urlfile)
|
||||
where
|
||||
-- The webapp thread does not wait for the startupSanityCheckThread
|
||||
-- The webapp thread does not wait for the startupSanityCheckThread
|
||||
-- to finish, so that the user interface remains responsive while
|
||||
-- that's going on.
|
||||
thread = namedThreadUnchecked "WebApp"
|
||||
|
|
|
@ -131,7 +131,7 @@ xmppClient urlrenderer d creds xmppuuid =
|
|||
{- XEP-0199 says that the server will respond with either
|
||||
- a ping response or an error message. Either will
|
||||
- cause traffic, so good enough. -}
|
||||
pingstanza = xmppPing selfjid
|
||||
pingstanza = xmppPing selfjid
|
||||
|
||||
handlemsg selfjid (PresenceMessage p) = do
|
||||
void $ inAssistant $
|
||||
|
|
|
@ -34,7 +34,7 @@ xmppReceivePackThread = pusherThread "XMPPReceivePack" ReceivePack
|
|||
pusherThread :: String -> PushSide -> UrlRenderer -> NamedThread
|
||||
pusherThread threadname side urlrenderer = namedThread threadname $ go Nothing
|
||||
where
|
||||
go lastpushedto = do
|
||||
go lastpushedto = do
|
||||
msg <- waitPushInitiation side $ selectNextPush lastpushedto
|
||||
debug ["started running push", logNetMessage msg]
|
||||
|
||||
|
@ -78,4 +78,4 @@ selectNextPush lastpushedto l = go [] l
|
|||
(Pushing clientid _)
|
||||
| Just clientid /= lastpushedto -> (m, rejected ++ ms)
|
||||
_ -> go (m:rejected) ms
|
||||
go [] [] = undefined
|
||||
go [] [] = undefined
|
||||
|
|
|
@ -92,7 +92,7 @@ queueTransfersMatching matching reason schedule k f direction
|
|||
filterM (wantSend True (Just k) f . Remote.uuid) $
|
||||
filter (\r -> not (inset s r || Remote.readonly r)) rs
|
||||
where
|
||||
locs = S.fromList <$> Remote.keyLocations k
|
||||
locs = S.fromList <$> Remote.keyLocations k
|
||||
inset s r = S.member (Remote.uuid r) s
|
||||
gentransfer r = Transfer
|
||||
{ transferDirection = direction
|
||||
|
|
|
@ -85,7 +85,7 @@ logNetMessage (Pushing c stage) = show $ Pushing (logClientID c) $
|
|||
SendPackOutput n _ -> SendPackOutput n elided
|
||||
s -> s
|
||||
where
|
||||
elided = T.encodeUtf8 $ T.pack "<elided>"
|
||||
elided = T.encodeUtf8 $ T.pack "<elided>"
|
||||
logNetMessage (PairingNotification stage c uuid) =
|
||||
show $ PairingNotification stage (logClientID c) uuid
|
||||
logNetMessage m = show m
|
||||
|
|
|
@ -78,7 +78,7 @@ upgradedEnv = "GIT_ANNEX_UPGRADED"
|
|||
startDistributionDownload :: GitAnnexDistribution -> Assistant ()
|
||||
startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO oldVersionLocation
|
||||
where
|
||||
go Nothing = debug ["Skipping redundant upgrade"]
|
||||
go Nothing = debug ["Skipping redundant upgrade"]
|
||||
go (Just dest) = do
|
||||
liftAnnex $ setUrlPresent k u
|
||||
hook <- asIO1 $ distributionDownloadComplete d dest cleanup
|
||||
|
|
|
@ -207,7 +207,7 @@ makeAWSRemote maker remotetype defaultgroup (AWSCreds ak sk) name config =
|
|||
setupCloudRemote defaultgroup Nothing $
|
||||
maker hostname remotetype (Just creds) config
|
||||
where
|
||||
creds = (T.unpack ak, T.unpack sk)
|
||||
creds = (T.unpack ak, T.unpack sk)
|
||||
{- AWS services use the remote name as the basis for a host
|
||||
- name, so filter it to contain valid characters. -}
|
||||
hostname = case filter isAlphaNum name of
|
||||
|
|
|
@ -36,7 +36,7 @@ notCurrentRepo uuid a = do
|
|||
then redirect DeleteCurrentRepositoryR
|
||||
else go =<< liftAnnex (Remote.remoteFromUUID uuid)
|
||||
where
|
||||
go Nothing = error "Unknown UUID"
|
||||
go Nothing = error "Unknown UUID"
|
||||
go (Just _) = a
|
||||
|
||||
handleXMPPRemoval :: UUID -> Handler Html -> Handler Html
|
||||
|
|
|
@ -136,7 +136,7 @@ setRepoConfig uuid mremote oldc newc = do
|
|||
when syncableChanged $
|
||||
liftAssistant $ changeSyncable mremote (repoSyncable newc)
|
||||
where
|
||||
syncableChanged = repoSyncable oldc /= repoSyncable newc
|
||||
syncableChanged = repoSyncable oldc /= repoSyncable newc
|
||||
associatedDirectoryChanged = repoAssociatedDirectory oldc /= repoAssociatedDirectory newc
|
||||
groupChanged = repoGroup oldc /= repoGroup newc
|
||||
nameChanged = isJust mremote && legalName oldc /= legalName newc
|
||||
|
@ -255,7 +255,7 @@ getGitRepoInfo r = do
|
|||
|
||||
getRepoEncryption :: Maybe Remote.Remote -> Maybe Remote.RemoteConfig -> Widget
|
||||
getRepoEncryption (Just _) (Just c) = case extractCipher c of
|
||||
Nothing ->
|
||||
Nothing ->
|
||||
[whamlet|not encrypted|]
|
||||
(Just (SharedCipher _)) ->
|
||||
[whamlet|encrypted: encryption key stored in git repository|]
|
||||
|
@ -274,7 +274,7 @@ getUpgradeRepositoryR :: RepoId -> Handler ()
|
|||
getUpgradeRepositoryR (RepoUUID _) = redirect DashboardR
|
||||
getUpgradeRepositoryR r = go =<< liftAnnex (repoIdRemote r)
|
||||
where
|
||||
go Nothing = redirect DashboardR
|
||||
go Nothing = redirect DashboardR
|
||||
go (Just rmt) = do
|
||||
liftIO fixSshKeyPairIdentitiesOnly
|
||||
liftAnnex $ setConfig
|
||||
|
|
|
@ -60,7 +60,7 @@ runFsckForm new activity = case activity of
|
|||
ScheduledSelfFsck s d -> go s d =<< liftAnnex getUUID
|
||||
ScheduledRemoteFsck ru s d -> go s d ru
|
||||
where
|
||||
go (Schedule r t) d ru = do
|
||||
go (Schedule r t) d ru = do
|
||||
u <- liftAnnex getUUID
|
||||
repolist <- liftAssistant (getrepolist ru)
|
||||
runFormPostNoToken $ \msg -> do
|
||||
|
|
|
@ -201,7 +201,7 @@ $if (not exists)
|
|||
have been uploaded, and the Internet Archive has processed them.
|
||||
|]
|
||||
where
|
||||
bucket = fromMaybe "" $ M.lookup "bucket" c
|
||||
bucket = fromMaybe "" $ M.lookup "bucket" c
|
||||
#ifdef WITH_S3
|
||||
url = S3.iaItemUrl bucket
|
||||
#else
|
||||
|
|
|
@ -175,7 +175,7 @@ getAndroidCameraRepositoryR :: Handler ()
|
|||
getAndroidCameraRepositoryR =
|
||||
startFullAssistant "/sdcard/DCIM" SourceGroup $ Just addignore
|
||||
where
|
||||
addignore = do
|
||||
addignore = do
|
||||
liftIO $ unlessM (doesFileExist ".gitignore") $
|
||||
writeFile ".gitignore" ".thumbnails"
|
||||
void $ inRepo $
|
||||
|
@ -274,8 +274,8 @@ getConfirmAddDriveR drive = ifM (liftIO $ probeRepoExists dir)
|
|||
, newrepo
|
||||
)
|
||||
where
|
||||
dir = removableDriveRepository drive
|
||||
newrepo = do
|
||||
dir = removableDriveRepository drive
|
||||
newrepo = do
|
||||
secretkeys <- sortBy (comparing snd) . M.toList
|
||||
<$> liftIO secretKeys
|
||||
page "Encrypt repository?" (Just Configuration) $
|
||||
|
@ -338,7 +338,7 @@ getFinishAddDriveR drive = go
|
|||
liftAnnex $ defaultStandardGroup u TransferGroup
|
||||
liftAssistant $ immediateSyncRemote r
|
||||
redirect $ EditNewRepositoryR u
|
||||
mountpoint = T.unpack (mountPoint drive)
|
||||
mountpoint = T.unpack (mountPoint drive)
|
||||
dir = removableDriveRepository drive
|
||||
remotename = takeFileName mountpoint
|
||||
|
||||
|
|
|
@ -72,7 +72,7 @@ getStartXMPPPairSelfR :: Handler Html
|
|||
#ifdef WITH_XMPP
|
||||
getStartXMPPPairSelfR = go =<< liftAnnex getXMPPCreds
|
||||
where
|
||||
go Nothing = do
|
||||
go Nothing = do
|
||||
-- go get XMPP configured, then come back
|
||||
redirect XMPPConfigForPairSelfR
|
||||
go (Just creds) = do
|
||||
|
|
|
@ -193,7 +193,7 @@ postEnableSshGCryptR :: UUID -> Handler Html
|
|||
postEnableSshGCryptR u = whenGcryptInstalled $
|
||||
enableSshRemote getsshinput enableRsyncNetGCrypt enablegcrypt u
|
||||
where
|
||||
enablegcrypt sshdata _ = prepSsh False sshdata $ \sshdata' ->
|
||||
enablegcrypt sshdata _ = prepSsh False sshdata $ \sshdata' ->
|
||||
sshConfigurator $
|
||||
checkExistingGCrypt sshdata' $
|
||||
error "Expected to find an encrypted git repository, but did not."
|
||||
|
@ -232,7 +232,7 @@ enableSshRemote getsshinput rsyncnetsetup genericsetup u = do
|
|||
_ -> showform form enctype UntestedServer
|
||||
_ -> redirect AddSshR
|
||||
where
|
||||
unmangle sshdata = sshdata
|
||||
unmangle sshdata = sshdata
|
||||
{ sshHostName = T.pack $ unMangleSshHostName $
|
||||
T.unpack $ sshHostName sshdata
|
||||
}
|
||||
|
@ -423,7 +423,7 @@ getConfirmSshR sshdata u
|
|||
secretkeys <- sortBy (comparing snd) . M.toList
|
||||
<$> liftIO secretKeys
|
||||
$(widgetFile "configurators/ssh/confirm")
|
||||
handleexisting Nothing = sshConfigurator $
|
||||
handleexisting Nothing = sshConfigurator $
|
||||
-- Not a UUID we know, so prompt about combining.
|
||||
$(widgetFile "configurators/ssh/combine")
|
||||
handleexisting (Just _) = prepSsh False sshdata $ \sshdata' -> do
|
||||
|
@ -471,7 +471,7 @@ checkExistingGCrypt sshdata nope = checkGCryptRepoEncryption repourl nope nope $
|
|||
combineExistingGCrypt sshdata u
|
||||
Nothing -> error "The location contains a gcrypt repository that is not a git-annex special remote. This is not supported."
|
||||
where
|
||||
repourl = genSshUrl sshdata
|
||||
repourl = genSshUrl sshdata
|
||||
|
||||
{- Enables an existing gcrypt special remote. -}
|
||||
enableGCrypt :: SshData -> RemoteName -> Handler Html
|
||||
|
@ -488,7 +488,7 @@ combineExistingGCrypt sshdata u = do
|
|||
reponame <- liftAnnex $ getGCryptRemoteName u repourl
|
||||
enableGCrypt sshdata reponame
|
||||
where
|
||||
repourl = genSshUrl sshdata
|
||||
repourl = genSshUrl sshdata
|
||||
|
||||
{- Sets up remote repository for ssh, or directory for rsync. -}
|
||||
prepSsh :: Bool -> SshData -> (SshData -> Handler Html) -> Handler Html
|
||||
|
@ -579,7 +579,7 @@ postAddRsyncNetR = do
|
|||
"That is not a rsync.net host name."
|
||||
_ -> showform UntestedServer
|
||||
where
|
||||
inpage = page "Add a Rsync.net repository" (Just Configuration)
|
||||
inpage = page "Add a Rsync.net repository" (Just Configuration)
|
||||
hostnamefield = textField `withExpandableNote` ("Help", help)
|
||||
help = [whamlet|
|
||||
<div>
|
||||
|
|
|
@ -150,7 +150,7 @@ getXMPPRemotes :: Assistant [(JID, Remote)]
|
|||
getXMPPRemotes = catMaybes . map pair . filter Remote.isXMPPRemote . syncGitRemotes
|
||||
<$> getDaemonStatus
|
||||
where
|
||||
pair r = maybe Nothing (\jid -> Just (jid, r)) $
|
||||
pair r = maybe Nothing (\jid -> Just (jid, r)) $
|
||||
parseJID $ getXMPPClientID r
|
||||
|
||||
data XMPPForm = XMPPForm
|
||||
|
@ -197,8 +197,8 @@ testXMPP creds = do
|
|||
}
|
||||
_ -> return $ Left $ intercalate "; " $ map formatlog bad
|
||||
where
|
||||
formatlog ((h, p), Left e) = "host " ++ h ++ ":" ++ showport p ++ " failed: " ++ show e
|
||||
formatlog _ = ""
|
||||
formatlog ((h, p), Left e) = "host " ++ h ++ ":" ++ showport p ++ " failed: " ++ show e
|
||||
formatlog _ = ""
|
||||
|
||||
showport (PortNumber n) = show n
|
||||
showport (Service s) = s
|
||||
|
|
|
@ -129,7 +129,7 @@ withExpandableNote field (toggle, note) = withNote field $ [whamlet|
|
|||
^{note}
|
||||
|]
|
||||
where
|
||||
ident = "toggle_" ++ toggle
|
||||
ident = "toggle_" ++ toggle
|
||||
|
||||
{- Adds a check box to an AForm to control encryption. -}
|
||||
#if MIN_VERSION_yesod(1,2,0)
|
||||
|
|
|
@ -196,7 +196,7 @@ repoList reposelector
|
|||
_ -> Nothing
|
||||
_ -> Nothing
|
||||
where
|
||||
getconfig k = M.lookup k =<< M.lookup u m
|
||||
getconfig k = M.lookup k =<< M.lookup u m
|
||||
val iscloud r = Just (iscloud, (RepoUUID u, DisabledRepoActions $ r u))
|
||||
list l = do
|
||||
cc <- currentlyConnectedRemotes <$> liftAssistant getDaemonStatus
|
||||
|
@ -232,13 +232,13 @@ getRepositoriesReorderR = do
|
|||
liftAssistant updateSyncRemotes
|
||||
where
|
||||
go _ Nothing = noop
|
||||
go list (Just remote) = do
|
||||
go list (Just remote) = do
|
||||
rs <- catMaybes <$> mapM repoIdRemote list
|
||||
forM_ (reorderCosts remote rs) $ \(r, newcost) ->
|
||||
when (Remote.cost r /= newcost) $
|
||||
setRemoteCost (Remote.repo r) newcost
|
||||
void remoteListRefresh
|
||||
fromjs = fromMaybe (RepoUUID NoUUID) . readish . T.unpack
|
||||
fromjs = fromMaybe (RepoUUID NoUUID) . readish . T.unpack
|
||||
|
||||
reorderCosts :: Remote -> [Remote] -> [(Remote, Cost)]
|
||||
reorderCosts remote rs = zip rs'' (insertCostAfter costs i)
|
||||
|
|
|
@ -195,7 +195,7 @@ decodeMessage m = decode =<< gitAnnexTagInfo m
|
|||
<*> a i
|
||||
gen c i = c . toUUID <$> headMaybe (words (T.unpack (tagValue i)))
|
||||
seqgen c i = do
|
||||
packet <- decodeTagContent $ tagElement i
|
||||
packet <- decodeTagContent $ tagElement i
|
||||
let seqnum = fromMaybe 0 $ readish $ T.unpack $ tagValue i
|
||||
return $ c seqnum packet
|
||||
shasgen c i = do
|
||||
|
|
|
@ -152,7 +152,7 @@ xmppPush cid gitpush = do
|
|||
|
||||
fromxmpp outh controlh = withPushMessagesInSequence cid SendPack handlemsg
|
||||
where
|
||||
handlemsg (Just (Pushing _ (ReceivePackOutput _ b))) =
|
||||
handlemsg (Just (Pushing _ (ReceivePackOutput _ b))) =
|
||||
liftIO $ writeChunk outh b
|
||||
handlemsg (Just (Pushing _ (ReceivePackDone exitcode))) =
|
||||
liftIO $ do
|
||||
|
@ -266,7 +266,7 @@ xmppReceivePack cid = do
|
|||
relaytoxmpp seqnum' outh
|
||||
relayfromxmpp inh = withPushMessagesInSequence cid ReceivePack handlemsg
|
||||
where
|
||||
handlemsg (Just (Pushing _ (SendPackOutput _ b))) =
|
||||
handlemsg (Just (Pushing _ (SendPackOutput _ b))) =
|
||||
liftIO $ writeChunk inh b
|
||||
handlemsg (Just _) = noop
|
||||
handlemsg Nothing = do
|
||||
|
@ -337,7 +337,7 @@ handlePushNotice (Pushing cid (CanPush theiruuid shas)) =
|
|||
, go
|
||||
)
|
||||
where
|
||||
go = do
|
||||
go = do
|
||||
u <- liftAnnex getUUID
|
||||
sendNetMessage $ Pushing cid (PushRequest u)
|
||||
haveall l = liftAnnex $ not <$> anyM donthave l
|
||||
|
@ -359,9 +359,9 @@ writeChunk h b = do
|
|||
withPushMessagesInSequence :: ClientID -> PushSide -> (Maybe NetMessage -> Assistant ()) -> Assistant ()
|
||||
withPushMessagesInSequence cid side a = loop 0
|
||||
where
|
||||
loop seqnum = do
|
||||
loop seqnum = do
|
||||
m <- timeout xmppTimeout <~> waitInbox cid side
|
||||
let go s = a m >> loop s
|
||||
let go s = a m >> loop s
|
||||
let next = seqnum + 1
|
||||
case extractSequence =<< m of
|
||||
Just seqnum'
|
||||
|
|
|
@ -144,7 +144,7 @@ trivialMigrate oldkey newbackend
|
|||
hashFile :: Hash -> FilePath -> Integer -> Annex String
|
||||
hashFile hash file filesize = liftIO $ go hash
|
||||
where
|
||||
go (SHAHash hashsize) = case shaHasher hashsize filesize of
|
||||
go (SHAHash hashsize) = case shaHasher hashsize filesize of
|
||||
Left sha -> sha <$> L.readFile file
|
||||
Right command ->
|
||||
either error return
|
||||
|
|
|
@ -58,13 +58,13 @@ parseGccLink = do
|
|||
collect2params <- restOfLine
|
||||
return $ CmdParams (path ++ collectcmd) (escapeDosPaths collect2params) cenv
|
||||
where
|
||||
collectcmd = "collect2.exe"
|
||||
collectgccenv = "COLLECT_GCC"
|
||||
collectcmd = "collect2.exe"
|
||||
collectgccenv = "COLLECT_GCC"
|
||||
collectltoenv = "COLLECT_LTO_WRAPPER"
|
||||
pathenv = "COMPILER_PATH"
|
||||
libpathenv = "LIBRARY_PATH"
|
||||
optenv = "COLLECT_GCC_OPTIONS"
|
||||
collectenv = do
|
||||
optenv = "COLLECT_GCC_OPTIONS"
|
||||
collectenv = do
|
||||
void $ many1 $ do
|
||||
notFollowedBy $ string collectgccenv
|
||||
restOfLine
|
||||
|
@ -148,7 +148,7 @@ runAtFile p s f extraparams = do
|
|||
removeFile f
|
||||
return out
|
||||
where
|
||||
c = case parse p "" s of
|
||||
c = case parse p "" s of
|
||||
Left e -> error $
|
||||
(show e) ++
|
||||
"\n<<<\n" ++ s ++ "\n>>>"
|
||||
|
|
|
@ -86,7 +86,7 @@ number = read <$> many1 digit
|
|||
coordsParser :: Parser (Coord, Coord)
|
||||
coordsParser = (try singleline <|> try weird <|> multiline) <?> "Coords"
|
||||
where
|
||||
singleline = do
|
||||
singleline = do
|
||||
line <- number
|
||||
void $ char ':'
|
||||
startcol <- number
|
||||
|
@ -151,7 +151,7 @@ spliceParser = do
|
|||
(unlines codelines)
|
||||
splicetype
|
||||
where
|
||||
tosplicetype "declarations" = SpliceDeclaration
|
||||
tosplicetype "declarations" = SpliceDeclaration
|
||||
tosplicetype "expression" = SpliceExpression
|
||||
tosplicetype s = error $ "unknown splice type: " ++ s
|
||||
|
||||
|
@ -177,7 +177,7 @@ spliceParser = do
|
|||
splicesExtractor :: Parser [Splice]
|
||||
splicesExtractor = rights <$> many extract
|
||||
where
|
||||
extract = try (Right <$> spliceParser) <|> (Left <$> compilerJunkLine)
|
||||
extract = try (Right <$> spliceParser) <|> (Left <$> compilerJunkLine)
|
||||
compilerJunkLine = restOfLine
|
||||
|
||||
{- Modifies the source file, expanding the splices, which all must
|
||||
|
@ -214,8 +214,8 @@ applySplices destdir imports splices@(first:_) = do
|
|||
hPutStr h newcontent
|
||||
hClose h
|
||||
where
|
||||
expand lls [] = lls
|
||||
expand lls (s:rest)
|
||||
expand lls [] = lls
|
||||
expand lls (s:rest)
|
||||
| isExpressionSplice s = expand (expandExpressionSplice s lls) rest
|
||||
| otherwise = expand (expandDeclarationSplice s lls) rest
|
||||
|
||||
|
@ -291,12 +291,12 @@ expandExpressionSplice sp lls = concat [before, spliced:padding, end]
|
|||
-- ie: bar $(splice)
|
||||
| otherwise = s ++ " $ "
|
||||
where
|
||||
s' = filter (not . isSpace) s
|
||||
s' = filter (not . isSpace) s
|
||||
|
||||
findindent = length . takeWhile isSpace
|
||||
addindent n = unlines . map (i ++) . lines
|
||||
where
|
||||
i = take n $ repeat ' '
|
||||
i = take n $ repeat ' '
|
||||
|
||||
{- Tweaks code output by GHC in splices to actually build. Yipes. -}
|
||||
mangleCode :: String -> String
|
||||
|
@ -315,7 +315,7 @@ mangleCode = flip_colon
|
|||
. remove_package_version
|
||||
. emptylambda
|
||||
where
|
||||
{- Lambdas are often output without parens around them.
|
||||
{- Lambdas are often output without parens around them.
|
||||
- This breaks when the lambda is immediately applied to a
|
||||
- parameter.
|
||||
-
|
||||
|
@ -409,7 +409,7 @@ mangleCode = flip_colon
|
|||
|
||||
restofline = manyTill (noneOf "\n") newline
|
||||
|
||||
{- For some reason, GHC sometimes doesn't like the multiline
|
||||
{- For some reason, GHC sometimes doesn't like the multiline
|
||||
- strings it creates. It seems to get hung up on \{ at the
|
||||
- start of a new line sometimes, wanting it to not be escaped.
|
||||
-
|
||||
|
@ -646,7 +646,7 @@ parsecAndReplace p s = case parse find "" s of
|
|||
Left _e -> s
|
||||
Right l -> concatMap (either return id) l
|
||||
where
|
||||
find :: Parser [Either Char String]
|
||||
find :: Parser [Either Char String]
|
||||
find = many $ try (Right <$> p) <|> (Left <$> anyChar)
|
||||
|
||||
main :: IO ()
|
||||
|
@ -654,7 +654,7 @@ main = go =<< getArgs
|
|||
where
|
||||
go (destdir:log:header:[]) = run destdir log (Just header)
|
||||
go (destdir:log:[]) = run destdir log Nothing
|
||||
go _ = error "usage: EvilSplicer destdir logfile [headerfile]"
|
||||
go _ = error "usage: EvilSplicer destdir logfile [headerfile]"
|
||||
|
||||
run destdir log mheader = do
|
||||
r <- parseFromFile splicesExtractor log
|
||||
|
|
|
@ -103,7 +103,7 @@ makeInstaller gitannex license extrabins launchers = nsis $ do
|
|||
name "git-annex"
|
||||
outFile $ str installer
|
||||
{- Installing into the same directory as git avoids needing to modify
|
||||
- path myself, since the git installer already does it. -}
|
||||
- path myself, since the git installer already does it. -}
|
||||
installDir gitInstallDir
|
||||
requestExecutionLevel Admin
|
||||
|
||||
|
|
|
@ -112,7 +112,7 @@ expand_rpath libs replacement_libs cmd
|
|||
return $ map (replacem m) libs
|
||||
| otherwise = return libs
|
||||
where
|
||||
probe c = "DYLD_PRINT_RPATHS=1 " ++ c ++ " --getting-rpath-dummy-option 2>&1 | grep RPATH"
|
||||
probe c = "DYLD_PRINT_RPATHS=1 " ++ c ++ " --getting-rpath-dummy-option 2>&1 | grep RPATH"
|
||||
parse s = case words s of
|
||||
("RPATH":"successful":"expansion":"of":old:"to:":new:[]) ->
|
||||
Just (old, new)
|
||||
|
|
|
@ -35,7 +35,7 @@ noDaemonRunning :: Command -> Command
|
|||
noDaemonRunning = addCheck $ whenM (isJust <$> daemonpid) $
|
||||
error "You cannot run this command while git-annex watch or git-annex assistant is running."
|
||||
where
|
||||
daemonpid = liftIO . checkDaemon =<< fromRepo gitAnnexPidFile
|
||||
daemonpid = liftIO . checkDaemon =<< fromRepo gitAnnexPidFile
|
||||
|
||||
dontCheck :: CommandCheck -> Command -> Command
|
||||
dontCheck check cmd = mutateCheck cmd $ \c -> filter (/= check) c
|
||||
|
|
|
@ -58,7 +58,7 @@ dispatch fuzzyok allargs allcmds commonoptions fields header getgitrepo = do
|
|||
shutdown $ cmdnocommit cmd
|
||||
go _flags params (Left e) = do
|
||||
when fuzzy $
|
||||
autocorrect =<< Git.Config.global
|
||||
autocorrect =<< Git.Config.global
|
||||
maybe (throw e) (\a -> a params) (cmdnorepo cmd)
|
||||
err msg = msg ++ "\n\n" ++ usage header allcmds
|
||||
cmd = Prelude.head cmds
|
||||
|
|
|
@ -66,7 +66,7 @@ options = commonOptions ++
|
|||
check u = unexpectedUUID expected u
|
||||
checkGCryptUUID expected = check =<< getGCryptUUID True =<< gitRepo
|
||||
where
|
||||
check (Just u) | u == toUUID expected = noop
|
||||
check (Just u) | u == toUUID expected = noop
|
||||
check Nothing = unexpected expected "uninitialized repository"
|
||||
check (Just u) = unexpectedUUID expected u
|
||||
unexpectedUUID expected u = unexpected expected $ "UUID " ++ fromUUID u
|
||||
|
|
|
@ -107,7 +107,7 @@ withFilesUnlocked' :: ([FilePath] -> Git.Repo -> IO ([FilePath], IO Bool)) -> (F
|
|||
withFilesUnlocked' typechanged a params = seekActions $
|
||||
prepFiltered a unlockedfiles
|
||||
where
|
||||
check f = liftIO (notSymlink f) <&&>
|
||||
check f = liftIO (notSymlink f) <&&>
|
||||
(isJust <$> catKeyFile f <||> isJust <$> catKeyFileHEAD f)
|
||||
unlockedfiles = filterM check =<< seekHelper typechanged params
|
||||
|
||||
|
@ -165,7 +165,7 @@ withKeyOptions keyop fallbackop params = do
|
|||
Just k -> go auto $ return [k]
|
||||
_ -> error "Can only specify one of file names, --all, --unused, or --key"
|
||||
where
|
||||
go True _ = error "Cannot use --auto with --all or --unused or --key"
|
||||
go True _ = error "Cannot use --auto with --all or --unused or --key"
|
||||
go False a = do
|
||||
matcher <- Limit.getMatcher
|
||||
seekActions $ map (process matcher) <$> a
|
||||
|
|
|
@ -125,7 +125,7 @@ lockDown' file = ifM crippledFileSystem
|
|||
- This is not done in direct mode, because files there need to
|
||||
- remain writable at all times.
|
||||
-}
|
||||
go tmp = do
|
||||
go tmp = do
|
||||
unlessM isDirect $
|
||||
freezeContent file
|
||||
withTSDelta $ \delta -> liftIO $ do
|
||||
|
@ -134,7 +134,7 @@ lockDown' file = ifM crippledFileSystem
|
|||
hClose h
|
||||
nukeFile tmpfile
|
||||
withhardlink delta tmpfile `catchIO` const (nohardlink delta)
|
||||
nohardlink delta = do
|
||||
nohardlink delta = do
|
||||
cache <- genInodeCache file delta
|
||||
return KeySource
|
||||
{ keyFilename = file
|
||||
|
@ -207,7 +207,7 @@ finishIngestDirect key source = do
|
|||
perform :: FilePath -> CommandPerform
|
||||
perform file = lockDown file >>= ingest >>= go
|
||||
where
|
||||
go (Just key, cache) = next $ cleanup file key cache True
|
||||
go (Just key, cache) = next $ cleanup file key cache True
|
||||
go (Nothing, _) = stop
|
||||
|
||||
{- On error, put the file back so it doesn't seem to have vanished.
|
||||
|
|
|
@ -56,7 +56,7 @@ seek ps = do
|
|||
start :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
|
||||
start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
|
||||
where
|
||||
(s', downloader) = getDownloader s
|
||||
(s', downloader) = getDownloader s
|
||||
bad = fromMaybe (error $ "bad url " ++ s') $
|
||||
parseURI $ escapeURIString isUnescapedInURI s'
|
||||
choosefile = flip fromMaybe optfile
|
||||
|
@ -95,8 +95,8 @@ start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
|
|||
performQuvi :: Bool -> URLString -> URLString -> FilePath -> CommandPerform
|
||||
performQuvi relaxed pageurl videourl file = ifAnnexed file addurl geturl
|
||||
where
|
||||
quviurl = setDownloader pageurl QuviDownloader
|
||||
addurl key = next $ cleanup quviurl file key Nothing
|
||||
quviurl = setDownloader pageurl QuviDownloader
|
||||
addurl key = next $ cleanup quviurl file key Nothing
|
||||
geturl = next $ isJust <$> addUrlFileQuvi relaxed quviurl videourl file
|
||||
#endif
|
||||
|
||||
|
@ -189,7 +189,7 @@ download url file = do
|
|||
, return Nothing
|
||||
)
|
||||
where
|
||||
runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
|
||||
runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
|
||||
Transfer.download webUUID dummykey (Just file) Transfer.forwardRetry $ const $ do
|
||||
liftIO $ createDirectoryIfMissing True (parentDir tmp)
|
||||
downloadUrl [url] tmp
|
||||
|
|
|
@ -29,7 +29,7 @@ start = do
|
|||
showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
|
||||
stop
|
||||
where
|
||||
showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
|
||||
showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
|
||||
|
||||
{- The repository may not yet have a UUID; automatically initialize it
|
||||
- when there's a git-annex branch available. -}
|
||||
|
|
|
@ -23,7 +23,7 @@ seek ps = do
|
|||
to <- getOptionField toOption Remote.byNameWithUUID
|
||||
from <- getOptionField fromOption Remote.byNameWithUUID
|
||||
withKeyOptions
|
||||
(Command.Move.startKey to from False)
|
||||
(Command.Move.startKey to from False)
|
||||
(withFilesInGit $ whenAnnexed $ start to from)
|
||||
ps
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ start (name:ws) = go =<< InitRemote.findExisting name
|
|||
where
|
||||
config = Logs.Remote.keyValToConfig ws
|
||||
|
||||
go Nothing = unknownNameError "Unknown special remote name."
|
||||
go Nothing = unknownNameError "Unknown special remote name."
|
||||
go (Just (u, c)) = do
|
||||
let fullconfig = config `M.union` c
|
||||
t <- InitRemote.findType fullconfig
|
||||
|
|
|
@ -329,7 +329,7 @@ checkKeySizeOr bad key file = case Types.Key.keySize key of
|
|||
checkBackend :: Backend -> Key -> Maybe FilePath -> Annex Bool
|
||||
checkBackend backend key mfile = go =<< isDirect
|
||||
where
|
||||
go False = do
|
||||
go False = do
|
||||
content <- calcRepo $ gitAnnexLocation key
|
||||
checkBackendOr badContent backend key content
|
||||
go True = maybe nocheck checkdirect mfile
|
||||
|
|
|
@ -47,7 +47,7 @@ guardTest = unlessM (fromMaybe False . Git.Config.isTrue <$> getConfig key "") $
|
|||
, "Refusing to run fuzz tests, since " ++ keyname ++ " is not set!"
|
||||
]
|
||||
where
|
||||
key = annexConfig "eat-my-repository"
|
||||
key = annexConfig "eat-my-repository"
|
||||
(ConfigKey keyname) = key
|
||||
|
||||
|
||||
|
@ -257,7 +257,7 @@ existingDir = do
|
|||
newFile :: IO (Maybe FuzzFile)
|
||||
newFile = go (100 :: Int)
|
||||
where
|
||||
go 0 = return Nothing
|
||||
go 0 = return Nothing
|
||||
go n = do
|
||||
f <- genFuzzFile
|
||||
ifM (doesnotexist (toFilePath f))
|
||||
|
@ -268,7 +268,7 @@ newFile = go (100 :: Int)
|
|||
newDir :: FilePath -> IO (Maybe FuzzDir)
|
||||
newDir parent = go (100 :: Int)
|
||||
where
|
||||
go 0 = return Nothing
|
||||
go 0 = return Nothing
|
||||
go n = do
|
||||
(FuzzDir d) <- genFuzzDir
|
||||
ifM (doesnotexist (parent </> d))
|
||||
|
|
|
@ -48,7 +48,7 @@ start' expensivecheck from key afile = stopUnless (not <$> inAnnex key) $
|
|||
stopUnless (Command.Move.fromOk src key) $
|
||||
go $ Command.Move.fromPerform src False key afile
|
||||
where
|
||||
go a = do
|
||||
go a = do
|
||||
showStart' "get" key afile
|
||||
next a
|
||||
|
||||
|
|
|
@ -50,8 +50,8 @@ getDuplicateMode = gen
|
|||
<*> getflag cleanDuplicatesOption
|
||||
<*> getflag skipDuplicatesOption
|
||||
where
|
||||
getflag = Annex.getFlag . optionName
|
||||
gen False False False False = Default
|
||||
getflag = Annex.getFlag . optionName
|
||||
gen False False False False = Default
|
||||
gen True False False False = Duplicate
|
||||
gen False True False False = DeDuplicate
|
||||
gen False False True False = CleanDuplicates
|
||||
|
|
|
@ -153,7 +153,7 @@ performDownload relaxed cache todownload = case location todownload of
|
|||
rundownload videourl ("." ++ Quvi.linkSuffix link) $
|
||||
addUrlFileQuvi relaxed quviurl videourl
|
||||
where
|
||||
forced = Annex.getState Annex.force
|
||||
forced = Annex.getState Annex.force
|
||||
|
||||
{- Avoids downloading any urls that are already known to be
|
||||
- associated with a file in the annex, unless forced. -}
|
||||
|
@ -192,7 +192,7 @@ performDownload relaxed cache todownload = case location todownload of
|
|||
, return $ Just f
|
||||
)
|
||||
where
|
||||
f = if n < 2
|
||||
f = if n < 2
|
||||
then file
|
||||
else
|
||||
let (d, base) = splitFileName file
|
||||
|
|
|
@ -94,7 +94,7 @@ perform = do
|
|||
warnlocked
|
||||
showEndOk
|
||||
|
||||
warnlocked :: SomeException -> Annex ()
|
||||
warnlocked :: SomeException -> Annex ()
|
||||
warnlocked e = do
|
||||
warning $ show e
|
||||
warning "leaving this file as-is; correct this problem and run git annex add on it"
|
||||
|
|
|
@ -100,7 +100,7 @@ localInfo dir = showCustom (unwords ["info", dir]) $ do
|
|||
evalStateT (mapM_ showStat stats) =<< getLocalStatInfo dir
|
||||
return True
|
||||
where
|
||||
tostats = map (\s -> s dir)
|
||||
tostats = map (\s -> s dir)
|
||||
|
||||
selStats :: [Stat] -> [Stat] -> Annex [Stat]
|
||||
selStats fast_stats slow_stats = do
|
||||
|
|
|
@ -65,7 +65,7 @@ upgradableKey backend key = isNothing (Types.Key.keySize key) || backendupgradab
|
|||
perform :: FilePath -> Key -> Backend -> Backend -> CommandPerform
|
||||
perform file oldkey oldbackend newbackend = go =<< genkey
|
||||
where
|
||||
go Nothing = stop
|
||||
go Nothing = stop
|
||||
go (Just (newkey, knowngoodcontent))
|
||||
| knowngoodcontent = finish newkey
|
||||
| otherwise = stopUnless checkcontent $ finish newkey
|
||||
|
|
|
@ -63,7 +63,7 @@ start key = fieldTransfer Download key $ \_p ->
|
|||
Nothing -> return True
|
||||
Just size -> do
|
||||
size' <- fromIntegral . fileSize
|
||||
<$> liftIO (getFileStatus tmp)
|
||||
<$> liftIO (getFileStatus tmp)
|
||||
return $ size == size'
|
||||
if oksize
|
||||
then case Backend.maybeLookupBackendName (Types.Key.keyBackendName key) of
|
||||
|
@ -76,7 +76,7 @@ start key = fieldTransfer Download key $ \_p ->
|
|||
warning "recvkey: received key with wrong size; discarding"
|
||||
return False
|
||||
where
|
||||
runfsck check = ifM (check key tmp)
|
||||
runfsck check = ifM (check key tmp)
|
||||
( return True
|
||||
, do
|
||||
warning "recvkey: received key from direct mode repository seems to have changed as it was transferred; discarding"
|
||||
|
|
|
@ -27,7 +27,7 @@ seek = withWords start
|
|||
start :: [String] -> CommandStart
|
||||
start = parse
|
||||
where
|
||||
parse (name:[]) = go name performGet
|
||||
parse (name:[]) = go name performGet
|
||||
parse (name:expr:[]) = go name $ \uuid -> do
|
||||
showStart "schedile" name
|
||||
performSet expr uuid
|
||||
|
|
|
@ -356,7 +356,7 @@ syncFile rs f k = do
|
|||
handleDropsFrom locs' rs "unwanted" True k (Just f)
|
||||
Nothing callCommandAction
|
||||
where
|
||||
wantget have = allM id
|
||||
wantget have = allM id
|
||||
[ pure (not $ null have)
|
||||
, not <$> inAnnex k
|
||||
, wantGet True (Just k) (Just f)
|
||||
|
|
|
@ -57,7 +57,7 @@ runRequests readh writeh a = do
|
|||
fileEncoding writeh
|
||||
go =<< readrequests
|
||||
where
|
||||
go (d:rn:k:f:rest) = do
|
||||
go (d:rn:k:f:rest) = do
|
||||
case (deserialize d, deserialize rn, deserialize k, deserialize f) of
|
||||
(Just direction, Just remotename, Just key, Just file) -> do
|
||||
mremote <- Remote.byName' remotename
|
||||
|
|
|
@ -100,7 +100,7 @@ prepareRemoveAnnexDir annexdir =
|
|||
removeUnannexed :: [Key] -> Annex [Key]
|
||||
removeUnannexed = go []
|
||||
where
|
||||
go c [] = return c
|
||||
go c [] = return c
|
||||
go c (k:ks) = ifM (inAnnexCheck k $ liftIO . enoughlinks)
|
||||
( do
|
||||
lockContent k removeAnnex
|
||||
|
|
|
@ -136,7 +136,7 @@ genCfg cfg descs = unlines $ intercalate [""]
|
|||
(\(s, u) -> line "group" u $ unwords $ S.toList s)
|
||||
(\u -> lcom $ line "group" u "")
|
||||
where
|
||||
grouplist = unwords $ map fromStandardGroup [minBound..]
|
||||
grouplist = unwords $ map fromStandardGroup [minBound..]
|
||||
|
||||
preferredcontent = settings cfg descs cfgPreferredContentMap
|
||||
[ com "Repository preferred contents"
|
||||
|
@ -157,7 +157,7 @@ genCfg cfg descs = unlines $ intercalate [""]
|
|||
(\(s, g) -> gline g s)
|
||||
(\g -> gline g "")
|
||||
where
|
||||
gline g value = [ unwords ["groupwanted", g, "=", value] ]
|
||||
gline g value = [ unwords ["groupwanted", g, "=", value] ]
|
||||
allgroups = S.unions $ stdgroups : M.elems (cfgGroupMap cfg)
|
||||
stdgroups = S.fromList $ map fromStandardGroup [minBound..maxBound]
|
||||
|
||||
|
|
|
@ -26,7 +26,7 @@ seek = withWords start
|
|||
start :: [String] -> CommandStart
|
||||
start = parse
|
||||
where
|
||||
parse (name:[]) = go name performGet
|
||||
parse (name:[]) = go name performGet
|
||||
parse (name:expr:[]) = go name $ \uuid -> do
|
||||
showStart "wanted" name
|
||||
performSet expr uuid
|
||||
|
|
|
@ -52,7 +52,7 @@ insertCostAfter l pos
|
|||
| otherwise =
|
||||
firstsegment ++ [costBetween item nextitem ] ++ lastsegment
|
||||
where
|
||||
nextpos = pos + 1
|
||||
nextpos = pos + 1
|
||||
maxpos = length l - 1
|
||||
|
||||
item = l !! pos
|
||||
|
|
|
@ -66,4 +66,4 @@ readProgramFile = do
|
|||
)
|
||||
)
|
||||
where
|
||||
cmd = "git-annex"
|
||||
cmd = "git-annex"
|
||||
|
|
|
@ -94,7 +94,7 @@ catTree :: CatFileHandle -> Ref -> IO [(FilePath, FileMode)]
|
|||
catTree h treeref = go <$> catObjectDetails h treeref
|
||||
where
|
||||
go (Just (b, _, TreeObject)) = parsetree [] b
|
||||
go _ = []
|
||||
go _ = []
|
||||
|
||||
parsetree c b = case L.break (== 0) b of
|
||||
(modefile, rest)
|
||||
|
|
|
@ -79,7 +79,7 @@ pipeWriteRead params writer repo = assertLocal repo $
|
|||
writeReadProcessEnv "git" (toCommand $ gitCommandLine params repo)
|
||||
(gitEnv repo) writer (Just adjusthandle)
|
||||
where
|
||||
adjusthandle h = do
|
||||
adjusthandle h = do
|
||||
fileEncoding h
|
||||
hSetNewlineMode h noNewlineTranslation
|
||||
|
||||
|
@ -117,7 +117,7 @@ gitCoProcessStart restartable params repo = CoProcess.start numrestarts "git"
|
|||
(toCommand $ gitCommandLine params repo)
|
||||
(gitEnv repo)
|
||||
where
|
||||
{- If a long-running git command like cat-file --batch
|
||||
{- If a long-running git command like cat-file --batch
|
||||
- crashes, it will likely start up again ok. If it keeps crashing
|
||||
- 10 times, something is badly wrong. -}
|
||||
numrestarts = if restartable then 10 else 0
|
||||
|
|
|
@ -167,7 +167,7 @@ coreBare = "core.bare"
|
|||
fromPipe :: Repo -> String -> [CommandParam] -> IO (Either SomeException (Repo, String))
|
||||
fromPipe r cmd params = try $
|
||||
withHandle StdoutHandle createProcessSuccess p $ \h -> do
|
||||
fileEncoding h
|
||||
fileEncoding h
|
||||
val <- hGetContentsStrict h
|
||||
r' <- store val r
|
||||
return (r', val)
|
||||
|
|
|
@ -38,12 +38,12 @@ isEncrypted _ = False
|
|||
encryptedRemote :: Repo -> Repo -> IO Repo
|
||||
encryptedRemote baserepo = go
|
||||
where
|
||||
go Repo { location = Url url }
|
||||
go Repo { location = Url url }
|
||||
| urlPrefix `isPrefixOf` u =
|
||||
fromRemoteLocation (drop plen u) baserepo
|
||||
| otherwise = notencrypted
|
||||
where
|
||||
u = show url
|
||||
u = show url
|
||||
plen = length urlPrefix
|
||||
go _ = notencrypted
|
||||
notencrypted = error "not a gcrypt encrypted repository"
|
||||
|
@ -92,7 +92,7 @@ getParticiantList globalconfigrepo repo remotename = KeyIds $ parse $ firstJust
|
|||
]
|
||||
where
|
||||
defaultkey = "gcrypt.participants"
|
||||
parse (Just "simple") = []
|
||||
parse (Just "simple") = []
|
||||
parse (Just l) = words l
|
||||
parse Nothing = []
|
||||
|
||||
|
|
|
@ -44,7 +44,7 @@ lsTreeParams t = [ Params "ls-tree --full-tree -z -r --", File $ fromRef t ]
|
|||
lsTreeFiles :: Ref -> [FilePath] -> Repo -> IO [TreeItem]
|
||||
lsTreeFiles t fs repo = map parseLsTree <$> pipeNullSplitStrict ps repo
|
||||
where
|
||||
ps = [Params "ls-tree --full-tree -z --", File $ fromRef t] ++ map File fs
|
||||
ps = [Params "ls-tree --full-tree -z --", File $ fromRef t] ++ map File fs
|
||||
|
||||
{- Parses a line of ls-tree output.
|
||||
- (The --long format is not currently supported.) -}
|
||||
|
|
|
@ -70,7 +70,7 @@ remoteLocationIsSshUrl _ = False
|
|||
parseRemoteLocation :: String -> Repo -> RemoteLocation
|
||||
parseRemoteLocation s repo = ret $ calcloc s
|
||||
where
|
||||
ret v
|
||||
ret v
|
||||
#ifdef mingw32_HOST_OS
|
||||
| dosstyle v = RemotePath (dospath v)
|
||||
#endif
|
||||
|
|
|
@ -222,7 +222,7 @@ badBranches missing r = filterM isbad =<< getAllRefs r
|
|||
getAllRefs :: Repo -> IO [Ref]
|
||||
getAllRefs r = map toref <$> dirContentsRecursive refdir
|
||||
where
|
||||
refdir = localGitDir r </> "refs"
|
||||
refdir = localGitDir r </> "refs"
|
||||
toref = Ref . relPathDirToFile (localGitDir r)
|
||||
|
||||
explodePackedRefsFile :: Repo -> IO ()
|
||||
|
@ -411,7 +411,7 @@ displayList items header
|
|||
putStrLn header
|
||||
putStr $ unlines $ map (\i -> "\t" ++ i) truncateditems
|
||||
where
|
||||
numitems = length items
|
||||
numitems = length items
|
||||
truncateditems
|
||||
| numitems > 10 = take 10 items ++ ["(and " ++ show (numitems - 10) ++ " more)"]
|
||||
| otherwise = items
|
||||
|
|
|
@ -21,7 +21,7 @@ instance Show GitVersion where
|
|||
installed :: IO GitVersion
|
||||
installed = normalize . extract <$> readProcess "git" ["--version"]
|
||||
where
|
||||
extract s = case lines s of
|
||||
extract s = case lines s of
|
||||
[] -> ""
|
||||
(l:_) -> unwords $ drop 2 $ words l
|
||||
|
||||
|
|
4
Limit.hs
4
Limit.hs
|
@ -234,7 +234,7 @@ limitSize vs s = case readSize dataUnits s of
Nothing -> Left "bad size"
Just sz -> Right $ go sz
where
go sz _ (MatchingFile fi) = lookupFileKey fi >>= check fi sz
go sz _ (MatchingFile fi) = lookupFileKey fi >>= check fi sz
go sz _ (MatchingKey key) = checkkey sz key
checkkey sz key = return $ keySize key `vs` Just sz
check _ sz (Just key) = checkkey sz key
@ -254,7 +254,7 @@ limitMetaData s = case parseMetaData s of
let cglob = compileGlob (fromMetaValue v) CaseInsensative
in Right $ const $ checkKey (check f cglob)
where
check f cglob k = not . S.null
check f cglob k = not . S.null
. S.filter (matchGlob cglob . fromMetaValue)
. metaDataValues f <$> getCurrentMetaData k
@ -148,7 +148,7 @@ gitAnnexLink file key r = do
loc <- gitAnnexLocation' key r False
return $ relPathDirToFile (parentDir absfile) loc
where
whoops = error $ "unable to normalize " ++ file
whoops = error $ "unable to normalize " ++ file

{- File used to lock a key's content. -}
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO FilePath
@ -356,7 +356,7 @@ isLinkToAnnex s = (pathSeparator:objectDir) `isInfixOf` s
preSanitizeKeyName :: String -> String
preSanitizeKeyName = concatMap escape
where
escape c
escape c
| isAsciiUpper c || isAsciiLower c || isDigit c = [c]
| c `elem` ".-_ " = [c] -- common, assumed safe
| c `elem` "/%:" = [c] -- handled by keyFile
8 Logs.hs
@ -117,7 +117,7 @@ urlLogFileKey path
| ext == urlLogExt = fileKey base
| otherwise = Nothing
where
file = takeFileName path
file = takeFileName path
(base, ext) = splitAt (length file - extlen) file
extlen = length urlLogExt
@ -144,7 +144,7 @@ chunkLogFileKey path
| ext == chunkLogExt = fileKey base
| otherwise = Nothing
where
file = takeFileName path
file = takeFileName path
(base, ext) = splitAt (length file - extlen) file
extlen = length chunkLogExt
@ -176,10 +176,10 @@ prop_logs_sane dummykey = and
, expect gotOtherLog (getLogVariety $ numcopiesLog)
]
where
expect = maybe False
expect = maybe False
gotUUIDBasedLog UUIDBasedLog = True
gotUUIDBasedLog _ = False
gotNewUUIDBasedLog NewUUIDBasedLog = True
gotNewUUIDBasedLog NewUUIDBasedLog = True
gotNewUUIDBasedLog _ = False
gotChunkLog (ChunkLog k) = k == dummykey
gotChunkLog _ = False
@ -28,7 +28,7 @@ writeFsckResults u fsckresults = do
| S.null s -> nukeFile logfile
| otherwise -> store s t logfile
where
store s t logfile = do
store s t logfile = do
createDirectoryIfMissing True (parentDir logfile)
liftIO $ viaTmp writeFile logfile $ serialize s t
serialize s t =
@ -67,7 +67,7 @@ getCurrentMetaData k = do
return $ currentMetaData $ unionMetaData loggedmeta
(lastchanged ls loggedmeta)
where
lastchanged [] _ = emptyMetaData
lastchanged [] _ = emptyMetaData
lastchanged ls (MetaData currentlyset) =
let m = foldl' (flip M.union) M.empty (map genlastchanged ls)
in MetaData $
@ -35,7 +35,7 @@ scheduleSet uuid@(UUID _) activities = do
Annex.Branch.change scheduleLog $
showLog id . changeLog ts uuid val . parseLog Just
where
val = fromScheduledActivities activities
val = fromScheduledActivities activities
scheduleSet NoUUID _ = error "unknown UUID; cannot modify"

scheduleMap :: Annex (M.Map UUID [ScheduledActivity])
@ -53,7 +53,7 @@ showTransitions = unlines . map showTransitionLine . S.elems
parseTransitions :: String -> Maybe Transitions
parseTransitions = check . map parseTransitionLine . lines
where
check l
check l
| all isJust l = Just $ S.fromList $ catMaybes l
| otherwise = Nothing
@ -68,8 +68,8 @@ showTransitionLine (TransitionLine ts t) = unwords [show t, show ts]
parseTransitionLine :: String -> Maybe TransitionLine
parseTransitionLine s = TransitionLine <$> pdate ds <*> readish ts
where
ws = words s
ts = Prelude.head ws
ws = words s
ts = Prelude.head ws
ds = unwords $ Prelude.tail ws
pdate = utcTimeToPOSIXSeconds <$$> parseTime defaultTimeLocale "%s%Qs"
@ -76,7 +76,7 @@ knownUrls = do
return $ concat r
where
geturls Nothing = return []
geturls (Just logsha) = getLog . L.unpack <$> catObject logsha
geturls (Just logsha) = getLog . L.unpack <$> catObject logsha

data Downloader = DefaultDownloader | QuviDownloader
@ -101,7 +101,7 @@ byName (Just n) = either error Just <$> byName' n
byNameWithUUID :: Maybe RemoteName -> Annex (Maybe Remote)
byNameWithUUID = checkuuid <=< byName
where
checkuuid Nothing = return Nothing
checkuuid Nothing = return Nothing
checkuuid (Just r)
| uuid r == NoUUID =
if remoteAnnexIgnore (gitconfig r)
@ -169,7 +169,7 @@ handleRequest' lck external req mp responsehandler
go
| otherwise = go
where
go = do
go = do
sendMessage lck external req
loop
loop = receiveMessage lck external responsehandler
@ -147,7 +147,7 @@ rsyncTransport r
| ":" `isInfixOf` loc = sshtransport $ separate (== ':') loc
| otherwise = othertransport
where
loc = Git.repoLocation r
loc = Git.repoLocation r
sshtransport (host, path) = do
let rsyncpath = if "/~/" `isPrefixOf` path
then drop 3 path
@ -166,7 +166,7 @@ gCryptSetup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConf
gCryptSetup mu _ c = go $ M.lookup "gitrepo" c
where
remotename = fromJust (M.lookup "name" c)
go Nothing = error "Specify gitrepo="
go Nothing = error "Specify gitrepo="
go (Just gitrepo) = do
(c', _encsetup) <- encryptionSetup c
inRepo $ Git.Command.run
@ -234,7 +234,7 @@ setupRepo gcryptid r
- create the objectDir on the remote,
- which is needed for direct rsync of objects to work.
-}
rsyncsetup = Remote.Rsync.withRsyncScratchDir $ \tmp -> do
rsyncsetup = Remote.Rsync.withRsyncScratchDir $ \tmp -> do
liftIO $ createDirectoryIfMissing True $ tmp </> objectDir
(rsynctransport, rsyncurl, _) <- rsyncTransport r
let tmpconfig = tmp </> "config"
@ -266,7 +266,7 @@ isShell r = case method of
AccessShell -> True
_ -> False
where
method = toAccessMethod $ fromMaybe "" $
method = toAccessMethod $ fromMaybe "" $
remoteAnnexGCrypt $ gitconfig r

shellOrRsync :: Remote -> Annex a -> Annex a -> Annex a
@ -352,7 +352,7 @@ checkKey r rsyncopts k
| Git.repoIsSsh (repo r) = shellOrRsync r checkshell checkrsync
| otherwise = unsupportedUrl
where
checkrsync = Remote.Rsync.checkKey (repo r) rsyncopts k
checkrsync = Remote.Rsync.checkKey (repo r) rsyncopts k
checkshell = Ssh.inAnnex (repo r) k

{- Annexed objects are hashed using lower-case directories for max
@ -305,7 +305,7 @@ inAnnex rmt key
| Git.repoIsUrl r = checkremote
| otherwise = checklocal
where
r = repo rmt
r = repo rmt
checkhttp = do
showChecking r
ifM (Url.withUrlOptions $ \uo -> anyM (\u -> Url.checkBoth u (keySize key) uo) (keyUrls rmt key))
@ -123,7 +123,7 @@ storeChunks u chunkconfig k f p storer checker =
loop bytesprocessed (chunk, bs) chunkkeys
| L.null chunk && numchunks > 0 = do
-- Once all chunks are successfully
-- Once all chunks are successfully
-- stored, update the chunk log.
chunksStored u k (FixedSizeChunks chunksize) numchunks
return True
@ -138,7 +138,7 @@ storeChunks u chunkconfig k f p storer checker =
)
where
numchunks = numChunks chunkkeys
{- The MeterUpdate that is passed to the action
{- The MeterUpdate that is passed to the action
- storing a chunk is offset, so that it reflects
- the total bytes that have already been stored
- in previous chunks. -}
@ -290,7 +290,7 @@ retrieveChunks retriever u chunkconfig encryptor basek dest basep sink
hSeek h AbsoluteSeek startpoint
return h

{- Progress meter updating is a bit tricky: If the Retriever
{- Progress meter updating is a bit tricky: If the Retriever
- populates a file, it is responsible for updating progress
- as the file is being retrieved.
-
@ -196,7 +196,7 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
retrieveKeyFileGen k dest p enc =
safely $ prepareretriever k $ safely . go
where
go (Just retriever) = displayprogress p k $ \p' ->
go (Just retriever) = displayprogress p k $ \p' ->
retrieveChunks retriever (uuid baser) chunkconfig
enck k dest p' (sink dest enc)
go Nothing = return False
@ -210,7 +210,7 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
checkPresentGen k enc = preparecheckpresent k go
where
go (Just checker) = checkPresentChunks checker (uuid baser) chunkconfig enck k
go (Just checker) = checkPresentChunks checker (uuid baser) chunkconfig enck k
go Nothing = cantCheck baser
enck = maybe id snd enc
Some files were not shown because too many files have changed in this diff.
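The change itself is mechanical: every line whose indentation contained a space immediately followed by a tab is re-indented with tabs only, which is why the removed and added version of each changed line read identically above (only the invisible whitespace differs). As a rough illustration only (not code from this commit; the script and any file names passed to it are hypothetical), a small standalone Haskell program along these lines could list any remaining offenders:

import Data.List (isInfixOf)
import System.Environment (getArgs)

-- Hypothetical checker, not part of git-annex: print file:line for every
-- line that still contains a space immediately followed by a tab, the
-- " \t" pattern this commit removes.
checkFile :: FilePath -> IO ()
checkFile f = do
    content <- readFile f
    mapM_ report (zip [(1 :: Int) ..] (lines content))
  where
    report (n, l)
        | " \t" `isInfixOf` l = putStrLn (f ++ ":" ++ show n)
        | otherwise = return ()

main :: IO ()
main = mapM_ checkFile =<< getArgs

It can be run with runghc, passing the Haskell source files to check as arguments.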