Joey Hess 2013-09-25 23:19:01 -04:00
parent 8cc90219d2
commit c1990702e9
15 changed files with 40 additions and 42 deletions

View file

@@ -265,7 +265,7 @@ getFailedTransfers u = catMaybes <$> (liftIO . getpairs =<< concat <$> findfiles
clearFailedTransfers :: UUID -> Annex [(Transfer, TransferInfo)]
clearFailedTransfers u = do
failed <- getFailedTransfers u
mapM_ removeFailedTransfer $ map fst failed
mapM_ (removeFailedTransfer . fst) failed
return failed
removeFailedTransfer :: Transfer -> Annex ()
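
A minimal sketch of the rewrite above, assuming only the standard Prelude (the list contents and the action are illustrative, not taken from git-annex): composing the projection into the action avoids building an intermediate list.

main :: IO ()
main = do
    let failed = [("key1", 1), ("key2", 2)] :: [(String, Int)]
    -- before: project out the first components, then act on them
    mapM_ putStrLn $ map fst failed
    -- after: compose the projection into the action
    mapM_ (putStrLn . fst) failed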

View file

@@ -82,6 +82,5 @@ transitionList = map transition . S.elems
{- Typically ran with Annex.Branch.change, but we can't import Annex.Branch
- here since it depends on this module. -}
recordTransitions :: (FilePath -> (String -> String) -> Annex ()) -> Transitions -> Annex ()
recordTransitions changer t = do
changer transitionsLog $
showTransitions . S.union t . parseTransitionsStrictly "local"
recordTransitions changer t = changer transitionsLog $
showTransitions . S.union t . parseTransitionsStrictly "local"

View file

@@ -12,7 +12,6 @@ module Remote.Directory (remote) where
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as S
import qualified Data.Map as M
import qualified Control.Exception as E
import Data.Int
import Common.Annex
@@ -109,7 +108,7 @@ withCheckedFiles check (Just _) d k a = go $ locations d k
ifM (check chunkcount)
( do
chunks <- listChunks f <$> readFile chunkcount
ifM (all id <$> mapM check chunks)
ifM (and <$> mapM check chunks)
( a chunks , return False )
, go fs
)
@@ -159,7 +158,7 @@ storeSplit' :: MeterUpdate -> Int64 -> [FilePath] -> [S.ByteString] -> [FilePath
storeSplit' _ _ [] _ _ = error "ran out of dests"
storeSplit' _ _ _ [] c = return $ reverse c
storeSplit' meterupdate chunksize (d:dests) bs c = do
bs' <- E.bracket (openFile d WriteMode) hClose $
bs' <- withFile d WriteMode $
feed zeroBytesProcessed chunksize bs
storeSplit' meterupdate chunksize dests bs' (d:c)
where
@@ -206,7 +205,7 @@ retrieve :: FilePath -> ChunkSize -> Key -> AssociatedFile -> FilePath -> MeterU
retrieve d chunksize k _ f p = metered (Just p) k $ \meterupdate ->
liftIO $ withStoredFiles chunksize d k $ \files ->
catchBoolIO $ do
meteredWriteFileChunks meterupdate f files $ L.readFile
meteredWriteFileChunks meterupdate f files L.readFile
return True
retrieveEncrypted :: FilePath -> ChunkSize -> (Cipher, Key) -> Key -> FilePath -> MeterUpdate -> Annex Bool
@@ -217,7 +216,7 @@ retrieveEncrypted d chunksize (cipher, enck) k f p = metered (Just p) k $ \meter
readBytes $ meteredWriteFile meterupdate f
return True
where
feeder files h = forM_ files $ \file -> L.hPut h =<< L.readFile file
feeder files h = forM_ files $ L.hPut h <=< L.readFile
retrieveCheap :: FilePath -> ChunkSize -> Key -> FilePath -> Annex Bool
retrieveCheap _ (Just _) _ _ = return False -- no cheap retrieval for chunks
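
The hunks above are standard hlint simplifications. A small sketch of the equivalences they rely on, assuming only base (the file paths and contents are placeholders, not from git-annex):

import System.IO (IOMode(WriteMode), hClose, hPutStr, openFile, withFile)
import Control.Exception (bracket)
import Control.Monad (forM_, (<=<))

main :: IO ()
main = do
    -- and xs is the same as all id xs
    print (all id [True, True] == and [True, True])
    -- withFile path mode is bracket (openFile path mode) hClose
    bracket (openFile "/tmp/chunk1" WriteMode) hClose (\h -> hPutStr h "a")
    withFile "/tmp/chunk2" WriteMode (\h -> hPutStr h "b")
    -- (f <=< g) x is f =<< g x, so both loops below do the same thing
    forM_ ["/tmp/chunk1", "/tmp/chunk2"] (\file -> putStr =<< readFile file)
    forM_ ["/tmp/chunk1", "/tmp/chunk2"] (putStr <=< readFile)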

View file

@@ -76,7 +76,7 @@ gen gcryptr u c gc = do
-- correctly.
resetup gcryptid r = do
let u' = genUUIDInNameSpace gCryptNameSpace gcryptid
v <- (M.lookup u' <$> readRemoteLog)
v <- M.lookup u' <$> readRemoteLog
case (Git.remoteName gcryptr, v) of
(Just remotename, Just c') -> do
setGcryptEncryption c' remotename
@@ -186,14 +186,14 @@ gCryptSetup mu c = go $ M.lookup "gitrepo" c
void $ inRepo $ Git.Command.runBool
[ Param "push"
, Param remotename
, Param $ show $ Annex.Branch.fullname
, Param $ show Annex.Branch.fullname
]
g <- inRepo Git.Config.reRead
case Git.GCrypt.remoteRepoId g (Just remotename) of
Nothing -> error "unable to determine gcrypt-id of remote"
Just gcryptid -> do
let u = genUUIDInNameSpace gCryptNameSpace gcryptid
if Just u == mu || mu == Nothing
if Just u == mu || isNothing mu
then do
method <- setupRepo gcryptid =<< inRepo (Git.Construct.fromRemoteLocation gitrepo)
gitConfigSpecialRemote u c' "gcrypt" (fromAccessMethod method)
@@ -246,7 +246,7 @@ setupRepo gcryptid r
ok <- liftIO $ rsync $ rsynctransport ++
[ Params "--recursive"
, Param $ tmp ++ "/"
, Param $ rsyncurl
, Param rsyncurl
]
unless ok $
error "Failed to connect to remote to set it up."

View file

@@ -209,7 +209,7 @@ tryGitConfigRead r
Nothing -> return r
Just n -> do
whenM (inRepo $ Git.Command.runBool [Param "fetch", Param "--quiet", Param n]) $
set_ignore $ "does not have git-annex installed"
set_ignore "does not have git-annex installed"
return r
set_ignore msg = case Git.remoteName r of
@@ -326,7 +326,7 @@ copyFromRemote' r key file dest
: maybe [] (\f -> [(Fields.associatedFile, f)]) file
Just (cmd, params) <- Ssh.git_annex_shell (repo r) "transferinfo"
[Param $ key2file key] fields
v <- liftIO $ (newEmptySV :: IO (MSampleVar Integer))
v <- liftIO (newEmptySV :: IO (MSampleVar Integer))
tid <- liftIO $ forkIO $ void $ tryIO $ do
bytes <- readSV v
p <- createProcess $
@@ -337,7 +337,7 @@ copyFromRemote' r key file dest
hClose $ stderrHandle p
let h = stdinHandle p
let send b = do
hPutStrLn h $ show b
hPrint h b
hFlush h
send bytes
forever $
@@ -414,7 +414,7 @@ rsyncOrCopyFile rsyncparams src dest p =
#else
ifM (sameDeviceIds src dest) (docopy, dorsync)
where
sameDeviceIds a b = (==) <$> (getDeviceId a) <*> (getDeviceId b)
sameDeviceIds a b = (==) <$> getDeviceId a <*> getDeviceId b
getDeviceId f = deviceID <$> liftIO (getFileStatus $ parentDir f)
docopy = liftIO $ bracket
(forkIO $ watchfilesize zeroBytesProcessed)
@@ -450,7 +450,7 @@ commitOnCleanup r a = go `after` a
-- Throw away stderr, since the remote may not
-- have a new enough git-annex shell to
-- support committing.
liftIO $ catchMaybeIO $ do
liftIO $ catchMaybeIO $
withQuietOutput createProcessSuccess $
proc shellcmd $
toCommand shellparams
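
In the hunks above the only rewrite that changes a library call is hPutStrLn with show becoming hPrint; the rest drops redundant $, parentheses, and a do. A sketch of the hPrint equivalence, assuming System.IO from base (the handle and value are illustrative):

import System.IO (hPrint, hPutStrLn, stdout)

main :: IO ()
main = do
    let bytes = 42 :: Integer
    hPutStrLn stdout (show bytes)  -- before
    hPrint stdout bytes            -- after: hPrint h is hPutStrLn h . show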

View file

@@ -98,7 +98,7 @@ store r k _f p
storeHelper r k $ streamMeteredFile src meterupdate
storeEncrypted :: Remote -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
storeEncrypted r (cipher, enck) k p = sendAnnex k (void $ remove r enck) $ \src -> do
storeEncrypted r (cipher, enck) k p = sendAnnex k (void $ remove r enck) $ \src ->
metered (Just p) k $ \meterupdate ->
storeHelper r enck $ \h ->
encrypt (getGpgEncParams r) cipher (feedFile src)
@@ -209,7 +209,7 @@ checkPresent r k = do
]
glacierAction :: Remote -> [CommandParam] -> Annex Bool
glacierAction r params = runGlacier (config r) (uuid r) params
glacierAction r = runGlacier (config r) (uuid r)
runGlacier :: RemoteConfig -> UUID -> [CommandParam] -> Annex Bool
runGlacier c u params = go =<< glacierEnv c u
@@ -222,7 +222,7 @@ glacierParams :: RemoteConfig -> [CommandParam] -> [CommandParam]
glacierParams c params = datacenter:params
where
datacenter = Param $ "--region=" ++
(fromJust $ M.lookup "datacenter" c)
fromJust (M.lookup "datacenter" c)
glacierEnv :: RemoteConfig -> UUID -> Annex (Maybe [(String, String)])
glacierEnv c u = go =<< getRemoteCredPairFor "glacier" c creds
@@ -282,7 +282,7 @@ jobList r keys = go =<< glacierEnv (config r) (uuid r)
enckeys <- forM keys $ \k ->
maybe k snd <$> cipherKey (config r) k
let keymap = M.fromList $ zip enckeys keys
let convert = catMaybes . map (`M.lookup` keymap)
let convert = mapMaybe (`M.lookup` keymap)
return (convert succeeded, convert failed)
parse c [] = c
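
The hunks above eta-reduce glacierAction and replace catMaybes . map with mapMaybe. A sketch of the latter, assuming base and containers (the map contents are made up):

import Data.Maybe (catMaybes, mapMaybe)
import qualified Data.Map as M

main :: IO ()
main = do
    let keymap = M.fromList [("a", 1), ("b", 2)] :: M.Map String Int
        ks = ["a", "x", "b"]
    print (catMaybes (map (`M.lookup` keymap) ks))  -- before: [1,2]
    print (mapMaybe (`M.lookup` keymap) ks)         -- after:  [1,2]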

View file

@@ -68,7 +68,7 @@ storeChunks key tmp dest chunksize storer recorder finalizer = either onerr retu
where
go = do
stored <- storer tmpdests
when (chunksize /= Nothing) $ do
when (isJust chunksize) $ do
let chunkcount = basef ++ chunkCount
recorder chunkcount (show $ length stored)
finalizer tmp dest
@@ -79,7 +79,7 @@ storeChunks key tmp dest chunksize storer recorder finalizer = either onerr retu
basef = tmp ++ keyFile key
tmpdests
| chunksize == Nothing = [basef]
| isNothing chunksize = [basef]
| otherwise = map (basef ++ ) chunkStream
{- Given a list of destinations to use, chunks the data according to the
@@ -123,5 +123,5 @@ storeChunked chunksize dests storer content = either onerr return
meteredWriteFileChunks :: MeterUpdate -> FilePath -> [v] -> (v -> IO L.ByteString) -> IO ()
meteredWriteFileChunks meterupdate dest chunks feeder =
withBinaryFile dest WriteMode $ \h ->
forM_ chunks $ \c ->
meteredWrite meterupdate h =<< feeder c
forM_ chunks $
meteredWrite meterupdate h <=< feeder
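
In the hunks above, note that comparing a Maybe against Nothing with /= corresponds to isJust, while == Nothing corresponds to isNothing. A quick check, assuming Data.Maybe from base (the chunk size is illustrative):

import Data.Maybe (isJust, isNothing)

main :: IO ()
main = do
    let chunksize = Just 1024 :: Maybe Int
    print (chunksize /= Nothing, isJust chunksize)     -- (True,True)
    print (chunksize == Nothing, isNothing chunksize)  -- (False,False)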

View file

@@ -35,8 +35,8 @@ addHooks' r starthook stophook = r'
{ storeKey = \k f p -> wrapper $ storeKey r k f p
, retrieveKeyFile = \k f d p -> wrapper $ retrieveKeyFile r k f d p
, retrieveKeyFileCheap = \k f -> wrapper $ retrieveKeyFileCheap r k f
, removeKey = \k -> wrapper $ removeKey r k
, hasKey = \k -> wrapper $ hasKey r k
, removeKey = wrapper . removeKey r
, hasKey = wrapper . hasKey r
}
where
wrapper = runHooks r' starthook stophook
@@ -45,7 +45,7 @@ runHooks :: Remote -> Maybe String -> Maybe String -> Annex a -> Annex a
runHooks r starthook stophook a = do
dir <- fromRepo gitAnnexRemotesDir
let lck = dir </> remoteid ++ ".lck"
whenM (not . any (== lck) . M.keys <$> getPool) $ do
whenM (notElem lck . M.keys <$> getPool) $ do
liftIO $ createDirectoryIfMissing True dir
firstrun lck
a
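
The hunks above are point-free rewrites; the lock-file test also swaps an explicit any for notElem. A sketch, assuming base and containers (the pool contents are made up):

import qualified Data.Map as M

main :: IO ()
main = do
    let pool = M.fromList [("remote1.lck", ()), ("remote2.lck", ())]
        lck = "remote3.lck"
    print (not (any (== lck) (M.keys pool)))  -- before: True
    print (notElem lck (M.keys pool))         -- after:  True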

View file

@@ -125,9 +125,9 @@ rsyncParamsRemote r direction key file afile = do
-- Convert the ssh command into rsync command line.
let eparam = rsyncShell (Param shellcmd:shellparams)
let o = rsyncParams r
if direction == Download
then return $ o ++ rsyncopts eparam dummy (File file)
else return $ o ++ rsyncopts eparam (File file) dummy
return $ if direction == Download
then o ++ rsyncopts eparam dummy (File file)
else o ++ rsyncopts eparam (File file) dummy
where
rsyncopts ps source dest
| end ps == [dashdash] = ps ++ [source, dest]
@@ -143,6 +143,6 @@ rsyncParamsRemote r direction key file afile = do
-- --inplace to resume partial files
rsyncParams :: Remote -> [CommandParam]
rsyncParams r = [Params "--progress --inplace"] ++
rsyncParams r = Params "--progress --inplace" :
map Param (remoteAnnexRsyncOptions $ gitconfig r)
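
The two hunks above lift a duplicated return out of an if and turn a singleton-list append into a cons. A sketch of both, assuming only the Prelude (the values are illustrative):

main :: IO ()
main = do
    let download = True
    -- before: return repeated in each branch
    xs <- if download then return [1 :: Int] else return [2]
    -- after: one return around the whole if
    ys <- return $ if download then [1] else [2]
    print (xs == ys)
    -- [x] ++ rest builds the same list as x : rest
    print ([0] ++ ys == 0 : ys)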

View file

@@ -93,7 +93,7 @@ lookupHook hookname action = do
command <- getConfig (annexConfig hook) ""
if null command
then do
fallback <- getConfig (annexConfig $ hookfallback) ""
fallback <- getConfig (annexConfig hookfallback) ""
if null fallback
then do
warning $ "missing configuration for " ++ hook ++ " or " ++ hookfallback

View file

@@ -80,7 +80,7 @@ remoteListRefresh = do
remoteList
{- Generates a Remote. -}
remoteGen :: (M.Map UUID RemoteConfig) -> RemoteType -> Git.Repo -> Annex (Maybe Remote)
remoteGen :: M.Map UUID RemoteConfig -> RemoteType -> Git.Repo -> Annex (Maybe Remote)
remoteGen m t r = do
u <- getRepoUUID r
g <- fromRepo id

View file

@@ -86,7 +86,7 @@ gen r u c gc = do
then Just $ rsyncUrl o
else Nothing
, readonly = False
, globallyAvailable = not $ islocal
, globallyAvailable = not islocal
, remotetype = remote
}
@@ -262,7 +262,7 @@ rsyncRetrieve o k dest callback =
, File dest
]
rsyncRemote :: RsyncOpts -> (Maybe MeterUpdate) -> [CommandParam] -> Annex Bool
rsyncRemote :: RsyncOpts -> Maybe MeterUpdate -> [CommandParam] -> Annex Bool
rsyncRemote o callback params = do
showOutput -- make way for progress bar
ifM (liftIO $ (maybe rsync rsyncProgress callback) ps)

View file

@@ -181,9 +181,9 @@ checkPresent r k = davAction r noconn go
- or perhaps this was an intermittent error. -}
onerr url = do
v <- davUrlExists url user pass
if v == Right True
then return $ Left $ "failed to read " ++ url
else return v
return $ if v == Right True
then Left $ "failed to read " ++ url
else v
withStoredFiles
:: Remote

View file

@@ -77,7 +77,7 @@ preferredContent ClientGroup = lastResort $
preferredContent TransferGroup = lastResort $
"not (inallgroup=client and copies=client:2) and (" ++ preferredContent ClientGroup ++ ")"
preferredContent BackupGroup = "include=*"
preferredContent IncrementalBackupGroup = lastResort $
preferredContent IncrementalBackupGroup = lastResort
"include=* and (not copies=incrementalbackup:1)"
preferredContent SmallArchiveGroup = lastResort $
"(include=*/archive/* or include=archive/*) and (" ++ preferredContent FullArchiveGroup ++ ")"

View file

@@ -107,7 +107,7 @@ moveLocationLogs = do
dir <- fromRepo Upgrade.V2.gitStateDir
ifM (liftIO $ doesDirectoryExist dir)
( mapMaybe oldlog2key
<$> (liftIO $ getDirectoryContents dir)
<$> liftIO (getDirectoryContents dir)
, return []
)
move (l, k) = do