where indenting

Joey Hess 2012-11-11 00:51:07 -04:00
parent 6a0756d2fb
commit 2172cc586e
42 changed files with 1193 additions and 1209 deletions


@@ -143,9 +143,9 @@ retrieveEncrypted buprepo (cipher, enck) _ f = liftIO $ catchBoolIO $
withHandle StdoutHandle createProcessSuccess p $ \h -> do
withDecryptedContent cipher (L.hGetContents h) $ L.writeFile f
return True
where
params = bupParams "join" buprepo [Param $ bupRef enck]
p = proc "bup" $ toCommand params
where
params = bupParams "join" buprepo [Param $ bupRef enck]
p = proc "bup" $ toCommand params
remove :: Key -> Annex Bool
remove _ = do
@@ -164,10 +164,11 @@ checkPresent r bupr k
return $ Right ok
| otherwise = liftIO $ catchMsgIO $
boolSystem "git" $ Git.Command.gitCommandLine params bupr
where
params =
[ Params "show-ref --quiet --verify"
, Param $ "refs/heads/" ++ bupRef k]
where
params =
[ Params "show-ref --quiet --verify"
, Param $ "refs/heads/" ++ bupRef k
]
{- Store UUID in the annex.uuid setting of the bup repository. -}
storeBupUUID :: UUID -> BupRepo -> Annex ()
@@ -185,8 +186,8 @@ storeBupUUID u buprepo = do
when (olduuid == "") $
Git.Command.run "config"
[Param "annex.uuid", Param v] r'
where
v = fromUUID u
where
v = fromUUID u
onBupRemote :: Git.Repo -> (FilePath -> [CommandParam] -> IO a) -> FilePath -> [CommandParam] -> Annex a
onBupRemote r a command params = do
@@ -227,17 +228,17 @@ bup2GitRemote r
then Git.Construct.fromAbsPath r
else error "please specify an absolute path"
| otherwise = Git.Construct.fromUrl $ "ssh://" ++ host ++ slash dir
where
bits = split ":" r
host = Prelude.head bits
dir = join ":" $ drop 1 bits
-- "host:~user/dir" is not supported specially by bup;
-- "host:dir" is relative to the home directory;
-- "host:" goes in ~/.bup
slash d
| null d = "/~/.bup"
| "/" `isPrefixOf` d = d
| otherwise = "/~/" ++ d
where
bits = split ":" r
host = Prelude.head bits
dir = join ":" $ drop 1 bits
-- "host:~user/dir" is not supported specially by bup;
-- "host:dir" is relative to the home directory;
-- "host:" goes in ~/.bup
slash d
| null d = "/~/.bup"
| "/" `isPrefixOf` d = d
| otherwise = "/~/" ++ d
{- Converts a key into a git ref name, which bup-split -n will use to point
- to it. -}
@@ -245,8 +246,8 @@ bupRef :: Key -> String
bupRef k
| Git.Ref.legal True shown = shown
| otherwise = "git-annex-" ++ showDigest (sha256 (fromString shown))
where
shown = key2file k
where
shown = key2file k
bupLocal :: BupRepo -> Bool
bupLocal = notElem ':'
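
A side note on the bup2GitRemote hunk above: the slash helper decides how the
directory part of a "host:dir" repository string maps onto an ssh:// URL. A
minimal standalone sketch of just that mapping (hostnames made up):

import Data.List (isPrefixOf)

-- Standalone copy of the slash guards above, for illustration only.
slash :: String -> String
slash d
    | null d = "/~/.bup"
    | "/" `isPrefixOf` d = d
    | otherwise = "/~/" ++ d

-- Combined as "ssh://" ++ host ++ slash dir, this yields for example:
--   "example.com:"         -> ssh://example.com/~/.bup
--   "example.com:mybup"    -> ssh://example.com/~/mybup
--   "example.com:/srv/bup" -> ssh://example.com/srv/bup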


@@ -57,7 +57,6 @@ gen r u c = do
readonly = False,
remotetype = remote
}
where
type ChunkSize = Maybe Int64
@@ -101,25 +100,25 @@ chunkCount f = f ++ ".chunkcount"
withCheckedFiles :: (FilePath -> IO Bool) -> ChunkSize -> FilePath -> Key -> ([FilePath] -> IO Bool) -> IO Bool
withCheckedFiles _ _ [] _ _ = return False
withCheckedFiles check Nothing d k a = go $ locations d k
where
go [] = return False
go (f:fs) = ifM (check f) ( a [f] , go fs )
where
go [] = return False
go (f:fs) = ifM (check f) ( a [f] , go fs )
withCheckedFiles check (Just _) d k a = go $ locations d k
where
go [] = return False
go (f:fs) = do
let chunkcount = chunkCount f
use <- check chunkcount
if use
then do
count <- readcount chunkcount
let chunks = take count $ chunkStream f
ifM (all id <$> mapM check chunks)
( a chunks , return False )
else go fs
readcount f = fromMaybe (error $ "cannot parse " ++ f)
. (readish :: String -> Maybe Int)
<$> readFile f
where
go [] = return False
go (f:fs) = do
let chunkcount = chunkCount f
ifM (check chunkcount)
( do
count <- readcount chunkcount
let chunks = take count $ chunkStream f
ifM (all id <$> mapM check chunks)
( a chunks , return False )
, go fs
)
readcount f = fromMaybe (error $ "cannot parse " ++ f)
. (readish :: String -> Maybe Int)
<$> readFile f
withStoredFiles :: ChunkSize -> FilePath -> Key -> ([FilePath] -> IO Bool) -> IO Bool
withStoredFiles = withCheckedFiles doesFileExist
@@ -170,39 +169,39 @@ storeSplit' _ _ _ [] c = return $ reverse c
storeSplit' meterupdate chunksize (d:dests) bs c = do
bs' <- E.bracket (openFile d WriteMode) hClose (feed chunksize bs)
storeSplit' meterupdate chunksize dests bs' (d:c)
where
feed _ [] _ = return []
feed sz (l:ls) h = do
let s = fromIntegral $ S.length l
if s <= sz
then do
S.hPut h l
meterupdate $ toInteger s
feed (sz - s) ls h
else return (l:ls)
where
feed _ [] _ = return []
feed sz (l:ls) h = do
let s = fromIntegral $ S.length l
if s <= sz
then do
S.hPut h l
meterupdate $ toInteger s
feed (sz - s) ls h
else return (l:ls)
{- Write a L.ByteString to a file, updating a progress meter
- after each chunk of the L.ByteString, typically every 64 kb or so. -}
meteredWriteFile :: MeterUpdate -> FilePath -> L.ByteString -> IO ()
meteredWriteFile meterupdate dest b =
meteredWriteFile' meterupdate dest (L.toChunks b) feeder
where
feeder chunks = return ([], chunks)
where
feeder chunks = return ([], chunks)
{- Writes a series of S.ByteString chunks to a file, updating a progress
- meter after each chunk. The feeder is called to get more chunks. -}
meteredWriteFile' :: MeterUpdate -> FilePath -> s -> (s -> IO (s, [S.ByteString])) -> IO ()
meteredWriteFile' meterupdate dest startstate feeder =
E.bracket (openFile dest WriteMode) hClose (feed startstate [])
where
feed state [] h = do
(state', cs) <- feeder state
unless (null cs) $
feed state' cs h
feed state (c:cs) h = do
S.hPut h c
meterupdate $ toInteger $ S.length c
feed state cs h
where
feed state [] h = do
(state', cs) <- feeder state
unless (null cs) $
feed state' cs h
feed state (c:cs) h = do
S.hPut h c
meterupdate $ toInteger $ S.length c
feed state cs h
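
For context on the feeder interface above: meteredWriteFile' threads a
caller-chosen state through the feeder, which hands back the next state plus
more strict chunks, and the meter is bumped once per chunk written. A hedged
sketch of a feeder that streams a list of chunk files, assuming MeterUpdate is
a plain bytes-written callback (as the meterupdate $ toInteger $ S.length c
calls suggest; the real type lives elsewhere in git-annex):

import Data.Functor ((<$>))
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L

-- Assumption: a bytes-written callback.
type MeterUpdate = Integer -> IO ()

-- The state is the list of chunk files not yet read; each call consumes one
-- file and hands back its strict chunks.
chunkFeeder :: [FilePath] -> IO ([FilePath], [S.ByteString])
chunkFeeder [] = return ([], [])
chunkFeeder (x:xs) = do
    chunks <- L.toChunks <$> L.readFile x
    return (xs, chunks)

-- Hypothetical driver, concatenating chunk files into dest with progress:
--   meteredWriteFile' (\n -> putStrLn ("+" ++ show n)) "dest"
--       ["chunk1", "chunk2"] chunkFeeder

The retrieve hunk further down in this file's diff uses exactly this shape of
feeder over the stored chunk files.
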
{- Generates a list of destinations to write to in order to store a key.
- When chunksize is specified, this list will be a list of chunks.
@@ -213,36 +212,36 @@ meteredWriteFile' meterupdate dest startstate feeder =
-}
storeHelper :: FilePath -> ChunkSize -> Key -> ([FilePath] -> IO [FilePath]) -> Annex Bool
storeHelper d chunksize key a = prep <&&> check <&&> go
where
desttemplate = Prelude.head $ locations d key
dir = parentDir desttemplate
tmpdests = case chunksize of
Nothing -> [desttemplate ++ tmpprefix]
Just _ -> map (++ tmpprefix) (chunkStream desttemplate)
tmpprefix = ".tmp"
detmpprefix f = take (length f - tmpprefixlen) f
tmpprefixlen = length tmpprefix
prep = liftIO $ catchBoolIO $ do
createDirectoryIfMissing True dir
allowWrite dir
return True
{- The size is not exactly known when encrypting the key;
- this assumes that at least the size of the key is
- needed as free space. -}
check = checkDiskSpace (Just dir) key 0
go = liftIO $ catchBoolIO $ do
stored <- a tmpdests
forM_ stored $ \f -> do
let dest = detmpprefix f
renameFile f dest
preventWrite dest
when (chunksize /= Nothing) $ do
let chunkcount = chunkCount desttemplate
_ <- tryIO $ allowWrite chunkcount
writeFile chunkcount (show $ length stored)
preventWrite chunkcount
preventWrite dir
return (not $ null stored)
where
desttemplate = Prelude.head $ locations d key
dir = parentDir desttemplate
tmpdests = case chunksize of
Nothing -> [desttemplate ++ tmpprefix]
Just _ -> map (++ tmpprefix) (chunkStream desttemplate)
tmpprefix = ".tmp"
detmpprefix f = take (length f - tmpprefixlen) f
tmpprefixlen = length tmpprefix
prep = liftIO $ catchBoolIO $ do
createDirectoryIfMissing True dir
allowWrite dir
return True
{- The size is not exactly known when encrypting the key;
- this assumes that at least the size of the key is
- needed as free space. -}
check = checkDiskSpace (Just dir) key 0
go = liftIO $ catchBoolIO $ do
stored <- a tmpdests
forM_ stored $ \f -> do
let dest = detmpprefix f
renameFile f dest
preventWrite dest
when (chunksize /= Nothing) $ do
let chunkcount = chunkCount desttemplate
_ <- tryIO $ allowWrite chunkcount
writeFile chunkcount (show $ length stored)
preventWrite chunkcount
preventWrite dir
return (not $ null stored)
retrieve :: FilePath -> ChunkSize -> Key -> AssociatedFile -> FilePath -> Annex Bool
retrieve d chunksize k _ f = metered Nothing k $ \meterupdate ->
@@ -250,11 +249,11 @@ retrieve d chunksize k _ f = metered Nothing k $ \meterupdate ->
catchBoolIO $ do
meteredWriteFile' meterupdate f files feeder
return True
where
feeder [] = return ([], [])
feeder (x:xs) = do
chunks <- L.toChunks <$> L.readFile x
return (xs, chunks)
where
feeder [] = return ([], [])
feeder (x:xs) = do
chunks <- L.toChunks <$> L.readFile x
return (xs, chunks)
retrieveEncrypted :: FilePath -> ChunkSize -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted d chunksize (cipher, enck) k f = metered Nothing k $ \meterupdate ->
@@ -267,20 +266,20 @@ retrieveEncrypted d chunksize (cipher, enck) k f = metered Nothing k $ \meterupd
retrieveCheap :: FilePath -> ChunkSize -> Key -> FilePath -> Annex Bool
retrieveCheap _ (Just _) _ _ = return False -- no cheap retrieval for chunks
retrieveCheap d _ k f = liftIO $ withStoredFiles Nothing d k go
where
go [file] = catchBoolIO $ createSymbolicLink file f >> return True
go _files = return False
where
go [file] = catchBoolIO $ createSymbolicLink file f >> return True
go _files = return False
remove :: FilePath -> ChunkSize -> Key -> Annex Bool
remove d chunksize k = liftIO $ withStoredFiles chunksize d k go
where
go = all id <$$> mapM removefile
removefile file = catchBoolIO $ do
let dir = parentDir file
allowWrite dir
removeFile file
_ <- tryIO $ removeDirectory dir
return True
where
go = all id <$$> mapM removefile
removefile file = catchBoolIO $ do
let dir = parentDir file
allowWrite dir
removeFile file
_ <- tryIO $ removeDirectory dir
return True
checkPresent :: FilePath -> ChunkSize -> Key -> Annex (Either String Bool)
checkPresent d chunksize k = liftIO $ catchMsgIO $ withStoredFiles chunksize d k $


@@ -32,12 +32,12 @@ encryptionSetup c = case (M.lookup "encryption" c, extractCipher c) of
(Just "shared", Nothing) -> use "encryption setup" $ genSharedCipher
(Just keyid, Nothing) -> use "encryption setup" $ genEncryptedCipher keyid
(Just keyid, Just v) -> use "encryption updated" $ updateEncryptedCipher keyid v
where
cannotchange = error "Cannot change encryption type of existing remote."
use m a = do
cipher <- liftIO a
showNote $ m ++ " " ++ describeCipher cipher
return $ M.delete "encryption" $ storeCipher c cipher
where
cannotchange = error "Cannot change encryption type of existing remote."
use m a = do
cipher <- liftIO a
showNote $ m ++ " " ++ describeCipher cipher
return $ M.delete "encryption" $ storeCipher c cipher
{- Modifies a Remote to support encryption.
-
@@ -58,35 +58,35 @@ encryptableRemote c storeKeyEncrypted retrieveKeyFileEncrypted r =
hasKey = withkey $ hasKey r,
cost = cost r + encryptedRemoteCostAdj
}
where
store k f p = cip k >>= maybe
(storeKey r k f p)
(\enck -> storeKeyEncrypted enck k p)
retrieve k f d = cip k >>= maybe
(retrieveKeyFile r k f d)
(\enck -> retrieveKeyFileEncrypted enck k d)
retrieveCheap k d = cip k >>= maybe
(retrieveKeyFileCheap r k d)
(\_ -> return False)
withkey a k = cip k >>= maybe (a k) (a . snd)
cip = cipherKey c
where
store k f p = cip k >>= maybe
(storeKey r k f p)
(\enck -> storeKeyEncrypted enck k p)
retrieve k f d = cip k >>= maybe
(retrieveKeyFile r k f d)
(\enck -> retrieveKeyFileEncrypted enck k d)
retrieveCheap k d = cip k >>= maybe
(retrieveKeyFileCheap r k d)
(\_ -> return False)
withkey a k = cip k >>= maybe (a k) (a . snd)
cip = cipherKey c
{- Gets encryption Cipher. The decrypted Ciphers are cached in the Annex
- state. -}
remoteCipher :: RemoteConfig -> Annex (Maybe Cipher)
remoteCipher c = go $ extractCipher c
where
go Nothing = return Nothing
go (Just encipher) = do
cache <- Annex.getState Annex.ciphers
case M.lookup encipher cache of
Just cipher -> return $ Just cipher
Nothing -> decrypt encipher cache
decrypt encipher cache = do
showNote "gpg"
cipher <- liftIO $ decryptCipher encipher
Annex.changeState (\s -> s { Annex.ciphers = M.insert encipher cipher cache })
return $ Just cipher
where
go Nothing = return Nothing
go (Just encipher) = do
cache <- Annex.getState Annex.ciphers
case M.lookup encipher cache of
Just cipher -> return $ Just cipher
Nothing -> decrypt encipher cache
decrypt encipher cache = do
showNote "gpg"
cipher <- liftIO $ decryptCipher encipher
Annex.changeState (\s -> s { Annex.ciphers = M.insert encipher cipher cache })
return $ Just cipher
{- Checks if there is a trusted (non-shared) cipher. -}
isTrustedCipher :: RemoteConfig -> Bool
@@ -97,16 +97,16 @@ isTrustedCipher c =
cipherKey :: Maybe RemoteConfig -> Key -> Annex (Maybe (Cipher, Key))
cipherKey Nothing _ = return Nothing
cipherKey (Just c) k = maybe Nothing encrypt <$> remoteCipher c
where
encrypt ciphertext = Just (ciphertext, encryptKey ciphertext k)
where
encrypt ciphertext = Just (ciphertext, encryptKey ciphertext k)
{- Stores an StorableCipher in a remote's configuration. -}
storeCipher :: RemoteConfig -> StorableCipher -> RemoteConfig
storeCipher c (SharedCipher t) = M.insert "cipher" (toB64 t) c
storeCipher c (EncryptedCipher t ks) =
M.insert "cipher" (toB64 t) $ M.insert "cipherkeys" (showkeys ks) c
where
showkeys (KeyIds l) = join "," l
where
showkeys (KeyIds l) = join "," l
{- Extracts an StorableCipher from a remote's configuration. -}
extractCipher :: RemoteConfig -> Maybe StorableCipher
@@ -115,5 +115,5 @@ extractCipher c =
(Just t, Just ks) -> Just $ EncryptedCipher (fromB64 t) (readkeys ks)
(Just t, Nothing) -> Just $ SharedCipher (fromB64 t)
_ -> Nothing
where
readkeys = KeyIds . split ","
where
readkeys = KeyIds . split ","
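
For context on storeCipher/extractCipher above: together they round-trip the
cipher through two plain config fields. Roughly, with illustrative values
(key ids made up; toB64 marks the base64-encoded cipher text):

    storeCipher conf (EncryptedCipher ciphertext (KeyIds ["89C809CB","F52E1B3B"]))
      leaves conf with
        "cipher"     -> toB64 ciphertext
        "cipherkeys" -> "89C809CB,F52E1B3B"

extractCipher reads both fields back into an EncryptedCipher; a config with
only "cipher" set comes back as a SharedCipher, and one with neither yields
Nothing.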


@@ -25,16 +25,16 @@ addHooks r = addHooks' r <$> lookupHook r "start" <*> lookupHook r "stop"
addHooks' :: Remote -> Maybe String -> Maybe String -> Remote
addHooks' r Nothing Nothing = r
addHooks' r starthook stophook = r'
where
r' = r
{ storeKey = \k f p -> wrapper $ storeKey r k f p
, retrieveKeyFile = \k f d -> wrapper $ retrieveKeyFile r k f d
, retrieveKeyFileCheap = \k f -> wrapper $ retrieveKeyFileCheap r k f
, removeKey = \k -> wrapper $ removeKey r k
, hasKey = \k -> wrapper $ hasKey r k
}
where
wrapper = runHooks r' starthook stophook
where
r' = r
{ storeKey = \k f p -> wrapper $ storeKey r k f p
, retrieveKeyFile = \k f d -> wrapper $ retrieveKeyFile r k f d
, retrieveKeyFileCheap = \k f -> wrapper $ retrieveKeyFileCheap r k f
, removeKey = \k -> wrapper $ removeKey r k
, hasKey = \k -> wrapper $ hasKey r k
}
where
wrapper = runHooks r' starthook stophook
runHooks :: Remote -> Maybe String -> Maybe String -> Annex a -> Annex a
runHooks r starthook stophook a = do
@@ -44,50 +44,49 @@ runHooks r starthook stophook a = do
liftIO $ createDirectoryIfMissing True dir
firstrun lck
a
where
remoteid = show (uuid r)
run Nothing = noop
run (Just command) = void $ liftIO $
boolSystem "sh" [Param "-c", Param command]
firstrun lck = do
-- Take a shared lock; This indicates that git-annex
-- is using the remote, and prevents other instances
-- of it from running the stophook. If another
-- instance is shutting down right now, this
-- will block waiting for its exclusive lock to clear.
lockFile lck
where
remoteid = show (uuid r)
run Nothing = noop
run (Just command) = void $ liftIO $
boolSystem "sh" [Param "-c", Param command]
firstrun lck = do
-- Take a shared lock; This indicates that git-annex
-- is using the remote, and prevents other instances
-- of it from running the stophook. If another
-- instance is shutting down right now, this
-- will block waiting for its exclusive lock to clear.
lockFile lck
-- The starthook is run even if some other git-annex
-- is already running, and ran it before.
-- It would be difficult to use locking to ensure
-- it's only run once, and it's also possible for
-- git-annex to be interrupted before it can run the
-- stophook, in which case the starthook
-- would be run again by the next git-annex.
-- So, requiring idempotency is the right approach.
run starthook
-- The starthook is run even if some other git-annex
-- is already running, and ran it before.
-- It would be difficult to use locking to ensure
-- it's only run once, and it's also possible for
-- git-annex to be interrupted before it can run the
-- stophook, in which case the starthook
-- would be run again by the next git-annex.
-- So, requiring idempotency is the right approach.
run starthook
Annex.addCleanup (remoteid ++ "-stop-command") $
runstop lck
runstop lck = do
-- Drop any shared lock we have, and take an
-- exclusive lock, without blocking. If the lock
-- succeeds, we're the only process using this remote,
-- so can stop it.
unlockFile lck
mode <- annexFileMode
fd <- liftIO $ noUmask mode $
openFd lck ReadWrite (Just mode) defaultFileFlags
v <- liftIO $ tryIO $
setLock fd (WriteLock, AbsoluteSeek, 0, 0)
case v of
Left _ -> noop
Right _ -> run stophook
liftIO $ closeFd fd
Annex.addCleanup (remoteid ++ "-stop-command") $ runstop lck
runstop lck = do
-- Drop any shared lock we have, and take an
-- exclusive lock, without blocking. If the lock
-- succeeds, we're the only process using this remote,
-- so can stop it.
unlockFile lck
mode <- annexFileMode
fd <- liftIO $ noUmask mode $
openFd lck ReadWrite (Just mode) defaultFileFlags
v <- liftIO $ tryIO $
setLock fd (WriteLock, AbsoluteSeek, 0, 0)
case v of
Left _ -> noop
Right _ -> run stophook
liftIO $ closeFd fd
lookupHook :: Remote -> String -> Annex (Maybe String)
lookupHook r n = go =<< getRemoteConfig (repo r) hookname ""
where
go "" = return Nothing
go command = return $ Just command
hookname = n ++ "-command"
where
go "" = return Nothing
go command = return $ Just command
hookname = n ++ "-command"
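
For context on the locking scheme the runHooks comments above describe: a
shared lock marks the remote as in use, and the stop hook only runs when an
exclusive lock can be taken, meaning no other git-annex still holds a shared
lock. A simplified standalone sketch using plain POSIX locks (git-annex's
lockFile/unlockFile and annexFileMode wrappers are replaced with direct calls,
the lock file is assumed to already exist, and the openFd form matches the
older unix package API used in the hunk above):

import System.Posix.IO
import System.Posix.Types (Fd)
import System.IO (SeekMode(AbsoluteSeek))
import Control.Exception (try, SomeException)

-- Take a shared lock to mark the remote as in use. Blocks while another
-- process holds the exclusive (stopping) lock.
startUsing :: FilePath -> IO Fd
startUsing lck = do
    fd <- openFd lck ReadWrite (Just 0o666) defaultFileFlags
    waitToSetLock fd (ReadLock, AbsoluteSeek, 0, 0)
    return fd

-- At shutdown, try to convert to an exclusive lock without blocking; success
-- means no other process is using the remote, so the stop hook may run.
stopUsing :: Fd -> IO () -> IO ()
stopUsing fd stophook = do
    r <- try (setLock fd (WriteLock, AbsoluteSeek, 0, 0))
    case (r :: Either SomeException ()) of
        Left _ -> return ()
        Right () -> stophook
    closeFd fd

As the comments in the hunk note, the start hook itself is not protected this
way, so it has to be idempotent.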


@@ -23,18 +23,18 @@ findSpecialRemotes :: String -> Annex [Git.Repo]
findSpecialRemotes s = do
m <- fromRepo Git.config
liftIO $ mapM construct $ remotepairs m
where
remotepairs = M.toList . M.filterWithKey match
construct (k,_) = Git.Construct.remoteNamedFromKey k Git.Construct.fromUnknown
match k _ = startswith "remote." k && endswith (".annex-"++s) k
where
remotepairs = M.toList . M.filterWithKey match
construct (k,_) = Git.Construct.remoteNamedFromKey k Git.Construct.fromUnknown
match k _ = startswith "remote." k && endswith (".annex-"++s) k
{- Sets up configuration for a special remote in .git/config. -}
gitConfigSpecialRemote :: UUID -> RemoteConfig -> String -> String -> Annex ()
gitConfigSpecialRemote u c k v = do
set ("annex-"++k) v
set ("annex-uuid") (fromUUID u)
where
set a b = inRepo $ Git.Command.run "config"
[Param (configsetting a), Param b]
remotename = fromJust (M.lookup "name" c)
configsetting s = "remote." ++ remotename ++ "." ++ s
where
set a b = inRepo $ Git.Command.run "config"
[Param (configsetting a), Param b]
remotename = fromJust (M.lookup "name" c)
configsetting s = "remote." ++ remotename ++ "." ++ s
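
For context on gitConfigSpecialRemote above: each call records two keys under
the remote's section of .git/config, and findSpecialRemotes later matches
remotes by the same ".annex-<s>" suffix. For example, the S3 setup later in
this commit calls gitConfigSpecialRemote u fullconfig "s3" "true"; for a
remote named "cloud" (name and uuid illustrative) that leaves:

    [remote "cloud"]
        annex-s3 = true
        annex-uuid = 00000000-0000-0000-0000-000000000001

findSpecialRemotes "s3" then rediscovers the remote via its
remote.cloud.annex-s3 key.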


@@ -1,6 +1,6 @@
{- git-annex remote access with ssh
-
- Copyright 2011.2012 Joey Hess <joey@kitenet.net>
- Copyright 2011,2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
@@ -34,22 +34,22 @@ git_annex_shell r command params fields
sshparams <- sshToRepo r [Param $ sshcmd uuid ]
return $ Just ("ssh", sshparams)
| otherwise = return Nothing
where
dir = Git.repoPath r
shellcmd = "git-annex-shell"
shellopts = Param command : File dir : params
sshcmd uuid = unwords $
shellcmd : map shellEscape (toCommand shellopts) ++
uuidcheck uuid ++
map shellEscape (toCommand fieldopts)
uuidcheck NoUUID = []
uuidcheck (UUID u) = ["--uuid", u]
fieldopts
| null fields = []
| otherwise = fieldsep : map fieldopt fields ++ [fieldsep]
fieldsep = Param "--"
fieldopt (field, value) = Param $
fieldName field ++ "=" ++ value
where
dir = Git.repoPath r
shellcmd = "git-annex-shell"
shellopts = Param command : File dir : params
sshcmd uuid = unwords $
shellcmd : map shellEscape (toCommand shellopts) ++
uuidcheck uuid ++
map shellEscape (toCommand fieldopts)
uuidcheck NoUUID = []
uuidcheck (UUID u) = ["--uuid", u]
fieldopts
| null fields = []
| otherwise = fieldsep : map fieldopt fields ++ [fieldsep]
fieldsep = Param "--"
fieldopt (field, value) = Param $
fieldName field ++ "=" ++ value
{- Uses a supplied function (such as boolSystem) to run a git-annex-shell
- command on a remote.
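
A side note on the git_annex_shell hunk above: sshcmd glues together the
shell-escaped command, repository path and extra params, the bare --uuid
check, and the field options bracketed by escaped "--" separators (the
bracket only appears when fields are passed). With a made-up path, uuid and
field, and quoting shown informally per shellEscape, the remote command looks
roughly like:

    git-annex-shell 'sendkey' '/srv/annex' --uuid <uuid> '--' 'remoteuuid=<uuid>' '--'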


@@ -64,19 +64,18 @@ hookSetup u c = do
hookEnv :: Key -> Maybe FilePath -> IO (Maybe [(String, String)])
hookEnv k f = Just <$> mergeenv (fileenv f ++ keyenv)
where
mergeenv l = M.toList .
M.union (M.fromList l)
<$> M.fromList <$> getEnvironment
env s v = ("ANNEX_" ++ s, v)
keyenv = catMaybes
[ Just $ env "KEY" (key2file k)
, env "HASH_1" <$> headMaybe hashbits
, env "HASH_2" <$> headMaybe (drop 1 hashbits)
]
fileenv Nothing = []
fileenv (Just file) = [env "FILE" file]
hashbits = map takeDirectory $ splitPath $ hashDirMixed k
where
mergeenv l = M.toList . M.union (M.fromList l)
<$> M.fromList <$> getEnvironment
env s v = ("ANNEX_" ++ s, v)
keyenv = catMaybes
[ Just $ env "KEY" (key2file k)
, env "HASH_1" <$> headMaybe hashbits
, env "HASH_2" <$> headMaybe (drop 1 hashbits)
]
fileenv Nothing = []
fileenv (Just file) = [env "FILE" file]
hashbits = map takeDirectory $ splitPath $ hashDirMixed k
lookupHook :: String -> String -> Annex (Maybe String)
lookupHook hooktype hook =do
@@ -86,22 +85,20 @@ lookupHook hooktype hook =do
warning $ "missing configuration for " ++ hookname
return Nothing
else return $ Just command
where
hookname = hooktype ++ "-" ++ hook ++ "-hook"
where
hookname = hooktype ++ "-" ++ hook ++ "-hook"
runHook :: String -> String -> Key -> Maybe FilePath -> Annex Bool -> Annex Bool
runHook hooktype hook k f a = maybe (return False) run =<< lookupHook hooktype hook
where
run command = do
showOutput -- make way for hook output
ifM (liftIO $
boolSystemEnv "sh" [Param "-c", Param command]
=<< hookEnv k f)
( a
, do
warning $ hook ++ " hook exited nonzero!"
return False
)
where
run command = do
showOutput -- make way for hook output
ifM (liftIO $ boolSystemEnv "sh" [Param "-c", Param command] =<< hookEnv k f)
( a
, do
warning $ hook ++ " hook exited nonzero!"
return False
)
store :: String -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store h k _f _p = do
@@ -134,9 +131,9 @@ checkPresent r h k = do
showAction $ "checking " ++ Git.repoDescribe r
v <- lookupHook h "checkpresent"
liftIO $ catchMsgIO $ check v
where
findkey s = key2file k `elem` lines s
check Nothing = error "checkpresent hook misconfigured"
check (Just hook) = do
env <- hookEnv k Nothing
findkey <$> readProcessEnv "sh" ["-c", hook] env
where
findkey s = key2file k `elem` lines s
check Nothing = error "checkpresent hook misconfigured"
check (Just hook) = do
env <- hookEnv k Nothing
findkey <$> readProcessEnv "sh" ["-c", hook] env
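
A side note on hookEnv above: hook commands receive the key and its two hash
directory levels in ANNEX_-prefixed variables merged on top of the inherited
environment, plus ANNEX_FILE when a file is involved. Schematically (values
illustrative; the two hash levels come from hashDirMixed and are not computed
here):

    ANNEX_KEY    = <key2file form of the key>
    ANNEX_HASH_1 = <first hashDirMixed component>
    ANNEX_HASH_2 = <second hashDirMixed component>
    ANNEX_FILE   = <file path>   (only set when a file is passed)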


@@ -56,8 +56,8 @@ remoteList = do
Annex.changeState $ \s -> s { Annex.remotes = rs' }
return rs'
else return rs
where
process m t = enumerate t >>= mapM (remoteGen m t)
where
process m t = enumerate t >>= mapM (remoteGen m t)
{- Forces the remoteList to be re-generated, re-reading the git config. -}
remoteListRefresh :: Annex [Remote]
@@ -81,11 +81,11 @@ updateRemote remote = do
m <- readRemoteLog
remote' <- updaterepo $ repo remote
remoteGen m (remotetype remote) remote'
where
updaterepo r
| Git.repoIsLocal r || Git.repoIsLocalUnknown r =
Remote.Git.configRead r
| otherwise = return r
where
updaterepo r
| Git.repoIsLocal r || Git.repoIsLocalUnknown r =
Remote.Git.configRead r
| otherwise = return r
{- All remotes that are not ignored. -}
enabledRemoteList :: Annex [Remote]


@@ -72,14 +72,14 @@ genRsyncOpts r c = do
<$> getRemoteConfig r "rsync-options" ""
let escape = maybe True (\m -> M.lookup "shellescape" m /= Just "no") c
return $ RsyncOpts url opts escape
where
safe o
-- Don't allow user to pass --delete to rsync;
-- that could cause it to delete other keys
-- in the same hash bucket as a key it sends.
| o == "--delete" = False
| o == "--delete-excluded" = False
| otherwise = True
where
safe o
-- Don't allow user to pass --delete to rsync;
-- that could cause it to delete other keys
-- in the same hash bucket as a key it sends.
| o == "--delete" = False
| o == "--delete-excluded" = False
| otherwise = True
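
A side note on the safe guards above: the call site just above this hunk (not
shown here) presumably applies safe as a filter over the words of the user's
rsync-options setting, so a configured --delete never reaches rsync. A
standalone illustration of the guard:

-- Illustration only; not the actual genRsyncOpts call site.
safe :: String -> Bool
safe "--delete" = False
safe "--delete-excluded" = False
safe _ = True

-- filter safe (words "--archive --delete --compress")
--   == ["--archive","--compress"]
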
rsyncSetup :: UUID -> RemoteConfig -> Annex RemoteConfig
rsyncSetup u c = do
@@ -100,9 +100,9 @@ rsyncEscape o s
rsyncUrls :: RsyncOpts -> Key -> [String]
rsyncUrls o k = map use annexHashes
where
use h = rsyncUrl o </> h k </> rsyncEscape o (f </> f)
f = keyFile k
where
use h = rsyncUrl o </> h k </> rsyncEscape o (f </> f)
f = keyFile k
store :: RsyncOpts -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store o k _f p = rsyncSend o p k <=< inRepo $ gitAnnexLocation k
@@ -146,18 +146,18 @@ remove o k = withRsyncScratchDir $ \tmp -> liftIO $ do
, Param $ addTrailingPathSeparator dummy
, Param $ rsyncUrl o
]
where
{- Specify include rules to match the directories where the
- content could be. Note that the parent directories have
- to also be explicitly included, due to how rsync
- traverses directories. -}
includes = concatMap use annexHashes
use h = let dir = h k in
[ parentDir dir
, dir
-- match content directory and anything in it
, dir </> keyFile k </> "***"
]
where
{- Specify include rules to match the directories where the
- content could be. Note that the parent directories have
- to also be explicitly included, due to how rsync
- traverses directories. -}
includes = concatMap use annexHashes
use h = let dir = h k in
[ parentDir dir
, dir
-- match content directory and anything in it
, dir </> keyFile k </> "***"
]
checkPresent :: Git.Repo -> RsyncOpts -> Key -> Annex (Either String Bool)
checkPresent r o k = do
@@ -165,13 +165,13 @@ checkPresent r o k = do
-- note: Does not currently differentiate between rsync failing
-- to connect, and the file not being present.
Right <$> check
where
check = untilTrue (rsyncUrls o k) $ \u ->
liftIO $ catchBoolIO $ do
withQuietOutput createProcessSuccess $
proc "rsync" $ toCommand $
rsyncOptions o ++ [Param u]
return True
where
check = untilTrue (rsyncUrls o k) $ \u ->
liftIO $ catchBoolIO $ do
withQuietOutput createProcessSuccess $
proc "rsync" $ toCommand $
rsyncOptions o ++ [Param u]
return True
{- Rsync params to enable resumes of sending files safely,
- ensure that files are only moved into place once complete
@@ -190,9 +190,9 @@ withRsyncScratchDir a = do
nuke tmp
liftIO $ createDirectoryIfMissing True tmp
nuke tmp `after` a tmp
where
nuke d = liftIO $ whenM (doesDirectoryExist d) $
removeDirectoryRecursive d
where
nuke d = liftIO $ whenM (doesDirectoryExist d) $
removeDirectoryRecursive d
rsyncRemote :: RsyncOpts -> (Maybe MeterUpdate) -> [CommandParam] -> Annex Bool
rsyncRemote o callback params = do
@@ -203,9 +203,9 @@ rsyncRemote o callback params = do
showLongNote "rsync failed -- run git annex again to resume file transfer"
return False
)
where
defaultParams = [Params "--progress"]
ps = rsyncOptions o ++ defaultParams ++ params
where
defaultParams = [Params "--progress"]
ps = rsyncOptions o ++ defaultParams ++ params
{- To send a single key is slightly tricky; need to build up a temporary
directory structure to pass to rsync so it can create the hash


@@ -48,74 +48,71 @@ gen' r u c cst =
(storeEncrypted this)
(retrieveEncrypted this)
this
where
this = Remote {
uuid = u,
cost = cst,
name = Git.repoDescribe r,
storeKey = store this,
retrieveKeyFile = retrieve this,
retrieveKeyFileCheap = retrieveCheap this,
removeKey = remove this,
hasKey = checkPresent this,
hasKeyCheap = False,
whereisKey = Nothing,
config = c,
repo = r,
localpath = Nothing,
readonly = False,
remotetype = remote
}
where
this = Remote {
uuid = u,
cost = cst,
name = Git.repoDescribe r,
storeKey = store this,
retrieveKeyFile = retrieve this,
retrieveKeyFileCheap = retrieveCheap this,
removeKey = remove this,
hasKey = checkPresent this,
hasKeyCheap = False,
whereisKey = Nothing,
config = c,
repo = r,
localpath = Nothing,
readonly = False,
remotetype = remote
}
s3Setup :: UUID -> RemoteConfig -> Annex RemoteConfig
s3Setup u c = handlehost $ M.lookup "host" c
where
remotename = fromJust (M.lookup "name" c)
defbucket = remotename ++ "-" ++ fromUUID u
defaults = M.fromList
[ ("datacenter", "US")
, ("storageclass", "STANDARD")
, ("host", defaultAmazonS3Host)
, ("port", show defaultAmazonS3Port)
, ("bucket", defbucket)
]
where
remotename = fromJust (M.lookup "name" c)
defbucket = remotename ++ "-" ++ fromUUID u
defaults = M.fromList
[ ("datacenter", "US")
, ("storageclass", "STANDARD")
, ("host", defaultAmazonS3Host)
, ("port", show defaultAmazonS3Port)
, ("bucket", defbucket)
]
handlehost Nothing = defaulthost
handlehost (Just h)
| ".archive.org" `isSuffixOf` map toLower h = archiveorg
| otherwise = defaulthost
handlehost Nothing = defaulthost
handlehost (Just h)
| ".archive.org" `isSuffixOf` map toLower h = archiveorg
| otherwise = defaulthost
use fullconfig = do
gitConfigSpecialRemote u fullconfig "s3" "true"
s3SetCreds fullconfig u
use fullconfig = do
gitConfigSpecialRemote u fullconfig "s3" "true"
s3SetCreds fullconfig u
defaulthost = do
c' <- encryptionSetup c
let fullconfig = c' `M.union` defaults
genBucket fullconfig u
use fullconfig
defaulthost = do
c' <- encryptionSetup c
let fullconfig = c' `M.union` defaults
genBucket fullconfig u
use fullconfig
archiveorg = do
showNote "Internet Archive mode"
maybe (error "specify bucket=") (const noop) $
M.lookup "bucket" archiveconfig
use archiveconfig
where
archiveconfig =
-- hS3 does not pass through
-- x-archive-* headers
M.mapKeys (replace "x-archive-" "x-amz-") $
-- encryption does not make sense here
M.insert "encryption" "none" $
M.union c $
-- special constraints on key names
M.insert "mungekeys" "ia" $
-- bucket created only when files
-- are uploaded
M.insert "x-amz-auto-make-bucket" "1" $
-- no default bucket name; should
-- be human-readable
M.delete "bucket" defaults
archiveorg = do
showNote "Internet Archive mode"
maybe (error "specify bucket=") (const noop) $
M.lookup "bucket" archiveconfig
use archiveconfig
where
archiveconfig =
-- hS3 does not pass through x-archive-* headers
M.mapKeys (replace "x-archive-" "x-amz-") $
-- encryption does not make sense here
M.insert "encryption" "none" $
M.union c $
-- special constraints on key names
M.insert "mungekeys" "ia" $
-- bucket created only when files are uploaded
M.insert "x-amz-auto-make-bucket" "1" $
-- no default bucket name; should be human-readable
M.delete "bucket" defaults
store :: Remote -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store r k _f _p = s3Action r False $ \(conn, bucket) -> do
@@ -143,15 +140,15 @@ storeHelper (conn, bucket) r k file = do
S3Object bucket (bucketFile r k) ""
(("Content-Length", show size) : xheaders) content
sendObject conn object
where
storageclass =
case fromJust $ M.lookup "storageclass" $ fromJust $ config r of
"REDUCED_REDUNDANCY" -> REDUCED_REDUNDANCY
_ -> STANDARD
getsize = fileSize <$> (liftIO $ getFileStatus file)
xheaders = filter isxheader $ M.assocs $ fromJust $ config r
isxheader (h, _) = "x-amz-" `isPrefixOf` h
where
storageclass =
case fromJust $ M.lookup "storageclass" $ fromJust $ config r of
"REDUCED_REDUNDANCY" -> REDUCED_REDUNDANCY
_ -> STANDARD
getsize = fileSize <$> (liftIO $ getFileStatus file)
xheaders = filter isxheader $ M.assocs $ fromJust $ config r
isxheader (h, _) = "x-amz-" `isPrefixOf` h
retrieve :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
retrieve r k _f d = s3Action r False $ \(conn, bucket) -> do
@@ -188,8 +185,8 @@ checkPresent r k = s3Action r noconn $ \(conn, bucket) -> do
Right _ -> return $ Right True
Left (AWSError _ _) -> return $ Right False
Left e -> return $ Left (s3Error e)
where
noconn = Left $ error "S3 not configured"
where
noconn = Left $ error "S3 not configured"
s3Warning :: ReqError -> Annex Bool
s3Warning e = do
@@ -215,12 +212,12 @@ s3Action r noconn action = do
bucketFile :: Remote -> Key -> FilePath
bucketFile r = munge . key2file
where
munge s = case M.lookup "mungekeys" c of
Just "ia" -> iaMunge $ fileprefix ++ s
_ -> fileprefix ++ s
fileprefix = M.findWithDefault "" "fileprefix" c
c = fromJust $ config r
where
munge s = case M.lookup "mungekeys" c of
Just "ia" -> iaMunge $ fileprefix ++ s
_ -> fileprefix ++ s
fileprefix = M.findWithDefault "" "fileprefix" c
c = fromJust $ config r
bucketKey :: Remote -> String -> Key -> S3Object
bucketKey r bucket k = S3Object bucket (bucketFile r k) "" [] L.empty
@@ -230,12 +227,12 @@ bucketKey r bucket k = S3Object bucket (bucketFile r k) "" [] L.empty
- encoded. -}
iaMunge :: String -> String
iaMunge = (>>= munge)
where
munge c
| isAsciiUpper c || isAsciiLower c || isNumber c = [c]
| c `elem` "_-.\"" = [c]
| isSpace c = []
| otherwise = "&" ++ show (ord c) ++ ";"
where
munge c
| isAsciiUpper c || isAsciiLower c || isNumber c = [c]
| c `elem` "_-.\"" = [c]
| isSpace c = []
| otherwise = "&" ++ show (ord c) ++ ";"
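
A side note on iaMunge above: ASCII letters, digits, and the characters
underscore, hyphen, dot and double quote pass through, spaces are dropped, and
anything else becomes a "&<code point>;" escape. For example (input
illustrative):

    iaMunge "my file+1.txt" == "myfile&43;1.txt"

since '+' is code point 43 and the space is simply dropped.
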
genBucket :: RemoteConfig -> UUID -> Annex ()
genBucket c u = do
@@ -251,9 +248,9 @@ genBucket c u = do
case res of
Right _ -> noop
Left err -> s3Error err
where
bucket = fromJust $ M.lookup "bucket" c
datacenter = fromJust $ M.lookup "datacenter" c
where
bucket = fromJust $ M.lookup "bucket" c
datacenter = fromJust $ M.lookup "datacenter" c
s3ConnectionRequired :: RemoteConfig -> UUID -> Annex AWSConnection
s3ConnectionRequired c u =
@@ -267,46 +264,46 @@ s3Connection c u = do
_ -> do
warning $ "Set both " ++ s3AccessKey ++ " and " ++ s3SecretKey ++ " to use S3"
return Nothing
where
host = fromJust $ M.lookup "host" c
port = let s = fromJust $ M.lookup "port" c in
case reads s of
[(p, _)] -> p
_ -> error $ "bad S3 port value: " ++ s
where
host = fromJust $ M.lookup "host" c
port = let s = fromJust $ M.lookup "port" c in
case reads s of
[(p, _)] -> p
_ -> error $ "bad S3 port value: " ++ s
{- S3 creds come from the environment if set, otherwise from the cache
- in gitAnnexCredsDir, or failing that, might be stored encrypted in
- the remote's config. -}
s3GetCreds :: RemoteConfig -> UUID -> Annex (Maybe (String, String))
s3GetCreds c u = maybe fromcache (return . Just) =<< liftIO getenv
where
getenv = liftM2 (,)
<$> get s3AccessKey
<*> get s3SecretKey
where
get = catchMaybeIO . getEnv
fromcache = do
d <- fromRepo gitAnnexCredsDir
let f = d </> fromUUID u
v <- liftIO $ catchMaybeIO $ readFile f
case lines <$> v of
Just (ak:sk:[]) -> return $ Just (ak, sk)
_ -> fromconfig
fromconfig = do
mcipher <- remoteCipher c
case (M.lookup "s3creds" c, mcipher) of
(Just s3creds, Just cipher) -> do
creds <- liftIO $ decrypt s3creds cipher
case creds of
[ak, sk] -> do
s3CacheCreds (ak, sk) u
return $ Just (ak, sk)
_ -> do error "bad s3creds"
_ -> return Nothing
decrypt s3creds cipher = lines <$>
withDecryptedContent cipher
(return $ L.pack $ fromB64 s3creds)
(return . L.unpack)
where
getenv = liftM2 (,)
<$> get s3AccessKey
<*> get s3SecretKey
where
get = catchMaybeIO . getEnv
fromcache = do
d <- fromRepo gitAnnexCredsDir
let f = d </> fromUUID u
v <- liftIO $ catchMaybeIO $ readFile f
case lines <$> v of
Just (ak:sk:[]) -> return $ Just (ak, sk)
_ -> fromconfig
fromconfig = do
mcipher <- remoteCipher c
case (M.lookup "s3creds" c, mcipher) of
(Just s3creds, Just cipher) -> do
creds <- liftIO $ decrypt s3creds cipher
case creds of
[ak, sk] -> do
s3CacheCreds (ak, sk) u
return $ Just (ak, sk)
_ -> do error "bad s3creds"
_ -> return Nothing
decrypt s3creds cipher = lines
<$> withDecryptedContent cipher
(return $ L.pack $ fromB64 s3creds)
(return . L.unpack)
{- Stores S3 creds encrypted in the remote's config if possible to do so
- securely, and otherwise locally in gitAnnexCredsDir. -}


@@ -55,13 +55,13 @@ gen r _ _ =
downloadKey :: Key -> AssociatedFile -> FilePath -> Annex Bool
downloadKey key _file dest = get =<< getUrls key
where
get [] = do
warning "no known url"
return False
get urls = do
showOutput -- make way for download progress bar
downloadUrl urls dest
where
get [] = do
warning "no known url"
return False
get urls = do
showOutput -- make way for download progress bar
downloadUrl urls dest
downloadKeyCheap :: Key -> FilePath -> Annex Bool
downloadKeyCheap _ _ = return False