make retrieveKeyFile and retrieveKeyFileCheap throw exceptions

Converted retrieveKeyFileCheap to a Maybe, to avoid needing to throw an
exception when a remote doesn't support it.
Joey Hess 2020-05-13 17:05:56 -04:00
parent a6adea4aaf
commit d9c7f81ba4
No known key found for this signature in database
GPG key ID: DB12DB0FF05F8F38
32 changed files with 247 additions and 245 deletions
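In effect, the retrieval actions in the Remote interface stop reporting failure with a Bool and instead throw, and a remote that cannot do cheap retrieval expresses that with Nothing rather than an action returning False. A minimal, hypothetical sketch of that shape (the names and types below are simplified stand-ins, not git-annex's actual Remote record):

import Control.Exception (SomeException, throwIO, try)

-- Simplified stand-ins; git-annex's real record lives in Types.Remote.
data RemoteSketch = RemoteSketch
    { retrieveFile :: String -> FilePath -> IO ()
      -- ^ before this change: ... -> IO Bool; now failure throws
    , retrieveFileCheap :: Maybe (String -> FilePath -> IO ())
      -- ^ before: ... -> IO Bool, returning False when unsupported;
      -- now an unsupporting remote simply uses Nothing
    }

demo :: RemoteSketch
demo = RemoteSketch
    { retrieveFile = \key _dest ->
        throwIO (userError ("retrieval of " ++ key ++ " failed"))
    , retrieveFileCheap = Nothing
    }

main :: IO ()
main = do
    -- callers now catch instead of checking a returned Bool
    r <- try (retrieveFile demo "SHA256--example" "/tmp/dest")
        :: IO (Either SomeException ())
    case r of
        Left e -> putStrLn ("failed: " ++ show e)
        Right () -> putStrLn "ok"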

View file

@@ -221,8 +221,8 @@ removeChunks remover u chunkconfig encryptor k = do
- other chunks in the list is fed to the sink.
-
- If retrieval of one of the subsequent chunks throws an exception,
- gives up and returns False. Note that partial data may have been
- written to the sink in this case.
- gives up. Note that partial data may have been written to the sink
- in this case.
-
- Resuming is supported when using chunks. When the destination file
- already exists, it skips to the next chunked key that would be needed
@@ -236,8 +236,8 @@ retrieveChunks
-> Key
-> FilePath
-> MeterUpdate
-> (Maybe Handle -> Maybe MeterUpdate -> ContentSource -> Annex Bool)
-> Annex Bool
-> (Maybe Handle -> Maybe MeterUpdate -> ContentSource -> Annex ())
-> Annex ()
retrieveChunks retriever u chunkconfig encryptor basek dest basep sink
| noChunks chunkconfig =
-- Optimisation: Try the unchunked key first, to avoid
@@ -251,14 +251,10 @@ retrieveChunks retriever u chunkconfig encryptor basek dest basep sink
currsize <- liftIO $ catchMaybeIO $ getFileSize dest
let ls' = maybe ls (setupResume ls) currsize
if any null ls'
then return True -- dest is already complete
else firstavail currsize ls' `catchNonAsync` unable
then noop -- dest is already complete
else firstavail currsize ls'
unable e = do
warning (show e)
return False
firstavail _ [] = return False
firstavail _ [] = giveup "chunk retrieval failed"
firstavail currsize ([]:ls) = firstavail currsize ls
firstavail currsize ((k:ks):ls)
| k == basek = getunchunked
@@ -271,25 +267,22 @@ retrieveChunks retriever u chunkconfig encryptor basek dest basep sink
v <- tryNonAsync $
retriever (encryptor k) p $ \content ->
bracketIO (maybe opennew openresume offset) hClose $ \h -> do
void $ tosink (Just h) p content
tosink (Just h) p content
let sz = toBytesProcessed $
fromMaybe 0 $ fromKey keyChunkSize k
getrest p h sz sz ks
`catchNonAsync` unable
case v of
Left e
| null ls -> unable e
| null ls -> throwM e
| otherwise -> firstavail currsize ls
Right r -> return r
getrest _ _ _ _ [] = return True
getrest _ _ _ _ [] = noop
getrest p h sz bytesprocessed (k:ks) = do
let p' = offsetMeterUpdate p bytesprocessed
liftIO $ p' zeroBytesProcessed
ifM (retriever (encryptor k) p' $ tosink (Just h) p')
( getrest p h sz (addBytesProcessed bytesprocessed sz) ks
, unable "chunk retrieval failed"
)
retriever (encryptor k) p' $ tosink (Just h) p'
getrest p h sz (addBytesProcessed bytesprocessed sz) ks
getunchunked = retriever (encryptor basek) basep $ tosink Nothing basep
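The firstavail logic above reduces to a general pattern: try each candidate in turn, swallow the exception while alternatives remain, and rethrow only when the last one fails. A self-contained sketch of that pattern in plain IO (tryNonAsync and giveup are git-annex utilities; plain try/throwIO stand in for them here, though unlike tryNonAsync, try also catches asynchronous exceptions):

import Control.Exception (SomeException, throwIO, try)

-- Stand-in for tryNonAsync.
tryAny :: IO a -> IO (Either SomeException a)
tryAny = try

-- Try each action in turn: on failure fall through to the next candidate,
-- rethrowing only when the failing action was the last one available.
firstAvail :: [IO a] -> IO a
firstAvail [] = throwIO (userError "chunk retrieval failed")
firstAvail (a:rest) = do
    v <- tryAny a
    case v of
        Right r -> return r
        Left e
            | null rest -> throwIO e
            | otherwise -> firstAvail rest

main :: IO ()
main = do
    content <- firstAvail
        [ throwIO (userError "unchunked key not present")
        , putStrLn "fetching chunked form" >> return "file content"
        ]
    putStrLn content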

View file

@@ -202,15 +202,12 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
, retrieveKeyFile = \k af dest p ->
let retrieveexport = retrieveKeyFileFromExport dbv k af dest p
in if appendonly r
then do
ret@(ok, _v) <- retrieveKeyFile r k af dest p
if ok
then return ret
else retrieveexport
then retrieveKeyFile r k af dest p
`catchNonAsync` const retrieveexport
else retrieveexport
, retrieveKeyFileCheap = if appendonly r
then retrieveKeyFileCheap r
else \_ _ _ -> return False
else Nothing
-- Removing a key from an export would need to
-- change the tree in the export log to not include
-- the file. Otherwise, conflicts when removing
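For append-only export remotes, the old Bool check ("did the direct retrieval succeed?") becomes an exception handler: try the remote's own retrieveKeyFile and, if it throws, fall back to fetching from the exported tree. A minimal sketch of that fallback shape (catchNonAsync is approximated here with plain catch):

import Control.Exception (SomeException, catch)

-- Stand-in for catchNonAsync (which additionally leaves async exceptions alone).
catchAny :: IO a -> (SomeException -> IO a) -> IO a
catchAny = catch

-- Try the direct retrieval first; on any failure, retrieve from the export.
retrieveWithFallback :: IO () -> IO () -> IO ()
retrieveWithFallback direct fromExport = direct `catchAny` const fromExport

main :: IO ()
main = retrieveWithFallback
    (ioError (userError "append-only remote does not have the key"))
    (putStrLn "retrieved from export instead")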
@@ -318,18 +315,16 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
db <- getexportdb dbv
liftIO $ Export.getExportTree db k
retrieveKeyFileFromExport dbv k _af dest p = unVerified $
if maybe False (isJust . verifyKeyContent) (maybeLookupBackendVariety (fromKey keyVariety k))
then do
locs <- getexportlocs dbv k
case locs of
[] -> do
ifM (liftIO $ atomically $ readTVar $ getexportinconflict dbv)
( warning "unknown export location, likely due to the export conflict"
, warning "unknown export location"
)
return False
(l:_) -> retrieveExport (exportActions r) k l dest p
else do
warning $ "exported content cannot be verified due to using the " ++ decodeBS (formatKeyVariety (fromKey keyVariety k)) ++ " backend"
return False
retrieveKeyFileFromExport dbv k _af dest p
| maybe False (isJust . verifyKeyContent) (maybeLookupBackendVariety (fromKey keyVariety k)) = do
locs <- getexportlocs dbv k
case locs of
[] -> ifM (liftIO $ atomically $ readTVar $ getexportinconflict dbv)
( giveup "unknown export location, likely due to the export conflict"
, giveup "unknown export location"
)
(l:_) -> do
unlessM (retrieveExport (exportActions r) k l dest p) $
giveup "retrieving from export failed"
return UnVerified
| otherwise = giveup $ "exported content cannot be verified due to using the " ++ decodeBS (formatKeyVariety (fromKey keyVariety k)) ++ " backend"

View file

@@ -34,7 +34,9 @@ addHooks' r starthook stophook = r'
r' = r
{ storeKey = \k f p -> wrapper $ storeKey r k f p
, retrieveKeyFile = \k f d p -> wrapper $ retrieveKeyFile r k f d p
, retrieveKeyFileCheap = \k af f -> wrapper $ retrieveKeyFileCheap r k af f
, retrieveKeyFileCheap = case retrieveKeyFileCheap r of
Just a -> Just $ \k af f -> wrapper $ a k af f
Nothing -> Nothing
, removeKey = wrapper . removeKey r
, checkPresent = wrapper . checkPresent r
}
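Because retrieveKeyFileCheap is now optional, the hook wrapper can no longer be applied unconditionally; the explicit Just/Nothing case above is equivalent to mapping the wrapper over the Maybe. A small sketch with hypothetical simplified types (Annex approximated by IO, Key by String, AssociatedFile by Maybe FilePath):

-- Wrap an optional retrieval action so the hook runs around it when present.
wrapCheap
    :: (IO () -> IO ())  -- the hook wrapper
    -> Maybe (String -> Maybe FilePath -> FilePath -> IO ())
    -> Maybe (String -> Maybe FilePath -> FilePath -> IO ())
wrapCheap wrapper = fmap (\a k af f -> wrapper (a k af f))

main :: IO ()
main = do
    let cheap = Just (\_k _af dest -> putStrLn ("fast local copy to " ++ dest))
    case wrapCheap (\act -> putStrLn "hook: starting remote" >> act) cheap of
        Just a -> a "SHA256--example" Nothing "/tmp/dest"
        Nothing -> putStrLn "remote has no cheap retrieval"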

View file

@@ -39,11 +39,13 @@ store runner k af p = do
Just False -> giveup "transfer failed"
Nothing -> giveup "can't connect to remote"
retrieve :: (MeterUpdate -> ProtoRunner (Bool, Verification)) -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
retrieve :: (MeterUpdate -> ProtoRunner (Bool, Verification)) -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
retrieve runner k af dest p =
metered (Just p) k $ \m p' ->
fromMaybe (False, UnVerified)
<$> runner p' (P2P.get dest k af m p')
runner p' (P2P.get dest k af m p') >>= \case
Just (True, v) -> return v
Just (False, _) -> giveup "transfer failed"
Nothing -> giveup "can't connect to remote"
remove :: ProtoRunner Bool -> Key -> Annex Bool
remove runner k = fromMaybe False <$> runner (P2P.remove k)
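Both store and retrieve now follow the same translation: the protocol runner yields a Maybe, distinguishing "could not connect" from "ran and failed", and each of those cases becomes an exception instead of a False or UnVerified return. A hedged sketch with stand-in types (Verification below is a simplified copy, and giveup is approximated with an IO error):

import Control.Exception (throwIO)

-- Simplified stand-in for git-annex's Verification type.
data Verification = UnVerified | Verified
    deriving Show

-- Approximation of giveup: fail with a message.
giveUp :: String -> IO a
giveUp = throwIO . userError

-- Turn the runner's Maybe (Bool, Verification) result into a Verification,
-- throwing on transfer failure or connection failure.
finishRetrieve :: IO (Maybe (Bool, Verification)) -> IO Verification
finishRetrieve runner = do
    r <- runner
    case r of
        Just (True, v) -> return v
        Just (False, _) -> giveUp "transfer failed"
        Nothing -> giveUp "can't connect to remote"

main :: IO ()
main = finishRetrieve (return (Just (True, Verified))) >>= print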

View file

@@ -21,7 +21,7 @@ module Remote.Helper.Special (
fileRetriever,
byteRetriever,
storeKeyDummy,
retreiveKeyFileDummy,
retrieveKeyFileDummy,
removeKeyDummy,
checkPresentDummy,
SpecialRemoteCfg(..),
@@ -112,7 +112,7 @@ fileRetriever a k m callback = do
-- A Retriever that generates a lazy ByteString containing the Key's
-- content, and passes it to a callback action which will fully consume it
-- before returning.
byteRetriever :: (Key -> (L.ByteString -> Annex Bool) -> Annex Bool) -> Retriever
byteRetriever :: (Key -> (L.ByteString -> Annex ()) -> Annex ()) -> Retriever
byteRetriever a k _m callback = a k (callback . ByteContent)
{- The base Remote that is provided to specialRemote needs to have
@@ -122,8 +122,8 @@ byteRetriever a k _m callback = a k (callback . ByteContent)
-}
storeKeyDummy :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
storeKeyDummy _ _ _ = error "missing storeKey implementation"
retreiveKeyFileDummy :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
retreiveKeyFileDummy _ _ _ _ = unVerified (return False)
retrieveKeyFileDummy :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Verification
retrieveKeyFileDummy _ _ _ _ = error "missing retrieveKeyFile implementation"
removeKeyDummy :: Key -> Annex Bool
removeKeyDummy _ = return False
checkPresentDummy :: Key -> Annex Bool
@@ -168,11 +168,13 @@ specialRemote' cfg c storer retriever remover checkpresent baser = encr
where
encr = baser
{ storeKey = \k _f p -> cip >>= storeKeyGen k p
, retrieveKeyFile = \k _f d p -> cip >>= unVerified . retrieveKeyFileGen k d p
, retrieveKeyFileCheap = \k f d -> cip >>= maybe
(retrieveKeyFileCheap baser k f d)
-- retrieval of encrypted keys is never cheap
(\_ -> return False)
, retrieveKeyFile = \k _f d p -> cip >>= retrieveKeyFileGen k d p
, retrieveKeyFileCheap = case retrieveKeyFileCheap baser of
Nothing -> Nothing
Just a
-- retrieval of encrypted keys is never cheap
| isencrypted -> Nothing
| otherwise -> Just $ \k f d -> a k f d
-- When encryption is used, the remote could provide
-- some other content encrypted by the user, and trick
-- git-annex into decrypting it, leaking the decryption
@@ -226,10 +228,11 @@ specialRemote' cfg c storer retriever remover checkpresent baser = encr
storer (enck k) (ByteContent encb) p
-- call retriever to get chunks; decrypt them; stream to dest file
retrieveKeyFileGen k dest p enc = safely $
retrieveKeyFileGen k dest p enc = do
displayprogress p k Nothing $ \p' ->
retrieveChunks retriever (uuid baser) chunkconfig
enck k dest p' (sink dest enc encr)
return UnVerified
where
enck = maybe id snd enc
@@ -268,27 +271,25 @@ sink
-> Maybe Handle
-> Maybe MeterUpdate
-> ContentSource
-> Annex Bool
sink dest enc c mh mp content = do
case (enc, mh, content) of
(Nothing, Nothing, FileContent f)
| f == dest -> noop
| otherwise -> liftIO $ moveFile f dest
(Just (cipher, _), _, ByteContent b) -> do
cmd <- gpgCmd <$> Annex.getGitConfig
-> Annex ()
sink dest enc c mh mp content = case (enc, mh, content) of
(Nothing, Nothing, FileContent f)
| f == dest -> noop
| otherwise -> liftIO $ moveFile f dest
(Just (cipher, _), _, ByteContent b) -> do
cmd <- gpgCmd <$> Annex.getGitConfig
decrypt cmd c cipher (feedBytes b) $
readBytes write
(Just (cipher, _), _, FileContent f) -> do
cmd <- gpgCmd <$> Annex.getGitConfig
withBytes content $ \b ->
decrypt cmd c cipher (feedBytes b) $
readBytes write
(Just (cipher, _), _, FileContent f) -> do
cmd <- gpgCmd <$> Annex.getGitConfig
withBytes content $ \b ->
decrypt cmd c cipher (feedBytes b) $
readBytes write
liftIO $ nukeFile f
(Nothing, _, FileContent f) -> do
withBytes content write
liftIO $ nukeFile f
(Nothing, _, ByteContent b) -> write b
return True
liftIO $ nukeFile f
(Nothing, _, FileContent f) -> do
withBytes content write
liftIO $ nukeFile f
(Nothing, _, ByteContent b) -> write b
where
write b = case mh of
Just h -> liftIO $ b `streamto` h