make storeKey throw exceptions

When storing content on remote fails, always display a reason why.

Since the Storer used by special remotes already did, this mostly affects
git remotes, but not entirely. For example, if git-lfs failed to connect to
the endpoint, it used to silently return False.
This commit is contained in:
Joey Hess 2020-05-13 14:03:00 -04:00
parent b50ee9cd0c
commit c1cd402081
No known key found for this signature in database
GPG key ID: DB12DB0FF05F8F38
34 changed files with 214 additions and 197 deletions

View file

@ -584,21 +584,19 @@ unlinkAnnex key = do
{- Runs an action to transfer an object's content.
-
- In some cases, it's possible for the file to change as it's being sent.
- If this happens, runs the rollback action and returns False. The
- rollback action should remove the data that was transferred.
- If this happens, runs the rollback action and throws an exception.
- The rollback action should remove the data that was transferred.
-}
sendAnnex :: Key -> Annex () -> (FilePath -> Annex Bool) -> Annex Bool
sendAnnex :: Key -> Annex () -> (FilePath -> Annex a) -> Annex a
sendAnnex key rollback sendobject = go =<< prepSendAnnex key
where
go Nothing = return False
go (Just (f, checksuccess)) = do
r <- sendobject f
ifM checksuccess
( return r
, do
rollback
return False
)
unlessM checksuccess $ do
rollback
giveup "content changed while it was being sent"
return r
go Nothing = giveup "content not available to send"
{- Returns a file that contains an object's content,
- and a check to run after the transfer is complete.

View file

@ -26,6 +26,7 @@ git-annex (8.20200502) UNRELEASED; urgency=medium
the current directory.
* Display a warning message when asked to operate on a file inside a
directory that's a symbolic link to elsewhere.
* When storing content on remote fails, always display a reason why.
-- Joey Hess <id@joeyh.name> Mon, 04 May 2020 12:46:11 -0400

View file

@ -126,8 +126,12 @@ toPerform dest removewhen key afile fastcheck isthere =
Right False -> do
showAction $ "to " ++ Remote.name dest
ok <- notifyTransfer Upload afile $
upload (Remote.uuid dest) key afile stdRetry $
Remote.storeKey dest key afile
upload (Remote.uuid dest) key afile stdRetry $ \p ->
tryNonAsync (Remote.storeKey dest key afile p) >>= \case
Left e -> do
warning (show e)
return False
Right () -> return True
if ok
then finish False $
Remote.logStatus dest key InfoPresent

View file

@ -40,6 +40,7 @@ import "crypto-api" Crypto.Random
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import qualified Data.Map as M
import Data.Either
import Control.Concurrent.STM hiding (check)
cmd :: Command
@ -217,11 +218,11 @@ test runannex mkr mkk =
, check ("present " ++ show False) $ \r k ->
whenwritable r $ present r k False
, check "storeKey" $ \r k ->
whenwritable r $ store r k
whenwritable r $ isRight <$> tryNonAsync (store r k)
, check ("present " ++ show True) $ \r k ->
whenwritable r $ present r k True
, check "storeKey when already present" $ \r k ->
whenwritable r $ store r k
whenwritable r $ isRight <$> tryNonAsync (store r k)
, check ("present " ++ show True) $ \r k -> present r k True
, check "retrieveKeyFile" $ \r k -> do
lockContentForRemoval k removeAnnex
@ -341,7 +342,7 @@ testUnavailable :: RunAnnex -> Annex (Maybe Remote) -> Annex Key -> [TestTree]
testUnavailable runannex mkr mkk =
[ check (== Right False) "removeKey" $ \r k ->
Remote.removeKey r k
, check (== Right False) "storeKey" $ \r k ->
, check isLeft "storeKey" $ \r k ->
Remote.storeKey r k (AssociatedFile Nothing) nullMeterUpdate
, check (`notElem` [Right True, Right False]) "checkPresent" $ \r k ->
Remote.checkPresent r k

View file

@ -52,10 +52,13 @@ start o key = startingCustomOutput key $ case fromToOptions o of
toPerform :: Key -> AssociatedFile -> Remote -> CommandPerform
toPerform key file remote = go Upload file $
upload (uuid remote) key file stdRetry $ \p -> do
ok <- Remote.storeKey remote key file p
when ok $
Remote.logStatus remote key InfoPresent
return ok
tryNonAsync (Remote.storeKey remote key file p) >>= \case
Left e -> do
warning (show e)
return False
Right () -> do
Remote.logStatus remote key InfoPresent
return True
fromPerform :: Key -> AssociatedFile -> Remote -> CommandPerform
fromPerform key file remote = go Upload file $

View file

@ -38,10 +38,13 @@ start = do
runner (TransferRequest direction remote key file)
| direction == Upload = notifyTransfer direction file $
upload (Remote.uuid remote) key file stdRetry $ \p -> do
ok <- Remote.storeKey remote key file p
when ok $
Remote.logStatus remote key InfoPresent
return ok
tryNonAsync (Remote.storeKey remote key file p) >>= \case
Left e -> do
warning (show e)
return False
Right () -> do
Remote.logStatus remote key InfoPresent
return True
| otherwise = notifyTransfer direction file $
download (Remote.uuid remote) key file stdRetry $ \p ->
getViaTmp (Remote.retrievalSecurityPolicy remote) (RemoteVerify remote) key $ \t -> do

View file

@ -160,7 +160,8 @@ adbSetup _ mu _ c gc = do
store :: AndroidSerial -> AndroidPath -> Storer
store serial adir = fileStorer $ \k src _p ->
let dest = androidLocation adir k
in store' serial dest src
in unlessM (store' serial dest src) $
giveup "adb failed"
store' :: AndroidSerial -> AndroidPath -> FilePath -> Annex Bool
store' serial dest src = store'' serial dest src (return True)

View file

@ -111,10 +111,8 @@ downloadKey key _file dest p = unVerified $
downloadKeyCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
downloadKeyCheap _ _ _ = return False
uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
uploadKey _ _ _ = do
warning "upload to bittorrent not supported"
return False
uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
uploadKey _ _ _ = giveup "upload to bittorrent not supported"
dropKey :: Key -> Annex Bool
dropKey k = do

View file

@ -156,9 +156,7 @@ store r buprepo = byteStorer $ \k b p -> do
showOutput -- make way for bup output
let cmd = proc "bup" (toCommand params)
quiet <- commandProgressDisabled
let feeder = \h -> do
meteredWrite p h b
return True
let feeder = \h -> meteredWrite p h b
liftIO $ if quiet
then feedWithQuietOutput createProcessSuccess cmd feeder
else withHandle StdinHandle createProcessSuccess cmd feeder

View file

@ -127,7 +127,8 @@ store ddarrepo = fileStorer $ \k src _p -> do
, Param $ ddarRepoLocation ddarrepo
, File src
]
liftIO $ boolSystem "ddar" params
unlessM (liftIO $ boolSystem "ddar" params) $
giveup "ddar failed"
{- Convert remote DdarRepo to host and path on remote end -}
splitRemoteDdarRepo :: DdarRepo -> (SshHost, String)

View file

@ -170,16 +170,16 @@ checkDiskSpaceDirectory d k = do
<*> getFileStatus annexdir
checkDiskSpace (Just d) k 0 samefilesystem
store :: FilePath -> ChunkConfig -> Key -> L.ByteString -> MeterUpdate -> Annex Bool
store :: FilePath -> ChunkConfig -> Key -> L.ByteString -> MeterUpdate -> Annex ()
store d chunkconfig k b p = liftIO $ do
void $ tryIO $ createDirectoryUnder d tmpdir
case chunkconfig of
LegacyChunks chunksize -> Legacy.store d chunksize (finalizeStoreGeneric d) k b p tmpdir destdir
LegacyChunks chunksize ->
Legacy.store d chunksize (finalizeStoreGeneric d) k b p tmpdir destdir
_ -> do
let tmpf = tmpdir </> kf
meteredWriteFile p tmpf b
finalizeStoreGeneric d tmpdir destdir
return True
where
tmpdir = addTrailingPathSeparator $ d </> "tmp" </> kf
kf = fromRawFilePath (keyFile k)

View file

@ -70,7 +70,7 @@ storeLegacyChunked' meterupdate chunksize (d:dests) bs c = do
feed bytes' (sz - s) ls h
else return (l:ls)
storeHelper :: FilePath -> (FilePath -> FilePath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO Bool
storeHelper :: FilePath -> (FilePath -> FilePath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO ()
storeHelper repotop finalizer key storer tmpdir destdir = do
void $ liftIO $ tryIO $ createDirectoryUnder repotop tmpdir
Legacy.storeChunks key tmpdir destdir storer recorder finalizer
@ -80,7 +80,7 @@ storeHelper repotop finalizer key storer tmpdir destdir = do
writeFile f s
void $ tryIO $ preventWrite f
store :: FilePath -> ChunkSize -> (FilePath -> FilePath -> IO ()) -> Key -> L.ByteString -> MeterUpdate -> FilePath -> FilePath -> IO Bool
store :: FilePath -> ChunkSize -> (FilePath -> FilePath -> IO ()) -> Key -> L.ByteString -> MeterUpdate -> FilePath -> FilePath -> IO ()
store repotop chunksize finalizer k b p = storeHelper repotop finalizer k $ \dests ->
storeLegacyChunked p chunksize dests b

View file

@ -222,14 +222,14 @@ checkExportSupported' external = go `catchNonAsync` (const (return False))
_ -> Nothing
storeKeyM :: External -> Storer
storeKeyM external = fileStorer $ \k f p ->
handleRequestKey external (\sk -> TRANSFER Upload sk f) k (Just p) $ \resp ->
storeKeyM external = fileStorer $ \k f p -> either giveup return =<< go k f p
where
go k f p = handleRequestKey external (\sk -> TRANSFER Upload sk f) k (Just p) $ \resp ->
case resp of
TRANSFER_SUCCESS Upload k' | k == k' -> result True
TRANSFER_SUCCESS Upload k' | k == k' ->
result (Right ())
TRANSFER_FAILURE Upload k' errmsg | k == k' ->
Just $ do
warning $ respErrorMessage "TRANSFER" errmsg
return (Result False)
result (Left (respErrorMessage "TRANSFER" errmsg))
_ -> Nothing
retrieveKeyFileM :: External -> Retriever

View file

@ -367,20 +367,21 @@ store r rsyncopts k s p = do
store' :: Git.Repo -> Remote -> Remote.Rsync.RsyncOpts -> Storer
store' repo r rsyncopts
| not $ Git.repoIsUrl repo =
byteStorer $ \k b p -> guardUsable repo (return False) $ liftIO $ do
byteStorer $ \k b p -> guardUsable repo (giveup "cannot access remote") $ liftIO $ do
let tmpdir = Git.repoLocation repo </> "tmp" </> fromRawFilePath (keyFile k)
void $ tryIO $ createDirectoryUnder (Git.repoLocation repo) tmpdir
let tmpf = tmpdir </> fromRawFilePath (keyFile k)
meteredWriteFile p tmpf b
let destdir = parentDir $ gCryptLocation repo k
Remote.Directory.finalizeStoreGeneric (Git.repoLocation repo) tmpdir destdir
return True
| Git.repoIsSsh repo = if accessShell r
then fileStorer $ \k f p -> do
oh <- mkOutputHandler
Ssh.rsyncHelper oh (Just p)
ok <- Ssh.rsyncHelper oh (Just p)
=<< Ssh.rsyncParamsRemote False r Upload k f
(AssociatedFile Nothing)
unless ok $
giveup "rsync failed"
else fileStorer $ Remote.Rsync.store rsyncopts
| otherwise = unsupportedUrl

View file

@ -649,28 +649,26 @@ copyFromRemoteCheap' _ _ _ _ _ _ = return False
#endif
{- Tries to copy a key's content to a remote's annex. -}
copyToRemote :: Remote -> State -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
copyToRemote :: Remote -> State -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
copyToRemote r st key file meterupdate = do
repo <- getRepo r
copyToRemote' repo r st key file meterupdate
copyToRemote' :: Git.Repo -> Remote -> State -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
copyToRemote' :: Git.Repo -> Remote -> State -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
copyToRemote' repo r st@(State connpool duc _ _ _) key file meterupdate
| not $ Git.repoIsUrl repo = ifM duc
( guardUsable repo (return False) $ commitOnCleanup repo r st $
( guardUsable repo (giveup "cannot access remote") $ commitOnCleanup repo r st $
copylocal =<< Annex.Content.prepSendAnnex key
, return False
, giveup "remote does not have expected annex.uuid value"
)
| Git.repoIsSsh repo = commitOnCleanup repo r st $
P2PHelper.store
(\p -> Ssh.runProto r connpool (return False) (copyremotefallback p))
(Ssh.runProto r connpool (return False) . copyremotefallback)
key file meterupdate
| otherwise = do
warning "copying to non-ssh repo not supported"
return False
| otherwise = giveup "copying to non-ssh repo not supported"
where
copylocal Nothing = return False
copylocal Nothing = giveup "content not available"
copylocal (Just (object, checksuccess)) = do
-- The checksuccess action is going to be run in
-- the remote's Annex, but it needs access to the local
@ -680,7 +678,7 @@ copyToRemote' repo r st@(State connpool duc _ _ _) key file meterupdate
u <- getUUID
hardlink <- wantHardLink
-- run copy from perspective of remote
onLocalFast st $ ifM (Annex.Content.inAnnex key)
res <- onLocalFast st $ ifM (Annex.Content.inAnnex key)
( return True
, runTransfer (Transfer Download u (fromKey id key)) file stdRetry $ \p -> do
copier <- mkCopier hardlink st params
@ -692,7 +690,11 @@ copyToRemote' repo r st@(State connpool duc _ _ _) key file meterupdate
Annex.Content.saveState True
return res
)
copyremotefallback p = Annex.Content.sendAnnex key noop $ \object -> do
unless res $
giveup "failed to send content to remote"
copyremotefallback p = either (const False) id
<$> tryNonAsync (copyremotefallback' p)
copyremotefallback' p = Annex.Content.sendAnnex key noop $ \object -> do
-- This is too broad really, but recvkey normally
-- verifies content anyway, so avoid complicating
-- it with a local sendAnnex check and rollback.

View file

@ -440,18 +440,15 @@ mkDownloadRequest rs k = case (extractKeySha256 k, extractKeySize k) of
store :: RemoteStateHandle -> TVar LFSHandle -> Storer
store rs h = fileStorer $ \k src p -> getLFSEndpoint LFS.RequestUpload h >>= \case
Nothing -> return False
Just endpoint -> flip catchNonAsync failederr $ do
Nothing -> giveup "unable to connect to git-lfs endpoint"
Just endpoint -> do
(req, sha256, size) <- mkUploadRequest rs k src
sendTransferRequest req endpoint >>= \case
Left err -> do
warning err
return False
Right resp -> do
body <- liftIO $ httpBodyStorer src p
forM_ (LFS.objects resp) $
send body sha256 size
return True
Left err -> giveup err
where
send body sha256 size tro
| LFS.resp_oid tro /= sha256 || LFS.resp_size tro /= size =
@ -466,9 +463,6 @@ store rs h = fileStorer $ \k src p -> getLFSEndpoint LFS.RequestUpload h >>= \ca
Just [] -> noop -- server already has it
Just reqs -> forM_ reqs $
makeSmallAPIRequest . setRequestCheckStatus
failederr e = do
warning (show e)
return False
retrieve :: RemoteStateHandle -> TVar LFSHandle -> Retriever
retrieve rs h = fileRetriever $ \dest k p -> getLFSEndpoint LFS.RequestDownload h >>= \case

View file

@ -147,7 +147,7 @@ checkNonEmpty k
giveup "Cannot store empty files in Glacier."
| otherwise = return ()
store' :: Remote -> Key -> L.ByteString -> MeterUpdate -> Annex Bool
store' :: Remote -> Key -> L.ByteString -> MeterUpdate -> Annex ()
store' r k b p = go =<< glacierEnv c gc u
where
c = config r
@ -160,13 +160,11 @@ store' r k b p = go =<< glacierEnv c gc u
, Param $ getVault $ config r
, Param "-"
]
go Nothing = return False
go (Just e) = do
go Nothing = giveup "Glacier not usable."
go (Just e) = liftIO $ do
let cmd = (proc "glacier" (toCommand params)) { env = Just e }
liftIO $ catchBoolIO $
withHandle StdinHandle createProcessSuccess cmd $ \h -> do
meteredWrite p h b
return True
withHandle StdinHandle createProcessSuccess cmd $ \h ->
meteredWrite p h b
retrieve :: Remote -> Retriever
retrieve = byteRetriever . retrieve'

View file

@ -117,28 +117,22 @@ storeChunks
-> MeterUpdate
-> Storer
-> CheckPresent
-> Annex Bool
-> Annex ()
storeChunks u chunkconfig encryptor k f p storer checker =
case chunkconfig of
(UnpaddedChunks chunksize) | isStableKey k ->
bracketIO open close (go chunksize)
(UnpaddedChunks chunksize) | isStableKey k -> do
h <- liftIO $ openBinaryFile f ReadMode
go chunksize h
liftIO $ hClose h
_ -> storer k (FileContent f) p
where
open = tryIO $ openBinaryFile f ReadMode
close (Right h) = hClose h
close (Left _) = noop
go _ (Left e) = do
warning (show e)
return False
go chunksize (Right h) = do
go chunksize h = do
let chunkkeys = chunkKeyStream k chunksize
(chunkkeys', startpos) <- seekResume h encryptor chunkkeys checker
b <- liftIO $ L.hGetContents h
gochunks p startpos chunksize b chunkkeys'
gochunks :: MeterUpdate -> BytesProcessed -> ChunkSize -> L.ByteString -> ChunkKeyStream -> Annex Bool
gochunks :: MeterUpdate -> BytesProcessed -> ChunkSize -> L.ByteString -> ChunkKeyStream -> Annex ()
gochunks meterupdate startpos chunksize = loop startpos . splitchunk
where
splitchunk = L.splitAt chunksize
@ -148,16 +142,12 @@ storeChunks u chunkconfig encryptor k f p storer checker =
-- Once all chunks are successfully
-- stored, update the chunk log.
chunksStored u k (FixedSizeChunks chunksize) numchunks
return True
| otherwise = do
liftIO $ meterupdate' zeroBytesProcessed
let (chunkkey, chunkkeys') = nextChunkKeyStream chunkkeys
ifM (storer chunkkey (ByteContent chunk) meterupdate')
( do
let bytesprocessed' = addBytesProcessed bytesprocessed (L.length chunk)
loop bytesprocessed' (splitchunk bs) chunkkeys'
, return False
)
storer chunkkey (ByteContent chunk) meterupdate'
let bytesprocessed' = addBytesProcessed bytesprocessed (L.length chunk)
loop bytesprocessed' (splitchunk bs) chunkkeys'
where
numchunks = numChunks chunkkeys
{- The MeterUpdate that is passed to the action

View file

@ -63,20 +63,15 @@ probeChunks basedest check = go [] $ map (basedest ++) chunkStream
- finalizer is called to rename the tmp into the dest
- (and do any other cleanup).
-}
storeChunks :: Key -> FilePath -> FilePath -> ([FilePath] -> IO [FilePath]) -> (FilePath -> String -> IO ()) -> (FilePath -> FilePath -> IO ()) -> IO Bool
storeChunks key tmp dest storer recorder finalizer = either onerr return
=<< (E.try go :: IO (Either E.SomeException Bool))
storeChunks :: Key -> FilePath -> FilePath -> ([FilePath] -> IO [FilePath]) -> (FilePath -> String -> IO ()) -> (FilePath -> FilePath -> IO ()) -> IO ()
storeChunks key tmp dest storer recorder finalizer = do
stored <- storer tmpdests
let chunkcount = basef ++ chunkCount
recorder chunkcount (show $ length stored)
finalizer tmp dest
when (null stored) $
giveup "no chunks were stored"
where
go = do
stored <- storer tmpdests
let chunkcount = basef ++ chunkCount
recorder chunkcount (show $ length stored)
finalizer tmp dest
return (not $ null stored)
onerr e = do
warningIO (show e)
return False
basef = tmp ++ fromRawFilePath (keyFile key)
tmpdests = map (basef ++ ) chunkStream

View file

@ -188,9 +188,8 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
-- when another repository has already stored the
-- key, and the local repository does not know
-- about it. To avoid unnecessary costs, don't do it.
{ storeKey = \_ _ _ -> do
warning "remote is configured with exporttree=yes; use `git-annex export` to store content on it"
return False
{ storeKey = \_ _ _ ->
giveup "remote is configured with exporttree=yes; use `git-annex export` to store content on it"
-- Keys can be retrieved using retrieveExport,
-- but since that retrieves from a path in the
-- remote that another writer could have replaced

View file

@ -25,7 +25,7 @@ import Network.HTTP.Types
--
-- Implemented as a fileStorer, so that the content can be streamed
-- from the file in constant space.
httpStorer :: (Key -> RequestBody -> Annex Bool) -> Storer
httpStorer :: (Key -> RequestBody -> Annex ()) -> Storer
httpStorer a = fileStorer $ \k f m -> a k =<< liftIO (httpBodyStorer f m)
-- Reads the file and generates a streaming request body, that will update

View file

@ -30,12 +30,14 @@ type ProtoConnRunner c = forall a. P2P.Proto a -> ClosableConnection c -> Annex
-- the pool when done.
type WithConn a c = (ClosableConnection c -> Annex (ClosableConnection c, a)) -> Annex a
store :: (MeterUpdate -> ProtoRunner Bool) -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store :: (MeterUpdate -> ProtoRunner Bool) -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
store runner k af p = do
let sizer = KeySizer k (fmap fst <$> prepSendAnnex k)
metered (Just p) sizer $ \_ p' ->
fromMaybe False
<$> runner p' (P2P.put k af p')
metered (Just p) sizer $ \_ p' ->
runner p' (P2P.put k af p') >>= \case
Just True -> return ()
Just False -> giveup "transfer failed"
Nothing -> giveup "can't connect to remote"
retrieve :: (MeterUpdate -> ProtoRunner (Bool, Verification)) -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
retrieve runner k af dest p =

View file

@ -44,23 +44,23 @@ adjustReadOnly r
}
| otherwise = r
readonlyStoreKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
readonlyStoreKey :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
readonlyStoreKey _ _ _ = readonlyFail
readonlyRemoveKey :: Key -> Annex Bool
readonlyRemoveKey _ = readonlyFail
readonlyRemoveKey _ = readonlyFail'
readonlyStorer :: Storer
readonlyStorer _ _ _ = readonlyFail
readonlyStoreExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
readonlyStoreExport _ _ _ _ = readonlyFail
readonlyStoreExport _ _ _ _ = readonlyFail'
readonlyRemoveExport :: Key -> ExportLocation -> Annex Bool
readonlyRemoveExport _ _ = readonlyFail
readonlyRemoveExport _ _ = readonlyFail'
readonlyRemoveExportDirectory :: ExportDirectory -> Annex Bool
readonlyRemoveExportDirectory _ = readonlyFail
readonlyRemoveExportDirectory _ = readonlyFail'
readonlyRenameExport :: Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
readonlyRenameExport _ _ _ = return Nothing
@ -70,10 +70,13 @@ readonlyStoreExportWithContentIdentifier _ _ _ _ _ =
return $ Left readonlyWarning
readonlyRemoveExportWithContentIdentifier :: Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
readonlyRemoveExportWithContentIdentifier _ _ _ = readonlyFail
readonlyRemoveExportWithContentIdentifier _ _ _ = readonlyFail'
readonlyFail :: Annex Bool
readonlyFail = do
readonlyFail :: Annex ()
readonlyFail = giveup readonlyWarning
readonlyFail' :: Annex Bool
readonlyFail' = do
warning readonlyWarning
return False

View file

@ -90,7 +90,7 @@ mkRetrievalVerifiableKeysSecure gc
-- A Storer that expects to be provided with a file containing
-- the content of the key to store.
fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex Bool) -> Storer
fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex ()) -> Storer
fileStorer a k (FileContent f) m = a k f m
fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
liftIO $ L.writeFile f b
@ -98,7 +98,7 @@ fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
-- A Storer that expects to be provided with a L.ByteString of
-- the content to store.
byteStorer :: (Key -> L.ByteString -> MeterUpdate -> Annex Bool) -> Storer
byteStorer :: (Key -> L.ByteString -> MeterUpdate -> Annex ()) -> Storer
byteStorer a k c m = withBytes c $ \b -> a k b m
-- A Retriever that writes the content of a Key to a provided file.
@ -120,8 +120,8 @@ byteRetriever a k _m callback = a k (callback . ByteContent)
- but they are never actually used (since specialRemote replaces them).
- Here are some dummy ones.
-}
storeKeyDummy :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
storeKeyDummy _ _ _ = return False
storeKeyDummy :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
storeKeyDummy _ _ _ = error "missing storeKey implementation"
retreiveKeyFileDummy :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
retreiveKeyFileDummy _ _ _ _ = unVerified (return False)
removeKeyDummy :: Key -> Annex Bool
@ -208,7 +208,7 @@ specialRemote' cfg c storer retriever remover checkpresent baser = encr
safely a = catchNonAsync a (\e -> warning (show e) >> return False)
-- chunk, then encrypt, then feed to the storer
storeKeyGen k p enc = safely $ sendAnnex k rollback $ \src ->
storeKeyGen k p enc = sendAnnex k rollback $ \src ->
displayprogress p k (Just src) $ \p' ->
storeChunks (uuid baser) chunkconfig enck k src p'
(storechunk enc)

View file

@ -133,8 +133,17 @@ lookupHook hookname action = do
hook = annexConfig $ encodeBS' $ hookname ++ "-" ++ action ++ "-hook"
hookfallback = annexConfig $ encodeBS' $ hookname ++ "-hook"
runHook :: HookName -> Action -> Key -> Maybe FilePath -> Annex Bool -> Annex Bool
runHook hook action k f a = maybe (return False) run =<< lookupHook hook action
runHook :: HookName -> Action -> Key -> Maybe FilePath -> Annex ()
runHook hook action k f = lookupHook hook action >>= \case
Just command -> do
showOutput -- make way for hook output
environ <- liftIO (hookEnv action k f)
unlessM (progressCommandEnv "sh" [Param "-c", Param command] environ) $
giveup $ hook ++ " hook exited nonzero!"
Nothing -> giveup $ action ++ " hook misconfigured"
runHook' :: HookName -> Action -> Key -> Maybe FilePath -> Annex Bool -> Annex Bool
runHook' hook action k f a = maybe (return False) run =<< lookupHook hook action
where
run command = do
showOutput -- make way for hook output
@ -146,19 +155,18 @@ runHook hook action k f a = maybe (return False) run =<< lookupHook hook action
)
store :: HookName -> Storer
store h = fileStorer $ \k src _p ->
runHook h "store" k (Just src) $ return True
store h = fileStorer $ \k src _p -> runHook h "store" k (Just src)
retrieve :: HookName -> Retriever
retrieve h = fileRetriever $ \d k _p ->
unlessM (runHook h "retrieve" k (Just d) $ return True) $
unlessM (runHook' h "retrieve" k (Just d) $ return True) $
giveup "failed to retrieve content"
retrieveCheap :: HookName -> Key -> AssociatedFile -> FilePath -> Annex Bool
retrieveCheap _ _ _ _ = return False
remove :: HookName -> Remover
remove h k = runHook h "remove" k Nothing $ return True
remove h k = runHook' h "remove" k Nothing $ return True
checkKey :: Git.Repo -> HookName -> CheckPresent
checkKey r h k = do

View file

@ -201,7 +201,7 @@ rsyncSetup _ mu _ c gc = do
- (When we have the right hash directory structure, we can just
- pass --include=X --include=X/Y --include=X/Y/file --exclude=*)
-}
store :: RsyncOpts -> Key -> FilePath -> MeterUpdate -> Annex Bool
store :: RsyncOpts -> Key -> FilePath -> MeterUpdate -> Annex ()
store o k src meterupdate = storeGeneric o meterupdate basedest populatedest
where
basedest = fromRawFilePath $ Prelude.head (keyPaths k)
@ -216,8 +216,13 @@ store o k src meterupdate = storeGeneric o meterupdate basedest populatedest
- object file, and has to be copied or hard linked into place. -}
canrename = isEncKey k || isChunkKey k
storeGeneric :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex Bool
storeGeneric o meterupdate basedest populatedest = withRsyncScratchDir $ \tmp -> do
storeGeneric :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex ()
storeGeneric o meterupdate basedest populatedest =
unlessM (storeGeneric' o meterupdate basedest populatedest) $
giveup "failed to rsync content"
storeGeneric' :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex Bool
storeGeneric' o meterupdate basedest populatedest = withRsyncScratchDir $ \tmp -> do
let dest = tmp </> basedest
createAnnexDirectory (parentDir dest)
ok <- populatedest dest
@ -287,7 +292,7 @@ checkPresentGeneric o rsyncurls = do
storeExportM :: RsyncOpts -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
storeExportM o src _k loc meterupdate =
storeGeneric o meterupdate basedest populatedest
storeGeneric' o meterupdate basedest populatedest
where
basedest = fromRawFilePath (fromExportLocation loc)
populatedest = liftIO . createLinkOrCopy src

View file

@ -322,7 +322,6 @@ store mh r info magic = fileStorer $ \k f p -> withS3HandleOrFail (uuid r) mh $
-- Store public URL to item in Internet Archive.
when (isIA info && not (isChunkKey k)) $
setUrlPresent k (iaPublicUrl info (bucketObject info k))
return True
storeHelper :: S3Info -> S3Handle -> Maybe Magic -> FilePath -> S3.Object -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
storeHelper info h magic f object p = liftIO $ case partSize info of

View file

@ -135,11 +135,11 @@ tahoeSetup _ mu _ c _ = do
where
missingfurl = giveup "Set TAHOE_FURL to the introducer furl to use."
store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
store rs hdl k _f _p = sendAnnex k noop $ \src ->
parsePut <$> liftIO (readTahoe hdl "put" [File src]) >>= maybe
(return False)
(\cap -> storeCapability rs k cap >> return True)
(giveup "tahoe failed to store content")
(\cap -> storeCapability rs k cap)
retrieve :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
retrieve rs hdl k _f d _p = unVerified $ go =<< getCapability rs k

View file

@ -97,10 +97,8 @@ downloadKey key _af dest p = unVerified $ get =<< getWebUrls key
downloadKeyCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
downloadKeyCheap _ _ _ = return False
uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
uploadKey _ _ _ = do
warning "upload to web not supported"
return False
uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
uploadKey _ _ _ = giveup "upload to web not supported"
dropKey :: Key -> Annex Bool
dropKey k = do

View file

@ -139,18 +139,13 @@ webdavSetup _ mu mcreds c gc = do
store :: DavHandleVar -> ChunkConfig -> Storer
store hv (LegacyChunks chunksize) = fileStorer $ \k f p ->
withDavHandle hv $ \case
Nothing -> return False
Just dav -> liftIO $
withMeteredFile f p $ storeLegacyChunked chunksize k dav
withDavHandle hv $ \dav -> liftIO $
withMeteredFile f p $ storeLegacyChunked chunksize k dav
store hv _ = httpStorer $ \k reqbody ->
withDavHandle hv $ \case
Nothing -> return False
Just dav -> liftIO $ goDAV dav $ do
let tmp = keyTmpLocation k
let dest = keyLocation k
storeHelper dav tmp dest reqbody
return True
withDavHandle hv $ \dav -> liftIO $ goDAV dav $ do
let tmp = keyTmpLocation k
let dest = keyLocation k
storeHelper dav tmp dest reqbody
storeHelper :: DavHandle -> DavLocation -> DavLocation -> RequestBody -> DAVT IO ()
storeHelper dav tmp dest reqbody = do
@ -172,12 +167,10 @@ retrieveCheap _ _ _ = return False
retrieve :: DavHandleVar -> ChunkConfig -> Retriever
retrieve hv cc = fileRetriever $ \d k p ->
withDavHandle hv $ \case
Nothing -> giveup "unable to connect"
Just dav -> case cc of
LegacyChunks _ -> retrieveLegacyChunked d k p dav
_ -> liftIO $
goDAV dav $ retrieveHelper (keyLocation k) d p
withDavHandle hv $ \dav -> case cc of
LegacyChunks _ -> retrieveLegacyChunked d k p dav
_ -> liftIO $
goDAV dav $ retrieveHelper (keyLocation k) d p
retrieveHelper :: DavLocation -> FilePath -> MeterUpdate -> DAVT IO ()
retrieveHelper loc d p = do
@ -186,9 +179,9 @@ retrieveHelper loc d p = do
withContentM $ httpBodyRetriever d p
remove :: DavHandleVar -> Remover
remove hv k = withDavHandle hv $ \case
Nothing -> return False
Just dav -> liftIO $ goDAV dav $
remove hv k = withDavHandle' hv $ \case
Left _e -> return False
Right dav -> liftIO $ goDAV dav $
-- Delete the key's whole directory, including any
-- legacy chunked files, etc, in a single action.
removeHelper (keyDir k)
@ -206,20 +199,18 @@ removeHelper d = do
_ -> return False
checkKey :: DavHandleVar -> Remote -> ChunkConfig -> CheckPresent
checkKey hv r chunkconfig k = withDavHandle hv $ \case
Nothing -> giveup $ name r ++ " not configured"
Just dav -> do
showChecking r
case chunkconfig of
LegacyChunks _ -> checkKeyLegacyChunked dav k
_ -> do
v <- liftIO $ goDAV dav $
existsDAV (keyLocation k)
either giveup return v
checkKey hv r chunkconfig k = withDavHandle hv $ \dav -> do
showChecking r
case chunkconfig of
LegacyChunks _ -> checkKeyLegacyChunked dav k
_ -> do
v <- liftIO $ goDAV dav $
existsDAV (keyLocation k)
either giveup return v
storeExportDav :: DavHandleVar -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
storeExportDav hdl f k loc p = case exportLocation loc of
Right dest -> withDavHandle hdl $ \mh -> runExport mh $ \dav -> do
Right dest -> withDavHandle' hdl $ \mh -> runExport mh $ \dav -> do
reqbody <- liftIO $ httpBodyStorer f p
storeHelper dav (keyTmpLocation k) dest reqbody
return True
@ -229,23 +220,23 @@ storeExportDav hdl f k loc p = case exportLocation loc of
retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
retrieveExportDav hdl _k loc d p = case exportLocation loc of
Right src -> withDavHandle hdl $ \mh -> runExport mh $ \_dav -> do
Right src -> withDavHandle' hdl $ \mh -> runExport mh $ \_dav -> do
retrieveHelper src d p
return True
Left _err -> return False
checkPresentExportDav :: DavHandleVar -> Remote -> Key -> ExportLocation -> Annex Bool
checkPresentExportDav hdl r _k loc = case exportLocation loc of
Right p -> withDavHandle hdl $ \case
Nothing -> giveup $ name r ++ " not configured"
Just h -> liftIO $ do
checkPresentExportDav hdl _ _k loc = case exportLocation loc of
Right p -> withDavHandle' hdl $ \case
Left e -> giveup e
Right h -> liftIO $ do
v <- goDAV h $ existsDAV p
either giveup return v
Left err -> giveup err
removeExportDav :: DavHandleVar-> Key -> ExportLocation -> Annex Bool
removeExportDav hdl _k loc = case exportLocation loc of
Right p -> withDavHandle hdl $ \mh -> runExport mh $ \_dav ->
Right p -> withDavHandle' hdl $ \mh -> runExport mh $ \_dav ->
removeHelper p
-- When the exportLocation is not legal for webdav,
-- the content is certainly not stored there, so it's ok for
@ -255,7 +246,7 @@ removeExportDav hdl _k loc = case exportLocation loc of
Left _err -> return True
removeExportDirectoryDav :: DavHandleVar -> ExportDirectory -> Annex Bool
removeExportDirectoryDav hdl dir = withDavHandle hdl $ \mh -> runExport mh $ \_dav -> do
removeExportDirectoryDav hdl dir = withDavHandle' hdl $ \mh -> runExport mh $ \_dav -> do
let d = fromRawFilePath $ fromExportDirectory dir
debugDav $ "delContent " ++ d
safely (inLocation d delContentM)
@ -263,23 +254,23 @@ removeExportDirectoryDav hdl dir = withDavHandle hdl $ \mh -> runExport mh $ \_d
renameExportDav :: DavHandleVar -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
renameExportDav hdl _k src dest = case (exportLocation src, exportLocation dest) of
(Right srcl, Right destl) -> withDavHandle hdl $ \case
Just h
(Right srcl, Right destl) -> withDavHandle' hdl $ \case
Right h
-- box.com's DAV endpoint has buggy handling of renames,
-- so avoid renaming when using it.
| boxComUrl `isPrefixOf` baseURL h -> return Nothing
| otherwise -> do
v <- runExport (Just h) $ \dav -> do
v <- runExport (Right h) $ \dav -> do
maybe noop (void . mkColRecursive) (locationParent destl)
moveDAV (baseURL dav) srcl destl
return True
return (Just v)
Nothing -> return (Just False)
Left _e -> return (Just False)
_ -> return (Just False)
runExport :: Maybe DavHandle -> (DavHandle -> DAVT IO Bool) -> Annex Bool
runExport Nothing _ = return False
runExport (Just h) a = fromMaybe False <$> liftIO (goDAV h $ safely (a h))
runExport :: Either String DavHandle -> (DavHandle -> DAVT IO Bool) -> Annex Bool
runExport (Left _e) _ = return False
runExport (Right h) a = fromMaybe False <$> liftIO (goDAV h $ safely (a h))
configUrl :: ParsedRemoteConfig -> Maybe URLString
configUrl c = fixup <$> getRemoteConfigValue urlField c
@ -418,7 +409,7 @@ choke f = do
data DavHandle = DavHandle DAVContext DavUser DavPass URLString
type DavHandleVar = TVar (Either (Annex (Maybe DavHandle)) (Maybe DavHandle))
type DavHandleVar = TVar (Either (Annex (Either String DavHandle)) (Either String DavHandle))
{- Prepares a DavHandle for later use. Does not connect to the server or do
- anything else expensive. -}
@ -429,11 +420,19 @@ mkDavHandleVar c gc u = liftIO $ newTVarIO $ Left $ do
(Just (user, pass), Just baseurl) -> do
ctx <- mkDAVContext baseurl
let h = DavHandle ctx (toDavUser user) (toDavPass pass) baseurl
return (Just h)
_ -> return Nothing
return (Right h)
_ -> return $ Left "webdav credentials not available"
withDavHandle :: DavHandleVar -> (Maybe DavHandle -> Annex a) -> Annex a
withDavHandle :: DavHandleVar -> (DavHandle -> Annex a) -> Annex a
withDavHandle hv a = liftIO (readTVarIO hv) >>= \case
Right hdl -> either giveup a hdl
Left mkhdl -> do
hdl <- mkhdl
liftIO $ atomically $ writeTVar hv (Right hdl)
either giveup a hdl
withDavHandle' :: DavHandleVar -> (Either String DavHandle -> Annex a) -> Annex a
withDavHandle' hv a = liftIO (readTVarIO hv) >>= \case
Right hdl -> a hdl
Left mkhdl -> do
hdl <- mkhdl
@ -472,7 +471,7 @@ prepDAV user pass = do
-- Legacy chunking code, to be removed eventually.
--
storeLegacyChunked :: ChunkSize -> Key -> DavHandle -> L.ByteString -> IO Bool
storeLegacyChunked :: ChunkSize -> Key -> DavHandle -> L.ByteString -> IO ()
storeLegacyChunked chunksize k dav b =
Legacy.storeChunks k tmp dest storer recorder finalizer
where

View file

@ -84,7 +84,8 @@ data RemoteA a = Remote
-- Transfers a key's contents from disk to the remote.
-- The key should not appear to be present on the remote until
-- all of its contents have been transferred.
, storeKey :: Key -> AssociatedFile -> MeterUpdate -> a Bool
-- Throws exception on failure.
, storeKey :: Key -> AssociatedFile -> MeterUpdate -> a ()
-- Retrieves a key's contents to a file.
-- (The MeterUpdate does not need to be used if it writes
-- sequentially to the file.)

View file

@ -23,7 +23,7 @@ isByteContent (FileContent _) = False
-- Action that stores a Key's content on a remote.
-- Can throw exceptions.
type Storer = Key -> ContentSource -> MeterUpdate -> Annex Bool
type Storer = Key -> ContentSource -> MeterUpdate -> Annex ()
-- Action that retrieves a Key's content from a remote, passing it to a
-- callback, which will fully consume the content before returning.

View file

@ -13,3 +13,6 @@ I am not yet sure what exact combinations of dots, spaces and maybe dashes cause
### What version of git-annex are you using? On what operating system?
git-annex version: 8.20200309-05df404212, Debian testing
[[!meta title="change exception handling of remotes to avoid ever failing
without telling the reason why"]]

View file

@ -0,0 +1,12 @@
[[!comment format=mdwn
username="joey"
subject="""comment 3"""
date="2020-05-13T17:58:49Z"
content="""
Converted the storeKey method to throw exceptions. This was a 1000 line
patch, 3 hours of work. Seems likely it will take 24 hours of work to finish
converting all the methods...
There were quite a few places where it used to return False without
displaying a reason for the failure, so the work seems worth it.
"""]]