make storeKey throw exceptions

When storing content on a remote fails, always display a reason why.

Since the Storer used by special remotes already did, this mostly affects
git remotes, but not exclusively; for example, if git-lfs failed to connect
to the endpoint, it used to silently return False.
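
The change follows one pattern across the diff below: storeKey-style actions
move from Annex Bool (returning False on failure, sometimes after a warning,
sometimes silently) to Annex (), signalling failure by throwing. Here is a
minimal sketch of the before/after shape, assuming git-annex's Annex monad
and the giveup, warning, and catchNonAsync helpers that appear in the hunks
below; storeToRemote is a hypothetical stand-in for the actual transfer
action, not something from this commit:

    -- Old shape: failure is a return value, and the reason can get lost.
    storeKeyOld :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
    storeKeyOld k _ p = catchNonAsync
        (storeToRemote k p >> return True)
        (\e -> warning (show e) >> return False)

    -- New shape: failure is an exception, so a reason is always displayed.
    storeKeyNew :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
    storeKeyNew k _ p = storeToRemote k p
        `catchNonAsync` (\e -> giveup ("failed to send content to remote: " ++ show e))
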
Joey Hess 2020-05-13 14:03:00 -04:00
parent b50ee9cd0c
commit c1cd402081
GPG key ID: DB12DB0FF05F8F38
34 changed files with 214 additions and 197 deletions

View file

@@ -117,28 +117,22 @@ storeChunks
     -> MeterUpdate
     -> Storer
     -> CheckPresent
-    -> Annex Bool
+    -> Annex ()
 storeChunks u chunkconfig encryptor k f p storer checker =
     case chunkconfig of
-        (UnpaddedChunks chunksize) | isStableKey k ->
-            bracketIO open close (go chunksize)
+        (UnpaddedChunks chunksize) | isStableKey k -> do
+            h <- liftIO $ openBinaryFile f ReadMode
+            go chunksize h
+            liftIO $ hClose h
         _ -> storer k (FileContent f) p
   where
-    open = tryIO $ openBinaryFile f ReadMode
-    close (Right h) = hClose h
-    close (Left _) = noop
-    go _ (Left e) = do
-        warning (show e)
-        return False
-    go chunksize (Right h) = do
+    go chunksize h = do
         let chunkkeys = chunkKeyStream k chunksize
         (chunkkeys', startpos) <- seekResume h encryptor chunkkeys checker
         b <- liftIO $ L.hGetContents h
         gochunks p startpos chunksize b chunkkeys'
-    gochunks :: MeterUpdate -> BytesProcessed -> ChunkSize -> L.ByteString -> ChunkKeyStream -> Annex Bool
+    gochunks :: MeterUpdate -> BytesProcessed -> ChunkSize -> L.ByteString -> ChunkKeyStream -> Annex ()
     gochunks meterupdate startpos chunksize = loop startpos . splitchunk
       where
         splitchunk = L.splitAt chunksize
@@ -148,16 +142,12 @@ storeChunks u chunkconfig encryptor k f p storer checker =
                 -- Once all chunks are successfully
                 -- stored, update the chunk log.
                 chunksStored u k (FixedSizeChunks chunksize) numchunks
-                return True
             | otherwise = do
                 liftIO $ meterupdate' zeroBytesProcessed
                 let (chunkkey, chunkkeys') = nextChunkKeyStream chunkkeys
-                ifM (storer chunkkey (ByteContent chunk) meterupdate')
-                    ( do
-                        let bytesprocessed' = addBytesProcessed bytesprocessed (L.length chunk)
-                        loop bytesprocessed' (splitchunk bs) chunkkeys'
-                    , return False
-                    )
+                storer chunkkey (ByteContent chunk) meterupdate'
+                let bytesprocessed' = addBytesProcessed bytesprocessed (L.length chunk)
+                loop bytesprocessed' (splitchunk bs) chunkkeys'
           where
             numchunks = numChunks chunkkeys
     {- The MeterUpdate that is passed to the action

View file

@@ -63,20 +63,15 @@ probeChunks basedest check = go [] $ map (basedest ++) chunkStream
  - finalizer is called to rename the tmp into the dest
  - (and do any other cleanup).
  -}
-storeChunks :: Key -> FilePath -> FilePath -> ([FilePath] -> IO [FilePath]) -> (FilePath -> String -> IO ()) -> (FilePath -> FilePath -> IO ()) -> IO Bool
-storeChunks key tmp dest storer recorder finalizer = either onerr return
-    =<< (E.try go :: IO (Either E.SomeException Bool))
+storeChunks :: Key -> FilePath -> FilePath -> ([FilePath] -> IO [FilePath]) -> (FilePath -> String -> IO ()) -> (FilePath -> FilePath -> IO ()) -> IO ()
+storeChunks key tmp dest storer recorder finalizer = do
+    stored <- storer tmpdests
+    let chunkcount = basef ++ chunkCount
+    recorder chunkcount (show $ length stored)
+    finalizer tmp dest
+    when (null stored) $
+        giveup "no chunks were stored"
   where
-    go = do
-        stored <- storer tmpdests
-        let chunkcount = basef ++ chunkCount
-        recorder chunkcount (show $ length stored)
-        finalizer tmp dest
-        return (not $ null stored)
-    onerr e = do
-        warningIO (show e)
-        return False
     basef = tmp ++ fromRawFilePath (keyFile key)
     tmpdests = map (basef ++ ) chunkStream

View file

@@ -188,9 +188,8 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
         -- when another repository has already stored the
         -- key, and the local repository does not know
         -- about it. To avoid unnecessary costs, don't do it.
-        { storeKey = \_ _ _ -> do
-            warning "remote is configured with exporttree=yes; use `git-annex export` to store content on it"
-            return False
+        { storeKey = \_ _ _ ->
+            giveup "remote is configured with exporttree=yes; use `git-annex export` to store content on it"
         -- Keys can be retrieved using retrieveExport,
         -- but since that retrieves from a path in the
         -- remote that another writer could have replaced

View file

@@ -25,7 +25,7 @@ import Network.HTTP.Types
 --
 -- Implemented as a fileStorer, so that the content can be streamed
 -- from the file in constant space.
-httpStorer :: (Key -> RequestBody -> Annex Bool) -> Storer
+httpStorer :: (Key -> RequestBody -> Annex ()) -> Storer
 httpStorer a = fileStorer $ \k f m -> a k =<< liftIO (httpBodyStorer f m)
 -- Reads the file and generates a streaming request body, that will update

View file

@@ -30,12 +30,14 @@ type ProtoConnRunner c = forall a. P2P.Proto a -> ClosableConnection c -> Annex
 -- the pool when done.
 type WithConn a c = (ClosableConnection c -> Annex (ClosableConnection c, a)) -> Annex a
-store :: (MeterUpdate -> ProtoRunner Bool) -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+store :: (MeterUpdate -> ProtoRunner Bool) -> Key -> AssociatedFile -> MeterUpdate -> Annex ()
 store runner k af p = do
     let sizer = KeySizer k (fmap fst <$> prepSendAnnex k)
-    metered (Just p) sizer $ \_ p' ->
-        fromMaybe False
-            <$> runner p' (P2P.put k af p')
+    metered (Just p) sizer $ \_ p' ->
+        runner p' (P2P.put k af p') >>= \case
+            Just True -> return ()
+            Just False -> giveup "transfer failed"
+            Nothing -> giveup "can't connect to remote"
 retrieve :: (MeterUpdate -> ProtoRunner (Bool, Verification)) -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
 retrieve runner k af dest p =

View file

@@ -44,23 +44,23 @@ adjustReadOnly r
         }
     | otherwise = r
-readonlyStoreKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+readonlyStoreKey :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
 readonlyStoreKey _ _ _ = readonlyFail
 readonlyRemoveKey :: Key -> Annex Bool
-readonlyRemoveKey _ = readonlyFail
+readonlyRemoveKey _ = readonlyFail'
 readonlyStorer :: Storer
 readonlyStorer _ _ _ = readonlyFail
 readonlyStoreExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
-readonlyStoreExport _ _ _ _ = readonlyFail
+readonlyStoreExport _ _ _ _ = readonlyFail'
 readonlyRemoveExport :: Key -> ExportLocation -> Annex Bool
-readonlyRemoveExport _ _ = readonlyFail
+readonlyRemoveExport _ _ = readonlyFail'
 readonlyRemoveExportDirectory :: ExportDirectory -> Annex Bool
-readonlyRemoveExportDirectory _ = readonlyFail
+readonlyRemoveExportDirectory _ = readonlyFail'
 readonlyRenameExport :: Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
 readonlyRenameExport _ _ _ = return Nothing
@@ -70,10 +70,13 @@ readonlyStoreExportWithContentIdentifier _ _ _ _ _ =
     return $ Left readonlyWarning
 readonlyRemoveExportWithContentIdentifier :: Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
-readonlyRemoveExportWithContentIdentifier _ _ _ = readonlyFail
+readonlyRemoveExportWithContentIdentifier _ _ _ = readonlyFail'
-readonlyFail :: Annex Bool
-readonlyFail = do
+readonlyFail :: Annex ()
+readonlyFail = giveup readonlyWarning
+readonlyFail' :: Annex Bool
+readonlyFail' = do
     warning readonlyWarning
     return False

View file

@@ -90,7 +90,7 @@ mkRetrievalVerifiableKeysSecure gc
 -- A Storer that expects to be provided with a file containing
 -- the content of the key to store.
-fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex Bool) -> Storer
+fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex ()) -> Storer
 fileStorer a k (FileContent f) m = a k f m
 fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
     liftIO $ L.writeFile f b
@@ -98,7 +98,7 @@ fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
 -- A Storer that expects to be provided with a L.ByteString of
 -- the content to store.
-byteStorer :: (Key -> L.ByteString -> MeterUpdate -> Annex Bool) -> Storer
+byteStorer :: (Key -> L.ByteString -> MeterUpdate -> Annex ()) -> Storer
 byteStorer a k c m = withBytes c $ \b -> a k b m
 -- A Retriever that writes the content of a Key to a provided file.
@@ -120,8 +120,8 @@ byteRetriever a k _m callback = a k (callback . ByteContent)
  - but they are never actually used (since specialRemote replaces them).
  - Here are some dummy ones.
  -}
-storeKeyDummy :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
-storeKeyDummy _ _ _ = return False
+storeKeyDummy :: Key -> AssociatedFile -> MeterUpdate -> Annex ()
+storeKeyDummy _ _ _ = error "missing storeKey implementation"
 retreiveKeyFileDummy :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
 retreiveKeyFileDummy _ _ _ _ = unVerified (return False)
 removeKeyDummy :: Key -> Annex Bool
@@ -208,7 +208,7 @@ specialRemote' cfg c storer retriever remover checkpresent baser = encr
     safely a = catchNonAsync a (\e -> warning (show e) >> return False)
     -- chunk, then encrypt, then feed to the storer
-    storeKeyGen k p enc = safely $ sendAnnex k rollback $ \src ->
+    storeKeyGen k p enc = sendAnnex k rollback $ \src ->
         displayprogress p k (Just src) $ \p' ->
             storeChunks (uuid baser) chunkconfig enck k src p'
                 (storechunk enc)