change retrieveExport and getKey to throw exceptions

retrieveExport is part of an ongoing transition to make remote methods
throw exceptions rather than silently hiding failures.
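
A minimal sketch of what that transition means for a caller (illustration
only, not part of this commit; plain IO, userError, and Control.Exception.try
stand in for git-annex's Annex monad, giveup, and tryNonAsync):

    {-# LANGUAGE LambdaCase #-}
    import Control.Exception (SomeException, throwIO, try)

    -- old convention: failure is an in-band Bool the caller has to test
    retrieveOld :: FilePath -> IO Bool
    retrieveOld _dest = return False        -- pretend the transfer failed

    -- new convention: success is (), failure is an exception carrying the reason
    retrieveNew :: FilePath -> IO ()
    retrieveNew _dest = throwIO (userError "adb pull failed")

    caller :: FilePath -> IO (Maybe FilePath)
    caller dest = (try (retrieveNew dest) :: IO (Either SomeException ())) >>= \case
        Left e -> do
            putStrLn ("warning: " ++ show e)  -- like the warning calls in the diff below
            return Nothing
        Right () -> return (Just dest)

    main :: IO ()
    main = caller "dest.tmp" >>= print

tryNonAsync plays the role of try here, except that it re-throws
asynchronous exceptions.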

getKey very rarely fails, and when it does it's always for the same reason
(the user configured annex.backend to url, a backend that cannot generate
keys). So this avoids having to deal with Nothing everywhere getKey is used.
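
A similarly simplified sketch of the getKey side (hypothetical record and
names, loosely mirroring Types/Backend.hs; plain IO instead of Annex):

    -- trimmed-down Backend record: getKey becomes a Maybe-wrapped function
    data Backend = Backend
        { backendName :: String
        , getKey      :: Maybe (FilePath -> IO String)  -- Nothing: backend cannot generate keys
        }

    hashBackend :: Backend
    hashBackend = Backend "SHA256E" (Just (\f -> return ("SHA256E--" ++ f)))

    urlBackend :: Backend
    urlBackend = Backend "URL" Nothing   -- like Backend/URL.hs after this change

    -- genKey no longer returns Maybe; the rare failure becomes an error
    genKey :: Backend -> FilePath -> IO String
    genKey b f = case getKey b of
        Just mk -> mk f
        Nothing -> ioError (userError ("Cannot generate a key for backend " ++ backendName b))

    main :: IO ()
    main = genKey hashBackend "foo" >>= putStrLn

In the real diff the Nothing case calls giveup with the backend's key
variety, and call sites that still want a Bool catch it with tryNonAsync.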

This commit was sponsored by Ilya Shlyakhter on Patreon.
Joey Hess 2020-05-15 12:51:09 -04:00
parent 4814b444dd
commit 3334d3831b
23 changed files with 151 additions and 152 deletions

View file

@@ -349,16 +349,20 @@ downloadImport remote importtreeconfig importablecontents = do
return (Right job)
download cidmap db (loc, (cid, sz)) = do
let rundownload tmpfile p =
Remote.retrieveExportWithContentIdentifier ia loc cid tmpfile (mkkey loc tmpfile) p >>= \case
Just k -> tryNonAsync (moveAnnex k tmpfile) >>= \case
Right True -> do
let downloader tmpfile p = do
k <- Remote.retrieveExportWithContentIdentifier ia loc cid tmpfile (mkkey loc tmpfile) p
ok <- moveAnnex k tmpfile
return (k, ok)
let rundownload tmpfile p = tryNonAsync (downloader tmpfile p) >>= \case
Right (k, True) -> do
recordcidkey cidmap db cid k
logStatus k InfoPresent
logChange k (Remote.uuid remote) InfoPresent
return $ Just (loc, k)
_ -> return Nothing
Nothing -> return Nothing
Right (_, False) -> return Nothing
Left e -> do
warning (show e)
return Nothing
checkDiskSpaceToGet tmpkey Nothing $
withTmp tmpkey $ \tmpfile ->
metered Nothing tmpkey $
@@ -375,7 +379,7 @@ downloadImport remote importtreeconfig importablecontents = do
, contentLocation = toRawFilePath tmpfile
, inodeCache = Nothing
}
fmap fst <$> genKey ks nullMeterUpdate backend
fst <$> genKey ks nullMeterUpdate backend
locworktreefilename loc = asTopFilePath $ case importtreeconfig of
ImportTree -> fromImportLocation loc

View file

@@ -159,8 +159,8 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
(chooseBackend $ fromRawFilePath $ keyFilename source)
(return . Just)
preferredbackend
fmap fst <$> genKey source meterupdate backend
Just k -> return (Just k)
fst <$> genKey source meterupdate backend
Just k -> return k
let src = contentLocation source
ms <- liftIO $ catchMaybeIO $ R.getFileStatus src
mcache <- maybe (pure Nothing) (liftIO . toInodeCache delta (fromRawFilePath src)) ms
@@ -169,10 +169,9 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
(Just newc, Just c) | compareStrong c newc -> go k mcache ms
_ -> failure "changed while it was being added"
where
go (Just key) mcache (Just s)
go key mcache (Just s)
| lockingFile cfg = golocked key mcache s
| otherwise = gounlocked key mcache s
go _ _ _ = failure "failed to generate a key"
golocked key mcache s =
tryNonAsync (moveAnnex key $ fromRawFilePath $ contentLocation source) >>= \case

View file

@@ -14,6 +14,7 @@ module Annex.Url (
ipAddressesUnlimited,
checkBoth,
download,
download',
exists,
getUrlInfo,
U.downloadQuiet,
@@ -172,6 +173,10 @@ download meterupdate url file uo =
Right () -> return True
Left err -> warning err >> return False
download' :: MeterUpdate -> U.URLString -> FilePath -> U.UrlOptions -> Annex (Either String ())
download' meterupdate url file uo =
liftIO (U.download meterupdate url file uo)
exists :: U.URLString -> U.UrlOptions -> Annex Bool
exists url uo = liftIO (U.exists url uo) >>= \case
Right b -> return b

View file

@@ -51,12 +51,15 @@ defaultBackend = maybe cache return =<< Annex.getState Annex.backend
lookupname = lookupBackendVariety . parseKeyVariety . encodeBS
{- Generates a key for a file. -}
genKey :: KeySource -> MeterUpdate -> Maybe Backend -> Annex (Maybe (Key, Backend))
genKey :: KeySource -> MeterUpdate -> Maybe Backend -> Annex (Key, Backend)
genKey source meterupdate preferredbackend = do
b <- maybe defaultBackend return preferredbackend
B.getKey b source meterupdate >>= return . \case
Nothing -> Nothing
Just k -> Just (makesane k, b)
case B.getKey b of
Just a -> do
k <- a source meterupdate
return (makesane k, b)
Nothing -> giveup $ "Cannot generate a key for backend " ++
decodeBS (formatKeyVariety (B.backendVariety b))
where
-- keyNames should not contain newline characters.
makesane k = alterKey k $ \d -> d

View file

@@ -63,7 +63,7 @@ backends = concatMap (\h -> [genBackendE h, genBackend h]) hashes
genBackend :: Hash -> Backend
genBackend hash = Backend
{ backendVariety = hashKeyVariety hash (HasExt False)
, getKey = keyValue hash
, getKey = Just (keyValue hash)
, verifyKeyContent = Just $ checkKeyChecksum hash
, canUpgradeKey = Just needsUpgrade
, fastMigrate = Just trivialMigrate
@@ -73,7 +73,7 @@ genBackend hash
genBackendE :: Hash -> Backend
genBackendE hash = (genBackend hash)
{ backendVariety = hashKeyVariety hash (HasExt True)
, getKey = keyValueE hash
, getKey = Just (keyValueE hash)
}
hashKeyVariety :: Hash -> HasExt -> KeyVariety
@@ -88,26 +88,26 @@ hashKeyVariety (Blake2sHash size) he = Blake2sKey size he
hashKeyVariety (Blake2spHash size) he = Blake2spKey size he
{- A key is a hash of its contents. -}
keyValue :: Hash -> KeySource -> MeterUpdate -> Annex (Maybe Key)
keyValue :: Hash -> KeySource -> MeterUpdate -> Annex Key
keyValue hash source meterupdate = do
let file = fromRawFilePath (contentLocation source)
filesize <- liftIO $ getFileSize file
s <- hashFile hash file meterupdate
return $ Just $ mkKey $ \k -> k
return $ mkKey $ \k -> k
{ keyName = encodeBS s
, keyVariety = hashKeyVariety hash (HasExt False)
, keySize = Just filesize
}
{- Extension preserving keys. -}
keyValueE :: Hash -> KeySource -> MeterUpdate -> Annex (Maybe Key)
keyValueE :: Hash -> KeySource -> MeterUpdate -> Annex Key
keyValueE hash source meterupdate =
keyValue hash source meterupdate >>= maybe (return Nothing) addE
keyValue hash source meterupdate >>= addE
where
addE k = do
maxlen <- annexMaxExtensionLength <$> Annex.getGitConfig
let ext = selectExtension maxlen (keyFilename source)
return $ Just $ alterKey k $ \d -> d
return $ alterKey k $ \d -> d
{ keyName = keyName d <> ext
, keyVariety = hashKeyVariety hash (HasExt True)
}
@@ -296,7 +296,10 @@ md5Hasher = show . md5
testKeyBackend :: Backend
testKeyBackend =
let b = genBackendE (SHA2Hash (HashSize 256))
in b { getKey = \ks p -> (fmap addE) <$> getKey b ks p }
gk = case getKey b of
Nothing -> Nothing
Just f -> Just (\ks p -> addE <$> f ks p)
in b { getKey = gk }
where
addE k = alterKey k $ \d -> d
{ keyName = keyName d <> longext

View file

@@ -21,7 +21,7 @@ backends = [backend]
backend :: Backend
backend = Backend
{ backendVariety = URLKey
, getKey = \_ _ -> return Nothing
, getKey = Nothing
, verifyKeyContent = Nothing
, canUpgradeKey = Nothing
, fastMigrate = Nothing

View file

@@ -24,7 +24,7 @@ backends = [backend]
backend :: Backend
backend = Backend
{ backendVariety = WORMKey
, getKey = keyValue
, getKey = Just keyValue
, verifyKeyContent = Nothing
, canUpgradeKey = Just needsUpgrade
, fastMigrate = Just removeSpaces
@@ -34,14 +34,14 @@ backend = Backend
{- The key includes the file size, modification time, and the
- original filename relative to the top of the git repository.
-}
keyValue :: KeySource -> MeterUpdate -> Annex (Maybe Key)
keyValue :: KeySource -> MeterUpdate -> Annex Key
keyValue source _ = do
let f = contentLocation source
stat <- liftIO $ R.getFileStatus f
sz <- liftIO $ getFileSize' (fromRawFilePath f) stat
relf <- fromRawFilePath . getTopFilePath
<$> inRepo (toTopFilePath $ keyFilename source)
return $ Just $ mkKey $ \k -> k
return $ mkKey $ \k -> k
{ keyName = genKeyName relf
, keyVariety = WORMKey
, keySize = Just sz

View file

@@ -313,7 +313,7 @@ downloadWeb addunlockedmatcher o url urlinfo file =
normalfinish tmp = checkCanAdd file $ do
showDestinationFile file
createWorkTreeDirectory (parentDir file)
finishDownloadWith addunlockedmatcher tmp webUUID url file
Just <$> finishDownloadWith addunlockedmatcher tmp webUUID url file
tryyoutubedl tmp
-- Ask youtube-dl what filename it will download
-- first, and check if that is already an annexed file,
@@ -388,7 +388,7 @@ downloadWith addunlockedmatcher downloader dummykey u url file =
where
afile = AssociatedFile (Just (toRawFilePath file))
go Nothing = return Nothing
go (Just tmp) = finishDownloadWith addunlockedmatcher tmp u url file
go (Just tmp) = Just <$> finishDownloadWith addunlockedmatcher tmp u url file
{- Like downloadWith, but leaves the dummy key content in
- the returned location. -}
@@ -404,7 +404,7 @@ downloadWith' downloader dummykey u url afile =
then return (Just tmp)
else return Nothing
finishDownloadWith :: AddUnlockedMatcher -> FilePath -> UUID -> URLString -> FilePath -> Annex (Maybe Key)
finishDownloadWith :: AddUnlockedMatcher -> FilePath -> UUID -> URLString -> FilePath -> Annex Key
finishDownloadWith addunlockedmatcher tmp u url file = do
backend <- chooseBackend file
let source = KeySource
@@ -412,11 +412,9 @@ finishDownloadWith addunlockedmatcher tmp u url file = do
, contentLocation = toRawFilePath tmp
, inodeCache = Nothing
}
genKey source nullMeterUpdate backend >>= \case
Nothing -> return Nothing
Just (key, _) -> do
key <- fst <$> genKey source nullMeterUpdate backend
addWorkTree addunlockedmatcher u url file key (Just tmp)
return (Just key)
return key
{- Adds the url size to the Key. -}
addSizeUrlKey :: Url.UrlInfo -> Key -> Key

View file

@@ -20,11 +20,11 @@ cmd = noCommit $ noMessages $ dontCheck repoExists $
(batchable run (pure ()))
run :: () -> String -> Annex Bool
run _ file = genKey ks nullMeterUpdate Nothing >>= \case
Just (k, _) -> do
run _ file = tryNonAsync (genKey ks nullMeterUpdate Nothing) >>= \case
Right (k, _) -> do
liftIO $ putStrLn $ serializeKey k
return True
Nothing -> return False
Left _err -> return False
where
ks = KeySource file' file' Nothing
file' = toRawFilePath file

View file

@@ -227,10 +227,8 @@ startLocal addunlockedmatcher largematcher mode (srcfile, destfile) =
case v of
Just ld -> do
backend <- chooseBackend destfile
v' <- genKey (keySource ld) nullMeterUpdate backend
case v' of
Just (k, _) -> a (ld, k)
Nothing -> giveup "failed to generate a key"
k <- fst <$> genKey (keySource ld) nullMeterUpdate backend
a (ld, k)
Nothing -> stop
checkdup k dupa notdupa = ifM (isKnownKey k)
( dupa

View file

@@ -89,10 +89,8 @@ perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbacken
, contentLocation = content
, inodeCache = Nothing
}
v <- genKey source nullMeterUpdate (Just newbackend)
return $ case v of
Just (newkey, _) -> Just (newkey, False)
_ -> Nothing
newkey <- fst <$> genKey source nullMeterUpdate (Just newbackend)
return $ Just (newkey, False)
genkey (Just fm) = fm oldkey newbackend afile >>= \case
Just newkey -> return (Just (newkey, True))
Nothing -> genkey Nothing

View file

@@ -55,10 +55,8 @@ startSrcDest _ = giveup "specify a src file and a dest file"
startKnown :: FilePath -> CommandStart
startKnown src = notAnnexed src $
starting "reinject" (ActionItemOther (Just src)) $ do
mkb <- genKey ks nullMeterUpdate Nothing
case mkb of
Nothing -> error "Failed to generate key"
Just (key, _) -> ifM (isKnownKey key)
(key, _) <- genKey ks nullMeterUpdate Nothing
ifM (isKnownKey key)
( perform src key
, do
warning "Not known content; skipping"

View file

@@ -329,10 +329,9 @@ testExportTree runannex mkr mkk1 mkk2 =
Remote.storeExport ea loc k testexportlocation nullMeterUpdate
retrieveexport ea k = withTmpFile "exported" $ \tmp h -> do
liftIO $ hClose h
ifM (Remote.retrieveExport ea k testexportlocation tmp nullMeterUpdate)
( verifyKeyContent RetrievalAllKeysSecure AlwaysVerify UnVerified k tmp
, return False
)
tryNonAsync (Remote.retrieveExport ea k testexportlocation tmp nullMeterUpdate) >>= \case
Left _ -> return False
Right () -> verifyKeyContent RetrievalAllKeysSecure AlwaysVerify UnVerified k tmp
checkpresentexport ea k = Remote.checkPresentExport ea k testexportlocation
removeexport ea k = Remote.removeExport ea k testexportlocation
removeexportdirectory ea = case Remote.removeExportDirectory ea of
@@ -405,7 +404,7 @@ randKey :: Int -> Annex Key
randKey sz = withTmpFile "randkey" $ \f h -> do
gen <- liftIO (newGenIO :: IO SystemRandom)
case genBytes sz gen of
Left e -> error $ "failed to generate random key: " ++ show e
Left e -> giveup $ "failed to generate random key: " ++ show e
Right (rand, _) -> liftIO $ B.hPut h rand
liftIO $ hClose h
let ks = KeySource
@@ -413,8 +412,9 @@ randKey sz = withTmpFile "randkey" $ \f h -> do
, contentLocation = toRawFilePath f
, inodeCache = Nothing
}
k <- fromMaybe (error "failed to generate random key")
<$> Backend.getKey Backend.Hash.testKeyBackend ks nullMeterUpdate
k <- case Backend.getKey Backend.Hash.testKeyBackend of
Just a -> a ks nullMeterUpdate
Nothing -> giveup "failed to generate random key (backend problem)"
_ <- moveAnnex k f
return k

View file

@@ -186,11 +186,14 @@ store'' serial dest src canoverwrite = checkAdbInPath False $ do
retrieve :: AndroidSerial -> AndroidPath -> Retriever
retrieve serial adir = fileRetriever $ \dest k _p ->
let src = androidLocation adir k
in unlessM (retrieve' serial src dest) $
giveup "adb pull failed"
in retrieve' serial src dest
retrieve' :: AndroidSerial -> AndroidPath -> FilePath -> Annex Bool
retrieve' serial src dest = checkAdbInPath False $ do
retrieve' :: AndroidSerial -> AndroidPath -> FilePath -> Annex ()
retrieve' serial src dest =
unlessM go $
giveup "adb pull failed"
where
go = checkAdbInPath False $ do
showOutput -- make way for adb pull output
liftIO $ boolSystem "adb" $ mkAdbCommand serial
[ Param "pull"
@@ -241,7 +244,7 @@ storeExportM serial adir src _k loc _p =
where
dest = androidExportLocation adir loc
retrieveExportM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
retrieveExportM :: AndroidSerial -> AndroidPath -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportM serial adir _k loc dest _p = retrieve' serial src dest
where
src = androidExportLocation adir loc
@@ -305,17 +308,14 @@ listImportableContentsM serial adir =
-- connection is reasonably fast, it's probably as good as
-- git's handling of similar situations with files being modified while
-- it's updating the working tree for a merge.
retrieveExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> ExportLocation -> ContentIdentifier -> FilePath -> Annex (Maybe Key) -> MeterUpdate -> Annex (Maybe Key)
retrieveExportWithContentIdentifierM serial adir loc cid dest mkkey _p = catchDefaultIO Nothing $
ifM (retrieve' serial src dest)
( do
retrieveExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> ExportLocation -> ContentIdentifier -> FilePath -> Annex Key -> MeterUpdate -> Annex Key
retrieveExportWithContentIdentifierM serial adir loc cid dest mkkey _p = do
retrieve' serial src dest
k <- mkkey
currcid <- getExportContentIdentifier serial adir loc
return $ if currcid == Right (Just cid)
then k
else Nothing
, return Nothing
)
if currcid == Right (Just cid)
then return k
else giveup "the file on the android device has changed"
where
src = androidExportLocation adir loc

View file

@@ -273,10 +273,9 @@ storeExportM d src _k loc p = liftIO $ do
where
dest = exportPath d loc
retrieveExportM :: FilePath -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
retrieveExportM d _k loc dest p = liftIO $ catchBoolIO $ do
withMeteredFile src p (L.writeFile dest)
return True
retrieveExportM :: FilePath -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportM d _k loc dest p =
liftIO $ withMeteredFile src p (L.writeFile dest)
where
src = exportPath d loc
@@ -346,9 +345,9 @@ mkContentIdentifier f st =
fmap (ContentIdentifier . encodeBS . showInodeCache)
<$> toInodeCache noTSDelta f st
retrieveExportWithContentIdentifierM :: FilePath -> ExportLocation -> ContentIdentifier -> FilePath -> Annex (Maybe Key) -> MeterUpdate -> Annex (Maybe Key)
retrieveExportWithContentIdentifierM :: FilePath -> ExportLocation -> ContentIdentifier -> FilePath -> Annex Key -> MeterUpdate -> Annex Key
retrieveExportWithContentIdentifierM dir loc cid dest mkkey p =
catchDefaultIO Nothing $ precheck $ docopy postcheck
precheck $ docopy postcheck
where
f = exportPath dir loc
@@ -404,7 +403,7 @@ retrieveExportWithContentIdentifierM dir loc cid dest mkkey p =
comparecid cont currcid
| currcid == Just cid = cont
| otherwise = return Nothing
| otherwise = giveup "file content has changed"
storeExportWithContentIdentifierM :: FilePath -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
storeExportWithContentIdentifierM dir src _k loc overwritablecids p = do

View file

@@ -291,20 +291,17 @@ storeExportM external f k loc p = either giveup return =<< go
_ -> Nothing
req sk = TRANSFEREXPORT Upload sk f
retrieveExportM :: External -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
retrieveExportM external k loc d p = safely $
handleRequestExport external loc req k (Just p) $ \resp -> case resp of
TRANSFER_SUCCESS Download k'
| k == k' -> result True
TRANSFER_FAILURE Download k' errmsg
| k == k' -> Just $ do
warning $ respErrorMessage "TRANSFER" errmsg
return (Result False)
UNSUPPORTED_REQUEST -> Just $ do
warning "TRANSFEREXPORT not implemented by external special remote"
return (Result False)
_ -> Nothing
retrieveExportM :: External -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportM external k loc d p = either giveup return =<< go
where
go = handleRequestExport external loc req k (Just p) $ \resp -> case resp of
TRANSFER_SUCCESS Download k'
| k == k' -> result $ Right ()
TRANSFER_FAILURE Download k' errmsg
| k == k' -> result $ Left $ respErrorMessage "TRANSFER" errmsg
UNSUPPORTED_REQUEST ->
result $ Left "TRANSFEREXPORT not implemented by external special remote"
_ -> Nothing
req sk = TRANSFEREXPORT Download sk d
checkPresentExportM :: External -> Key -> ExportLocation -> Annex Bool

View file

@@ -37,7 +37,7 @@ instance HasExportUnsupported (ParsedRemoteConfig -> RemoteGitConfig -> Annex Bo
instance HasExportUnsupported (ExportActions Annex) where
exportUnsupported = ExportActions
{ storeExport = nope
, retrieveExport = \_ _ _ _ -> return False
, retrieveExport = nope
, checkPresentExport = \_ _ -> return False
, removeExport = \_ _ -> return False
, removeExportDirectory = Just $ \_ -> return False
@@ -56,7 +56,7 @@ instance HasImportUnsupported (ParsedRemoteConfig -> RemoteGitConfig -> Annex Bo
instance HasImportUnsupported (ImportActions Annex) where
importUnsupported = ImportActions
{ listImportableContents = return Nothing
, retrieveExportWithContentIdentifier = \_ _ _ _ _ -> return Nothing
, retrieveExportWithContentIdentifier = nope
, storeExportWithContentIdentifier = nope
, removeExportWithContentIdentifier = \_ _ _ -> return False
, removeExportDirectoryWhenEmpty = Just $ \_ -> return False
@@ -319,7 +319,6 @@ adjustExportImport r rs = case getRemoteConfigValue exportTreeField (config r) o
, giveup "unknown export location"
)
(l:_) -> do
unlessM (retrieveExport (exportActions r) k l dest p) $
giveup "retrieving from export failed"
retrieveExport (exportActions r) k l dest p
return UnVerified
| otherwise = giveup $ "exported content cannot be verified due to using the " ++ decodeBS (formatKeyVariety (fromKey keyVariety k)) ++ " backend"

View file

@@ -315,7 +315,7 @@ storeExportM o src _k loc meterupdate =
basedest = fromRawFilePath (fromExportLocation loc)
populatedest = liftIO . createLinkOrCopy src
retrieveExportM :: RsyncOpts -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
retrieveExportM :: RsyncOpts -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportM o _k loc dest p = rsyncRetrieve o [rsyncurl] dest (Just p)
where
rsyncurl = mkRsyncUrl o (fromRawFilePath (fromExportLocation loc))
@@ -367,9 +367,12 @@ withRsyncScratchDir a = do
t <- fromRepo gitAnnexTmpObjectDir
withTmpDirIn t "rsynctmp" a
rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> FilePath -> Maybe MeterUpdate -> Annex Bool
rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> FilePath -> Maybe MeterUpdate -> Annex ()
rsyncRetrieve o rsyncurls dest meterupdate =
showResumable $ untilTrue rsyncurls $ \u -> rsyncRemote Download o meterupdate
unlessM go $
giveup "rsync failed"
where
go = showResumable $ untilTrue rsyncurls $ \u -> rsyncRemote Download o meterupdate
-- use inplace when retrieving to support resuming
[ Param "--inplace"
, Param u
@@ -378,8 +381,7 @@ rsyncRetrieve o rsyncurls dest meterupdate =
rsyncRetrieveKey :: RsyncOpts -> Key -> FilePath -> Maybe MeterUpdate -> Annex ()
rsyncRetrieveKey o k dest meterupdate =
unlessM (rsyncRetrieve o (rsyncUrls o k) dest meterupdate) $
giveup "rsync failed"
rsyncRetrieve o (rsyncUrls o k) dest meterupdate
showResumable :: Annex Bool -> Annex Bool
showResumable a = ifM a

View file

@@ -474,20 +474,16 @@ storeExportS3' hv r rs info magic f k loc p = withS3Handle hv $ \case
setS3VersionID info rs k mvid
return (metag, mvid)
retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
retrieveExportS3 hv r info _k loc f p =
catchNonAsync go (\e -> warning (show e) >> return False)
where
go = withS3Handle hv $ \case
Just h -> do
retrieveHelper info h (Left (T.pack exportloc)) f p
return True
retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportS3 hv r info _k loc f p = do
withS3Handle hv $ \case
Just h -> retrieveHelper info h (Left (T.pack exportloc)) f p
Nothing -> case getPublicUrlMaker info of
Nothing -> do
warning $ needS3Creds (uuid r)
return False
Just geturl -> Url.withUrlOptions $
Url.download p (geturl exportloc) f
Just geturl -> either giveup return =<<
Url.withUrlOptions
(Url.download' p (geturl exportloc) f)
Nothing -> giveup $ needS3Creds (uuid r)
where
exportloc = bucketExportLocation info loc
removeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> Annex Bool
@@ -634,21 +630,18 @@ mkImportableContentsVersioned info = build . groupfiles
| otherwise =
i : removemostrecent mtime rest
retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> ContentIdentifier -> FilePath -> Annex (Maybe Key) -> MeterUpdate -> Annex (Maybe Key)
retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> ContentIdentifier -> FilePath -> Annex Key -> MeterUpdate -> Annex Key
retrieveExportWithContentIdentifierS3 hv r rs info loc cid dest mkkey p = withS3Handle hv $ \case
Nothing -> do
warning $ needS3Creds (uuid r)
return Nothing
Just h -> flip catchNonAsync (\e -> warning (show e) >> return Nothing) $ do
Just h -> do
rewritePreconditionException $ retrieveHelper' h dest p $
limitGetToContentIdentifier cid $
S3.getObject (bucket info) o
mk <- mkkey
case (mk, extractContentIdentifier cid o) of
(Just k, Right vid) ->
setS3VersionID info rs k vid
_ -> noop
return mk
k <- mkkey
case extractContentIdentifier cid o of
Right vid -> setS3VersionID info rs k vid
Left _ -> noop
return k
Nothing -> giveup $ needS3Creds (uuid r)
where
o = T.pack $ bucketExportLocation info loc

View file

@@ -213,12 +213,11 @@ storeExportDav hdl f k loc p = case exportLocation loc of
storeHelper dav (keyTmpLocation k) dest reqbody
Left err -> giveup err
retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex ()
retrieveExportDav hdl _k loc d p = case exportLocation loc of
Right src -> withDavHandle' hdl $ \mh -> runExport' mh $ \_dav -> do
Right src -> withDavHandle hdl $ \h -> runExport h $ \_dav ->
retrieveHelper src d p
return True
Left _err -> return False
Left err -> giveup err
checkPresentExportDav :: DavHandleVar -> Remote -> Key -> ExportLocation -> Annex Bool
checkPresentExportDav hdl _ _k loc = case exportLocation loc of

View file

@@ -582,9 +582,10 @@ backend_ :: String -> Types.Backend
backend_ = Backend.lookupBackendVariety . Types.Key.parseKeyVariety . encodeBS
getKey :: Types.Backend -> FilePath -> IO Types.Key
getKey b f = fromJust <$> annexeval go
getKey b f = case Types.Backend.getKey b of
Just a -> annexeval $ a ks Utility.Metered.nullMeterUpdate
Nothing -> error "internal"
where
go = Types.Backend.getKey b ks Utility.Metered.nullMeterUpdate
ks = Types.KeySource.KeySource
{ Types.KeySource.keyFilename = toRawFilePath f
, Types.KeySource.contentLocation = toRawFilePath f

View file

@@ -17,7 +17,7 @@ import Utility.FileSystemEncoding
data BackendA a = Backend
{ backendVariety :: KeyVariety
, getKey :: KeySource -> MeterUpdate -> a (Maybe Key)
, getKey :: Maybe (KeySource -> MeterUpdate -> a Key)
-- Verifies the content of a key.
, verifyKeyContent :: Maybe (Key -> FilePath -> a Bool)
-- Checks if a key can be upgraded to a better form.

View file

@@ -236,7 +236,8 @@ data ExportActions a = ExportActions
-- Retrieves exported content to a file.
-- (The MeterUpdate does not need to be used if it writes
-- sequentially to the file.)
, retrieveExport :: Key -> ExportLocation -> FilePath -> MeterUpdate -> a Bool
-- Throws exception on failure.
, retrieveExport :: Key -> ExportLocation -> FilePath -> MeterUpdate -> a ()
-- Removes an exported file (succeeds if the contents are not present)
, removeExport :: Key -> ExportLocation -> a Bool
-- Removes an exported directory. Typically the directory will be
@@ -269,15 +270,17 @@ data ImportActions a = ImportActions
-- This has to be used rather than retrieveExport
-- when a special remote supports imports, since files on such a
-- special remote can be changed at any time.
--
-- Throws exception on failure.
, retrieveExportWithContentIdentifier
:: ExportLocation
-> ContentIdentifier
-- file to write content to
-> FilePath
-- callback that generates a key from the downloaded content
-> a (Maybe Key)
-> a Key
-> MeterUpdate
-> a (Maybe Key)
-> a Key
-- Exports content to an ExportLocation, and returns the
-- ContentIdentifier corresponding to the content it stored.
--