more OsPath conversion (464/749)

Sponsored-by: unqueued
Joey Hess 2025-02-04 13:35:17 -04:00
parent cf986bc7e2
commit 54f0710fd2
GPG key ID: DB12DB0FF05F8F38
17 changed files with 164 additions and 165 deletions


@ -653,7 +653,7 @@ unlinkAnnex key = do
- If this happens, runs the rollback action and throws an exception.
- The rollback action should remove the data that was transferred.
-}
sendAnnex :: Key -> Maybe FilePath -> Annex () -> (FilePath -> FileSize -> Annex a) -> Annex a
sendAnnex :: Key -> Maybe OsPath -> Annex () -> (OsPath -> FileSize -> Annex a) -> Annex a
sendAnnex key o rollback sendobject = go =<< prepSendAnnex' key o
where
go (Just (f, sz, check)) = do
@ -676,10 +676,10 @@ sendAnnex key o rollback sendobject = go =<< prepSendAnnex' key o
- Annex monad of the remote that is receiving the object, rather than
- the sender. So it cannot rely on Annex state.
-}
prepSendAnnex :: Key -> Maybe FilePath -> Annex (Maybe (FilePath, FileSize, Annex Bool))
prepSendAnnex :: Key -> Maybe OsPath -> Annex (Maybe (OsPath, FileSize, Annex Bool))
prepSendAnnex key Nothing = withObjectLoc key $ \f -> do
let retval c cs = return $ Just
( fromOsPath f
( f
, inodeCacheFileSize c
, sameInodeCache f cs
)
@ -704,19 +704,18 @@ prepSendAnnex key Nothing = withObjectLoc key $ \f -> do
Nothing -> return Nothing
-- If the provided object file is the annex object file, handle as above.
prepSendAnnex key (Just o) = withObjectLoc key $ \aof ->
let o' = toOsPath o
in if aof == o'
if aof == o
then prepSendAnnex key Nothing
else do
withTSDelta (liftIO . genInodeCache o') >>= \case
withTSDelta (liftIO . genInodeCache o) >>= \case
Nothing -> return Nothing
Just c -> return $ Just
( o
, inodeCacheFileSize c
, sameInodeCache o' [c]
, sameInodeCache o [c]
)
prepSendAnnex' :: Key -> Maybe FilePath -> Annex (Maybe (FilePath, FileSize, Annex (Maybe String)))
prepSendAnnex' :: Key -> Maybe OsPath -> Annex (Maybe (OsPath, FileSize, Annex (Maybe String)))
prepSendAnnex' key o = prepSendAnnex key o >>= \case
Just (f, sz, checksuccess) ->
let checksuccess' = ifM checksuccess
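The pattern in this hunk repeats throughout the commit: Maybe FilePath arguments and FilePath callbacks become OsPath, pushing String conversion out to the caller's boundary. As a minimal sketch (not from the commit), a hypothetical caller of the new sendAnnex, assuming the toOsPath/fromOsPath conversions used elsewhere in this commit:

-- upload is a stand-in for a transfer action that still wants a String.
sendFromTmp :: Key -> FilePath -> Annex ()
sendFromTmp key tmpsrc =
    sendAnnex key (Just (toOsPath tmpsrc)) rollback $ \f sz ->
        upload (fromOsPath f) sz
  where
    rollback = noop
    upload :: FilePath -> FileSize -> Annex ()
    upload _ _ = noop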


@ -17,6 +17,7 @@ module Annex.Magic (
getMagicMimeEncoding,
) where
import Common
import Types.Mime
import Control.Monad.IO.Class
#ifdef WITH_MAGICMIME
@ -24,7 +25,6 @@ import Magic
import Utility.Env
import Control.Concurrent
import System.IO.Unsafe (unsafePerformIO)
import Common
#else
type Magic = ()
#endif
@ -44,7 +44,7 @@ initMagicMime = catchMaybeIO $ do
initMagicMime = return Nothing
#endif
getMagicMime :: Magic -> FilePath -> IO (Maybe (MimeType, MimeEncoding))
getMagicMime :: Magic -> OsPath -> IO (Maybe (MimeType, MimeEncoding))
#ifdef WITH_MAGICMIME
getMagicMime m f = Just . parse <$> magicConcurrentSafe (magicFile m f)
where
@ -58,10 +58,10 @@ getMagicMime m f = Just . parse <$> magicConcurrentSafe (magicFile m f)
getMagicMime _ _ = return Nothing
#endif
getMagicMimeType :: MonadIO m => Magic -> FilePath -> m (Maybe MimeType)
getMagicMimeType :: MonadIO m => Magic -> OsPath -> m (Maybe MimeType)
getMagicMimeType m f = liftIO $ fmap fst <$> getMagicMime m f
getMagicMimeEncoding :: MonadIO m => Magic -> FilePath -> m (Maybe MimeEncoding)
getMagicMimeEncoding :: MonadIO m => Magic -> OsPath -> m (Maybe MimeEncoding)
getMagicMimeEncoding m f = liftIO $ fmap snd <$> getMagicMime m f
#ifdef WITH_MAGICMIME
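With the OsPath signatures, call sites pass OsPath values directly. A sketch of a hypothetical call site, assuming the literalOsPath helper for constant paths that appears elsewhere in this commit:

-- Obtaining the Magic handle is elided.
mimeOf :: Magic -> Annex (Maybe MimeType)
mimeOf m = getMagicMimeType m (literalOsPath "dir/file.pdf")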


@ -14,7 +14,6 @@ module Remote.Directory.LegacyChunked where
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as S
import qualified System.FilePath.ByteString as P
import Annex.Common
import Utility.FileMode
@ -23,7 +22,6 @@ import qualified Remote.Helper.Chunked.Legacy as Legacy
import Annex.Tmp
import Utility.Metered
import Utility.Directory.Create
import qualified Utility.RawFilePath as R
import qualified Utility.FileIO as F
withCheckedFiles :: (FilePath -> IO Bool) -> FilePath -> (FilePath -> Key -> [FilePath]) -> Key -> ([FilePath] -> IO Bool) -> IO Bool
@ -45,7 +43,7 @@ withCheckedFiles check d locations k a = go $ locations d k
else a chunks
)
withStoredFiles :: FilePath -> (FilePath -> Key -> [FilePath]) -> Key -> ([FilePath] -> IO Bool) -> IO Bool
withStoredFiles = withCheckedFiles doesFileExist
withStoredFiles = withCheckedFiles (doesFileExist . toOsPath)
{- Splits a ByteString into chunks and writes to dests, obeying configured
- chunk size (not to be confused with the L.ByteString chunk size). -}
@ -77,20 +75,20 @@ storeLegacyChunked' meterupdate chunksize (d:dests) bs c = do
feed bytes' (sz - s) ls h
else return (l:ls)
storeHelper :: FilePath -> (RawFilePath -> RawFilePath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO ()
storeHelper :: FilePath -> (OsPath -> OsPath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO ()
storeHelper repotop finalizer key storer tmpdir destdir = do
void $ liftIO $ tryIO $ createDirectoryUnder
[toRawFilePath repotop]
(toRawFilePath tmpdir)
[toOsPath repotop]
(toOsPath tmpdir)
Legacy.storeChunks key tmpdir destdir storer recorder (legacyFinalizer finalizer)
where
recorder f s = do
let f' = toRawFilePath f
let f' = toOsPath f
void $ tryIO $ allowWrite f'
writeFile f s
void $ tryIO $ preventWrite f'
store :: FilePath -> ChunkSize -> (RawFilePath -> RawFilePath -> IO ()) -> Key -> L.ByteString -> MeterUpdate -> FilePath -> FilePath -> IO ()
store :: FilePath -> ChunkSize -> (OsPath -> OsPath -> IO ()) -> Key -> L.ByteString -> MeterUpdate -> FilePath -> FilePath -> IO ()
store repotop chunksize finalizer k b p = storeHelper repotop finalizer k $ \dests ->
storeLegacyChunked p chunksize dests b
@ -98,30 +96,29 @@ store repotop chunksize finalizer k b p = storeHelper repotop finalizer k $ \des
- Done very inefficiently, by writing to a temp file.
- :/ This is legacy code..
-}
retrieve :: (RawFilePath -> Key -> [RawFilePath]) -> RawFilePath -> Retriever
retrieve :: (OsPath -> Key -> [OsPath]) -> OsPath -> Retriever
retrieve locations d basek p _dest miv c = withOtherTmp $ \tmpdir -> do
showLongNote "This remote uses the deprecated chunksize setting. So this will be quite slow."
let tmp = tmpdir P.</> keyFile basek <> ".directorylegacy.tmp"
let tmp' = toOsPath tmp
let tmp = tmpdir </> keyFile basek <> literalOsPath ".directorylegacy.tmp"
let go = \k sink -> do
liftIO $ void $ withStoredFiles (fromRawFilePath d) (legacyLocations locations) k $ \fs -> do
liftIO $ void $ withStoredFiles (fromOsPath d) (legacyLocations locations) k $ \fs -> do
forM_ fs $
F.appendFile' tmp' <=< S.readFile
F.appendFile' tmp <=< S.readFile
return True
b <- liftIO $ F.readFile tmp'
liftIO $ removeWhenExistsWith R.removeLink tmp
b <- liftIO $ F.readFile tmp
liftIO $ removeWhenExistsWith removeFile tmp
sink b
byteRetriever go basek p tmp miv c
checkKey :: RawFilePath -> (RawFilePath -> Key -> [RawFilePath]) -> Key -> Annex Bool
checkKey :: OsPath -> (OsPath -> Key -> [OsPath]) -> Key -> Annex Bool
checkKey d locations k = liftIO $
withStoredFiles (fromRawFilePath d) (legacyLocations locations) k $
withStoredFiles (fromOsPath d) (legacyLocations locations) k $
-- withStoredFiles checked that it exists
const $ return True
legacyFinalizer :: (RawFilePath -> RawFilePath -> IO ()) -> (FilePath -> FilePath -> IO ())
legacyFinalizer f = \a b -> f (toRawFilePath a) (toRawFilePath b)
legacyFinalizer :: (OsPath -> OsPath -> IO ()) -> (FilePath -> FilePath -> IO ())
legacyFinalizer f = \a b -> f (toOsPath a) (toOsPath b)
legacyLocations :: (RawFilePath -> Key -> [RawFilePath]) -> (FilePath -> Key -> [FilePath])
legacyLocations :: (OsPath -> Key -> [OsPath]) -> (FilePath -> Key -> [FilePath])
legacyLocations locations = \f k ->
map fromRawFilePath $ locations (toRawFilePath f) k
map fromOsPath $ locations (toOsPath f) k
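legacyFinalizer and legacyLocations invert the usual shim direction: the canonical functions now take OsPath, and these wrappers convert so the legacy FilePath-based chunking code stays unchanged. The general shape of such a shim, as a standalone sketch with a hypothetical name:

-- Wrap an OsPath action so legacy FilePath callers can use it.
viaOsPath :: (OsPath -> OsPath -> IO ()) -> FilePath -> FilePath -> IO ()
viaOsPath f = \a b -> f (toOsPath a) (toOsPath b)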


@ -33,7 +33,6 @@ import Crypto
import Backend (isStableKey)
import Annex.SpecialRemote.Config
import Annex.Verify
import qualified Utility.RawFilePath as R
import qualified Utility.FileIO as F
import qualified Data.ByteString as S
@ -584,4 +583,4 @@ ensureChunksAreLogged _ _ (ChunkKeys _) = return ()
withBytes :: ContentSource -> (L.ByteString -> Annex a) -> Annex a
withBytes (ByteContent b) a = a b
withBytes (FileContent f) a = a =<< liftIO (L.readFile (fromOsPath f))
withBytes (FileContent f) a = a =<< liftIO (F.readFile f)


@ -14,6 +14,7 @@ import Types.StoreRetrieve
import Remote.Helper.Special
import Utility.Metered
import Utility.Hash (IncrementalVerifier(..))
import qualified Utility.FileIO as F
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as S
@ -31,14 +32,14 @@ httpStorer a = fileStorer $ \k f m -> a k =<< liftIO (httpBodyStorer f m)
-- Reads the file and generates a streaming request body, that will update
-- the meter as it's sent.
httpBodyStorer :: FilePath -> MeterUpdate -> IO RequestBody
httpBodyStorer :: OsPath -> MeterUpdate -> IO RequestBody
httpBodyStorer src m = do
size <- getFileSize (toRawFilePath src)
size <- getFileSize src
let streamer sink = withMeteredFile src m $ \b -> byteStringPopper b sink
return $ RequestBodyStream (fromInteger size) streamer
-- Like httpBodyStorer, but generates a chunked request body.
httpBodyStorerChunked :: FilePath -> MeterUpdate -> RequestBody
httpBodyStorerChunked :: OsPath -> MeterUpdate -> RequestBody
httpBodyStorerChunked src m =
let streamer sink = withMeteredFile src m $ \b -> byteStringPopper b sink
in RequestBodyStreamChunked streamer
@ -75,10 +76,10 @@ handlePopper numchunks chunksize meterupdate h sink = do
-- Reads the http body and stores it to the specified file, updating the
-- meter and incremental verifier as it goes.
httpBodyRetriever :: FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> Response BodyReader -> IO ()
httpBodyRetriever :: OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> Response BodyReader -> IO ()
httpBodyRetriever dest meterupdate iv resp
| responseStatus resp /= ok200 = giveup $ show $ responseStatus resp
| otherwise = bracket (openBinaryFile dest WriteMode) hClose (go zeroBytesProcessed)
| otherwise = bracket (F.openBinaryFile dest WriteMode) hClose (go zeroBytesProcessed)
where
reader = responseBody resp
go sofar h = do


@ -36,9 +36,9 @@ type ProtoConnRunner c = forall a. P2P.Proto a -> ClosableConnection c -> Annex
-- the pool when done.
type WithConn a c = (ClosableConnection c -> Annex (ClosableConnection c, a)) -> Annex a
store :: UUID -> RemoteGitConfig -> ProtoRunner (Maybe [UUID]) -> Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex ()
store :: UUID -> RemoteGitConfig -> ProtoRunner (Maybe [UUID]) -> Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
store remoteuuid gc runner k af o p = do
let sizer = KeySizer k (fmap (toRawFilePath . fst3) <$> prepSendAnnex k o)
let sizer = KeySizer k (fmap fst3 <$> prepSendAnnex k o)
let bwlimit = remoteAnnexBwLimitUpload gc <|> remoteAnnexBwLimit gc
metered (Just p) sizer bwlimit $ \_ p' ->
runner (P2P.put k af p') >>= \case


@ -44,7 +44,7 @@ adjustReadOnly r
}
| otherwise = r
readonlyStoreKey :: Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex ()
readonlyStoreKey :: Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
readonlyStoreKey _ _ _ _ = readonlyFail
readonlyRemoveKey :: Maybe SafeDropProof -> Key -> Annex ()
@ -53,7 +53,7 @@ readonlyRemoveKey _ _ = readonlyFail
readonlyStorer :: Storer
readonlyStorer _ _ _ = readonlyFail
readonlyStoreExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
readonlyStoreExport :: OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
readonlyStoreExport _ _ _ _ = readonlyFail
readonlyRemoveExport :: Key -> ExportLocation -> Annex ()
@ -62,7 +62,7 @@ readonlyRemoveExport _ _ = readonlyFail
readonlyRemoveExportDirectory :: ExportDirectory -> Annex ()
readonlyRemoveExportDirectory _ = readonlyFail
readonlyStoreExportWithContentIdentifier :: FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
readonlyStoreExportWithContentIdentifier :: OsPath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
readonlyStoreExportWithContentIdentifier _ _ _ _ _ = readonlyFail
readonlyRemoveExportWithContentIdentifier :: Key -> ExportLocation -> [ContentIdentifier] -> Annex ()


@ -53,6 +53,7 @@ import Messages.Progress
import qualified Git
import qualified Git.Construct
import Git.Types
import qualified Utility.FileIO as F
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
@ -92,12 +93,11 @@ mkRetrievalVerifiableKeysSecure gc
-- A Storer that expects to be provided with a file containing
-- the content of the key to store.
fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex ()) -> Storer
fileStorer :: (Key -> OsPath -> MeterUpdate -> Annex ()) -> Storer
fileStorer a k (FileContent f) m = a k f m
fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
let f' = fromRawFilePath f
liftIO $ L.writeFile f' b
a k f' m
liftIO $ L.writeFile (fromOsPath f) b
a k f m
-- A Storer that expects to be provided with a L.ByteString of
-- the content to store.
@ -107,7 +107,7 @@ byteStorer a k c m = withBytes c $ \b -> a k b m
-- A Retriever that generates a lazy ByteString containing the Key's
-- content, and passes it to a callback action which will fully consume it
-- before returning.
byteRetriever :: (Key -> (L.ByteString -> Annex a) -> Annex a) -> Key -> MeterUpdate -> RawFilePath -> Maybe IncrementalVerifier -> (ContentSource -> Annex a) -> Annex a
byteRetriever :: (Key -> (L.ByteString -> Annex a) -> Annex a) -> Key -> MeterUpdate -> OsPath -> Maybe IncrementalVerifier -> (ContentSource -> Annex a) -> Annex a
byteRetriever a k _m _dest _miv callback = a k (callback . ByteContent)
-- A Retriever that writes the content of a Key to a file.
@ -115,7 +115,7 @@ byteRetriever a k _m _dest _miv callback = a k (callback . ByteContent)
-- retrieves data. The incremental verifier is updated in the background as
-- the action writes to the file, but may not be updated with the entire
-- content of the file.
fileRetriever :: (RawFilePath -> Key -> MeterUpdate -> Annex ()) -> Retriever
fileRetriever :: (OsPath -> Key -> MeterUpdate -> Annex ()) -> Retriever
fileRetriever a = fileRetriever' $ \f k m miv ->
let retrieve = a f k m
in tailVerify miv f retrieve
@ -124,20 +124,20 @@ fileRetriever a = fileRetriever' $ \f k m miv ->
- The action is responsible for updating the progress meter and the
- incremental verifier as it retrieves data.
-}
fileRetriever' :: (RawFilePath -> Key -> MeterUpdate -> Maybe IncrementalVerifier -> Annex ()) -> Retriever
fileRetriever' :: (OsPath -> Key -> MeterUpdate -> Maybe IncrementalVerifier -> Annex ()) -> Retriever
fileRetriever' a k m dest miv callback = do
createAnnexDirectory (parentDir dest)
a dest k m miv
pruneTmpWorkDirBefore dest (callback . FileContent . fromRawFilePath)
pruneTmpWorkDirBefore dest (callback . FileContent)
{- The base Remote that is provided to specialRemote needs to have
- storeKey, retrieveKeyFile, removeKey, and checkPresent methods,
- but they are never actually used (since specialRemote replaces them).
- Here are some dummy ones.
-}
storeKeyDummy :: Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex ()
storeKeyDummy :: Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
storeKeyDummy _ _ _ _ = error "missing storeKey implementation"
retrieveKeyFileDummy :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> VerifyConfig -> Annex Verification
retrieveKeyFileDummy :: Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification
retrieveKeyFileDummy _ _ _ _ _ = error "missing retrieveKeyFile implementation"
removeKeyDummy :: Maybe SafeDropProof -> Key -> Annex ()
removeKeyDummy _ _ = error "missing removeKey implementation"
@ -258,9 +258,9 @@ specialRemote' cfg c storer retriever remover checkpresent baser = encr
displayprogress bwlimit p k srcfile a
| displayProgress cfg = do
metered (Just p) (KeySizer k (pure (fmap toRawFilePath srcfile))) bwlimit (const a)
metered (Just p) (KeySizer k (pure srcfile)) bwlimit (const a)
| otherwise = a p
withBytes :: ContentSource -> (L.ByteString -> Annex a) -> Annex a
withBytes (ByteContent b) a = a b
withBytes (FileContent f) a = a =<< liftIO (L.readFile f)
withBytes (FileContent f) a = a =<< liftIO (F.readFile f)
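Since fileStorer and fileRetriever now hand OsPath to their actions, special remotes built on them can drop their fromRawFilePath shims. A minimal sketch of a conforming storer (copyToRemote is a hypothetical stand-in for a real transfer):

mystorer :: Storer
mystorer = fileStorer $ \k src p ->
    -- src :: OsPath; convert only if the backend needs a String
    copyToRemote k src p
  where
    copyToRemote :: Key -> OsPath -> MeterUpdate -> Annex ()
    copyToRemote _ _ _ = noop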


@ -118,8 +118,8 @@ hookEnv action k f = Just <$> mergeenv (fileenv f ++ keyenv)
]
fileenv Nothing = []
fileenv (Just file) = [envvar "FILE" file]
hashbits = map takeDirectory $ splitPath $
fromRawFilePath $ hashDirMixed def k
hashbits = map (fromOsPath . takeDirectory) $
splitPath $ hashDirMixed def k
lookupHook :: HookName -> Action -> Annex (Maybe String)
lookupHook hookname action = do
@ -159,11 +159,11 @@ runHook' hook action k f a = maybe (return False) run =<< lookupHook hook action
)
store :: HookName -> Storer
store h = fileStorer $ \k src _p -> runHook h "store" k (Just src)
store h = fileStorer $ \k src _p -> runHook h "store" k (Just (fromOsPath src))
retrieve :: HookName -> Retriever
retrieve h = fileRetriever $ \d k _p ->
unlessM (runHook' h "retrieve" k (Just (fromRawFilePath d)) $ return True) $
unlessM (runHook' h "retrieve" k (Just (fromOsPath d)) $ return True) $
giveup "failed to retrieve content"
remove :: HookName -> Remover


@ -122,14 +122,14 @@ httpAlsoSetup _ (Just u) _ c gc = do
downloadKey :: Maybe URLString -> LearnedLayout -> Retriever
downloadKey baseurl ll = fileRetriever' $ \dest key p iv ->
downloadAction (fromRawFilePath dest) p iv (keyUrlAction baseurl ll key)
downloadAction dest p iv (keyUrlAction baseurl ll key)
retriveExportHttpAlso :: Maybe URLString -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
retriveExportHttpAlso :: Maybe URLString -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
retriveExportHttpAlso baseurl key loc dest p = do
verifyKeyContentIncrementally AlwaysVerify key $ \iv ->
downloadAction dest p iv (exportLocationUrlAction baseurl loc)
downloadAction :: FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> ((URLString -> Annex (Either String ())) -> Annex (Either String ())) -> Annex ()
downloadAction :: OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> ((URLString -> Annex (Either String ())) -> Annex (Either String ())) -> Annex ()
downloadAction dest p iv run =
Url.withUrlOptions $ \uo ->
run (\url -> Url.download' p iv url dest uo)
@ -192,7 +192,7 @@ exportLocationUrlAction
-> (URLString -> Annex (Either String ()))
-> Annex (Either String ())
exportLocationUrlAction (Just baseurl) loc a =
a (baseurl P.</> fromRawFilePath (fromExportLocation loc))
a (baseurl P.</> fromOsPath (fromExportLocation loc))
exportLocationUrlAction Nothing _ _ = noBaseUrlError
-- cannot normally happen
@ -228,5 +228,5 @@ supportedLayouts baseurl =
]
]
where
mkurl k hasher = baseurl P.</> fromRawFilePath (hasher k) P.</> kf k
kf k = fromRawFilePath (keyFile k)
mkurl k hasher = baseurl P.</> fromOsPath (hasher k) P.</> kf k
kf k = fromOsPath (keyFile k)


@ -117,12 +117,13 @@ gen r u rc gc rs = do
, getRepo = return r
, gitconfig = gc
, localpath = if islocal
then Just $ rsyncUrl o
then Just $ toOsPath $ rsyncUrl o
else Nothing
, readonly = False
, appendonly = False
, untrustworthy = False
, availability = checkPathAvailability islocal (rsyncUrl o)
, availability = checkPathAvailability islocal
(toOsPath (rsyncUrl o))
, remotetype = remote
, mkUnavailable = return Nothing
, getInfo = return [("url", url)]
@ -221,45 +222,45 @@ rsyncSetup _ mu _ c gc = do
- (When we have the right hash directory structure, we can just
- pass --include=X --include=X/Y --include=X/Y/file --exclude=*)
-}
store :: RsyncOpts -> Key -> FilePath -> MeterUpdate -> Annex ()
store :: RsyncOpts -> Key -> OsPath -> MeterUpdate -> Annex ()
store o k src meterupdate = storeGeneric o meterupdate basedest populatedest
where
basedest = fromRawFilePath $ NE.head (keyPaths k)
basedest = NE.head (keyPaths k)
populatedest dest = liftIO $ if canrename
then do
R.rename (toRawFilePath src) (toRawFilePath dest)
R.rename (fromOsPath src) (fromOsPath dest)
return True
else createLinkOrCopy (toRawFilePath src) (toRawFilePath dest)
else createLinkOrCopy src dest
{- If the key being sent is encrypted or chunked, the file
- containing its content is a temp file, and so can be
- renamed into place. Otherwise, the file is the annexed
- object file, and has to be copied or hard linked into place. -}
canrename = isEncKey k || isChunkKey k
storeGeneric :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex ()
storeGeneric :: RsyncOpts -> MeterUpdate -> OsPath -> (OsPath -> Annex Bool) -> Annex ()
storeGeneric o meterupdate basedest populatedest =
unlessM (storeGeneric' o meterupdate basedest populatedest) $
giveup "failed to rsync content"
storeGeneric' :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex Bool
storeGeneric' :: RsyncOpts -> MeterUpdate -> OsPath -> (OsPath -> Annex Bool) -> Annex Bool
storeGeneric' o meterupdate basedest populatedest = withRsyncScratchDir $ \tmp -> do
let dest = tmp </> basedest
createAnnexDirectory (parentDir (toRawFilePath dest))
createAnnexDirectory (parentDir dest)
ok <- populatedest dest
ps <- sendParams
if ok
then showResumable $ rsyncRemote Upload o (Just meterupdate) $ ps ++
Param "--recursive" : partialParams ++
-- tmp/ to send contents of tmp dir
[ File $ addTrailingPathSeparator tmp
[ File $ fromOsPath $ addTrailingPathSeparator tmp
, Param $ rsyncUrl o
]
else return False
retrieve :: RsyncOpts -> RawFilePath -> Key -> MeterUpdate -> Annex ()
retrieve o f k p = rsyncRetrieveKey o k (fromRawFilePath f) (Just p)
retrieve :: RsyncOpts -> OsPath -> Key -> MeterUpdate -> Annex ()
retrieve o f k p = rsyncRetrieveKey o k f (Just p)
retrieveCheap :: RsyncOpts -> Key -> AssociatedFile -> FilePath -> Annex ()
retrieveCheap :: RsyncOpts -> Key -> AssociatedFile -> OsPath -> Annex ()
retrieveCheap o k _af f = ifM (preseedTmp k f)
( rsyncRetrieveKey o k f Nothing
, giveup "cannot preseed rsync with existing content"
@ -269,11 +270,11 @@ remove :: RsyncOpts -> Remover
remove o _proof k = removeGeneric o includes
where
includes = concatMap use dirHashes
use h = let dir = fromRawFilePath (h def k) in
[ fromRawFilePath (parentDir (toRawFilePath dir))
, dir
use h = let dir = h def k in
[ fromOsPath (parentDir dir)
, fromOsPath dir
-- match content directory and anything in it
, dir </> fromRawFilePath (keyFile k) </> "***"
, fromOsPath $ dir </> keyFile k </> literalOsPath "***"
]
{- An empty directory is rsynced to make it delete. Everything is excluded,
@ -291,7 +292,7 @@ removeGeneric o includes = do
[ Param "--exclude=*" -- exclude everything else
, Param "--quiet", Param "--delete", Param "--recursive"
] ++ partialParams ++
[ Param $ addTrailingPathSeparator tmp
[ Param $ fromOsPath $ addTrailingPathSeparator tmp
, Param $ rsyncUrl o
]
unless ok $
@ -313,43 +314,43 @@ checkPresentGeneric o rsyncurls = do
}
in withCreateProcess p $ \_ _ _ -> checkSuccessProcess
storeExportM :: RsyncOpts -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
storeExportM :: RsyncOpts -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
storeExportM o src _k loc meterupdate =
storeGeneric o meterupdate basedest populatedest
where
basedest = fromRawFilePath (fromExportLocation loc)
populatedest = liftIO . createLinkOrCopy (toRawFilePath src) . toRawFilePath
basedest = fromExportLocation loc
populatedest = liftIO . createLinkOrCopy src
retrieveExportM :: RsyncOpts -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
retrieveExportM :: RsyncOpts -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
retrieveExportM o k loc dest p =
verifyKeyContentIncrementally AlwaysVerify k $ \iv ->
tailVerify iv (toRawFilePath dest) $
tailVerify iv dest $
rsyncRetrieve o [rsyncurl] dest (Just p)
where
rsyncurl = mkRsyncUrl o (fromRawFilePath (fromExportLocation loc))
rsyncurl = mkRsyncUrl o (fromOsPath (fromExportLocation loc))
checkPresentExportM :: RsyncOpts -> Key -> ExportLocation -> Annex Bool
checkPresentExportM o _k loc = checkPresentGeneric o [rsyncurl]
where
rsyncurl = mkRsyncUrl o (fromRawFilePath (fromExportLocation loc))
rsyncurl = mkRsyncUrl o (fromOsPath (fromExportLocation loc))
removeExportM :: RsyncOpts -> Key -> ExportLocation -> Annex ()
removeExportM o _k loc =
removeGeneric o $ map fromRawFilePath $
includes $ fromExportLocation loc
removeGeneric o $ map fromOsPath $ includes $ fromExportLocation loc
where
includes f = f : case upFrom f of
Nothing -> []
Just f' -> includes f'
removeExportDirectoryM :: RsyncOpts -> ExportDirectory -> Annex ()
removeExportDirectoryM o ed = removeGeneric o (allbelow d : includes d)
removeExportDirectoryM o ed = removeGeneric o $
map fromOsPath (allbelow d : includes d)
where
d = fromRawFilePath $ fromExportDirectory ed
allbelow f = f </> "***"
includes f = f : case upFrom (toRawFilePath f) of
d = fromExportDirectory ed
allbelow f = f </> literalOsPath "***"
includes f = f : case upFrom f of
Nothing -> []
Just f' -> includes (fromRawFilePath f')
Just f' -> includes f'
renameExportM :: RsyncOpts -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
renameExportM _ _ _ _ = return Nothing
@ -371,12 +372,12 @@ sendParams = ifM crippledFileSystem
{- Runs an action in an empty scratch directory that can be used to build
- up trees for rsync. -}
withRsyncScratchDir :: (FilePath -> Annex a) -> Annex a
withRsyncScratchDir :: (OsPath -> Annex a) -> Annex a
withRsyncScratchDir a = do
t <- fromRawFilePath <$> fromRepo gitAnnexTmpObjectDir
withTmpDirIn t (toOsPath "rsynctmp") a
t <- fromRepo gitAnnexTmpObjectDir
withTmpDirIn t (literalOsPath "rsynctmp") a
rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> FilePath -> Maybe MeterUpdate -> Annex ()
rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> OsPath -> Maybe MeterUpdate -> Annex ()
rsyncRetrieve o rsyncurls dest meterupdate =
unlessM go $
giveup "rsync failed"
@ -385,10 +386,10 @@ rsyncRetrieve o rsyncurls dest meterupdate =
-- use inplace when retrieving to support resuming
[ Param "--inplace"
, Param u
, File dest
, File (fromOsPath dest)
]
rsyncRetrieveKey :: RsyncOpts -> Key -> FilePath -> Maybe MeterUpdate -> Annex ()
rsyncRetrieveKey :: RsyncOpts -> Key -> OsPath -> Maybe MeterUpdate -> Annex ()
rsyncRetrieveKey o k dest meterupdate =
rsyncRetrieve o (rsyncUrls o k) dest meterupdate
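The rsync command line still needs Strings, so fromOsPath appears exactly where a path becomes a CommandParam. That boundary, isolated as a sketch (uploadParams is hypothetical; File and Param are the constructors already used above):

uploadParams :: OsPath -> RsyncUrl -> [CommandParam]
uploadParams tmp u =
    [ File (fromOsPath (addTrailingPathSeparator tmp))
    , Param u
    ]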


@ -68,6 +68,7 @@ import Utility.Url (extractFromResourceT, UserAgent)
import Annex.Url (getUserAgent, getUrlOptions, withUrlOptions, UrlOptions(..))
import Utility.Env
import Annex.Verify
import qualified Utility.FileIO as F
type BucketName = String
type BucketObject = String
@ -349,10 +350,10 @@ store mh r info magic = fileStorer $ \k f p -> withS3HandleOrFail (uuid r) mh $
when (isIA info && not (isChunkKey k)) $
setUrlPresent k (iaPublicUrl info (bucketObject info k))
storeHelper :: S3Info -> S3Handle -> Maybe Magic -> FilePath -> S3.Object -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
storeHelper :: S3Info -> S3Handle -> Maybe Magic -> OsPath -> S3.Object -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
storeHelper info h magic f object p = liftIO $ case partSize info of
Just partsz | partsz > 0 -> do
fsz <- getFileSize (toRawFilePath f)
fsz <- getFileSize f
if fsz > partsz
then multipartupload fsz partsz
else singlepartupload
@ -385,7 +386,7 @@ storeHelper info h magic f object p = liftIO $ case partSize info of
-- Send parts of the file, taking care to stream each part
-- w/o buffering in memory, since the parts can be large.
etags <- bracketIO (openBinaryFile f ReadMode) hClose $ \fh -> do
etags <- bracketIO (F.openBinaryFile f ReadMode) hClose $ \fh -> do
let sendparts meter etags partnum = do
pos <- liftIO $ hTell fh
if pos >= fsz
@ -420,24 +421,24 @@ retrieve hv r rs c info = fileRetriever' $ \f k p iv -> withS3Handle hv $ \case
Left failreason -> do
warning (UnquotedString failreason)
giveup "cannot download content"
Right loc -> retrieveHelper info h loc (fromRawFilePath f) p iv
Right loc -> retrieveHelper info h loc f p iv
Left S3HandleNeedCreds ->
getPublicWebUrls' rs info c k >>= \case
Left failreason -> do
warning (UnquotedString failreason)
giveup "cannot download content"
Right us -> unlessM (withUrlOptions $ downloadUrl False k p iv us (fromRawFilePath f)) $
Right us -> unlessM (withUrlOptions $ downloadUrl False k p iv us f) $
giveup "failed to download content"
Left S3HandleAnonymousOldAws -> giveupS3HandleProblem S3HandleAnonymousOldAws (uuid r)
retrieveHelper :: S3Info -> S3Handle -> (Either S3.Object S3VersionID) -> FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> Annex ()
retrieveHelper :: S3Info -> S3Handle -> (Either S3.Object S3VersionID) -> OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> Annex ()
retrieveHelper info h loc f p iv = retrieveHelper' h f p iv $
case loc of
Left o -> S3.getObject (bucket info) o
Right (S3VersionID o vid) -> (S3.getObject (bucket info) o)
{ S3.goVersionId = Just vid }
retrieveHelper' :: S3Handle -> FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> S3.GetObject -> Annex ()
retrieveHelper' :: S3Handle -> OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> S3.GetObject -> Annex ()
retrieveHelper' h f p iv req = liftIO $ runResourceT $ do
S3.GetObjectResponse { S3.gorResponse = rsp } <- sendS3Handle h req
Url.sinkResponseFile p iv zeroBytesProcessed f WriteMode rsp
@ -495,10 +496,10 @@ checkKeyHelper' info h o limit = liftIO $ runResourceT $ do
where
req = limit $ S3.headObject (bucket info) o
storeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
storeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
storeExportS3 hv r rs info magic f k loc p = void $ storeExportS3' hv r rs info magic f k loc p
storeExportS3' :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
storeExportS3' :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
storeExportS3' hv r rs info magic f k loc p = withS3Handle hv $ \case
Right h -> go h
Left pr -> giveupS3HandleProblem pr (uuid r)
@ -509,7 +510,7 @@ storeExportS3' hv r rs info magic f k loc p = withS3Handle hv $ \case
setS3VersionID info rs k mvid
return (metag, mvid)
retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
retrieveExportS3 hv r info k loc f p = verifyKeyContentIncrementally AlwaysVerify k $ \iv ->
withS3Handle hv $ \case
Right h -> retrieveHelper info h (Left (T.pack exportloc)) f p iv
@ -700,7 +701,7 @@ mkImportableContentsVersioned = build . groupfiles
| otherwise =
i : removemostrecent mtime rest
retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> [ContentIdentifier] -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> [ContentIdentifier] -> OsPath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
retrieveExportWithContentIdentifierS3 hv r rs info loc (cid:_) dest gk p =
case gk of
Right _mkkey -> do
@ -744,7 +745,7 @@ rewritePreconditionException a = catchJust (Url.matchStatusCodeException want) a
--
-- When the bucket is not versioned, data loss can result.
-- This is why that configuration requires --force to enable.
storeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
storeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> OsPath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
storeExportWithContentIdentifierS3 hv r rs info magic src k loc _overwritablecids p
| versioning info = go
| otherwise = go
@ -835,7 +836,7 @@ writeUUIDFile c u info h = unless (exportTree c || importTree c) $ do
giveup "Cannot reuse this bucket."
_ -> void $ liftIO $ runResourceT $ sendS3Handle h mkobject
where
file = T.pack $ uuidFile c
file = T.pack $ fromOsPath $ uuidFile c
uuidb = L.fromChunks [T.encodeUtf8 $ T.pack $ fromUUID u]
mkobject = putObject info file (RequestBodyLBS uuidb)
@ -858,11 +859,11 @@ checkUUIDFile c u info h
check (S3.GetObjectMemoryResponse _meta rsp) =
responseStatus rsp == ok200 && responseBody rsp == uuidb
file = T.pack $ uuidFile c
file = T.pack $ fromOsPath $ uuidFile c
uuidb = L.fromChunks [T.encodeUtf8 $ T.pack $ fromUUID u]
uuidFile :: ParsedRemoteConfig -> FilePath
uuidFile c = getFilePrefix c ++ "annex-uuid"
uuidFile :: ParsedRemoteConfig -> OsPath
uuidFile c = toOsPath (getFilePrefix c) <> literalOsPath "annex-uuid"
tryS3 :: ResourceT IO a -> ResourceT IO (Either S3.S3Error a)
tryS3 a = (Right <$> a) `catch` (pure . Left)
@ -1090,16 +1091,16 @@ getBucketObject c = munge . serializeKey
getBucketExportLocation :: ParsedRemoteConfig -> ExportLocation -> BucketObject
getBucketExportLocation c loc =
getFilePrefix c ++ fromRawFilePath (fromExportLocation loc)
getFilePrefix c ++ fromOsPath (fromExportLocation loc)
getBucketImportLocation :: ParsedRemoteConfig -> BucketObject -> Maybe ImportLocation
getBucketImportLocation c obj
-- The uuidFile should not be imported.
| obj == uuidfile = Nothing
| obj == fromOsPath uuidfile = Nothing
-- Only import files that are under the fileprefix, when
-- one is configured.
| prefix `isPrefixOf` obj = Just $ mkImportLocation $
toRawFilePath $ drop prefixlen obj
toOsPath $ drop prefixlen obj
| otherwise = Nothing
where
prefix = getFilePrefix c
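uuidFile illustrates the construction idiom for OsPath values: toOsPath lifts configured String fragments, literalOsPath embeds constants, <> joins them, and fromOsPath is applied only where a String is ultimately required. The same idiom in isolation, with a hypothetical prefix value:

-- fromOsPath examplePath == "exports/annex-uuid"
examplePath :: OsPath
examplePath = toOsPath "exports/" <> literalOsPath "annex-uuid"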


@ -49,7 +49,7 @@ import Utility.ThreadScheduler
{- The TMVar is left empty until tahoe has been verified to be running. -}
data TahoeHandle = TahoeHandle TahoeConfigDir (TMVar ())
type TahoeConfigDir = FilePath
type TahoeConfigDir = OsPath
type SharedConvergenceSecret = String
type IntroducerFurl = String
type Capability = String
@ -81,7 +81,9 @@ gen r u rc gc rs = do
c <- parsedRemoteConfig remote rc
cst <- remoteCost gc c expensiveRemoteCost
hdl <- liftIO $ TahoeHandle
<$> maybe (defaultTahoeConfigDir u) return (remoteAnnexTahoe gc)
<$> maybe (defaultTahoeConfigDir u)
(return . toOsPath)
(remoteAnnexTahoe gc)
<*> newEmptyTMVarIO
return $ Just $ Remote
{ uuid = u
@ -136,18 +138,18 @@ tahoeSetup _ mu _ c _ = do
, (scsField, Proposed scs)
]
else c
gitConfigSpecialRemote u c' [("tahoe", configdir)]
gitConfigSpecialRemote u c' [("tahoe", fromOsPath configdir)]
return (c', u)
where
missingfurl = giveup "Set TAHOE_FURL to the introducer furl to use."
store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex ()
store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
store rs hdl k _af o _p = sendAnnex k o noop $ \src _sz ->
parsePut <$> liftIO (readTahoe hdl "put" [File src]) >>= maybe
parsePut <$> liftIO (readTahoe hdl "put" [File (fromOsPath src)]) >>= maybe
(giveup "tahoe failed to store content")
(\cap -> storeCapability rs k cap)
retrieve :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> VerifyConfig -> Annex Verification
retrieve :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification
retrieve rs hdl k _f d _p _ = do
go =<< getCapability rs k
-- Tahoe verifies the content it retrieves using cryptographically
@ -155,7 +157,7 @@ retrieve rs hdl k _f d _p _ = do
return Verified
where
go Nothing = giveup "tahoe capability is not known"
go (Just cap) = unlessM (liftIO $ requestTahoe hdl "get" [Param cap, File d]) $
go (Just cap) = unlessM (liftIO $ requestTahoe hdl "get" [Param cap, File (fromOsPath d)]) $
giveup "tahoe failed to reteieve content"
remove :: Maybe SafeDropProof -> Key -> Annex ()
@ -185,7 +187,7 @@ checkKey rs hdl k = go =<< getCapability rs k
defaultTahoeConfigDir :: UUID -> IO TahoeConfigDir
defaultTahoeConfigDir u = do
h <- myHomeDir
return $ h </> ".tahoe-git-annex" </> fromUUID u
return $ toOsPath h </> literalOsPath ".tahoe-git-annex" </> fromUUID u
tahoeConfigure :: TahoeConfigDir -> IntroducerFurl -> Maybe SharedConvergenceSecret -> IO SharedConvergenceSecret
tahoeConfigure configdir furl mscs = do
@ -197,8 +199,7 @@ tahoeConfigure configdir furl mscs = do
createClient :: TahoeConfigDir -> IntroducerFurl -> IO Bool
createClient configdir furl = do
createDirectoryIfMissing True $
fromRawFilePath $ parentDir $ toRawFilePath configdir
createDirectoryIfMissing True $ parentDir configdir
boolTahoe configdir "create-client"
[ Param "--nickname", Param "git-annex"
, Param "--introducer", Param furl
@ -206,7 +207,8 @@ createClient configdir furl = do
writeSharedConvergenceSecret :: TahoeConfigDir -> SharedConvergenceSecret -> IO ()
writeSharedConvergenceSecret configdir scs =
writeFile (convergenceFile configdir) (unlines [scs])
writeFile (fromOsPath (convergenceFile configdir))
(unlines [scs])
{- The tahoe daemon writes the convergenceFile shortly after it starts
- (it does not need to connect to the network). So, try repeatedly to read
@ -215,7 +217,7 @@ writeSharedConvergenceSecret configdir scs =
getSharedConvergenceSecret :: TahoeConfigDir -> IO SharedConvergenceSecret
getSharedConvergenceSecret configdir = go (60 :: Int)
where
f = convergenceFile configdir
f = fromOsPath $ convergenceFile configdir
go n
| n == 0 = giveup $ "tahoe did not write " ++ f ++ " after 1 minute. Perhaps the daemon failed to start?"
| otherwise = do
@ -227,8 +229,9 @@ getSharedConvergenceSecret configdir = go (60 :: Int)
threadDelaySeconds (Seconds 1)
go (n - 1)
convergenceFile :: TahoeConfigDir -> FilePath
convergenceFile configdir = configdir </> "private" </> "convergence"
convergenceFile :: TahoeConfigDir -> OsPath
convergenceFile configdir =
configdir </> literalOsPath "private" </> literalOsPath "convergence"
startTahoeDaemon :: TahoeConfigDir -> IO ()
startTahoeDaemon configdir = void $ boolTahoe configdir "start" []
@ -267,7 +270,7 @@ readTahoe hdl command params = withTahoeConfigDir hdl $ \configdir ->
tahoeParams :: TahoeConfigDir -> String -> [CommandParam] -> [CommandParam]
tahoeParams configdir command params =
Param "-d" : File configdir : Param command : params
Param "-d" : File (fromOsPath configdir) : Param command : params
storeCapability :: RemoteStateHandle -> Key -> Capability -> Annex ()
storeCapability rs k cap = setRemoteState rs k cap


@ -176,11 +176,11 @@ retrieve hv cc = fileRetriever' $ \d k p iv ->
LegacyChunks _ -> do
-- Not doing incremental verification for chunks.
liftIO $ maybe noop unableIncrementalVerifier iv
retrieveLegacyChunked (fromRawFilePath d) k p dav
retrieveLegacyChunked (fromOsPath d) k p dav
_ -> liftIO $ goDAV dav $
retrieveHelper (keyLocation k) (fromRawFilePath d) p iv
retrieveHelper (keyLocation k) d p iv
retrieveHelper :: DavLocation -> FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> DAVT IO ()
retrieveHelper :: DavLocation -> OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> DAVT IO ()
retrieveHelper loc d p iv = do
debugDav $ "retrieve " ++ loc
inLocation loc $
@ -213,14 +213,14 @@ checkKey hv chunkconfig k = withDavHandle hv $ \dav ->
existsDAV (keyLocation k)
either giveup return v
storeExportDav :: DavHandleVar -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
storeExportDav :: DavHandleVar -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
storeExportDav hdl f k loc p = case exportLocation loc of
Right dest -> withDavHandle hdl $ \h -> runExport h $ \dav -> do
reqbody <- liftIO $ httpBodyStorer f p
storeHelper dav (exportTmpLocation loc k) dest reqbody
Left err -> giveup err
retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
retrieveExportDav hdl k loc d p = case exportLocation loc of
Right src -> verifyKeyContentIncrementally AlwaysVerify k $ \iv ->
withDavHandle hdl $ \h -> runExport h $ \_dav ->
@ -247,7 +247,7 @@ removeExportDav hdl _k loc = case exportLocation loc of
removeExportDirectoryDav :: DavHandleVar -> ExportDirectory -> Annex ()
removeExportDirectoryDav hdl dir = withDavHandle hdl $ \h -> runExport h $ \_dav -> do
let d = fromRawFilePath $ fromExportDirectory dir
let d = fromOsPath $ fromExportDirectory dir
debugDav $ "delContent " ++ d
inLocation d delContentM
@ -481,7 +481,7 @@ storeLegacyChunked annexrunner chunksize k dav b =
finalizer tmp' dest' = goDAV dav $
finalizeStore dav tmp' (fromJust $ locationParent dest')
tmp = addTrailingPathSeparator $ keyTmpLocation k
tmp = fromOsPath $ addTrailingPathSeparator $ toOsPath $ keyTmpLocation k
dest = keyLocation k
retrieveLegacyChunked :: FilePath -> Key -> MeterUpdate -> DavHandle -> Annex ()
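DavLocation remains a String, so the tmp binding above round-trips through OsPath just to reuse addTrailingPathSeparator. A hypothetical helper capturing that round-trip:

trailingSlash :: String -> String
trailingSlash = fromOsPath . addTrailingPathSeparator . toOsPath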


@ -31,6 +31,7 @@ module Types.Remote
import Data.Ord
import Common
import qualified Git
import Types.Key
import Types.UUID
@ -47,7 +48,6 @@ import Utility.Hash (IncrementalVerifier)
import Config.Cost
import Utility.Metered
import Git.Types (RemoteName)
import Utility.SafeCommand
import Utility.Url
import Utility.DataUnits
@ -92,18 +92,18 @@ data RemoteA a = Remote
-- The key should not appear to be present on the remote until
-- all of its contents have been transferred.
-- Throws exception on failure.
, storeKey :: Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> a ()
, storeKey :: Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> a ()
-- Retrieves a key's contents to a file.
-- (The MeterUpdate does not need to be used if it writes
-- sequentially to the file.)
-- Throws exception on failure.
, retrieveKeyFile :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> VerifyConfigA a -> a Verification
, retrieveKeyFile :: Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfigA a -> a Verification
{- Will retrieveKeyFile write to the file in order? -}
, retrieveKeyFileInOrder :: a Bool
-- Retrieves a key's contents to a tmp file, if it can be done cheaply.
-- It's ok to create a symlink or hardlink.
-- Throws exception on failure.
, retrieveKeyFileCheap :: Maybe (Key -> AssociatedFile -> FilePath -> a ())
, retrieveKeyFileCheap :: Maybe (Key -> AssociatedFile -> OsPath -> a ())
-- Security policy for retrieving keys from this remote.
, retrievalSecurityPolicy :: RetrievalSecurityPolicy
-- Removes a key's contents (succeeds even the contents are not present)
@ -147,7 +147,7 @@ data RemoteA a = Remote
-- a Remote's configuration from git
, gitconfig :: RemoteGitConfig
-- a Remote can be associated with a specific local filesystem path
, localpath :: Maybe FilePath
, localpath :: Maybe OsPath
-- a Remote can be known to be readonly
, readonly :: Bool
-- a Remote can allow writes but not have a way to delete content
@ -270,12 +270,12 @@ data ExportActions a = ExportActions
-- The exported file should not appear to be present on the remote
-- until all of its contents have been transferred.
-- Throws exception on failure.
{ storeExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> a ()
{ storeExport :: OsPath -> Key -> ExportLocation -> MeterUpdate -> a ()
-- Retrieves exported content to a file.
-- (The MeterUpdate does not need to be used if it writes
-- sequentially to the file.)
-- Throws exception on failure.
, retrieveExport :: Key -> ExportLocation -> FilePath -> MeterUpdate -> a Verification
, retrieveExport :: Key -> ExportLocation -> OsPath -> MeterUpdate -> a Verification
-- Removes an exported file (succeeds if the contents are not present)
-- Can throw exception if unable to access remote, or if remote
-- refuses to remove the content.
@ -351,7 +351,7 @@ data ImportActions a = ImportActions
:: ExportLocation
-> [ContentIdentifier]
-- file to write content to
-> FilePath
-> OsPath
-- Either the key, or when it's not yet known, a callback
-- that generates a key from the downloaded content.
-> Either Key (a Key)
@ -376,7 +376,7 @@ data ImportActions a = ImportActions
--
-- Throws exception on failure.
, storeExportWithContentIdentifier
:: FilePath
:: OsPath
-> Key
-> ExportLocation
-- old content that it's safe to overwrite
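With the record fields re-typed, every remote implementation must follow suit. The simplest conforming shape, sketched with a placeholder body:

-- Hypothetical storeKey matching the new field type; a real remote
-- would transfer the content of src here.
noopStoreKey :: Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
noopStoreKey _key _af _src _p = noop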


@ -60,7 +60,7 @@ needsUpgrade v
g <- Annex.gitRepo
p <- liftIO $ absPath $ Git.repoPath g
return $ Just $ unwords
[ "Repository", fromRawFilePath p
[ "Repository", fromOsPath p
, "is at"
, if v `elem` supportedVersions
then "supported"
@ -117,7 +117,7 @@ upgrade automatic destversion = go =<< getVersion
-- This avoids complicating the upgrade code by needing to handle
-- upgrading a git repo other than the current repo.
upgraderemote = do
rp <- fromRawFilePath <$> fromRepo Git.repoPath
rp <- fromOsPath <$> fromRepo Git.repoPath
ok <- gitAnnexChildProcess "upgrade"
[ Param "--quiet"
, Param "--autoonly"


@ -22,11 +22,11 @@ upgrade = do
showAction "v0 to v1"
-- do the reorganisation of the key files
olddir <- fromRawFilePath <$> fromRepo gitAnnexDir
olddir <- fromRepo gitAnnexDir
keys <- getKeysPresent0 olddir
forM_ keys $ \k ->
moveAnnex k (AssociatedFile Nothing)
(toRawFilePath $ olddir </> keyFile0 k)
(olddir </> toOsPath (keyFile0 k))
-- update the symlinks to the key files
-- No longer needed here; V1.upgrade does the same thing
@ -39,20 +39,18 @@ keyFile0 :: Key -> FilePath
keyFile0 = Upgrade.V1.keyFile1
fileKey0 :: FilePath -> Key
fileKey0 = Upgrade.V1.fileKey1
lookupKey0 :: FilePath -> Annex (Maybe (Key, Backend))
lookupKey0 = Upgrade.V1.lookupKey1
getKeysPresent0 :: FilePath -> Annex [Key]
getKeysPresent0 :: OsPath -> Annex [Key]
getKeysPresent0 dir = ifM (liftIO $ doesDirectoryExist dir)
( liftIO $ map fileKey0
( liftIO $ map (fileKey0 . fromOsPath)
<$> (filterM present =<< getDirectoryContents dir)
, return []
)
where
present d = do
result <- tryIO $
R.getFileStatus $ toRawFilePath $
dir ++ "/" ++ takeFileName d
R.getFileStatus $ fromOsPath $
dir <> literalOsPath "/" <> takeFileName d
case result of
Right s -> return $ isRegularFile s
Left _ -> return False
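getKeysPresent0 now scans the directory as OsPath end to end, dropping to RawFilePath only for the stat call. That scan pattern as a standalone sketch (regularFilesIn is hypothetical; R is Utility.RawFilePath as above, and catchBoolIO is git-annex's exception helper):

regularFilesIn :: OsPath -> IO [OsPath]
regularFilesIn dir = filterM isregular =<< getDirectoryContents dir
  where
    isregular d = catchBoolIO $
        isRegularFile <$> R.getFileStatus (fromOsPath (dir </> d))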