more OsPath conversion (464/749)

Sponsored-by: unqueued
Joey Hess 2025-02-04 13:35:17 -04:00
parent cf986bc7e2
commit 54f0710fd2
17 changed files with 164 additions and 165 deletions
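The pattern in every hunk below is the same: FilePath and RawFilePath arguments become OsPath, constant path components are built with literalOsPath, and toOsPath/fromOsPath conversions survive only at the boundaries of String- or ByteString-based APIs. A minimal sketch of the idiom, assuming git-annex's Utility.OsPath helpers plus the keyFile and File functions the hunks themselves use; it is illustrative only, not part of the commit:

	-- Sketch of the conversion idiom; Key and keyFile are git-annex's
	-- (Types.Key, Annex.Locations), not defined here.
	import Utility.OsPath
	import Utility.SafeCommand (CommandParam(..))

	-- OsPath end to end; literals via literalOsPath, no String round trip.
	tmpFileFor :: OsPath -> Key -> OsPath
	tmpFileFor tmpdir k = tmpdir </> keyFile k <> literalOsPath ".tmp"

	-- Convert only where a String-based API (here CommandParam) needs it.
	rsyncArgs :: OsPath -> [CommandParam]
	rsyncArgs src = [ Param "--inplace", File (fromOsPath src) ]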

View file

@@ -653,7 +653,7 @@ unlinkAnnex key = do
 - If this happens, runs the rollback action and throws an exception.
 - The rollback action should remove the data that was transferred.
 -}
-sendAnnex :: Key -> Maybe FilePath -> Annex () -> (FilePath -> FileSize -> Annex a) -> Annex a
+sendAnnex :: Key -> Maybe OsPath -> Annex () -> (OsPath -> FileSize -> Annex a) -> Annex a
 sendAnnex key o rollback sendobject = go =<< prepSendAnnex' key o
   where
 	go (Just (f, sz, check)) = do
@@ -676,10 +676,10 @@ sendAnnex key o rollback sendobject = go =<< prepSendAnnex' key o
 - Annex monad of the remote that is receiving the object, rather than
 - the sender. So it cannot rely on Annex state.
 -}
-prepSendAnnex :: Key -> Maybe FilePath -> Annex (Maybe (FilePath, FileSize, Annex Bool))
+prepSendAnnex :: Key -> Maybe OsPath -> Annex (Maybe (OsPath, FileSize, Annex Bool))
 prepSendAnnex key Nothing = withObjectLoc key $ \f -> do
 	let retval c cs = return $ Just
-		( fromOsPath f
+		( f
 		, inodeCacheFileSize c
 		, sameInodeCache f cs
 		)
@@ -704,19 +704,18 @@ prepSendAnnex key Nothing = withObjectLoc key $ \f -> do
 		Nothing -> return Nothing
 -- If the provided object file is the annex object file, handle as above.
 prepSendAnnex key (Just o) = withObjectLoc key $ \aof ->
-	let o' = toOsPath o
-	in if aof == o'
+	if aof == o
 		then prepSendAnnex key Nothing
 		else do
-			withTSDelta (liftIO . genInodeCache o') >>= \case
+			withTSDelta (liftIO . genInodeCache o) >>= \case
 				Nothing -> return Nothing
 				Just c -> return $ Just
 					( o
 					, inodeCacheFileSize c
-					, sameInodeCache o' [c]
+					, sameInodeCache o [c]
 					)
-prepSendAnnex' :: Key -> Maybe FilePath -> Annex (Maybe (FilePath, FileSize, Annex (Maybe String)))
+prepSendAnnex' :: Key -> Maybe OsPath -> Annex (Maybe (OsPath, FileSize, Annex (Maybe String)))
 prepSendAnnex' key o = prepSendAnnex key o >>= \case
 	Just (f, sz, checksuccess) ->
 		let checksuccess' = ifM checksuccess

View file

@@ -17,6 +17,7 @@ module Annex.Magic (
 	getMagicMimeEncoding,
 ) where
+import Common
 import Types.Mime
 import Control.Monad.IO.Class
 #ifdef WITH_MAGICMIME
@@ -24,7 +25,6 @@ import Magic
 import Utility.Env
 import Control.Concurrent
 import System.IO.Unsafe (unsafePerformIO)
-import Common
 #else
 type Magic = ()
 #endif
@@ -44,7 +44,7 @@ initMagicMime = catchMaybeIO $ do
 initMagicMime = return Nothing
 #endif
-getMagicMime :: Magic -> FilePath -> IO (Maybe (MimeType, MimeEncoding))
+getMagicMime :: Magic -> OsPath -> IO (Maybe (MimeType, MimeEncoding))
 #ifdef WITH_MAGICMIME
 getMagicMime m f = Just . parse <$> magicConcurrentSafe (magicFile m f)
   where
@@ -58,10 +58,10 @@ getMagicMime m f = Just . parse <$> magicConcurrentSafe (magicFile m f)
 getMagicMime _ _ = return Nothing
 #endif
-getMagicMimeType :: MonadIO m => Magic -> FilePath -> m (Maybe MimeType)
+getMagicMimeType :: MonadIO m => Magic -> OsPath -> m (Maybe MimeType)
 getMagicMimeType m f = liftIO $ fmap fst <$> getMagicMime m f
-getMagicMimeEncoding :: MonadIO m => Magic -> FilePath -> m (Maybe MimeEncoding)
+getMagicMimeEncoding :: MonadIO m => Magic -> OsPath -> m (Maybe MimeEncoding)
 getMagicMimeEncoding m f = liftIO $ fmap snd <$> getMagicMime m f
 #ifdef WITH_MAGICMIME

View file

@@ -14,7 +14,6 @@ module Remote.Directory.LegacyChunked where
 import qualified Data.ByteString.Lazy as L
 import qualified Data.ByteString as S
-import qualified System.FilePath.ByteString as P
 import Annex.Common
 import Utility.FileMode
@@ -23,7 +22,6 @@ import qualified Remote.Helper.Chunked.Legacy as Legacy
 import Annex.Tmp
 import Utility.Metered
 import Utility.Directory.Create
-import qualified Utility.RawFilePath as R
 import qualified Utility.FileIO as F
 withCheckedFiles :: (FilePath -> IO Bool) -> FilePath -> (FilePath -> Key -> [FilePath]) -> Key -> ([FilePath] -> IO Bool) -> IO Bool
@@ -45,7 +43,7 @@ withCheckedFiles check d locations k a = go $ locations d k
 			else a chunks
 		)
 withStoredFiles :: FilePath -> (FilePath -> Key -> [FilePath]) -> Key -> ([FilePath] -> IO Bool) -> IO Bool
-withStoredFiles = withCheckedFiles doesFileExist
+withStoredFiles = withCheckedFiles (doesFileExist . toOsPath)
 {- Splits a ByteString into chunks and writes to dests, obeying configured
 - chunk size (not to be confused with the L.ByteString chunk size). -}
@@ -77,20 +75,20 @@ storeLegacyChunked' meterupdate chunksize (d:dests) bs c = do
 				feed bytes' (sz - s) ls h
 			else return (l:ls)
-storeHelper :: FilePath -> (RawFilePath -> RawFilePath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO ()
+storeHelper :: FilePath -> (OsPath -> OsPath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO ()
 storeHelper repotop finalizer key storer tmpdir destdir = do
 	void $ liftIO $ tryIO $ createDirectoryUnder
-		[toRawFilePath repotop]
-		(toRawFilePath tmpdir)
+		[toOsPath repotop]
+		(toOsPath tmpdir)
 	Legacy.storeChunks key tmpdir destdir storer recorder (legacyFinalizer finalizer)
   where
 	recorder f s = do
-		let f' = toRawFilePath f
+		let f' = toOsPath f
 		void $ tryIO $ allowWrite f'
 		writeFile f s
 		void $ tryIO $ preventWrite f'
-store :: FilePath -> ChunkSize -> (RawFilePath -> RawFilePath -> IO ()) -> Key -> L.ByteString -> MeterUpdate -> FilePath -> FilePath -> IO ()
+store :: FilePath -> ChunkSize -> (OsPath -> OsPath -> IO ()) -> Key -> L.ByteString -> MeterUpdate -> FilePath -> FilePath -> IO ()
 store repotop chunksize finalizer k b p = storeHelper repotop finalizer k $ \dests ->
 	storeLegacyChunked p chunksize dests b
@@ -98,30 +96,29 @@ store repotop chunksize finalizer k b p = storeHelper repotop finalizer k $ \des
 - Done very innefficiently, by writing to a temp file.
 - :/ This is legacy code..
 -}
-retrieve :: (RawFilePath -> Key -> [RawFilePath]) -> RawFilePath -> Retriever
+retrieve :: (OsPath -> Key -> [OsPath]) -> OsPath -> Retriever
 retrieve locations d basek p _dest miv c = withOtherTmp $ \tmpdir -> do
 	showLongNote "This remote uses the deprecated chunksize setting. So this will be quite slow."
-	let tmp = tmpdir P.</> keyFile basek <> ".directorylegacy.tmp"
-	let tmp' = toOsPath tmp
+	let tmp = tmpdir </> keyFile basek <> literalOsPath ".directorylegacy.tmp"
 	let go = \k sink -> do
-		liftIO $ void $ withStoredFiles (fromRawFilePath d) (legacyLocations locations) k $ \fs -> do
+		liftIO $ void $ withStoredFiles (fromOsPath d) (legacyLocations locations) k $ \fs -> do
 			forM_ fs $
-				F.appendFile' tmp' <=< S.readFile
+				F.appendFile' tmp <=< S.readFile
 			return True
-		b <- liftIO $ F.readFile tmp'
-		liftIO $ removeWhenExistsWith R.removeLink tmp
+		b <- liftIO $ F.readFile tmp
+		liftIO $ removeWhenExistsWith removeFile tmp
 		sink b
 	byteRetriever go basek p tmp miv c
-checkKey :: RawFilePath -> (RawFilePath -> Key -> [RawFilePath]) -> Key -> Annex Bool
+checkKey :: OsPath -> (OsPath -> Key -> [OsPath]) -> Key -> Annex Bool
 checkKey d locations k = liftIO $
-	withStoredFiles (fromRawFilePath d) (legacyLocations locations) k $
+	withStoredFiles (fromOsPath d) (legacyLocations locations) k $
 		-- withStoredFiles checked that it exists
 		const $ return True
-legacyFinalizer :: (RawFilePath -> RawFilePath -> IO ()) -> (FilePath -> FilePath -> IO ())
-legacyFinalizer f = \a b -> f (toRawFilePath a) (toRawFilePath b)
+legacyFinalizer :: (OsPath -> OsPath -> IO ()) -> (FilePath -> FilePath -> IO ())
+legacyFinalizer f = \a b -> f (toOsPath a) (toOsPath b)
-legacyLocations :: (RawFilePath -> Key -> [RawFilePath]) -> (FilePath -> Key -> [FilePath])
+legacyLocations :: (OsPath -> Key -> [OsPath]) -> (FilePath -> Key -> [FilePath])
 legacyLocations locations = \f k ->
-	map fromRawFilePath $ locations (toRawFilePath f) k
+	map fromOsPath $ locations (toOsPath f) k

View file

@@ -33,7 +33,6 @@ import Crypto
 import Backend (isStableKey)
 import Annex.SpecialRemote.Config
 import Annex.Verify
-import qualified Utility.RawFilePath as R
 import qualified Utility.FileIO as F
 import qualified Data.ByteString as S
@@ -584,4 +583,4 @@ ensureChunksAreLogged _ _ (ChunkKeys _) = return ()
 withBytes :: ContentSource -> (L.ByteString -> Annex a) -> Annex a
 withBytes (ByteContent b) a = a b
-withBytes (FileContent f) a = a =<< liftIO (L.readFile (fromOsPath f))
+withBytes (FileContent f) a = a =<< liftIO (F.readFile f)

View file

@@ -14,6 +14,7 @@ import Types.StoreRetrieve
 import Remote.Helper.Special
 import Utility.Metered
 import Utility.Hash (IncrementalVerifier(..))
+import qualified Utility.FileIO as F
 import qualified Data.ByteString.Lazy as L
 import qualified Data.ByteString as S
@@ -31,14 +32,14 @@ httpStorer a = fileStorer $ \k f m -> a k =<< liftIO (httpBodyStorer f m)
 -- Reads the file and generates a streaming request body, that will update
 -- the meter as it's sent.
-httpBodyStorer :: FilePath -> MeterUpdate -> IO RequestBody
+httpBodyStorer :: OsPath -> MeterUpdate -> IO RequestBody
 httpBodyStorer src m = do
-	size <- getFileSize (toRawFilePath src)
+	size <- getFileSize src
 	let streamer sink = withMeteredFile src m $ \b -> byteStringPopper b sink
 	return $ RequestBodyStream (fromInteger size) streamer
 -- Like httpBodyStorer, but generates a chunked request body.
-httpBodyStorerChunked :: FilePath -> MeterUpdate -> RequestBody
+httpBodyStorerChunked :: OsPath -> MeterUpdate -> RequestBody
 httpBodyStorerChunked src m =
 	let streamer sink = withMeteredFile src m $ \b -> byteStringPopper b sink
 	in RequestBodyStreamChunked streamer
@@ -75,10 +76,10 @@ handlePopper numchunks chunksize meterupdate h sink = do
 -- Reads the http body and stores it to the specified file, updating the
 -- meter and incremental verifier as it goes.
-httpBodyRetriever :: FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> Response BodyReader -> IO ()
+httpBodyRetriever :: OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> Response BodyReader -> IO ()
 httpBodyRetriever dest meterupdate iv resp
 	| responseStatus resp /= ok200 = giveup $ show $ responseStatus resp
-	| otherwise = bracket (openBinaryFile dest WriteMode) hClose (go zeroBytesProcessed)
+	| otherwise = bracket (F.openBinaryFile dest WriteMode) hClose (go zeroBytesProcessed)
   where
 	reader = responseBody resp
 	go sofar h = do

View file

@@ -36,9 +36,9 @@ type ProtoConnRunner c = forall a. P2P.Proto a -> ClosableConnection c -> Annex
 -- the pool when done.
 type WithConn a c = (ClosableConnection c -> Annex (ClosableConnection c, a)) -> Annex a
-store :: UUID -> RemoteGitConfig -> ProtoRunner (Maybe [UUID]) -> Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex ()
+store :: UUID -> RemoteGitConfig -> ProtoRunner (Maybe [UUID]) -> Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
 store remoteuuid gc runner k af o p = do
-	let sizer = KeySizer k (fmap (toRawFilePath . fst3) <$> prepSendAnnex k o)
+	let sizer = KeySizer k (fmap fst3 <$> prepSendAnnex k o)
 	let bwlimit = remoteAnnexBwLimitUpload gc <|> remoteAnnexBwLimit gc
 	metered (Just p) sizer bwlimit $ \_ p' ->
 		runner (P2P.put k af p') >>= \case

View file

@@ -44,7 +44,7 @@ adjustReadOnly r
 	}
 	| otherwise = r
-readonlyStoreKey :: Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex ()
+readonlyStoreKey :: Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
 readonlyStoreKey _ _ _ _ = readonlyFail
 readonlyRemoveKey :: Maybe SafeDropProof -> Key -> Annex ()
@@ -53,7 +53,7 @@ readonlyRemoveKey _ _ = readonlyFail
 readonlyStorer :: Storer
 readonlyStorer _ _ _ = readonlyFail
-readonlyStoreExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+readonlyStoreExport :: OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
 readonlyStoreExport _ _ _ _ = readonlyFail
 readonlyRemoveExport :: Key -> ExportLocation -> Annex ()
@@ -62,7 +62,7 @@ readonlyRemoveExport _ _ = readonlyFail
 readonlyRemoveExportDirectory :: ExportDirectory -> Annex ()
 readonlyRemoveExportDirectory _ = readonlyFail
-readonlyStoreExportWithContentIdentifier :: FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
+readonlyStoreExportWithContentIdentifier :: OsPath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
 readonlyStoreExportWithContentIdentifier _ _ _ _ _ = readonlyFail
 readonlyRemoveExportWithContentIdentifier :: Key -> ExportLocation -> [ContentIdentifier] -> Annex ()

View file

@@ -53,6 +53,7 @@ import Messages.Progress
 import qualified Git
 import qualified Git.Construct
 import Git.Types
+import qualified Utility.FileIO as F
 import qualified Data.ByteString as S
 import qualified Data.ByteString.Lazy as L
@@ -92,12 +93,11 @@ mkRetrievalVerifiableKeysSecure gc
 -- A Storer that expects to be provided with a file containing
 -- the content of the key to store.
-fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex ()) -> Storer
+fileStorer :: (Key -> OsPath -> MeterUpdate -> Annex ()) -> Storer
 fileStorer a k (FileContent f) m = a k f m
 fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
-	let f' = fromRawFilePath f
-	liftIO $ L.writeFile f' b
-	a k f' m
+	liftIO $ L.writeFile (fromOsPath f) b
+	a k f m
 -- A Storer that expects to be provided with a L.ByteString of
 -- the content to store.
@@ -107,7 +107,7 @@ byteStorer a k c m = withBytes c $ \b -> a k b m
 -- A Retriever that generates a lazy ByteString containing the Key's
 -- content, and passes it to a callback action which will fully consume it
 -- before returning.
-byteRetriever :: (Key -> (L.ByteString -> Annex a) -> Annex a) -> Key -> MeterUpdate -> RawFilePath -> Maybe IncrementalVerifier -> (ContentSource -> Annex a) -> Annex a
+byteRetriever :: (Key -> (L.ByteString -> Annex a) -> Annex a) -> Key -> MeterUpdate -> OsPath -> Maybe IncrementalVerifier -> (ContentSource -> Annex a) -> Annex a
 byteRetriever a k _m _dest _miv callback = a k (callback . ByteContent)
 -- A Retriever that writes the content of a Key to a file.
@@ -115,7 +115,7 @@ byteRetriever a k _m _dest _miv callback = a k (callback . ByteContent)
 -- retrieves data. The incremental verifier is updated in the background as
 -- the action writes to the file, but may not be updated with the entire
 -- content of the file.
-fileRetriever :: (RawFilePath -> Key -> MeterUpdate -> Annex ()) -> Retriever
+fileRetriever :: (OsPath -> Key -> MeterUpdate -> Annex ()) -> Retriever
 fileRetriever a = fileRetriever' $ \f k m miv ->
 	let retrieve = a f k m
 	in tailVerify miv f retrieve
@@ -124,20 +124,20 @@ fileRetriever a = fileRetriever' $ \f k m miv ->
 - The action is responsible for updating the progress meter and the
 - incremental verifier as it retrieves data.
 -}
-fileRetriever' :: (RawFilePath -> Key -> MeterUpdate -> Maybe IncrementalVerifier -> Annex ()) -> Retriever
+fileRetriever' :: (OsPath -> Key -> MeterUpdate -> Maybe IncrementalVerifier -> Annex ()) -> Retriever
 fileRetriever' a k m dest miv callback = do
 	createAnnexDirectory (parentDir dest)
 	a dest k m miv
-	pruneTmpWorkDirBefore dest (callback . FileContent . fromRawFilePath)
+	pruneTmpWorkDirBefore dest (callback . FileContent)
 {- The base Remote that is provided to specialRemote needs to have
 - storeKey, retrieveKeyFile, removeKey, and checkPresent methods,
 - but they are never actually used (since specialRemote replaces them).
 - Here are some dummy ones.
 -}
-storeKeyDummy :: Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex ()
+storeKeyDummy :: Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
 storeKeyDummy _ _ _ _ = error "missing storeKey implementation"
-retrieveKeyFileDummy :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> VerifyConfig -> Annex Verification
+retrieveKeyFileDummy :: Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification
 retrieveKeyFileDummy _ _ _ _ _ = error "missing retrieveKeyFile implementation"
 removeKeyDummy :: Maybe SafeDropProof -> Key -> Annex ()
 removeKeyDummy _ _ = error "missing removeKey implementation"
@@ -258,9 +258,9 @@ specialRemote' cfg c storer retriever remover checkpresent baser = encr
 	displayprogress bwlimit p k srcfile a
 		| displayProgress cfg = do
-			metered (Just p) (KeySizer k (pure (fmap toRawFilePath srcfile))) bwlimit (const a)
+			metered (Just p) (KeySizer k (pure srcfile)) bwlimit (const a)
 		| otherwise = a p
 withBytes :: ContentSource -> (L.ByteString -> Annex a) -> Annex a
 withBytes (ByteContent b) a = a b
-withBytes (FileContent f) a = a =<< liftIO (L.readFile f)
+withBytes (FileContent f) a = a =<< liftIO (F.readFile f)
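A hypothetical special remote built on the converted helpers (a sketch only; my-upload-helper and myStore are invented names): fileStorer's callback now hands the content file over as an OsPath, so the only conversion left is at the String boundary of the external command.

	import System.Process (callProcess)

	-- Invented example storer; not part of the commit.
	myStore :: Storer
	myStore = fileStorer $ \_k src _p ->
		liftIO $ callProcess "my-upload-helper" [fromOsPath src]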

View file

@@ -118,8 +118,8 @@ hookEnv action k f = Just <$> mergeenv (fileenv f ++ keyenv)
 		]
 	fileenv Nothing = []
 	fileenv (Just file) = [envvar "FILE" file]
-	hashbits = map takeDirectory $ splitPath $
-		fromRawFilePath $ hashDirMixed def k
+	hashbits = map (fromOsPath . takeDirectory) $
+		splitPath $ hashDirMixed def k
 lookupHook :: HookName -> Action -> Annex (Maybe String)
 lookupHook hookname action = do
@@ -159,11 +159,11 @@ runHook' hook action k f a = maybe (return False) run =<< lookupHook hook action
 	)
 store :: HookName -> Storer
-store h = fileStorer $ \k src _p -> runHook h "store" k (Just src)
+store h = fileStorer $ \k src _p -> runHook h "store" k (Just (fromOsPath src))
 retrieve :: HookName -> Retriever
 retrieve h = fileRetriever $ \d k _p ->
-	unlessM (runHook' h "retrieve" k (Just (fromRawFilePath d)) $ return True) $
+	unlessM (runHook' h "retrieve" k (Just (fromOsPath d)) $ return True) $
 		giveup "failed to retrieve content"
 remove :: HookName -> Remover

View file

@@ -122,14 +122,14 @@ httpAlsoSetup _ (Just u) _ c gc = do
 downloadKey :: Maybe URLString -> LearnedLayout -> Retriever
 downloadKey baseurl ll = fileRetriever' $ \dest key p iv ->
-	downloadAction (fromRawFilePath dest) p iv (keyUrlAction baseurl ll key)
+	downloadAction dest p iv (keyUrlAction baseurl ll key)
-retriveExportHttpAlso :: Maybe URLString -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
+retriveExportHttpAlso :: Maybe URLString -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
 retriveExportHttpAlso baseurl key loc dest p = do
 	verifyKeyContentIncrementally AlwaysVerify key $ \iv ->
 		downloadAction dest p iv (exportLocationUrlAction baseurl loc)
-downloadAction :: FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> ((URLString -> Annex (Either String ())) -> Annex (Either String ())) -> Annex ()
+downloadAction :: OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> ((URLString -> Annex (Either String ())) -> Annex (Either String ())) -> Annex ()
 downloadAction dest p iv run =
 	Url.withUrlOptions $ \uo ->
 		run (\url -> Url.download' p iv url dest uo)
@@ -192,7 +192,7 @@ exportLocationUrlAction
 	-> (URLString -> Annex (Either String ()))
 	-> Annex (Either String ())
 exportLocationUrlAction (Just baseurl) loc a =
-	a (baseurl P.</> fromRawFilePath (fromExportLocation loc))
+	a (baseurl P.</> fromOsPath (fromExportLocation loc))
 exportLocationUrlAction Nothing _ _ = noBaseUrlError
 -- cannot normally happen
@@ -228,5 +228,5 @@ supportedLayouts baseurl =
 		]
 	]
   where
-	mkurl k hasher = baseurl P.</> fromRawFilePath (hasher k) P.</> kf k
-	kf k = fromRawFilePath (keyFile k)
+	mkurl k hasher = baseurl P.</> fromOsPath (hasher k) P.</> kf k
+	kf k = fromOsPath (keyFile k)

View file

@@ -117,12 +117,13 @@ gen r u rc gc rs = do
 	, getRepo = return r
 	, gitconfig = gc
 	, localpath = if islocal
-		then Just $ rsyncUrl o
+		then Just $ toOsPath $ rsyncUrl o
 		else Nothing
 	, readonly = False
 	, appendonly = False
 	, untrustworthy = False
-	, availability = checkPathAvailability islocal (rsyncUrl o)
+	, availability = checkPathAvailability islocal
+		(toOsPath (rsyncUrl o))
 	, remotetype = remote
 	, mkUnavailable = return Nothing
 	, getInfo = return [("url", url)]
@@ -221,45 +222,45 @@ rsyncSetup _ mu _ c gc = do
 - (When we have the right hash directory structure, we can just
 - pass --include=X --include=X/Y --include=X/Y/file --exclude=*)
 -}
-store :: RsyncOpts -> Key -> FilePath -> MeterUpdate -> Annex ()
+store :: RsyncOpts -> Key -> OsPath -> MeterUpdate -> Annex ()
 store o k src meterupdate = storeGeneric o meterupdate basedest populatedest
   where
-	basedest = fromRawFilePath $ NE.head (keyPaths k)
+	basedest = NE.head (keyPaths k)
 	populatedest dest = liftIO $ if canrename
 		then do
-			R.rename (toRawFilePath src) (toRawFilePath dest)
+			R.rename (fromOsPath src) (fromOsPath dest)
 			return True
-		else createLinkOrCopy (toRawFilePath src) (toRawFilePath dest)
+		else createLinkOrCopy src dest
 	{- If the key being sent is encrypted or chunked, the file
 	 - containing its content is a temp file, and so can be
 	 - renamed into place. Otherwise, the file is the annexed
 	 - object file, and has to be copied or hard linked into place. -}
 	canrename = isEncKey k || isChunkKey k
-storeGeneric :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex ()
+storeGeneric :: RsyncOpts -> MeterUpdate -> OsPath -> (OsPath -> Annex Bool) -> Annex ()
 storeGeneric o meterupdate basedest populatedest =
 	unlessM (storeGeneric' o meterupdate basedest populatedest) $
 		giveup "failed to rsync content"
-storeGeneric' :: RsyncOpts -> MeterUpdate -> FilePath -> (FilePath -> Annex Bool) -> Annex Bool
+storeGeneric' :: RsyncOpts -> MeterUpdate -> OsPath -> (OsPath -> Annex Bool) -> Annex Bool
 storeGeneric' o meterupdate basedest populatedest = withRsyncScratchDir $ \tmp -> do
 	let dest = tmp </> basedest
-	createAnnexDirectory (parentDir (toRawFilePath dest))
+	createAnnexDirectory (parentDir dest)
 	ok <- populatedest dest
 	ps <- sendParams
 	if ok
 		then showResumable $ rsyncRemote Upload o (Just meterupdate) $ ps ++
 			Param "--recursive" : partialParams ++
 			-- tmp/ to send contents of tmp dir
-			[ File $ addTrailingPathSeparator tmp
+			[ File $ fromOsPath $ addTrailingPathSeparator tmp
 			, Param $ rsyncUrl o
 			]
 		else return False
-retrieve :: RsyncOpts -> RawFilePath -> Key -> MeterUpdate -> Annex ()
-retrieve o f k p = rsyncRetrieveKey o k (fromRawFilePath f) (Just p)
+retrieve :: RsyncOpts -> OsPath -> Key -> MeterUpdate -> Annex ()
+retrieve o f k p = rsyncRetrieveKey o k f (Just p)
-retrieveCheap :: RsyncOpts -> Key -> AssociatedFile -> FilePath -> Annex ()
+retrieveCheap :: RsyncOpts -> Key -> AssociatedFile -> OsPath -> Annex ()
 retrieveCheap o k _af f = ifM (preseedTmp k f)
 	( rsyncRetrieveKey o k f Nothing
 	, giveup "cannot preseed rsync with existing content"
@@ -269,11 +270,11 @@ remove :: RsyncOpts -> Remover
 remove o _proof k = removeGeneric o includes
   where
 	includes = concatMap use dirHashes
-	use h = let dir = fromRawFilePath (h def k) in
-		[ fromRawFilePath (parentDir (toRawFilePath dir))
-		, dir
+	use h = let dir = h def k in
+		[ fromOsPath (parentDir dir)
+		, fromOsPath dir
 		-- match content directory and anything in it
-		, dir </> fromRawFilePath (keyFile k) </> "***"
+		, fromOsPath $ dir </> keyFile k </> literalOsPath "***"
 		]
 {- An empty directory is rsynced to make it delete. Everything is excluded,
@@ -291,7 +292,7 @@ removeGeneric o includes = do
 		[ Param "--exclude=*" -- exclude everything else
 		, Param "--quiet", Param "--delete", Param "--recursive"
 		] ++ partialParams ++
-		[ Param $ addTrailingPathSeparator tmp
+		[ Param $ fromOsPath $ addTrailingPathSeparator tmp
 		, Param $ rsyncUrl o
 		]
 	unless ok $
@@ -313,43 +314,43 @@ checkPresentGeneric o rsyncurls = do
 		}
 	in withCreateProcess p $ \_ _ _ -> checkSuccessProcess
-storeExportM :: RsyncOpts -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+storeExportM :: RsyncOpts -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
 storeExportM o src _k loc meterupdate =
 	storeGeneric o meterupdate basedest populatedest
   where
-	basedest = fromRawFilePath (fromExportLocation loc)
-	populatedest = liftIO . createLinkOrCopy (toRawFilePath src) . toRawFilePath
+	basedest = fromExportLocation loc
+	populatedest = liftIO . createLinkOrCopy src
-retrieveExportM :: RsyncOpts -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
+retrieveExportM :: RsyncOpts -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
 retrieveExportM o k loc dest p =
 	verifyKeyContentIncrementally AlwaysVerify k $ \iv ->
-		tailVerify iv (toRawFilePath dest) $
+		tailVerify iv dest $
 			rsyncRetrieve o [rsyncurl] dest (Just p)
  where
-	rsyncurl = mkRsyncUrl o (fromRawFilePath (fromExportLocation loc))
+	rsyncurl = mkRsyncUrl o (fromOsPath (fromExportLocation loc))
 checkPresentExportM :: RsyncOpts -> Key -> ExportLocation -> Annex Bool
 checkPresentExportM o _k loc = checkPresentGeneric o [rsyncurl]
  where
-	rsyncurl = mkRsyncUrl o (fromRawFilePath (fromExportLocation loc))
+	rsyncurl = mkRsyncUrl o (fromOsPath (fromExportLocation loc))
 removeExportM :: RsyncOpts -> Key -> ExportLocation -> Annex ()
 removeExportM o _k loc =
-	removeGeneric o $ map fromRawFilePath $
-		includes $ fromExportLocation loc
+	removeGeneric o $ map fromOsPath $ includes $ fromExportLocation loc
  where
 	includes f = f : case upFrom f of
 		Nothing -> []
 		Just f' -> includes f'
 removeExportDirectoryM :: RsyncOpts -> ExportDirectory -> Annex ()
-removeExportDirectoryM o ed = removeGeneric o (allbelow d : includes d)
+removeExportDirectoryM o ed = removeGeneric o $
+	map fromOsPath (allbelow d : includes d)
  where
-	d = fromRawFilePath $ fromExportDirectory ed
-	allbelow f = f </> "***"
-	includes f = f : case upFrom (toRawFilePath f) of
+	d = fromExportDirectory ed
+	allbelow f = f </> literalOsPath "***"
+	includes f = f : case upFrom f of
 		Nothing -> []
-		Just f' -> includes (fromRawFilePath f')
+		Just f' -> includes f'
 renameExportM :: RsyncOpts -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
 renameExportM _ _ _ _ = return Nothing
@@ -371,12 +372,12 @@ sendParams = ifM crippledFileSystem
 {- Runs an action in an empty scratch directory that can be used to build
 - up trees for rsync. -}
-withRsyncScratchDir :: (FilePath -> Annex a) -> Annex a
+withRsyncScratchDir :: (OsPath -> Annex a) -> Annex a
 withRsyncScratchDir a = do
-	t <- fromRawFilePath <$> fromRepo gitAnnexTmpObjectDir
-	withTmpDirIn t (toOsPath "rsynctmp") a
+	t <- fromRepo gitAnnexTmpObjectDir
+	withTmpDirIn t (literalOsPath "rsynctmp") a
-rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> FilePath -> Maybe MeterUpdate -> Annex ()
+rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> OsPath -> Maybe MeterUpdate -> Annex ()
 rsyncRetrieve o rsyncurls dest meterupdate =
 	unlessM go $
 		giveup "rsync failed"
@@ -385,10 +386,10 @@ rsyncRetrieve o rsyncurls dest meterupdate =
 		-- use inplace when retrieving to support resuming
 		[ Param "--inplace"
 		, Param u
-		, File dest
+		, File (fromOsPath dest)
 		]
-rsyncRetrieveKey :: RsyncOpts -> Key -> FilePath -> Maybe MeterUpdate -> Annex ()
+rsyncRetrieveKey :: RsyncOpts -> Key -> OsPath -> Maybe MeterUpdate -> Annex ()
 rsyncRetrieveKey o k dest meterupdate =
 	rsyncRetrieve o (rsyncUrls o k) dest meterupdate

View file

@@ -68,6 +68,7 @@ import Utility.Url (extractFromResourceT, UserAgent)
 import Annex.Url (getUserAgent, getUrlOptions, withUrlOptions, UrlOptions(..))
 import Utility.Env
 import Annex.Verify
+import qualified Utility.FileIO as F
 type BucketName = String
 type BucketObject = String
@@ -349,10 +350,10 @@ store mh r info magic = fileStorer $ \k f p -> withS3HandleOrFail (uuid r) mh $
 	when (isIA info && not (isChunkKey k)) $
 		setUrlPresent k (iaPublicUrl info (bucketObject info k))
-storeHelper :: S3Info -> S3Handle -> Maybe Magic -> FilePath -> S3.Object -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
+storeHelper :: S3Info -> S3Handle -> Maybe Magic -> OsPath -> S3.Object -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
 storeHelper info h magic f object p = liftIO $ case partSize info of
 	Just partsz | partsz > 0 -> do
-		fsz <- getFileSize (toRawFilePath f)
+		fsz <- getFileSize f
 		if fsz > partsz
 			then multipartupload fsz partsz
 			else singlepartupload
@@ -385,7 +386,7 @@ storeHelper info h magic f object p = liftIO $ case partSize info of
 		-- Send parts of the file, taking care to stream each part
 		-- w/o buffering in memory, since the parts can be large.
-		etags <- bracketIO (openBinaryFile f ReadMode) hClose $ \fh -> do
+		etags <- bracketIO (F.openBinaryFile f ReadMode) hClose $ \fh -> do
 			let sendparts meter etags partnum = do
 				pos <- liftIO $ hTell fh
 				if pos >= fsz
@@ -420,24 +421,24 @@ retrieve hv r rs c info = fileRetriever' $ \f k p iv -> withS3Handle hv $ \case
 		Left failreason -> do
 			warning (UnquotedString failreason)
 			giveup "cannot download content"
-		Right loc -> retrieveHelper info h loc (fromRawFilePath f) p iv
+		Right loc -> retrieveHelper info h loc f p iv
 	Left S3HandleNeedCreds ->
 		getPublicWebUrls' rs info c k >>= \case
 			Left failreason -> do
 				warning (UnquotedString failreason)
 				giveup "cannot download content"
-			Right us -> unlessM (withUrlOptions $ downloadUrl False k p iv us (fromRawFilePath f)) $
+			Right us -> unlessM (withUrlOptions $ downloadUrl False k p iv us f) $
 				giveup "failed to download content"
 	Left S3HandleAnonymousOldAws -> giveupS3HandleProblem S3HandleAnonymousOldAws (uuid r)
-retrieveHelper :: S3Info -> S3Handle -> (Either S3.Object S3VersionID) -> FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> Annex ()
+retrieveHelper :: S3Info -> S3Handle -> (Either S3.Object S3VersionID) -> OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> Annex ()
 retrieveHelper info h loc f p iv = retrieveHelper' h f p iv $
 	case loc of
 		Left o -> S3.getObject (bucket info) o
 		Right (S3VersionID o vid) -> (S3.getObject (bucket info) o)
 			{ S3.goVersionId = Just vid }
-retrieveHelper' :: S3Handle -> FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> S3.GetObject -> Annex ()
+retrieveHelper' :: S3Handle -> OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> S3.GetObject -> Annex ()
 retrieveHelper' h f p iv req = liftIO $ runResourceT $ do
 	S3.GetObjectResponse { S3.gorResponse = rsp } <- sendS3Handle h req
 	Url.sinkResponseFile p iv zeroBytesProcessed f WriteMode rsp
@@ -495,10 +496,10 @@ checkKeyHelper' info h o limit = liftIO $ runResourceT $ do
   where
 	req = limit $ S3.headObject (bucket info) o
-storeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+storeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
 storeExportS3 hv r rs info magic f k loc p = void $ storeExportS3' hv r rs info magic f k loc p
-storeExportS3' :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
+storeExportS3' :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex (Maybe S3Etag, Maybe S3VersionID)
 storeExportS3' hv r rs info magic f k loc p = withS3Handle hv $ \case
 	Right h -> go h
 	Left pr -> giveupS3HandleProblem pr (uuid r)
@@ -509,7 +510,7 @@ storeExportS3' hv r rs info magic f k loc p = withS3Handle hv $ \case
 		setS3VersionID info rs k mvid
 		return (metag, mvid)
-retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
+retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
 retrieveExportS3 hv r info k loc f p = verifyKeyContentIncrementally AlwaysVerify k $ \iv ->
 	withS3Handle hv $ \case
 		Right h -> retrieveHelper info h (Left (T.pack exportloc)) f p iv
@@ -700,7 +701,7 @@ mkImportableContentsVersioned = build . groupfiles
 		| otherwise =
 			i : removemostrecent mtime rest
-retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> [ContentIdentifier] -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
+retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> [ContentIdentifier] -> OsPath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
 retrieveExportWithContentIdentifierS3 hv r rs info loc (cid:_) dest gk p =
 	case gk of
 		Right _mkkey -> do
@@ -744,7 +745,7 @@ rewritePreconditionException a = catchJust (Url.matchStatusCodeException want) a
 --
 -- When the bucket is not versioned, data loss can result.
 -- This is why that configuration requires --force to enable.
-storeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
+storeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> OsPath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
 storeExportWithContentIdentifierS3 hv r rs info magic src k loc _overwritablecids p
 	| versioning info = go
 	| otherwise = go
@@ -835,7 +836,7 @@ writeUUIDFile c u info h = unless (exportTree c || importTree c) $ do
 			giveup "Cannot reuse this bucket."
 		_ -> void $ liftIO $ runResourceT $ sendS3Handle h mkobject
   where
-	file = T.pack $ uuidFile c
+	file = T.pack $ fromOsPath $ uuidFile c
 	uuidb = L.fromChunks [T.encodeUtf8 $ T.pack $ fromUUID u]
 	mkobject = putObject info file (RequestBodyLBS uuidb)
@@ -858,11 +859,11 @@ checkUUIDFile c u info h
 	check (S3.GetObjectMemoryResponse _meta rsp) =
 		responseStatus rsp == ok200 && responseBody rsp == uuidb
-	file = T.pack $ uuidFile c
+	file = T.pack $ fromOsPath $ uuidFile c
 	uuidb = L.fromChunks [T.encodeUtf8 $ T.pack $ fromUUID u]
-uuidFile :: ParsedRemoteConfig -> FilePath
-uuidFile c = getFilePrefix c ++ "annex-uuid"
+uuidFile :: ParsedRemoteConfig -> OsPath
+uuidFile c = toOsPath (getFilePrefix c) <> literalOsPath "annex-uuid"
 tryS3 :: ResourceT IO a -> ResourceT IO (Either S3.S3Error a)
 tryS3 a = (Right <$> a) `catch` (pure . Left)
@@ -1090,16 +1091,16 @@ getBucketObject c = munge . serializeKey
 getBucketExportLocation :: ParsedRemoteConfig -> ExportLocation -> BucketObject
 getBucketExportLocation c loc =
-	getFilePrefix c ++ fromRawFilePath (fromExportLocation loc)
+	getFilePrefix c ++ fromOsPath (fromExportLocation loc)
 getBucketImportLocation :: ParsedRemoteConfig -> BucketObject -> Maybe ImportLocation
 getBucketImportLocation c obj
 	-- The uuidFile should not be imported.
-	| obj == uuidfile = Nothing
+	| obj == fromOsPath uuidfile = Nothing
 	-- Only import files that are under the fileprefix, when
 	-- one is configured.
 	| prefix `isPrefixOf` obj = Just $ mkImportLocation $
-		toRawFilePath $ drop prefixlen obj
+		toOsPath $ drop prefixlen obj
 	| otherwise = Nothing
  where
 	prefix = getFilePrefix c

View file

@@ -49,7 +49,7 @@ import Utility.ThreadScheduler
 {- The TMVar is left empty until tahoe has been verified to be running. -}
 data TahoeHandle = TahoeHandle TahoeConfigDir (TMVar ())
-type TahoeConfigDir = FilePath
+type TahoeConfigDir = OsPath
 type SharedConvergenceSecret = String
 type IntroducerFurl = String
 type Capability = String
@@ -81,7 +81,9 @@ gen r u rc gc rs = do
 	c <- parsedRemoteConfig remote rc
 	cst <- remoteCost gc c expensiveRemoteCost
 	hdl <- liftIO $ TahoeHandle
-		<$> maybe (defaultTahoeConfigDir u) return (remoteAnnexTahoe gc)
+		<$> maybe (defaultTahoeConfigDir u)
+			(return . toOsPath)
+			(remoteAnnexTahoe gc)
 		<*> newEmptyTMVarIO
 	return $ Just $ Remote
 		{ uuid = u
@@ -136,18 +138,18 @@ tahoeSetup _ mu _ c _ = do
 			, (scsField, Proposed scs)
 			]
 		else c
-	gitConfigSpecialRemote u c' [("tahoe", configdir)]
+	gitConfigSpecialRemote u c' [("tahoe", fromOsPath configdir)]
 	return (c', u)
  where
 	missingfurl = giveup "Set TAHOE_FURL to the introducer furl to use."
-store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex ()
+store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex ()
 store rs hdl k _af o _p = sendAnnex k o noop $ \src _sz ->
-	parsePut <$> liftIO (readTahoe hdl "put" [File src]) >>= maybe
+	parsePut <$> liftIO (readTahoe hdl "put" [File (fromOsPath src)]) >>= maybe
 		(giveup "tahoe failed to store content")
 		(\cap -> storeCapability rs k cap)
-retrieve :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> VerifyConfig -> Annex Verification
+retrieve :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification
 retrieve rs hdl k _f d _p _ = do
 	go =<< getCapability rs k
 	-- Tahoe verifies the content it retrieves using cryptographically
@@ -155,7 +157,7 @@ retrieve rs hdl k _f d _p _ = do
 	return Verified
  where
 	go Nothing = giveup "tahoe capability is not known"
-	go (Just cap) = unlessM (liftIO $ requestTahoe hdl "get" [Param cap, File d]) $
+	go (Just cap) = unlessM (liftIO $ requestTahoe hdl "get" [Param cap, File (fromOsPath d)]) $
 		giveup "tahoe failed to reteieve content"
 remove :: Maybe SafeDropProof -> Key -> Annex ()
@@ -185,7 +187,7 @@ checkKey rs hdl k = go =<< getCapability rs k
 defaultTahoeConfigDir :: UUID -> IO TahoeConfigDir
 defaultTahoeConfigDir u = do
 	h <- myHomeDir
-	return $ h </> ".tahoe-git-annex" </> fromUUID u
+	return $ toOsPath h </> literalOsPath ".tahoe-git-annex" </> fromUUID u
 tahoeConfigure :: TahoeConfigDir -> IntroducerFurl -> Maybe SharedConvergenceSecret -> IO SharedConvergenceSecret
 tahoeConfigure configdir furl mscs = do
@@ -197,8 +199,7 @@ tahoeConfigure configdir furl mscs = do
 createClient :: TahoeConfigDir -> IntroducerFurl -> IO Bool
 createClient configdir furl = do
-	createDirectoryIfMissing True $
-		fromRawFilePath $ parentDir $ toRawFilePath configdir
+	createDirectoryIfMissing True $ parentDir configdir
 	boolTahoe configdir "create-client"
 		[ Param "--nickname", Param "git-annex"
 		, Param "--introducer", Param furl
@@ -206,7 +207,8 @@ createClient configdir furl = do
 writeSharedConvergenceSecret :: TahoeConfigDir -> SharedConvergenceSecret -> IO ()
 writeSharedConvergenceSecret configdir scs =
-	writeFile (convergenceFile configdir) (unlines [scs])
+	writeFile (fromOsPath (convergenceFile configdir))
+		(unlines [scs])
 {- The tahoe daemon writes the convergenceFile shortly after it starts
 - (it does not need to connect to the network). So, try repeatedly to read
@@ -215,7 +217,7 @@ writeSharedConvergenceSecret configdir scs =
 getSharedConvergenceSecret :: TahoeConfigDir -> IO SharedConvergenceSecret
 getSharedConvergenceSecret configdir = go (60 :: Int)
  where
-	f = convergenceFile configdir
+	f = fromOsPath $ convergenceFile configdir
 	go n
 		| n == 0 = giveup $ "tahoe did not write " ++ f ++ " after 1 minute. Perhaps the daemon failed to start?"
 		| otherwise = do
@@ -227,8 +229,9 @@ getSharedConvergenceSecret configdir = go (60 :: Int)
 			threadDelaySeconds (Seconds 1)
 			go (n - 1)
-convergenceFile :: TahoeConfigDir -> FilePath
-convergenceFile configdir = configdir </> "private" </> "convergence"
+convergenceFile :: TahoeConfigDir -> OsPath
+convergenceFile configdir =
+	configdir </> literalOsPath "private" </> literalOsPath "convergence"
 startTahoeDaemon :: TahoeConfigDir -> IO ()
 startTahoeDaemon configdir = void $ boolTahoe configdir "start" []
@@ -267,7 +270,7 @@ readTahoe hdl command params = withTahoeConfigDir hdl $ \configdir ->
 tahoeParams :: TahoeConfigDir -> String -> [CommandParam] -> [CommandParam]
 tahoeParams configdir command params =
-	Param "-d" : File configdir : Param command : params
+	Param "-d" : File (fromOsPath configdir) : Param command : params
 storeCapability :: RemoteStateHandle -> Key -> Capability -> Annex ()
 storeCapability rs k cap = setRemoteState rs k cap

View file

@@ -176,11 +176,11 @@ retrieve hv cc = fileRetriever' $ \d k p iv ->
 	LegacyChunks _ -> do
 		-- Not doing incremental verification for chunks.
 		liftIO $ maybe noop unableIncrementalVerifier iv
-		retrieveLegacyChunked (fromRawFilePath d) k p dav
+		retrieveLegacyChunked (fromOsPath d) k p dav
 	_ -> liftIO $ goDAV dav $
-		retrieveHelper (keyLocation k) (fromRawFilePath d) p iv
+		retrieveHelper (keyLocation k) d p iv
 
-retrieveHelper :: DavLocation -> FilePath -> MeterUpdate -> Maybe IncrementalVerifier -> DAVT IO ()
+retrieveHelper :: DavLocation -> OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> DAVT IO ()
 retrieveHelper loc d p iv = do
 	debugDav $ "retrieve " ++ loc
 	inLocation loc $
@@ -213,14 +213,14 @@ checkKey hv chunkconfig k = withDavHandle hv $ \dav ->
 		existsDAV (keyLocation k)
 	either giveup return v
 
-storeExportDav :: DavHandleVar -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+storeExportDav :: DavHandleVar -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
 storeExportDav hdl f k loc p = case exportLocation loc of
 	Right dest -> withDavHandle hdl $ \h -> runExport h $ \dav -> do
 		reqbody <- liftIO $ httpBodyStorer f p
 		storeHelper dav (exportTmpLocation loc k) dest reqbody
 	Left err -> giveup err
 
-retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
+retrieveExportDav :: DavHandleVar -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
 retrieveExportDav hdl k loc d p = case exportLocation loc of
 	Right src -> verifyKeyContentIncrementally AlwaysVerify k $ \iv ->
 		withDavHandle hdl $ \h -> runExport h $ \_dav ->
@@ -247,7 +247,7 @@ removeExportDav hdl _k loc = case exportLocation loc of
 removeExportDirectoryDav :: DavHandleVar -> ExportDirectory -> Annex ()
 removeExportDirectoryDav hdl dir = withDavHandle hdl $ \h -> runExport h $ \_dav -> do
-	let d = fromRawFilePath $ fromExportDirectory dir
+	let d = fromOsPath $ fromExportDirectory dir
 	debugDav $ "delContent " ++ d
 	inLocation d delContentM
@@ -481,7 +481,7 @@ storeLegacyChunked annexrunner chunksize k dav b =
 	finalizer tmp' dest' = goDAV dav $
 		finalizeStore dav tmp' (fromJust $ locationParent dest')
-	tmp = addTrailingPathSeparator $ keyTmpLocation k
+	tmp = fromOsPath $ addTrailingPathSeparator $ toOsPath $ keyTmpLocation k
 	dest = keyLocation k
 
 retrieveLegacyChunked :: FilePath -> Key -> MeterUpdate -> DavHandle -> Annex ()
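
One line in the storeLegacyChunked hunk is worth a note: keyTmpLocation still yields a String-based DavLocation, so the value is lifted into OsPath purely to reuse the OsPath addTrailingPathSeparator, then dropped back down. A hedged sketch of that round trip with the standard API (in git-annex, toOsPath and fromOsPath are presumably cheap conversions, which would make this preferable to keeping a String flavor of the function in scope):

    import System.OsPath (encodeUtf, decodeUtf)
    import qualified System.OsPath as OP

    -- Lift a String location into OsPath, normalize it there,
    -- and convert back for the String-based API.
    withTrailingSep :: String -> IO String
    withTrailingSep loc = do
        p <- encodeUtf loc
        decodeUtf (OP.addTrailingPathSeparator p)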
@@ -31,6 +31,7 @@ module Types.Remote
 import Data.Ord
 
+import Common
 import qualified Git
 import Types.Key
 import Types.UUID
@@ -47,7 +48,6 @@ import Utility.Hash (IncrementalVerifier)
 import Config.Cost
 import Utility.Metered
 import Git.Types (RemoteName)
-import Utility.SafeCommand
 import Utility.Url
 import Utility.DataUnits
@@ -92,18 +92,18 @@ data RemoteA a = Remote
 	-- The key should not appear to be present on the remote until
 	-- all of its contents have been transferred.
 	-- Throws exception on failure.
-	, storeKey :: Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> a ()
+	, storeKey :: Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> a ()
 	-- Retrieves a key's contents to a file.
 	-- (The MeterUpdate does not need to be used if it writes
 	-- sequentially to the file.)
 	-- Throws exception on failure.
-	, retrieveKeyFile :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> VerifyConfigA a -> a Verification
+	, retrieveKeyFile :: Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfigA a -> a Verification
 	{- Will retrieveKeyFile write to the file in order? -}
 	, retrieveKeyFileInOrder :: a Bool
 	-- Retrieves a key's contents to a tmp file, if it can be done cheaply.
 	-- It's ok to create a symlink or hardlink.
 	-- Throws exception on failure.
-	, retrieveKeyFileCheap :: Maybe (Key -> AssociatedFile -> FilePath -> a ())
+	, retrieveKeyFileCheap :: Maybe (Key -> AssociatedFile -> OsPath -> a ())
 	-- Security policy for reteiving keys from this remote.
 	, retrievalSecurityPolicy :: RetrievalSecurityPolicy
 	-- Removes a key's contents (succeeds even the contents are not present)
@@ -147,7 +147,7 @@ data RemoteA a = Remote
 	-- a Remote's configuration from git
 	, gitconfig :: RemoteGitConfig
 	-- a Remote can be associated with a specific local filesystem path
-	, localpath :: Maybe FilePath
+	, localpath :: Maybe OsPath
 	-- a Remote can be known to be readonly
 	, readonly :: Bool
 	-- a Remote can allow writes but not have a way to delete content
@@ -270,12 +270,12 @@ data ExportActions a = ExportActions
 	-- The exported file should not appear to be present on the remote
 	-- until all of its contents have been transferred.
 	-- Throws exception on failure.
-	{ storeExport :: FilePath -> Key -> ExportLocation -> MeterUpdate -> a ()
+	{ storeExport :: OsPath -> Key -> ExportLocation -> MeterUpdate -> a ()
 	-- Retrieves exported content to a file.
 	-- (The MeterUpdate does not need to be used if it writes
 	-- sequentially to the file.)
 	-- Throws exception on failure.
-	, retrieveExport :: Key -> ExportLocation -> FilePath -> MeterUpdate -> a Verification
+	, retrieveExport :: Key -> ExportLocation -> OsPath -> MeterUpdate -> a Verification
 	-- Removes an exported file (succeeds if the contents are not present)
 	-- Can throw exception if unable to access remote, or if remote
 	-- refuses to remove the content.
@@ -351,7 +351,7 @@ data ImportActions a = ImportActions
 		:: ExportLocation
 		-> [ContentIdentifier]
 		-- file to write content to
-		-> FilePath
+		-> OsPath
 		-- Either the key, or when it's not yet known, a callback
 		-- that generates a key from the downloaded content.
 		-> Either Key (a Key)
@@ -376,7 +376,7 @@ data ImportActions a = ImportActions
 	--
 	-- Throws exception on failure.
 	, storeExportWithContentIdentifier
-		:: FilePath
+		:: OsPath
 		-> Key
 		-> ExportLocation
 		-- old content that it's safe to overwrite
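
With these record fields switched to OsPath, every backend implementing RemoteA now hands paths around without intermediate String conversions. Purely as an illustration of the new shape — not git-annex's implementation, which also threads AssociatedFile, MeterUpdate, and verification state — a toy directory-backed store can be written directly against the OsPath-aware directory API, assuming directory >= 1.3.8:

    import Control.Monad (when)
    import System.OsPath (OsPath, (</>))
    import System.Directory.OsPath (copyFile, doesFileExist, removeFile)

    newtype ToyRemote = ToyRemote { toyRoot :: OsPath }

    -- Store a key's content from a local source file into the remote.
    toyStoreKey :: ToyRemote -> OsPath -> OsPath -> IO ()
    toyStoreKey r keyname src = copyFile src (toyRoot r </> keyname)

    -- Retrieve a key's content into the given destination file.
    toyRetrieveKeyFile :: ToyRemote -> OsPath -> OsPath -> IO ()
    toyRetrieveKeyFile r keyname dest = copyFile (toyRoot r </> keyname) dest

    -- Removal succeeds even when the content is absent, matching the
    -- removal contract documented in the record above.
    toyRemoveKey :: ToyRemote -> OsPath -> IO ()
    toyRemoveKey r keyname = do
        let f = toyRoot r </> keyname
        e <- doesFileExist f
        when e (removeFile f)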
@@ -60,7 +60,7 @@ needsUpgrade v
 			g <- Annex.gitRepo
 			p <- liftIO $ absPath $ Git.repoPath g
 			return $ Just $ unwords
-				[ "Repository", fromRawFilePath p
+				[ "Repository", fromOsPath p
 				, "is at"
 				, if v `elem` supportedVersions
 					then "supported"
@@ -117,7 +117,7 @@ upgrade automatic destversion = go =<< getVersion
 	-- This avoids complicating the upgrade code by needing to handle
 	-- upgrading a git repo other than the current repo.
 	upgraderemote = do
-		rp <- fromRawFilePath <$> fromRepo Git.repoPath
+		rp <- fromOsPath <$> fromRepo Git.repoPath
 		ok <- gitAnnexChildProcess "upgrade"
 			[ Param "--quiet"
 			, Param "--autoonly"
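
Both hunks here are boundary conversions: Git.repoPath now yields an OsPath, which is decoded only where a String is genuinely needed — a user-facing message in the first hunk, and rp in the second, whose later use falls outside the hunk. A small sketch of the message case, with decodeUtf standing in for git-annex's fromOsPath:

    import System.OsPath (OsPath, decodeUtf)

    -- Keep the path abstract until it must appear in user-facing text.
    describeVersion :: OsPath -> String -> IO String
    describeVersion repo status = do
        p <- decodeUtf repo
        return (unwords ["Repository", p, "is at", status, "version"])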
@@ -22,11 +22,11 @@ upgrade = do
 	showAction "v0 to v1"
 
 	-- do the reorganisation of the key files
-	olddir <- fromRawFilePath <$> fromRepo gitAnnexDir
+	olddir <- fromRepo gitAnnexDir
 	keys <- getKeysPresent0 olddir
 	forM_ keys $ \k ->
 		moveAnnex k (AssociatedFile Nothing)
-			(toRawFilePath $ olddir </> keyFile0 k)
+			(olddir </> toOsPath (keyFile0 k))
 
 	-- update the symlinks to the key files
 	-- No longer needed here; V1.upgrade does the same thing
@@ -39,20 +39,18 @@ keyFile0 :: Key -> FilePath
 keyFile0 = Upgrade.V1.keyFile1
 
 fileKey0 :: FilePath -> Key
 fileKey0 = Upgrade.V1.fileKey1
 
-lookupKey0 :: FilePath -> Annex (Maybe (Key, Backend))
-lookupKey0 = Upgrade.V1.lookupKey1
-
-getKeysPresent0 :: FilePath -> Annex [Key]
+getKeysPresent0 :: OsPath -> Annex [Key]
 getKeysPresent0 dir = ifM (liftIO $ doesDirectoryExist dir)
-	( liftIO $ map fileKey0
+	( liftIO $ map (fileKey0 . fromOsPath)
 	<$> (filterM present =<< getDirectoryContents dir)
 	, return []
 	)
   where
 	present d = do
 		result <- tryIO $
-			R.getFileStatus $ toRawFilePath $
-				dir ++ "/" ++ takeFileName d
+			R.getFileStatus $ fromOsPath $
+				dir <> literalOsPath "/" <> takeFileName d
 		case result of
 			Right s -> return $ isRegularFile s
 			Left _ -> return False
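
getKeysPresent0 keeps its directory-listing logic but now takes an OsPath. Note that dir <> literalOsPath "/" <> takeFileName d is a byte-level mirror of the old dir ++ "/" ++ takeFileName d; </> would be the more conventional OsPath spelling. A hedged sketch of the same shape against the standard OsPath-aware directory API — doesFileExist is a simplification of the getFileStatus/isRegularFile check, since it follows symlinks:

    import Control.Monad (filterM)
    import Data.Maybe (mapMaybe)
    import System.OsPath (OsPath, (</>))
    import System.Directory.OsPath
        (doesDirectoryExist, doesFileExist, listDirectory)

    -- parse stands in for fileKey0 . fromOsPath above.
    keysPresent :: (OsPath -> Maybe k) -> OsPath -> IO [k]
    keysPresent parse dir = do
        exists <- doesDirectoryExist dir
        if not exists
            then return []
            else do
                names <- listDirectory dir
                files <- filterM (\n -> doesFileExist (dir </> n)) names
                return (mapMaybe parse files)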