From b28433072c93e966d17770357177dd4d879b2913 Mon Sep 17 00:00:00 2001 From: Joey Hess Date: Wed, 5 Feb 2025 12:14:56 -0400 Subject: [PATCH] more OsPath conversion (475/749) Sponsored-by: Nicholas Golder-Manning --- Annex/AutoMerge.hs | 75 +++++++++-------- Git/UpdateIndex.hs | 1 + Remote/Directory.hs | 194 +++++++++++++++++++++----------------------- Remote/External.hs | 31 +++---- Remote/Web.hs | 6 +- 5 files changed, 153 insertions(+), 154 deletions(-) diff --git a/Annex/AutoMerge.hs b/Annex/AutoMerge.hs index 0c0c203688..b097f03dff 100644 --- a/Annex/AutoMerge.hs +++ b/Annex/AutoMerge.hs @@ -133,7 +133,7 @@ autoMergeFrom' branch currbranch mergeconfig commitmode willresolvemerge toresol resolveMerge :: Maybe Git.Ref -> Git.Ref -> Bool -> Annex Bool resolveMerge us them inoverlay = do top <- if inoverlay - then pure "." + then pure (literalOsPath ".") else fromRepo Git.repoPath (fs, cleanup) <- inRepo (LsFiles.unmerged [top]) srcmap <- if inoverlay @@ -150,7 +150,7 @@ resolveMerge us them inoverlay = do unless (null deleted) $ Annex.Queue.addCommand [] "rm" [Param "--quiet", Param "-f", Param "--"] - (map fromRawFilePath deleted) + (map fromOsPath deleted) void $ liftIO cleanup2 when merged $ do @@ -167,7 +167,7 @@ resolveMerge us them inoverlay = do , LsFiles.unmergedSiblingFile u ] -resolveMerge' :: InodeMap -> Maybe Git.Ref -> Git.Ref -> Bool -> LsFiles.Unmerged -> Annex ([Key], Maybe FilePath) +resolveMerge' :: InodeMap -> Maybe Git.Ref -> Git.Ref -> Bool -> LsFiles.Unmerged -> Annex ([Key], Maybe OsPath) resolveMerge' _ Nothing _ _ _ = return ([], Nothing) resolveMerge' unstagedmap (Just us) them inoverlay u = do kus <- getkey LsFiles.valUs @@ -182,7 +182,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do -- files, so delete here. unless inoverlay $ unless (islocked LsFiles.valUs) $ - liftIO $ removeWhenExistsWith R.removeLink (toRawFilePath file) + liftIO $ removeWhenExistsWith removeFile file | otherwise -> resolveby [keyUs, keyThem] $ -- Only resolve using symlink when both -- were locked, otherwise use unlocked @@ -204,8 +204,8 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do -- Neither side is annexed file; cannot resolve. (Nothing, Nothing) -> return ([], Nothing) where - file = fromRawFilePath $ LsFiles.unmergedFile u - sibfile = fromRawFilePath <$> LsFiles.unmergedSiblingFile u + file = LsFiles.unmergedFile u + sibfile = LsFiles.unmergedSiblingFile u getkey select = case select (LsFiles.unmergedSha u) of @@ -230,16 +230,15 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do dest = variantFile file key destmode = fromTreeItemType <$> select (LsFiles.unmergedTreeItemType u) - stagefile :: FilePath -> Annex FilePath + stagefile :: OsPath -> Annex OsPath stagefile f - | inoverlay = ( f) . 
fromRawFilePath <$> fromRepo Git.repoPath + | inoverlay = ( f) <$> fromRepo Git.repoPath | otherwise = pure f makesymlink key dest = do - let rdest = toRawFilePath dest - l <- calcRepo $ gitAnnexLink rdest key - unless inoverlay $ replacewithsymlink rdest l - dest' <- toRawFilePath <$> stagefile dest + l <- fromOsPath <$> calcRepo (gitAnnexLink dest key) + unless inoverlay $ replacewithsymlink dest l + dest' <- stagefile dest stageSymlink dest' =<< hashSymlink l replacewithsymlink dest link = replaceWorkTreeFile dest $ @@ -248,27 +247,27 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do makepointer key dest destmode = do unless inoverlay $ unlessM (reuseOldFile unstagedmap key file dest) $ - linkFromAnnex key (toRawFilePath dest) destmode >>= \case + linkFromAnnex key dest destmode >>= \case LinkAnnexFailed -> liftIO $ - writePointerFile (toRawFilePath dest) key destmode + writePointerFile dest key destmode _ -> noop - dest' <- toRawFilePath <$> stagefile dest + dest' <- stagefile dest stagePointerFile dest' destmode =<< hashPointerFile key unless inoverlay $ Database.Keys.addAssociatedFile key - =<< inRepo (toTopFilePath (toRawFilePath dest)) + =<< inRepo (toTopFilePath dest) {- Stage a graft of a directory or file from a branch - and update the work tree. -} graftin b item selectwant selectwant' selectunwant = do Annex.Queue.addUpdateIndex - =<< fromRepo (UpdateIndex.lsSubTree b item) - + =<< fromRepo (UpdateIndex.lsSubTree b (fromOsPath item)) + let replacefile isexecutable = case selectwant' (LsFiles.unmergedSha u) of Nothing -> noop - Just sha -> replaceWorkTreeFile (toRawFilePath item) $ \tmp -> do + Just sha -> replaceWorkTreeFile item $ \tmp -> do c <- catObject sha - liftIO $ F.writeFile (toOsPath tmp) c + liftIO $ F.writeFile tmp c when isexecutable $ liftIO $ void $ tryIO $ modifyFileMode tmp $ @@ -281,7 +280,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do Nothing -> noop Just sha -> do link <- catSymLinkTarget sha - replacewithsymlink (toRawFilePath item) link + replacewithsymlink item (fromOsPath link) (Just TreeFile, Just TreeSymlink) -> replacefile False (Just TreeExecutable, Just TreeSymlink) -> replacefile True _ -> ifM (liftIO $ doesDirectoryExist item) @@ -305,9 +304,9 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do , Param "--cached" , Param "--" ] - (catMaybes [Just file, sibfile]) + (map fromOsPath $ catMaybes [Just file, sibfile]) liftIO $ maybe noop - (removeWhenExistsWith R.removeLink . toRawFilePath) + (removeWhenExistsWith removeFile) sibfile void a return (ks, Just file) @@ -322,13 +321,13 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do - C) are pointers to or have the content of keys that were involved - in the merge. -} -cleanConflictCruft :: [Key] -> [FilePath] -> InodeMap -> Annex () +cleanConflictCruft :: [Key] -> [OsPath] -> InodeMap -> Annex () cleanConflictCruft resolvedks resolvedfs unstagedmap = do is <- S.fromList . map (inodeCacheToKey Strongly) . 
concat <$> mapM Database.Keys.getInodeCaches resolvedks forM_ (M.toList unstagedmap) $ \(i, f) -> whenM (matchesresolved is i f) $ - liftIO $ removeWhenExistsWith R.removeLink (toRawFilePath f) + liftIO $ removeWhenExistsWith removeFile f where fs = S.fromList resolvedfs ks = S.fromList resolvedks @@ -336,19 +335,24 @@ cleanConflictCruft resolvedks resolvedfs unstagedmap = do matchesresolved is i f | S.member f fs || S.member (conflictCruftBase f) fs = anyM id [ pure $ either (const False) (`S.member` is) i - , inks <$> isAnnexLink (toRawFilePath f) - , inks <$> liftIO (isPointerFile (toRawFilePath f)) + , inks <$> isAnnexLink f + , inks <$> liftIO (isPointerFile f) ] | otherwise = return False -conflictCruftBase :: FilePath -> FilePath -conflictCruftBase f = reverse $ drop 1 $ dropWhile (/= '~') $ reverse f +conflictCruftBase :: OsPath -> OsPath +conflictCruftBase = toOsPath + . reverse + . drop 1 + . dropWhile (/= '~') + . reverse + . fromOsPath {- When possible, reuse an existing file from the srcmap as the - content of a worktree file in the resolved merge. It must have the - same name as the origfile, or a name that git would use for conflict - cruft. And, its inode cache must be a known one for the key. -} -reuseOldFile :: InodeMap -> Key -> FilePath -> FilePath -> Annex Bool +reuseOldFile :: InodeMap -> Key -> OsPath -> OsPath -> Annex Bool reuseOldFile srcmap key origfile destfile = do is <- map (inodeCacheToKey Strongly) <$> Database.Keys.getInodeCaches key @@ -374,19 +378,18 @@ commitResolvedMerge commitmode = do , Param "git-annex automatic merge conflict fix" ] -type InodeMap = M.Map (Either FilePath InodeCacheKey) FilePath +type InodeMap = M.Map (Either OsPath InodeCacheKey) OsPath -inodeMap :: Annex ([RawFilePath], IO Bool) -> Annex InodeMap +inodeMap :: Annex ([OsPath], IO Bool) -> Annex InodeMap inodeMap getfiles = do (fs, cleanup) <- getfiles fsis <- forM fs $ \f -> do - s <- liftIO $ R.getSymbolicLinkStatus f - let f' = fromRawFilePath f + s <- liftIO $ R.getSymbolicLinkStatus (fromOsPath f) if isSymbolicLink s - then pure $ Just (Left f', f') + then pure $ Just (Left f, f) else withTSDelta (\d -> liftIO $ toInodeCache d f s) >>= return . 
\case
-				Just i -> Just (Right (inodeCacheToKey Strongly i), f')
+				Just i -> Just (Right (inodeCacheToKey Strongly i), f)
 				Nothing -> Nothing
 	void $ liftIO cleanup
 	return $ M.fromList $ catMaybes fsis
diff --git a/Git/UpdateIndex.hs b/Git/UpdateIndex.hs
index c5f1d2f3e1..257fcd7763 100644
--- a/Git/UpdateIndex.hs
+++ b/Git/UpdateIndex.hs
@@ -81,6 +81,7 @@ lsTree (Ref x) repo streamer = do
 	void $ cleanup
   where
 	params = map Param ["ls-tree", "-z", "-r", "--full-tree", decodeBS x]
+
 lsSubTree :: Ref -> FilePath -> Repo -> Streamer
 lsSubTree (Ref x) p repo streamer = do
 	(s, cleanup) <- pipeNullSplit params repo
diff --git a/Remote/Directory.hs b/Remote/Directory.hs
index d2f03e0735..6acaf251f6 100644
--- a/Remote/Directory.hs
+++ b/Remote/Directory.hs
@@ -17,7 +17,6 @@ module Remote.Directory (
 
 import qualified Data.Map as M
 import qualified Data.List.NonEmpty as NE
-import qualified System.FilePath.ByteString as P
 import Data.Default
 import System.PosixCompat.Files (isRegularFile, deviceID)
 #ifndef mingw32_HOST_OS
@@ -132,11 +131,11 @@ gen r u rc gc rs = do
 		, config = c
 		, getRepo = return r
 		, gitconfig = gc
-		, localpath = Just dir'
+		, localpath = Just dir
 		, readonly = False
 		, appendonly = False
 		, untrustworthy = False
-		, availability = checkPathAvailability True dir'
+		, availability = checkPathAvailability True dir
 		, remotetype = remote
 		, mkUnavailable = gen r u rc
 			(gc { remoteAnnexDirectory = Just "/dev/null" }) rs
@@ -146,8 +145,9 @@ gen r u rc gc rs = do
 		, remoteStateHandle = rs
 		}
   where
-	dir = toRawFilePath dir'
-	dir' = fromMaybe (giveup "missing directory") (remoteAnnexDirectory gc)
+	dir = toOsPath dir'
+	dir' = fromMaybe (giveup "missing directory")
+		(remoteAnnexDirectory gc)
 
 directorySetup :: SetupStage -> Maybe UUID -> Maybe CredPair -> RemoteConfig -> RemoteGitConfig -> Annex (RemoteConfig, UUID)
 directorySetup _ mu _ c gc = do
@@ -155,43 +155,41 @@ directorySetup _ mu _ c gc = do
 	-- verify configuration is sane
 	let dir = maybe (giveup "Specify directory=") fromProposedAccepted $
 		M.lookup directoryField c
-	absdir <- liftIO $ fromRawFilePath <$> absPath (toRawFilePath dir)
+	absdir <- liftIO $ absPath (toOsPath dir)
 	liftIO $ unlessM (doesDirectoryExist absdir) $
-		giveup $ "Directory does not exist: " ++ absdir
+		giveup $ "Directory does not exist: " ++ fromOsPath absdir
 	(c', _encsetup) <- encryptionSetup c gc
 
 	-- The directory is stored in git config, not in this remote's
 	-- persistent state, so it can vary between hosts.
-	gitConfigSpecialRemote u c' [("directory", absdir)]
+	gitConfigSpecialRemote u c' [("directory", fromOsPath absdir)]
 	return (M.delete directoryField c', u)
 
 {- Locations to try to access a given Key in the directory.
 - We try more than one since we used to write to different hash
 - directories. -}
-locations :: RawFilePath -> Key -> NE.NonEmpty RawFilePath
-locations d k = NE.map (d P.</>) (keyPaths k)
+locations :: OsPath -> Key -> NE.NonEmpty OsPath
+locations d k = NE.map (d </>) (keyPaths k)
 
-locations' :: RawFilePath -> Key -> [RawFilePath]
+locations' :: OsPath -> Key -> [OsPath]
 locations' d k = NE.toList (locations d k)
 
 {- Returns the location of a Key in the directory. If the key is
 - present, returns the location that is actually used, otherwise
 - returns the first, default location. -}
-getLocation :: RawFilePath -> Key -> IO RawFilePath
+getLocation :: OsPath -> Key -> IO OsPath
 getLocation d k = do
 	let locs = locations d k
-	fromMaybe (NE.head locs)
-		<$> firstM (doesFileExist . fromRawFilePath)
-			(NE.toList locs)
+	fromMaybe (NE.head locs) <$> firstM doesFileExist (NE.toList locs)
 
 {- Directory where the file(s) for a key are stored. -}
-storeDir :: RawFilePath -> Key -> RawFilePath
-storeDir d k = P.addTrailingPathSeparator $
-	d P.</> hashDirLower def k P.</> keyFile k
+storeDir :: OsPath -> Key -> OsPath
+storeDir d k = addTrailingPathSeparator $
+	d </> hashDirLower def k </> keyFile k
 
 {- Check if there is enough free disk space in the remote's directory to
 - store the key. Note that the unencrypted key size is checked. -}
-storeKeyM :: RawFilePath -> ChunkConfig -> CopyCoWTried -> Storer
+storeKeyM :: OsPath -> ChunkConfig -> CopyCoWTried -> Storer
 storeKeyM d chunkconfig cow k c m =
 	ifM (checkDiskSpaceDirectory d k)
 		( do
@@ -203,16 +201,16 @@ storeKeyM d chunkconfig cow k c m =
 	store = case chunkconfig of
 		LegacyChunks chunksize ->
 			let go _k b p = liftIO $ Legacy.store
-				(fromRawFilePath d)
+				(fromOsPath d)
 				chunksize
 				(finalizeStoreGeneric d)
 				k b p
-				(fromRawFilePath tmpdir)
-				(fromRawFilePath destdir)
+				(fromOsPath tmpdir)
+				(fromOsPath destdir)
 			in byteStorer go k c m
 		NoChunks ->
 			let go _k src p = liftIO $ do
-				void $ fileCopier cow src tmpf p Nothing
+				void $ fileCopier cow (fromOsPath src) (fromOsPath tmpf) p Nothing
 				finalizeStoreGeneric d tmpdir destdir
 			in fileStorer go k c m
 		_ ->
@@ -221,60 +219,58 @@ storeKeyM d chunkconfig cow k c m =
 				finalizeStoreGeneric d tmpdir destdir
 			in byteStorer go k c m
 
-	tmpdir = P.addTrailingPathSeparator $ d P.</> "tmp" P.</> kf
-	tmpf = fromRawFilePath tmpdir </> fromRawFilePath kf
+	tmpdir = addTrailingPathSeparator $ d </> literalOsPath "tmp" </> kf
+	tmpf = tmpdir </> kf
 	kf = keyFile k
 	destdir = storeDir d k
 
-checkDiskSpaceDirectory :: RawFilePath -> Key -> Annex Bool
+checkDiskSpaceDirectory :: OsPath -> Key -> Annex Bool
 checkDiskSpaceDirectory d k = do
 	annexdir <- fromRepo gitAnnexObjectDir
 	samefilesystem <- liftIO $ catchDefaultIO False $
 		(\a b -> deviceID a == deviceID b)
-			<$> R.getSymbolicLinkStatus d
-			<*> R.getSymbolicLinkStatus annexdir
+			<$> R.getSymbolicLinkStatus (fromOsPath d)
+			<*> R.getSymbolicLinkStatus (fromOsPath annexdir)
 	checkDiskSpace Nothing (Just d) k 0 samefilesystem
 
 {- Passed a temp directory that contains the files that should be placed
 - in the dest directory, moves it into place. Anything already existing
 - in the dest directory will be deleted. File permissions will be locked
 - down. -}
-finalizeStoreGeneric :: RawFilePath -> RawFilePath -> RawFilePath -> IO ()
+finalizeStoreGeneric :: OsPath -> OsPath -> OsPath -> IO ()
 finalizeStoreGeneric d tmp dest = do
 	removeDirGeneric False d dest
 	createDirectoryUnder [d] (parentDir dest)
-	renameDirectory (fromRawFilePath tmp) dest'
+	renameDirectory tmp dest
 	-- may fail on some filesystems
 	void $ tryIO $ do
 		mapM_ preventWrite =<< dirContents dest
 		preventWrite dest
-  where
-	dest' = fromRawFilePath dest
 
-retrieveKeyFileM :: RawFilePath -> ChunkConfig -> CopyCoWTried -> Retriever
+retrieveKeyFileM :: OsPath -> ChunkConfig -> CopyCoWTried -> Retriever
 retrieveKeyFileM d (LegacyChunks _) _ = Legacy.retrieve locations' d
 retrieveKeyFileM d NoChunks cow = fileRetriever' $ \dest k p iv -> do
-	src <- liftIO $ fromRawFilePath <$> getLocation d k
-	void $ liftIO $ fileCopier cow src (fromRawFilePath dest) p iv
+	src <- liftIO $ getLocation d k
+	void $ liftIO $ fileCopier cow (fromOsPath src) (fromOsPath dest) p iv
 retrieveKeyFileM d _ _ = byteRetriever $ \k sink ->
-	sink =<< liftIO (F.readFile . 
toOsPath =<< getLocation d k) + sink =<< liftIO (F.readFile =<< getLocation d k) -retrieveKeyFileCheapM :: RawFilePath -> ChunkConfig -> Maybe (Key -> AssociatedFile -> FilePath -> Annex ()) +retrieveKeyFileCheapM :: OsPath -> ChunkConfig -> Maybe (Key -> AssociatedFile -> OsPath -> Annex ()) -- no cheap retrieval possible for chunks retrieveKeyFileCheapM _ (UnpaddedChunks _) = Nothing retrieveKeyFileCheapM _ (LegacyChunks _) = Nothing #ifndef mingw32_HOST_OS retrieveKeyFileCheapM d NoChunks = Just $ \k _af f -> liftIO $ do - file <- fromRawFilePath <$> (absPath =<< getLocation d k) + file <- absPath =<< getLocation d k ifM (doesFileExist file) - ( R.createSymbolicLink (toRawFilePath file) (toRawFilePath f) + ( R.createSymbolicLink (fromOsPath file) (fromOsPath f) , giveup "content file not present in remote" ) #else retrieveKeyFileCheapM _ _ = Nothing #endif -removeKeyM :: RawFilePath -> Remover +removeKeyM :: OsPath -> Remover removeKeyM d _proof k = liftIO $ removeDirGeneric True d (storeDir d k) {- Removes the directory, which must be located under the topdir. @@ -291,7 +287,7 @@ removeKeyM d _proof k = liftIO $ removeDirGeneric True d (storeDir d k) - can also be removed. Failure to remove such a directory is not treated - as an error. -} -removeDirGeneric :: Bool -> RawFilePath -> RawFilePath -> IO () +removeDirGeneric :: Bool -> OsPath -> OsPath -> IO () removeDirGeneric removeemptyparents topdir dir = do void $ tryIO $ allowWrite dir #ifdef mingw32_HOST_OS @@ -299,102 +295,100 @@ removeDirGeneric removeemptyparents topdir dir = do - before it can delete them. -} void $ tryIO $ mapM_ allowWrite =<< dirContents dir #endif - tryNonAsync (removeDirectoryRecursive dir') >>= \case + tryNonAsync (removeDirectoryRecursive dir) >>= \case Right () -> return () Left e -> - unlessM (doesDirectoryExist topdir' <&&> (not <$> doesDirectoryExist dir')) $ + unlessM (doesDirectoryExist topdir <&&> (not <$> doesDirectoryExist dir)) $ throwM e when removeemptyparents $ do - subdir <- relPathDirToFile topdir (P.takeDirectory dir) - goparents (Just (P.takeDirectory subdir)) (Right ()) + subdir <- relPathDirToFile topdir (takeDirectory dir) + goparents (Just (takeDirectory subdir)) (Right ()) where goparents _ (Left _e) = return () goparents Nothing _ = return () goparents (Just subdir) _ = do - let d = topdir' fromRawFilePath subdir + let d = topdir subdir goparents (upFrom subdir) =<< tryIO (removeDirectory d) - dir' = fromRawFilePath dir - topdir' = fromRawFilePath topdir -checkPresentM :: RawFilePath -> ChunkConfig -> CheckPresent +checkPresentM :: OsPath -> ChunkConfig -> CheckPresent checkPresentM d (LegacyChunks _) k = Legacy.checkKey d locations' k checkPresentM d _ k = checkPresentGeneric d (locations' d k) -checkPresentGeneric :: RawFilePath -> [RawFilePath] -> Annex Bool +checkPresentGeneric :: OsPath -> [OsPath] -> Annex Bool checkPresentGeneric d ps = checkPresentGeneric' d $ - liftIO $ anyM (doesFileExist . 
fromRawFilePath) ps
+	liftIO $ anyM doesFileExist ps
 
-checkPresentGeneric' :: RawFilePath -> Annex Bool -> Annex Bool
+checkPresentGeneric' :: OsPath -> Annex Bool -> Annex Bool
 checkPresentGeneric' d check = ifM check
 	( return True
-	, ifM (liftIO $ doesDirectoryExist (fromRawFilePath d))
+	, ifM (liftIO $ doesDirectoryExist d)
 		( return False
-		, giveup $ "directory " ++ fromRawFilePath d ++ " is not accessible"
+		, giveup $ "directory " ++ fromOsPath d ++ " is not accessible"
 		)
 	)
 
-storeExportM :: RawFilePath -> CopyCoWTried -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
+storeExportM :: OsPath -> CopyCoWTried -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex ()
 storeExportM d cow src _k loc p = do
-	liftIO $ createDirectoryUnder [d] (P.takeDirectory dest)
+	liftIO $ createDirectoryUnder [d] (takeDirectory dest)
 	-- Write via temp file so that checkPresentGeneric will not
 	-- see it until it's fully stored.
-	viaTmp go (toOsPath dest) ()
+	viaTmp go dest ()
   where
 	dest = exportPath d loc
-	go tmp () = void $ liftIO $ fileCopier cow src (fromRawFilePath (fromOsPath tmp)) p Nothing
+	go tmp () = void $ liftIO $
+		fileCopier cow (fromOsPath src) (fromOsPath tmp) p Nothing
 
-retrieveExportM :: RawFilePath -> CopyCoWTried -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
+retrieveExportM :: OsPath -> CopyCoWTried -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
 retrieveExportM d cow k loc dest p =
 	verifyKeyContentIncrementally AlwaysVerify k $ \iv ->
-		void $ liftIO $ fileCopier cow src dest p iv
+		void $ liftIO $ fileCopier cow src (fromOsPath dest) p iv
   where
-	src = fromRawFilePath $ exportPath d loc
+	src = fromOsPath $ exportPath d loc
 
-removeExportM :: RawFilePath -> Key -> ExportLocation -> Annex ()
+removeExportM :: OsPath -> Key -> ExportLocation -> Annex ()
 removeExportM d _k loc = liftIO $ do
-	removeWhenExistsWith R.removeLink src
+	removeWhenExistsWith removeFile src
 	removeExportLocation d loc
   where
 	src = exportPath d loc
 
-checkPresentExportM :: RawFilePath -> Key -> ExportLocation -> Annex Bool
+checkPresentExportM :: OsPath -> Key -> ExportLocation -> Annex Bool
 checkPresentExportM d _k loc = checkPresentGeneric d [exportPath d loc]
 
-renameExportM :: RawFilePath -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
+renameExportM :: OsPath -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ())
 renameExportM d _k oldloc newloc = liftIO $ do
-	createDirectoryUnder [d] (P.takeDirectory dest)
-	renameFile (fromRawFilePath src) (fromRawFilePath dest)
+	createDirectoryUnder [d] (takeDirectory dest)
+	renameFile src dest
 	removeExportLocation d oldloc
 	return (Just ())
   where
 	src = exportPath d oldloc
 	dest = exportPath d newloc
 
-exportPath :: RawFilePath -> ExportLocation -> RawFilePath
-exportPath d loc = d P.</> fromExportLocation loc
+exportPath :: OsPath -> ExportLocation -> OsPath
+exportPath d loc = d </> fromExportLocation loc
 
 {- Removes the ExportLocation's parent directory and its parents, so long as
 - they're empty, up to but not including the topdir. -}
-removeExportLocation :: RawFilePath -> ExportLocation -> IO ()
+removeExportLocation :: OsPath -> ExportLocation -> IO ()
 removeExportLocation topdir loc =
-	go (Just $ P.takeDirectory $ fromExportLocation loc) (Right ())
+	go (Just $ takeDirectory $ fromExportLocation loc) (Right ())
   where
 	go _ (Left _e) = return ()
 	go Nothing _ = return ()
 	go (Just loc') _ =
-		let p = fromRawFilePath $ exportPath topdir $
-			mkExportLocation loc'
+		let p = exportPath topdir $ mkExportLocation loc'
 		in go (upFrom loc') =<< tryIO (removeDirectory p)
 
-listImportableContentsM :: IgnoreInodes -> RawFilePath -> Annex (Maybe (ImportableContentsChunkable Annex (ContentIdentifier, ByteSize)))
+listImportableContentsM :: IgnoreInodes -> OsPath -> Annex (Maybe (ImportableContentsChunkable Annex (ContentIdentifier, ByteSize)))
 listImportableContentsM ii dir = liftIO $ do
 	l' <- mapM go =<< dirContentsRecursiveSkipping (const False) False dir
 	return $ Just $ ImportableContentsComplete $
 		ImportableContents (catMaybes l') []
   where
 	go f = do
-		st <- R.getSymbolicLinkStatus f
+		st <- R.getSymbolicLinkStatus (fromOsPath f)
 		mkContentIdentifier ii f st >>= \case
 			Nothing -> return Nothing
 			Just cid -> do
@@ -408,7 +402,7 @@ newtype IgnoreInodes = IgnoreInodes Bool
 -- and also normally the inode, unless ignoreinodes=yes.
 --
 -- If the file is not a regular file, this will return Nothing.
-mkContentIdentifier :: IgnoreInodes -> RawFilePath -> FileStatus -> IO (Maybe ContentIdentifier)
+mkContentIdentifier :: IgnoreInodes -> OsPath -> FileStatus -> IO (Maybe ContentIdentifier)
 mkContentIdentifier (IgnoreInodes ii) f st =
 	liftIO $ fmap (ContentIdentifier . encodeBS . showInodeCache)
 		<$> if ii
@@ -434,25 +428,25 @@ guardSameContentIdentifiers cont olds (Just new)
 	let ic' = replaceInode 0 ic
 	in ContentIdentifier (encodeBS (showInodeCache ic'))
 
-importKeyM :: IgnoreInodes -> RawFilePath -> ExportLocation -> ContentIdentifier -> ByteSize -> MeterUpdate -> Annex (Maybe Key)
+importKeyM :: IgnoreInodes -> OsPath -> ExportLocation -> ContentIdentifier -> ByteSize -> MeterUpdate -> Annex (Maybe Key)
 importKeyM ii dir loc cid sz p = do
 	backend <- chooseBackend f
 	unsizedk <- fst <$> genKey ks p backend
 	let k = alterKey unsizedk $ \kd ->
 		kd { keySize = keySize kd <|> Just sz }
 	currcid <- liftIO $ mkContentIdentifier ii absf
-		=<< R.getSymbolicLinkStatus absf
+		=<< R.getSymbolicLinkStatus (fromOsPath absf)
 	guardSameContentIdentifiers (return (Just k)) [cid] currcid
   where
 	f = fromExportLocation loc
 	absf = dir P.</> 
f + absf = dir f ks = KeySource { keyFilename = f , contentLocation = absf , inodeCache = Nothing } -retrieveExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> CopyCoWTried -> ExportLocation -> [ContentIdentifier] -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification) +retrieveExportWithContentIdentifierM :: IgnoreInodes -> OsPath -> CopyCoWTried -> ExportLocation -> [ContentIdentifier] -> OsPath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification) retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p = case gk of Right mkkey -> do @@ -464,11 +458,11 @@ retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p = return (k, v) where f = exportPath dir loc - f' = fromRawFilePath f - + f' = fromOsPath f + go iv = precheck (docopy iv) - docopy iv = ifM (liftIO $ tryCopyCoW cow f' dest p) + docopy iv = ifM (liftIO $ tryCopyCoW cow (fromOsPath f) (fromOsPath dest) p) ( postcheckcow (liftIO $ maybe noop unableIncrementalVerifier iv) , docopynoncow iv ) @@ -477,7 +471,7 @@ retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p = #ifndef mingw32_HOST_OS let open = do -- Need a duplicate fd for the post check. - fd <- openFdWithMode f ReadOnly Nothing defaultFileFlags + fd <- openFdWithMode f' ReadOnly Nothing defaultFileFlags dupfd <- dup fd h <- fdToHandle fd return (h, dupfd) @@ -490,7 +484,7 @@ retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p = let close = hClose bracketIO open close $ \h -> do #endif - liftIO $ fileContentCopier h dest p iv + liftIO $ fileContentCopier h (fromOsPath dest) p iv #ifndef mingw32_HOST_OS postchecknoncow dupfd (return ()) #else @@ -501,7 +495,7 @@ retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p = -- content. precheck cont = guardSameContentIdentifiers cont cids =<< liftIO . mkContentIdentifier ii f - =<< liftIO (R.getSymbolicLinkStatus f) + =<< liftIO (R.getSymbolicLinkStatus f') -- Check after copy, in case the file was changed while it was -- being copied. @@ -525,7 +519,7 @@ retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p = #ifndef mingw32_HOST_OS =<< getFdStatus fd #else - =<< R.getSymbolicLinkStatus f + =<< R.getSymbolicLinkStatus f' #endif guardSameContentIdentifiers cont cids currcid @@ -536,37 +530,37 @@ retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p = -- restored to the original content before this check. postcheckcow cont = do currcid <- liftIO $ mkContentIdentifier ii f - =<< R.getSymbolicLinkStatus f + =<< R.getSymbolicLinkStatus f' guardSameContentIdentifiers cont cids currcid -storeExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> CopyCoWTried -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier +storeExportWithContentIdentifierM :: IgnoreInodes -> OsPath -> CopyCoWTried -> OsPath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier storeExportWithContentIdentifierM ii dir cow src _k loc overwritablecids p = do liftIO $ createDirectoryUnder [dir] destdir - withTmpFileIn (toOsPath destdir) template $ \tmpf tmph -> do + withTmpFileIn destdir template $ \tmpf tmph -> do let tmpf' = fromOsPath tmpf liftIO $ hClose tmph - void $ liftIO $ fileCopier cow src (fromRawFilePath tmpf') p Nothing - resetAnnexFilePerm tmpf' - liftIO (R.getSymbolicLinkStatus tmpf') >>= liftIO . 
mkContentIdentifier ii tmpf' >>= \case + void $ liftIO $ fileCopier cow (fromOsPath src) (fromOsPath tmpf) p Nothing + resetAnnexFilePerm tmpf + liftIO (R.getSymbolicLinkStatus tmpf') >>= liftIO . mkContentIdentifier ii tmpf >>= \case Nothing -> giveup "unable to generate content identifier" Just newcid -> do checkExportContent ii dir loc overwritablecids (giveup "unsafe to overwrite file") - (const $ liftIO $ R.rename tmpf' dest) + (const $ liftIO $ R.rename tmpf' (fromOsPath dest)) return newcid where dest = exportPath dir loc - (destdir, base) = P.splitFileName dest - template = relatedTemplate (base <> ".tmp") + (destdir, base) = splitFileName dest + template = relatedTemplate (fromOsPath base <> ".tmp") -removeExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex () +removeExportWithContentIdentifierM :: IgnoreInodes -> OsPath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex () removeExportWithContentIdentifierM ii dir k loc removeablecids = checkExportContent ii dir loc removeablecids (giveup "unsafe to remove modified file") $ \case DoesNotExist -> return () KnownContentIdentifier -> removeExportM dir k loc -checkPresentExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool +checkPresentExportWithContentIdentifierM :: IgnoreInodes -> OsPath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool checkPresentExportWithContentIdentifierM ii dir _k loc knowncids = checkPresentGeneric' dir $ checkExportContent ii dir loc knowncids (return False) $ \case @@ -590,9 +584,9 @@ data CheckResult = DoesNotExist | KnownContentIdentifier -- -- So, it suffices to check if the destination file's current -- content is known, and immediately run the callback. 
-checkExportContent :: IgnoreInodes -> RawFilePath -> ExportLocation -> [ContentIdentifier] -> Annex a -> (CheckResult -> Annex a) -> Annex a +checkExportContent :: IgnoreInodes -> OsPath -> ExportLocation -> [ContentIdentifier] -> Annex a -> (CheckResult -> Annex a) -> Annex a checkExportContent ii dir loc knowncids unsafe callback = - tryWhenExists (liftIO $ R.getSymbolicLinkStatus dest) >>= \case + tryWhenExists (liftIO $ R.getSymbolicLinkStatus (fromOsPath dest)) >>= \case Just destst | not (isRegularFile destst) -> unsafe | otherwise -> catchDefaultIO Nothing (liftIO $ mkContentIdentifier ii dest destst) >>= \case diff --git a/Remote/External.hs b/Remote/External.hs index 882fa22888..251ca666fe 100644 --- a/Remote/External.hs +++ b/Remote/External.hs @@ -237,9 +237,10 @@ checkExportSupported' external = go `catchNonAsync` (const (return False)) storeKeyM :: External -> Storer storeKeyM external = fileStorer $ \k f p -> - either giveup return =<< go k f p + either giveup return =<< go k p + (\sk -> TRANSFER Upload sk (fromOsPath f)) where - go k f p = handleRequestKey external (\sk -> TRANSFER Upload sk f) k (Just p) $ \resp -> + go k p mkreq = handleRequestKey external mkreq k (Just p) $ \resp -> case resp of TRANSFER_SUCCESS Upload k' | k == k' -> result (Right ()) @@ -251,7 +252,7 @@ retrieveKeyFileM :: External -> Retriever retrieveKeyFileM external = fileRetriever $ \d k p -> either giveup return =<< watchFileSize d p (go d k) where - go d k p = handleRequestKey external (\sk -> TRANSFER Download sk (fromRawFilePath d)) k (Just p) $ \resp -> + go d k p = handleRequestKey external (\sk -> TRANSFER Download sk (fromOsPath d)) k (Just p) $ \resp -> case resp of TRANSFER_SUCCESS Download k' | k == k' -> result $ Right () @@ -293,7 +294,7 @@ whereisKeyM external k = handleRequestKey external WHEREIS k Nothing $ \resp -> UNSUPPORTED_REQUEST -> result [] _ -> Nothing -storeExportM :: External -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex () +storeExportM :: External -> OsPath -> Key -> ExportLocation -> MeterUpdate -> Annex () storeExportM external f k loc p = either giveup return =<< go where go = handleRequestExport external loc req k (Just p) $ \resp -> case resp of @@ -303,12 +304,12 @@ storeExportM external f k loc p = either giveup return =<< go UNSUPPORTED_REQUEST -> result $ Left "TRANSFEREXPORT not implemented by external special remote" _ -> Nothing - req sk = TRANSFEREXPORT Upload sk f + req sk = TRANSFEREXPORT Upload sk (fromOsPath f) -retrieveExportM :: External -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification +retrieveExportM :: External -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification retrieveExportM external k loc dest p = do verifyKeyContentIncrementally AlwaysVerify k $ \iv -> - tailVerify iv (toRawFilePath dest) $ + tailVerify iv dest $ either giveup return =<< go where go = handleRequestExport external loc req k (Just p) $ \resp -> case resp of @@ -319,7 +320,7 @@ retrieveExportM external k loc dest p = do UNSUPPORTED_REQUEST -> result $ Left "TRANSFEREXPORT not implemented by external special remote" _ -> Nothing - req sk = TRANSFEREXPORT Download sk dest + req sk = TRANSFEREXPORT Download sk (fromOsPath dest) checkPresentExportM :: External -> Key -> ExportLocation -> Annex Bool checkPresentExportM external k loc = either giveup id <$> go @@ -429,9 +430,9 @@ handleRequest' st external req mp responsehandler handleRemoteRequest (PROGRESS bytesprocessed) = maybe noop (\a -> liftIO $ a bytesprocessed) mp 
handleRemoteRequest (DIRHASH k) = - send $ VALUE $ fromRawFilePath $ hashDirMixed def k + send $ VALUE $ fromOsPath $ hashDirMixed def k handleRemoteRequest (DIRHASH_LOWER k) = - send $ VALUE $ fromRawFilePath $ hashDirLower def k + send $ VALUE $ fromOsPath $ hashDirLower def k handleRemoteRequest (SETCONFIG setting value) = liftIO $ atomically $ do ParsedRemoteConfig m c <- takeTMVar (externalConfig st) @@ -480,7 +481,7 @@ handleRequest' st external req mp responsehandler Just u -> send $ VALUE $ fromUUID u Nothing -> senderror "cannot send GETUUID here" handleRemoteRequest GETGITDIR = - send . VALUE . fromRawFilePath =<< fromRepo Git.localGitDir + send . VALUE . fromOsPath =<< fromRepo Git.localGitDir handleRemoteRequest GETGITREMOTENAME = case externalRemoteName external of Just n -> send $ VALUE n @@ -526,7 +527,7 @@ handleRequest' st external req mp responsehandler senderror = sendMessage st . ERROR credstorage setting u = CredPairStorage - { credPairFile = base + { credPairFile = toOsPath base , credPairEnvironment = (base ++ "login", base ++ "password") , credPairRemoteField = Accepted setting } @@ -824,19 +825,19 @@ checkUrlM :: External -> URLString -> Annex UrlContents checkUrlM external url = handleRequest external (CHECKURL url) Nothing $ \req -> case req of CHECKURL_CONTENTS sz f -> result $ UrlContents sz $ - if null f then Nothing else Just f + if null f then Nothing else Just (toOsPath f) CHECKURL_MULTI l -> result $ UrlMulti $ map mkmulti l CHECKURL_FAILURE errmsg -> Just $ giveup $ respErrorMessage "CHECKURL" errmsg UNSUPPORTED_REQUEST -> giveup "CHECKURL not implemented by external special remote" _ -> Nothing where - mkmulti (u, s, f) = (u, s, f) + mkmulti (u, s, f) = (u, s, toOsPath f) retrieveUrl :: Retriever retrieveUrl = fileRetriever' $ \f k p iv -> do us <- getWebUrls k - unlessM (withUrlOptions $ downloadUrl True k p iv us (fromRawFilePath f)) $ + unlessM (withUrlOptions $ downloadUrl True k p iv us f) $ giveup "failed to download content" checkKeyUrl :: CheckPresent diff --git a/Remote/Web.hs b/Remote/Web.hs index 87232b3dfb..4728a64c6a 100644 --- a/Remote/Web.hs +++ b/Remote/Web.hs @@ -116,7 +116,7 @@ setupInstance _ mu _ c _ = do gitConfigSpecialRemote u c [("web", "true")] return (c, u) -downloadKey :: UrlIncludeExclude -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> VerifyConfig -> Annex Verification +downloadKey :: UrlIncludeExclude -> Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification downloadKey urlincludeexclude key _af dest p vc = go =<< getWebUrls' urlincludeexclude key where @@ -175,14 +175,14 @@ downloadKey urlincludeexclude key _af dest p vc = let b = if isCryptographicallySecure db then db else defaultHashBackend - generateEquivilantKey b (toRawFilePath dest) >>= \case + generateEquivilantKey b dest >>= \case Nothing -> return Nothing Just ek -> do unless (ek `elem` eks) $ setEquivilantKey key ek return (Just Verified) -uploadKey :: Key -> AssociatedFile -> Maybe FilePath -> MeterUpdate -> Annex () +uploadKey :: Key -> AssociatedFile -> Maybe OsPath -> MeterUpdate -> Annex () uploadKey _ _ _ _ = giveup "upload to web not supported" dropKey :: UrlIncludeExclude -> Maybe SafeDropProof -> Key -> Annex ()
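
The pattern this patch applies throughout is to keep paths as OsPath end to end and convert only at API boundaries (git-annex's own fromOsPath/toOsPath/literalOsPath helpers). Below is a minimal standalone sketch of that pattern, not git-annex code: it assumes the filepath package's System.OsPath (>= 1.4.100) and directory's System.Directory.OsPath (>= 1.3.8), and all names and paths in it are illustrative, loosely modelled on locations/getLocation in Remote/Directory.hs.

```haskell
{-# LANGUAGE QuasiQuotes #-}

-- Illustrative sketch only, not git-annex code.
import Control.Monad (filterM)
import Data.Maybe (fromMaybe, listToMaybe)
import System.OsPath (OsPath, osp, (</>), decodeUtf)
import System.Directory.OsPath (doesFileExist)

-- Candidate object locations under a base directory, kept as OsPath
-- throughout, joined with the OsPath </> operator.
locations :: OsPath -> [OsPath]
locations base = map (base </>) [[osp|aaa|] </> [osp|key|], [osp|bbb|] </> [osp|key|]]

-- Pick the first location that exists, falling back to the default,
-- without round-tripping through String or ByteString.
getLocation :: OsPath -> IO OsPath
getLocation base = do
	let locs = locations base
	fromMaybe (head locs) . listToMaybe <$> filterM doesFileExist locs

main :: IO ()
main = do
	loc <- getLocation [osp|/tmp/demo|]
	-- Convert to FilePath only at the display boundary.
	putStrLn =<< decodeUtf loc
```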
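The other recurring idiom in the diff is wrapping an API that still takes FilePath or RawFilePath (fileCopier, R.getSymbolicLinkStatus) by converting at the call site only. A hedged sketch of that idiom, again standalone and not git-annex code; copyLegacy is a hypothetical stand-in for such an unconverted function:

```haskell
-- Illustrative sketch only: convert at the boundary, keep OsPath elsewhere.
import System.OsPath (OsPath, decodeUtf)

-- Stand-in for an API that has not yet been converted to OsPath.
copyLegacy :: FilePath -> FilePath -> IO ()
copyLegacy _src _dst = return ()

copyAtBoundary :: OsPath -> OsPath -> IO ()
copyAtBoundary src dst = do
	src' <- decodeUtf src  -- convert only where the legacy API needs it
	dst' <- decodeUtf dst
	copyLegacy src' dst'
```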