From 1ceece3108f03badcca0d9c64cd287f9352656b3 Mon Sep 17 00:00:00 2001 From: Joey Hess Date: Mon, 20 Jan 2025 18:03:26 -0400 Subject: [PATCH] RawFilePath conversion of System.Directory By using System.Directory.OsPath, which takes and returns OsString, which is a ShortByteString. So, things like dirContents currently have the overhead of copying that to a ByteString, but that should be less than the overhead of using Strings which often in turn were converted to RawFilePaths. Added Utility.OsString and the OsString build flag. That flag is turned on in the stack.yaml, and will be turned on automatically by cabal when built with new enough libraries. The stack.yaml change is a bit ugly, and that could be reverted for now if it causes any problems. Note that Utility.OsString.toOsString on windows is avoiding only a check of encoding that is documented as being unlikely to fail. I don't think it can fail in git-annex; if it could, git-annex didn't contain such an encoding check before, so at worst that should be a wash. --- Annex/AdjustedBranch/Merge.hs | 13 +++--- Annex/Branch.hs | 2 +- Annex/Content.hs | 8 ++-- Annex/RepoSize/LiveUpdate.hs | 2 +- Annex/Ssh.hs | 30 +++++++------- Annex/Tmp.hs | 10 +++-- Annex/YoutubeDl.hs | 9 +++-- Assistant/Repair.hs | 19 +++++---- Assistant/Threads/TransferWatcher.hs | 6 +-- Assistant/Upgrade.hs | 11 ++--- Assistant/WebApp/Configurators/Delete.hs | 2 +- Build/LinuxMkLibs.hs | 2 +- CmdLine/GitRemoteAnnex.hs | 5 ++- CmdLine/Seek.hs | 6 +-- Command/P2PHttp.hs | 4 +- Command/Uninit.hs | 10 ++--- Git/Objects.hs | 10 ++--- Git/Repair.hs | 24 +++++------ Logs/Transfer.hs | 15 +++---- Remote/BitTorrent.hs | 13 +++--- Remote/Directory.hs | 5 +-- Remote/Helper/Git.hs | 9 +++-- Test/Framework.hs | 4 +- Types/Direction.hs | 2 +- Upgrade/V2.hs | 6 +-- Utility/DirWatcher/INotify.hs | 2 +- Utility/DirWatcher/Kqueue.hs | 2 +- Utility/Directory.hs | 51 +++++++++++++++++------- Utility/Directory/Stream.hs | 3 +- Utility/LockFile/PidLock.hs | 8 ++-- Utility/OsString.hs | 38 ++++++++++++++++++ doc/todo/RawFilePath_conversion.mdwn | 7 ---- git-annex.cabal | 11 +++++ stack.yaml | 11 ++++- 34 files changed, 222 insertions(+), 138 deletions(-) create mode 100644 Utility/OsString.hs diff --git a/Annex/AdjustedBranch/Merge.hs b/Annex/AdjustedBranch/Merge.hs index 904f4ee412..5a88a8e79f 100644 --- a/Annex/AdjustedBranch/Merge.hs +++ b/Annex/AdjustedBranch/Merge.hs @@ -29,6 +29,7 @@ import Annex.GitOverlay import Utility.Tmp.Dir import Utility.CopyFile import Utility.Directory.Create +import qualified Utility.RawFilePath as R import qualified Data.ByteString as S import qualified System.FilePath.ByteString as P @@ -72,7 +73,6 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm -} changestomerge (Just updatedorig) = withOtherTmp $ \othertmpdir -> do git_dir <- fromRepo Git.localGitDir - let git_dir' = fromRawFilePath git_dir tmpwt <- fromRepo gitAnnexMergeDir withTmpDirIn (fromRawFilePath othertmpdir) "git" $ \tmpgit -> withWorkTreeRelated tmpgit $ withemptydir git_dir tmpwt $ withWorkTree tmpwt $ do @@ -82,16 +82,15 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm -- causes it not to look in GIT_DIR for refs. refs <- liftIO $ emptyWhenDoesNotExist $ dirContentsRecursive $ - git_dir' "refs" - let refs' = (git_dir' "packed-refs") : refs + git_dir P. "refs" + let refs' = (git_dir P. 
"packed-refs") : refs liftIO $ forM_ refs' $ \src -> do - let src' = toRawFilePath src - whenM (doesFileExist src) $ do - dest <- relPathDirToFile git_dir src' + whenM (R.doesPathExist src) $ do + dest <- relPathDirToFile git_dir src let dest' = toRawFilePath tmpgit P. dest createDirectoryUnder [git_dir] (P.takeDirectory dest') - void $ createLinkOrCopy src' dest' + void $ createLinkOrCopy src dest' -- This reset makes git merge not care -- that the work tree is empty; otherwise -- it will think that all the files have diff --git a/Annex/Branch.hs b/Annex/Branch.hs index ce4c3ad85e..2474b2ae13 100644 --- a/Annex/Branch.hs +++ b/Annex/Branch.hs @@ -753,7 +753,7 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do Nothing -> return () Just file -> do let path = dir P. toRawFilePath file - unless (dirCruft file) $ whenM (isfile path) $ do + unless (dirCruft (toRawFilePath file)) $ whenM (isfile path) $ do sha <- Git.HashObject.hashFile h path hPutStrLn jlogh file streamer $ Git.UpdateIndex.updateIndexLine diff --git a/Annex/Content.hs b/Annex/Content.hs index aba53add7b..40f13e7ea5 100644 --- a/Annex/Content.hs +++ b/Annex/Content.hs @@ -817,7 +817,7 @@ listKeys' keyloc want = do s <- Annex.getState id r <- Annex.getRead id depth <- gitAnnexLocationDepth <$> Annex.getGitConfig - liftIO $ walk (s, r) depth (fromRawFilePath dir) + liftIO $ walk (s, r) depth dir where walk s depth dir = do contents <- catchDefaultIO [] (dirContents dir) @@ -825,7 +825,7 @@ listKeys' keyloc want = do then do contents' <- filterM present contents keys <- filterM (Annex.eval s . want) $ - mapMaybe (fileKey . P.takeFileName . toRawFilePath) contents' + mapMaybe (fileKey . P.takeFileName) contents' continue keys [] else do let deeper = walk s (depth - 1) @@ -843,8 +843,8 @@ listKeys' keyloc want = do present _ | inanywhere = pure True present d = presentInAnnex d - presentInAnnex = doesFileExist . contentfile - contentfile d = d takeFileName d + presentInAnnex = R.doesPathExist . contentfile + contentfile d = d P. P.takeFileName d {- Things to do to record changes to content when shutting down. - diff --git a/Annex/RepoSize/LiveUpdate.hs b/Annex/RepoSize/LiveUpdate.hs index a792b42597..8710282999 100644 --- a/Annex/RepoSize/LiveUpdate.hs +++ b/Annex/RepoSize/LiveUpdate.hs @@ -161,7 +161,7 @@ checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do where go livedir lck pidlockfile now = do void $ tryNonAsync $ do - lockfiles <- liftIO $ filter (not . dirCruft) + lockfiles <- liftIO $ filter (not . dirCruft . toRawFilePath) <$> getDirectoryContents (fromRawFilePath livedir) stale <- forM lockfiles $ \lockfile -> if (lockfile /= pidlockfile) diff --git a/Annex/Ssh.hs b/Annex/Ssh.hs index 90d462f7be..6cdfba7b02 100644 --- a/Annex/Ssh.hs +++ b/Annex/Ssh.hs @@ -5,6 +5,7 @@ - Licensed under the GNU AGPL version 3 or higher. -} +{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE CPP #-} module Annex.Ssh ( @@ -100,15 +101,16 @@ consumeStdinParams NoConsumeStdin = [Param "-n"] {- Returns a filename to use for a ssh connection caching socket, and - parameters to enable ssh connection caching. -} -sshCachingInfo :: (SshHost, Maybe Integer) -> Annex (Maybe FilePath, [CommandParam]) +sshCachingInfo :: (SshHost, Maybe Integer) -> Annex (Maybe RawFilePath, [CommandParam]) sshCachingInfo (host, port) = go =<< sshCacheDir' where go (Right dir) = liftIO (bestSocketPath $ dir P. hostport2socket host port) >>= return . 
\case Nothing -> (Nothing, []) Just socketfile -> - let socketfile' = fromRawFilePath socketfile - in (Just socketfile', sshConnectionCachingParams socketfile') + (Just socketfile + , sshConnectionCachingParams (fromRawFilePath socketfile) + ) -- No connection caching with concurrency is not a good -- combination, so warn the user. go (Left whynocaching) = do @@ -214,7 +216,7 @@ portParams (Just port) = [Param "-p", Param $ show port] - Locks the socket lock file to prevent other git-annex processes from - stopping the ssh multiplexer on this socket. -} -prepSocket :: FilePath -> SshHost -> [CommandParam] -> Annex () +prepSocket :: RawFilePath -> SshHost -> [CommandParam] -> Annex () prepSocket socketfile sshhost sshparams = do -- There could be stale ssh connections hanging around -- from a previous git-annex run that was interrupted. @@ -286,13 +288,13 @@ prepSocket socketfile sshhost sshparams = do - and this check makes such files be skipped since the corresponding lock - file won't exist. -} -enumSocketFiles :: Annex [FilePath] +enumSocketFiles :: Annex [RawFilePath] enumSocketFiles = liftIO . go =<< sshCacheDir where go Nothing = return [] go (Just dir) = filterM (R.doesPathExist . socket2lock) =<< filter (not . isLock) - <$> catchDefaultIO [] (dirContents (fromRawFilePath dir)) + <$> catchDefaultIO [] (dirContents dir) {- Stop any unused ssh connection caching processes. -} sshCleanup :: Annex () @@ -324,9 +326,9 @@ sshCleanup = mapM_ cleanup =<< enumSocketFiles forceSshCleanup :: Annex () forceSshCleanup = mapM_ forceStopSsh =<< enumSocketFiles -forceStopSsh :: FilePath -> Annex () +forceStopSsh :: RawFilePath -> Annex () forceStopSsh socketfile = withNullHandle $ \nullh -> do - let (dir, base) = splitFileName socketfile + let (dir, base) = splitFileName (fromRawFilePath socketfile) let p = (proc "ssh" $ toCommand $ [ Param "-O", Param "stop" ] ++ sshConnectionCachingParams base ++ @@ -338,7 +340,7 @@ forceStopSsh socketfile = withNullHandle $ \nullh -> do } void $ liftIO $ catchMaybeIO $ withCreateProcess p $ \_ _ _ pid -> forceSuccessProcess p pid - liftIO $ removeWhenExistsWith R.removeLink (toRawFilePath socketfile) + liftIO $ removeWhenExistsWith R.removeLink socketfile {- This needs to be as short as possible, due to limitations on the length - of the path to a socket file. 
At the same time, it needs to be unique @@ -355,13 +357,13 @@ hostport2socket' s where lengthofmd5s = 32 -socket2lock :: FilePath -> RawFilePath -socket2lock socket = toRawFilePath (socket ++ lockExt) +socket2lock :: RawFilePath -> RawFilePath +socket2lock socket = socket <> lockExt -isLock :: FilePath -> Bool -isLock f = lockExt `isSuffixOf` f +isLock :: RawFilePath -> Bool +isLock f = lockExt `S.isSuffixOf` f -lockExt :: String +lockExt :: S.ByteString lockExt = ".lock" {- This is the size of the sun_path component of sockaddr_un, which diff --git a/Annex/Tmp.hs b/Annex/Tmp.hs index 2bbebd6388..6f9f28b8b6 100644 --- a/Annex/Tmp.hs +++ b/Annex/Tmp.hs @@ -60,15 +60,17 @@ cleanupOtherTmp = do void $ tryIO $ tryExclusiveLock tmplck $ do tmpdir <- fromRawFilePath <$> fromRepo gitAnnexTmpOtherDir void $ liftIO $ tryIO $ removeDirectoryRecursive tmpdir - oldtmp <- fromRawFilePath <$> fromRepo gitAnnexTmpOtherDirOld + oldtmp <- fromRepo gitAnnexTmpOtherDirOld liftIO $ mapM_ cleanold =<< emptyWhenDoesNotExist (dirContentsRecursive oldtmp) - liftIO $ void $ tryIO $ removeDirectory oldtmp -- when empty + -- remove when empty + liftIO $ void $ tryIO $ + removeDirectory (fromRawFilePath oldtmp) where cleanold f = do now <- liftIO getPOSIXTime let oldenough = now - (60 * 60 * 24 * 7) - catchMaybeIO (modificationTime <$> R.getSymbolicLinkStatus (toRawFilePath f)) >>= \case + catchMaybeIO (modificationTime <$> R.getSymbolicLinkStatus f) >>= \case Just mtime | realToFrac mtime <= oldenough -> - void $ tryIO $ removeWhenExistsWith R.removeLink (toRawFilePath f) + void $ tryIO $ removeWhenExistsWith R.removeLink f _ -> return () diff --git a/Annex/YoutubeDl.hs b/Annex/YoutubeDl.hs index 3a4dd051bc..04ab1091ed 100644 --- a/Annex/YoutubeDl.hs +++ b/Annex/YoutubeDl.hs @@ -30,6 +30,7 @@ import Utility.Metered import Utility.Tmp import Messages.Progress import Logs.Transfer +import qualified Utility.RawFilePath as R import Network.URI import Control.Concurrent.Async @@ -101,9 +102,9 @@ youtubeDl' url workdir p uo | isytdlp cmd = liftIO $ (nub . lines <$> readFile filelistfile) `catchIO` (pure . const []) - | otherwise = workdirfiles - workdirfiles = liftIO $ filter (/= filelistfile) - <$> (filterM (doesFileExist) =<< dirContents workdir) + | otherwise = map fromRawFilePath <$> workdirfiles + workdirfiles = liftIO $ filter (/= toRawFilePath filelistfile) + <$> (filterM R.doesPathExist =<< dirContents (toRawFilePath workdir)) filelistfile = workdir filelistfilebase filelistfilebase = "git-annex-file-list-file" isytdlp cmd = cmd == "yt-dlp" @@ -159,7 +160,7 @@ youtubeDlMaxSize workdir = ifM (Annex.getRead Annex.force) Just have -> do inprogress <- sizeOfDownloadsInProgress (const True) partial <- liftIO $ sum - <$> (mapM (getFileSize . toRawFilePath) =<< dirContents workdir) + <$> (mapM getFileSize =<< dirContents (toRawFilePath workdir)) reserve <- annexDiskReserve <$> Annex.getGitConfig let maxsize = have - reserve - inprogress + partial if maxsize > 0 diff --git a/Assistant/Repair.hs b/Assistant/Repair.hs index 02ebab3cae..4c37227c8d 100644 --- a/Assistant/Repair.hs +++ b/Assistant/Repair.hs @@ -5,6 +5,7 @@ - Licensed under the GNU AGPL version 3 or higher. 
-} +{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE CPP #-} module Assistant.Repair where @@ -33,6 +34,8 @@ import Utility.ThreadScheduler import qualified Utility.RawFilePath as R import Control.Concurrent.Async +import qualified Data.ByteString as S +import qualified System.FilePath.ByteString as P {- When the FsckResults require a repair, tries to do a non-destructive - repair. If that fails, pops up an alert. -} @@ -132,26 +135,26 @@ repairStaleGitLocks r = do repairStaleLocks lockfiles return $ not $ null lockfiles where - findgitfiles = dirContentsRecursiveSkipping (== dropTrailingPathSeparator (fromRawFilePath annexDir)) True . fromRawFilePath . Git.localGitDir + findgitfiles = dirContentsRecursiveSkipping (== P.dropTrailingPathSeparator annexDir) True . Git.localGitDir islock f - | "gc.pid" `isInfixOf` f = False - | ".lock" `isSuffixOf` f = True - | takeFileName f == "MERGE_HEAD" = True + | "gc.pid" `S.isInfixOf` f = False + | ".lock" `S.isSuffixOf` f = True + | P.takeFileName f == "MERGE_HEAD" = True | otherwise = False -repairStaleLocks :: [FilePath] -> Assistant () +repairStaleLocks :: [RawFilePath] -> Assistant () repairStaleLocks lockfiles = go =<< getsizes where getsize lf = catchMaybeIO $ (\s -> (lf, s)) - <$> getFileSize (toRawFilePath lf) + <$> getFileSize lf getsizes = liftIO $ catMaybes <$> mapM getsize lockfiles go [] = return () - go l = ifM (liftIO $ null <$> Lsof.query ("--" : map fst l)) + go l = ifM (liftIO $ null <$> Lsof.query ("--" : map (fromRawFilePath . fst) l)) ( do waitforit "to check stale git lock file" l' <- getsizes if l' == l - then liftIO $ mapM_ (removeWhenExistsWith R.removeLink . toRawFilePath . fst) l + then liftIO $ mapM_ (removeWhenExistsWith R.removeLink . fst) l else go l' , do waitforit "for git lock file writer" diff --git a/Assistant/Threads/TransferWatcher.hs b/Assistant/Threads/TransferWatcher.hs index d692a3ffd0..3dc40fb1e1 100644 --- a/Assistant/Threads/TransferWatcher.hs +++ b/Assistant/Threads/TransferWatcher.hs @@ -57,7 +57,7 @@ onErr = giveup {- Called when a new transfer information file is written. -} onAdd :: Handler -onAdd file = case parseTransferFile file of +onAdd file = case parseTransferFile (toRawFilePath file) of Nothing -> noop Just t -> go t =<< liftAnnex (checkTransfer t) where @@ -73,7 +73,7 @@ onAdd file = case parseTransferFile file of - The only thing that should change in the transfer info is the - bytesComplete, so that's the only thing updated in the DaemonStatus. -} onModify :: Handler -onModify file = case parseTransferFile file of +onModify file = case parseTransferFile (toRawFilePath file) of Nothing -> noop Just t -> go t =<< liftIO (readTransferInfoFile Nothing file) where @@ -88,7 +88,7 @@ watchesTransferSize = modifyTracked {- Called when a transfer information file is removed. 
-} onDel :: Handler -onDel file = case parseTransferFile file of +onDel file = case parseTransferFile (toRawFilePath file) of Nothing -> noop Just t -> do debug [ "transfer finishing:", show t] diff --git a/Assistant/Upgrade.hs b/Assistant/Upgrade.hs index 81d7f70b23..8e299b5271 100644 --- a/Assistant/Upgrade.hs +++ b/Assistant/Upgrade.hs @@ -41,6 +41,7 @@ import qualified Utility.Url as Url import qualified Annex.Url as Url hiding (download) import Utility.Tuple import qualified Utility.RawFilePath as R +import qualified System.FilePath.ByteString as P import Data.Either import qualified Data.Map as M @@ -212,8 +213,8 @@ upgradeToDistribution newdir cleanup distributionfile = do makeorigsymlink olddir return (newdir "git-annex", deleteold) installby a dstdir srcdir = - mapM_ (\x -> a (toRawFilePath x) (toRawFilePath (dstdir takeFileName x))) - =<< dirContents srcdir + mapM_ (\x -> a x (toRawFilePath dstdir P. P.takeFileName x)) + =<< dirContents (toRawFilePath srcdir) #endif sanitycheck dir = unlessM (doesDirectoryExist dir) $ @@ -280,14 +281,14 @@ deleteFromManifest dir = do fs <- map (dir ) . lines <$> catchDefaultIO "" (readFile manifest) mapM_ (removeWhenExistsWith R.removeLink . toRawFilePath) fs removeWhenExistsWith R.removeLink (toRawFilePath manifest) - removeEmptyRecursive dir + removeEmptyRecursive (toRawFilePath dir) where manifest = dir "git-annex.MANIFEST" -removeEmptyRecursive :: FilePath -> IO () +removeEmptyRecursive :: RawFilePath -> IO () removeEmptyRecursive dir = do mapM_ removeEmptyRecursive =<< dirContents dir - void $ tryIO $ removeDirectory dir + void $ tryIO $ removeDirectory (fromRawFilePath dir) {- This is a file that the UpgradeWatcher can watch for modifications to - detect when git-annex has been upgraded. diff --git a/Assistant/WebApp/Configurators/Delete.hs b/Assistant/WebApp/Configurators/Delete.hs index 333e13656a..31b5b19d14 100644 --- a/Assistant/WebApp/Configurators/Delete.hs +++ b/Assistant/WebApp/Configurators/Delete.hs @@ -89,7 +89,7 @@ deleteCurrentRepository = dangerPage $ do rs <- syncRemotes <$> getDaemonStatus mapM_ (\r -> changeSyncable (Just r) False) rs - liftAnnex $ prepareRemoveAnnexDir dir + liftAnnex $ prepareRemoveAnnexDir (toRawFilePath dir) liftIO $ removeDirectoryRecursive . fromRawFilePath =<< absPath (toRawFilePath dir) diff --git a/Build/LinuxMkLibs.hs b/Build/LinuxMkLibs.hs index 6a5f8dea01..434b6c31bd 100644 --- a/Build/LinuxMkLibs.hs +++ b/Build/LinuxMkLibs.hs @@ -80,7 +80,7 @@ consolidateUsrLib top libdirs = go [] libdirs forM_ fs $ \f -> do let src = inTop top (x f) let dst = inTop top (d f) - unless (dirCruft f) $ + unless (dirCruft (toRawFilePath f)) $ unlessM (doesDirectoryExist src) $ renameFile src dst symlinkHwCapDirs top d diff --git a/CmdLine/GitRemoteAnnex.hs b/CmdLine/GitRemoteAnnex.hs index da2a61b34b..0702bbd3f0 100644 --- a/CmdLine/GitRemoteAnnex.hs +++ b/CmdLine/GitRemoteAnnex.hs @@ -57,6 +57,7 @@ import Utility.Tmp.Dir import Utility.Env import Utility.Metered import Utility.FileMode +import qualified Utility.RawFilePath as R import Network.URI import Data.Either @@ -65,7 +66,6 @@ import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as B8 import qualified Data.Map.Strict as M import qualified System.FilePath.ByteString as P -import qualified Utility.RawFilePath as R import qualified Data.Set as S run :: [String] -> IO () @@ -1162,7 +1162,8 @@ specialRemoteFromUrl sab a = withTmpDir "journal" $ \tmpdir -> do -- objects are deleted. 
cleanupInitialization :: StartAnnexBranch -> FilePath -> Annex () cleanupInitialization sab alternatejournaldir = void $ tryNonAsync $ do - liftIO $ mapM_ removeFile =<< dirContents alternatejournaldir + liftIO $ mapM_ R.removeLink + =<< dirContents (toRawFilePath alternatejournaldir) case sab of AnnexBranchExistedAlready _ -> noop AnnexBranchCreatedEmpty r -> diff --git a/CmdLine/Seek.hs b/CmdLine/Seek.hs index 07818dcda5..a25c6b083b 100644 --- a/CmdLine/Seek.hs +++ b/CmdLine/Seek.hs @@ -56,6 +56,7 @@ import Data.IORef import Data.Time.Clock.POSIX import System.PosixCompat.Files (isDirectory, isSymbolicLink, deviceID, fileID) import qualified System.FilePath.ByteString as P +import qualified Data.ByteString as S data AnnexedFileSeeker = AnnexedFileSeeker { startAction :: Maybe KeySha -> SeekInput -> RawFilePath -> Key -> CommandStart @@ -122,9 +123,8 @@ withPathContents a params = do -- exist. get p = ifM (isDirectory <$> R.getFileStatus p') ( map (\f -> - let f' = toRawFilePath f - in (f', P.makeRelative (P.takeDirectory (P.dropTrailingPathSeparator p')) f')) - <$> dirContentsRecursiveSkipping (".git" `isSuffixOf`) False p + (f, P.makeRelative (P.takeDirectory (P.dropTrailingPathSeparator p')) f)) + <$> dirContentsRecursiveSkipping (".git" `S.isSuffixOf`) False p' , return [(p', P.takeFileName p')] ) where diff --git a/Command/P2PHttp.hs b/Command/P2PHttp.hs index 31ee330f4d..ac72c7053d 100644 --- a/Command/P2PHttp.hs +++ b/Command/P2PHttp.hs @@ -266,8 +266,8 @@ getAuthEnv = do findRepos :: Options -> IO [Git.Repo] findRepos o = do - files <- map toRawFilePath . concat - <$> mapM dirContents (directoryOption o) + files <- concat + <$> mapM (dirContents . toRawFilePath) (directoryOption o) map Git.Construct.newFrom . catMaybes <$> mapM Git.Construct.checkForRepo files diff --git a/Command/Uninit.hs b/Command/Uninit.hs index a38ac9a7e6..d883467787 100644 --- a/Command/Uninit.hs +++ b/Command/Uninit.hs @@ -102,14 +102,14 @@ startCheckIncomplete recordnotok file key = removeAnnexDir :: CommandCleanup -> CommandStart removeAnnexDir recordok = do Annex.Queue.flush - annexdir <- fromRawFilePath <$> fromRepo gitAnnexDir + annexdir <- fromRepo gitAnnexDir annexobjectdir <- fromRepo gitAnnexObjectDir starting ("uninit objects") (ActionItemOther Nothing) (SeekInput []) $ do leftovers <- removeUnannexed =<< listKeys InAnnex prepareRemoveAnnexDir annexdir if null leftovers then do - liftIO $ removeDirectoryRecursive annexdir + liftIO $ removeDirectoryRecursive (fromRawFilePath annexdir) next recordok else giveup $ unlines [ "Not fully uninitialized" @@ -134,15 +134,15 @@ removeAnnexDir recordok = do - - Also closes sqlite databases that might be in the directory, - to avoid later failure to write any cached changes to them. -} -prepareRemoveAnnexDir :: FilePath -> Annex () +prepareRemoveAnnexDir :: RawFilePath -> Annex () prepareRemoveAnnexDir annexdir = do Database.Keys.closeDb liftIO $ prepareRemoveAnnexDir' annexdir -prepareRemoveAnnexDir' :: FilePath -> IO () +prepareRemoveAnnexDir' :: RawFilePath -> IO () prepareRemoveAnnexDir' annexdir = emptyWhenDoesNotExist (dirTreeRecursiveSkipping (const False) annexdir) - >>= mapM_ (void . tryIO . allowWrite . toRawFilePath) + >>= mapM_ (void . tryIO . allowWrite) {- Keys that were moved out of the annex have a hard link still in the - annex, with > 1 link count, and those can be removed. diff --git a/Git/Objects.hs b/Git/Objects.hs index 1390209e97..b66b0b5e19 100644 --- a/Git/Objects.hs +++ b/Git/Objects.hs @@ -25,14 +25,14 @@ packDir r = objectsDir r P. 
"pack" packIdxFile :: RawFilePath -> RawFilePath packIdxFile = flip P.replaceExtension "idx" -listPackFiles :: Repo -> IO [FilePath] -listPackFiles r = filter (".pack" `isSuffixOf`) - <$> catchDefaultIO [] (dirContents $ fromRawFilePath $ packDir r) +listPackFiles :: Repo -> IO [RawFilePath] +listPackFiles r = filter (".pack" `B.isSuffixOf`) + <$> catchDefaultIO [] (dirContents $ packDir r) listLooseObjectShas :: Repo -> IO [Sha] listLooseObjectShas r = catchDefaultIO [] $ - mapMaybe (extractSha . encodeBS . concat . reverse . take 2 . reverse . splitDirectories) - <$> emptyWhenDoesNotExist (dirContentsRecursiveSkipping (== "pack") True (fromRawFilePath (objectsDir r))) + mapMaybe (extractSha . encodeBS . concat . reverse . take 2 . reverse . splitDirectories . decodeBS) + <$> emptyWhenDoesNotExist (dirContentsRecursiveSkipping (== "pack") True (objectsDir r)) looseObjectFile :: Repo -> Sha -> RawFilePath looseObjectFile r sha = objectsDir r P. prefix P. rest diff --git a/Git/Repair.hs b/Git/Repair.hs index ace7ae89af..332bb5d50a 100644 --- a/Git/Repair.hs +++ b/Git/Repair.hs @@ -83,24 +83,23 @@ explodePacks r = go =<< listPackFiles r putStrLn "Unpacking all pack files." forM_ packs $ \packfile -> do -- Just in case permissions are messed up. - allowRead (toRawFilePath packfile) + allowRead packfile -- May fail, if pack file is corrupt. void $ tryIO $ pipeWrite [Param "unpack-objects", Param "-r"] r' $ \h -> - L.hPut h =<< L.readFile packfile - objs <- emptyWhenDoesNotExist (dirContentsRecursive tmpdir) + L.hPut h =<< L.readFile (fromRawFilePath packfile) + objs <- emptyWhenDoesNotExist (dirContentsRecursive (toRawFilePath tmpdir)) forM_ objs $ \objfile -> do f <- relPathDirToFile (toRawFilePath tmpdir) - (toRawFilePath objfile) + objfile let dest = objectsDir r P. f createDirectoryIfMissing True (fromRawFilePath (parentDir dest)) - moveFile (toRawFilePath objfile) dest + moveFile objfile dest forM_ packs $ \packfile -> do - let f = toRawFilePath packfile - removeWhenExistsWith R.removeLink f - removeWhenExistsWith R.removeLink (packIdxFile f) + removeWhenExistsWith R.removeLink packfile + removeWhenExistsWith R.removeLink (packIdxFile packfile) return True {- Try to retrieve a set of missing objects, from the remotes of a @@ -248,13 +247,14 @@ badBranches missing r = filterM isbad =<< getAllRefs r - Relies on packed refs being exploded before it's called. -} getAllRefs :: Repo -> IO [Ref] -getAllRefs r = getAllRefs' (fromRawFilePath (localGitDir r) "refs") +getAllRefs r = getAllRefs' (localGitDir r P. "refs") -getAllRefs' :: FilePath -> IO [Ref] +getAllRefs' :: RawFilePath -> IO [Ref] getAllRefs' refdir = do - let topsegs = length (splitPath refdir) - 1 + let topsegs = length (P.splitPath refdir) - 1 let toref = Ref . toInternalGitPath . encodeBS - . joinPath . drop topsegs . splitPath + . joinPath . drop topsegs . splitPath + . decodeBS map toref <$> emptyWhenDoesNotExist (dirContentsRecursive refdir) explodePackedRefsFile :: Repo -> IO () diff --git a/Logs/Transfer.hs b/Logs/Transfer.hs index 88c2f947cc..603a8446b0 100644 --- a/Logs/Transfer.hs +++ b/Logs/Transfer.hs @@ -29,6 +29,7 @@ import Annex.Perms import Data.Time.Clock import Data.Time.Clock.POSIX import Control.Concurrent.STM +import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as B8 import qualified System.FilePath.ByteString as P @@ -157,7 +158,7 @@ getTransfers' dirs wanted = do infos <- mapM checkTransfer transfers return $ mapMaybe running $ zip transfers infos where - findfiles = liftIO . 
mapM (emptyWhenDoesNotExist . dirContentsRecursive . fromRawFilePath) + findfiles = liftIO . mapM (emptyWhenDoesNotExist . dirContentsRecursive) =<< mapM (fromRepo . transferDir) dirs running (t, Just i) = Just (t, i) running (_, Nothing) = Nothing @@ -180,11 +181,11 @@ getFailedTransfers u = catMaybes <$> (liftIO . getpairs =<< concat <$> findfiles where getpairs = mapM $ \f -> do let mt = parseTransferFile f - mi <- readTransferInfoFile Nothing f + mi <- readTransferInfoFile Nothing (fromRawFilePath f) return $ case (mt, mi) of (Just t, Just i) -> Just (t, i) _ -> Nothing - findfiles = liftIO . mapM (emptyWhenDoesNotExist . dirContentsRecursive . fromRawFilePath) + findfiles = liftIO . mapM (emptyWhenDoesNotExist . dirContentsRecursive) =<< mapM (fromRepo . failedTransferDir u) [Download, Upload] clearFailedTransfers :: UUID -> Annex [(Transfer, TransferInfo)] @@ -244,17 +245,17 @@ failedTransferFile (Transfer direction u kd) r = P. keyFile (mkKey (const kd)) {- Parses a transfer information filename to a Transfer. -} -parseTransferFile :: FilePath -> Maybe Transfer +parseTransferFile :: RawFilePath -> Maybe Transfer parseTransferFile file - | "lck." `isPrefixOf` takeFileName file = Nothing + | "lck." `B.isPrefixOf` P.takeFileName file = Nothing | otherwise = case drop (length bits - 3) bits of [direction, u, key] -> Transfer <$> parseDirection direction <*> pure (toUUID u) - <*> fmap (fromKey id) (fileKey (toRawFilePath key)) + <*> fmap (fromKey id) (fileKey key) _ -> Nothing where - bits = splitDirectories file + bits = P.splitDirectories file writeTransferInfoFile :: TransferInfo -> RawFilePath -> Annex () writeTransferInfoFile info tfile = writeLogFile tfile $ writeTransferInfo info diff --git a/Remote/BitTorrent.hs b/Remote/BitTorrent.hs index c9c15d75ed..29e19e5f8b 100644 --- a/Remote/BitTorrent.hs +++ b/Remote/BitTorrent.hs @@ -35,6 +35,7 @@ import qualified Utility.RawFilePath as R import Network.URI import qualified System.FilePath.ByteString as P +import qualified Data.ByteString as S #ifdef WITH_TORRENTPARSER import Data.Torrent @@ -208,9 +209,7 @@ downloadTorrentFile u = do let metadir = othertmp P. "torrentmeta" P. kf createAnnexDirectory metadir showOutput - ok <- downloadMagnetLink u - (fromRawFilePath metadir) - (fromRawFilePath torrent) + ok <- downloadMagnetLink u metadir torrent liftIO $ removeDirectoryRecursive (fromRawFilePath metadir) return ok @@ -225,14 +224,14 @@ downloadTorrentFile u = do return ok ) -downloadMagnetLink :: URLString -> FilePath -> FilePath -> Annex Bool +downloadMagnetLink :: URLString -> RawFilePath -> RawFilePath -> Annex Bool downloadMagnetLink u metadir dest = ifM download ( liftIO $ do - ts <- filter (".torrent" `isSuffixOf`) + ts <- filter (".torrent" `S.isSuffixOf`) <$> dirContents metadir case ts of (t:[]) -> do - moveFile (toRawFilePath t) (toRawFilePath dest) + moveFile t dest return True _ -> return False , return False @@ -245,7 +244,7 @@ downloadMagnetLink u metadir dest = ifM download , Param "--seed-time=0" , Param "--summary-interval=0" , Param "-d" - , File metadir + , File (fromRawFilePath metadir) ] downloadTorrentContent :: Key -> URLString -> FilePath -> Int -> MeterUpdate -> Annex Bool diff --git a/Remote/Directory.hs b/Remote/Directory.hs index 1086e7cf64..ac0e4cbeec 100644 --- a/Remote/Directory.hs +++ b/Remote/Directory.hs @@ -246,7 +246,7 @@ finalizeStoreGeneric d tmp dest = do renameDirectory (fromRawFilePath tmp) dest' -- may fail on some filesystems void $ tryIO $ do - mapM_ (preventWrite . 
toRawFilePath) =<< dirContents dest' + mapM_ preventWrite =<< dirContents dest preventWrite dest where dest' = fromRawFilePath dest @@ -389,8 +389,7 @@ removeExportLocation topdir loc = listImportableContentsM :: IgnoreInodes -> RawFilePath -> Annex (Maybe (ImportableContentsChunkable Annex (ContentIdentifier, ByteSize))) listImportableContentsM ii dir = liftIO $ do - l <- dirContentsRecursiveSkipping (const False) False (fromRawFilePath dir) - l' <- mapM (go . toRawFilePath) l + l' <- mapM go =<< dirContentsRecursiveSkipping (const False) False dir return $ Just $ ImportableContentsComplete $ ImportableContents (catMaybes l') [] where diff --git a/Remote/Helper/Git.hs b/Remote/Helper/Git.hs index 1567e7ae6a..a8f6798662 100644 --- a/Remote/Helper/Git.hs +++ b/Remote/Helper/Git.hs @@ -5,6 +5,8 @@ - Licensed under the GNU AGPL version 3 or higher. -} +{-# LANGUAGE OverloadedStrings #-} + module Remote.Helper.Git where import Annex.Common @@ -21,6 +23,7 @@ import Data.Time.Clock.POSIX import System.PosixCompat.Files (modificationTime) import qualified Data.Map as M import qualified Data.Set as S +import qualified System.FilePath.ByteString as P repoCheap :: Git.Repo -> Bool repoCheap = not . Git.repoIsUrl @@ -59,9 +62,9 @@ guardUsable r fallback a gitRepoInfo :: Remote -> Annex [(String, String)] gitRepoInfo r = do - d <- fromRawFilePath <$> fromRepo Git.localGitDir - mtimes <- liftIO $ mapM (\p -> modificationTime <$> R.getFileStatus (toRawFilePath p)) - =<< emptyWhenDoesNotExist (dirContentsRecursive (d "refs" "remotes" Remote.name r)) + d <- fromRepo Git.localGitDir + mtimes <- liftIO $ mapM (\p -> modificationTime <$> R.getFileStatus p) + =<< emptyWhenDoesNotExist (dirContentsRecursive (d P. "refs" P. "remotes" P. encodeBS (Remote.name r))) let lastsynctime = case mtimes of [] -> "never" _ -> show $ posixSecondsToUTCTime $ realToFrac $ maximum mtimes diff --git a/Test/Framework.hs b/Test/Framework.hs index b9b8bcde79..dbf13af054 100644 --- a/Test/Framework.hs +++ b/Test/Framework.hs @@ -339,14 +339,14 @@ removeDirectoryForCleanup = removePathForcibly cleanup :: FilePath -> IO () cleanup dir = whenM (doesDirectoryExist dir) $ do - Command.Uninit.prepareRemoveAnnexDir' dir + Command.Uninit.prepareRemoveAnnexDir' (toRawFilePath dir) -- This can fail if files in the directory are still open by a -- subprocess. 
void $ tryIO $ removeDirectoryForCleanup dir finalCleanup :: IO () finalCleanup = whenM (doesDirectoryExist tmpdir) $ do - Command.Uninit.prepareRemoveAnnexDir' tmpdir + Command.Uninit.prepareRemoveAnnexDir' (toRawFilePath tmpdir) catchIO (removeDirectoryForCleanup tmpdir) $ \e -> do print e putStrLn "sleeping 10 seconds and will retry directory cleanup" diff --git a/Types/Direction.hs b/Types/Direction.hs index a18b83697d..814b66f72b 100644 --- a/Types/Direction.hs +++ b/Types/Direction.hs @@ -18,7 +18,7 @@ formatDirection :: Direction -> B.ByteString formatDirection Upload = "upload" formatDirection Download = "download" -parseDirection :: String -> Maybe Direction +parseDirection :: B.ByteString -> Maybe Direction parseDirection "upload" = Just Upload parseDirection "download" = Just Download parseDirection _ = Nothing diff --git a/Upgrade/V2.hs b/Upgrade/V2.hs index f467fa2596..bbe5d8431d 100644 --- a/Upgrade/V2.hs +++ b/Upgrade/V2.hs @@ -73,14 +73,14 @@ locationLogs = do config <- Annex.getGitConfig dir <- fromRepo gitStateDir liftIO $ do - levela <- dirContents dir + levela <- dirContents (toRawFilePath dir) levelb <- mapM tryDirContents levela files <- mapM tryDirContents (concat levelb) return $ mapMaybe (islogfile config) (concat files) where tryDirContents d = catchDefaultIO [] $ dirContents d - islogfile config f = maybe Nothing (\k -> Just (k, f)) $ - locationLogFileKey config (toRawFilePath f) + islogfile config f = maybe Nothing (\k -> Just (k, fromRawFilePath f)) $ + locationLogFileKey config f inject :: FilePath -> FilePath -> Annex () inject source dest = do diff --git a/Utility/DirWatcher/INotify.hs b/Utility/DirWatcher/INotify.hs index 700bff5773..4b14e85bd2 100644 --- a/Utility/DirWatcher/INotify.hs +++ b/Utility/DirWatcher/INotify.hs @@ -59,7 +59,7 @@ watchDir i dir ignored scanevents hooks void (addWatch i watchevents (toInternalFilePath dir) handler) `catchIO` failedaddwatch withLock lock $ - mapM_ scan =<< filter (not . dirCruft) <$> + mapM_ scan =<< filter (not . dirCruft . toRawFilePath) <$> getDirectoryContents dir where recurse d = watchDir i d ignored scanevents hooks diff --git a/Utility/DirWatcher/Kqueue.hs b/Utility/DirWatcher/Kqueue.hs index dc9fed31c2..b793eee58b 100644 --- a/Utility/DirWatcher/Kqueue.hs +++ b/Utility/DirWatcher/Kqueue.hs @@ -77,7 +77,7 @@ data DirInfo = DirInfo getDirInfo :: FilePath -> IO DirInfo getDirInfo dir = do - l <- filter (not . dirCruft) <$> getDirectoryContents dir + l <- filter (not . dirCruft . toRawFilePath) <$> getDirectoryContents dir contents <- S.fromList . 
catMaybes <$> mapM getDirEnt l return $ DirInfo dir contents where diff --git a/Utility/Directory.hs b/Utility/Directory.hs index a0a12998a3..20908108f0 100644 --- a/Utility/Directory.hs +++ b/Utility/Directory.hs @@ -7,33 +7,48 @@ {-# LANGUAGE CPP #-} {-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-tabs #-} module Utility.Directory where import Control.Monad -import System.FilePath import System.PosixCompat.Files (isDirectory, isSymbolicLink) import Control.Applicative import System.IO.Unsafe (unsafeInterleaveIO) +import qualified System.FilePath.ByteString as P import Data.Maybe import Prelude -import Utility.SystemDirectory import Utility.Exception import Utility.Monad import Utility.FileSystemEncoding import qualified Utility.RawFilePath as R -dirCruft :: FilePath -> Bool +#ifdef WITH_OSSTRING +import Utility.OsString +import qualified System.Directory.OsPath as OP +#else +import Utility.SystemDirectory +#endif + +dirCruft :: R.RawFilePath -> Bool dirCruft "." = True dirCruft ".." = True dirCruft _ = False {- Lists the contents of a directory. - Unlike getDirectoryContents, paths are not relative to the directory. -} -dirContents :: FilePath -> IO [FilePath] -dirContents d = map (d ) . filter (not . dirCruft) <$> getDirectoryContents d +dirContents :: RawFilePath -> IO [RawFilePath] +#ifdef WITH_OSSTRING +dirContents d = map (\p -> d P. fromOsString p) + <$> OP.listDirectory (toOsString d) +#else +dirContents d = + map (\p -> d P. toRawFilePath p) + . filter (not . dirCruft . toRawFilePath) + <$> getDirectoryContents (fromRawFilePath d) +#endif {- Gets files in a directory, and then its subdirectories, recursively, - and lazily. @@ -45,13 +60,13 @@ dirContents d = map (d ) . filter (not . dirCruft) <$> getDirectoryContents d - be accessed (the use of unsafeInterleaveIO would make it difficult to - trap such exceptions). -} -dirContentsRecursive :: FilePath -> IO [FilePath] +dirContentsRecursive :: RawFilePath -> IO [RawFilePath] dirContentsRecursive = dirContentsRecursiveSkipping (const False) True {- Skips directories whose basenames match the skipdir. 
-} -dirContentsRecursiveSkipping :: (FilePath -> Bool) -> Bool -> FilePath -> IO [FilePath] +dirContentsRecursiveSkipping :: (RawFilePath -> Bool) -> Bool -> RawFilePath -> IO [RawFilePath] dirContentsRecursiveSkipping skipdir followsubdirsymlinks topdir - | skipdir (takeFileName topdir) = return [] + | skipdir (P.takeFileName topdir) = return [] | otherwise = do -- Get the contents of the top directory outside of -- unsafeInterleaveIO, which allows throwing exceptions if @@ -63,24 +78,30 @@ dirContentsRecursiveSkipping skipdir followsubdirsymlinks topdir where go [] = return [] go (dir:dirs) - | skipdir (takeFileName dir) = go dirs + | skipdir (P.takeFileName dir) = go dirs | otherwise = unsafeInterleaveIO $ do (files, dirs') <- collect [] [] =<< catchDefaultIO [] (dirContents dir) files' <- go (dirs' ++ dirs) return (files ++ files') + + collect :: [RawFilePath] -> [RawFilePath] -> [RawFilePath] -> IO ([RawFilePath], [RawFilePath]) collect files dirs' [] = return (reverse files, reverse dirs') collect files dirs' (entry:entries) | dirCruft entry = collect files dirs' entries | otherwise = do let skip = collect (entry:files) dirs' entries let recurse = collect files (entry:dirs') entries - ms <- catchMaybeIO $ R.getSymbolicLinkStatus (toRawFilePath entry) + ms <- catchMaybeIO $ R.getSymbolicLinkStatus entry case ms of (Just s) | isDirectory s -> recurse | isSymbolicLink s && followsubdirsymlinks -> - ifM (doesDirectoryExist entry) +#ifdef WITH_OSSTRING + ifM (OP.doesDirectoryExist (toOsString entry)) +#else + ifM (doesDirectoryExist (fromRawFilePath entry)) +#endif ( recurse , skip ) @@ -95,22 +116,22 @@ dirContentsRecursiveSkipping skipdir followsubdirsymlinks topdir - be accessed (the use of unsafeInterleaveIO would make it difficult to - trap such exceptions). -} -dirTreeRecursiveSkipping :: (FilePath -> Bool) -> FilePath -> IO [FilePath] +dirTreeRecursiveSkipping :: (RawFilePath -> Bool) -> RawFilePath -> IO [RawFilePath] dirTreeRecursiveSkipping skipdir topdir - | skipdir (takeFileName topdir) = return [] + | skipdir (P.takeFileName topdir) = return [] | otherwise = do subdirs <- filterM isdir =<< dirContents topdir go [] subdirs where go c [] = return c go c (dir:dirs) - | skipdir (takeFileName dir) = go c dirs + | skipdir (P.takeFileName dir) = go c dirs | otherwise = unsafeInterleaveIO $ do subdirs <- go [] =<< filterM isdir =<< catchDefaultIO [] (dirContents dir) go (subdirs++dir:c) dirs - isdir p = isDirectory <$> R.getSymbolicLinkStatus (toRawFilePath p) + isdir p = isDirectory <$> R.getSymbolicLinkStatus p {- When the action fails due to the directory not existing, returns []. 
-} emptyWhenDoesNotExist :: IO [a] -> IO [a] diff --git a/Utility/Directory/Stream.hs b/Utility/Directory/Stream.hs index 3a6222c561..726f884dd1 100644 --- a/Utility/Directory/Stream.hs +++ b/Utility/Directory/Stream.hs @@ -31,6 +31,7 @@ import qualified System.Posix as Posix import Utility.Directory import Utility.Exception +import Utility.FileSystemEncoding #ifndef mingw32_HOST_OS data DirectoryHandle = DirectoryHandle IsOpen Posix.DirStream @@ -115,5 +116,5 @@ isDirectoryEmpty d = bracket (openDirectory d) closeDirectory check case v of Nothing -> return True Just f - | not (dirCruft f) -> return False + | not (dirCruft (toRawFilePath f)) -> return False | otherwise -> check h diff --git a/Utility/LockFile/PidLock.hs b/Utility/LockFile/PidLock.hs index de5436dd4b..b3a3b0e2cc 100644 --- a/Utility/LockFile/PidLock.hs +++ b/Utility/LockFile/PidLock.hs @@ -39,6 +39,7 @@ import Utility.FileSystemEncoding import Utility.Env import Utility.Env.Set import Utility.Tmp +import Utility.RawFilePath import qualified Utility.LockFile.Posix as Posix import System.IO @@ -242,15 +243,14 @@ linkToLock (Just _) src dest = do -- with the SAME FILENAME exist. checkInsaneLustre :: RawFilePath -> IO Bool checkInsaneLustre dest = do - let dest' = fromRawFilePath dest - fs <- dirContents (takeDirectory dest') - case length (filter (== dest') fs) of + fs <- dirContents (P.takeDirectory dest) + case length (filter (== dest) fs) of 1 -> return False -- whew! 0 -> return True -- wtf? _ -> do -- Try to clean up the extra copy we made -- that has the same name. Egads. - _ <- tryIO $ removeFile dest' + _ <- tryIO $ removeLink dest return True -- | Waits as necessary to take a lock. diff --git a/Utility/OsString.hs b/Utility/OsString.hs new file mode 100644 index 0000000000..8f06c6d057 --- /dev/null +++ b/Utility/OsString.hs @@ -0,0 +1,38 @@ +{- OsString utilities + - + - Copyright 2025 Joey Hess + - + - License: BSD-2-clause + -} + +{-# LANGUAGE CPP #-} +{-# LANGUAGE PackageImports #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} + +module Utility.OsString where + +#ifdef WITH_OSSTRING + +import Utility.RawFilePath + +import "os-string" System.OsString.Internal.Types +import qualified Data.ByteString.Short as S + +{- Unlike System.OsString.fromBytes, on Windows this does not ensure a + - valid USC-2LE encoding. The input ByteString must be in a valid encoding + - already or uses of the OsString will fail. -} +toOsString :: RawFilePath -> OsString +#if defined(mingw32_HOST_OS) +toOsString = OsString . WindowsString . S.toShort +#else +toOsString = OsString . PosixString . S.toShort +#endif + +fromOsString :: OsString -> RawFilePath +#if defined(mingw32_HOST_OS) +fromOsString = S.fromShort . getWindowsString . getOsString +#else +fromOsString = S.fromShort . getPosixString . getOsString +#endif + +#endif /* WITH_OSSTRING */ diff --git a/doc/todo/RawFilePath_conversion.mdwn b/doc/todo/RawFilePath_conversion.mdwn index a8b9af18d2..138cd57d47 100644 --- a/doc/todo/RawFilePath_conversion.mdwn +++ b/doc/todo/RawFilePath_conversion.mdwn @@ -8,13 +8,6 @@ Some commands like `git-annex find` use RawFilePath end-to-end. But this conversion is not yet complete. This is a todo to keep track of the status. -* The Abstract FilePath proposal (AFPP) has been implemented, and so a number of - libraries like directory now have versions that operate on - OSPath. That could be used in git-annex eg for things like - getDirectoryContents, when built against those versions. 
- (OSPath uses ShortByteString, while RawFilePath is ByteString, so - conversion still entails a copy, eg using - `System.OsString.Internal.fromBytes`) * unix has modules that operate on RawFilePath but no OSPath versions yet. See https://github.com/haskell/unix/issues/240 * filepath-1.4.100 implements support for OSPath. It is bundled with diff --git a/git-annex.cabal b/git-annex.cabal index 864efa527e..d5322b028f 100644 --- a/git-annex.cabal +++ b/git-annex.cabal @@ -175,6 +175,9 @@ Flag Crypton Flag Servant Description: Use the servant library, enabling using annex+http urls and git-annex p2phttp +Flag OsString + Description: Use the os-string library + Flag Benchmark Description: Enable benchmarking Default: True @@ -329,6 +332,13 @@ Executable git-annex P2P.Http.Server P2P.Http.State + if flag(OsString) + Build-Depends: + os-string (>= 2.0.0), + directory (>= 1.3.8.3), + filepath (>= 1.5.2.0) + CPP-Options: -DWITH_OSSTRING + if (os(windows)) Build-Depends: Win32 ((>= 2.6.1.0 && < 2.12.0.0) || >= 2.13.4.0), @@ -1094,6 +1104,7 @@ Executable git-annex Utility.OpenFile Utility.OptParse Utility.OSX + Utility.OsString Utility.PID Utility.PartialPrelude Utility.Path diff --git a/stack.yaml b/stack.yaml index d46045734f..55bc04f4f2 100644 --- a/stack.yaml +++ b/stack.yaml @@ -11,8 +11,17 @@ flags: benchmark: true crypton: true servant: true + osstring: true packages: - '.' -resolver: lts-23.2 +resolver: nightly-2025-01-20 extra-deps: - filepath-bytestring-1.4.100.3.2 +- aws-0.24.3 +- feed-1.3.2.1 +- git-lfs-1.2.2 +allow-newer: true +allow-newer-deps: +- filepath-bytestring +- aws +- feed
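
For reference, a minimal POSIX-only sketch of the conversion layer this patch introduces in Utility.OsString, together with the new RawFilePath-based dirContents built on System.Directory.OsPath. The real code selects the WindowsString variant with CPP and is only compiled under the WITH_OSSTRING / OsString build flag; the module name below and the POSIX-only simplification are illustrative, not the verbatim source.

```haskell
{-# LANGUAGE PackageImports #-}

-- Illustrative sketch (POSIX branch only) of the OsString <-> RawFilePath
-- conversions and an OsPath-backed dirContents, following the shape of the
-- patch's Utility.OsString and Utility.Directory changes.
module OsStringSketch (toOsString, fromOsString, dirContents) where

import qualified Data.ByteString as B
import qualified Data.ByteString.Short as S
import qualified System.Directory.OsPath as OP
import qualified System.FilePath.ByteString as P
import "os-string" System.OsString.Internal.Types

-- RawFilePath is a strict ByteString (System.Posix.ByteString.FilePath).
type RawFilePath = B.ByteString

-- OsString wraps a ShortByteString, so each direction copies; that is the
-- overhead the commit message accepts for dirContents and friends.
toOsString :: RawFilePath -> OsString
toOsString = OsString . PosixString . S.toShort

fromOsString :: OsString -> RawFilePath
fromOsString = S.fromShort . getPosixString . getOsString

-- Unlike getDirectoryContents, results include the directory prefix, and
-- listDirectory already omits "." and "..", so no dirCruft filter is needed.
dirContents :: RawFilePath -> IO [RawFilePath]
dirContents d = map (\p -> d P.</> fromOsString p) <$> OP.listDirectory (toOsString d)
```

Keeping RawFilePath at the call sites confines the ShortByteString/ByteString copy to these two helpers, so callers such as listPackFiles can keep filtering with ByteString predicates, e.g. filter (".pack" `B.isSuffixOf`) <$> dirContents (packDir r), rather than round-tripping through String as before.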