more OsPath conversion

Sponsored-by: Brock Spratlen
Joey Hess 2025-02-01 11:54:19 -04:00
parent c69e57aede
commit 474cf3bc8b
GPG key ID: DB12DB0FF05F8F38
38 changed files with 342 additions and 330 deletions


@ -313,7 +313,7 @@ updateTo' pairs = do
- transitions that have not been applied to all refs will be applied on
- the fly.
-}
get :: RawFilePath -> Annex L.ByteString
get :: OsPath -> Annex L.ByteString
get file = do
st <- update
case getCache file st of
@ -353,7 +353,7 @@ getUnmergedRefs = unmergedRefs <$> update
- using some optimised method. The journal has to be checked, in case
- it has a newer version of the file that has not reached the branch yet.
-}
precache :: RawFilePath -> L.ByteString -> Annex ()
precache :: OsPath -> L.ByteString -> Annex ()
precache file branchcontent = do
st <- getState
content <- if journalIgnorable st
@ -369,12 +369,12 @@ precache file branchcontent = do
- reflect changes in remotes.
- (Changing the value this returns, and then merging is always the
- same as using get, and then changing its value.) -}
getLocal :: RawFilePath -> Annex L.ByteString
getLocal :: OsPath -> Annex L.ByteString
getLocal = getLocal' (GetPrivate True)
getLocal' :: GetPrivate -> RawFilePath -> Annex L.ByteString
getLocal' :: GetPrivate -> OsPath -> Annex L.ByteString
getLocal' getprivate file = do
fastDebug "Annex.Branch" ("read " ++ fromRawFilePath file)
fastDebug "Annex.Branch" ("read " ++ fromOsPath file)
go =<< getJournalFileStale getprivate file
where
go NoJournalledContent = getRef fullname file
@ -384,14 +384,14 @@ getLocal' getprivate file = do
return (v <> journalcontent)
{- Gets the content of a file as staged in the branch's index. -}
getStaged :: RawFilePath -> Annex L.ByteString
getStaged :: OsPath -> Annex L.ByteString
getStaged = getRef indexref
where
-- This makes git cat-file be run with ":file",
-- so it looks at the index.
indexref = Ref ""
getHistorical :: RefDate -> RawFilePath -> Annex L.ByteString
getHistorical :: RefDate -> OsPath -> Annex L.ByteString
getHistorical date file =
-- This check avoids some ugly error messages when the reflog
-- is empty.
@ -400,7 +400,7 @@ getHistorical date file =
, getRef (Git.Ref.dateRef fullname date) file
)
getRef :: Ref -> RawFilePath -> Annex L.ByteString
getRef :: Ref -> OsPath -> Annex L.ByteString
getRef ref file = withIndex $ catFile ref file
{- Applies a function to modify the content of a file.
@ -408,7 +408,7 @@ getRef ref file = withIndex $ catFile ref file
- Note that this does not cause the branch to be merged, it only
- modifies the current content of the file on the branch.
-}
change :: Journalable content => RegardingUUID -> RawFilePath -> (L.ByteString -> content) -> Annex ()
change :: Journalable content => RegardingUUID -> OsPath -> (L.ByteString -> content) -> Annex ()
change ru file f = lockJournal $ \jl -> f <$> getToChange ru file >>= set jl ru file
{- Applies a function which can modify the content of a file, or not.
@ -416,7 +416,7 @@ change ru file f = lockJournal $ \jl -> f <$> getToChange ru file >>= set jl ru
- When the file was modified, runs the onchange action, and returns
- True. The action is run while the journal is still locked,
- so another concurrent call to this cannot happen while it is running. -}
maybeChange :: Journalable content => RegardingUUID -> RawFilePath -> (L.ByteString -> Maybe content) -> Annex () -> Annex Bool
maybeChange :: Journalable content => RegardingUUID -> OsPath -> (L.ByteString -> Maybe content) -> Annex () -> Annex Bool
maybeChange ru file f onchange = lockJournal $ \jl -> do
v <- getToChange ru file
case f v of
@ -449,7 +449,7 @@ data ChangeOrAppend t = Change t | Append t
- state that would confuse the older version. This is planned to be
- changed in a future repository version.
-}
changeOrAppend :: Journalable content => RegardingUUID -> RawFilePath -> (L.ByteString -> ChangeOrAppend content) -> Annex ()
changeOrAppend :: Journalable content => RegardingUUID -> OsPath -> (L.ByteString -> ChangeOrAppend content) -> Annex ()
changeOrAppend ru file f = lockJournal $ \jl ->
checkCanAppendJournalFile jl ru file >>= \case
Just appendable -> ifM (annexAlwaysCompact <$> Annex.getGitConfig)
@ -481,7 +481,7 @@ changeOrAppend ru file f = lockJournal $ \jl ->
oldc <> journalableByteString toappend
{- Only get private information when the RegardingUUID is itself private. -}
getToChange :: RegardingUUID -> RawFilePath -> Annex L.ByteString
getToChange :: RegardingUUID -> OsPath -> Annex L.ByteString
getToChange ru f = flip getLocal' f . GetPrivate =<< regardingPrivateUUID ru
{- Records new content of a file into the journal.
@ -493,11 +493,11 @@ getToChange ru f = flip getLocal' f . GetPrivate =<< regardingPrivateUUID ru
- git-annex index, and should not be written to the public git-annex
- branch.
-}
set :: Journalable content => JournalLocked -> RegardingUUID -> RawFilePath -> content -> Annex ()
set :: Journalable content => JournalLocked -> RegardingUUID -> OsPath -> content -> Annex ()
set jl ru f c = do
journalChanged
setJournalFile jl ru f c
fastDebug "Annex.Branch" ("set " ++ fromRawFilePath f)
fastDebug "Annex.Branch" ("set " ++ fromOsPath f)
-- Could cache the new content, but it would involve
-- evaluating a Journalable Builder twice, which is not very
-- efficient. Instead, assume that it's not common to need to read
@ -505,11 +505,11 @@ set jl ru f c = do
invalidateCache f
{- Appends content to the journal file. -}
append :: Journalable content => JournalLocked -> RawFilePath -> AppendableJournalFile -> content -> Annex ()
append :: Journalable content => JournalLocked -> OsPath -> AppendableJournalFile -> content -> Annex ()
append jl f appendable toappend = do
journalChanged
appendJournalFile jl appendable toappend
fastDebug "Annex.Branch" ("append " ++ fromRawFilePath f)
fastDebug "Annex.Branch" ("append " ++ fromOsPath f)
invalidateCache f
{- Commit message used when making a commit of whatever data has changed
@ -611,7 +611,7 @@ commitIndex' jl branchref message basemessage retrynum parents = do
- not been merged in, returns Nothing, because it's not possible to
- efficiently handle that.
-}
files :: Annex (Maybe ([RawFilePath], IO Bool))
files :: Annex (Maybe ([OsPath], IO Bool))
files = do
st <- update
if not (null (unmergedRefs st))
@ -629,10 +629,10 @@ files = do
{- Lists all files currently in the journal, but not files in the private
- journal. -}
journalledFiles :: Annex [RawFilePath]
journalledFiles :: Annex [OsPath]
journalledFiles = getJournalledFilesStale gitAnnexJournalDir
journalledFilesPrivate :: Annex [RawFilePath]
journalledFilesPrivate :: Annex [OsPath]
journalledFilesPrivate = ifM privateUUIDsKnown
( getJournalledFilesStale gitAnnexPrivateJournalDir
, return []
@ -640,10 +640,10 @@ journalledFilesPrivate = ifM privateUUIDsKnown
{- Files in the branch, not including any from journalled changes,
- and without updating the branch. -}
branchFiles :: Annex ([RawFilePath], IO Bool)
branchFiles :: Annex ([OsPath], IO Bool)
branchFiles = withIndex $ inRepo branchFiles'
branchFiles' :: Git.Repo -> IO ([RawFilePath], IO Bool)
branchFiles' :: Git.Repo -> IO ([OsPath], IO Bool)
branchFiles' = Git.Command.pipeNullSplit' $
lsTreeParams Git.LsTree.LsTreeRecursive (Git.LsTree.LsTreeLong False)
fullname
@ -690,7 +690,7 @@ withIndex' :: Bool -> Annex a -> Annex a
withIndex' bootstrapping a = withIndexFile AnnexIndexFile $ \f -> do
checkIndexOnce $ unlessM (liftIO $ doesFileExist f) $ do
unless bootstrapping create
createAnnexDirectory $ toRawFilePath $ takeDirectory f
createAnnexDirectory $ toOsPath $ takeDirectory f
unless bootstrapping $ inRepo genIndex
a
@ -748,7 +748,7 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do
Git.UpdateIndex.streamUpdateIndex g
[genstream dir h jh jlogh]
commitindex
liftIO $ cleanup (fromRawFilePath dir) jlogh jlogf
liftIO $ cleanup (fromOsPath dir) jlogh jlogf
where
genstream dir h jh jlogh streamer = readDirectory jh >>= \case
Nothing -> return ()
@ -999,7 +999,7 @@ data UnmergedBranches t
= UnmergedBranches t
| NoUnmergedBranches t
type FileContents t b = Maybe (t, RawFilePath, Maybe (L.ByteString, Maybe b))
type FileContents t b = Maybe (t, OsPath, Maybe (L.ByteString, Maybe b))
{- Runs an action on the content of selected files from the branch.
- This is much faster than reading the content of each file in turn,
@ -1022,7 +1022,7 @@ overBranchFileContents
-- the callback can be run more than once on the same filename,
-- and in this case it's also possible for the callback to be
-- passed some of the same file content repeatedly.
-> (RawFilePath -> Maybe v)
-> (OsPath -> Maybe v)
-> (Annex (FileContents v Bool) -> Annex a)
-> Annex (UnmergedBranches (a, Git.Sha))
overBranchFileContents ignorejournal select go = do
@ -1036,7 +1036,7 @@ overBranchFileContents ignorejournal select go = do
else NoUnmergedBranches v
overBranchFileContents'
:: (RawFilePath -> Maybe v)
:: (OsPath -> Maybe v)
-> (Annex (FileContents v Bool) -> Annex a)
-> BranchState
-> Annex (a, Git.Sha)
@ -1086,11 +1086,11 @@ combineStaleJournalWithBranch branchcontent journalledcontent =
- files.
-}
overJournalFileContents
:: (RawFilePath -> L.ByteString -> Annex (L.ByteString, Maybe b))
:: (OsPath -> L.ByteString -> Annex (L.ByteString, Maybe b))
-- ^ Called with the journalled file content when the journalled
-- content may be stale or lack information committed to the
-- git-annex branch.
-> (RawFilePath -> Maybe v)
-> (OsPath -> Maybe v)
-> (Annex (FileContents v b) -> Annex a)
-> Annex a
overJournalFileContents handlestale select go = do
@ -1098,9 +1098,9 @@ overJournalFileContents handlestale select go = do
go $ overJournalFileContents' buf handlestale select
overJournalFileContents'
:: MVar ([RawFilePath], [RawFilePath])
-> (RawFilePath -> L.ByteString -> Annex (L.ByteString, Maybe b))
-> (RawFilePath -> Maybe a)
:: MVar ([OsPath], [OsPath])
-> (OsPath -> L.ByteString -> Annex (L.ByteString, Maybe b))
-> (OsPath -> Maybe a)
-> Annex (FileContents a b)
overJournalFileContents' buf handlestale select =
liftIO (tryTakeMVar buf) >>= \case


@ -118,7 +118,7 @@ enableInteractiveBranchAccess = changeState $ \s -> s
, journalIgnorable = False
}
setCache :: RawFilePath -> L.ByteString -> Annex ()
setCache :: OsPath -> L.ByteString -> Annex ()
setCache file content = changeState $ \s -> s
{ cachedFileContents = add (cachedFileContents s) }
where
@ -126,7 +126,7 @@ setCache file content = changeState $ \s -> s
| length l < logFilesToCache = (file, content) : l
| otherwise = (file, content) : Prelude.init l
getCache :: RawFilePath -> BranchState -> Maybe L.ByteString
getCache :: OsPath -> BranchState -> Maybe L.ByteString
getCache file state = go (cachedFileContents state)
where
go [] = Nothing
@ -134,7 +134,7 @@ getCache file state = go (cachedFileContents state)
| f == file && not (needInteractiveAccess state) = Just c
| otherwise = go rest
invalidateCache :: RawFilePath -> Annex ()
invalidateCache :: OsPath -> Annex ()
invalidateCache f = changeState $ \s -> s
{ cachedFileContents = filter (\(f', _) -> f' /= f)
(cachedFileContents s)


@ -45,11 +45,11 @@ import Types.AdjustedBranch
import Types.CatFileHandles
import Utility.ResourcePool
catFile :: Git.Branch -> RawFilePath -> Annex L.ByteString
catFile :: Git.Branch -> OsPath -> Annex L.ByteString
catFile branch file = withCatFileHandle $ \h ->
liftIO $ Git.CatFile.catFile h branch file
catFileDetails :: Git.Branch -> RawFilePath -> Annex (Maybe (L.ByteString, Sha, ObjectType))
catFileDetails :: Git.Branch -> OsPath -> Annex (Maybe (L.ByteString, Sha, ObjectType))
catFileDetails branch file = withCatFileHandle $ \h ->
liftIO $ Git.CatFile.catFileDetails h branch file
@ -167,8 +167,8 @@ catKey' ref sz
catKey' _ _ = return Nothing
{- Gets a symlink target. -}
catSymLinkTarget :: Sha -> Annex RawFilePath
catSymLinkTarget sha = fromInternalGitPath . L.toStrict <$> get
catSymLinkTarget :: Sha -> Annex OsPath
catSymLinkTarget sha = fromInternalGitPath . toOsPath . L.toStrict <$> get
where
-- Avoid buffering the whole file content, which might be large.
-- 8192 is enough if it really is a symlink.
@ -195,25 +195,25 @@ catSymLinkTarget sha = fromInternalGitPath . L.toStrict <$> get
-
- So, this gets info from the index, unless running as a daemon.
-}
catKeyFile :: RawFilePath -> Annex (Maybe Key)
catKeyFile :: OsPath -> Annex (Maybe Key)
catKeyFile f = ifM (Annex.getState Annex.daemon)
( catKeyFileHEAD f
, maybe (pure Nothing) catKey =<< inRepo (Git.Ref.fileRef f)
)
catKeyFileHEAD :: RawFilePath -> Annex (Maybe Key)
catKeyFileHEAD :: OsPath -> Annex (Maybe Key)
catKeyFileHEAD f = maybe (pure Nothing) catKey
=<< inRepo (Git.Ref.fileFromRef Git.Ref.headRef f)
{- Look in the original branch from whence an adjusted branch is based
- to find the file. But only when the adjustment hides some files. -}
catKeyFileHidden :: RawFilePath -> CurrBranch -> Annex (Maybe Key)
catKeyFileHidden :: OsPath -> CurrBranch -> Annex (Maybe Key)
catKeyFileHidden = hiddenCat catKey
catObjectMetaDataHidden :: RawFilePath -> CurrBranch -> Annex (Maybe (Sha, Integer, ObjectType))
catObjectMetaDataHidden :: OsPath -> CurrBranch -> Annex (Maybe (Sha, Integer, ObjectType))
catObjectMetaDataHidden = hiddenCat catObjectMetaData
hiddenCat :: (Ref -> Annex (Maybe a)) -> RawFilePath -> CurrBranch -> Annex (Maybe a)
hiddenCat :: (Ref -> Annex (Maybe a)) -> OsPath -> CurrBranch -> Annex (Maybe a)
hiddenCat a f (Just origbranch, Just adj)
| adjustmentHidesFiles adj =
maybe (pure Nothing) a


@ -19,13 +19,12 @@ import Utility.DataUnits
import Utility.CopyFile
import qualified Utility.RawFilePath as R
import qualified System.FilePath.ByteString as P
import System.PosixCompat.Files (linkCount)
{- Runs the secure erase command if set, otherwise does nothing.
- File may or may not be deleted at the end; caller is responsible for
- making sure it's deleted. -}
secureErase :: RawFilePath -> Annex ()
secureErase :: OsPath -> Annex ()
secureErase = void . runAnnexPathHook "%file"
secureEraseAnnexHook annexSecureEraseCommand
@ -44,45 +43,48 @@ data LinkedOrCopied = Linked | Copied
- execute bit will be set. The mode is not fully copied over because
- git doesn't support file modes beyond execute.
-}
linkOrCopy :: Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
linkOrCopy :: Key -> OsPath -> OsPath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
linkOrCopy = linkOrCopy' (annexThin <$> Annex.getGitConfig)
linkOrCopy' :: Annex Bool -> Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
linkOrCopy' :: Annex Bool -> Key -> OsPath -> OsPath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
linkOrCopy' canhardlink key src dest destmode = catchDefaultIO Nothing $
ifM canhardlink
( hardlink
( hardlinkorcopy
, copy =<< getstat
)
where
hardlink = do
hardlinkorcopy = do
s <- getstat
if linkCount s > 1
then copy s
else liftIO (R.createLink src dest >> preserveGitMode dest destmode >> return (Just Linked))
`catchIO` const (copy s)
else hardlink `catchIO` const (copy s)
hardlink = liftIO $ do
R.createLink (fromOsPath src) (fromOsPath dest)
void $ preserveGitMode dest destmode
return (Just Linked)
copy s = ifM (checkedCopyFile' key src dest destmode s)
( return (Just Copied)
, return Nothing
)
getstat = liftIO $ R.getFileStatus src
getstat = liftIO $ R.getFileStatus (fromOsPath src)
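As a rough sketch of how the converted function might now be called with OsPath arguments (the caller, paths, and key handling here are invented for illustration; only the linkOrCopy signature above is from the commit):
-- Hypothetical caller; assumes git-annex's Common (isJust, literalOsPath) is in scope.
populateSketch :: Key -> Annex Bool
populateSketch key = isJust <$> linkOrCopy key src dest Nothing
  where
    src = literalOsPath ".git/annex/objects/xx/yy/KEY/KEY" -- invented object path
    dest = literalOsPath "photo.jpg"                       -- invented worktree path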
{- Checks disk space before copying. -}
checkedCopyFile :: Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> Annex Bool
checkedCopyFile :: Key -> OsPath -> OsPath -> Maybe FileMode -> Annex Bool
checkedCopyFile key src dest destmode = catchBoolIO $
checkedCopyFile' key src dest destmode
=<< liftIO (R.getFileStatus src)
=<< liftIO (R.getFileStatus (fromOsPath src))
checkedCopyFile' :: Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> FileStatus -> Annex Bool
checkedCopyFile' :: Key -> OsPath -> OsPath -> Maybe FileMode -> FileStatus -> Annex Bool
checkedCopyFile' key src dest destmode s = catchBoolIO $ do
sz <- liftIO $ getFileSize' src s
ifM (checkDiskSpace' sz (Just $ P.takeDirectory dest) key 0 True)
ifM (checkDiskSpace' sz (Just $ takeDirectory dest) key 0 True)
( liftIO $
copyFileExternal CopyAllMetaData (fromRawFilePath src) (fromRawFilePath dest)
copyFileExternal CopyAllMetaData src dest
<&&> preserveGitMode dest destmode
, return False
)
preserveGitMode :: RawFilePath -> Maybe FileMode -> IO Bool
preserveGitMode :: OsPath -> Maybe FileMode -> IO Bool
preserveGitMode f (Just mode)
| isExecutable mode = catchBoolIO $ do
modifyFileMode f $ addModes executeModes
@ -100,12 +102,12 @@ preserveGitMode _ _ = return True
- to be downloaded from the free space. This way, we avoid overcommitting
- when doing concurrent downloads.
-}
checkDiskSpace :: Maybe FileSize -> Maybe RawFilePath -> Key -> Integer -> Bool -> Annex Bool
checkDiskSpace :: Maybe FileSize -> Maybe OsPath -> Key -> Integer -> Bool -> Annex Bool
checkDiskSpace msz destdir key = checkDiskSpace' sz destdir key
where
sz = fromMaybe 1 (fromKey keySize key <|> msz)
checkDiskSpace' :: FileSize -> Maybe RawFilePath -> Key -> Integer -> Bool -> Annex Bool
checkDiskSpace' :: FileSize -> Maybe OsPath -> Key -> Integer -> Bool -> Annex Bool
checkDiskSpace' sz destdir key alreadythere samefilesystem = ifM (Annex.getRead Annex.force)
( return True
, do
@ -118,7 +120,7 @@ checkDiskSpace' sz destdir key alreadythere samefilesystem = ifM (Annex.getRead
inprogress <- if samefilesystem
then sizeOfDownloadsInProgress (/= key)
else pure 0
dir >>= liftIO . getDiskFree . fromRawFilePath >>= \case
dir >>= liftIO . getDiskFree . fromOsPath >>= \case
Just have -> do
reserve <- annexDiskReserve <$> Annex.getGitConfig
let delta = sz + reserve - have - alreadythere + inprogress
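As a rough worked example of the reservation arithmetic above (all numbers invented; a positive delta means the requested size does not fit in the free space):
-- sz = 100 (MiB to download), reserve = 1 (annex.diskreserve),
-- have = 150 (free), alreadythere = 0, inprogress = 60 (other concurrent downloads)
-- delta = 100 + 1 - 150 - 0 + 60 = 11 > 0, so the check presumably refuses the download.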


@ -30,12 +30,14 @@ import System.PosixCompat.Files (fileMode)
-
- Returns an InodeCache if it populated the pointer file.
-}
populatePointerFile :: Restage -> Key -> RawFilePath -> RawFilePath -> Annex (Maybe InodeCache)
populatePointerFile :: Restage -> Key -> OsPath -> OsPath -> Annex (Maybe InodeCache)
populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
where
go (Just k') | k == k' = do
destmode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus f
liftIO $ removeWhenExistsWith R.removeLink f
let f' = fromOsPath f
destmode <- liftIO $ catchMaybeIO $
fileMode <$> R.getFileStatus f'
liftIO $ removeWhenExistsWith R.removeLink f'
(ic, populated) <- replaceWorkTreeFile f $ \tmp -> do
ok <- linkOrCopy k obj tmp destmode >>= \case
Just _ -> thawContent tmp >> return True
@ -47,23 +49,24 @@ populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
then return ic
else return Nothing
go _ = return Nothing
{- Removes the content from a pointer file, replacing it with a pointer.
-
- Does not check if the pointer file is modified. -}
depopulatePointerFile :: Key -> RawFilePath -> Annex ()
depopulatePointerFile :: Key -> OsPath -> Annex ()
depopulatePointerFile key file = do
st <- liftIO $ catchMaybeIO $ R.getFileStatus file
let file' = fromOsPath file
st <- liftIO $ catchMaybeIO $ R.getFileStatus file'
let mode = fmap fileMode st
secureErase file
liftIO $ removeWhenExistsWith R.removeLink file
liftIO $ removeWhenExistsWith R.removeLink file'
ic <- replaceWorkTreeFile file $ \tmp -> do
liftIO $ writePointerFile tmp key mode
#if ! defined(mingw32_HOST_OS)
-- Don't advance mtime; this avoids unnecessary re-smudging
-- by git in some cases.
liftIO $ maybe noop
(\t -> touch tmp t False)
(\t -> touch (fromOsPath tmp) t False)
(fmap Posix.modificationTimeHiRes st)
#endif
withTSDelta (liftIO . genInodeCache tmp)


@ -51,7 +51,7 @@ tryCopyCoW (CopyCoWTried copycowtried) src dest meterupdate =
-- CoW is known to work, so delete
-- dest if it exists in order to do a fast
-- CoW copy.
void $ tryIO $ removeFile dest
void $ tryIO $ removeFile dest'
docopycow
, return False
)
@ -60,18 +60,18 @@ tryCopyCoW (CopyCoWTried copycowtried) src dest meterupdate =
docopycow = watchFileSize dest' meterupdate $ const $
copyCoW CopyTimeStamps src dest
dest' = toRawFilePath dest
dest' = toOsPath dest
-- Check if the dest file already exists, which would prevent
-- probing CoW. If the file exists but is empty, there's no benefit
-- to resuming from it when CoW does not work, so remove it.
destfilealreadypopulated =
tryIO (R.getFileStatus dest') >>= \case
tryIO (R.getFileStatus (toRawFilePath dest)) >>= \case
Left _ -> return False
Right st -> do
sz <- getFileSize' dest' st
if sz == 0
then tryIO (removeFile dest) >>= \case
then tryIO (removeFile dest') >>= \case
Right () -> return False
Left _ -> return True
else return True
@ -111,14 +111,15 @@ fileCopier copycowtried src dest meterupdate iv =
docopy = do
-- The file might have had the write bit removed,
-- so make sure we can write to it.
void $ tryIO $ allowWrite dest'
void $ tryIO $ allowWrite (toOsPath dest)
withBinaryFile src ReadMode $ \hsrc ->
fileContentCopier hsrc dest meterupdate iv
-- Copy src mode and mtime.
mode <- fileMode <$> R.getFileStatus (toRawFilePath src)
mtime <- utcTimeToPOSIXSeconds <$> getModificationTime src
mtime <- utcTimeToPOSIXSeconds
<$> getModificationTime (toOsPath src)
R.setFileMode dest' mode
touch dest' mtime False


@ -85,9 +85,9 @@ startExternalAddonProcess basecmd ps pid = do
runerr (Just cmd) =
return $ Left $ ProgramFailure $
"Cannot run " ++ cmd ++ " -- Make sure it's executable and that its dependencies are installed."
"Cannot run " ++ fromOsPath cmd ++ " -- Make sure it's executable and that its dependencies are installed."
runerr Nothing = do
path <- intercalate ":" <$> getSearchPath
path <- intercalate ":" . map fromOsPath <$> getSearchPath
return $ Left $ ProgramNotInstalled $
"Cannot run " ++ basecmd ++ " -- It is not installed in PATH (" ++ path ++ ")"


@ -66,13 +66,13 @@ withIndexFile i = withAltRepo usecachedgitenv restoregitenv
{- Runs an action using a different git work tree.
-
- Smudge and clean filters are disabled in this work tree. -}
withWorkTree :: FilePath -> Annex a -> Annex a
withWorkTree :: OsPath -> Annex a -> Annex a
withWorkTree d a = withAltRepo
(\g -> return $ (g { location = modlocation (location g), gitGlobalOpts = gitGlobalOpts g ++ bypassSmudgeConfig }, ()))
(\g g' -> g' { location = location g, gitGlobalOpts = gitGlobalOpts g })
(const a)
where
modlocation l@(Local {}) = l { worktree = Just (toRawFilePath d) }
modlocation l@(Local {}) = l { worktree = Just d }
modlocation _ = giveup "withWorkTree of non-local git repo"
{- Runs an action with the git index file and HEAD, and a few other
@ -83,13 +83,13 @@ withWorkTree d a = withAltRepo
-
- Needs git 2.2.0 or newer.
-}
withWorkTreeRelated :: FilePath -> Annex a -> Annex a
withWorkTreeRelated :: OsPath -> Annex a -> Annex a
withWorkTreeRelated d a = withAltRepo modrepo unmodrepo (const a)
where
modrepo g = liftIO $ do
g' <- addGitEnv g "GIT_COMMON_DIR" . fromRawFilePath
g' <- addGitEnv g "GIT_COMMON_DIR" . fromOsPath
=<< absPath (localGitDir g)
g'' <- addGitEnv g' "GIT_DIR" d
g'' <- addGitEnv g' "GIT_DIR" (fromOsPath d)
return (g'' { gitEnvOverridesGitDir = True }, ())
unmodrepo g g' = g'
{ gitEnv = gitEnv g


@ -28,7 +28,7 @@ hashObjectStop = maybe noop stop =<< Annex.getState Annex.hashobjecthandle
liftIO $ freeResourcePool p Git.HashObject.hashObjectStop
Annex.changeState $ \s -> s { Annex.hashobjecthandle = Nothing }
hashFile :: RawFilePath -> Annex Sha
hashFile :: OsPath -> Annex Sha
hashFile f = withHashObjectHandle $ \h ->
liftIO $ Git.HashObject.hashFile h f


@ -30,22 +30,22 @@ compareInodeCachesWith = ifM inodesChanged ( return Weakly, return Strongly )
{- Checks if one of the provided old InodeCache matches the current
- version of a file. -}
sameInodeCache :: RawFilePath -> [InodeCache] -> Annex Bool
sameInodeCache :: OsPath -> [InodeCache] -> Annex Bool
sameInodeCache file [] = do
fastDebug "Annex.InodeSentinal" $
fromRawFilePath file ++ " inode cache empty"
fromOsPath file ++ " inode cache empty"
return False
sameInodeCache file old = go =<< withTSDelta (liftIO . genInodeCache file)
where
go Nothing = do
fastDebug "Annex.InodeSentinal" $
fromRawFilePath file ++ " not present, cannot compare with inode cache"
fromOsPath file ++ " not present, cannot compare with inode cache"
return False
go (Just curr) = ifM (elemInodeCaches curr old)
( return True
, do
fastDebug "Annex.InodeSentinal" $
fromRawFilePath file ++ " (" ++ show curr ++ ") does not match inode cache (" ++ show old ++ ")"
fromOsPath file ++ " (" ++ show curr ++ ") does not match inode cache (" ++ show old ++ ")"
return False
)
@ -99,7 +99,7 @@ createInodeSentinalFile evenwithobjects =
alreadyexists = liftIO. sentinalFileExists =<< annexSentinalFile
hasobjects
| evenwithobjects = pure False
| otherwise = liftIO . doesDirectoryExist . fromRawFilePath
| otherwise = liftIO . doesDirectoryExist
=<< fromRepo gitAnnexObjectDir
annexSentinalFile :: Annex SentinalFile


@ -26,13 +26,12 @@ import Annex.LockFile
import Annex.BranchState
import Types.BranchState
import Utility.Directory.Stream
import qualified Utility.RawFilePath as R
import qualified Utility.FileIO as F
import qualified Utility.OsString as OS
import qualified Data.Set as S
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as B
import qualified System.FilePath.ByteString as P
import Data.ByteString.Builder
import Data.Char
@ -83,7 +82,7 @@ privateUUIDsKnown' = not . S.null . annexPrivateRepos . Annex.gitconfig
- interrupted write truncating information that was earlier read from the
- file, and so losing data.
-}
setJournalFile :: Journalable content => JournalLocked -> RegardingUUID -> RawFilePath -> content -> Annex ()
setJournalFile :: Journalable content => JournalLocked -> RegardingUUID -> OsPath -> content -> Annex ()
setJournalFile _jl ru file content = withOtherTmp $ \tmp -> do
st <- getState
jd <- fromRepo =<< ifM (regardingPrivateUUID ru)
@ -92,10 +91,10 @@ setJournalFile _jl ru file content = withOtherTmp $ \tmp -> do
)
-- journal file is written atomically
let jfile = journalFile file
let tmpfile = tmp P.</> jfile
liftIO $ F.withFile (toOsPath tmpfile) WriteMode $ \h ->
let tmpfile = tmp </> jfile
liftIO $ F.withFile tmpfile WriteMode $ \h ->
writeJournalHandle h content
let dest = jd P.</> jfile
let dest = jd </> jfile
let mv = do
liftIO $ moveFile tmpfile dest
setAnnexFilePerm dest
@ -103,20 +102,20 @@ setJournalFile _jl ru file content = withOtherTmp $ \tmp -> do
-- exists
mv `catchIO` (const (createAnnexDirectory jd >> mv))
newtype AppendableJournalFile = AppendableJournalFile (RawFilePath, RawFilePath)
newtype AppendableJournalFile = AppendableJournalFile (OsPath, OsPath)
{- If the journal file does not exist, it cannot be appended to, because
- that would overwrite whatever content the file has in the git-annex
- branch. -}
checkCanAppendJournalFile :: JournalLocked -> RegardingUUID -> RawFilePath -> Annex (Maybe AppendableJournalFile)
checkCanAppendJournalFile :: JournalLocked -> RegardingUUID -> OsPath -> Annex (Maybe AppendableJournalFile)
checkCanAppendJournalFile _jl ru file = do
st <- getState
jd <- fromRepo =<< ifM (regardingPrivateUUID ru)
( return (gitAnnexPrivateJournalDir st)
, return (gitAnnexJournalDir st)
)
let jfile = jd P.</> journalFile file
ifM (liftIO $ R.doesPathExist jfile)
let jfile = jd </> journalFile file
ifM (liftIO $ doesFileExist jfile)
( return (Just (AppendableJournalFile (jd, jfile)))
, return Nothing
)
@ -134,7 +133,7 @@ checkCanAppendJournalFile _jl ru file = do
-}
appendJournalFile :: Journalable content => JournalLocked -> AppendableJournalFile -> content -> Annex ()
appendJournalFile _jl (AppendableJournalFile (jd, jfile)) content = do
let write = liftIO $ F.withFile (toOsPath jfile) ReadWriteMode $ \h -> do
let write = liftIO $ F.withFile jfile ReadWriteMode $ \h -> do
sz <- hFileSize h
when (sz /= 0) $ do
hSeek h SeekFromEnd (-1)
@ -161,7 +160,7 @@ data JournalledContent
-- information that were made after that journal file was written.
{- Gets any journalled content for a file in the branch. -}
getJournalFile :: JournalLocked -> GetPrivate -> RawFilePath -> Annex JournalledContent
getJournalFile :: JournalLocked -> GetPrivate -> OsPath -> Annex JournalledContent
getJournalFile _jl = getJournalFileStale
data GetPrivate = GetPrivate Bool
@ -179,7 +178,7 @@ data GetPrivate = GetPrivate Bool
- (or is in progress when this is called), if the file content does not end
- with a newline, it is truncated back to the previous newline.
-}
getJournalFileStale :: GetPrivate -> RawFilePath -> Annex JournalledContent
getJournalFileStale :: GetPrivate -> OsPath -> Annex JournalledContent
getJournalFileStale (GetPrivate getprivate) file = do
st <- Annex.getState id
let repo = Annex.repo st
@ -205,7 +204,7 @@ getJournalFileStale (GetPrivate getprivate) file = do
jfile = journalFile file
getfrom d = catchMaybeIO $
discardIncompleteAppend . L.fromStrict
<$> F.readFile' (toOsPath (d P.</> jfile))
<$> F.readFile' (d </> jfile)
-- Note that this forces read of the whole lazy bytestring.
discardIncompleteAppend :: L.ByteString -> L.ByteString
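A quick sketch of the truncation behaviour documented above (values invented; the function body itself is elided from this hunk):
-- discardIncompleteAppend "key1 1\nkey2 1\n"     presumably returns "key1 1\nkey2 1\n"
-- discardIncompleteAppend "key1 1\nkey2 1\nkey"  presumably returns "key1 1\nkey2 1\n"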
@ -224,18 +223,18 @@ discardIncompleteAppend v
{- List of existing journal files in a journal directory, but without locking,
- may miss new ones just being added, or may have false positives if the
- journal is staged as it is run. -}
getJournalledFilesStale :: (BranchState -> Git.Repo -> RawFilePath) -> Annex [RawFilePath]
getJournalledFilesStale :: (BranchState -> Git.Repo -> OsPath) -> Annex [OsPath]
getJournalledFilesStale getjournaldir = do
bs <- getState
repo <- Annex.gitRepo
let d = getjournaldir bs repo
fs <- liftIO $ catchDefaultIO [] $
getDirectoryContents (fromRawFilePath d)
return $ filter (`notElem` [".", ".."]) $
map (fileJournal . toRawFilePath) fs
getDirectoryContents d
return $ filter (`notElem` dirCruft) $
map fileJournal fs
{- Directory handle open on a journal directory. -}
withJournalHandle :: (BranchState -> Git.Repo -> RawFilePath) -> (DirectoryHandle -> IO a) -> Annex a
withJournalHandle :: (BranchState -> Git.Repo -> OsPath) -> (DirectoryHandle -> IO a) -> Annex a
withJournalHandle getjournaldir a = do
bs <- getState
repo <- Annex.gitRepo
@ -244,15 +243,15 @@ withJournalHandle getjournaldir a = do
where
-- avoid overhead of creating the journal directory when it already
-- exists
opendir d = liftIO (openDirectory d)
opendir d = liftIO (openDirectory (fromOsPath d))
`catchIO` (const (createAnnexDirectory d >> opendir d))
{- Checks if there are changes in the journal. -}
journalDirty :: (BranchState -> Git.Repo -> RawFilePath) -> Annex Bool
journalDirty :: (BranchState -> Git.Repo -> OsPath) -> Annex Bool
journalDirty getjournaldir = do
st <- getState
d <- fromRepo (getjournaldir st)
liftIO $ isDirectoryPopulated d
liftIO $ isDirectoryPopulated (fromOsPath d)
{- Produces a filename to use in the journal for a file on the branch.
- The filename does not include the journal directory.
@ -261,33 +260,33 @@ journalDirty getjournaldir = do
- used in the branch is not necessary, and all the files are put directly
- in the journal directory.
-}
journalFile :: RawFilePath -> RawFilePath
journalFile file = B.concatMap mangle file
journalFile :: OsPath -> OsPath
journalFile file = OS.concat $ map mangle $ OS.unpack file
where
mangle c
| P.isPathSeparator c = B.singleton underscore
| c == underscore = B.pack [underscore, underscore]
| otherwise = B.singleton c
underscore = fromIntegral (ord '_')
| isPathSeparator c = OS.singleton underscore
| c == underscore = OS.pack [underscore, underscore]
| otherwise = OS.singleton c
underscore = unsafeFromChar '_'
{- Converts a journal file (relative to the journal dir) back to the
- filename on the branch. -}
fileJournal :: RawFilePath -> RawFilePath
fileJournal :: OsPath -> OsPath
fileJournal = go
where
go b =
let (h, t) = B.break (== underscore) b
in h <> case B.uncons t of
let (h, t) = OS.break (== underscore) b
in h <> case OS.uncons t of
Nothing -> t
Just (_u, t') -> case B.uncons t' of
Just (_u, t') -> case OS.uncons t' of
Nothing -> t'
Just (w, t'')
| w == underscore ->
B.cons underscore (go t'')
OS.cons underscore (go t'')
| otherwise ->
B.cons P.pathSeparator (go t')
OS.cons pathSeparator (go t')
underscore = fromIntegral (ord '_')
underscore = unsafeFromChar '_'
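To illustrate the mangling scheme above and its inverse (branch filename invented; literalOsPath as used elsewhere in this commit):
-- journalFile (literalOsPath "aaa/bbb_c.log") == literalOsPath "aaa_bbb__c.log"
-- fileJournal (literalOsPath "aaa_bbb__c.log") == literalOsPath "aaa/bbb_c.log"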
{- Sentinal value, only produced by lockJournal; required
- as a parameter by things that need to ensure the journal is


@ -39,11 +39,11 @@ import Utility.CopyFile
import qualified Database.Keys.Handle
import qualified Utility.RawFilePath as R
import qualified Utility.FileIO as F
import qualified Utility.OsString as OS
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import qualified System.FilePath.ByteString as P
#ifndef mingw32_HOST_OS
#if MIN_VERSION_unix(2,8,0)
#else
@ -103,7 +103,7 @@ getAnnexLinkTarget' file coresymlinks = if coresymlinks
then mempty
else s
makeAnnexLink :: LinkTarget -> RawFilePath -> Annex ()
makeAnnexLink :: LinkTarget -> OsPath -> Annex ()
makeAnnexLink = makeGitLink
{- Creates a link on disk.
@ -113,26 +113,31 @@ makeAnnexLink = makeGitLink
- it's staged as such, so use addAnnexLink when adding a new file or
- modified link to git.
-}
makeGitLink :: LinkTarget -> RawFilePath -> Annex ()
makeGitLink :: LinkTarget -> OsPath -> Annex ()
makeGitLink linktarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
( liftIO $ do
void $ tryIO $ R.removeLink file
R.createSymbolicLink linktarget file
, liftIO $ F.writeFile' (toOsPath file) linktarget
void $ tryIO $ R.removeLink file'
R.createSymbolicLink linktarget file'
, liftIO $ F.writeFile' file linktarget
)
where
file' = fromOsPath file
{- Creates a link on disk, and additionally stages it in git. -}
addAnnexLink :: LinkTarget -> RawFilePath -> Annex ()
addAnnexLink :: LinkTarget -> OsPath -> Annex ()
addAnnexLink linktarget file = do
makeAnnexLink linktarget file
stageSymlink file =<< hashSymlink linktarget
{- Injects a symlink target into git, returning its Sha. -}
hashSymlink :: LinkTarget -> Annex Sha
hashSymlink = hashBlob . toInternalGitPath
hashSymlink = go . fromOsPath . toInternalGitPath . toOsPath
where
go :: LinkTarget -> Annex Sha
go = hashBlob
{- Stages a symlink to an annexed object, using a Sha of its target. -}
stageSymlink :: RawFilePath -> Sha -> Annex ()
stageSymlink :: OsPath -> Sha -> Annex ()
stageSymlink file sha =
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.stageSymlink file sha)
@ -142,7 +147,7 @@ hashPointerFile :: Key -> Annex Sha
hashPointerFile key = hashBlob $ formatPointer key
{- Stages a pointer file, using a Sha of its content -}
stagePointerFile :: RawFilePath -> Maybe FileMode -> Sha -> Annex ()
stagePointerFile :: OsPath -> Maybe FileMode -> Sha -> Annex ()
stagePointerFile file mode sha =
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.stageFile sha treeitemtype file)
@ -151,10 +156,10 @@ stagePointerFile file mode sha =
| maybe False isExecutable mode = TreeExecutable
| otherwise = TreeFile
writePointerFile :: RawFilePath -> Key -> Maybe FileMode -> IO ()
writePointerFile :: OsPath -> Key -> Maybe FileMode -> IO ()
writePointerFile file k mode = do
F.writeFile' (toOsPath file) (formatPointer k)
maybe noop (R.setFileMode file) mode
F.writeFile' file (formatPointer k)
maybe noop (R.setFileMode (fromOsPath file)) mode
newtype Restage = Restage Bool
@ -187,7 +192,7 @@ newtype Restage = Restage Bool
- if the process is interrupted before the git queue is flushed, the
- restage will be taken care of later.
-}
restagePointerFile :: Restage -> RawFilePath -> InodeCache -> Annex ()
restagePointerFile :: Restage -> OsPath -> InodeCache -> Annex ()
restagePointerFile (Restage False) f orig = do
flip writeRestageLog orig =<< inRepo (toTopFilePath f)
toplevelWarning True $ unableToRestage $ Just f
@ -225,14 +230,14 @@ restagePointerFiles r = unlessM (Annex.getState Annex.insmudgecleanfilter) $ do
=<< Annex.getRead Annex.keysdbhandle
realindex <- liftIO $ Git.Index.currentIndexFile r
numsz@(numfiles, _) <- calcnumsz
let lock = fromRawFilePath (Git.Index.indexFileLock realindex)
let lock = Git.Index.indexFileLock realindex
lockindex = liftIO $ catchMaybeIO $ Git.LockFile.openLock' lock
unlockindex = liftIO . maybe noop Git.LockFile.closeLock
showwarning = warning $ unableToRestage Nothing
go Nothing = showwarning
go (Just _) = withtmpdir $ \tmpdir -> do
tsd <- getTSDelta
let tmpindex = toRawFilePath (tmpdir </> "index")
let tmpindex = tmpdir </> literalOsPath "index"
let replaceindex = liftIO $ moveFile tmpindex realindex
let updatetmpindex = do
r' <- liftIO $ Git.Env.addGitEnv r Git.Index.indexEnv
@ -247,8 +252,8 @@ restagePointerFiles r = unlessM (Annex.getState Annex.insmudgecleanfilter) $ do
bracket lockindex unlockindex go
where
withtmpdir = withTmpDirIn
(fromRawFilePath $ Git.localGitDir r)
(toOsPath "annexindex")
(Git.localGitDir r)
(literalOsPath "annexindex")
isunmodified tsd f orig =
genInodeCache f tsd >>= return . \case
@ -325,7 +330,7 @@ restagePointerFiles r = unlessM (Annex.getState Annex.insmudgecleanfilter) $ do
ck = ConfigKey "filter.annex.process"
ckd = ConfigKey "filter.annex.process-temp-disabled"
unableToRestage :: Maybe RawFilePath -> StringContainingQuotedPath
unableToRestage :: Maybe OsPath -> StringContainingQuotedPath
unableToRestage mf =
"git status will show " <> maybe "some files" QuotedPath mf
<> " to be modified, since content availability has changed"
@ -361,7 +366,8 @@ parseLinkTargetOrPointer' b =
Nothing -> Right Nothing
where
parsekey l
| isLinkToAnnex l = fileKey $ snd $ S8.breakEnd pathsep l
| isLinkToAnnex l = fileKey $ toOsPath $
snd $ S8.breakEnd pathsep l
| otherwise = Nothing
restvalid r
@ -400,9 +406,9 @@ parseLinkTargetOrPointerLazy' b =
in parseLinkTargetOrPointer' (L.toStrict b')
formatPointer :: Key -> S.ByteString
formatPointer k = prefix <> keyFile k <> nl
formatPointer k = fromOsPath prefix <> fromOsPath (keyFile k) <> nl
where
prefix = toInternalGitPath $ P.pathSeparator `S.cons` objectDir
prefix = toInternalGitPath $ pathSeparator `OS.cons` objectDir
nl = S8.singleton '\n'
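For context, a pointer file's content for a hypothetical key would look roughly like the following (key name invented; only the prefix and trailing newline come from the code above):
-- formatPointer key == "/annex/objects/SHA256E-s42--d0cf11e0deadbeef\n"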
{- Maximum size of a file that could be a pointer to a key.
@ -434,21 +440,21 @@ maxSymlinkSz = 8192
- an object that looks like a pointer file. Or that a non-annex
- symlink does. Avoids a false positive in those cases.
- -}
isPointerFile :: RawFilePath -> IO (Maybe Key)
isPointerFile :: OsPath -> IO (Maybe Key)
isPointerFile f = catchDefaultIO Nothing $
#if defined(mingw32_HOST_OS)
F.withFile (toOsPath f) ReadMode readhandle
F.withFile f ReadMode readhandle
#else
#if MIN_VERSION_unix(2,8,0)
let open = do
fd <- openFd (fromRawFilePath f) ReadOnly
fd <- openFd (fromOsPath f) ReadOnly
(defaultFileFlags { nofollow = True })
fdToHandle fd
in bracket open hClose readhandle
#else
ifM (isSymbolicLink <$> R.getSymbolicLinkStatus f)
ifM (isSymbolicLink <$> R.getSymbolicLinkStatus (toRawFilePath f))
( return Nothing
, F.withFile (toOsPath f) ReadMode readhandle
, F.withFile f ReadMode readhandle
)
#endif
#endif
@ -463,13 +469,13 @@ isPointerFile f = catchDefaultIO Nothing $
- than .git to be used.
-}
isLinkToAnnex :: S.ByteString -> Bool
isLinkToAnnex s = p `S.isInfixOf` s
isLinkToAnnex s = p `OS.isInfixOf` (toOsPath s)
#ifdef mingw32_HOST_OS
-- '/' is used inside pointer files on Windows, not the native '\'
|| p' `S.isInfixOf` s
|| p' `OS.isInfixOf` s
#endif
where
p = P.pathSeparator `S.cons` objectDir
p = pathSeparator `OS.cons` objectDir
#ifdef mingw32_HOST_OS
p' = toInternalGitPath p
#endif


@ -387,7 +387,7 @@ gitAnnexFsckDir u r mc = case annexDbDir =<< mc of
Nothing -> go (gitAnnexDir r)
Just d -> go d
where
go d = d </> literalOsPath "fsck" </> uuidPath u
go d = d </> literalOsPath "fsck" </> fromUUID u
{- used to store information about incremental fscks. -}
gitAnnexFsckState :: UUID -> Git.Repo -> OsPath
@ -408,7 +408,7 @@ gitAnnexFsckDbLock u r c = gitAnnexFsckDir u r (Just c) </> literalOsPath "fsck.
{- .git/annex/fsckresults/uuid is used to store results of git fscks -}
gitAnnexFsckResultsLog :: UUID -> Git.Repo -> OsPath
gitAnnexFsckResultsLog u r =
gitAnnexDir r </> literalOsPath "fsckresults" </> uuidPath u
gitAnnexDir r </> literalOsPath "fsckresults" </> fromUUID u
{- .git/annex/upgrade.log is used to record repository version upgrades. -}
gitAnnexUpgradeLog :: Git.Repo -> OsPath
@ -476,7 +476,7 @@ gitAnnexExportDir r c = fromMaybe (gitAnnexDir r) (annexDbDir c)
{- Directory containing database used to record export info. -}
gitAnnexExportDbDir :: UUID -> Git.Repo -> GitConfig -> OsPath
gitAnnexExportDbDir u r c =
gitAnnexExportDir r c </> uuidPath u </> literalOsPath "exportdb"
gitAnnexExportDir r c </> fromUUID u </> literalOsPath "exportdb"
{- Lock file for export database. -}
gitAnnexExportLock :: UUID -> Git.Repo -> GitConfig -> OsPath
@ -491,7 +491,7 @@ gitAnnexExportUpdateLock u r c = gitAnnexExportDbDir u r c <> literalOsPath ".up
- remote, but were excluded by its preferred content settings. -}
gitAnnexExportExcludeLog :: UUID -> Git.Repo -> OsPath
gitAnnexExportExcludeLog u r = gitAnnexDir r
</> literalOsPath "export.ex" </> uuidPath u
</> literalOsPath "export.ex" </> fromUUID u
{- Directory containing database used to record remote content ids.
-
@ -516,7 +516,7 @@ gitAnnexImportDir r c =
{- File containing state about the last import done from a remote. -}
gitAnnexImportLog :: UUID -> Git.Repo -> GitConfig -> OsPath
gitAnnexImportLog u r c =
gitAnnexImportDir r c </> uuidPath u </> literalOsPath "log"
gitAnnexImportDir r c </> fromUUID u </> literalOsPath "log"
{- Directory containing database used by importfeed. -}
gitAnnexImportFeedDbDir :: Git.Repo -> GitConfig -> OsPath


@ -7,20 +7,17 @@
module Annex.Multicast where
import Common
import Annex.Path
import Utility.Env
import Utility.PartialPrelude
import System.Process
import System.IO
import GHC.IO.Handle.FD
import Control.Applicative
import Prelude
multicastReceiveEnv :: String
multicastReceiveEnv = "GIT_ANNEX_MULTICAST_RECEIVE"
multicastCallbackEnv :: IO (FilePath, [(String, String)], Handle)
multicastCallbackEnv :: IO (OsPath, [(String, String)], Handle)
multicastCallbackEnv = do
gitannex <- programPath
-- This will even work on Windows


@ -40,18 +40,18 @@ import qualified Data.Map as M
- git-annex-shell or git-remote-annex, this finds a git-annex program
- instead.
-}
programPath :: IO FilePath
programPath :: IO OsPath
programPath = go =<< getEnv "GIT_ANNEX_DIR"
where
go (Just dir) = do
name <- reqgitannex <$> getProgName
return (dir </> name)
return (toOsPath dir </> toOsPath name)
go Nothing = do
name <- getProgName
exe <- if isgitannex name
then getExecutablePath
else pure "git-annex"
p <- if isAbsolute exe
p <- if isAbsolute (toOsPath exe)
then return exe
else fromMaybe exe <$> readProgramFile
maybe cannotFindProgram return =<< searchPath p
@ -65,12 +65,12 @@ programPath = go =<< getEnv "GIT_ANNEX_DIR"
readProgramFile :: IO (Maybe FilePath)
readProgramFile = catchDefaultIO Nothing $ do
programfile <- programFile
headMaybe . lines <$> readFile programfile
headMaybe . lines <$> readFile (fromOsPath programfile)
cannotFindProgram :: IO a
cannotFindProgram = do
f <- programFile
giveup $ "cannot find git-annex program in PATH or in " ++ f
giveup $ "cannot find git-annex program in PATH or in " ++ fromOsPath f
{- Runs a git-annex child process.
-
@ -88,7 +88,7 @@ gitAnnexChildProcess
gitAnnexChildProcess subcmd ps f a = do
cmd <- liftIO programPath
ps' <- gitAnnexChildProcessParams subcmd ps
pidLockChildProcess cmd ps' f a
pidLockChildProcess (fromOsPath cmd) ps' f a
{- Parameters to pass to a git-annex child process to run a subcommand
- with some parameters.


@ -31,7 +31,7 @@ addCommand commonparams command params files = do
store =<< flushWhenFull =<<
(Git.Queue.addCommand commonparams command params files q =<< gitRepo)
addFlushAction :: Git.Queue.FlushActionRunner Annex -> [RawFilePath] -> Annex ()
addFlushAction :: Git.Queue.FlushActionRunner Annex -> [OsPath] -> Annex ()
addFlushAction runner files = do
q <- get
store =<< flushWhenFull =<<


@ -21,8 +21,6 @@ import Utility.Tmp
import Utility.Tmp.Dir
import Utility.Directory.Create
import qualified System.FilePath.ByteString as P
{- replaceFile on a file located inside the gitAnnexDir. -}
replaceGitAnnexDirFile :: OsPath -> (OsPath -> Annex a) -> Annex a
replaceGitAnnexDirFile = replaceFile createAnnexDirectory


@ -23,8 +23,6 @@ import Utility.PID
import Control.Concurrent
import Text.Read
import Data.Time.Clock.POSIX
import qualified Utility.RawFilePath as R
import qualified System.FilePath.ByteString as P
{- Called when a location log change is journalled, so the LiveUpdate
- is done. This is called with the journal still locked, so no concurrent
@ -146,12 +144,11 @@ checkStaleSizeChanges :: RepoSizeHandle -> Annex ()
checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
livedir <- calcRepo' gitAnnexRepoSizeLiveDir
pid <- liftIO getPID
let pidlockfile = show pid
let pidlockfile = toOsPath (show pid)
now <- liftIO getPOSIXTime
liftIO (takeMVar livev) >>= \case
Nothing -> do
lck <- takeExclusiveLock $
livedir P.</> toRawFilePath pidlockfile
lck <- takeExclusiveLock $ livedir </> pidlockfile
go livedir lck pidlockfile now
Just v@(lck, lastcheck)
| now >= lastcheck + 60 ->
@ -161,11 +158,11 @@ checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
where
go livedir lck pidlockfile now = do
void $ tryNonAsync $ do
lockfiles <- liftIO $ filter (not . dirCruft . toRawFilePath)
<$> getDirectoryContents (fromRawFilePath livedir)
lockfiles <- liftIO $ filter (`notElem` dirCruft)
<$> getDirectoryContents livedir
stale <- forM lockfiles $ \lockfile ->
if (lockfile /= pidlockfile)
then case readMaybe lockfile of
then case readMaybe (fromOsPath lockfile) of
Nothing -> return Nothing
Just pid -> checkstale livedir lockfile pid
else return Nothing
@ -176,7 +173,7 @@ checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
liftIO $ putMVar livev (Just (lck, now))
checkstale livedir lockfile pid =
let f = livedir P.</> toRawFilePath lockfile
let f = livedir </> lockfile
in trySharedLock f >>= \case
Nothing -> return Nothing
Just lck -> do
@ -184,6 +181,6 @@ checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
( StaleSizeChanger (SizeChangeProcessId pid)
, do
dropLock lck
removeWhenExistsWith R.removeLink f
removeWhenExistsWith removeFile f
)
checkStaleSizeChanges (RepoSizeHandle Nothing _) = noop


@ -40,14 +40,14 @@ import Types.Concurrency
import Git.Env
import Git.Ssh
import qualified Utility.RawFilePath as R
import qualified Utility.OsString as OS
import Annex.Perms
#ifndef mingw32_HOST_OS
import Annex.LockPool
#endif
import Control.Concurrent.STM
import qualified Data.ByteString as S
import qualified System.FilePath.ByteString as P
import qualified Data.ByteString.Short as SBS
{- Some ssh commands are fed stdin on a pipe and so should be allowed to
- consume it. But ssh commands that are not piped stdin should generally
@ -101,15 +101,15 @@ consumeStdinParams NoConsumeStdin = [Param "-n"]
{- Returns a filename to use for a ssh connection caching socket, and
- parameters to enable ssh connection caching. -}
sshCachingInfo :: (SshHost, Maybe Integer) -> Annex (Maybe RawFilePath, [CommandParam])
sshCachingInfo :: (SshHost, Maybe Integer) -> Annex (Maybe OsPath, [CommandParam])
sshCachingInfo (host, port) = go =<< sshCacheDir'
where
go (Right dir) =
liftIO (bestSocketPath $ dir P.</> hostport2socket host port) >>= return . \case
liftIO (bestSocketPath $ dir </> hostport2socket host port) >>= return . \case
Nothing -> (Nothing, [])
Just socketfile ->
(Just socketfile
, sshConnectionCachingParams (fromRawFilePath socketfile)
, sshConnectionCachingParams (fromOsPath socketfile)
)
-- No connection caching with concurrency is not a good
-- combination, so warn the user.
@ -137,10 +137,10 @@ sshCachingInfo (host, port) = go =<< sshCacheDir'
- file.
-
- If no path can be constructed that is a valid socket, returns Nothing. -}
bestSocketPath :: RawFilePath -> IO (Maybe RawFilePath)
bestSocketPath :: OsPath -> IO (Maybe OsPath)
bestSocketPath abssocketfile = do
relsocketfile <- liftIO $ relPathCwdToFile abssocketfile
let socketfile = if S.length abssocketfile <= S.length relsocketfile
let socketfile = if OS.length abssocketfile <= OS.length relsocketfile
then abssocketfile
else relsocketfile
return $ if valid_unix_socket_path socketfile sshgarbagelen
@ -167,10 +167,10 @@ sshSocketDirEnv = "GIT_ANNEX_SSH_SOCKET_DIR"
-
- The directory will be created if it does not exist.
-}
sshCacheDir :: Annex (Maybe RawFilePath)
sshCacheDir :: Annex (Maybe OsPath)
sshCacheDir = eitherToMaybe <$> sshCacheDir'
sshCacheDir' :: Annex (Either String RawFilePath)
sshCacheDir' :: Annex (Either String OsPath)
sshCacheDir' =
ifM (fromMaybe BuildInfo.sshconnectioncaching . annexSshCaching <$> Annex.getGitConfig)
( ifM crippledFileSystem
@ -191,9 +191,9 @@ sshCacheDir' =
gettmpdir = liftIO $ getEnv sshSocketDirEnv
usetmpdir tmpdir = do
let socktmp = tmpdir </> "ssh"
let socktmp = toOsPath tmpdir </> literalOsPath "ssh"
createDirectoryIfMissing True socktmp
return (toRawFilePath socktmp)
return socktmp
crippledfswarning = unwords
[ "This repository is on a crippled filesystem, so unix named"
@ -216,7 +216,7 @@ portParams (Just port) = [Param "-p", Param $ show port]
- Locks the socket lock file to prevent other git-annex processes from
- stopping the ssh multiplexer on this socket.
-}
prepSocket :: RawFilePath -> SshHost -> [CommandParam] -> Annex ()
prepSocket :: OsPath -> SshHost -> [CommandParam] -> Annex ()
prepSocket socketfile sshhost sshparams = do
-- There could be stale ssh connections hanging around
-- from a previous git-annex run that was interrupted.
@ -288,11 +288,11 @@ prepSocket socketfile sshhost sshparams = do
- and this check makes such files be skipped since the corresponding lock
- file won't exist.
-}
enumSocketFiles :: Annex [RawFilePath]
enumSocketFiles :: Annex [OsPath]
enumSocketFiles = liftIO . go =<< sshCacheDir
where
go Nothing = return []
go (Just dir) = filterM (R.doesPathExist . socket2lock)
go (Just dir) = filterM (R.doesPathExist . fromOsPath . socket2lock)
=<< filter (not . isLock)
<$> catchDefaultIO [] (dirContents dir)
@ -326,45 +326,45 @@ sshCleanup = mapM_ cleanup =<< enumSocketFiles
forceSshCleanup :: Annex ()
forceSshCleanup = mapM_ forceStopSsh =<< enumSocketFiles
forceStopSsh :: RawFilePath -> Annex ()
forceStopSsh :: OsPath -> Annex ()
forceStopSsh socketfile = withNullHandle $ \nullh -> do
let (dir, base) = splitFileName (fromRawFilePath socketfile)
let (dir, base) = splitFileName socketfile
let p = (proc "ssh" $ toCommand $
[ Param "-O", Param "stop" ] ++
sshConnectionCachingParams base ++
sshConnectionCachingParams (fromOsPath base) ++
[Param "localhost"])
{ cwd = Just dir
{ cwd = Just (fromOsPath dir)
-- "ssh -O stop" is noisy on stderr even with -q
, std_out = UseHandle nullh
, std_err = UseHandle nullh
}
void $ liftIO $ catchMaybeIO $ withCreateProcess p $ \_ _ _ pid ->
forceSuccessProcess p pid
liftIO $ removeWhenExistsWith R.removeLink socketfile
liftIO $ removeWhenExistsWith R.removeLink (fromOsPath socketfile)
{- This needs to be as short as possible, due to limitations on the length
- of the path to a socket file. At the same time, it needs to be unique
- for each host.
-}
hostport2socket :: SshHost -> Maybe Integer -> RawFilePath
hostport2socket :: SshHost -> Maybe Integer -> OsPath
hostport2socket host Nothing = hostport2socket' $ fromSshHost host
hostport2socket host (Just port) = hostport2socket' $
fromSshHost host ++ "!" ++ show port
hostport2socket' :: String -> RawFilePath
hostport2socket' :: String -> OsPath
hostport2socket' s
| length s > lengthofmd5s = toRawFilePath $ show $ md5 $ encodeBL s
| otherwise = toRawFilePath s
| length s > lengthofmd5s = toOsPath $ show $ md5 $ encodeBL s
| otherwise = toOsPath s
where
lengthofmd5s = 32
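A rough illustration of the shortening rule above (hostnames invented):
-- hostport2socket' "example.com!22"
--   is short enough (14 characters) to be used as-is
-- hostport2socket' "a-rather-long-hostname.internal.example.org!2222"
--   exceeds 32 characters, so its 32-character md5 string is used instead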
socket2lock :: RawFilePath -> RawFilePath
socket2lock :: OsPath -> OsPath
socket2lock socket = socket <> lockExt
isLock :: RawFilePath -> Bool
isLock f = lockExt `S.isSuffixOf` f
isLock :: OsPath -> Bool
isLock f = lockExt `OS.isSuffixOf` f
lockExt :: S.ByteString
lockExt = ".lock"
lockExt :: OsPath
lockExt = literalOsPath ".lock"
{- This is the size of the sun_path component of sockaddr_un, which
- is the limit to the total length of the filename of a unix socket.
@ -376,8 +376,9 @@ sizeof_sockaddr_un_sun_path = 100
{- Note that this looks at the true length of the path in bytes, as it will
- appear on disk. -}
valid_unix_socket_path :: RawFilePath -> Int -> Bool
valid_unix_socket_path f n = S.length f + n < sizeof_sockaddr_un_sun_path
valid_unix_socket_path :: OsPath -> Int -> Bool
valid_unix_socket_path f n =
SBS.length (fromOsPath f) + n < sizeof_sockaddr_un_sun_path
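A worked check of the length limit above (path and slack value invented):
-- valid_unix_socket_path (literalOsPath ".git/annex/ssh/example.com!22") 17 == True
--   since 29 + 17 < 100; with the same slack, a socket path of 83 bytes or more would fail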
{- Parses the SSH port, and returns the other OpenSSH options. If
- several ports are found, the last one takes precedence. -}
@ -463,7 +464,7 @@ sshOptionsTo remote gc localr
liftIO $ do
localr' <- addGitEnv localr sshOptionsEnv
(toSshOptionsEnv sshopts)
addGitEnv localr' gitSshEnv command
addGitEnv localr' gitSshEnv (fromOsPath command)
runSshOptions :: [String] -> String -> IO ()
runSshOptions args s = do


@ -43,7 +43,7 @@ data RunTransferrer = RunTransferrer String [CommandParam] BatchCommandMaker
mkRunTransferrer :: BatchCommandMaker -> Annex RunTransferrer
mkRunTransferrer batchmaker = RunTransferrer
<$> liftIO programPath
<$> liftIO (fromOsPath <$> programPath)
<*> gitAnnexChildProcessParams "transferrer" []
<*> pure batchmaker