more OsPath conversion

Sponsored-by: Brock Spratlen
Joey Hess 2025-02-01 11:54:19 -04:00
parent c69e57aede
commit 474cf3bc8b
38 changed files with 342 additions and 330 deletions


@@ -313,7 +313,7 @@ updateTo' pairs = do
 - transitions that have not been applied to all refs will be applied on
 - the fly.
 -}
-get :: RawFilePath -> Annex L.ByteString
+get :: OsPath -> Annex L.ByteString
 get file = do
 	st <- update
 	case getCache file st of
@@ -353,7 +353,7 @@ getUnmergedRefs = unmergedRefs <$> update
 - using some optimised method. The journal has to be checked, in case
 - it has a newer version of the file that has not reached the branch yet.
 -}
-precache :: RawFilePath -> L.ByteString -> Annex ()
+precache :: OsPath -> L.ByteString -> Annex ()
 precache file branchcontent = do
 	st <- getState
 	content <- if journalIgnorable st
@@ -369,12 +369,12 @@ precache file branchcontent = do
 - reflect changes in remotes.
 - (Changing the value this returns, and then merging is always the
 - same as using get, and then changing its value.) -}
-getLocal :: RawFilePath -> Annex L.ByteString
+getLocal :: OsPath -> Annex L.ByteString
 getLocal = getLocal' (GetPrivate True)
-getLocal' :: GetPrivate -> RawFilePath -> Annex L.ByteString
+getLocal' :: GetPrivate -> OsPath -> Annex L.ByteString
 getLocal' getprivate file = do
-	fastDebug "Annex.Branch" ("read " ++ fromRawFilePath file)
+	fastDebug "Annex.Branch" ("read " ++ fromOsPath file)
 	go =<< getJournalFileStale getprivate file
   where
 	go NoJournalledContent = getRef fullname file
@@ -384,14 +384,14 @@ getLocal' getprivate file = do
 		return (v <> journalcontent)
 {- Gets the content of a file as staged in the branch's index. -}
-getStaged :: RawFilePath -> Annex L.ByteString
+getStaged :: OsPath -> Annex L.ByteString
 getStaged = getRef indexref
   where
 	-- This makes git cat-file be run with ":file",
 	-- so it looks at the index.
 	indexref = Ref ""
-getHistorical :: RefDate -> RawFilePath -> Annex L.ByteString
+getHistorical :: RefDate -> OsPath -> Annex L.ByteString
 getHistorical date file =
 	-- This check avoids some ugly error messages when the reflog
 	-- is empty.
@@ -400,7 +400,7 @@ getHistorical date file =
 	, getRef (Git.Ref.dateRef fullname date) file
 	)
-getRef :: Ref -> RawFilePath -> Annex L.ByteString
+getRef :: Ref -> OsPath -> Annex L.ByteString
 getRef ref file = withIndex $ catFile ref file
 {- Applies a function to modify the content of a file.
@@ -408,7 +408,7 @@ getRef ref file = withIndex $ catFile ref file
 - Note that this does not cause the branch to be merged, it only
 - modifies the current content of the file on the branch.
 -}
-change :: Journalable content => RegardingUUID -> RawFilePath -> (L.ByteString -> content) -> Annex ()
+change :: Journalable content => RegardingUUID -> OsPath -> (L.ByteString -> content) -> Annex ()
 change ru file f = lockJournal $ \jl -> f <$> getToChange ru file >>= set jl ru file
 {- Applies a function which can modify the content of a file, or not.
@@ -416,7 +416,7 @@ change ru file f = lockJournal $ \jl -> f <$> getToChange ru file >>= set jl ru
 - When the file was modified, runs the onchange action, and returns
 - True. The action is run while the journal is still locked,
 - so another concurrent call to this cannot happen while it is running. -}
-maybeChange :: Journalable content => RegardingUUID -> RawFilePath -> (L.ByteString -> Maybe content) -> Annex () -> Annex Bool
+maybeChange :: Journalable content => RegardingUUID -> OsPath -> (L.ByteString -> Maybe content) -> Annex () -> Annex Bool
 maybeChange ru file f onchange = lockJournal $ \jl -> do
 	v <- getToChange ru file
 	case f v of
@@ -449,7 +449,7 @@ data ChangeOrAppend t = Change t | Append t
 - state that would confuse the older version. This is planned to be
 - changed in a future repository version.
 -}
-changeOrAppend :: Journalable content => RegardingUUID -> RawFilePath -> (L.ByteString -> ChangeOrAppend content) -> Annex ()
+changeOrAppend :: Journalable content => RegardingUUID -> OsPath -> (L.ByteString -> ChangeOrAppend content) -> Annex ()
 changeOrAppend ru file f = lockJournal $ \jl ->
 	checkCanAppendJournalFile jl ru file >>= \case
 		Just appendable -> ifM (annexAlwaysCompact <$> Annex.getGitConfig)
@@ -481,7 +481,7 @@ changeOrAppend ru file f = lockJournal $ \jl ->
 			oldc <> journalableByteString toappend
 {- Only get private information when the RegardingUUID is itself private. -}
-getToChange :: RegardingUUID -> RawFilePath -> Annex L.ByteString
+getToChange :: RegardingUUID -> OsPath -> Annex L.ByteString
 getToChange ru f = flip getLocal' f . GetPrivate =<< regardingPrivateUUID ru
 {- Records new content of a file into the journal.
@@ -493,11 +493,11 @@ getToChange ru f = flip getLocal' f . GetPrivate =<< regardingPrivateUUID ru
 - git-annex index, and should not be written to the public git-annex
 - branch.
 -}
-set :: Journalable content => JournalLocked -> RegardingUUID -> RawFilePath -> content -> Annex ()
+set :: Journalable content => JournalLocked -> RegardingUUID -> OsPath -> content -> Annex ()
 set jl ru f c = do
 	journalChanged
 	setJournalFile jl ru f c
-	fastDebug "Annex.Branch" ("set " ++ fromRawFilePath f)
+	fastDebug "Annex.Branch" ("set " ++ fromOsPath f)
 	-- Could cache the new content, but it would involve
 	-- evaluating a Journalable Builder twice, which is not very
 	-- efficient. Instead, assume that it's not common to need to read
@@ -505,11 +505,11 @@ set jl ru f c = do
 	invalidateCache f
 {- Appends content to the journal file. -}
-append :: Journalable content => JournalLocked -> RawFilePath -> AppendableJournalFile -> content -> Annex ()
+append :: Journalable content => JournalLocked -> OsPath -> AppendableJournalFile -> content -> Annex ()
 append jl f appendable toappend = do
 	journalChanged
 	appendJournalFile jl appendable toappend
-	fastDebug "Annex.Branch" ("append " ++ fromRawFilePath f)
+	fastDebug "Annex.Branch" ("append " ++ fromOsPath f)
 	invalidateCache f
 {- Commit message used when making a commit of whatever data has changed
@@ -611,7 +611,7 @@ commitIndex' jl branchref message basemessage retrynum parents = do
 - not been merged in, returns Nothing, because it's not possible to
 - efficiently handle that.
 -}
-files :: Annex (Maybe ([RawFilePath], IO Bool))
+files :: Annex (Maybe ([OsPath], IO Bool))
 files = do
 	st <- update
 	if not (null (unmergedRefs st))
@@ -629,10 +629,10 @@ files = do
 {- Lists all files currently in the journal, but not files in the private
 - journal. -}
-journalledFiles :: Annex [RawFilePath]
+journalledFiles :: Annex [OsPath]
 journalledFiles = getJournalledFilesStale gitAnnexJournalDir
-journalledFilesPrivate :: Annex [RawFilePath]
+journalledFilesPrivate :: Annex [OsPath]
 journalledFilesPrivate = ifM privateUUIDsKnown
 	( getJournalledFilesStale gitAnnexPrivateJournalDir
 	, return []
@@ -640,10 +640,10 @@ journalledFilesPrivate = ifM privateUUIDsKnown
 {- Files in the branch, not including any from journalled changes,
 - and without updating the branch. -}
-branchFiles :: Annex ([RawFilePath], IO Bool)
+branchFiles :: Annex ([OsPath], IO Bool)
 branchFiles = withIndex $ inRepo branchFiles'
-branchFiles' :: Git.Repo -> IO ([RawFilePath], IO Bool)
+branchFiles' :: Git.Repo -> IO ([OsPath], IO Bool)
 branchFiles' = Git.Command.pipeNullSplit' $
 	lsTreeParams Git.LsTree.LsTreeRecursive (Git.LsTree.LsTreeLong False)
 	fullname
@@ -690,7 +690,7 @@ withIndex' :: Bool -> Annex a -> Annex a
 withIndex' bootstrapping a = withIndexFile AnnexIndexFile $ \f -> do
 	checkIndexOnce $ unlessM (liftIO $ doesFileExist f) $ do
 		unless bootstrapping create
-		createAnnexDirectory $ toRawFilePath $ takeDirectory f
+		createAnnexDirectory $ toOsPath $ takeDirectory f
 		unless bootstrapping $ inRepo genIndex
 	a
@@ -748,7 +748,7 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do
 		Git.UpdateIndex.streamUpdateIndex g
 			[genstream dir h jh jlogh]
 	commitindex
-	liftIO $ cleanup (fromRawFilePath dir) jlogh jlogf
+	liftIO $ cleanup (fromOsPath dir) jlogh jlogf
   where
 	genstream dir h jh jlogh streamer = readDirectory jh >>= \case
 		Nothing -> return ()
@@ -999,7 +999,7 @@ data UnmergedBranches t
 	= UnmergedBranches t
 	| NoUnmergedBranches t
-type FileContents t b = Maybe (t, RawFilePath, Maybe (L.ByteString, Maybe b))
+type FileContents t b = Maybe (t, OsPath, Maybe (L.ByteString, Maybe b))
 {- Runs an action on the content of selected files from the branch.
 - This is much faster than reading the content of each file in turn,
@@ -1022,7 +1022,7 @@ overBranchFileContents
 	-- the callback can be run more than once on the same filename,
 	-- and in this case it's also possible for the callback to be
 	-- passed some of the same file content repeatedly.
-	-> (RawFilePath -> Maybe v)
+	-> (OsPath -> Maybe v)
 	-> (Annex (FileContents v Bool) -> Annex a)
 	-> Annex (UnmergedBranches (a, Git.Sha))
 overBranchFileContents ignorejournal select go = do
@@ -1036,7 +1036,7 @@ overBranchFileContents ignorejournal select go = do
 		else NoUnmergedBranches v
 overBranchFileContents'
-	:: (RawFilePath -> Maybe v)
+	:: (OsPath -> Maybe v)
 	-> (Annex (FileContents v Bool) -> Annex a)
 	-> BranchState
 	-> Annex (a, Git.Sha)
@@ -1086,11 +1086,11 @@ combineStaleJournalWithBranch branchcontent journalledcontent =
 - files.
 -}
 overJournalFileContents
-	:: (RawFilePath -> L.ByteString -> Annex (L.ByteString, Maybe b))
+	:: (OsPath -> L.ByteString -> Annex (L.ByteString, Maybe b))
 	-- ^ Called with the journalled file content when the journalled
 	-- content may be stale or lack information committed to the
 	-- git-annex branch.
-	-> (RawFilePath -> Maybe v)
+	-> (OsPath -> Maybe v)
 	-> (Annex (FileContents v b) -> Annex a)
 	-> Annex a
 overJournalFileContents handlestale select go = do
@@ -1098,9 +1098,9 @@ overJournalFileContents handlestale select go = do
 	go $ overJournalFileContents' buf handlestale select
 overJournalFileContents'
-	:: MVar ([RawFilePath], [RawFilePath])
-	-> (RawFilePath -> L.ByteString -> Annex (L.ByteString, Maybe b))
-	-> (RawFilePath -> Maybe a)
+	:: MVar ([OsPath], [OsPath])
+	-> (OsPath -> L.ByteString -> Annex (L.ByteString, Maybe b))
+	-> (OsPath -> Maybe a)
 	-> Annex (FileContents a b)
 overJournalFileContents' buf handlestale select =
 	liftIO (tryTakeMVar buf) >>= \case
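Throughout these hunks the only change is the path type in the signatures, plus swapping the fromRawFilePath/toRawFilePath conversions for fromOsPath/toOsPath. For reference, this is roughly what working with OsPath looks like using only the public System.OsPath API from the filepath package (a minimal sketch; git-annex's own Utility.OsPath helpers are not part of this diff):

    {-# LANGUAGE QuasiQuotes #-}
    import System.OsPath (OsPath, osp, encodeUtf, decodeUtf, (</>), takeDirectory)

    demo :: IO ()
    demo = do
        -- String/FilePath -> OsPath (can fail on undecodable names, hence MonadThrow/IO)
        p <- encodeUtf "dir/sub/file"
        -- the usual filepath operations have OsPath counterparts
        print (takeDirectory p)
        print (p </> [osp|extra|])
        -- OsPath -> String/FilePath
        putStrLn =<< decodeUtf p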


@@ -118,7 +118,7 @@ enableInteractiveBranchAccess = changeState $ \s -> s
 	, journalIgnorable = False
 	}
-setCache :: RawFilePath -> L.ByteString -> Annex ()
+setCache :: OsPath -> L.ByteString -> Annex ()
 setCache file content = changeState $ \s -> s
 	{ cachedFileContents = add (cachedFileContents s) }
   where
@@ -126,7 +126,7 @@ setCache file content = changeState $ \s -> s
 		| length l < logFilesToCache = (file, content) : l
 		| otherwise = (file, content) : Prelude.init l
-getCache :: RawFilePath -> BranchState -> Maybe L.ByteString
+getCache :: OsPath -> BranchState -> Maybe L.ByteString
 getCache file state = go (cachedFileContents state)
   where
 	go [] = Nothing
@@ -134,7 +134,7 @@ getCache file state = go (cachedFileContents state)
 		| f == file && not (needInteractiveAccess state) = Just c
 		| otherwise = go rest
-invalidateCache :: RawFilePath -> Annex ()
+invalidateCache :: OsPath -> Annex ()
 invalidateCache f = changeState $ \s -> s
 	{ cachedFileContents = filter (\(f', _) -> f' /= f)
 		(cachedFileContents s)


@@ -45,11 +45,11 @@ import Types.AdjustedBranch
 import Types.CatFileHandles
 import Utility.ResourcePool
-catFile :: Git.Branch -> RawFilePath -> Annex L.ByteString
+catFile :: Git.Branch -> OsPath -> Annex L.ByteString
 catFile branch file = withCatFileHandle $ \h ->
 	liftIO $ Git.CatFile.catFile h branch file
-catFileDetails :: Git.Branch -> RawFilePath -> Annex (Maybe (L.ByteString, Sha, ObjectType))
+catFileDetails :: Git.Branch -> OsPath -> Annex (Maybe (L.ByteString, Sha, ObjectType))
 catFileDetails branch file = withCatFileHandle $ \h ->
 	liftIO $ Git.CatFile.catFileDetails h branch file
@@ -167,8 +167,8 @@ catKey' ref sz
 catKey' _ _ = return Nothing
 {- Gets a symlink target. -}
-catSymLinkTarget :: Sha -> Annex RawFilePath
-catSymLinkTarget sha = fromInternalGitPath . L.toStrict <$> get
+catSymLinkTarget :: Sha -> Annex OsPath
+catSymLinkTarget sha = fromInternalGitPath . toOsPath . L.toStrict <$> get
   where
 	-- Avoid buffering the whole file content, which might be large.
 	-- 8192 is enough if it really is a symlink.
@@ -195,25 +195,25 @@ catSymLinkTarget sha = fromInternalGitPath . toOsPath . L.toStrict <$> get
 -
 - So, this gets info from the index, unless running as a daemon.
 -}
-catKeyFile :: RawFilePath -> Annex (Maybe Key)
+catKeyFile :: OsPath -> Annex (Maybe Key)
 catKeyFile f = ifM (Annex.getState Annex.daemon)
 	( catKeyFileHEAD f
 	, maybe (pure Nothing) catKey =<< inRepo (Git.Ref.fileRef f)
 	)
-catKeyFileHEAD :: RawFilePath -> Annex (Maybe Key)
+catKeyFileHEAD :: OsPath -> Annex (Maybe Key)
 catKeyFileHEAD f = maybe (pure Nothing) catKey
 	=<< inRepo (Git.Ref.fileFromRef Git.Ref.headRef f)
 {- Look in the original branch from whence an adjusted branch is based
 - to find the file. But only when the adjustment hides some files. -}
-catKeyFileHidden :: RawFilePath -> CurrBranch -> Annex (Maybe Key)
+catKeyFileHidden :: OsPath -> CurrBranch -> Annex (Maybe Key)
 catKeyFileHidden = hiddenCat catKey
-catObjectMetaDataHidden :: RawFilePath -> CurrBranch -> Annex (Maybe (Sha, Integer, ObjectType))
+catObjectMetaDataHidden :: OsPath -> CurrBranch -> Annex (Maybe (Sha, Integer, ObjectType))
 catObjectMetaDataHidden = hiddenCat catObjectMetaData
-hiddenCat :: (Ref -> Annex (Maybe a)) -> RawFilePath -> CurrBranch -> Annex (Maybe a)
+hiddenCat :: (Ref -> Annex (Maybe a)) -> OsPath -> CurrBranch -> Annex (Maybe a)
 hiddenCat a f (Just origbranch, Just adj)
 	| adjustmentHidesFiles adj =
 		maybe (pure Nothing) a


@@ -19,13 +19,12 @@ import Utility.DataUnits
 import Utility.CopyFile
 import qualified Utility.RawFilePath as R
-import qualified System.FilePath.ByteString as P
 import System.PosixCompat.Files (linkCount)
 {- Runs the secure erase command if set, otherwise does nothing.
 - File may or may not be deleted at the end; caller is responsible for
 - making sure it's deleted. -}
-secureErase :: RawFilePath -> Annex ()
+secureErase :: OsPath -> Annex ()
 secureErase = void . runAnnexPathHook "%file"
 	secureEraseAnnexHook annexSecureEraseCommand
@@ -44,45 +43,48 @@ data LinkedOrCopied = Linked | Copied
 - execute bit will be set. The mode is not fully copied over because
 - git doesn't support file modes beyond execute.
 -}
-linkOrCopy :: Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
+linkOrCopy :: Key -> OsPath -> OsPath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
 linkOrCopy = linkOrCopy' (annexThin <$> Annex.getGitConfig)
-linkOrCopy' :: Annex Bool -> Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
+linkOrCopy' :: Annex Bool -> Key -> OsPath -> OsPath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
 linkOrCopy' canhardlink key src dest destmode = catchDefaultIO Nothing $
 	ifM canhardlink
-		( hardlink
+		( hardlinkorcopy
 		, copy =<< getstat
 		)
   where
-	hardlink = do
+	hardlinkorcopy = do
 		s <- getstat
 		if linkCount s > 1
 			then copy s
-			else liftIO (R.createLink src dest >> preserveGitMode dest destmode >> return (Just Linked))
-				`catchIO` const (copy s)
+			else hardlink `catchIO` const (copy s)
+	hardlink = liftIO $ do
+		R.createLink (fromOsPath src) (fromOsPath dest)
+		void $ preserveGitMode dest destmode
+		return (Just Linked)
 	copy s = ifM (checkedCopyFile' key src dest destmode s)
 		( return (Just Copied)
 		, return Nothing
 		)
-	getstat = liftIO $ R.getFileStatus src
+	getstat = liftIO $ R.getFileStatus (fromOsPath src)
 {- Checks disk space before copying. -}
-checkedCopyFile :: Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> Annex Bool
+checkedCopyFile :: Key -> OsPath -> OsPath -> Maybe FileMode -> Annex Bool
 checkedCopyFile key src dest destmode = catchBoolIO $
 	checkedCopyFile' key src dest destmode
-		=<< liftIO (R.getFileStatus src)
+		=<< liftIO (R.getFileStatus (fromOsPath src))
-checkedCopyFile' :: Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> FileStatus -> Annex Bool
+checkedCopyFile' :: Key -> OsPath -> OsPath -> Maybe FileMode -> FileStatus -> Annex Bool
 checkedCopyFile' key src dest destmode s = catchBoolIO $ do
 	sz <- liftIO $ getFileSize' src s
-	ifM (checkDiskSpace' sz (Just $ P.takeDirectory dest) key 0 True)
+	ifM (checkDiskSpace' sz (Just $ takeDirectory dest) key 0 True)
 		( liftIO $
-			copyFileExternal CopyAllMetaData (fromRawFilePath src) (fromRawFilePath dest)
+			copyFileExternal CopyAllMetaData src dest
 			<&&> preserveGitMode dest destmode
 		, return False
 		)
-preserveGitMode :: RawFilePath -> Maybe FileMode -> IO Bool
+preserveGitMode :: OsPath -> Maybe FileMode -> IO Bool
 preserveGitMode f (Just mode)
 	| isExecutable mode = catchBoolIO $ do
 		modifyFileMode f $ addModes executeModes
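The restructuring above pulls the hard link attempt out into its own hardlink action so that the IO-error fallback to copying wraps the whole attempt. The same pattern, reduced to a standalone sketch (names here are hypothetical, and the unix package's createLink is POSIX-only):

    import Control.Exception (IOException, catch)
    import System.Directory (copyFile)
    import System.Posix.Files (createLink)

    -- Try to hard link src to dest; on any IO error (cross-device link,
    -- permissions, ...) fall back to copying the file content instead.
    linkOrCopySketch :: FilePath -> FilePath -> IO ()
    linkOrCopySketch src dest = createLink src dest `catch` fallback
      where
        fallback :: IOException -> IO ()
        fallback _ = copyFile src dest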
@@ -100,12 +102,12 @@ preserveGitMode _ _ = return True
 - to be downloaded from the free space. This way, we avoid overcommitting
 - when doing concurrent downloads.
 -}
-checkDiskSpace :: Maybe FileSize -> Maybe RawFilePath -> Key -> Integer -> Bool -> Annex Bool
+checkDiskSpace :: Maybe FileSize -> Maybe OsPath -> Key -> Integer -> Bool -> Annex Bool
 checkDiskSpace msz destdir key = checkDiskSpace' sz destdir key
   where
 	sz = fromMaybe 1 (fromKey keySize key <|> msz)
-checkDiskSpace' :: FileSize -> Maybe RawFilePath -> Key -> Integer -> Bool -> Annex Bool
+checkDiskSpace' :: FileSize -> Maybe OsPath -> Key -> Integer -> Bool -> Annex Bool
 checkDiskSpace' sz destdir key alreadythere samefilesystem = ifM (Annex.getRead Annex.force)
 	( return True
 	, do
@@ -118,7 +120,7 @@ checkDiskSpace' sz destdir key alreadythere samefilesystem = ifM (Annex.getRead
 		inprogress <- if samefilesystem
 			then sizeOfDownloadsInProgress (/= key)
 			else pure 0
-		dir >>= liftIO . getDiskFree . fromRawFilePath >>= \case
+		dir >>= liftIO . getDiskFree . fromOsPath >>= \case
 			Just have -> do
 				reserve <- annexDiskReserve <$> Annex.getGitConfig
 				let delta = sz + reserve - have - alreadythere + inprogress
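The decision ultimately rests on the delta computed above; a minimal sketch of that arithmetic, with the force override, annex.diskreserve lookup, and error reporting around it omitted:

    -- True when the download would eat into the configured reserve,
    -- i.e. there is not enough free space for it.
    wouldOvercommit :: Integer -> Integer -> Integer -> Integer -> Integer -> Bool
    wouldOvercommit sz reserve have alreadythere inprogress =
        sz + reserve - have - alreadythere + inprogress > 0

    -- e.g. wouldOvercommit 100 1000 1050 0 0 == True  (50 units short)
    --      wouldOvercommit 100 1000 2000 0 0 == False (enough free space)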


@@ -30,12 +30,14 @@ import System.PosixCompat.Files (fileMode)
 -
 - Returns an InodeCache if it populated the pointer file.
 -}
-populatePointerFile :: Restage -> Key -> RawFilePath -> RawFilePath -> Annex (Maybe InodeCache)
+populatePointerFile :: Restage -> Key -> OsPath -> OsPath -> Annex (Maybe InodeCache)
 populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
   where
 	go (Just k') | k == k' = do
-		destmode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus f
-		liftIO $ removeWhenExistsWith R.removeLink f
+		let f' = fromOsPath f
+		destmode <- liftIO $ catchMaybeIO $
+			fileMode <$> R.getFileStatus f'
+		liftIO $ removeWhenExistsWith R.removeLink f'
 		(ic, populated) <- replaceWorkTreeFile f $ \tmp -> do
 			ok <- linkOrCopy k obj tmp destmode >>= \case
 				Just _ -> thawContent tmp >> return True
@@ -51,19 +53,20 @@ populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
 {- Removes the content from a pointer file, replacing it with a pointer.
 -
 - Does not check if the pointer file is modified. -}
-depopulatePointerFile :: Key -> RawFilePath -> Annex ()
+depopulatePointerFile :: Key -> OsPath -> Annex ()
 depopulatePointerFile key file = do
-	st <- liftIO $ catchMaybeIO $ R.getFileStatus file
+	let file' = fromOsPath file
+	st <- liftIO $ catchMaybeIO $ R.getFileStatus file'
 	let mode = fmap fileMode st
 	secureErase file
-	liftIO $ removeWhenExistsWith R.removeLink file
+	liftIO $ removeWhenExistsWith R.removeLink file'
 	ic <- replaceWorkTreeFile file $ \tmp -> do
 		liftIO $ writePointerFile tmp key mode
 #if ! defined(mingw32_HOST_OS)
 		-- Don't advance mtime; this avoids unnecessary re-smudging
 		-- by git in some cases.
 		liftIO $ maybe noop
-			(\t -> touch tmp t False)
+			(\t -> touch (fromOsPath tmp) t False)
 			(fmap Posix.modificationTimeHiRes st)
 #endif
 		withTSDelta (liftIO . genInodeCache tmp)


@@ -51,7 +51,7 @@ tryCopyCoW (CopyCoWTried copycowtried) src dest meterupdate =
 			-- CoW is known to work, so delete
 			-- dest if it exists in order to do a fast
 			-- CoW copy.
-			void $ tryIO $ removeFile dest
+			void $ tryIO $ removeFile dest'
 			docopycow
 		, return False
 		)
@@ -60,18 +60,18 @@ tryCopyCoW (CopyCoWTried copycowtried) src dest meterupdate =
 	docopycow = watchFileSize dest' meterupdate $ const $
 		copyCoW CopyTimeStamps src dest
-	dest' = toRawFilePath dest
+	dest' = toOsPath dest
 	-- Check if the dest file already exists, which would prevent
 	-- probing CoW. If the file exists but is empty, there's no benefit
 	-- to resuming from it when CoW does not work, so remove it.
 	destfilealreadypopulated =
-		tryIO (R.getFileStatus dest') >>= \case
+		tryIO (R.getFileStatus (toRawFilePath dest)) >>= \case
 			Left _ -> return False
 			Right st -> do
 				sz <- getFileSize' dest' st
 				if sz == 0
-					then tryIO (removeFile dest) >>= \case
+					then tryIO (removeFile dest') >>= \case
 						Right () -> return False
 						Left _ -> return True
 					else return True
@@ -111,14 +111,15 @@ fileCopier copycowtried src dest meterupdate iv =
 	docopy = do
 		-- The file might have had the write bit removed,
 		-- so make sure we can write to it.
-		void $ tryIO $ allowWrite dest'
+		void $ tryIO $ allowWrite (toOsPath dest)
 		withBinaryFile src ReadMode $ \hsrc ->
 			fileContentCopier hsrc dest meterupdate iv
 		-- Copy src mode and mtime.
 		mode <- fileMode <$> R.getFileStatus (toRawFilePath src)
-		mtime <- utcTimeToPOSIXSeconds <$> getModificationTime src
+		mtime <- utcTimeToPOSIXSeconds
+			<$> getModificationTime (toOsPath src)
 		R.setFileMode dest' mode
 		touch dest' mtime False


@@ -85,9 +85,9 @@ startExternalAddonProcess basecmd ps pid = do
 	runerr (Just cmd) =
 		return $ Left $ ProgramFailure $
-			"Cannot run " ++ cmd ++ " -- Make sure it's executable and that its dependencies are installed."
+			"Cannot run " ++ fromOsPath cmd ++ " -- Make sure it's executable and that its dependencies are installed."
 	runerr Nothing = do
-		path <- intercalate ":" <$> getSearchPath
+		path <- intercalate ":" . map fromOsPath <$> getSearchPath
 		return $ Left $ ProgramNotInstalled $
 			"Cannot run " ++ basecmd ++ " -- It is not installed in PATH (" ++ path ++ ")"


@@ -66,13 +66,13 @@ withIndexFile i = withAltRepo usecachedgitenv restoregitenv
 {- Runs an action using a different git work tree.
 -
 - Smudge and clean filters are disabled in this work tree. -}
-withWorkTree :: FilePath -> Annex a -> Annex a
+withWorkTree :: OsPath -> Annex a -> Annex a
 withWorkTree d a = withAltRepo
 	(\g -> return $ (g { location = modlocation (location g), gitGlobalOpts = gitGlobalOpts g ++ bypassSmudgeConfig }, ()))
 	(\g g' -> g' { location = location g, gitGlobalOpts = gitGlobalOpts g })
 	(const a)
  where
-	modlocation l@(Local {}) = l { worktree = Just (toRawFilePath d) }
+	modlocation l@(Local {}) = l { worktree = Just d }
 	modlocation _ = giveup "withWorkTree of non-local git repo"
 {- Runs an action with the git index file and HEAD, and a few other
@@ -83,13 +83,13 @@ withWorkTree d a = withAltRepo
 -
 - Needs git 2.2.0 or newer.
 -}
-withWorkTreeRelated :: FilePath -> Annex a -> Annex a
+withWorkTreeRelated :: OsPath -> Annex a -> Annex a
 withWorkTreeRelated d a = withAltRepo modrepo unmodrepo (const a)
  where
 	modrepo g = liftIO $ do
-		g' <- addGitEnv g "GIT_COMMON_DIR" . fromRawFilePath
+		g' <- addGitEnv g "GIT_COMMON_DIR" . fromOsPath
 			=<< absPath (localGitDir g)
-		g'' <- addGitEnv g' "GIT_DIR" d
+		g'' <- addGitEnv g' "GIT_DIR" (fromOsPath d)
 		return (g'' { gitEnvOverridesGitDir = True }, ())
 	unmodrepo g g' = g'
 		{ gitEnv = gitEnv g


@@ -28,7 +28,7 @@ hashObjectStop = maybe noop stop =<< Annex.getState Annex.hashobjecthandle
 	liftIO $ freeResourcePool p Git.HashObject.hashObjectStop
 	Annex.changeState $ \s -> s { Annex.hashobjecthandle = Nothing }
-hashFile :: RawFilePath -> Annex Sha
+hashFile :: OsPath -> Annex Sha
 hashFile f = withHashObjectHandle $ \h ->
 	liftIO $ Git.HashObject.hashFile h f


@@ -30,22 +30,22 @@ compareInodeCachesWith = ifM inodesChanged ( return Weakly, return Strongly )
 {- Checks if one of the provided old InodeCache matches the current
 - version of a file. -}
-sameInodeCache :: RawFilePath -> [InodeCache] -> Annex Bool
+sameInodeCache :: OsPath -> [InodeCache] -> Annex Bool
 sameInodeCache file [] = do
 	fastDebug "Annex.InodeSentinal" $
-		fromRawFilePath file ++ " inode cache empty"
+		fromOsPath file ++ " inode cache empty"
 	return False
 sameInodeCache file old = go =<< withTSDelta (liftIO . genInodeCache file)
  where
 	go Nothing = do
 		fastDebug "Annex.InodeSentinal" $
-			fromRawFilePath file ++ " not present, cannot compare with inode cache"
+			fromOsPath file ++ " not present, cannot compare with inode cache"
 		return False
 	go (Just curr) = ifM (elemInodeCaches curr old)
 		( return True
 		, do
 			fastDebug "Annex.InodeSentinal" $
-				fromRawFilePath file ++ " (" ++ show curr ++ ") does not match inode cache (" ++ show old ++ ")"
+				fromOsPath file ++ " (" ++ show curr ++ ") does not match inode cache (" ++ show old ++ ")"
 			return False
 		)
@@ -99,7 +99,7 @@ createInodeSentinalFile evenwithobjects =
 	alreadyexists = liftIO. sentinalFileExists =<< annexSentinalFile
 	hasobjects
 		| evenwithobjects = pure False
-		| otherwise = liftIO . doesDirectoryExist . fromRawFilePath
+		| otherwise = liftIO . doesDirectoryExist
 			=<< fromRepo gitAnnexObjectDir
 annexSentinalFile :: Annex SentinalFile


@@ -26,13 +26,12 @@ import Annex.LockFile
 import Annex.BranchState
 import Types.BranchState
 import Utility.Directory.Stream
-import qualified Utility.RawFilePath as R
 import qualified Utility.FileIO as F
+import qualified Utility.OsString as OS
 import qualified Data.Set as S
 import qualified Data.ByteString.Lazy as L
 import qualified Data.ByteString as B
-import qualified System.FilePath.ByteString as P
 import Data.ByteString.Builder
 import Data.Char
@@ -83,7 +82,7 @@ privateUUIDsKnown' = not . S.null . annexPrivateRepos . Annex.gitconfig
 - interrupted write truncating information that was earlier read from the
 - file, and so losing data.
 -}
-setJournalFile :: Journalable content => JournalLocked -> RegardingUUID -> RawFilePath -> content -> Annex ()
+setJournalFile :: Journalable content => JournalLocked -> RegardingUUID -> OsPath -> content -> Annex ()
 setJournalFile _jl ru file content = withOtherTmp $ \tmp -> do
 	st <- getState
 	jd <- fromRepo =<< ifM (regardingPrivateUUID ru)
@@ -92,10 +91,10 @@ setJournalFile _jl ru file content = withOtherTmp $ \tmp -> do
 		)
 	-- journal file is written atomically
 	let jfile = journalFile file
-	let tmpfile = tmp P.</> jfile
-	liftIO $ F.withFile (toOsPath tmpfile) WriteMode $ \h ->
+	let tmpfile = tmp </> jfile
+	liftIO $ F.withFile tmpfile WriteMode $ \h ->
 		writeJournalHandle h content
-	let dest = jd P.</> jfile
+	let dest = jd </> jfile
 	let mv = do
 		liftIO $ moveFile tmpfile dest
 		setAnnexFilePerm dest
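The "written atomically" comment above relies on the usual write-to-temp-then-rename pattern; a simplified sketch (git-annex's withOtherTmp, permission setup, and retry-on-missing-directory handling are left out):

    import System.Directory (renameFile)

    -- Readers either see the old journal file or the complete new one,
    -- never a partially written file, because the rename is atomic when
    -- the temporary file is on the same filesystem as the destination.
    atomicWriteSketch :: FilePath -> FilePath -> String -> IO ()
    atomicWriteSketch tmpfile destfile content = do
        writeFile tmpfile content
        renameFile tmpfile destfile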
@@ -103,20 +102,20 @@ setJournalFile _jl ru file content = withOtherTmp $ \tmp -> do
 	-- exists
 	mv `catchIO` (const (createAnnexDirectory jd >> mv))
-newtype AppendableJournalFile = AppendableJournalFile (RawFilePath, RawFilePath)
+newtype AppendableJournalFile = AppendableJournalFile (OsPath, OsPath)
 {- If the journal file does not exist, it cannot be appended to, because
 - that would overwrite whatever content the file has in the git-annex
 - branch. -}
-checkCanAppendJournalFile :: JournalLocked -> RegardingUUID -> RawFilePath -> Annex (Maybe AppendableJournalFile)
+checkCanAppendJournalFile :: JournalLocked -> RegardingUUID -> OsPath -> Annex (Maybe AppendableJournalFile)
 checkCanAppendJournalFile _jl ru file = do
 	st <- getState
 	jd <- fromRepo =<< ifM (regardingPrivateUUID ru)
 		( return (gitAnnexPrivateJournalDir st)
 		, return (gitAnnexJournalDir st)
 		)
-	let jfile = jd P.</> journalFile file
-	ifM (liftIO $ R.doesPathExist jfile)
+	let jfile = jd </> journalFile file
+	ifM (liftIO $ doesFileExist jfile)
 		( return (Just (AppendableJournalFile (jd, jfile)))
 		, return Nothing
 		)
@@ -134,7 +133,7 @@ checkCanAppendJournalFile _jl ru file = do
 -}
 appendJournalFile :: Journalable content => JournalLocked -> AppendableJournalFile -> content -> Annex ()
 appendJournalFile _jl (AppendableJournalFile (jd, jfile)) content = do
-	let write = liftIO $ F.withFile (toOsPath jfile) ReadWriteMode $ \h -> do
+	let write = liftIO $ F.withFile jfile ReadWriteMode $ \h -> do
 		sz <- hFileSize h
 		when (sz /= 0) $ do
 			hSeek h SeekFromEnd (-1)
@@ -161,7 +160,7 @@ data JournalledContent
 	-- information that were made after that journal file was written.
 {- Gets any journalled content for a file in the branch. -}
-getJournalFile :: JournalLocked -> GetPrivate -> RawFilePath -> Annex JournalledContent
+getJournalFile :: JournalLocked -> GetPrivate -> OsPath -> Annex JournalledContent
 getJournalFile _jl = getJournalFileStale
 data GetPrivate = GetPrivate Bool
@@ -179,7 +178,7 @@ data GetPrivate = GetPrivate Bool
 - (or is in progress when this is called), if the file content does not end
 - with a newline, it is truncated back to the previous newline.
 -}
-getJournalFileStale :: GetPrivate -> RawFilePath -> Annex JournalledContent
+getJournalFileStale :: GetPrivate -> OsPath -> Annex JournalledContent
 getJournalFileStale (GetPrivate getprivate) file = do
 	st <- Annex.getState id
 	let repo = Annex.repo st
@@ -205,7 +204,7 @@ getJournalFileStale (GetPrivate getprivate) file = do
 	jfile = journalFile file
 	getfrom d = catchMaybeIO $
 		discardIncompleteAppend . L.fromStrict
-			<$> F.readFile' (toOsPath (d P.</> jfile))
+			<$> F.readFile' (d </> jfile)
 -- Note that this forces read of the whole lazy bytestring.
 discardIncompleteAppend :: L.ByteString -> L.ByteString
@@ -224,18 +223,18 @@ discardIncompleteAppend v
 {- List of existing journal files in a journal directory, but without locking,
 - may miss new ones just being added, or may have false positives if the
 - journal is staged as it is run. -}
-getJournalledFilesStale :: (BranchState -> Git.Repo -> RawFilePath) -> Annex [RawFilePath]
+getJournalledFilesStale :: (BranchState -> Git.Repo -> OsPath) -> Annex [OsPath]
 getJournalledFilesStale getjournaldir = do
 	bs <- getState
 	repo <- Annex.gitRepo
 	let d = getjournaldir bs repo
 	fs <- liftIO $ catchDefaultIO [] $
-		getDirectoryContents (fromRawFilePath d)
-	return $ filter (`notElem` [".", ".."]) $
-		map (fileJournal . toRawFilePath) fs
+		getDirectoryContents d
+	return $ filter (`notElem` dirCruft) $
+		map fileJournal fs
 {- Directory handle open on a journal directory. -}
-withJournalHandle :: (BranchState -> Git.Repo -> RawFilePath) -> (DirectoryHandle -> IO a) -> Annex a
+withJournalHandle :: (BranchState -> Git.Repo -> OsPath) -> (DirectoryHandle -> IO a) -> Annex a
 withJournalHandle getjournaldir a = do
 	bs <- getState
 	repo <- Annex.gitRepo
@@ -244,15 +243,15 @@ withJournalHandle getjournaldir a = do
  where
 	-- avoid overhead of creating the journal directory when it already
 	-- exists
-	opendir d = liftIO (openDirectory d)
+	opendir d = liftIO (openDirectory (fromOsPath d))
 		`catchIO` (const (createAnnexDirectory d >> opendir d))
 {- Checks if there are changes in the journal. -}
-journalDirty :: (BranchState -> Git.Repo -> RawFilePath) -> Annex Bool
+journalDirty :: (BranchState -> Git.Repo -> OsPath) -> Annex Bool
 journalDirty getjournaldir = do
 	st <- getState
 	d <- fromRepo (getjournaldir st)
-	liftIO $ isDirectoryPopulated d
+	liftIO $ isDirectoryPopulated (fromOsPath d)
 {- Produces a filename to use in the journal for a file on the branch.
 - The filename does not include the journal directory.
@@ -261,33 +260,33 @@ journalDirty getjournaldir = do
 - used in the branch is not necessary, and all the files are put directly
 - in the journal directory.
 -}
-journalFile :: RawFilePath -> RawFilePath
-journalFile file = B.concatMap mangle file
+journalFile :: OsPath -> OsPath
+journalFile file = OS.concat $ map mangle $ OS.unpack file
  where
 	mangle c
-		| P.isPathSeparator c = B.singleton underscore
-		| c == underscore = B.pack [underscore, underscore]
-		| otherwise = B.singleton c
-	underscore = fromIntegral (ord '_')
+		| isPathSeparator c = OS.singleton underscore
+		| c == underscore = OS.pack [underscore, underscore]
+		| otherwise = OS.singleton c
+	underscore = unsafeFromChar '_'
 {- Converts a journal file (relative to the journal dir) back to the
 - filename on the branch. -}
-fileJournal :: RawFilePath -> RawFilePath
+fileJournal :: OsPath -> OsPath
 fileJournal = go
  where
 	go b =
-		let (h, t) = B.break (== underscore) b
-		in h <> case B.uncons t of
+		let (h, t) = OS.break (== underscore) b
+		in h <> case OS.uncons t of
 			Nothing -> t
-			Just (_u, t') -> case B.uncons t' of
+			Just (_u, t') -> case OS.uncons t' of
 				Nothing -> t'
 				Just (w, t'')
 					| w == underscore ->
-						B.cons underscore (go t'')
+						OS.cons underscore (go t'')
 					| otherwise ->
-						B.cons P.pathSeparator (go t')
-	underscore = fromIntegral (ord '_')
+						OS.cons pathSeparator (go t')
+	underscore = unsafeFromChar '_'
 {- Sentinal value, only produced by lockJournal; required
 - as a parameter by things that need to ensure the journal is
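The mangling in journalFile above flattens a branch filename into a single journal filename: path separators become "_" and a literal "_" is doubled, which fileJournal then reverses. The same scheme over plain String, for illustration only (the real code now works on OsPath/OsChar via Utility.OsString):

    journalFileSketch :: String -> String
    journalFileSketch = concatMap mangle
      where
        mangle '/' = "_"
        mangle '_' = "__"
        mangle c   = [c]

    -- journalFileSketch "uuid.log"       == "uuid.log"
    -- journalFileSketch "dir/sub/file_1" == "dir_sub_file__1"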


@@ -39,11 +39,11 @@ import Utility.CopyFile
 import qualified Database.Keys.Handle
 import qualified Utility.RawFilePath as R
 import qualified Utility.FileIO as F
+import qualified Utility.OsString as OS
 import qualified Data.ByteString as S
 import qualified Data.ByteString.Char8 as S8
 import qualified Data.ByteString.Lazy as L
-import qualified System.FilePath.ByteString as P
 #ifndef mingw32_HOST_OS
 #if MIN_VERSION_unix(2,8,0)
 #else
@@ -103,7 +103,7 @@ getAnnexLinkTarget' file coresymlinks = if coresymlinks
 		then mempty
 		else s
-makeAnnexLink :: LinkTarget -> RawFilePath -> Annex ()
+makeAnnexLink :: LinkTarget -> OsPath -> Annex ()
 makeAnnexLink = makeGitLink
 {- Creates a link on disk.
@@ -113,26 +113,31 @@ makeAnnexLink = makeGitLink
 - it's staged as such, so use addAnnexLink when adding a new file or
 - modified link to git.
 -}
-makeGitLink :: LinkTarget -> RawFilePath -> Annex ()
+makeGitLink :: LinkTarget -> OsPath -> Annex ()
 makeGitLink linktarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
 	( liftIO $ do
-		void $ tryIO $ R.removeLink file
-		R.createSymbolicLink linktarget file
-	, liftIO $ F.writeFile' (toOsPath file) linktarget
+		void $ tryIO $ R.removeLink file'
+		R.createSymbolicLink linktarget file'
+	, liftIO $ F.writeFile' file linktarget
 	)
+  where
+	file' = fromOsPath file
 {- Creates a link on disk, and additionally stages it in git. -}
-addAnnexLink :: LinkTarget -> RawFilePath -> Annex ()
+addAnnexLink :: LinkTarget -> OsPath -> Annex ()
 addAnnexLink linktarget file = do
 	makeAnnexLink linktarget file
 	stageSymlink file =<< hashSymlink linktarget
 {- Injects a symlink target into git, returning its Sha. -}
 hashSymlink :: LinkTarget -> Annex Sha
-hashSymlink = hashBlob . toInternalGitPath
+hashSymlink = go . fromOsPath . toInternalGitPath . toOsPath
+  where
+	go :: LinkTarget -> Annex Sha
+	go = hashBlob
 {- Stages a symlink to an annexed object, using a Sha of its target. -}
-stageSymlink :: RawFilePath -> Sha -> Annex ()
+stageSymlink :: OsPath -> Sha -> Annex ()
 stageSymlink file sha =
 	Annex.Queue.addUpdateIndex =<<
 		inRepo (Git.UpdateIndex.stageSymlink file sha)
@@ -142,7 +147,7 @@ hashPointerFile :: Key -> Annex Sha
 hashPointerFile key = hashBlob $ formatPointer key
 {- Stages a pointer file, using a Sha of its content -}
-stagePointerFile :: RawFilePath -> Maybe FileMode -> Sha -> Annex ()
+stagePointerFile :: OsPath -> Maybe FileMode -> Sha -> Annex ()
 stagePointerFile file mode sha =
 	Annex.Queue.addUpdateIndex =<<
 		inRepo (Git.UpdateIndex.stageFile sha treeitemtype file)
@@ -151,10 +156,10 @@ stagePointerFile file mode sha =
 		| maybe False isExecutable mode = TreeExecutable
 		| otherwise = TreeFile
-writePointerFile :: RawFilePath -> Key -> Maybe FileMode -> IO ()
+writePointerFile :: OsPath -> Key -> Maybe FileMode -> IO ()
 writePointerFile file k mode = do
-	F.writeFile' (toOsPath file) (formatPointer k)
-	maybe noop (R.setFileMode file) mode
+	F.writeFile' file (formatPointer k)
+	maybe noop (R.setFileMode (fromOsPath file)) mode
 newtype Restage = Restage Bool
@@ -187,7 +192,7 @@ newtype Restage = Restage Bool
 - if the process is interrupted before the git queue is fulushed, the
 - restage will be taken care of later.
 -}
-restagePointerFile :: Restage -> RawFilePath -> InodeCache -> Annex ()
+restagePointerFile :: Restage -> OsPath -> InodeCache -> Annex ()
 restagePointerFile (Restage False) f orig = do
 	flip writeRestageLog orig =<< inRepo (toTopFilePath f)
 	toplevelWarning True $ unableToRestage $ Just f
@@ -225,14 +230,14 @@ restagePointerFiles r = unlessM (Annex.getState Annex.insmudgecleanfilter) $ do
 		=<< Annex.getRead Annex.keysdbhandle
 	realindex <- liftIO $ Git.Index.currentIndexFile r
 	numsz@(numfiles, _) <- calcnumsz
-	let lock = fromRawFilePath (Git.Index.indexFileLock realindex)
+	let lock = Git.Index.indexFileLock realindex
 	    lockindex = liftIO $ catchMaybeIO $ Git.LockFile.openLock' lock
 	    unlockindex = liftIO . maybe noop Git.LockFile.closeLock
 	    showwarning = warning $ unableToRestage Nothing
 	    go Nothing = showwarning
 	    go (Just _) = withtmpdir $ \tmpdir -> do
 		tsd <- getTSDelta
-		let tmpindex = toRawFilePath (tmpdir </> "index")
+		let tmpindex = tmpdir </> literalOsPath "index"
 		let replaceindex = liftIO $ moveFile tmpindex realindex
 		let updatetmpindex = do
 			r' <- liftIO $ Git.Env.addGitEnv r Git.Index.indexEnv
@@ -247,8 +252,8 @@ restagePointerFiles r = unlessM (Annex.getState Annex.insmudgecleanfilter) $ do
 	bracket lockindex unlockindex go
  where
 	withtmpdir = withTmpDirIn
-		(fromRawFilePath $ Git.localGitDir r)
-		(toOsPath "annexindex")
+		(Git.localGitDir r)
+		(literalOsPath "annexindex")
 	isunmodified tsd f orig =
 		genInodeCache f tsd >>= return . \case
@@ -325,7 +330,7 @@ restagePointerFiles r = unlessM (Annex.getState Annex.insmudgecleanfilter) $ do
 	ck = ConfigKey "filter.annex.process"
 	ckd = ConfigKey "filter.annex.process-temp-disabled"
-unableToRestage :: Maybe RawFilePath -> StringContainingQuotedPath
+unableToRestage :: Maybe OsPath -> StringContainingQuotedPath
 unableToRestage mf =
 	"git status will show " <> maybe "some files" QuotedPath mf
 	<> " to be modified, since content availability has changed"
@ -361,7 +366,8 @@ parseLinkTargetOrPointer' b =
Nothing -> Right Nothing Nothing -> Right Nothing
where where
parsekey l parsekey l
| isLinkToAnnex l = fileKey $ snd $ S8.breakEnd pathsep l | isLinkToAnnex l = fileKey $ toOsPath $
snd $ S8.breakEnd pathsep l
| otherwise = Nothing | otherwise = Nothing
restvalid r restvalid r
@ -400,9 +406,9 @@ parseLinkTargetOrPointerLazy' b =
in parseLinkTargetOrPointer' (L.toStrict b') in parseLinkTargetOrPointer' (L.toStrict b')
formatPointer :: Key -> S.ByteString formatPointer :: Key -> S.ByteString
formatPointer k = prefix <> keyFile k <> nl formatPointer k = fromOsPath prefix <> fromOsPath (keyFile k) <> nl
where where
prefix = toInternalGitPath $ P.pathSeparator `S.cons` objectDir prefix = toInternalGitPath $ pathSeparator `OS.cons` objectDir
nl = S8.singleton '\n' nl = S8.singleton '\n'
{- Maximum size of a file that could be a pointer to a key. {- Maximum size of a file that could be a pointer to a key.
@ -434,21 +440,21 @@ maxSymlinkSz = 8192
- an object that looks like a pointer file. Or that a non-annex - an object that looks like a pointer file. Or that a non-annex
- symlink does. Avoids a false positive in those cases. - symlink does. Avoids a false positive in those cases.
- -} - -}
isPointerFile :: RawFilePath -> IO (Maybe Key) isPointerFile :: OsPath -> IO (Maybe Key)
isPointerFile f = catchDefaultIO Nothing $ isPointerFile f = catchDefaultIO Nothing $
#if defined(mingw32_HOST_OS) #if defined(mingw32_HOST_OS)
F.withFile (toOsPath f) ReadMode readhandle F.withFile f ReadMode readhandle
#else #else
#if MIN_VERSION_unix(2,8,0) #if MIN_VERSION_unix(2,8,0)
let open = do let open = do
fd <- openFd (fromRawFilePath f) ReadOnly fd <- openFd (fromOsPath f) ReadOnly
(defaultFileFlags { nofollow = True }) (defaultFileFlags { nofollow = True })
fdToHandle fd fdToHandle fd
in bracket open hClose readhandle in bracket open hClose readhandle
#else #else
ifM (isSymbolicLink <$> R.getSymbolicLinkStatus f) ifM (isSymbolicLink <$> R.getSymbolicLinkStatus (toRawFilePath f))
( return Nothing ( return Nothing
, F.withFile (toOsPath f) ReadMode readhandle , F.withFile f ReadMode readhandle
) )
#endif #endif
#endif #endif
@ -463,13 +469,13 @@ isPointerFile f = catchDefaultIO Nothing $
- than .git to be used. - than .git to be used.
-} -}
isLinkToAnnex :: S.ByteString -> Bool isLinkToAnnex :: S.ByteString -> Bool
isLinkToAnnex s = p `S.isInfixOf` s isLinkToAnnex s = p `OS.isInfixOf` (toOsPath s)
#ifdef mingw32_HOST_OS #ifdef mingw32_HOST_OS
-- '/' is used inside pointer files on Windows, not the native '\' -- '/' is used inside pointer files on Windows, not the native '\'
|| p' `S.isInfixOf` s || p' `OS.isInfixOf` s
#endif #endif
where where
p = P.pathSeparator `S.cons` objectDir p = pathSeparator `OS.cons` objectDir
#ifdef mingw32_HOST_OS #ifdef mingw32_HOST_OS
p' = toInternalGitPath p p' = toInternalGitPath p
#endif #endif
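
A rough sketch (not part of this commit) of the OsString-based infix test that isLinkToAnnex now does, written directly against the filepath/os-string API; lit and objectDir below are stand-ins for git-annex's literalOsPath and Annex.Locations.objectDir:

import Data.Maybe (fromJust)
import System.OsPath (OsPath, encodeUtf, pathSeparator)
import qualified System.OsString as OS

-- Stand-in for git-annex's literalOsPath.
lit :: String -> OsPath
lit = fromJust . encodeUtf

-- Stand-in for Annex.Locations.objectDir.
objectDir :: OsPath
objectDir = lit "annex/objects"

-- A symlink target points into the annex when it contains "/annex/objects".
isLinkToAnnexSketch :: OsPath -> Bool
isLinkToAnnexSketch target = p `OS.isInfixOf` target
  where
    p = pathSeparator `OS.cons` objectDir

The real code additionally checks the internal-git-path form on Windows, where '/' rather than '\' is used inside pointer files.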

View file

@ -387,7 +387,7 @@ gitAnnexFsckDir u r mc = case annexDbDir =<< mc of
Nothing -> go (gitAnnexDir r) Nothing -> go (gitAnnexDir r)
Just d -> go d Just d -> go d
where where
go d = d </> literalOsPath "fsck" </> uuidPath u go d = d </> literalOsPath "fsck" </> fromUUID u
{- used to store information about incremental fscks. -} {- used to store information about incremental fscks. -}
gitAnnexFsckState :: UUID -> Git.Repo -> OsPath gitAnnexFsckState :: UUID -> Git.Repo -> OsPath
@ -408,7 +408,7 @@ gitAnnexFsckDbLock u r c = gitAnnexFsckDir u r (Just c) </> literalOsPath "fsck.
{- .git/annex/fsckresults/uuid is used to store results of git fscks -} {- .git/annex/fsckresults/uuid is used to store results of git fscks -}
gitAnnexFsckResultsLog :: UUID -> Git.Repo -> OsPath gitAnnexFsckResultsLog :: UUID -> Git.Repo -> OsPath
gitAnnexFsckResultsLog u r = gitAnnexFsckResultsLog u r =
gitAnnexDir r </> literalOsPath "fsckresults" </> uuidPath u gitAnnexDir r </> literalOsPath "fsckresults" </> fromUUID u
{- .git/annex/upgrade.log is used to record repository version upgrades. -} {- .git/annex/upgrade.log is used to record repository version upgrades. -}
gitAnnexUpgradeLog :: Git.Repo -> OsPath gitAnnexUpgradeLog :: Git.Repo -> OsPath
@ -476,7 +476,7 @@ gitAnnexExportDir r c = fromMaybe (gitAnnexDir r) (annexDbDir c)
{- Directory containing database used to record export info. -} {- Directory containing database used to record export info. -}
gitAnnexExportDbDir :: UUID -> Git.Repo -> GitConfig -> OsPath gitAnnexExportDbDir :: UUID -> Git.Repo -> GitConfig -> OsPath
gitAnnexExportDbDir u r c = gitAnnexExportDbDir u r c =
gitAnnexExportDir r c </> uuidPath u </> literalOsPath "exportdb" gitAnnexExportDir r c </> fromUUID u </> literalOsPath "exportdb"
{- Lock file for export database. -} {- Lock file for export database. -}
gitAnnexExportLock :: UUID -> Git.Repo -> GitConfig -> OsPath gitAnnexExportLock :: UUID -> Git.Repo -> GitConfig -> OsPath
@ -491,7 +491,7 @@ gitAnnexExportUpdateLock u r c = gitAnnexExportDbDir u r c <> literalOsPath ".up
- remote, but were excluded by its preferred content settings. -} - remote, but were excluded by its preferred content settings. -}
gitAnnexExportExcludeLog :: UUID -> Git.Repo -> OsPath gitAnnexExportExcludeLog :: UUID -> Git.Repo -> OsPath
gitAnnexExportExcludeLog u r = gitAnnexDir r gitAnnexExportExcludeLog u r = gitAnnexDir r
</> literalOsPath "export.ex" </> uuidPath u </> literalOsPath "export.ex" </> fromUUID u
{- Directory containing database used to record remote content ids. {- Directory containing database used to record remote content ids.
- -
@ -516,7 +516,7 @@ gitAnnexImportDir r c =
{- File containing state about the last import done from a remote. -} {- File containing state about the last import done from a remote. -}
gitAnnexImportLog :: UUID -> Git.Repo -> GitConfig -> OsPath gitAnnexImportLog :: UUID -> Git.Repo -> GitConfig -> OsPath
gitAnnexImportLog u r c = gitAnnexImportLog u r c =
gitAnnexImportDir r c </> uuidPath u </> literalOsPath "log" gitAnnexImportDir r c </> fromUUID u </> literalOsPath "log"
{- Directory containing database used by importfeed. -} {- Directory containing database used by importfeed. -}
gitAnnexImportFeedDbDir :: Git.Repo -> GitConfig -> OsPath gitAnnexImportFeedDbDir :: Git.Repo -> GitConfig -> OsPath
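
The fromUUID calls above build path components directly from a UUID; they rely on a FromUUID OsPath instance added further down (in Types/UUID), replacing the removed uuidPath helper. A simplified sketch of that usage shape, with UUID reduced to a newtype over String (the real instance converts via ShortByteString, with no UTF-8 decoding step):

import Data.Maybe (fromJust)
import System.OsPath (OsPath, (</>), encodeUtf)

newtype UUID = UUID String

-- Stand-in for fromUUID at type OsPath.
uuidAsPath :: UUID -> OsPath
uuidAsPath (UUID u) = fromJust (encodeUtf u)

-- Shape of e.g. gitAnnexFsckDir: <dir>/fsck/<uuid>
fsckDirFor :: OsPath -> UUID -> OsPath
fsckDirFor dir u = dir </> fromJust (encodeUtf "fsck") </> uuidAsPath u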

View file

@ -7,20 +7,17 @@
module Annex.Multicast where module Annex.Multicast where
import Common
import Annex.Path import Annex.Path
import Utility.Env import Utility.Env
import Utility.PartialPrelude
import System.Process import System.Process
import System.IO
import GHC.IO.Handle.FD import GHC.IO.Handle.FD
import Control.Applicative
import Prelude
multicastReceiveEnv :: String multicastReceiveEnv :: String
multicastReceiveEnv = "GIT_ANNEX_MULTICAST_RECEIVE" multicastReceiveEnv = "GIT_ANNEX_MULTICAST_RECEIVE"
multicastCallbackEnv :: IO (FilePath, [(String, String)], Handle) multicastCallbackEnv :: IO (OsPath, [(String, String)], Handle)
multicastCallbackEnv = do multicastCallbackEnv = do
gitannex <- programPath gitannex <- programPath
-- This will even work on Windows -- This will even work on Windows

View file

@ -40,18 +40,18 @@ import qualified Data.Map as M
- git-annex-shell or git-remote-annex, this finds a git-annex program - git-annex-shell or git-remote-annex, this finds a git-annex program
- instead. - instead.
-} -}
programPath :: IO FilePath programPath :: IO OsPath
programPath = go =<< getEnv "GIT_ANNEX_DIR" programPath = go =<< getEnv "GIT_ANNEX_DIR"
where where
go (Just dir) = do go (Just dir) = do
name <- reqgitannex <$> getProgName name <- reqgitannex <$> getProgName
return (dir </> name) return (toOsPath dir </> toOsPath name)
go Nothing = do go Nothing = do
name <- getProgName name <- getProgName
exe <- if isgitannex name exe <- if isgitannex name
then getExecutablePath then getExecutablePath
else pure "git-annex" else pure "git-annex"
p <- if isAbsolute exe p <- if isAbsolute (toOsPath exe)
then return exe then return exe
else fromMaybe exe <$> readProgramFile else fromMaybe exe <$> readProgramFile
maybe cannotFindProgram return =<< searchPath p maybe cannotFindProgram return =<< searchPath p
@ -65,12 +65,12 @@ programPath = go =<< getEnv "GIT_ANNEX_DIR"
readProgramFile :: IO (Maybe FilePath) readProgramFile :: IO (Maybe FilePath)
readProgramFile = catchDefaultIO Nothing $ do readProgramFile = catchDefaultIO Nothing $ do
programfile <- programFile programfile <- programFile
headMaybe . lines <$> readFile programfile headMaybe . lines <$> readFile (fromOsPath programfile)
cannotFindProgram :: IO a cannotFindProgram :: IO a
cannotFindProgram = do cannotFindProgram = do
f <- programFile f <- programFile
giveup $ "cannot find git-annex program in PATH or in " ++ f giveup $ "cannot find git-annex program in PATH or in " ++ fromOsPath f
{- Runs a git-annex child process. {- Runs a git-annex child process.
- -
@ -88,7 +88,7 @@ gitAnnexChildProcess
gitAnnexChildProcess subcmd ps f a = do gitAnnexChildProcess subcmd ps f a = do
cmd <- liftIO programPath cmd <- liftIO programPath
ps' <- gitAnnexChildProcessParams subcmd ps ps' <- gitAnnexChildProcessParams subcmd ps
pidLockChildProcess cmd ps' f a pidLockChildProcess (fromOsPath cmd) ps' f a
{- Parameters to pass to a git-annex child process to run a subcommand {- Parameters to pass to a git-annex child process to run a subcommand
- with some parameters. - with some parameters.
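
A rough, self-contained sketch (not from this commit) of the GIT_ANNEX_DIR override plus PATH fallback that programPath now expresses in OsPath terms; findExecutable stands in for git-annex's own searchPath/readProgramFile handling:

import Data.Maybe (fromJust, fromMaybe)
import System.Directory (findExecutable)
import System.Environment (getProgName, lookupEnv)
import System.OsPath (OsPath, (</>), encodeUtf)

toOs :: FilePath -> OsPath
toOs = fromJust . encodeUtf

programPathSketch :: IO OsPath
programPathSketch = do
    menv <- lookupEnv "GIT_ANNEX_DIR"
    case menv of
        Just dir -> do
            name <- getProgName
            return (toOs dir </> toOs name)
        Nothing -> do
            exe <- fromMaybe "git-annex" <$> findExecutable "git-annex"
            return (toOs exe)

The conversions happen only at the boundary with the String-based System.Environment API, which is the pattern the diff follows throughout.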

View file

@ -31,7 +31,7 @@ addCommand commonparams command params files = do
store =<< flushWhenFull =<< store =<< flushWhenFull =<<
(Git.Queue.addCommand commonparams command params files q =<< gitRepo) (Git.Queue.addCommand commonparams command params files q =<< gitRepo)
addFlushAction :: Git.Queue.FlushActionRunner Annex -> [RawFilePath] -> Annex () addFlushAction :: Git.Queue.FlushActionRunner Annex -> [OsPath] -> Annex ()
addFlushAction runner files = do addFlushAction runner files = do
q <- get q <- get
store =<< flushWhenFull =<< store =<< flushWhenFull =<<

View file

@ -21,8 +21,6 @@ import Utility.Tmp
import Utility.Tmp.Dir import Utility.Tmp.Dir
import Utility.Directory.Create import Utility.Directory.Create
import qualified System.FilePath.ByteString as P
{- replaceFile on a file located inside the gitAnnexDir. -} {- replaceFile on a file located inside the gitAnnexDir. -}
replaceGitAnnexDirFile :: OsPath -> (OsPath -> Annex a) -> Annex a replaceGitAnnexDirFile :: OsPath -> (OsPath -> Annex a) -> Annex a
replaceGitAnnexDirFile = replaceFile createAnnexDirectory replaceGitAnnexDirFile = replaceFile createAnnexDirectory

View file

@ -23,8 +23,6 @@ import Utility.PID
import Control.Concurrent import Control.Concurrent
import Text.Read import Text.Read
import Data.Time.Clock.POSIX import Data.Time.Clock.POSIX
import qualified Utility.RawFilePath as R
import qualified System.FilePath.ByteString as P
{- Called when a location log change is journalled, so the LiveUpdate {- Called when a location log change is journalled, so the LiveUpdate
- is done. This is called with the journal still locked, so no concurrent - is done. This is called with the journal still locked, so no concurrent
@ -146,12 +144,11 @@ checkStaleSizeChanges :: RepoSizeHandle -> Annex ()
checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
livedir <- calcRepo' gitAnnexRepoSizeLiveDir livedir <- calcRepo' gitAnnexRepoSizeLiveDir
pid <- liftIO getPID pid <- liftIO getPID
let pidlockfile = show pid let pidlockfile = toOsPath (show pid)
now <- liftIO getPOSIXTime now <- liftIO getPOSIXTime
liftIO (takeMVar livev) >>= \case liftIO (takeMVar livev) >>= \case
Nothing -> do Nothing -> do
lck <- takeExclusiveLock $ lck <- takeExclusiveLock $ livedir </> pidlockfile
livedir P.</> toRawFilePath pidlockfile
go livedir lck pidlockfile now go livedir lck pidlockfile now
Just v@(lck, lastcheck) Just v@(lck, lastcheck)
| now >= lastcheck + 60 -> | now >= lastcheck + 60 ->
@ -161,11 +158,11 @@ checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
where where
go livedir lck pidlockfile now = do go livedir lck pidlockfile now = do
void $ tryNonAsync $ do void $ tryNonAsync $ do
lockfiles <- liftIO $ filter (not . dirCruft . toRawFilePath) lockfiles <- liftIO $ filter (`notElem` dirCruft)
<$> getDirectoryContents (fromRawFilePath livedir) <$> getDirectoryContents livedir
stale <- forM lockfiles $ \lockfile -> stale <- forM lockfiles $ \lockfile ->
if (lockfile /= pidlockfile) if (lockfile /= pidlockfile)
then case readMaybe lockfile of then case readMaybe (fromOsPath lockfile) of
Nothing -> return Nothing Nothing -> return Nothing
Just pid -> checkstale livedir lockfile pid Just pid -> checkstale livedir lockfile pid
else return Nothing else return Nothing
@ -176,7 +173,7 @@ checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
liftIO $ putMVar livev (Just (lck, now)) liftIO $ putMVar livev (Just (lck, now))
checkstale livedir lockfile pid = checkstale livedir lockfile pid =
let f = livedir P.</> toRawFilePath lockfile let f = livedir </> lockfile
in trySharedLock f >>= \case in trySharedLock f >>= \case
Nothing -> return Nothing Nothing -> return Nothing
Just lck -> do Just lck -> do
@ -184,6 +181,6 @@ checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
( StaleSizeChanger (SizeChangeProcessId pid) ( StaleSizeChanger (SizeChangeProcessId pid)
, do , do
dropLock lck dropLock lck
removeWhenExistsWith R.removeLink f removeWhenExistsWith removeFile f
) )
checkStaleSizeChanges (RepoSizeHandle Nothing _) = noop checkStaleSizeChanges (RepoSizeHandle Nothing _) = noop
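
A simplified sketch of the filename handling in checkStaleSizeChanges once the directory listing is done in OsPath. listDirectory (from the directory package's OsPath module) already omits "." and "..", so no dirCruft filter is needed in this sketch, and the shared-lock liveness check is left out:

import Data.Maybe (mapMaybe)
import Text.Read (readMaybe)
import System.Directory.OsPath (listDirectory)
import System.OsPath (OsPath, decodeUtf)

-- Lock files in the live dir whose names parse as PIDs, other than our own.
pidLockFiles :: OsPath -> Int -> IO [(OsPath, Int)]
pidLockFiles livedir mypid = do
    fs <- listDirectory livedir
    return (mapMaybe parsepid fs)
  where
    parsepid f = do
        s <- decodeUtf f
        pid <- readMaybe s
        if pid == mypid then Nothing else Just (f, pid)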

View file

@ -40,14 +40,14 @@ import Types.Concurrency
import Git.Env import Git.Env
import Git.Ssh import Git.Ssh
import qualified Utility.RawFilePath as R import qualified Utility.RawFilePath as R
import qualified Utility.OsString as OS
import Annex.Perms import Annex.Perms
#ifndef mingw32_HOST_OS #ifndef mingw32_HOST_OS
import Annex.LockPool import Annex.LockPool
#endif #endif
import Control.Concurrent.STM import Control.Concurrent.STM
import qualified Data.ByteString as S import qualified Data.ByteString.Short as SBS
import qualified System.FilePath.ByteString as P
{- Some ssh commands are fed stdin on a pipe and so should be allowed to {- Some ssh commands are fed stdin on a pipe and so should be allowed to
- consume it. But ssh commands that are not piped stdin should generally - consume it. But ssh commands that are not piped stdin should generally
@ -101,15 +101,15 @@ consumeStdinParams NoConsumeStdin = [Param "-n"]
{- Returns a filename to use for a ssh connection caching socket, and {- Returns a filename to use for a ssh connection caching socket, and
- parameters to enable ssh connection caching. -} - parameters to enable ssh connection caching. -}
sshCachingInfo :: (SshHost, Maybe Integer) -> Annex (Maybe RawFilePath, [CommandParam]) sshCachingInfo :: (SshHost, Maybe Integer) -> Annex (Maybe OsPath, [CommandParam])
sshCachingInfo (host, port) = go =<< sshCacheDir' sshCachingInfo (host, port) = go =<< sshCacheDir'
where where
go (Right dir) = go (Right dir) =
liftIO (bestSocketPath $ dir P.</> hostport2socket host port) >>= return . \case liftIO (bestSocketPath $ dir </> hostport2socket host port) >>= return . \case
Nothing -> (Nothing, []) Nothing -> (Nothing, [])
Just socketfile -> Just socketfile ->
(Just socketfile (Just socketfile
, sshConnectionCachingParams (fromRawFilePath socketfile) , sshConnectionCachingParams (fromOsPath socketfile)
) )
-- No connection caching with concurrency is not a good -- No connection caching with concurrency is not a good
-- combination, so warn the user. -- combination, so warn the user.
@ -137,10 +137,10 @@ sshCachingInfo (host, port) = go =<< sshCacheDir'
- file. - file.
- -
- If no path can be constructed that is a valid socket, returns Nothing. -} - If no path can be constructed that is a valid socket, returns Nothing. -}
bestSocketPath :: RawFilePath -> IO (Maybe RawFilePath) bestSocketPath :: OsPath -> IO (Maybe OsPath)
bestSocketPath abssocketfile = do bestSocketPath abssocketfile = do
relsocketfile <- liftIO $ relPathCwdToFile abssocketfile relsocketfile <- liftIO $ relPathCwdToFile abssocketfile
let socketfile = if S.length abssocketfile <= S.length relsocketfile let socketfile = if OS.length abssocketfile <= OS.length relsocketfile
then abssocketfile then abssocketfile
else relsocketfile else relsocketfile
return $ if valid_unix_socket_path socketfile sshgarbagelen return $ if valid_unix_socket_path socketfile sshgarbagelen
@ -167,10 +167,10 @@ sshSocketDirEnv = "GIT_ANNEX_SSH_SOCKET_DIR"
- -
- The directory will be created if it does not exist. - The directory will be created if it does not exist.
-} -}
sshCacheDir :: Annex (Maybe RawFilePath) sshCacheDir :: Annex (Maybe OsPath)
sshCacheDir = eitherToMaybe <$> sshCacheDir' sshCacheDir = eitherToMaybe <$> sshCacheDir'
sshCacheDir' :: Annex (Either String RawFilePath) sshCacheDir' :: Annex (Either String OsPath)
sshCacheDir' = sshCacheDir' =
ifM (fromMaybe BuildInfo.sshconnectioncaching . annexSshCaching <$> Annex.getGitConfig) ifM (fromMaybe BuildInfo.sshconnectioncaching . annexSshCaching <$> Annex.getGitConfig)
( ifM crippledFileSystem ( ifM crippledFileSystem
@ -191,9 +191,9 @@ sshCacheDir' =
gettmpdir = liftIO $ getEnv sshSocketDirEnv gettmpdir = liftIO $ getEnv sshSocketDirEnv
usetmpdir tmpdir = do usetmpdir tmpdir = do
let socktmp = tmpdir </> "ssh" let socktmp = toOsPath tmpdir </> literalOsPath "ssh"
createDirectoryIfMissing True socktmp createDirectoryIfMissing True socktmp
return (toRawFilePath socktmp) return socktmp
crippledfswarning = unwords crippledfswarning = unwords
[ "This repository is on a crippled filesystem, so unix named" [ "This repository is on a crippled filesystem, so unix named"
@ -216,7 +216,7 @@ portParams (Just port) = [Param "-p", Param $ show port]
- Locks the socket lock file to prevent other git-annex processes from - Locks the socket lock file to prevent other git-annex processes from
- stopping the ssh multiplexer on this socket. - stopping the ssh multiplexer on this socket.
-} -}
prepSocket :: RawFilePath -> SshHost -> [CommandParam] -> Annex () prepSocket :: OsPath -> SshHost -> [CommandParam] -> Annex ()
prepSocket socketfile sshhost sshparams = do prepSocket socketfile sshhost sshparams = do
-- There could be stale ssh connections hanging around -- There could be stale ssh connections hanging around
-- from a previous git-annex run that was interrupted. -- from a previous git-annex run that was interrupted.
@ -288,11 +288,11 @@ prepSocket socketfile sshhost sshparams = do
- and this check makes such files be skipped since the corresponding lock - and this check makes such files be skipped since the corresponding lock
- file won't exist. - file won't exist.
-} -}
enumSocketFiles :: Annex [RawFilePath] enumSocketFiles :: Annex [OsPath]
enumSocketFiles = liftIO . go =<< sshCacheDir enumSocketFiles = liftIO . go =<< sshCacheDir
where where
go Nothing = return [] go Nothing = return []
go (Just dir) = filterM (R.doesPathExist . socket2lock) go (Just dir) = filterM (R.doesPathExist . fromOsPath . socket2lock)
=<< filter (not . isLock) =<< filter (not . isLock)
<$> catchDefaultIO [] (dirContents dir) <$> catchDefaultIO [] (dirContents dir)
@ -326,45 +326,45 @@ sshCleanup = mapM_ cleanup =<< enumSocketFiles
forceSshCleanup :: Annex () forceSshCleanup :: Annex ()
forceSshCleanup = mapM_ forceStopSsh =<< enumSocketFiles forceSshCleanup = mapM_ forceStopSsh =<< enumSocketFiles
forceStopSsh :: RawFilePath -> Annex () forceStopSsh :: OsPath -> Annex ()
forceStopSsh socketfile = withNullHandle $ \nullh -> do forceStopSsh socketfile = withNullHandle $ \nullh -> do
let (dir, base) = splitFileName (fromRawFilePath socketfile) let (dir, base) = splitFileName socketfile
let p = (proc "ssh" $ toCommand $ let p = (proc "ssh" $ toCommand $
[ Param "-O", Param "stop" ] ++ [ Param "-O", Param "stop" ] ++
sshConnectionCachingParams base ++ sshConnectionCachingParams (fromOsPath base) ++
[Param "localhost"]) [Param "localhost"])
{ cwd = Just dir { cwd = Just (fromOsPath dir)
-- "ssh -O stop" is noisy on stderr even with -q -- "ssh -O stop" is noisy on stderr even with -q
, std_out = UseHandle nullh , std_out = UseHandle nullh
, std_err = UseHandle nullh , std_err = UseHandle nullh
} }
void $ liftIO $ catchMaybeIO $ withCreateProcess p $ \_ _ _ pid -> void $ liftIO $ catchMaybeIO $ withCreateProcess p $ \_ _ _ pid ->
forceSuccessProcess p pid forceSuccessProcess p pid
liftIO $ removeWhenExistsWith R.removeLink socketfile liftIO $ removeWhenExistsWith R.removeLink (fromOsPath socketfile)
{- This needs to be as short as possible, due to limitations on the length {- This needs to be as short as possible, due to limitations on the length
- of the path to a socket file. At the same time, it needs to be unique - of the path to a socket file. At the same time, it needs to be unique
- for each host. - for each host.
-} -}
hostport2socket :: SshHost -> Maybe Integer -> RawFilePath hostport2socket :: SshHost -> Maybe Integer -> OsPath
hostport2socket host Nothing = hostport2socket' $ fromSshHost host hostport2socket host Nothing = hostport2socket' $ fromSshHost host
hostport2socket host (Just port) = hostport2socket' $ hostport2socket host (Just port) = hostport2socket' $
fromSshHost host ++ "!" ++ show port fromSshHost host ++ "!" ++ show port
hostport2socket' :: String -> RawFilePath hostport2socket' :: String -> OsPath
hostport2socket' s hostport2socket' s
| length s > lengthofmd5s = toRawFilePath $ show $ md5 $ encodeBL s | length s > lengthofmd5s = toOsPath $ show $ md5 $ encodeBL s
| otherwise = toRawFilePath s | otherwise = toOsPath s
where where
lengthofmd5s = 32 lengthofmd5s = 32
socket2lock :: RawFilePath -> RawFilePath socket2lock :: OsPath -> OsPath
socket2lock socket = socket <> lockExt socket2lock socket = socket <> lockExt
isLock :: RawFilePath -> Bool isLock :: OsPath -> Bool
isLock f = lockExt `S.isSuffixOf` f isLock f = lockExt `OS.isSuffixOf` f
lockExt :: S.ByteString lockExt :: OsPath
lockExt = ".lock" lockExt = literalOsPath ".lock"
{- This is the size of the sun_path component of sockaddr_un, which {- This is the size of the sun_path component of sockaddr_un, which
- is the limit to the total length of the filename of a unix socket. - is the limit to the total length of the filename of a unix socket.
@ -376,8 +376,9 @@ sizeof_sockaddr_un_sun_path = 100
{- Note that this looks at the true length of the path in bytes, as it will {- Note that this looks at the true length of the path in bytes, as it will
- appear on disk. -} - appear on disk. -}
valid_unix_socket_path :: RawFilePath -> Int -> Bool valid_unix_socket_path :: OsPath -> Int -> Bool
valid_unix_socket_path f n = S.length f + n < sizeof_sockaddr_un_sun_path valid_unix_socket_path f n =
SBS.length (fromOsPath f) + n < sizeof_sockaddr_un_sun_path
{- Parses the SSH port, and returns the other OpenSSH options. If {- Parses the SSH port, and returns the other OpenSSH options. If
- several ports are found, the last one takes precedence. -} - several ports are found, the last one takes precedence. -}
@ -463,7 +464,7 @@ sshOptionsTo remote gc localr
liftIO $ do liftIO $ do
localr' <- addGitEnv localr sshOptionsEnv localr' <- addGitEnv localr sshOptionsEnv
(toSshOptionsEnv sshopts) (toSshOptionsEnv sshopts)
addGitEnv localr' gitSshEnv command addGitEnv localr' gitSshEnv (fromOsPath command)
runSshOptions :: [String] -> String -> IO () runSshOptions :: [String] -> String -> IO ()
runSshOptions args s = do runSshOptions args s = do
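
Socket names have to stay short because of the sun_path limit that valid_unix_socket_path checks; hostport2socket therefore replaces any host!port string longer than an md5 hex digest with its md5. A rough sketch of that, using the pureMD5 package as a stand-in for git-annex's Utility.Hash:

import Data.Digest.Pure.MD5 (md5)           -- pureMD5 package
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.Maybe (fromJust)
import System.OsPath (OsPath, encodeUtf)

hostportToSocketName :: String -> Maybe Integer -> OsPath
hostportToSocketName host mport = fromJust (encodeUtf name)
  where
    s = maybe host (\p -> host ++ "!" ++ show p) mport
    name
        | length s > 32 = show (md5 (L8.pack s))  -- md5 hex digest is 32 chars
        | otherwise = s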

View file

@ -43,7 +43,7 @@ data RunTransferrer = RunTransferrer String [CommandParam] BatchCommandMaker
mkRunTransferrer :: BatchCommandMaker -> Annex RunTransferrer mkRunTransferrer :: BatchCommandMaker -> Annex RunTransferrer
mkRunTransferrer batchmaker = RunTransferrer mkRunTransferrer batchmaker = RunTransferrer
<$> liftIO programPath <$> liftIO (fromOsPath <$> programPath)
<*> gitAnnexChildProcessParams "transferrer" [] <*> gitAnnexChildProcessParams "transferrer" []
<*> pure batchmaker <*> pure batchmaker

View file

@ -96,7 +96,7 @@ genKeyExternal ebname hasext ks meterupdate =
withExternalState ebname hasext $ \st -> withExternalState ebname hasext $ \st ->
handleRequest st req notavail go handleRequest st req notavail go
where where
req = GENKEY (fromRawFilePath (contentLocation ks)) req = GENKEY (fromOsPath (contentLocation ks))
notavail = giveup $ "Cannot generate a key, since " ++ externalBackendProgram ebname ++ " is not available." notavail = giveup $ "Cannot generate a key, since " ++ externalBackendProgram ebname ++ " is not available."
go (GENKEY_SUCCESS pk) = Just $ Result <$> fromProtoKey pk hasext ks go (GENKEY_SUCCESS pk) = Just $ Result <$> fromProtoKey pk hasext ks
@ -107,12 +107,12 @@ genKeyExternal ebname hasext ks meterupdate =
return $ GetNextMessage go return $ GetNextMessage go
go _ = Nothing go _ = Nothing
verifyKeyContentExternal :: ExternalBackendName -> HasExt -> MeterUpdate -> Key -> RawFilePath -> Annex Bool verifyKeyContentExternal :: ExternalBackendName -> HasExt -> MeterUpdate -> Key -> OsPath -> Annex Bool
verifyKeyContentExternal ebname hasext meterupdate k f = verifyKeyContentExternal ebname hasext meterupdate k f =
withExternalState ebname hasext $ \st -> withExternalState ebname hasext $ \st ->
handleRequest st req notavail go handleRequest st req notavail go
where where
req = VERIFYKEYCONTENT (toProtoKey k) (fromRawFilePath f) req = VERIFYKEYCONTENT (toProtoKey k) (fromOsPath f)
-- This should not be able to happen, because CANVERIFY is checked -- This should not be able to happen, because CANVERIFY is checked
	-- before this function is enabled, and so the external program -- before this function is enabled, and so the external program

View file

@ -108,6 +108,6 @@ hookExists h r = do
runHook :: (FilePath -> [CommandParam] -> IO a) -> Hook -> [CommandParam] -> Repo -> IO a runHook :: (FilePath -> [CommandParam] -> IO a) -> Hook -> [CommandParam] -> Repo -> IO a
runHook runner h ps r = do runHook runner h ps r = do
let f = fromOsPath $ hookFile h r let f = hookFile h r
(c, cps) <- findShellCommand f (c, cps) <- findShellCommand f
runner c (cps ++ ps) runner c (cps ++ ps)

View file

@ -53,11 +53,11 @@ data Action m
- those will be run before the FlushAction is. -} - those will be run before the FlushAction is. -}
| FlushAction | FlushAction
{ getFlushActionRunner :: FlushActionRunner m { getFlushActionRunner :: FlushActionRunner m
, getFlushActionFiles :: [RawFilePath] , getFlushActionFiles :: [OsPath]
} }
{- The String must be unique for each flush action. -} {- The String must be unique for each flush action. -}
data FlushActionRunner m = FlushActionRunner String (Repo -> [RawFilePath] -> m ()) data FlushActionRunner m = FlushActionRunner String (Repo -> [OsPath] -> m ())
instance Eq (FlushActionRunner m) where instance Eq (FlushActionRunner m) where
FlushActionRunner s1 _ == FlushActionRunner s2 _ = s1 == s2 FlushActionRunner s1 _ == FlushActionRunner s2 _ = s1 == s2
@ -140,7 +140,7 @@ addCommand commonparams subcommand params files q repo =
{- Adds an flush action to the queue. This can co-exist with anything else {- Adds an flush action to the queue. This can co-exist with anything else
- that gets added to the queue, and when the queue is eventually flushed, - that gets added to the queue, and when the queue is eventually flushed,
- it will be run after the other things in the queue. -} - it will be run after the other things in the queue. -}
addFlushAction :: MonadIO m => FlushActionRunner m -> [RawFilePath] -> Queue m -> Repo -> m (Queue m) addFlushAction :: MonadIO m => FlushActionRunner m -> [OsPath] -> Queue m -> Repo -> m (Queue m)
addFlushAction runner files q repo = addFlushAction runner files q repo =
updateQueue action (const False) (length files) q repo updateQueue action (const False) (length files) q repo
where where

View file

@ -15,7 +15,6 @@ import Annex.Common
import Git.Fsck import Git.Fsck
import Git.Types import Git.Types
import Logs.File import Logs.File
import qualified Utility.RawFilePath as R
import qualified Data.Set as S import qualified Data.Set as S
@ -25,7 +24,7 @@ writeFsckResults u fsckresults = do
case serializeFsckResults fsckresults of case serializeFsckResults fsckresults of
Just s -> store s logfile Just s -> store s logfile
Nothing -> liftIO $ Nothing -> liftIO $
removeWhenExistsWith R.removeLink logfile removeWhenExistsWith removeFile logfile
where where
store s logfile = writeLogFile logfile s store s logfile = writeLogFile logfile s
@ -46,7 +45,7 @@ readFsckResults :: UUID -> Annex FsckResults
readFsckResults u = do readFsckResults u = do
logfile <- fromRepo $ gitAnnexFsckResultsLog u logfile <- fromRepo $ gitAnnexFsckResultsLog u
liftIO $ catchDefaultIO (FsckFoundMissing S.empty False) $ liftIO $ catchDefaultIO (FsckFoundMissing S.empty False) $
deserializeFsckResults <$> readFile (fromRawFilePath logfile) deserializeFsckResults <$> readFile (fromOsPath logfile)
deserializeFsckResults :: String -> FsckResults deserializeFsckResults :: String -> FsckResults
deserializeFsckResults = deserialize . lines deserializeFsckResults = deserialize . lines
@ -58,6 +57,6 @@ deserializeFsckResults = deserialize . lines
in if S.null s then FsckFailed else FsckFoundMissing s t in if S.null s then FsckFailed else FsckFoundMissing s t
clearFsckResults :: UUID -> Annex () clearFsckResults :: UUID -> Annex ()
clearFsckResults = liftIO . removeWhenExistsWith R.removeLink clearFsckResults = liftIO . removeWhenExistsWith removeFile
<=< fromRepo . gitAnnexFsckResultsLog <=< fromRepo . gitAnnexFsckResultsLog

View file

@ -18,7 +18,6 @@ import qualified Utility.FileIO as F
import qualified Data.ByteString as S import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L import qualified Data.ByteString.Lazy as L
import qualified Utility.RawFilePath as R
-- | Log a file whose pointer needs to be restaged in git. -- | Log a file whose pointer needs to be restaged in git.
-- The content of the file may not be a pointer, if it is populated with -- The content of the file may not be a pointer, if it is populated with
@ -52,13 +51,13 @@ streamRestageLog finalizer processor = do
lckf <- fromRepo gitAnnexRestageLock lckf <- fromRepo gitAnnexRestageLock
withExclusiveLock lckf $ liftIO $ withExclusiveLock lckf $ liftIO $
whenM (R.doesPathExist logf) $ whenM (doesPathExist logf) $
ifM (R.doesPathExist oldf) ifM (doesPathExist oldf)
( do ( do
h <- F.openFile (toOsPath oldf) AppendMode h <- F.openFile oldf AppendMode
hPutStr h =<< readFile (fromRawFilePath logf) hPutStr h =<< readFile (fromOsPath logf)
hClose h hClose h
liftIO $ removeWhenExistsWith R.removeLink logf liftIO $ removeWhenExistsWith removeFile logf
, moveFile logf oldf , moveFile logf oldf
) )
@ -67,7 +66,7 @@ streamRestageLog finalizer processor = do
Just (f, ic) -> processor f ic Just (f, ic) -> processor f ic
Nothing -> noop Nothing -> noop
liftIO $ removeWhenExistsWith R.removeLink oldf liftIO $ removeWhenExistsWith removeFile oldf
-- | Calculate over both the current restage log, and also over the old -- | Calculate over both the current restage log, and also over the old
-- one if it had started to be processed but did not get finished due -- one if it had started to be processed but did not get finished due
@ -86,11 +85,12 @@ calcRestageLog start update = do
Nothing -> v Nothing -> v
formatRestageLog :: TopFilePath -> InodeCache -> S.ByteString formatRestageLog :: TopFilePath -> InodeCache -> S.ByteString
formatRestageLog f ic = encodeBS (showInodeCache ic) <> ":" <> getTopFilePath f formatRestageLog f ic =
encodeBS (showInodeCache ic) <> ":" <> fromOsPath (getTopFilePath f)
parseRestageLog :: String -> Maybe (TopFilePath, InodeCache) parseRestageLog :: String -> Maybe (TopFilePath, InodeCache)
parseRestageLog l = parseRestageLog l =
let (ics, f) = separate (== ':') l let (ics, f) = separate (== ':') l
in do in do
ic <- readInodeCache ics ic <- readInodeCache ics
return (asTopFilePath (toRawFilePath f), ic) return (asTopFilePath (toOsPath f), ic)
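
The restage log stores one entry per line as "<inodecache>:<topfile>", splitting on the first ':' when reading it back. A simplified String-level sketch of that round trip, treating the inode cache as an opaque, colon-free string and assuming UTF-8-decodable file names (the real code stays at the byte level):

import Data.Maybe (fromJust)
import System.OsPath (OsPath, encodeUtf, decodeUtf)

formatEntry :: String -> OsPath -> String
formatEntry ic f = ic ++ ":" ++ fromJust (decodeUtf f)

parseEntry :: String -> Maybe (String, OsPath)
parseEntry l = case break (== ':') l of
    (ic, ':':f) -> (,) ic <$> encodeUtf f
    _ -> Nothing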

View file

@ -21,7 +21,7 @@ smudgeLog k f = do
logf <- fromRepo gitAnnexSmudgeLog logf <- fromRepo gitAnnexSmudgeLog
lckf <- fromRepo gitAnnexSmudgeLock lckf <- fromRepo gitAnnexSmudgeLock
appendLogFile logf lckf $ L.fromStrict $ appendLogFile logf lckf $ L.fromStrict $
serializeKey' k <> " " <> getTopFilePath f serializeKey' k <> " " <> fromOsPath (getTopFilePath f)
-- | Streams all smudged files, and then empties the log at the end. -- | Streams all smudged files, and then empties the log at the end.
-- --
@ -43,4 +43,4 @@ streamSmudged a = do
let (ks, f) = separate (== ' ') l let (ks, f) = separate (== ' ') l
in do in do
k <- deserializeKey ks k <- deserializeKey ks
return (k, asTopFilePath (toRawFilePath f)) return (k, asTopFilePath (toOsPath f))

View file

@ -21,8 +21,8 @@ import Utility.PID
import Annex.LockPool import Annex.LockPool
import Utility.TimeStamp import Utility.TimeStamp
import Logs.File import Logs.File
import qualified Utility.RawFilePath as R
import qualified Utility.FileIO as F import qualified Utility.FileIO as F
import qualified Utility.OsString as OS
#ifndef mingw32_HOST_OS #ifndef mingw32_HOST_OS
import Annex.Perms import Annex.Perms
#endif #endif
@ -30,9 +30,6 @@ import Annex.Perms
import Data.Time.Clock import Data.Time.Clock
import Data.Time.Clock.POSIX import Data.Time.Clock.POSIX
import Control.Concurrent.STM import Control.Concurrent.STM
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import qualified System.FilePath.ByteString as P
describeTransfer :: Git.Quote.QuotePath -> Transfer -> TransferInfo -> String describeTransfer :: Git.Quote.QuotePath -> Transfer -> TransferInfo -> String
describeTransfer qp t info = unwords describeTransfer qp t info = unwords
@ -62,20 +59,21 @@ percentComplete t info =
- appropriate permissions, which should be run after locking the transfer - appropriate permissions, which should be run after locking the transfer
- lock file, but before using the callback, and a TVar that can be used to - lock file, but before using the callback, and a TVar that can be used to
- read the number of bytes processed so far. -} - read the number of bytes processed so far. -}
mkProgressUpdater :: Transfer -> TransferInfo -> RawFilePath -> Annex (MeterUpdate, Annex (), TVar (Maybe BytesProcessed)) mkProgressUpdater :: Transfer -> TransferInfo -> OsPath -> Annex (MeterUpdate, Annex (), TVar (Maybe BytesProcessed))
mkProgressUpdater t info tfile = do mkProgressUpdater t info tfile = do
let createtfile = void $ tryNonAsync $ writeTransferInfoFile info tfile let createtfile = void $ tryNonAsync $
writeTransferInfoFile info tfile
tvar <- liftIO $ newTVarIO Nothing tvar <- liftIO $ newTVarIO Nothing
loggedtvar <- liftIO $ newTVarIO 0 loggedtvar <- liftIO $ newTVarIO 0
return (liftIO . updater (fromRawFilePath tfile) tvar loggedtvar, createtfile, tvar) return (liftIO . updater tvar loggedtvar, createtfile, tvar)
where where
updater tfile' tvar loggedtvar new = do updater tvar loggedtvar new = do
old <- atomically $ swapTVar tvar (Just new) old <- atomically $ swapTVar tvar (Just new)
let oldbytes = maybe 0 fromBytesProcessed old let oldbytes = maybe 0 fromBytesProcessed old
let newbytes = fromBytesProcessed new let newbytes = fromBytesProcessed new
when (newbytes - oldbytes >= mindelta) $ do when (newbytes - oldbytes >= mindelta) $ do
let info' = info { bytesComplete = Just newbytes } let info' = info { bytesComplete = Just newbytes }
_ <- tryIO $ updateTransferInfoFile info' tfile' _ <- tryIO $ updateTransferInfoFile info' tfile
atomically $ writeTVar loggedtvar newbytes atomically $ writeTVar loggedtvar newbytes
{- The minimum change in bytesComplete that is worth {- The minimum change in bytesComplete that is worth
@ -109,9 +107,9 @@ checkTransfer :: Transfer -> Annex (Maybe TransferInfo)
checkTransfer t = debugLocks $ do checkTransfer t = debugLocks $ do
(tfile, lck, moldlck) <- fromRepo $ transferFileAndLockFile t (tfile, lck, moldlck) <- fromRepo $ transferFileAndLockFile t
let deletestale = do let deletestale = do
void $ tryIO $ R.removeLink tfile void $ tryIO $ removeFile tfile
void $ tryIO $ R.removeLink lck void $ tryIO $ removeFile lck
maybe noop (void . tryIO . R.removeLink) moldlck maybe noop (void . tryIO . removeFile) moldlck
#ifndef mingw32_HOST_OS #ifndef mingw32_HOST_OS
v <- getLockStatus lck v <- getLockStatus lck
v' <- case (moldlck, v) of v' <- case (moldlck, v) of
@ -198,7 +196,7 @@ clearFailedTransfers u = do
removeFailedTransfer :: Transfer -> Annex () removeFailedTransfer :: Transfer -> Annex ()
removeFailedTransfer t = do removeFailedTransfer t = do
f <- fromRepo $ failedTransferFile t f <- fromRepo $ failedTransferFile t
liftIO $ void $ tryIO $ R.removeLink f liftIO $ void $ tryIO $ removeFile f
recordFailedTransfer :: Transfer -> TransferInfo -> Annex () recordFailedTransfer :: Transfer -> TransferInfo -> Annex ()
recordFailedTransfer t info = do recordFailedTransfer t info = do
@ -225,46 +223,47 @@ recordFailedTransfer t info = do
- At some point in the future, when old git-annex processes are no longer - At some point in the future, when old git-annex processes are no longer
- a concern, this complication can be removed. - a concern, this complication can be removed.
-} -}
transferFileAndLockFile :: Transfer -> Git.Repo -> (RawFilePath, RawFilePath, Maybe RawFilePath) transferFileAndLockFile :: Transfer -> Git.Repo -> (OsPath, OsPath, Maybe OsPath)
transferFileAndLockFile (Transfer direction u kd) r = transferFileAndLockFile (Transfer direction u kd) r =
case direction of case direction of
Upload -> (transferfile, uuidlockfile, Nothing) Upload -> (transferfile, uuidlockfile, Nothing)
Download -> (transferfile, nouuidlockfile, Just uuidlockfile) Download -> (transferfile, nouuidlockfile, Just uuidlockfile)
where where
td = transferDir direction r td = transferDir direction r
fu = B8.filter (/= '/') (fromUUID u) fu = OS.filter (/= unsafeFromChar '/') (fromUUID u)
kf = keyFile (mkKey (const kd)) kf = keyFile (mkKey (const kd))
lckkf = "lck." <> kf lckkf = literalOsPath "lck." <> kf
transferfile = td P.</> fu P.</> kf transferfile = td </> fu </> kf
uuidlockfile = td P.</> fu P.</> lckkf uuidlockfile = td </> fu </> lckkf
nouuidlockfile = td P.</> "lck" P.</> lckkf nouuidlockfile = td </> literalOsPath "lck" </> lckkf
{- The transfer information file to use to record a failed Transfer -} {- The transfer information file to use to record a failed Transfer -}
failedTransferFile :: Transfer -> Git.Repo -> RawFilePath failedTransferFile :: Transfer -> Git.Repo -> OsPath
failedTransferFile (Transfer direction u kd) r = failedTransferFile (Transfer direction u kd) r =
failedTransferDir u direction r failedTransferDir u direction r
P.</> keyFile (mkKey (const kd)) </> keyFile (mkKey (const kd))
{- Parses a transfer information filename to a Transfer. -} {- Parses a transfer information filename to a Transfer. -}
parseTransferFile :: RawFilePath -> Maybe Transfer parseTransferFile :: OsPath -> Maybe Transfer
parseTransferFile file parseTransferFile file
| "lck." `B.isPrefixOf` P.takeFileName file = Nothing | literalOsPath "lck." `OS.isPrefixOf` takeFileName file = Nothing
| otherwise = case drop (length bits - 3) bits of | otherwise = case drop (length bits - 3) bits of
[direction, u, key] -> Transfer [direction, u, key] -> Transfer
<$> parseDirection direction <$> parseDirection (fromOsPath direction)
<*> pure (toUUID u) <*> pure (toUUID u)
<*> fmap (fromKey id) (fileKey key) <*> fmap (fromKey id) (fileKey key)
_ -> Nothing _ -> Nothing
where where
bits = P.splitDirectories file bits = splitDirectories file
writeTransferInfoFile :: TransferInfo -> RawFilePath -> Annex () writeTransferInfoFile :: TransferInfo -> OsPath -> Annex ()
writeTransferInfoFile info tfile = writeLogFile tfile $ writeTransferInfo info writeTransferInfoFile info tfile = writeLogFile tfile $ writeTransferInfo info
-- The file keeps whatever permissions it has, so should be used only -- The file keeps whatever permissions it has, so should be used only
-- after it's been created with the right perms by writeTransferInfoFile. -- after it's been created with the right perms by writeTransferInfoFile.
updateTransferInfoFile :: TransferInfo -> FilePath -> IO () updateTransferInfoFile :: TransferInfo -> OsPath -> IO ()
updateTransferInfoFile info tfile = writeFile tfile $ writeTransferInfo info updateTransferInfoFile info tfile =
writeFile (fromOsPath tfile) $ writeTransferInfo info
{- File format is a header line containing the startedTime and any {- File format is a header line containing the startedTime and any
- bytesComplete value. Followed by a newline and the associatedFile. - bytesComplete value. Followed by a newline and the associatedFile.
@ -283,12 +282,12 @@ writeTransferInfo info = unlines
#endif #endif
-- comes last; arbitrary content -- comes last; arbitrary content
, let AssociatedFile afile = associatedFile info , let AssociatedFile afile = associatedFile info
in maybe "" fromRawFilePath afile in maybe "" fromOsPath afile
] ]
readTransferInfoFile :: Maybe PID -> RawFilePath -> IO (Maybe TransferInfo) readTransferInfoFile :: Maybe PID -> OsPath -> IO (Maybe TransferInfo)
readTransferInfoFile mpid tfile = catchDefaultIO Nothing $ readTransferInfoFile mpid tfile = catchDefaultIO Nothing $
readTransferInfo mpid . decodeBS <$> F.readFile' (toOsPath tfile) readTransferInfo mpid . decodeBS <$> F.readFile' tfile
readTransferInfo :: Maybe PID -> String -> Maybe TransferInfo readTransferInfo :: Maybe PID -> String -> Maybe TransferInfo
readTransferInfo mpid s = TransferInfo readTransferInfo mpid s = TransferInfo
@ -301,9 +300,13 @@ readTransferInfo mpid s = TransferInfo
<*> pure Nothing <*> pure Nothing
<*> pure Nothing <*> pure Nothing
<*> bytes <*> bytes
<*> pure (AssociatedFile (if null filename then Nothing else Just (toRawFilePath filename))) <*> pure af
<*> pure False <*> pure False
where where
af = AssociatedFile $
if null filename
then Nothing
else Just (toOsPath filename)
#ifdef mingw32_HOST_OS #ifdef mingw32_HOST_OS
(firstliner, otherlines) = separate (== '\n') s (firstliner, otherlines) = separate (== '\n') s
(secondliner, rest) = separate (== '\n') otherlines (secondliner, rest) = separate (== '\n') otherlines
@ -326,16 +329,18 @@ readTransferInfo mpid s = TransferInfo
else pure Nothing -- not failure else pure Nothing -- not failure
{- The directory holding transfer information files for a given Direction. -} {- The directory holding transfer information files for a given Direction. -}
transferDir :: Direction -> Git.Repo -> RawFilePath transferDir :: Direction -> Git.Repo -> OsPath
transferDir direction r = gitAnnexTransferDir r P.</> formatDirection direction transferDir direction r =
gitAnnexTransferDir r
</> toOsPath (formatDirection direction)
{- The directory holding failed transfer information files for a given {- The directory holding failed transfer information files for a given
- Direction and UUID -} - Direction and UUID -}
failedTransferDir :: UUID -> Direction -> Git.Repo -> RawFilePath failedTransferDir :: UUID -> Direction -> Git.Repo -> OsPath
failedTransferDir u direction r = gitAnnexTransferDir r failedTransferDir u direction r = gitAnnexTransferDir r
P.</> "failed" </> literalOsPath "failed"
P.</> formatDirection direction </> toOsPath (formatDirection direction)
P.</> B8.filter (/= '/') (fromUUID u) </> OS.filter (/= unsafeFromChar '/') (fromUUID u)
prop_read_write_transferinfo :: TransferInfo -> Bool prop_read_write_transferinfo :: TransferInfo -> Bool
prop_read_write_transferinfo info prop_read_write_transferinfo info
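
Transfer info files live at <transferdir>/<direction>/<uuid>/<keyfile>, with lock files distinguished only by a "lck." prefix, and parseTransferFile recovers the Transfer from the last three path components. A rough sketch of that parse with OsPath, with direction, UUID and key simplified to plain strings (git-annex's keyFile/fileKey escaping is not reproduced):

import Data.Maybe (fromJust)
import System.OsPath (OsPath, encodeUtf, decodeUtf, splitDirectories, takeFileName)
import qualified System.OsString as OS

lit :: String -> OsPath
lit = fromJust . encodeUtf

parseTransferPath :: OsPath -> Maybe (String, String, String)
parseTransferPath file
    | lit "lck." `OS.isPrefixOf` takeFileName file = Nothing
    | otherwise = case drop (length bits - 3) bits of
        [direction, u, key] -> (,,)
            <$> decodeUtf direction
            <*> decodeUtf u
            <*> decodeUtf key
        _ -> Nothing
  where
    bits = splitDirectories file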

View file

@ -32,8 +32,8 @@ import qualified Data.ByteString.Lazy as L
import qualified Data.Attoparsec.ByteString.Lazy as A import qualified Data.Attoparsec.ByteString.Lazy as A
import qualified Data.Attoparsec.ByteString.Char8 as A8 import qualified Data.Attoparsec.ByteString.Char8 as A8
transitionsLog :: RawFilePath transitionsLog :: OsPath
transitionsLog = "transitions.log" transitionsLog = literalOsPath "transitions.log"
data Transition data Transition
= ForgetGitHistory = ForgetGitHistory
@ -102,7 +102,7 @@ knownTransitionList = nub . rights . map transition . S.elems
{- Typically ran with Annex.Branch.change, but we can't import Annex.Branch {- Typically ran with Annex.Branch.change, but we can't import Annex.Branch
- here since it depends on this module. -} - here since it depends on this module. -}
recordTransitions :: (RawFilePath -> (L.ByteString -> Builder) -> Annex ()) -> Transitions -> Annex () recordTransitions :: (OsPath -> (L.ByteString -> Builder) -> Annex ()) -> Transitions -> Annex ()
recordTransitions changer t = changer transitionsLog $ recordTransitions changer t = changer transitionsLog $
buildTransitions . S.union t . parseTransitionsStrictly "local" buildTransitions . S.union t . parseTransitionsStrictly "local"

View file

@ -52,7 +52,7 @@ instance APIVersion V0 where protocolVersion _ = P2P.ProtocolVersion 0
newtype B64Key = B64Key Key newtype B64Key = B64Key Key
deriving (Show) deriving (Show)
newtype B64FilePath = B64FilePath RawFilePath newtype B64FilePath = B64FilePath OsPath
deriving (Show) deriving (Show)
associatedFileToB64FilePath :: AssociatedFile -> Maybe B64FilePath associatedFileToB64FilePath :: AssociatedFile -> Maybe B64FilePath
@ -233,11 +233,11 @@ instance FromHttpApiData (B64UUID t) where
Left err -> Left err Left err -> Left err
instance ToHttpApiData B64FilePath where instance ToHttpApiData B64FilePath where
toUrlPiece (B64FilePath f) = encodeB64Text f toUrlPiece (B64FilePath f) = encodeB64Text (fromOsPath f)
instance FromHttpApiData B64FilePath where instance FromHttpApiData B64FilePath where
parseUrlPiece t = case decodeB64Text t of parseUrlPiece t = case decodeB64Text t of
Right b -> Right (B64FilePath b) Right b -> Right (B64FilePath (toOsPath b))
Left err -> Left err Left err -> Left err
instance ToHttpApiData Offset where instance ToHttpApiData Offset where

View file

@ -175,7 +175,7 @@ serveUnixSocket unixsocket serveconn = do
-- Connections have to authenticate to do anything, -- Connections have to authenticate to do anything,
-- so it's fine that other local users can connect to the -- so it's fine that other local users can connect to the
-- socket. -- socket.
modifyFileMode (toRawFilePath unixsocket) $ addModes modifyFileMode (toOsPath unixsocket) $ addModes
[groupReadMode, groupWriteMode, otherReadMode, otherWriteMode] [groupReadMode, groupWriteMode, otherReadMode, otherWriteMode]
S.listen soc 2 S.listen soc 2
forever $ do forever $ do
@ -381,7 +381,7 @@ runRelayService conn runner service = case connRepo conn of
serviceproc repo = gitCreateProcess serviceproc repo = gitCreateProcess
[ Param cmd [ Param cmd
, File (fromRawFilePath (repoPath repo)) , File (fromOsPath (repoPath repo))
] repo ] repo
serviceproc' repo = (serviceproc repo) serviceproc' repo = (serviceproc repo)
{ std_out = CreatePipe { std_out = CreatePipe

View file

@ -10,6 +10,7 @@
{-# LANGUAGE DeriveFunctor, TemplateHaskell, FlexibleContexts #-} {-# LANGUAGE DeriveFunctor, TemplateHaskell, FlexibleContexts #-}
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, RankNTypes #-} {-# LANGUAGE TypeSynonymInstances, FlexibleInstances, RankNTypes #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-} {-# OPTIONS_GHC -fno-warn-orphans #-}
module P2P.Protocol where module P2P.Protocol where
@ -25,8 +26,9 @@ import Utility.AuthToken
import Utility.Applicative import Utility.Applicative
import Utility.PartialPrelude import Utility.PartialPrelude
import Utility.Metered import Utility.Metered
import Utility.FileSystemEncoding
import Utility.MonotonicClock import Utility.MonotonicClock
import Utility.OsPath
import qualified Utility.OsString as OS
import Git.FilePath import Git.FilePath
import Annex.ChangedRefs (ChangedRefs) import Annex.ChangedRefs (ChangedRefs)
import Types.NumCopies import Types.NumCopies
@ -37,8 +39,6 @@ import Control.Monad.Free.TH
import Control.Monad.Catch import Control.Monad.Catch
import System.Exit (ExitCode(..)) import System.Exit (ExitCode(..))
import System.IO import System.IO
import qualified System.FilePath.ByteString as P
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L import qualified Data.ByteString.Lazy as L
import qualified Data.Set as S import qualified Data.Set as S
import Data.Char import Data.Char
@ -224,17 +224,19 @@ instance Proto.Serializable Service where
instance Proto.Serializable ProtoAssociatedFile where instance Proto.Serializable ProtoAssociatedFile where
serialize (ProtoAssociatedFile (AssociatedFile Nothing)) = "" serialize (ProtoAssociatedFile (AssociatedFile Nothing)) = ""
serialize (ProtoAssociatedFile (AssociatedFile (Just af))) = serialize (ProtoAssociatedFile (AssociatedFile (Just af))) =
decodeBS $ toInternalGitPath $ encodeBS $ concatMap esc $ fromRawFilePath af fromOsPath $ toInternalGitPath $
OS.concat $ map esc $ OS.unpack af
where where
esc '%' = "%%" esc c = case OS.toChar c of
esc c '%' -> literalOsPath "%%"
| isSpace c = "%" c' | isSpace c' -> literalOsPath "%"
| otherwise = [c] _ -> OS.singleton c
deserialize s = case fromInternalGitPath $ toRawFilePath $ deesc [] s of deserialize s = case fromInternalGitPath $ toOsPath $ deesc [] s of
f f
| B.null f -> Just $ ProtoAssociatedFile $ AssociatedFile Nothing | OS.null f -> Just $ ProtoAssociatedFile $
| P.isRelative f -> Just $ ProtoAssociatedFile $ AssociatedFile Nothing
| isRelative f -> Just $ ProtoAssociatedFile $
AssociatedFile $ Just f AssociatedFile $ Just f
| otherwise -> Nothing | otherwise -> Nothing
where where
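
The escaping above keeps an associated file free of whitespace on the wire: '%' doubles to "%%" and any whitespace character collapses to a bare "%". The same scheme at the String level, which may be easier to read than the OsChar version:

import Data.Char (isSpace)

escAssociatedFile :: FilePath -> String
escAssociatedFile = concatMap esc
  where
    esc '%' = "%%"
    esc c
        | isSpace c = "%"
        | otherwise = [c]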

View file

@ -56,7 +56,7 @@ runHooks r starthook stophook a = do
firstrun lck firstrun lck
a a
where where
remoteid = uuidPath (uuid r) remoteid = fromUUID (uuid r)
run Nothing = noop run Nothing = noop
run (Just command) = void $ liftIO $ run (Just command) = void $ liftIO $
boolSystem "sh" [Param "-c", Param command] boolSystem "sh" [Param "-c", Param command]

View file

@ -9,16 +9,16 @@
module Types.Direction where module Types.Direction where
import qualified Data.ByteString as B import Data.ByteString.Short
data Direction = Upload | Download data Direction = Upload | Download
deriving (Eq, Ord, Show, Read) deriving (Eq, Ord, Show, Read)
formatDirection :: Direction -> B.ByteString formatDirection :: Direction -> ShortByteString
formatDirection Upload = "upload" formatDirection Upload = "upload"
formatDirection Download = "download" formatDirection Download = "download"
parseDirection :: B.ByteString -> Maybe Direction parseDirection :: ShortByteString -> Maybe Direction
parseDirection "upload" = Just Upload parseDirection "upload" = Just Upload
parseDirection "download" = Just Download parseDirection "download" = Just Download
parseDirection _ = Nothing parseDirection _ = Nothing

View file

@ -7,7 +7,7 @@
module Types.Transitions where module Types.Transitions where
import Utility.RawFilePath import Utility.OsPath
import qualified Data.ByteString.Lazy as L import qualified Data.ByteString.Lazy as L
import Data.ByteString.Builder import Data.ByteString.Builder
@ -16,4 +16,4 @@ data FileTransition
= ChangeFile Builder = ChangeFile Builder
| PreserveFile | PreserveFile
type TransitionCalculator = RawFilePath -> L.ByteString -> FileTransition type TransitionCalculator = OsPath -> L.ByteString -> FileTransition

View file

@ -65,6 +65,14 @@ instance ToUUID SB.ShortByteString where
| SB.null b = NoUUID | SB.null b = NoUUID
| otherwise = UUID (SB.fromShort b) | otherwise = UUID (SB.fromShort b)
-- OsPath is a ShortByteString internally, so this is the most
-- efficient conversion.
instance FromUUID OsPath where
fromUUID s = toOsPath (fromUUID s :: SB.ShortByteString)
instance ToUUID OsPath where
toUUID s = toUUID (fromOsPath s :: SB.ShortByteString)
instance FromUUID String where instance FromUUID String where
fromUUID s = decodeBS (fromUUID s) fromUUID s = decodeBS (fromUUID s)
@ -102,9 +110,6 @@ buildUUID NoUUID = mempty
isUUID :: String -> Bool isUUID :: String -> Bool
isUUID = isJust . U.fromString isUUID = isJust . U.fromString
uuidPath :: UUID -> OsPath
uuidPath u = toOsPath (fromUUID u :: SB.ShortByteString)
-- A description of a UUID. -- A description of a UUID.
newtype UUIDDesc = UUIDDesc B.ByteString newtype UUIDDesc = UUIDDesc B.ByteString
deriving (Eq, Sem.Semigroup, Monoid, IsString) deriving (Eq, Sem.Semigroup, Monoid, IsString)

View file

@ -28,7 +28,6 @@ import Config
import Annex.Perms import Annex.Perms
import Utility.InodeCache import Utility.InodeCache
import Annex.InodeSentinal import Annex.InodeSentinal
import qualified Utility.RawFilePath as R
import qualified Utility.FileIO as F import qualified Utility.FileIO as F
setIndirect :: Annex () setIndirect :: Annex ()
@ -79,27 +78,27 @@ switchHEADBack = maybe noop switch =<< inRepo Git.Branch.currentUnsafe
Nothing -> inRepo $ Git.Branch.checkout orighead Nothing -> inRepo $ Git.Branch.checkout orighead
{- Absolute FilePaths of Files in the tree that are associated with a key. -} {- Absolute FilePaths of Files in the tree that are associated with a key. -}
associatedFiles :: Key -> Annex [FilePath] associatedFiles :: Key -> Annex [OsPath]
associatedFiles key = do associatedFiles key = do
files <- associatedFilesRelative key files <- associatedFilesRelative key
top <- fromRawFilePath <$> fromRepo Git.repoPath top <- fromRepo Git.repoPath
return $ map (top </>) files return $ map (top </>) files
{- List of files in the tree that are associated with a key, relative to {- List of files in the tree that are associated with a key, relative to
- the top of the repo. -} - the top of the repo. -}
associatedFilesRelative :: Key -> Annex [FilePath] associatedFilesRelative :: Key -> Annex [OsPath]
associatedFilesRelative key = do associatedFilesRelative key = do
mapping <- calcRepo (gitAnnexMapping key) mapping <- calcRepo (gitAnnexMapping key)
liftIO $ catchDefaultIO [] $ F.withFile (toOsPath mapping) ReadMode $ \h -> liftIO $ catchDefaultIO [] $ F.withFile mapping ReadMode $ \h ->
-- Read strictly to ensure the file is closed promptly -- Read strictly to ensure the file is closed promptly
lines <$> hGetContentsStrict h map toOsPath . lines <$> hGetContentsStrict h
{- Removes the list of associated files. -} {- Removes the list of associated files. -}
removeAssociatedFiles :: Key -> Annex () removeAssociatedFiles :: Key -> Annex ()
removeAssociatedFiles key = do removeAssociatedFiles key = do
mapping <- calcRepo $ gitAnnexMapping key mapping <- calcRepo $ gitAnnexMapping key
modifyContentDir mapping $ modifyContentDir mapping $
liftIO $ removeWhenExistsWith R.removeLink mapping liftIO $ removeWhenExistsWith removeFile mapping
{- Checks if a file in the tree, associated with a key, has not been modified. {- Checks if a file in the tree, associated with a key, has not been modified.
- -
@ -107,10 +106,8 @@ removeAssociatedFiles key = do
- expensive checksum, this relies on a cache that contains the file's - expensive checksum, this relies on a cache that contains the file's
- expected mtime and inode. - expected mtime and inode.
-} -}
goodContent :: Key -> FilePath -> Annex Bool goodContent :: Key -> OsPath -> Annex Bool
goodContent key file = goodContent key file = sameInodeCache file =<< recordedInodeCache key
sameInodeCache (toRawFilePath file)
=<< recordedInodeCache key
{- Gets the recorded inode cache for a key. {- Gets the recorded inode cache for a key.
- -
@ -120,26 +117,25 @@ recordedInodeCache :: Key -> Annex [InodeCache]
recordedInodeCache key = withInodeCacheFile key $ \f -> recordedInodeCache key = withInodeCacheFile key $ \f ->
liftIO $ catchDefaultIO [] $ liftIO $ catchDefaultIO [] $
mapMaybe (readInodeCache . decodeBS) . fileLines' mapMaybe (readInodeCache . decodeBS) . fileLines'
<$> F.readFile' (toOsPath f) <$> F.readFile' f
{- Removes an inode cache. -} {- Removes an inode cache. -}
removeInodeCache :: Key -> Annex () removeInodeCache :: Key -> Annex ()
removeInodeCache key = withInodeCacheFile key $ \f -> removeInodeCache key = withInodeCacheFile key $ \f ->
modifyContentDir f $ modifyContentDir f $ liftIO $ removeWhenExistsWith removeFile f
liftIO $ removeWhenExistsWith R.removeLink f
withInodeCacheFile :: Key -> (RawFilePath -> Annex a) -> Annex a withInodeCacheFile :: Key -> (OsPath -> Annex a) -> Annex a
withInodeCacheFile key a = a =<< calcRepo (gitAnnexInodeCache key) withInodeCacheFile key a = a =<< calcRepo (gitAnnexInodeCache key)
{- File that maps from a key to the file(s) in the git repository. -} {- File that maps from a key to the file(s) in the git repository. -}
gitAnnexMapping :: Key -> Git.Repo -> GitConfig -> IO RawFilePath gitAnnexMapping :: Key -> Git.Repo -> GitConfig -> IO OsPath
gitAnnexMapping key r c = do gitAnnexMapping key r c = do
loc <- gitAnnexLocation key r c loc <- gitAnnexLocation key r c
return $ loc <> ".map" return $ loc <> literalOsPath ".map"
{- File that caches information about a key's content, used to determine {- File that caches information about a key's content, used to determine
- if a file has changed. -} - if a file has changed. -}
gitAnnexInodeCache :: Key -> Git.Repo -> GitConfig -> IO RawFilePath gitAnnexInodeCache :: Key -> Git.Repo -> GitConfig -> IO OsPath
gitAnnexInodeCache key r c = do gitAnnexInodeCache key r c = do
loc <- gitAnnexLocation key r c loc <- gitAnnexLocation key r c
return $ loc <> ".cache" return $ loc <> literalOsPath ".cache"

View file

@ -44,12 +44,12 @@ copyMetaDataParams meta = map snd $ filter fst
{- The cp command is used, because I hate reinventing the wheel, {- The cp command is used, because I hate reinventing the wheel,
- and because this allows easy access to features like cp --reflink - and because this allows easy access to features like cp --reflink
- and preserving metadata. -} - and preserving metadata. -}
copyFileExternal :: CopyMetaData -> FilePath -> FilePath -> IO Bool copyFileExternal :: CopyMetaData -> OsPath -> OsPath -> IO Bool
copyFileExternal meta src dest = do copyFileExternal meta src dest = do
-- Delete any existing dest file because an unwritable file -- Delete any existing dest file because an unwritable file
-- would prevent cp from working. -- would prevent cp from working.
void $ tryIO $ removeFile (toOsPath dest) void $ tryIO $ removeFile dest
boolSystem "cp" $ params ++ [File src, File dest] boolSystem "cp" $ params ++ [File (fromOsPath src), File (fromOsPath dest)]
where where
params params
| BuildInfo.cp_reflink_supported = | BuildInfo.cp_reflink_supported =
@ -87,10 +87,10 @@ copyCoW meta src dest
{- Create a hard link if the filesystem allows it, and fall back to copying {- Create a hard link if the filesystem allows it, and fall back to copying
- the file. -} - the file. -}
createLinkOrCopy :: RawFilePath -> RawFilePath -> IO Bool createLinkOrCopy :: OsPath -> OsPath -> IO Bool
createLinkOrCopy src dest = go `catchIO` const fallback createLinkOrCopy src dest = go `catchIO` const fallback
where where
go = do go = do
R.createLink src dest R.createLink (fromOsPath src) (fromOsPath dest)
return True return True
fallback = copyFileExternal CopyAllMetaData (fromRawFilePath src) (fromRawFilePath dest) fallback = copyFileExternal CopyAllMetaData src dest
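
createLinkOrCopy tries a hard link first and falls back to a full copy when the filesystem refuses. A POSIX-only sketch of the same pattern with OsPath arguments, using the directory package's copyFile instead of shelling out to cp (so, unlike the real code, a failed copy throws rather than returning False):

import Control.Exception (IOException, catch)
import System.Directory.OsPath (copyFile)
import System.OsPath (OsPath, decodeUtf)
import qualified System.Posix.Files as Posix

linkOrCopy :: OsPath -> OsPath -> IO Bool
linkOrCopy src dest = (hardlink >> return True) `catch` fallback
  where
    hardlink = do
        src' <- decodeUtf src
        dest' <- decodeUtf dest
        Posix.createLink src' dest'
    fallback :: IOException -> IO Bool
    fallback _ = do
        copyFile src dest
        return True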

View file

@ -13,6 +13,7 @@ module Utility.Shell (
findShellCommand, findShellCommand,
) where ) where
import Utility.OsPath
import Utility.SafeCommand import Utility.SafeCommand
#ifdef mingw32_HOST_OS #ifdef mingw32_HOST_OS
import Utility.Path import Utility.Path
@ -35,12 +36,12 @@ shebang = "#!" ++ shellPath
-- parse it for shebang. -- parse it for shebang.
-- --
-- This has no effect on Unix. -- This has no effect on Unix.
findShellCommand :: FilePath -> IO (FilePath, [CommandParam]) findShellCommand :: OsPath -> IO (FilePath, [CommandParam])
findShellCommand f = do findShellCommand f = do
#ifndef mingw32_HOST_OS #ifndef mingw32_HOST_OS
defcmd defcmd
#else #else
l <- catchDefaultIO Nothing $ headMaybe . lines <$> readFile f l <- catchDefaultIO Nothing $ headMaybe . lines <$> readFile (fromOsPath f)
case l of case l of
Just ('#':'!':rest) -> case words rest of Just ('#':'!':rest) -> case words rest of
[] -> defcmd [] -> defcmd
@ -55,4 +56,4 @@ findShellCommand f = do
_ -> defcmd _ -> defcmd
#endif #endif
where where
defcmd = return (f, []) defcmd = return (fromOsPath f, [])
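
On Windows the kernel does not interpret "#!", so findShellCommand reads the first line of the script itself and, when it finds a shebang, runs the named interpreter with the script as an argument. A rough String-level sketch of that fallback (the real code also resolves sh via PATH and handles more edge cases):

shebangCommand :: FilePath -> IO (FilePath, [String])
shebangCommand f = do
    firstline <- safeHead . lines <$> readFile f
    return $ case firstline of
        Just ('#':'!':rest) -> case words rest of
            (interp:args) -> (interp, args ++ [f])
            [] -> (f, [])
        _ -> (f, [])
  where
    safeHead [] = Nothing
    safeHead (x:_) = Just x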