bring back OsPath changes
I hope that the Windows test suite failure on AppVeyor was fixed by updating to a newer Windows version there. I have not been able to reproduce that failure in a Windows 11 VM run locally.
parent f0ab439c95
commit 84291b6014
119 changed files with 1003 additions and 647 deletions
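The hunks below replace String-based file IO (readFileStrict, readFile, writeFile, withFile on FilePath) with OsPath-based helpers, converting paths with toOsPath/fromOsPath and routing the IO through git-annex's Utility.FileIO (imported as F). As a rough standalone sketch of that pattern, and not git-annex code, here is what reading a file through OsPath looks like; it assumes the filepath (>= 1.4.100) and file-io packages, which Utility.FileIO appears to wrap, and the ".git" path is only illustrative:

{-# LANGUAGE QuasiQuotes #-}
module Main (main) where

import System.OsPath (osp, (</>), encodeUtf)
import qualified System.File.OsPath as F
import qualified Data.ByteString as B

-- Read .git/HEAD through the OsPath API instead of String-based IO,
-- mirroring the readFileStrict -> F.readFile' . toOsPath changes below.
readHead :: FilePath -> IO B.ByteString
readHead gitdir = do
	gitdir' <- encodeUtf gitdir      -- FilePath -> OsPath, like toOsPath
	F.readFile' (gitdir' </> [osp|HEAD|])

main :: IO ()
main = readHead ".git" >>= B.putStr

The F.readFile', F.writeFile', F.withFile and F.openFile calls introduced throughout the diff are the corresponding operations routed through the project's own wrapper module.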
@@ -70,6 +70,7 @@ import Logs.View (is_branchView)
 import Logs.AdjustedBranchUpdate
 import Utility.FileMode
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 
 import Data.Time.Clock.POSIX
 import qualified Data.Map as M
@@ -268,7 +269,7 @@ updateAdjustedBranch adj (AdjBranch currbranch) origbranch
 -- origbranch.
 _ <- propigateAdjustedCommits' True origbranch adj commitlck
 
-origheadfile <- inRepo $ readFileStrict . Git.Ref.headFile
+origheadfile <- inRepo $ F.readFile' . toOsPath . Git.Ref.headFile
 origheadsha <- inRepo (Git.Ref.sha currbranch)
 
 b <- adjustBranch adj origbranch
@@ -280,8 +281,8 @@ updateAdjustedBranch adj (AdjBranch currbranch) origbranch
 newheadfile <- case origheadsha of
 Just s -> do
 inRepo $ \r -> do
-let newheadfile = fromRef s
-writeFile (Git.Ref.headFile r) newheadfile
+let newheadfile = fromRef' s
+F.writeFile' (toOsPath (Git.Ref.headFile r)) newheadfile
 return (Just newheadfile)
 _ -> return Nothing
 
@@ -295,9 +296,9 @@ updateAdjustedBranch adj (AdjBranch currbranch) origbranch
 unless ok $ case newheadfile of
 Nothing -> noop
 Just v -> preventCommits $ \_commitlck -> inRepo $ \r -> do
-v' <- readFileStrict (Git.Ref.headFile r)
+v' <- F.readFile' (toOsPath (Git.Ref.headFile r))
 when (v == v') $
-writeFile (Git.Ref.headFile r) origheadfile
+F.writeFile' (toOsPath (Git.Ref.headFile r)) origheadfile
 
 return ok
 | otherwise = preventCommits $ \commitlck -> do
@@ -29,8 +29,9 @@ import Annex.GitOverlay
 import Utility.Tmp.Dir
 import Utility.CopyFile
 import Utility.Directory.Create
+import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 
-import qualified Data.ByteString as S
 import qualified System.FilePath.ByteString as P
 
 canMergeToAdjustedBranch :: Branch -> (OrigBranch, Adjustment) -> Annex Bool
@@ -72,26 +73,25 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm
 -}
 changestomerge (Just updatedorig) = withOtherTmp $ \othertmpdir -> do
 git_dir <- fromRepo Git.localGitDir
-let git_dir' = fromRawFilePath git_dir
 tmpwt <- fromRepo gitAnnexMergeDir
-withTmpDirIn (fromRawFilePath othertmpdir) "git" $ \tmpgit -> withWorkTreeRelated tmpgit $
+withTmpDirIn (fromRawFilePath othertmpdir) (toOsPath "git") $ \tmpgit -> withWorkTreeRelated tmpgit $
 withemptydir git_dir tmpwt $ withWorkTree tmpwt $ do
+let tmpgit' = toRawFilePath tmpgit
 liftIO $ writeFile (tmpgit </> "HEAD") (fromRef updatedorig)
 -- Copy in refs and packed-refs, to work
 -- around bug in git 2.13.0, which
 -- causes it not to look in GIT_DIR for refs.
 refs <- liftIO $ emptyWhenDoesNotExist $
 dirContentsRecursive $
-git_dir' </> "refs"
-let refs' = (git_dir' </> "packed-refs") : refs
+git_dir P.</> "refs"
+let refs' = (git_dir P.</> "packed-refs") : refs
 liftIO $ forM_ refs' $ \src -> do
-let src' = toRawFilePath src
-whenM (doesFileExist src) $ do
-dest <- relPathDirToFile git_dir src'
-let dest' = toRawFilePath tmpgit P.</> dest
+whenM (R.doesPathExist src) $ do
+dest <- relPathDirToFile git_dir src
+let dest' = tmpgit' P.</> dest
 createDirectoryUnder [git_dir]
 (P.takeDirectory dest')
-void $ createLinkOrCopy src' dest'
+void $ createLinkOrCopy src dest'
 -- This reset makes git merge not care
 -- that the work tree is empty; otherwise
 -- it will think that all the files have
@@ -107,7 +107,7 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm
 if merged
 then do
 !mergecommit <- liftIO $ extractSha
-<$> S.readFile (tmpgit </> "HEAD")
+<$> F.readFile' (toOsPath (tmpgit' P.</> "HEAD"))
 -- This is run after the commit lock is dropped.
 return $ postmerge mergecommit
 else return $ return False
@@ -35,10 +35,10 @@ import Annex.InodeSentinal
 import Utility.InodeCache
 import Utility.FileMode
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 
 import qualified Data.Set as S
 import qualified Data.Map as M
-import qualified Data.ByteString.Lazy as L
 import System.PosixCompat.Files (isSymbolicLink)
 
 {- Merges from a branch into the current branch (which may not exist yet),
@@ -236,8 +236,9 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
 | otherwise = pure f
 
 makesymlink key dest = do
-l <- calcRepo $ gitAnnexLink (toRawFilePath dest) key
-unless inoverlay $ replacewithsymlink dest l
+let rdest = toRawFilePath dest
+l <- calcRepo $ gitAnnexLink rdest key
+unless inoverlay $ replacewithsymlink rdest l
 dest' <- toRawFilePath <$> stagefile dest
 stageSymlink dest' =<< hashSymlink l
 
@@ -265,9 +266,9 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
 
 let replacefile isexecutable = case selectwant' (LsFiles.unmergedSha u) of
 Nothing -> noop
-Just sha -> replaceWorkTreeFile item $ \tmp -> do
+Just sha -> replaceWorkTreeFile (toRawFilePath item) $ \tmp -> do
 c <- catObject sha
-liftIO $ L.writeFile (decodeBS tmp) c
+liftIO $ F.writeFile (toOsPath tmp) c
 when isexecutable $
 liftIO $ void $ tryIO $
 modifyFileMode tmp $
@@ -280,7 +281,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
 Nothing -> noop
 Just sha -> do
 link <- catSymLinkTarget sha
-replacewithsymlink item link
+replacewithsymlink (toRawFilePath item) link
 (Just TreeFile, Just TreeSymlink) -> replacefile False
 (Just TreeExecutable, Just TreeSymlink) -> replacefile True
 _ -> ifM (liftIO $ doesDirectoryExist item)
@@ -11,11 +11,12 @@ import Key
 import Types.UUID
 import Utility.Hash
 
-import Data.List
 import Data.Maybe
 import Data.Bits (shiftL)
 import qualified Data.Set as S
 import qualified Data.ByteArray as BA
+import Data.List
+import Prelude
 
 -- The Int is how many UUIDs to pick.
 type BalancedPicker = S.Set UUID -> Key -> Int -> [UUID]
@@ -96,6 +96,7 @@ import Annex.Hook
 import Utility.Directory.Stream
 import Utility.Tmp
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 
 {- Name of the branch that is used to store git-annex's information. -}
 name :: Git.Ref
@@ -711,9 +712,9 @@ forceUpdateIndex jl branchref = do
 {- Checks if the index needs to be updated. -}
 needUpdateIndex :: Git.Ref -> Annex Bool
 needUpdateIndex branchref = do
-f <- fromRawFilePath <$> fromRepo gitAnnexIndexStatus
+f <- toOsPath <$> fromRepo gitAnnexIndexStatus
 committedref <- Git.Ref . firstLine' <$>
-liftIO (catchDefaultIO mempty $ B.readFile f)
+liftIO (catchDefaultIO mempty $ F.readFile' f)
 return (committedref /= branchref)
 
 {- Record that the branch's index has been updated to correspond to a
@@ -741,7 +742,7 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do
 g <- gitRepo
 st <- getState
 let dir = gitAnnexJournalDir st g
-(jlogf, jlogh) <- openjlog (fromRawFilePath tmpdir)
+(jlogf, jlogh) <- openjlog tmpdir
 withHashObjectHandle $ \h ->
 withJournalHandle gitAnnexJournalDir $ \jh ->
 Git.UpdateIndex.streamUpdateIndex g
@@ -752,12 +753,12 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do
 genstream dir h jh jlogh streamer = readDirectory jh >>= \case
 Nothing -> return ()
 Just file -> do
-let path = dir P.</> toRawFilePath file
+let path = dir P.</> file
 unless (dirCruft file) $ whenM (isfile path) $ do
 sha <- Git.HashObject.hashFile h path
-hPutStrLn jlogh file
+B.hPutStr jlogh (file <> "\n")
 streamer $ Git.UpdateIndex.updateIndexLine
-sha TreeFile (asTopFilePath $ fileJournal $ toRawFilePath file)
+sha TreeFile (asTopFilePath $ fileJournal file)
 genstream dir h jh jlogh streamer
 isfile file = isRegularFile <$> R.getFileStatus file
 -- Clean up the staged files, as listed in the temp log file.
@@ -769,8 +770,8 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do
 stagedfs <- lines <$> hGetContents jlogh
 mapM_ (removeFile . (dir </>)) stagedfs
 hClose jlogh
-removeWhenExistsWith (R.removeLink) (toRawFilePath jlogf)
-openjlog tmpdir = liftIO $ openTmpFileIn tmpdir "jlog"
+removeWhenExistsWith (R.removeLink) (fromOsPath jlogf)
+openjlog tmpdir = liftIO $ openTmpFileIn (toOsPath tmpdir) (toOsPath "jlog")
 
 getLocalTransitions :: Annex Transitions
 getLocalTransitions =
@@ -931,8 +932,8 @@ getIgnoredRefs =
 S.fromList . mapMaybe Git.Sha.extractSha . fileLines' <$> content
 where
 content = do
-f <- fromRawFilePath <$> fromRepo gitAnnexIgnoredRefs
-liftIO $ catchDefaultIO mempty $ B.readFile f
+f <- toOsPath <$> fromRepo gitAnnexIgnoredRefs
+liftIO $ catchDefaultIO mempty $ F.readFile' f
 
 addMergedRefs :: [(Git.Sha, Git.Branch)] -> Annex ()
 addMergedRefs [] = return ()
@@ -949,8 +950,8 @@ getMergedRefs = S.fromList . map fst <$> getMergedRefs'
 
 getMergedRefs' :: Annex [(Git.Sha, Git.Branch)]
 getMergedRefs' = do
-f <- fromRawFilePath <$> fromRepo gitAnnexMergedRefs
-s <- liftIO $ catchDefaultIO mempty $ B.readFile f
+f <- toOsPath <$> fromRepo gitAnnexMergedRefs
+s <- liftIO $ catchDefaultIO mempty $ F.readFile' f
 return $ map parse $ fileLines' s
 where
 parse l =
@@ -23,11 +23,11 @@ import Utility.Directory.Create
 import qualified Git
 import Git.Sha
 import qualified Utility.SimpleProtocol as Proto
+import qualified Utility.FileIO as F
 
 import Control.Concurrent
 import Control.Concurrent.STM
 import Control.Concurrent.STM.TBMChan
-import qualified Data.ByteString as S
 import qualified System.FilePath.ByteString as P
 
 newtype ChangedRefs = ChangedRefs [Git.Ref]
@@ -104,7 +104,7 @@ notifyHook chan reffile _
 | ".lock" `isSuffixOf` reffile = noop
 | otherwise = void $ do
 sha <- catchDefaultIO Nothing $
-extractSha <$> S.readFile reffile
+extractSha <$> F.readFile' (toOsPath (toRawFilePath reffile))
 -- When the channel is full, there is probably no reader
 -- running, or ref changes have been occurring very fast,
 -- so it's ok to not write the change to it.
@@ -108,6 +108,7 @@ import Utility.HumanTime
 import Utility.TimeStamp
 import Utility.FileMode
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 
 import qualified System.FilePath.ByteString as P
 import System.PosixCompat.Files (isSymbolicLink, linkCount)
@@ -581,7 +582,7 @@ linkToAnnex key src srcic = ifM (checkSecureHashes' key)
 -}
 linkFromAnnex :: Key -> RawFilePath -> Maybe FileMode -> Annex LinkAnnexResult
 linkFromAnnex key dest destmode =
-replaceFile' (const noop) (fromRawFilePath dest) (== LinkAnnexOk) $ \tmp ->
+replaceFile' (const noop) dest (== LinkAnnexOk) $ \tmp ->
 linkFromAnnex' key tmp destmode
 
 {- This is only safe to use when dest is not a worktree file. -}
@@ -817,7 +818,7 @@ listKeys' keyloc want = do
 s <- Annex.getState id
 r <- Annex.getRead id
 depth <- gitAnnexLocationDepth <$> Annex.getGitConfig
-liftIO $ walk (s, r) depth (fromRawFilePath dir)
+liftIO $ walk (s, r) depth dir
 where
 walk s depth dir = do
 contents <- catchDefaultIO [] (dirContents dir)
@@ -825,7 +826,7 @@ listKeys' keyloc want = do
 then do
 contents' <- filterM present contents
 keys <- filterM (Annex.eval s . want) $
-mapMaybe (fileKey . P.takeFileName . toRawFilePath) contents'
+mapMaybe (fileKey . P.takeFileName) contents'
 continue keys []
 else do
 let deeper = walk s (depth - 1)
@@ -843,8 +844,8 @@ listKeys' keyloc want = do
 present _ | inanywhere = pure True
 present d = presentInAnnex d
 
-presentInAnnex = doesFileExist . contentfile
-contentfile d = d </> takeFileName d
+presentInAnnex = R.doesPathExist . contentfile
+contentfile d = d P.</> P.takeFileName d
 
 {- Things to do to record changes to content when shutting down.
 -
@@ -1076,7 +1077,7 @@ writeContentRetentionTimestamp key rt t = do
 modifyContentDirWhenExists lckfile $ bracket (lock lckfile) unlock $ \_ ->
 readContentRetentionTimestamp rt >>= \case
 Just ts | ts >= t -> return ()
-_ -> replaceFile (const noop) (fromRawFilePath rt) $ \tmp ->
+_ -> replaceFile (const noop) rt $ \tmp ->
 liftIO $ writeFile (fromRawFilePath tmp) $ show t
 where
 lock = takeExclusiveLock
@@ -1086,7 +1087,7 @@ writeContentRetentionTimestamp key rt t = do
 readContentRetentionTimestamp :: RawFilePath -> Annex (Maybe POSIXTime)
 readContentRetentionTimestamp rt =
 liftIO $ join <$> tryWhenExists
-(parsePOSIXTime <$> readFile (fromRawFilePath rt))
+(parsePOSIXTime <$> F.readFile' (toOsPath rt))
 
 {- Checks if the retention timestamp is in the future, if so returns
 - Nothing.
@@ -34,10 +34,9 @@ populatePointerFile :: Restage -> Key -> RawFilePath -> RawFilePath -> Annex (Ma
 populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
 where
 go (Just k') | k == k' = do
-let f' = fromRawFilePath f
 destmode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus f
 liftIO $ removeWhenExistsWith R.removeLink f
-(ic, populated) <- replaceWorkTreeFile f' $ \tmp -> do
+(ic, populated) <- replaceWorkTreeFile f $ \tmp -> do
 ok <- linkOrCopy k obj tmp destmode >>= \case
 Just _ -> thawContent tmp >> return True
 Nothing -> liftIO (writePointerFile tmp k destmode) >> return False
@@ -58,7 +57,7 @@ depopulatePointerFile key file = do
 let mode = fmap fileMode st
 secureErase file
 liftIO $ removeWhenExistsWith R.removeLink file
-ic <- replaceWorkTreeFile (fromRawFilePath file) $ \tmp -> do
+ic <- replaceWorkTreeFile file $ \tmp -> do
 liftIO $ writePointerFile tmp key mode
 #if ! defined(mingw32_HOST_OS)
 -- Don't advance mtime; this avoids unnecessary re-smudging
@@ -19,6 +19,7 @@ import Utility.Directory
 import Utility.Exception
 import Utility.Monad
 import Utility.FileSystemEncoding
+import Utility.SystemDirectory
 import qualified Utility.RawFilePath as R
 import Utility.PartialPrelude
 
@@ -9,6 +9,8 @@
 - Licensed under the GNU AGPL version 3 or higher.
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
+
 module Annex.Hook where
 
 import Annex.Common
@@ -85,7 +87,8 @@ hookWarning :: Git.Hook -> String -> Annex ()
 hookWarning h msg = do
 r <- gitRepo
 warning $ UnquotedString $
-Git.hookName h ++ " hook (" ++ Git.hookFile h r ++ ") " ++ msg
+fromRawFilePath (Git.hookName h) ++
+" hook (" ++ fromRawFilePath (Git.hookFile h r) ++ ") " ++ msg
 
 {- To avoid checking if the hook exists every time, the existing hooks
 - are cached. -}
@@ -118,7 +121,7 @@ runAnnexHook' hook commandcfg = ifM (doesAnnexHookExist hook)
 ( return Nothing
 , do
 h <- fromRepo (Git.hookFile hook)
-commandfailed h
+commandfailed (fromRawFilePath h)
 )
 runcommandcfg = commandcfg <$> Annex.getGitConfig >>= \case
 Nothing -> return Nothing
@@ -118,20 +118,21 @@ lockDown' cfg file = tryNonAsync $ ifM crippledFileSystem
 withhardlink tmpdir = do
 setperms
 withTSDelta $ \delta -> liftIO $ do
-(tmpfile, h) <- openTmpFileIn (fromRawFilePath tmpdir) $
-relatedTemplate $ "ingest-" ++ takeFileName file
+(tmpfile, h) <- openTmpFileIn (toOsPath tmpdir) $
+relatedTemplate $ toRawFilePath $
+"ingest-" ++ takeFileName file
 hClose h
-removeWhenExistsWith R.removeLink (toRawFilePath tmpfile)
-withhardlink' delta tmpfile
+let tmpfile' = fromOsPath tmpfile
+removeWhenExistsWith R.removeLink tmpfile'
+withhardlink' delta tmpfile'
 `catchIO` const (nohardlink' delta)
 
 withhardlink' delta tmpfile = do
-let tmpfile' = toRawFilePath tmpfile
-R.createLink file' tmpfile'
-cache <- genInodeCache tmpfile' delta
+R.createLink file' tmpfile
+cache <- genInodeCache tmpfile delta
 return $ LockedDown cfg $ KeySource
 { keyFilename = file'
-, contentLocation = tmpfile'
+, contentLocation = tmpfile
 , inodeCache = cache
 }
 
@@ -308,7 +309,7 @@ restoreFile file key e = do
 makeLink :: RawFilePath -> Key -> Maybe InodeCache -> Annex LinkTarget
 makeLink file key mcache = flip catchNonAsync (restoreFile file key) $ do
 l <- calcRepo $ gitAnnexLink file key
-replaceWorkTreeFile file' $ makeAnnexLink l
+replaceWorkTreeFile file $ makeAnnexLink l
 
 -- touch symlink to have same time as the original file,
 -- as provided in the InodeCache
@@ -317,8 +318,6 @@ makeLink file key mcache = flip catchNonAsync (restoreFile file key) $ do
 Nothing -> noop
 
 return l
-where
-file' = fromRawFilePath file
 
 {- Creates the symlink to the annexed content, and stages it in git. -}
 addSymlink :: RawFilePath -> Key -> Maybe InodeCache -> Annex ()
@@ -27,6 +27,7 @@ import Annex.BranchState
 import Types.BranchState
 import Utility.Directory.Stream
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 
 import qualified Data.Set as S
 import qualified Data.ByteString.Lazy as L
@@ -92,7 +93,7 @@ setJournalFile _jl ru file content = withOtherTmp $ \tmp -> do
 -- journal file is written atomically
 let jfile = journalFile file
 let tmpfile = tmp P.</> jfile
-liftIO $ withFile (fromRawFilePath tmpfile) WriteMode $ \h ->
+liftIO $ F.withFile (toOsPath tmpfile) WriteMode $ \h ->
 writeJournalHandle h content
 let dest = jd P.</> jfile
 let mv = do
@@ -133,7 +134,7 @@ checkCanAppendJournalFile _jl ru file = do
 -}
 appendJournalFile :: Journalable content => JournalLocked -> AppendableJournalFile -> content -> Annex ()
 appendJournalFile _jl (AppendableJournalFile (jd, jfile)) content = do
-let write = liftIO $ withFile (fromRawFilePath jfile) ReadWriteMode $ \h -> do
+let write = liftIO $ F.withFile (toOsPath jfile) ReadWriteMode $ \h -> do
 sz <- hFileSize h
 when (sz /= 0) $ do
 hSeek h SeekFromEnd (-1)
@@ -204,7 +205,7 @@ getJournalFileStale (GetPrivate getprivate) file = do
 jfile = journalFile file
 getfrom d = catchMaybeIO $
 discardIncompleteAppend . L.fromStrict
-<$> B.readFile (fromRawFilePath (d P.</> jfile))
+<$> F.readFile' (toOsPath (d P.</> jfile))
 
 -- Note that this forces read of the whole lazy bytestring.
 discardIncompleteAppend :: L.ByteString -> L.ByteString
@@ -243,17 +244,15 @@ withJournalHandle getjournaldir a = do
 where
 -- avoid overhead of creating the journal directory when it already
 -- exists
-opendir d = liftIO (openDirectory (fromRawFilePath d))
+opendir d = liftIO (openDirectory d)
 `catchIO` (const (createAnnexDirectory d >> opendir d))
 
 {- Checks if there are changes in the journal. -}
 journalDirty :: (BranchState -> Git.Repo -> RawFilePath) -> Annex Bool
 journalDirty getjournaldir = do
 st <- getState
-d <- fromRawFilePath <$> fromRepo (getjournaldir st)
-liftIO $
-(not <$> isDirectoryEmpty d)
-`catchIO` (const $ doesDirectoryExist d)
+d <- fromRepo (getjournaldir st)
+liftIO $ isDirectoryPopulated d
 
 {- Produces a filename to use in the journal for a file on the branch.
 - The filename does not include the journal directory.
@@ -38,6 +38,7 @@ import Utility.Tmp.Dir
 import Utility.CopyFile
 import qualified Database.Keys.Handle
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 
 import qualified Data.ByteString as S
 import qualified Data.ByteString.Char8 as S8
@@ -87,7 +88,7 @@ getAnnexLinkTarget' file coresymlinks = if coresymlinks
 
 probesymlink = R.readSymbolicLink file
 
-probefilecontent = withFile (fromRawFilePath file) ReadMode $ \h -> do
+probefilecontent = F.withFile (toOsPath file) ReadMode $ \h -> do
 s <- S.hGet h maxSymlinkSz
 -- If we got the full amount, the file is too large
 -- to be a symlink target.
@@ -117,7 +118,7 @@ makeGitLink linktarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
 ( liftIO $ do
 void $ tryIO $ R.removeLink file
 R.createSymbolicLink linktarget file
-, liftIO $ S.writeFile (fromRawFilePath file) linktarget
+, liftIO $ F.writeFile' (toOsPath file) linktarget
 )
 
 {- Creates a link on disk, and additionally stages it in git. -}
@@ -152,7 +153,7 @@ stagePointerFile file mode sha =
 
 writePointerFile :: RawFilePath -> Key -> Maybe FileMode -> IO ()
 writePointerFile file k mode = do
-S.writeFile (fromRawFilePath file) (formatPointer k)
+F.writeFile' (toOsPath file) (formatPointer k)
 maybe noop (R.setFileMode file) mode
 
 newtype Restage = Restage Bool
@@ -245,7 +246,9 @@ restagePointerFiles r = unlessM (Annex.getState Annex.insmudgecleanfilter) $ do
 when (numfiles > 0) $
 bracket lockindex unlockindex go
 where
-withtmpdir = withTmpDirIn (fromRawFilePath $ Git.localGitDir r) "annexindex"
+withtmpdir = withTmpDirIn
+(fromRawFilePath $ Git.localGitDir r)
+(toOsPath "annexindex")
 
 isunmodified tsd f orig =
 genInodeCache f tsd >>= return . \case
@@ -434,7 +437,7 @@ maxSymlinkSz = 8192
 isPointerFile :: RawFilePath -> IO (Maybe Key)
 isPointerFile f = catchDefaultIO Nothing $
 #if defined(mingw32_HOST_OS)
-withFile (fromRawFilePath f) ReadMode readhandle
+F.withFile (toOsPath f) ReadMode readhandle
 #else
 #if MIN_VERSION_unix(2,8,0)
 let open = do
@@ -445,7 +448,7 @@ isPointerFile f = catchDefaultIO Nothing $
 #else
 ifM (isSymbolicLink <$> R.getSymbolicLinkStatus f)
 ( return Nothing
-, withFile (fromRawFilePath f) ReadMode readhandle
+, F.withFile (toOsPath f) ReadMode readhandle
 )
 #endif
 #endif
@@ -6,6 +6,7 @@
 -}
 
 {-# LANGUAGE CPP #-}
+{-# LANGUAGE OverloadedStrings #-}
 
 module Annex.Proxy where
 
@@ -30,6 +31,7 @@ import Utility.Tmp.Dir
 import Utility.Metered
 import Git.Types
 import qualified Database.Export as Export
+import qualified Utility.FileIO as F
 #ifndef mingw32_HOST_OS
 import Utility.OpenFile
 #endif
@@ -173,7 +175,7 @@ proxySpecialRemote protoversion r ihdl ohdl owaitv oclosedv mexportdb = go
 -- independently. Also, this key is not getting added into the
 -- local annex objects.
 withproxytmpfile k a = withOtherTmp $ \othertmpdir ->
-withTmpDirIn (fromRawFilePath othertmpdir) "proxy" $ \tmpdir ->
+withTmpDirIn (fromRawFilePath othertmpdir) (toOsPath "proxy") $ \tmpdir ->
 a (toRawFilePath tmpdir P.</> keyFile k)
 
 proxyput af k = do
@@ -184,7 +186,7 @@ proxySpecialRemote protoversion r ihdl ohdl owaitv oclosedv mexportdb = go
 -- the client, to avoid bad content
 -- being stored in the special remote.
 iv <- startVerifyKeyContentIncrementally Remote.AlwaysVerify k
-h <- liftIO $ openFile (fromRawFilePath tmpfile) WriteMode
+h <- liftIO $ F.openFile (toOsPath tmpfile) WriteMode
 let nuketmp = liftIO $ removeWhenExistsWith removeFile (fromRawFilePath tmpfile)
 gotall <- liftIO $ receivetofile iv h len
 liftIO $ hClose h
@@ -1,12 +1,10 @@
 {- git-annex file replacing
 -
-- Copyright 2013-2021 Joey Hess <id@joeyh.name>
+- Copyright 2013-2025 Joey Hess <id@joeyh.name>
 -
 - Licensed under the GNU AGPL version 3 or higher.
 -}
 
-{-# LANGUAGE CPP #-}
-
 module Annex.ReplaceFile (
 replaceGitAnnexDirFile,
 replaceGitDirFile,
@@ -19,24 +17,24 @@ import Annex.Common
 import Annex.Tmp
 import Annex.Perms
 import Git
+import Utility.Tmp
 import Utility.Tmp.Dir
 import Utility.Directory.Create
-#ifndef mingw32_HOST_OS
-import Utility.Path.Max
-#endif
+import qualified System.FilePath.ByteString as P
 
 {- replaceFile on a file located inside the gitAnnexDir. -}
-replaceGitAnnexDirFile :: FilePath -> (RawFilePath -> Annex a) -> Annex a
+replaceGitAnnexDirFile :: RawFilePath -> (RawFilePath -> Annex a) -> Annex a
 replaceGitAnnexDirFile = replaceFile createAnnexDirectory
 
 {- replaceFile on a file located inside the .git directory. -}
-replaceGitDirFile :: FilePath -> (RawFilePath -> Annex a) -> Annex a
+replaceGitDirFile :: RawFilePath -> (RawFilePath -> Annex a) -> Annex a
 replaceGitDirFile = replaceFile $ \dir -> do
 top <- fromRepo localGitDir
 liftIO $ createDirectoryUnder [top] dir
 
 {- replaceFile on a worktree file. -}
-replaceWorkTreeFile :: FilePath -> (RawFilePath -> Annex a) -> Annex a
+replaceWorkTreeFile :: RawFilePath -> (RawFilePath -> Annex a) -> Annex a
 replaceWorkTreeFile = replaceFile createWorkTreeDirectory
 
 {- Replaces a possibly already existing file with a new version,
@@ -54,28 +52,17 @@ replaceWorkTreeFile = replaceFile createWorkTreeDirectory
 - The createdirectory action is only run when moving the file into place
 - fails, and can create any parent directory structure needed.
 -}
-replaceFile :: (RawFilePath -> Annex ()) -> FilePath -> (RawFilePath -> Annex a) -> Annex a
+replaceFile :: (RawFilePath -> Annex ()) -> RawFilePath -> (RawFilePath -> Annex a) -> Annex a
 replaceFile createdirectory file action = replaceFile' createdirectory file (const True) action
 
-replaceFile' :: (RawFilePath -> Annex ()) -> FilePath -> (a -> Bool) -> (RawFilePath -> Annex a) -> Annex a
+replaceFile' :: (RawFilePath -> Annex ()) -> RawFilePath -> (a -> Bool) -> (RawFilePath -> Annex a) -> Annex a
 replaceFile' createdirectory file checkres action = withOtherTmp $ \othertmpdir -> do
-let othertmpdir' = fromRawFilePath othertmpdir
-#ifndef mingw32_HOST_OS
--- Use part of the filename as the template for the temp
--- directory. This does not need to be unique, but it
--- makes it more clear what this temp directory is for.
-filemax <- liftIO $ fileNameLengthLimit othertmpdir'
-let basetmp = take (filemax `div` 2) (takeFileName file)
-#else
--- Windows has limits on the whole path length, so keep
--- it short.
-let basetmp = "t"
-#endif
-withTmpDirIn othertmpdir' basetmp $ \tmpdir -> do
-let tmpfile = toRawFilePath (tmpdir </> basetmp)
+let basetmp = relatedTemplate' (P.takeFileName file)
+withTmpDirIn (fromRawFilePath othertmpdir) (toOsPath basetmp) $ \tmpdir -> do
+let tmpfile = toRawFilePath tmpdir P.</> basetmp
 r <- action tmpfile
 when (checkres r) $
-replaceFileFrom tmpfile (toRawFilePath file) createdirectory
+replaceFileFrom tmpfile file createdirectory
 return r
 
 replaceFileFrom :: RawFilePath -> RawFilePath -> (RawFilePath -> Annex ()) -> Annex ()
@@ -161,7 +161,7 @@ checkStaleSizeChanges h@(RepoSizeHandle (Just _) livev) = do
 where
 go livedir lck pidlockfile now = do
 void $ tryNonAsync $ do
-lockfiles <- liftIO $ filter (not . dirCruft)
+lockfiles <- liftIO $ filter (not . dirCruft . toRawFilePath)
 <$> getDirectoryContents (fromRawFilePath livedir)
 stale <- forM lockfiles $ \lockfile ->
 if (lockfile /= pidlockfile)
Annex/Ssh.hs | 30
@@ -5,6 +5,7 @@
 - Licensed under the GNU AGPL version 3 or higher.
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE CPP #-}
 
 module Annex.Ssh (
@@ -100,15 +101,16 @@ consumeStdinParams NoConsumeStdin = [Param "-n"]
 
 {- Returns a filename to use for a ssh connection caching socket, and
 - parameters to enable ssh connection caching. -}
-sshCachingInfo :: (SshHost, Maybe Integer) -> Annex (Maybe FilePath, [CommandParam])
+sshCachingInfo :: (SshHost, Maybe Integer) -> Annex (Maybe RawFilePath, [CommandParam])
 sshCachingInfo (host, port) = go =<< sshCacheDir'
 where
 go (Right dir) =
 liftIO (bestSocketPath $ dir P.</> hostport2socket host port) >>= return . \case
 Nothing -> (Nothing, [])
 Just socketfile ->
-let socketfile' = fromRawFilePath socketfile
-in (Just socketfile', sshConnectionCachingParams socketfile')
+(Just socketfile
+, sshConnectionCachingParams (fromRawFilePath socketfile)
+)
 -- No connection caching with concurrency is not a good
 -- combination, so warn the user.
 go (Left whynocaching) = do
@@ -214,7 +216,7 @@ portParams (Just port) = [Param "-p", Param $ show port]
 - Locks the socket lock file to prevent other git-annex processes from
 - stopping the ssh multiplexer on this socket.
 -}
-prepSocket :: FilePath -> SshHost -> [CommandParam] -> Annex ()
+prepSocket :: RawFilePath -> SshHost -> [CommandParam] -> Annex ()
 prepSocket socketfile sshhost sshparams = do
 -- There could be stale ssh connections hanging around
 -- from a previous git-annex run that was interrupted.
@@ -286,13 +288,13 @@ prepSocket socketfile sshhost sshparams = do
 - and this check makes such files be skipped since the corresponding lock
 - file won't exist.
 -}
-enumSocketFiles :: Annex [FilePath]
+enumSocketFiles :: Annex [RawFilePath]
 enumSocketFiles = liftIO . go =<< sshCacheDir
 where
 go Nothing = return []
 go (Just dir) = filterM (R.doesPathExist . socket2lock)
 =<< filter (not . isLock)
-<$> catchDefaultIO [] (dirContents (fromRawFilePath dir))
+<$> catchDefaultIO [] (dirContents dir)
 
 {- Stop any unused ssh connection caching processes. -}
 sshCleanup :: Annex ()
@@ -324,9 +326,9 @@ sshCleanup = mapM_ cleanup =<< enumSocketFiles
 forceSshCleanup :: Annex ()
 forceSshCleanup = mapM_ forceStopSsh =<< enumSocketFiles
 
-forceStopSsh :: FilePath -> Annex ()
+forceStopSsh :: RawFilePath -> Annex ()
 forceStopSsh socketfile = withNullHandle $ \nullh -> do
-let (dir, base) = splitFileName socketfile
+let (dir, base) = splitFileName (fromRawFilePath socketfile)
 let p = (proc "ssh" $ toCommand $
 [ Param "-O", Param "stop" ] ++
 sshConnectionCachingParams base ++
@@ -338,7 +340,7 @@ forceStopSsh socketfile = withNullHandle $ \nullh -> do
 }
 void $ liftIO $ catchMaybeIO $ withCreateProcess p $ \_ _ _ pid ->
 forceSuccessProcess p pid
-liftIO $ removeWhenExistsWith R.removeLink (toRawFilePath socketfile)
+liftIO $ removeWhenExistsWith R.removeLink socketfile
 
 {- This needs to be as short as possible, due to limitations on the length
 - of the path to a socket file. At the same time, it needs to be unique
@@ -355,13 +357,13 @@ hostport2socket' s
 where
 lengthofmd5s = 32
 
-socket2lock :: FilePath -> RawFilePath
-socket2lock socket = toRawFilePath (socket ++ lockExt)
+socket2lock :: RawFilePath -> RawFilePath
+socket2lock socket = socket <> lockExt
 
-isLock :: FilePath -> Bool
-isLock f = lockExt `isSuffixOf` f
+isLock :: RawFilePath -> Bool
+isLock f = lockExt `S.isSuffixOf` f
 
-lockExt :: String
+lockExt :: S.ByteString
 lockExt = ".lock"
 
 {- This is the size of the sun_path component of sockaddr_un, which
Annex/Tmp.hs | 10
@@ -60,15 +60,17 @@ cleanupOtherTmp = do
 void $ tryIO $ tryExclusiveLock tmplck $ do
 tmpdir <- fromRawFilePath <$> fromRepo gitAnnexTmpOtherDir
 void $ liftIO $ tryIO $ removeDirectoryRecursive tmpdir
-oldtmp <- fromRawFilePath <$> fromRepo gitAnnexTmpOtherDirOld
+oldtmp <- fromRepo gitAnnexTmpOtherDirOld
 liftIO $ mapM_ cleanold
 =<< emptyWhenDoesNotExist (dirContentsRecursive oldtmp)
-liftIO $ void $ tryIO $ removeDirectory oldtmp -- when empty
+-- remove when empty
+liftIO $ void $ tryIO $
+removeDirectory (fromRawFilePath oldtmp)
 where
 cleanold f = do
 now <- liftIO getPOSIXTime
 let oldenough = now - (60 * 60 * 24 * 7)
-catchMaybeIO (modificationTime <$> R.getSymbolicLinkStatus (toRawFilePath f)) >>= \case
+catchMaybeIO (modificationTime <$> R.getSymbolicLinkStatus f) >>= \case
 Just mtime | realToFrac mtime <= oldenough ->
-void $ tryIO $ removeWhenExistsWith R.removeLink (toRawFilePath f)
+void $ tryIO $ removeWhenExistsWith R.removeLink f
 _ -> return ()
@@ -21,6 +21,7 @@ import qualified Annex
 import Utility.TimeStamp
 
 import Data.ByteString.Builder
+import qualified Data.ByteString as B
 import qualified Data.Attoparsec.ByteString.Lazy as A
 
 currentVectorClock :: Annex CandidateVectorClock
@@ -76,7 +77,7 @@ formatVectorClock (VectorClock t) = show t
 buildVectorClock :: VectorClock -> Builder
 buildVectorClock = string7 . formatVectorClock
 
-parseVectorClock :: String -> Maybe VectorClock
+parseVectorClock :: B.ByteString -> Maybe VectorClock
 parseVectorClock t = VectorClock <$> parsePOSIXTime t
 
 vectorClockParser :: A.Parser VectorClock
@@ -12,12 +12,13 @@ import Data.Time.Clock.POSIX
 import Types.VectorClock
 import Utility.Env
 import Utility.TimeStamp
+import Utility.FileSystemEncoding
 
 startVectorClock :: IO (IO CandidateVectorClock)
 startVectorClock = go =<< getEnv "GIT_ANNEX_VECTOR_CLOCK"
 where
 go Nothing = timebased
-go (Just s) = case parsePOSIXTime s of
+go (Just s) = case parsePOSIXTime (encodeBS s) of
 Just t -> return (pure (CandidateVectorClock t))
 Nothing -> timebased
 -- Avoid using fractional seconds in the CandidateVectorClock.
@@ -30,6 +30,8 @@ import Utility.Metered
 import Utility.Tmp
 import Messages.Progress
 import Logs.Transfer
+import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 
 import Network.URI
 import Control.Concurrent.Async
@@ -37,7 +39,6 @@ import Text.Read
 import Data.Either
 import qualified Data.Aeson as Aeson
 import GHC.Generics
-import qualified Data.ByteString as B
 import qualified Data.ByteString.Char8 as B8
 
 -- youtube-dl can follow redirects to anywhere, including potentially
@@ -101,9 +102,9 @@ youtubeDl' url workdir p uo
 | isytdlp cmd = liftIO $
 (nub . lines <$> readFile filelistfile)
 `catchIO` (pure . const [])
-| otherwise = workdirfiles
-workdirfiles = liftIO $ filter (/= filelistfile)
-<$> (filterM (doesFileExist) =<< dirContents workdir)
+| otherwise = map fromRawFilePath <$> workdirfiles
+workdirfiles = liftIO $ filter (/= toRawFilePath filelistfile)
+<$> (filterM R.doesPathExist =<< dirContents (toRawFilePath workdir))
 filelistfile = workdir </> filelistfilebase
 filelistfilebase = "git-annex-file-list-file"
 isytdlp cmd = cmd == "yt-dlp"
@@ -159,7 +160,7 @@ youtubeDlMaxSize workdir = ifM (Annex.getRead Annex.force)
 Just have -> do
 inprogress <- sizeOfDownloadsInProgress (const True)
 partial <- liftIO $ sum
-<$> (mapM (getFileSize . toRawFilePath) =<< dirContents workdir)
+<$> (mapM getFileSize =<< dirContents (toRawFilePath workdir))
 reserve <- annexDiskReserve <$> Annex.getGitConfig
 let maxsize = have - reserve - inprogress + partial
 if maxsize > 0
@@ -352,7 +353,7 @@ youtubePlaylist url = do
 else return $ Left $ "Scraping needs yt-dlp, but git-annex has been configured to use " ++ cmd
 
 youtubePlaylist' :: URLString -> String -> IO (Either String [YoutubePlaylistItem])
-youtubePlaylist' url cmd = withTmpFile "yt-dlp" $ \tmpfile h -> do
+youtubePlaylist' url cmd = withTmpFile (toOsPath (toRawFilePath "yt-dlp")) $ \tmpfile h -> do
 hClose h
 (outerr, ok) <- processTranscript cmd
 [ "--simulate"
@@ -362,14 +363,14 @@ youtubePlaylist' url cmd = withTmpFile "yt-dlp" $ \tmpfile h -> do
 , "--print-to-file"
 -- Write json with selected fields.
 , "%(.{" ++ intercalate "," youtubePlaylistItemFields ++ "})j"
-, tmpfile
+, fromRawFilePath (fromOsPath tmpfile)
 , url
 ]
 Nothing
 if ok
 then flip catchIO (pure . Left . show) $ do
 v <- map Aeson.eitherDecodeStrict . B8.lines
-<$> B.readFile tmpfile
+<$> F.readFile' tmpfile
 return $ case partitionEithers v of
 ((parserr:_), _) ->
 Left $ "yt-dlp json parse error: " ++ parserr
@@ -22,6 +22,7 @@ import qualified Remote
 import qualified Types.Remote as Remote
 import Config.DynamicConfig
 import Annex.SpecialRemote.Config
+import qualified Utility.FileIO as F

 import Control.Concurrent.STM
 import System.Posix.Types

@@ -121,9 +122,9 @@ startDaemonStatus = do
 - and parts of it are not relevant. -}
 writeDaemonStatusFile :: FilePath -> DaemonStatus -> IO ()
 writeDaemonStatusFile file status =
-viaTmp writeFile file =<< serialized <$> getPOSIXTime
+viaTmp F.writeFile' (toOsPath (toRawFilePath file)) =<< serialized <$> getPOSIXTime
 where
-serialized now = unlines
+serialized now = encodeBS $ unlines
 [ "lastRunning:" ++ show now
 , "scanComplete:" ++ show (scanComplete status)
 , "sanityCheckRunning:" ++ show (sanityCheckRunning status)

@@ -135,13 +136,13 @@ readDaemonStatusFile file = parse <$> newDaemonStatus <*> readFile file
 where
 parse status = foldr parseline status . lines
 parseline line status
-| key == "lastRunning" = parseval parsePOSIXTime $ \v ->
+| key == "lastRunning" = parseval (parsePOSIXTime . encodeBS) $ \v ->
 status { lastRunning = Just v }
 | key == "scanComplete" = parseval readish $ \v ->
 status { scanComplete = v }
 | key == "sanityCheckRunning" = parseval readish $ \v ->
 status { sanityCheckRunning = v }
-| key == "lastSanityCheck" = parseval parsePOSIXTime $ \v ->
+| key == "lastSanityCheck" = parseval (parsePOSIXTime . encodeBS) $ \v ->
 status { lastSanityCheck = Just v }
 | otherwise = status -- unparsable line
 where

@@ -17,6 +17,7 @@ import Utility.Shell
 import Utility.Tmp
 import Utility.Env
 import Utility.SshConfig
+import qualified Utility.FileIO as F

 #ifdef darwin_HOST_OS
 import Utility.OSX

@@ -28,6 +29,7 @@ import Utility.Android
 #endif

 import System.PosixCompat.Files (ownerExecuteMode)
+import qualified Data.ByteString.Char8 as S8

 standaloneAppBase :: IO (Maybe FilePath)
 standaloneAppBase = getEnv "GIT_ANNEX_APP_BASE"

@@ -82,7 +84,7 @@ ensureInstalled = ifM (isJust <$> getEnv "GIT_ANNEX_PACKAGE_INSTALL")
 let runshell var = "exec " ++ base </> "runshell " ++ var
 let rungitannexshell var = runshell $ "git-annex-shell -c \"" ++ var ++ "\""

-installWrapper (sshdir </> "git-annex-shell") $ unlines
+installWrapper (toRawFilePath (sshdir </> "git-annex-shell")) $
 [ shebang
 , "set -e"
 , "if [ \"x$SSH_ORIGINAL_COMMAND\" != \"x\" ]; then"

@@ -91,7 +93,7 @@ ensureInstalled = ifM (isJust <$> getEnv "GIT_ANNEX_PACKAGE_INSTALL")
 , rungitannexshell "$@"
 , "fi"
 ]
-installWrapper (sshdir </> "git-annex-wrapper") $ unlines
+installWrapper (toRawFilePath (sshdir </> "git-annex-wrapper")) $
 [ shebang
 , "set -e"
 , runshell "\"$@\""

@@ -99,14 +101,15 @@ ensureInstalled = ifM (isJust <$> getEnv "GIT_ANNEX_PACKAGE_INSTALL")

 installFileManagerHooks program

-installWrapper :: FilePath -> String -> IO ()
+installWrapper :: RawFilePath -> [String] -> IO ()
 installWrapper file content = do
-curr <- catchDefaultIO "" $ readFileStrict file
+let content' = map encodeBS content
-when (curr /= content) $ do
+curr <- catchDefaultIO [] $ fileLines' <$> F.readFile' (toOsPath file)
-createDirectoryIfMissing True (fromRawFilePath (parentDir (toRawFilePath file)))
+when (curr /= content') $ do
-viaTmp writeFile file content
+createDirectoryIfMissing True (fromRawFilePath (parentDir file))
-modifyFileMode (toRawFilePath file) $
+viaTmp F.writeFile' (toOsPath file) $
-addModes [ownerExecuteMode]
+linesFile' (S8.unlines content')
+modifyFileMode file $ addModes [ownerExecuteMode]

 installFileManagerHooks :: FilePath -> IO ()
 #ifdef linux_HOST_OS

@@ -127,17 +130,18 @@ installFileManagerHooks program = unlessM osAndroid $ do
 (kdeDesktopFile actions)
 where
 genNautilusScript scriptdir action =
-installscript (scriptdir </> scriptname action) $ unlines
+installscript (toRawFilePath (scriptdir </> scriptname action)) $ unlines
 [ shebang
 , autoaddedcomment
 , "exec " ++ program ++ " " ++ action ++ " --notify-start --notify-finish -- \"$@\""
 ]
 scriptname action = "git-annex " ++ action
 installscript f c = whenM (safetoinstallscript f) $ do
-writeFile f c
+writeFile (fromRawFilePath f) c
-modifyFileMode (toRawFilePath f) $ addModes [ownerExecuteMode]
+modifyFileMode f $ addModes [ownerExecuteMode]
 safetoinstallscript f = catchDefaultIO True $
-elem autoaddedcomment . lines <$> readFileStrict f
+elem (encodeBS autoaddedcomment) . fileLines'
+<$> F.readFile' (toOsPath f)
 autoaddedcomment = "# " ++ autoaddedmsg ++ " (To disable, chmod 600 this file.)"
 autoaddedmsg = "Automatically added by git-annex, do not edit."

@@ -5,6 +5,7 @@
 - Licensed under the GNU AGPL version 3 or higher.
 -}

+{-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE CPP #-}

 module Assistant.Repair where

@@ -33,6 +34,8 @@ import Utility.ThreadScheduler
 import qualified Utility.RawFilePath as R

 import Control.Concurrent.Async
+import qualified Data.ByteString as S
+import qualified System.FilePath.ByteString as P

 {- When the FsckResults require a repair, tries to do a non-destructive
 - repair. If that fails, pops up an alert. -}

@@ -132,26 +135,26 @@ repairStaleGitLocks r = do
 repairStaleLocks lockfiles
 return $ not $ null lockfiles
 where
-findgitfiles = dirContentsRecursiveSkipping (== dropTrailingPathSeparator (fromRawFilePath annexDir)) True . fromRawFilePath . Git.localGitDir
+findgitfiles = dirContentsRecursiveSkipping (== P.dropTrailingPathSeparator annexDir) True . Git.localGitDir
 islock f
-| "gc.pid" `isInfixOf` f = False
+| "gc.pid" `S.isInfixOf` f = False
-| ".lock" `isSuffixOf` f = True
+| ".lock" `S.isSuffixOf` f = True
-| takeFileName f == "MERGE_HEAD" = True
+| P.takeFileName f == "MERGE_HEAD" = True
 | otherwise = False

-repairStaleLocks :: [FilePath] -> Assistant ()
+repairStaleLocks :: [RawFilePath] -> Assistant ()
 repairStaleLocks lockfiles = go =<< getsizes
 where
 getsize lf = catchMaybeIO $ (\s -> (lf, s))
-<$> getFileSize (toRawFilePath lf)
+<$> getFileSize lf
 getsizes = liftIO $ catMaybes <$> mapM getsize lockfiles
 go [] = return ()
-go l = ifM (liftIO $ null <$> Lsof.query ("--" : map fst l))
+go l = ifM (liftIO $ null <$> Lsof.query ("--" : map (fromRawFilePath . fst) l))
 ( do
 waitforit "to check stale git lock file"
 l' <- getsizes
 if l' == l
-then liftIO $ mapM_ (removeWhenExistsWith R.removeLink . toRawFilePath . fst) l
+then liftIO $ mapM_ (removeWhenExistsWith R.removeLink . fst) l
 else go l'
 , do
 waitforit "for git lock file writer"

@@ -17,6 +17,7 @@ import Utility.SshConfig
 import Git.Remote
 import Utility.SshHost
 import Utility.Process.Transcript
+import qualified Utility.FileIO as F

 import Data.Text (Text)
 import qualified Data.Text as T

@@ -158,8 +159,8 @@ removeAuthorizedKeys :: Bool -> FilePath -> SshPubKey -> IO ()
 removeAuthorizedKeys gitannexshellonly dir pubkey = do
 let keyline = authorizedKeysLine gitannexshellonly dir pubkey
 sshdir <- sshDir
-let keyfile = sshdir </> "authorized_keys"
+let keyfile = toOsPath $ toRawFilePath $ sshdir </> "authorized_keys"
-tryWhenExists (lines <$> readFileStrict keyfile) >>= \case
+tryWhenExists (map decodeBS . fileLines' <$> F.readFile' keyfile) >>= \case
 Just ls -> viaTmp writeSshConfig keyfile $
 unlines $ filter (/= keyline) ls
 Nothing -> noop

@@ -212,7 +213,7 @@ authorizedKeysLine gitannexshellonly dir pubkey

 {- Generates a ssh key pair. -}
 genSshKeyPair :: IO SshKeyPair
-genSshKeyPair = withTmpDir "git-annex-keygen" $ \dir -> do
+genSshKeyPair = withTmpDir (toOsPath (toRawFilePath "git-annex-keygen")) $ \dir -> do
 ok <- boolSystem "ssh-keygen"
 [ Param "-P", Param "" -- no password
 , Param "-f", File $ dir </> "key"

@@ -47,7 +47,7 @@ transferPollerThread = namedThread "TransferPoller" $ do
 | otherwise = do
 let (f, _, _) = transferFileAndLockFile t g
 mi <- liftIO $ catchDefaultIO Nothing $
-readTransferInfoFile Nothing (fromRawFilePath f)
+readTransferInfoFile Nothing f
 maybe noop (newsize t info . bytesComplete) mi

 newsize t info sz

@@ -57,7 +57,7 @@ onErr = giveup

 {- Called when a new transfer information file is written. -}
 onAdd :: Handler
-onAdd file = case parseTransferFile file of
+onAdd file = case parseTransferFile (toRawFilePath file) of
 Nothing -> noop
 Just t -> go t =<< liftAnnex (checkTransfer t)
 where

@@ -73,9 +73,9 @@ onAdd file = case parseTransferFile file of
 - The only thing that should change in the transfer info is the
 - bytesComplete, so that's the only thing updated in the DaemonStatus. -}
 onModify :: Handler
-onModify file = case parseTransferFile file of
+onModify file = case parseTransferFile (toRawFilePath file) of
 Nothing -> noop
-Just t -> go t =<< liftIO (readTransferInfoFile Nothing file)
+Just t -> go t =<< liftIO (readTransferInfoFile Nothing (toRawFilePath file))
 where
 go _ Nothing = noop
 go t (Just newinfo) = alterTransferInfo t $

@@ -88,7 +88,7 @@ watchesTransferSize = modifyTracked

 {- Called when a transfer information file is removed. -}
 onDel :: Handler
-onDel file = case parseTransferFile file of
+onDel file = case parseTransferFile (toRawFilePath file) of
 Nothing -> noop
 Just t -> do
 debug [ "transfer finishing:", show t]

@@ -289,7 +289,7 @@ onAddSymlink' linktarget mk file filestatus = go mk
 if linktarget == Just link
 then ensurestaged (Just link) =<< getDaemonStatus
 else do
-liftAnnex $ replaceWorkTreeFile file $
+liftAnnex $ replaceWorkTreeFile (toRawFilePath file) $
 makeAnnexLink link
 addLink file link (Just key)
 -- other symlink, not git-annex

@@ -89,9 +89,9 @@ webAppThread assistantdata urlrenderer noannex cannotrun postfirstrun listenhost
 , return app
 )
 runWebApp tlssettings listenhost' listenport' app' $ \addr -> if noannex
-then withTmpFile "webapp.html" $ \tmpfile h -> do
+then withTmpFile (toOsPath "webapp.html") $ \tmpfile h -> do
 hClose h
-go tlssettings addr webapp tmpfile Nothing
+go tlssettings addr webapp (fromRawFilePath (fromOsPath tmpfile)) Nothing
 else do
 htmlshim <- getAnnex' $ fromRepo gitAnnexHtmlShim
 urlfile <- getAnnex' $ fromRepo gitAnnexUrlFile

@@ -41,9 +41,11 @@ import qualified Utility.Url as Url
 import qualified Annex.Url as Url hiding (download)
 import Utility.Tuple
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F

 import Data.Either
 import qualified Data.Map as M
+import qualified System.FilePath.ByteString as P

 {- Upgrade without interaction in the webapp. -}
 unattendedUpgrade :: Assistant ()

@@ -163,7 +165,7 @@ upgradeToDistribution newdir cleanup distributionfile = do
 {- OS X uses a dmg, so mount it, and copy the contents into place. -}
 unpack = liftIO $ do
 olddir <- oldVersionLocation
-withTmpDirIn (fromRawFilePath (parentDir (toRawFilePath newdir))) "git-annex.upgrade" $ \tmpdir -> do
+withTmpDirIn (fromRawFilePath (parentDir (toRawFilePath newdir))) (toOsPath (toRawFilePath "git-annex.upgrade")) $ \tmpdir -> do
 void $ boolSystem "hdiutil"
 [ Param "attach", File distributionfile
 , Param "-mountpoint", File tmpdir

@@ -188,7 +190,7 @@ upgradeToDistribution newdir cleanup distributionfile = do
 - into place. -}
 unpack = liftIO $ do
 olddir <- oldVersionLocation
-withTmpDirIn (fromRawFilePath $ parentDir $ toRawFilePath newdir) "git-annex.upgrade" $ \tmpdir -> do
+withTmpDirIn (fromRawFilePath $ parentDir $ toRawFilePath newdir) (toOsPath $ toRawFilePath "git-annex.upgrade") $ \tmpdir -> do
 let tarball = tmpdir </> "tar"
 -- Cannot rely on filename extension, and this also
 -- avoids problems if tar doesn't support transparent

@@ -212,8 +214,8 @@ upgradeToDistribution newdir cleanup distributionfile = do
 makeorigsymlink olddir
 return (newdir </> "git-annex", deleteold)
 installby a dstdir srcdir =
-mapM_ (\x -> a (toRawFilePath x) (toRawFilePath (dstdir </> takeFileName x)))
+mapM_ (\x -> a x (toRawFilePath dstdir P.</> P.takeFileName x))
-=<< dirContents srcdir
+=<< dirContents (toRawFilePath srcdir)
 #endif
 sanitycheck dir =
 unlessM (doesDirectoryExist dir) $

@@ -280,14 +282,14 @@ deleteFromManifest dir = do
 fs <- map (dir </>) . lines <$> catchDefaultIO "" (readFile manifest)
 mapM_ (removeWhenExistsWith R.removeLink . toRawFilePath) fs
 removeWhenExistsWith R.removeLink (toRawFilePath manifest)
-removeEmptyRecursive dir
+removeEmptyRecursive (toRawFilePath dir)
 where
 manifest = dir </> "git-annex.MANIFEST"

-removeEmptyRecursive :: FilePath -> IO ()
+removeEmptyRecursive :: RawFilePath -> IO ()
 removeEmptyRecursive dir = do
 mapM_ removeEmptyRecursive =<< dirContents dir
-void $ tryIO $ removeDirectory dir
+void $ tryIO $ removeDirectory (fromRawFilePath dir)

 {- This is a file that the UpgradeWatcher can watch for modifications to
 - detect when git-annex has been upgraded.

@@ -322,13 +324,14 @@ downloadDistributionInfo :: Assistant (Maybe GitAnnexDistribution)
 downloadDistributionInfo = do
 uo <- liftAnnex Url.getUrlOptions
 gpgcmd <- liftAnnex $ gpgCmd <$> Annex.getGitConfig
-liftIO $ withTmpDir "git-annex.tmp" $ \tmpdir -> do
+liftIO $ withTmpDir (toOsPath (toRawFilePath "git-annex.tmp")) $ \tmpdir -> do
 let infof = tmpdir </> "info"
 let sigf = infof ++ ".sig"
 ifM (isRight <$> Url.download nullMeterUpdate Nothing distributionInfoUrl infof uo
 <&&> (isRight <$> Url.download nullMeterUpdate Nothing distributionInfoSigUrl sigf uo)
 <&&> verifyDistributionSig gpgcmd sigf)
-( parseInfoFile <$> readFileStrict infof
+( parseInfoFile . map decodeBS . fileLines'
+<$> F.readFile' (toOsPath (toRawFilePath infof))
 , return Nothing
 )

@@ -360,7 +363,7 @@ upgradeSupported = False
 verifyDistributionSig :: GpgCmd -> FilePath -> IO Bool
 verifyDistributionSig gpgcmd sig = readProgramFile >>= \case
 Just p | isAbsolute p ->
-withUmask 0o0077 $ withTmpDir "git-annex-gpg.tmp" $ \gpgtmp -> do
+withUmask 0o0077 $ withTmpDir (toOsPath (toRawFilePath "git-annex-gpg.tmp")) $ \gpgtmp -> do
 let trustedkeys = takeDirectory p </> "trustedkeys.gpg"
 boolGpgCmd gpgcmd
 [ Param "--no-default-keyring"

@@ -89,7 +89,7 @@ deleteCurrentRepository = dangerPage $ do
 rs <- syncRemotes <$> getDaemonStatus
 mapM_ (\r -> changeSyncable (Just r) False) rs

-liftAnnex $ prepareRemoveAnnexDir dir
+liftAnnex $ prepareRemoveAnnexDir (toRawFilePath dir)
 liftIO $ removeDirectoryRecursive . fromRawFilePath
 =<< absPath (toRawFilePath dir)

@@ -389,13 +389,13 @@ sshAuthTranscript sshinput opts sshhost cmd input = case inputAuthMethod sshinpu
 v <- getCachedCred login
 liftIO $ case v of
 Nothing -> go [passwordprompts 0] Nothing
-Just pass -> withTmpFile "ssh" $ \passfile h -> do
+Just pass -> withTmpFile (toOsPath "ssh") $ \passfile h -> do
 hClose h
-writeFileProtected (toRawFilePath passfile) pass
+writeFileProtected (fromOsPath passfile) pass
 environ <- getEnvironment
 let environ' = addEntries
 [ ("SSH_ASKPASS", program)
-, (sshAskPassEnv, passfile)
+, (sshAskPassEnv, fromRawFilePath $ fromOsPath passfile)
 , ("DISPLAY", ":0")
 ] environ
 go [passwordprompts 1] (Just environ')

@@ -29,12 +29,12 @@ import Data.Word
 genKeyName :: String -> S.ShortByteString
 genKeyName s
 -- Avoid making keys longer than the length of a SHA256 checksum.
-| bytelen > sha256len = S.toShort $ encodeBS $
+| bytelen > sha256len = S.toShort $
-truncateFilePath (sha256len - md5len - 1) s' ++ "-" ++
+truncateFilePath (sha256len - md5len - 1) s'
-show (md5 bl)
+<> "-" <> encodeBS (show (md5 bl))
-| otherwise = S.toShort $ encodeBS s'
+| otherwise = S.toShort s'
 where
-s' = preSanitizeKeyName s
+s' = encodeBS $ preSanitizeKeyName s
 bl = encodeBL s
 bytelen = fromIntegral $ L.length bl

@@ -26,11 +26,12 @@ import Utility.Path.AbsRel
 import Utility.FileMode
 import Utility.CopyFile
 import Utility.FileSystemEncoding
+import Utility.SystemDirectory

 mklibs :: FilePath -> a -> IO Bool
 mklibs top _installedbins = do
-fs <- dirContentsRecursive top
+fs <- dirContentsRecursive (toRawFilePath top)
-exes <- filterM checkExe fs
+exes <- filterM checkExe (map fromRawFilePath fs)
 libs <- runLdd exes

 glibclibs <- glibcLibs

@@ -80,7 +81,7 @@ consolidateUsrLib top libdirs = go [] libdirs
 forM_ fs $ \f -> do
 let src = inTop top (x </> f)
 let dst = inTop top (d </> f)
-unless (dirCruft f) $
+unless (dirCruft (toRawFilePath f)) $
 unlessM (doesDirectoryExist src) $
 renameFile src dst
 symlinkHwCapDirs top d

@@ -25,6 +25,7 @@ import Utility.Path.AbsRel
 import Utility.Directory
 import Utility.Env
 import Utility.FileSystemEncoding
+import Utility.SystemDirectory
 import Build.BundledPrograms
 #ifdef darwin_HOST_OS
 import System.IO

@@ -71,14 +72,15 @@ installGitLibs topdir = do
 -- install git-core programs; these are run by the git command
 createDirectoryIfMissing True gitcoredestdir
 execpath <- getgitpath "exec-path"
-cfs <- dirContents execpath
+cfs <- dirContents (toRawFilePath execpath)
 forM_ cfs $ \f -> do
+let f' = fromRawFilePath f
 destf <- ((gitcoredestdir </>) . fromRawFilePath)
 <$> relPathDirToFile
 (toRawFilePath execpath)
-(toRawFilePath f)
+f
 createDirectoryIfMissing True (takeDirectory destf)
-issymlink <- isSymbolicLink <$> getSymbolicLinkStatus f
+issymlink <- isSymbolicLink <$> getSymbolicLinkStatus f'
 if issymlink
 then do
 -- many git-core files may symlink to eg

@@ -91,20 +93,20 @@ installGitLibs topdir = do
 -- Other git-core files symlink to a file
 -- beside them in the directory. Those
 -- links can be copied as-is.
-linktarget <- readSymbolicLink f
+linktarget <- readSymbolicLink f'
 if takeFileName linktarget == linktarget
-then cp f destf
+then cp f' destf
 else do
 let linktarget' = progDir topdir </> takeFileName linktarget
 unlessM (doesFileExist linktarget') $ do
 createDirectoryIfMissing True (takeDirectory linktarget')
-L.readFile f >>= L.writeFile linktarget'
+L.readFile f' >>= L.writeFile linktarget'
 removeWhenExistsWith removeLink destf
 rellinktarget <- relPathDirToFile
 (toRawFilePath (takeDirectory destf))
 (toRawFilePath linktarget')
 createSymbolicLink (fromRawFilePath rellinktarget) destf
-else cp f destf
+else cp f' destf

 -- install git's template files
 -- git does not have an option to get the path of these,

@@ -112,14 +114,14 @@ installGitLibs topdir = do
 -- next to the --man-path, in eg /usr/share/git-core
 manpath <- getgitpath "man-path"
 let templatepath = manpath </> ".." </> "git-core" </> "templates"
-tfs <- dirContents templatepath
+tfs <- dirContents (toRawFilePath templatepath)
 forM_ tfs $ \f -> do
 destf <- ((templatedestdir </>) . fromRawFilePath)
 <$> relPathDirToFile
 (toRawFilePath templatepath)
-(toRawFilePath f)
+f
 createDirectoryIfMissing True (takeDirectory destf)
-cp f destf
+cp (fromRawFilePath f) destf
 where
 gitcoredestdir = topdir </> "git-core"
 templatedestdir = topdir </> "templates"

@@ -1,6 +1,6 @@
 {- Package version determination. -}

-{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE LambdaCase, OverloadedStrings #-}
 {-# OPTIONS_GHC -fno-warn-tabs #-}

 module Build.Version where

@@ -14,7 +14,9 @@ import Prelude

 import Utility.Monad
 import Utility.Exception
-import Utility.Misc
+import Utility.OsPath
+import Utility.FileSystemEncoding
+import qualified Utility.FileIO as F

 type Version = String

@@ -56,11 +58,11 @@ getChangelogVersion = do
 middle = drop 1 . init

 writeVersion :: Version -> IO ()
-writeVersion ver = catchMaybeIO (readFileStrict f) >>= \case
+writeVersion ver = catchMaybeIO (F.readFile' f) >>= \case
 Just s | s == body -> return ()
-_ -> writeFile f body
+_ -> F.writeFile' f body
 where
-body = unlines $ concat
+body = encodeBS $ unlines $ concat
 [ header
 , ["packageversion :: String"]
 , ["packageversion = \"" ++ ver ++ "\""]

@@ -71,4 +73,4 @@ writeVersion ver = catchMaybeIO (readFileStrict f) >>= \case
 , ""
 ]
 footer = []
-f = "Build/Version"
+f = toOsPath "Build/Version"

@@ -3,6 +3,7 @@ git-annex (10.20250116) UNRELEASED; urgency=medium
 * Support help.autocorrect settings "prompt", "never", and "immediate".
 * Allow setting remote.foo.annex-tracking-branch to a branch name
 that contains "/", as long as it's not a remote tracking branch.
+* Added OsPath build flag, which speeds up git-annex's operations on files.

 -- Joey Hess <id@joeyh.name> Mon, 20 Jan 2025 10:24:51 -0400

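The changelog entry above is the user-visible side of the conversions made throughout this commit: String-based file IO (readFile, writeFile, readFileStrict) is replaced by OsPath-typed wrappers (toOsPath/fromOsPath plus Utility.FileIO's F.readFile' and F.writeFile'), avoiding String round-trips of file names. As a rough illustration only — not git-annex's own code — the sketch below shows the same pattern using the filepath library's System.OsPath and the file-io library's System.File.OsPath, which git-annex's Utility.OsPath and Utility.FileIO wrappers are assumed to sit on top of; the helper name copyNote and the "note" file names are made up for the example.

import System.OsPath (OsPath, (</>), encodeUtf)
import qualified System.File.OsPath as F

-- Copy a small file, keeping the path as OsPath the whole way and
-- doing the IO on strict ByteStrings, as the converted call sites do.
copyNote :: OsPath -> IO ()
copyNote dir = do
	src <- (dir </>) <$> encodeUtf "note"      -- encodeUtf plays the role of toOsPath here
	dst <- (dir </>) <$> encodeUtf "note.bak"
	F.readFile' src >>= F.writeFile' dst       -- strict read/write, no String conversion

main :: IO ()
main = encodeUtf "." >>= copyNote

Because OsPath stores the platform-native representation (bytes on POSIX, UTF-16 on Windows), a sketch like this skips the encode/decode work that FilePath-based IO does on every call, which is where the claimed speedup comes from.
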
|
|
@ -57,6 +57,8 @@ import Utility.Tmp.Dir
|
||||||
import Utility.Env
|
import Utility.Env
|
||||||
import Utility.Metered
|
import Utility.Metered
|
||||||
import Utility.FileMode
|
import Utility.FileMode
|
||||||
|
import qualified Utility.RawFilePath as R
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
import Network.URI
|
import Network.URI
|
||||||
import Data.Either
|
import Data.Either
|
||||||
|
@ -65,7 +67,6 @@ import qualified Data.ByteString as B
|
||||||
import qualified Data.ByteString.Char8 as B8
|
import qualified Data.ByteString.Char8 as B8
|
||||||
import qualified Data.Map.Strict as M
|
import qualified Data.Map.Strict as M
|
||||||
import qualified System.FilePath.ByteString as P
|
import qualified System.FilePath.ByteString as P
|
||||||
import qualified Utility.RawFilePath as R
|
|
||||||
import qualified Data.Set as S
|
import qualified Data.Set as S
|
||||||
|
|
||||||
run :: [String] -> IO ()
|
run :: [String] -> IO ()
|
||||||
|
@ -495,13 +496,16 @@ resolveSpecialRemoteWebUrl :: String -> Annex (Maybe String)
|
||||||
resolveSpecialRemoteWebUrl url
|
resolveSpecialRemoteWebUrl url
|
||||||
| "http://" `isPrefixOf` lcurl || "https://" `isPrefixOf` lcurl =
|
| "http://" `isPrefixOf` lcurl || "https://" `isPrefixOf` lcurl =
|
||||||
Url.withUrlOptionsPromptingCreds $ \uo ->
|
Url.withUrlOptionsPromptingCreds $ \uo ->
|
||||||
withTmpFile "git-remote-annex" $ \tmp h -> do
|
withTmpFile (toOsPath "git-remote-annex") $ \tmp h -> do
|
||||||
liftIO $ hClose h
|
liftIO $ hClose h
|
||||||
Url.download' nullMeterUpdate Nothing url tmp uo >>= \case
|
let tmp' = fromRawFilePath $ fromOsPath tmp
|
||||||
|
Url.download' nullMeterUpdate Nothing url tmp' uo >>= \case
|
||||||
Left err -> giveup $ url ++ " " ++ err
|
Left err -> giveup $ url ++ " " ++ err
|
||||||
Right () -> liftIO $
|
Right () -> liftIO $
|
||||||
(headMaybe . lines)
|
fmap decodeBS
|
||||||
<$> readFileStrict tmp
|
. headMaybe
|
||||||
|
. fileLines'
|
||||||
|
<$> F.readFile' tmp
|
||||||
| otherwise = return Nothing
|
| otherwise = return Nothing
|
||||||
where
|
where
|
||||||
lcurl = map toLower url
|
lcurl = map toLower url
|
||||||
|
@ -724,10 +728,10 @@ downloadManifest rmt = get mkmain >>= maybe (get mkbak) (pure . Just)
|
||||||
-- it needs to re-download it fresh every time, and the object
|
-- it needs to re-download it fresh every time, and the object
|
||||||
-- file should not be stored locally.
|
-- file should not be stored locally.
|
||||||
gettotmp dl = withOtherTmp $ \othertmp ->
|
gettotmp dl = withOtherTmp $ \othertmp ->
|
||||||
withTmpFileIn (fromRawFilePath othertmp) "GITMANIFEST" $ \tmp tmph -> do
|
withTmpFileIn (toOsPath othertmp) (toOsPath "GITMANIFEST") $ \tmp tmph -> do
|
||||||
liftIO $ hClose tmph
|
liftIO $ hClose tmph
|
||||||
_ <- dl tmp
|
_ <- dl (fromRawFilePath (fromOsPath tmp))
|
||||||
b <- liftIO (B.readFile tmp)
|
b <- liftIO (F.readFile' tmp)
|
||||||
case parseManifest b of
|
case parseManifest b of
|
||||||
Right m -> Just <$> verifyManifest rmt m
|
Right m -> Just <$> verifyManifest rmt m
|
||||||
Left err -> giveup err
|
Left err -> giveup err
|
||||||
|
@ -774,7 +778,7 @@ uploadManifest rmt manifest = do
|
||||||
dropKey' rmt mk
|
dropKey' rmt mk
|
||||||
put mk
|
put mk
|
||||||
|
|
||||||
put mk = withTmpFile "GITMANIFEST" $ \tmp tmph -> do
|
put mk = withTmpFile (toOsPath "GITMANIFEST") $ \tmp tmph -> do
|
||||||
liftIO $ B8.hPut tmph (formatManifest manifest)
|
liftIO $ B8.hPut tmph (formatManifest manifest)
|
||||||
liftIO $ hClose tmph
|
liftIO $ hClose tmph
|
||||||
-- Uploading needs the key to be in the annex objects
|
-- Uploading needs the key to be in the annex objects
|
||||||
|
@ -785,7 +789,7 @@ uploadManifest rmt manifest = do
|
||||||
-- keys, which it is not.
|
-- keys, which it is not.
|
||||||
objfile <- calcRepo (gitAnnexLocation mk)
|
objfile <- calcRepo (gitAnnexLocation mk)
|
||||||
modifyContentDir objfile $
|
modifyContentDir objfile $
|
||||||
linkOrCopy mk (toRawFilePath tmp) objfile Nothing >>= \case
|
linkOrCopy mk (fromOsPath tmp) objfile Nothing >>= \case
|
||||||
-- Important to set the right perms even
|
-- Important to set the right perms even
|
||||||
-- though the object is only present
|
-- though the object is only present
|
||||||
-- briefly, since sending objects may rely
|
-- briefly, since sending objects may rely
|
||||||
|
@ -857,7 +861,7 @@ startPush' rmt manifest = do
|
||||||
f <- fromRepo (lastPushedManifestFile (Remote.uuid rmt))
|
f <- fromRepo (lastPushedManifestFile (Remote.uuid rmt))
|
||||||
oldmanifest <- liftIO $
|
oldmanifest <- liftIO $
|
||||||
fromRight mempty . parseManifest
|
fromRight mempty . parseManifest
|
||||||
<$> B.readFile (fromRawFilePath f)
|
<$> F.readFile' (toOsPath f)
|
||||||
`catchNonAsync` (const (pure mempty))
|
`catchNonAsync` (const (pure mempty))
|
||||||
let oldmanifest' = mkManifest [] $
|
let oldmanifest' = mkManifest [] $
|
||||||
S.fromList (inManifest oldmanifest)
|
S.fromList (inManifest oldmanifest)
|
||||||
|
@ -973,14 +977,15 @@ generateGitBundle
|
||||||
-> Manifest
|
-> Manifest
|
||||||
-> Annex (Key, Annex ())
|
-> Annex (Key, Annex ())
|
||||||
generateGitBundle rmt bs manifest =
|
generateGitBundle rmt bs manifest =
|
||||||
withTmpFile "GITBUNDLE" $ \tmp tmph -> do
|
withTmpFile (toOsPath "GITBUNDLE") $ \tmp tmph -> do
|
||||||
|
let tmp' = fromOsPath tmp
|
||||||
liftIO $ hClose tmph
|
liftIO $ hClose tmph
|
||||||
inRepo $ Git.Bundle.create tmp bs
|
inRepo $ Git.Bundle.create (fromRawFilePath tmp') bs
|
||||||
bundlekey <- genGitBundleKey (Remote.uuid rmt)
|
bundlekey <- genGitBundleKey (Remote.uuid rmt)
|
||||||
(toRawFilePath tmp) nullMeterUpdate
|
tmp' nullMeterUpdate
|
||||||
if (bundlekey `notElem` inManifest manifest)
|
if (bundlekey `notElem` inManifest manifest)
|
||||||
then do
|
then do
|
||||||
unlessM (moveAnnex bundlekey (AssociatedFile Nothing) (toRawFilePath tmp)) $
|
unlessM (moveAnnex bundlekey (AssociatedFile Nothing) tmp') $
|
||||||
giveup "Unable to push"
|
giveup "Unable to push"
|
||||||
return (bundlekey, uploadaction bundlekey)
|
return (bundlekey, uploadaction bundlekey)
|
||||||
else return (bundlekey, noop)
|
else return (bundlekey, noop)
|
||||||
|
@ -1122,7 +1127,7 @@ startAnnexBranch = ifM (null <$> Annex.Branch.siblingBranches)
|
||||||
-- journal writes to a temporary directory, so that all writes
|
-- journal writes to a temporary directory, so that all writes
|
||||||
-- to the git-annex branch by the action will be discarded.
|
-- to the git-annex branch by the action will be discarded.
|
||||||
specialRemoteFromUrl :: StartAnnexBranch -> Annex a -> Annex a
|
specialRemoteFromUrl :: StartAnnexBranch -> Annex a -> Annex a
|
||||||
specialRemoteFromUrl sab a = withTmpDir "journal" $ \tmpdir -> do
|
specialRemoteFromUrl sab a = withTmpDir (toOsPath "journal") $ \tmpdir -> do
|
||||||
Annex.overrideGitConfig $ \c ->
|
Annex.overrideGitConfig $ \c ->
|
||||||
c { annexAlwaysCommit = False }
|
c { annexAlwaysCommit = False }
|
||||||
Annex.BranchState.changeState $ \st ->
|
Annex.BranchState.changeState $ \st ->
|
||||||
|
@ -1162,7 +1167,8 @@ specialRemoteFromUrl sab a = withTmpDir "journal" $ \tmpdir -> do
|
||||||
-- objects are deleted.
|
-- objects are deleted.
|
||||||
cleanupInitialization :: StartAnnexBranch -> FilePath -> Annex ()
|
cleanupInitialization :: StartAnnexBranch -> FilePath -> Annex ()
|
||||||
cleanupInitialization sab alternatejournaldir = void $ tryNonAsync $ do
|
cleanupInitialization sab alternatejournaldir = void $ tryNonAsync $ do
|
||||||
liftIO $ mapM_ removeFile =<< dirContents alternatejournaldir
|
liftIO $ mapM_ R.removeLink
|
||||||
|
=<< dirContents (toRawFilePath alternatejournaldir)
|
||||||
case sab of
|
case sab of
|
||||||
AnnexBranchExistedAlready _ -> noop
|
AnnexBranchExistedAlready _ -> noop
|
||||||
AnnexBranchCreatedEmpty r ->
|
AnnexBranchCreatedEmpty r ->
|
||||||
|
|
|
@ -56,6 +56,7 @@ import Data.IORef
|
||||||
import Data.Time.Clock.POSIX
|
import Data.Time.Clock.POSIX
|
||||||
import System.PosixCompat.Files (isDirectory, isSymbolicLink, deviceID, fileID)
|
import System.PosixCompat.Files (isDirectory, isSymbolicLink, deviceID, fileID)
|
||||||
import qualified System.FilePath.ByteString as P
|
import qualified System.FilePath.ByteString as P
|
||||||
|
import qualified Data.ByteString as S
|
||||||
|
|
||||||
data AnnexedFileSeeker = AnnexedFileSeeker
|
data AnnexedFileSeeker = AnnexedFileSeeker
|
||||||
{ startAction :: Maybe KeySha -> SeekInput -> RawFilePath -> Key -> CommandStart
|
{ startAction :: Maybe KeySha -> SeekInput -> RawFilePath -> Key -> CommandStart
|
||||||
|
@ -122,9 +123,8 @@ withPathContents a params = do
|
||||||
-- exist.
|
-- exist.
|
||||||
get p = ifM (isDirectory <$> R.getFileStatus p')
|
get p = ifM (isDirectory <$> R.getFileStatus p')
|
||||||
( map (\f ->
|
( map (\f ->
|
||||||
let f' = toRawFilePath f
|
(f, P.makeRelative (P.takeDirectory (P.dropTrailingPathSeparator p')) f))
|
||||||
in (f', P.makeRelative (P.takeDirectory (P.dropTrailingPathSeparator p')) f'))
|
<$> dirContentsRecursiveSkipping (".git" `S.isSuffixOf`) False p'
|
||||||
<$> dirContentsRecursiveSkipping (".git" `isSuffixOf`) False p
|
|
||||||
, return [(p', P.takeFileName p')]
|
, return [(p', P.takeFileName p')]
|
||||||
)
|
)
|
||||||
where
|
where
|
||||||
|
|
|
@ -200,12 +200,12 @@ checkUrl addunlockedmatcher r o si u = do
|
||||||
startRemote :: AddUnlockedMatcher -> Remote -> AddUrlOptions -> SeekInput -> FilePath -> URLString -> Maybe Integer -> CommandStart
|
startRemote :: AddUnlockedMatcher -> Remote -> AddUrlOptions -> SeekInput -> FilePath -> URLString -> Maybe Integer -> CommandStart
|
||||||
startRemote addunlockedmatcher r o si file uri sz = do
|
startRemote addunlockedmatcher r o si file uri sz = do
|
||||||
pathmax <- liftIO $ fileNameLengthLimit "."
|
pathmax <- liftIO $ fileNameLengthLimit "."
|
||||||
let file' = joinPath $ map (truncateFilePath pathmax) $
|
let file' = P.joinPath $ map (truncateFilePath pathmax) $
|
||||||
splitDirectories file
|
P.splitDirectories (toRawFilePath file)
|
||||||
startingAddUrl si uri o $ do
|
startingAddUrl si uri o $ do
|
||||||
showNote $ UnquotedString $ "from " ++ Remote.name r
|
showNote $ UnquotedString $ "from " ++ Remote.name r
|
||||||
showDestinationFile (toRawFilePath file')
|
showDestinationFile file'
|
||||||
performRemote addunlockedmatcher r o uri (toRawFilePath file') sz
|
performRemote addunlockedmatcher r o uri file' sz
|
||||||
|
|
||||||
performRemote :: AddUnlockedMatcher -> Remote -> AddUrlOptions -> URLString -> RawFilePath -> Maybe Integer -> CommandPerform
|
performRemote :: AddUnlockedMatcher -> Remote -> AddUrlOptions -> URLString -> RawFilePath -> Maybe Integer -> CommandPerform
|
||||||
performRemote addunlockedmatcher r o uri file sz = lookupKey file >>= \case
|
performRemote addunlockedmatcher r o uri file sz = lookupKey file >>= \case
|
||||||
|
@ -279,7 +279,8 @@ sanitizeOrPreserveFilePath o f
|
||||||
return f
|
return f
|
||||||
| otherwise = do
|
| otherwise = do
|
||||||
pathmax <- liftIO $ fileNameLengthLimit "."
|
pathmax <- liftIO $ fileNameLengthLimit "."
|
||||||
return $ truncateFilePath pathmax $ sanitizeFilePath f
|
return $ fromRawFilePath $ truncateFilePath pathmax $
|
||||||
|
toRawFilePath $ sanitizeFilePath f
|
||||||
|
|
||||||
-- sanitizeFilePath avoids all these security problems
|
-- sanitizeFilePath avoids all these security problems
|
||||||
-- (and probably others, but at least this catches the most egrarious ones).
|
-- (and probably others, but at least this catches the most egrarious ones).
|
||||||
|
@ -353,7 +354,7 @@ downloadWeb addunlockedmatcher o url urlinfo file =
|
||||||
urlkey = addSizeUrlKey urlinfo $ Backend.URL.fromUrl url Nothing (verifiableOption o)
|
urlkey = addSizeUrlKey urlinfo $ Backend.URL.fromUrl url Nothing (verifiableOption o)
|
||||||
downloader f p = Url.withUrlOptions $ downloadUrl False urlkey p Nothing [url] f
|
downloader f p = Url.withUrlOptions $ downloadUrl False urlkey p Nothing [url] f
|
||||||
go Nothing = return Nothing
|
go Nothing = return Nothing
|
||||||
go (Just (tmp, backend)) = ifM (useYoutubeDl o <&&> liftIO (isHtmlFile (fromRawFilePath tmp)))
|
go (Just (tmp, backend)) = ifM (useYoutubeDl o <&&> liftIO (isHtmlFile tmp))
|
||||||
( tryyoutubedl tmp backend
|
( tryyoutubedl tmp backend
|
||||||
, normalfinish tmp backend
|
, normalfinish tmp backend
|
||||||
)
|
)
|
||||||
|
@ -567,7 +568,7 @@ nodownloadWeb' o addunlockedmatcher url key file = checkCanAdd o file $ \canadd
|
||||||
|
|
||||||
url2file :: URI -> Maybe Int -> Int -> FilePath
|
url2file :: URI -> Maybe Int -> Int -> FilePath
|
||||||
url2file url pathdepth pathmax = case pathdepth of
|
url2file url pathdepth pathmax = case pathdepth of
|
||||||
Nothing -> truncateFilePath pathmax $ sanitizeFilePath fullurl
|
Nothing -> truncatesanitize fullurl
|
||||||
Just depth
|
Just depth
|
||||||
| depth >= length urlbits -> frombits id
|
| depth >= length urlbits -> frombits id
|
||||||
| depth > 0 -> frombits $ drop depth
|
| depth > 0 -> frombits $ drop depth
|
||||||
|
@ -580,8 +581,12 @@ url2file url pathdepth pathmax = case pathdepth of
|
||||||
, uriQuery url
|
, uriQuery url
|
||||||
]
|
]
|
||||||
frombits a = intercalate "/" $ a urlbits
|
frombits a = intercalate "/" $ a urlbits
|
||||||
urlbits = map (truncateFilePath pathmax . sanitizeFilePath) $
|
urlbits = map truncatesanitize $
|
||||||
filter (not . null) $ splitc '/' fullurl
|
filter (not . null) $ splitc '/' fullurl
|
||||||
|
truncatesanitize = fromRawFilePath
|
||||||
|
. truncateFilePath pathmax
|
||||||
|
. toRawFilePath
|
||||||
|
. sanitizeFilePath
|
||||||
|
|
||||||
urlString2file :: URLString -> Maybe Int -> Int -> FilePath
|
urlString2file :: URLString -> Maybe Int -> Int -> FilePath
|
||||||
urlString2file s pathdepth pathmax = case Url.parseURIRelaxed s of
|
urlString2file s pathdepth pathmax = case Url.parseURIRelaxed s of
|
||||||
|
|
|
@ -312,12 +312,12 @@ performExport r srcrs db ek af contentsha loc allfilledvar = do
|
||||||
sent <- tryNonAsync $ if not (isGitShaKey ek)
|
sent <- tryNonAsync $ if not (isGitShaKey ek)
|
||||||
then tryrenameannexobject $ sendannexobject
|
then tryrenameannexobject $ sendannexobject
|
||||||
-- Sending a non-annexed file.
|
-- Sending a non-annexed file.
|
||||||
else withTmpFile "export" $ \tmp h -> do
|
else withTmpFile (toOsPath "export") $ \tmp h -> do
|
||||||
b <- catObject contentsha
|
b <- catObject contentsha
|
||||||
liftIO $ L.hPut h b
|
liftIO $ L.hPut h b
|
||||||
liftIO $ hClose h
|
liftIO $ hClose h
|
||||||
Remote.action $
|
Remote.action $
|
||||||
storer tmp ek loc nullMeterUpdate
|
storer (fromRawFilePath (fromOsPath tmp)) ek loc nullMeterUpdate
|
||||||
let failedsend = liftIO $ modifyMVar_ allfilledvar (pure . const (AllFilled False))
|
let failedsend = liftIO $ modifyMVar_ allfilledvar (pure . const (AllFilled False))
|
||||||
case sent of
|
case sent of
|
||||||
Right True -> next $ cleanupExport r db ek loc True
|
Right True -> next $ cleanupExport r db ek loc True
|
||||||
|
|
|
@ -72,7 +72,7 @@ start fixwhat si file key = do
|
||||||
|
|
||||||
breakHardLink :: RawFilePath -> Key -> RawFilePath -> CommandPerform
|
breakHardLink :: RawFilePath -> Key -> RawFilePath -> CommandPerform
|
||||||
breakHardLink file key obj = do
|
breakHardLink file key obj = do
|
||||||
replaceWorkTreeFile (fromRawFilePath file) $ \tmp -> do
|
replaceWorkTreeFile file $ \tmp -> do
|
||||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
||||||
unlessM (checkedCopyFile key obj tmp mode) $
|
unlessM (checkedCopyFile key obj tmp mode) $
|
||||||
giveup "unable to break hard link"
|
giveup "unable to break hard link"
|
||||||
|
@ -83,7 +83,7 @@ breakHardLink file key obj = do
|
||||||
|
|
||||||
makeHardLink :: RawFilePath -> Key -> CommandPerform
|
makeHardLink :: RawFilePath -> Key -> CommandPerform
|
||||||
makeHardLink file key = do
|
makeHardLink file key = do
|
||||||
replaceWorkTreeFile (fromRawFilePath file) $ \tmp -> do
|
replaceWorkTreeFile file $ \tmp -> do
|
||||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
||||||
linkFromAnnex' key tmp mode >>= \case
|
linkFromAnnex' key tmp mode >>= \case
|
||||||
LinkAnnexFailed -> giveup "unable to make hard link"
|
LinkAnnexFailed -> giveup "unable to make hard link"
|
||||||
|
@ -97,7 +97,7 @@ fixSymlink file link = do
|
||||||
mtime <- liftIO $ catchMaybeIO $ Posix.modificationTimeHiRes
|
mtime <- liftIO $ catchMaybeIO $ Posix.modificationTimeHiRes
|
||||||
<$> R.getSymbolicLinkStatus file
|
<$> R.getSymbolicLinkStatus file
|
||||||
#endif
|
#endif
|
||||||
replaceWorkTreeFile (fromRawFilePath file) $ \tmpfile -> do
|
replaceWorkTreeFile file $ \tmpfile -> do
|
||||||
liftIO $ R.createSymbolicLink link tmpfile
|
liftIO $ R.createSymbolicLink link tmpfile
|
||||||
#if ! defined(mingw32_HOST_OS)
|
#if ! defined(mingw32_HOST_OS)
|
||||||
liftIO $ maybe noop (\t -> touch tmpfile t False) mtime
|
liftIO $ maybe noop (\t -> touch tmpfile t False) mtime
|
||||||
|
|
|
@ -45,6 +45,7 @@ import qualified Database.Fsck as FsckDb
|
||||||
import Types.CleanupActions
|
import Types.CleanupActions
|
||||||
import Types.Key
|
import Types.Key
|
||||||
import qualified Utility.RawFilePath as R
|
import qualified Utility.RawFilePath as R
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
import Data.Time.Clock.POSIX
|
import Data.Time.Clock.POSIX
|
||||||
import System.Posix.Types (EpochTime)
|
import System.Posix.Types (EpochTime)
|
||||||
|
@ -417,7 +418,7 @@ verifyWorkTree key file = do
|
||||||
case mk of
|
case mk of
|
||||||
Just k | k == key -> whenM (inAnnex key) $ do
|
Just k | k == key -> whenM (inAnnex key) $ do
|
||||||
showNote "fixing worktree content"
|
showNote "fixing worktree content"
|
||||||
replaceWorkTreeFile (fromRawFilePath file) $ \tmp -> do
|
replaceWorkTreeFile file $ \tmp -> do
|
||||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
||||||
ifM (annexThin <$> Annex.getGitConfig)
|
ifM (annexThin <$> Annex.getGitConfig)
|
||||||
( void $ linkFromAnnex' key tmp mode
|
( void $ linkFromAnnex' key tmp mode
|
||||||
|
@ -678,7 +679,7 @@ recordStartTime u = do
|
||||||
f <- fromRepo (gitAnnexFsckState u)
|
f <- fromRepo (gitAnnexFsckState u)
|
||||||
createAnnexDirectory $ parentDir f
|
createAnnexDirectory $ parentDir f
|
||||||
liftIO $ removeWhenExistsWith R.removeLink f
|
liftIO $ removeWhenExistsWith R.removeLink f
|
||||||
liftIO $ withFile (fromRawFilePath f) WriteMode $ \h -> do
|
liftIO $ F.withFile (toOsPath f) WriteMode $ \h -> do
|
||||||
#ifndef mingw32_HOST_OS
|
#ifndef mingw32_HOST_OS
|
||||||
t <- modificationTime <$> R.getFileStatus f
|
t <- modificationTime <$> R.getFileStatus f
|
||||||
#else
|
#else
|
||||||
|
@ -701,7 +702,7 @@ getStartTime u = do
|
||||||
liftIO $ catchDefaultIO Nothing $ do
|
liftIO $ catchDefaultIO Nothing $ do
|
||||||
timestamp <- modificationTime <$> R.getFileStatus f
|
timestamp <- modificationTime <$> R.getFileStatus f
|
||||||
let fromstatus = Just (realToFrac timestamp)
|
let fromstatus = Just (realToFrac timestamp)
|
||||||
fromfile <- parsePOSIXTime <$> readFile (fromRawFilePath f)
|
fromfile <- parsePOSIXTime <$> F.readFile' (toOsPath f)
|
||||||
return $ if matchingtimestamp fromfile fromstatus
|
return $ if matchingtimestamp fromfile fromstatus
|
||||||
then Just timestamp
|
then Just timestamp
|
||||||
else Nothing
|
else Nothing
|
||||||
|
|
|
@ -158,10 +158,11 @@ getFeed o url st =
|
||||||
| scrapeOption o = scrape
|
| scrapeOption o = scrape
|
||||||
| otherwise = get
|
| otherwise = get
|
||||||
|
|
||||||
get = withTmpFile "feed" $ \tmpf h -> do
|
get = withTmpFile (toOsPath "feed") $ \tmpf h -> do
|
||||||
|
let tmpf' = fromRawFilePath $ fromOsPath tmpf
|
||||||
liftIO $ hClose h
|
liftIO $ hClose h
|
||||||
ifM (downloadFeed url tmpf)
|
ifM (downloadFeed url tmpf')
|
||||||
( parse tmpf
|
( parse tmpf'
|
||||||
, do
|
, do
|
||||||
recordfail
|
recordfail
|
||||||
next $ feedProblem url
|
next $ feedProblem url
|
||||||
|
|
|
@ -78,7 +78,7 @@ perform file key = do
|
||||||
breakhardlink obj = whenM (catchBoolIO $ (> 1) . linkCount <$> liftIO (R.getFileStatus obj)) $ do
|
breakhardlink obj = whenM (catchBoolIO $ (> 1) . linkCount <$> liftIO (R.getFileStatus obj)) $ do
|
||||||
mfc <- withTSDelta (liftIO . genInodeCache file)
|
mfc <- withTSDelta (liftIO . genInodeCache file)
|
||||||
unlessM (sameInodeCache obj (maybeToList mfc)) $ do
|
unlessM (sameInodeCache obj (maybeToList mfc)) $ do
|
||||||
modifyContentDir obj $ replaceGitAnnexDirFile (fromRawFilePath obj) $ \tmp -> do
|
modifyContentDir obj $ replaceGitAnnexDirFile obj $ \tmp -> do
|
||||||
unlessM (checkedCopyFile key obj tmp Nothing) $
|
unlessM (checkedCopyFile key obj tmp Nothing) $
|
||||||
giveup "unable to lock file"
|
giveup "unable to lock file"
|
||||||
Database.Keys.storeInodeCaches key [obj]
|
Database.Keys.storeInodeCaches key [obj]
|
||||||
|
|
|
@ -130,7 +130,7 @@ send ups fs = do
|
||||||
-- the names of keys, and would have to be copied, which is too
|
-- the names of keys, and would have to be copied, which is too
|
||||||
-- expensive.
|
-- expensive.
|
||||||
starting "sending files" (ActionItemOther Nothing) (SeekInput []) $
|
starting "sending files" (ActionItemOther Nothing) (SeekInput []) $
|
||||||
withTmpFile "send" $ \t h -> do
|
withTmpFile (toOsPath "send") $ \t h -> do
|
||||||
let ww = WarnUnmatchLsFiles "multicast"
|
let ww = WarnUnmatchLsFiles "multicast"
|
||||||
(fs', cleanup) <- seekHelper id ww LsFiles.inRepo
|
(fs', cleanup) <- seekHelper id ww LsFiles.inRepo
|
||||||
=<< workTreeItems ww fs
|
=<< workTreeItems ww fs
|
||||||
|
@ -163,7 +163,7 @@ send ups fs = do
|
||||||
-- only allow clients on the authlist
|
-- only allow clients on the authlist
|
||||||
, Param "-H", Param ("@"++authlist)
|
, Param "-H", Param ("@"++authlist)
|
||||||
-- pass in list of files to send
|
-- pass in list of files to send
|
||||||
, Param "-i", File t
|
, Param "-i", File (fromRawFilePath (fromOsPath t))
|
||||||
] ++ ups
|
] ++ ups
|
||||||
liftIO (boolSystem "uftp" ps) >>= showEndResult
|
liftIO (boolSystem "uftp" ps) >>= showEndResult
|
||||||
next $ return True
|
next $ return True
|
||||||
|
@ -178,7 +178,7 @@ receive ups = starting "receiving multicast files" ai si $ do
|
||||||
(callback, environ, statush) <- liftIO multicastCallbackEnv
|
(callback, environ, statush) <- liftIO multicastCallbackEnv
|
||||||
tmpobjdir <- fromRepo gitAnnexTmpObjectDir
|
tmpobjdir <- fromRepo gitAnnexTmpObjectDir
|
||||||
createAnnexDirectory tmpobjdir
|
createAnnexDirectory tmpobjdir
|
||||||
withTmpDirIn (fromRawFilePath tmpobjdir) "multicast" $ \tmpdir -> withAuthList $ \authlist -> do
|
withTmpDirIn (fromRawFilePath tmpobjdir) (toOsPath "multicast") $ \tmpdir -> withAuthList $ \authlist -> do
|
||||||
abstmpdir <- liftIO $ absPath (toRawFilePath tmpdir)
|
abstmpdir <- liftIO $ absPath (toRawFilePath tmpdir)
|
||||||
abscallback <- liftIO $ searchPath callback
|
abscallback <- liftIO $ searchPath callback
|
||||||
let ps =
|
let ps =
|
||||||
|
@ -245,10 +245,10 @@ uftpUID u = "0x" ++ (take 8 $ show $ sha2_256 $ B8.fromString (fromUUID u))
|
||||||
withAuthList :: (FilePath -> Annex a) -> Annex a
|
withAuthList :: (FilePath -> Annex a) -> Annex a
|
||||||
withAuthList a = do
|
withAuthList a = do
|
||||||
m <- knownFingerPrints
|
m <- knownFingerPrints
|
||||||
withTmpFile "authlist" $ \t h -> do
|
withTmpFile (toOsPath "authlist") $ \t h -> do
|
||||||
liftIO $ hPutStr h (genAuthList m)
|
liftIO $ hPutStr h (genAuthList m)
|
||||||
liftIO $ hClose h
|
liftIO $ hClose h
|
||||||
a t
|
a (fromRawFilePath (fromOsPath t))
|
||||||
|
|
||||||
genAuthList :: M.Map UUID Fingerprint -> String
|
genAuthList :: M.Map UUID Fingerprint -> String
|
||||||
genAuthList = unlines . map fmt . M.toList
|
genAuthList = unlines . map fmt . M.toList
|
||||||
|
|
|
@ -26,6 +26,7 @@ import Utility.FileMode
|
||||||
import Utility.ThreadScheduler
|
import Utility.ThreadScheduler
|
||||||
import Utility.SafeOutput
|
import Utility.SafeOutput
|
||||||
import qualified Utility.RawFilePath as R
|
import qualified Utility.RawFilePath as R
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
import qualified Utility.MagicWormhole as Wormhole
|
import qualified Utility.MagicWormhole as Wormhole
|
||||||
|
|
||||||
import Control.Concurrent.Async
|
import Control.Concurrent.Async
|
||||||
|
@@ -193,12 +194,11 @@ serializePairData :: PairData -> String
 serializePairData (PairData (HalfAuthToken ha) addrs) = unlines $
 	T.unpack ha : map formatP2PAddress addrs
 
-deserializePairData :: String -> Maybe PairData
-deserializePairData s = case lines s of
-	[] -> Nothing
-	(ha:l) -> do
-		addrs <- mapM unformatP2PAddress l
-		return (PairData (HalfAuthToken (T.pack ha)) addrs)
+deserializePairData :: [String] -> Maybe PairData
+deserializePairData [] = Nothing
+deserializePairData (ha:l) = do
+	addrs <- mapM unformatP2PAddress l
+	return (PairData (HalfAuthToken (T.pack ha)) addrs)
 
 data PairingResult
 	= PairSuccess
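deserializePairData now expects the received data already split into lines; the wormholePairing hunk below feeds it the output of fileLines', which strips the CR that Windows line endings would otherwise leave behind. A compact sketch of that call shape (parseReceived is a hypothetical name, not part of the commit):

    -- Sketch only; mirrors the call site shown below.
    import qualified Data.ByteString as S

    parseReceived :: S.ByteString -> Maybe PairData
    parseReceived = deserializePairData . map decodeBS . fileLines'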
@ -220,7 +220,7 @@ wormholePairing remotename ouraddrs ui = do
|
||||||
-- files. Permissions of received files may allow others
|
-- files. Permissions of received files may allow others
|
||||||
-- to read them. So, set up a temp directory that only
|
-- to read them. So, set up a temp directory that only
|
||||||
-- we can read.
|
-- we can read.
|
||||||
withTmpDir "pair" $ \tmp -> do
|
withTmpDir (toOsPath "pair") $ \tmp -> do
|
||||||
liftIO $ void $ tryIO $ modifyFileMode (toRawFilePath tmp) $
|
liftIO $ void $ tryIO $ modifyFileMode (toRawFilePath tmp) $
|
||||||
removeModes otherGroupModes
|
removeModes otherGroupModes
|
||||||
let sendf = tmp </> "send"
|
let sendf = tmp </> "send"
|
||||||
|
@@ -245,13 +245,14 @@ wormholePairing remotename ouraddrs ui = do
 		then return ReceiveFailed
 		else do
 			r <- liftIO $ tryIO $
-				readFileStrict recvf
+				map decodeBS . fileLines' <$> F.readFile'
+					(toOsPath (toRawFilePath recvf))
 			case r of
 				Left _e -> return ReceiveFailed
-				Right s -> maybe
+				Right ls -> maybe
 					(return ReceiveFailed)
 					(finishPairing 100 remotename ourhalf)
-					(deserializePairData s)
+					(deserializePairData ls)
 
 -- | Allow the peer we're pairing with to authenticate to us,
 -- using an authtoken constructed from the two HalfAuthTokens.
@ -266,8 +266,8 @@ getAuthEnv = do
|
||||||
|
|
||||||
findRepos :: Options -> IO [Git.Repo]
|
findRepos :: Options -> IO [Git.Repo]
|
||||||
findRepos o = do
|
findRepos o = do
|
||||||
files <- map toRawFilePath . concat
|
files <- concat
|
||||||
<$> mapM dirContents (directoryOption o)
|
<$> mapM (dirContents . toRawFilePath) (directoryOption o)
|
||||||
map Git.Construct.newFrom . catMaybes
|
map Git.Construct.newFrom . catMaybes
|
||||||
<$> mapM Git.Construct.checkForRepo files
|
<$> mapM Git.Construct.checkForRepo files
|
||||||
|
|
||||||
|
|
|
@ -104,7 +104,7 @@ linkKey file oldkey newkey = ifM (isJust <$> isAnnexLink file)
|
||||||
st <- liftIO $ R.getFileStatus file
|
st <- liftIO $ R.getFileStatus file
|
||||||
when (linkCount st > 1) $ do
|
when (linkCount st > 1) $ do
|
||||||
freezeContent oldobj
|
freezeContent oldobj
|
||||||
replaceWorkTreeFile (fromRawFilePath file) $ \tmp -> do
|
replaceWorkTreeFile file $ \tmp -> do
|
||||||
unlessM (checkedCopyFile oldkey oldobj tmp Nothing) $
|
unlessM (checkedCopyFile oldkey oldobj tmp Nothing) $
|
||||||
giveup "can't lock old key"
|
giveup "can't lock old key"
|
||||||
thawContent tmp
|
thawContent tmp
|
||||||
|
|
|
@ -5,6 +5,8 @@
|
||||||
- Licensed under the GNU AGPL version 3 or higher.
|
- Licensed under the GNU AGPL version 3 or higher.
|
||||||
-}
|
-}
|
||||||
|
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
|
||||||
module Command.ResolveMerge where
|
module Command.ResolveMerge where
|
||||||
|
|
||||||
import Command
|
import Command
|
||||||
|
@ -12,8 +14,9 @@ import qualified Git
|
||||||
import Git.Sha
|
import Git.Sha
|
||||||
import qualified Git.Branch
|
import qualified Git.Branch
|
||||||
import Annex.AutoMerge
|
import Annex.AutoMerge
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
import qualified Data.ByteString as S
|
import qualified System.FilePath.ByteString as P
|
||||||
|
|
||||||
cmd :: Command
|
cmd :: Command
|
||||||
cmd = command "resolvemerge" SectionPlumbing
|
cmd = command "resolvemerge" SectionPlumbing
|
||||||
|
@ -26,10 +29,10 @@ seek = withNothing (commandAction start)
|
||||||
start :: CommandStart
|
start :: CommandStart
|
||||||
start = starting "resolvemerge" (ActionItemOther Nothing) (SeekInput []) $ do
|
start = starting "resolvemerge" (ActionItemOther Nothing) (SeekInput []) $ do
|
||||||
us <- fromMaybe nobranch <$> inRepo Git.Branch.current
|
us <- fromMaybe nobranch <$> inRepo Git.Branch.current
|
||||||
d <- fromRawFilePath <$> fromRepo Git.localGitDir
|
d <- fromRepo Git.localGitDir
|
||||||
let merge_head = d </> "MERGE_HEAD"
|
let merge_head = toOsPath $ d P.</> "MERGE_HEAD"
|
||||||
them <- fromMaybe (giveup nomergehead) . extractSha
|
them <- fromMaybe (giveup nomergehead) . extractSha
|
||||||
<$> liftIO (S.readFile merge_head)
|
<$> liftIO (F.readFile' merge_head)
|
||||||
ifM (resolveMerge (Just us) them False)
|
ifM (resolveMerge (Just us) them False)
|
||||||
( do
|
( do
|
||||||
void $ commitResolvedMerge Git.Branch.ManualCommit
|
void $ commitResolvedMerge Git.Branch.ManualCommit
|
||||||
|
|
|
@ -32,6 +32,7 @@ import Annex.SpecialRemote.Config (exportTreeField)
|
||||||
import Remote.Helper.Chunked
|
import Remote.Helper.Chunked
|
||||||
import Remote.Helper.Encryptable (encryptionField, highRandomQualityField)
|
import Remote.Helper.Encryptable (encryptionField, highRandomQualityField)
|
||||||
import Git.Types
|
import Git.Types
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
import Test.Tasty
|
import Test.Tasty
|
||||||
import Test.Tasty.Runners
|
import Test.Tasty.Runners
|
||||||
|
@ -255,18 +256,18 @@ test runannex mkr mkk =
|
||||||
get r k
|
get r k
|
||||||
, check "fsck downloaded object" fsck
|
, check "fsck downloaded object" fsck
|
||||||
, check "retrieveKeyFile resume from 0" $ \r k -> do
|
, check "retrieveKeyFile resume from 0" $ \r k -> do
|
||||||
tmp <- fromRawFilePath <$> prepTmp k
|
tmp <- toOsPath <$> prepTmp k
|
||||||
liftIO $ writeFile tmp ""
|
liftIO $ F.writeFile' tmp mempty
|
||||||
lockContentForRemoval k noop removeAnnex
|
lockContentForRemoval k noop removeAnnex
|
||||||
get r k
|
get r k
|
||||||
, check "fsck downloaded object" fsck
|
, check "fsck downloaded object" fsck
|
||||||
, check "retrieveKeyFile resume from 33%" $ \r k -> do
|
, check "retrieveKeyFile resume from 33%" $ \r k -> do
|
||||||
loc <- fromRawFilePath <$> Annex.calcRepo (gitAnnexLocation k)
|
loc <- fromRawFilePath <$> Annex.calcRepo (gitAnnexLocation k)
|
||||||
tmp <- fromRawFilePath <$> prepTmp k
|
tmp <- toOsPath <$> prepTmp k
|
||||||
partial <- liftIO $ bracket (openBinaryFile loc ReadMode) hClose $ \h -> do
|
partial <- liftIO $ bracket (openBinaryFile loc ReadMode) hClose $ \h -> do
|
||||||
sz <- hFileSize h
|
sz <- hFileSize h
|
||||||
L.hGet h $ fromInteger $ sz `div` 3
|
L.hGet h $ fromInteger $ sz `div` 3
|
||||||
liftIO $ L.writeFile tmp partial
|
liftIO $ F.writeFile tmp partial
|
||||||
lockContentForRemoval k noop removeAnnex
|
lockContentForRemoval k noop removeAnnex
|
||||||
get r k
|
get r k
|
||||||
, check "fsck downloaded object" fsck
|
, check "fsck downloaded object" fsck
|
||||||
|
@ -355,11 +356,11 @@ testExportTree runannex mkr mkk1 mkk2 =
|
||||||
storeexport ea k = do
|
storeexport ea k = do
|
||||||
loc <- fromRawFilePath <$> Annex.calcRepo (gitAnnexLocation k)
|
loc <- fromRawFilePath <$> Annex.calcRepo (gitAnnexLocation k)
|
||||||
Remote.storeExport ea loc k testexportlocation nullMeterUpdate
|
Remote.storeExport ea loc k testexportlocation nullMeterUpdate
|
||||||
retrieveexport ea k = withTmpFile "exported" $ \tmp h -> do
|
retrieveexport ea k = withTmpFile (toOsPath "exported") $ \tmp h -> do
|
||||||
liftIO $ hClose h
|
liftIO $ hClose h
|
||||||
tryNonAsync (Remote.retrieveExport ea k testexportlocation tmp nullMeterUpdate) >>= \case
|
tryNonAsync (Remote.retrieveExport ea k testexportlocation (fromRawFilePath (fromOsPath tmp)) nullMeterUpdate) >>= \case
|
||||||
Left _ -> return False
|
Left _ -> return False
|
||||||
Right v -> verifyKeyContentPostRetrieval RetrievalAllKeysSecure AlwaysVerify v k (toRawFilePath tmp)
|
Right v -> verifyKeyContentPostRetrieval RetrievalAllKeysSecure AlwaysVerify v k (fromOsPath tmp)
|
||||||
checkpresentexport ea k = Remote.checkPresentExport ea k testexportlocation
|
checkpresentexport ea k = Remote.checkPresentExport ea k testexportlocation
|
||||||
removeexport ea k = Remote.removeExport ea k testexportlocation
|
removeexport ea k = Remote.removeExport ea k testexportlocation
|
||||||
removeexportdirectory ea = case Remote.removeExportDirectory ea of
|
removeexportdirectory ea = case Remote.removeExportDirectory ea of
|
||||||
|
@ -429,21 +430,21 @@ keySizes base fast = filter want
|
||||||
| otherwise = sz > 0
|
| otherwise = sz > 0
|
||||||
|
|
||||||
randKey :: Int -> Annex Key
|
randKey :: Int -> Annex Key
|
||||||
randKey sz = withTmpFile "randkey" $ \f h -> do
|
randKey sz = withTmpFile (toOsPath "randkey") $ \f h -> do
|
||||||
gen <- liftIO (newGenIO :: IO SystemRandom)
|
gen <- liftIO (newGenIO :: IO SystemRandom)
|
||||||
case genBytes sz gen of
|
case genBytes sz gen of
|
||||||
Left e -> giveup $ "failed to generate random key: " ++ show e
|
Left e -> giveup $ "failed to generate random key: " ++ show e
|
||||||
Right (rand, _) -> liftIO $ B.hPut h rand
|
Right (rand, _) -> liftIO $ B.hPut h rand
|
||||||
liftIO $ hClose h
|
liftIO $ hClose h
|
||||||
let ks = KeySource
|
let ks = KeySource
|
||||||
{ keyFilename = toRawFilePath f
|
{ keyFilename = fromOsPath f
|
||||||
, contentLocation = toRawFilePath f
|
, contentLocation = fromOsPath f
|
||||||
, inodeCache = Nothing
|
, inodeCache = Nothing
|
||||||
}
|
}
|
||||||
k <- case Types.Backend.genKey Backend.Hash.testKeyBackend of
|
k <- case Types.Backend.genKey Backend.Hash.testKeyBackend of
|
||||||
Just a -> a ks nullMeterUpdate
|
Just a -> a ks nullMeterUpdate
|
||||||
Nothing -> giveup "failed to generate random key (backend problem)"
|
Nothing -> giveup "failed to generate random key (backend problem)"
|
||||||
_ <- moveAnnex k (AssociatedFile Nothing) (toRawFilePath f)
|
_ <- moveAnnex k (AssociatedFile Nothing) (fromOsPath f)
|
||||||
return k
|
return k
|
||||||
|
|
||||||
getReadonlyKey :: Remote -> RawFilePath -> Annex Key
|
getReadonlyKey :: Remote -> RawFilePath -> Annex Key
|
||||||
|
|
|
@ -102,14 +102,14 @@ startCheckIncomplete recordnotok file key =
|
||||||
removeAnnexDir :: CommandCleanup -> CommandStart
|
removeAnnexDir :: CommandCleanup -> CommandStart
|
||||||
removeAnnexDir recordok = do
|
removeAnnexDir recordok = do
|
||||||
Annex.Queue.flush
|
Annex.Queue.flush
|
||||||
annexdir <- fromRawFilePath <$> fromRepo gitAnnexDir
|
annexdir <- fromRepo gitAnnexDir
|
||||||
annexobjectdir <- fromRepo gitAnnexObjectDir
|
annexobjectdir <- fromRepo gitAnnexObjectDir
|
||||||
starting ("uninit objects") (ActionItemOther Nothing) (SeekInput []) $ do
|
starting ("uninit objects") (ActionItemOther Nothing) (SeekInput []) $ do
|
||||||
leftovers <- removeUnannexed =<< listKeys InAnnex
|
leftovers <- removeUnannexed =<< listKeys InAnnex
|
||||||
prepareRemoveAnnexDir annexdir
|
prepareRemoveAnnexDir annexdir
|
||||||
if null leftovers
|
if null leftovers
|
||||||
then do
|
then do
|
||||||
liftIO $ removeDirectoryRecursive annexdir
|
liftIO $ removeDirectoryRecursive (fromRawFilePath annexdir)
|
||||||
next recordok
|
next recordok
|
||||||
else giveup $ unlines
|
else giveup $ unlines
|
||||||
[ "Not fully uninitialized"
|
[ "Not fully uninitialized"
|
||||||
|
@ -134,15 +134,15 @@ removeAnnexDir recordok = do
|
||||||
-
|
-
|
||||||
- Also closes sqlite databases that might be in the directory,
|
- Also closes sqlite databases that might be in the directory,
|
||||||
- to avoid later failure to write any cached changes to them. -}
|
- to avoid later failure to write any cached changes to them. -}
|
||||||
prepareRemoveAnnexDir :: FilePath -> Annex ()
|
prepareRemoveAnnexDir :: RawFilePath -> Annex ()
|
||||||
prepareRemoveAnnexDir annexdir = do
|
prepareRemoveAnnexDir annexdir = do
|
||||||
Database.Keys.closeDb
|
Database.Keys.closeDb
|
||||||
liftIO $ prepareRemoveAnnexDir' annexdir
|
liftIO $ prepareRemoveAnnexDir' annexdir
|
||||||
|
|
||||||
prepareRemoveAnnexDir' :: FilePath -> IO ()
|
prepareRemoveAnnexDir' :: RawFilePath -> IO ()
|
||||||
prepareRemoveAnnexDir' annexdir =
|
prepareRemoveAnnexDir' annexdir =
|
||||||
emptyWhenDoesNotExist (dirTreeRecursiveSkipping (const False) annexdir)
|
emptyWhenDoesNotExist (dirTreeRecursiveSkipping (const False) annexdir)
|
||||||
>>= mapM_ (void . tryIO . allowWrite . toRawFilePath)
|
>>= mapM_ (void . tryIO . allowWrite)
|
||||||
|
|
||||||
{- Keys that were moved out of the annex have a hard link still in the
|
{- Keys that were moved out of the annex have a hard link still in the
|
||||||
- annex, with > 1 link count, and those can be removed.
|
- annex, with > 1 link count, and those can be removed.
|
||||||
|
|
|
@ -51,7 +51,7 @@ start si file key = ifM (isJust <$> isAnnexLink file)
|
||||||
perform :: RawFilePath -> Key -> CommandPerform
|
perform :: RawFilePath -> Key -> CommandPerform
|
||||||
perform dest key = do
|
perform dest key = do
|
||||||
destmode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus dest
|
destmode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus dest
|
||||||
destic <- replaceWorkTreeFile (fromRawFilePath dest) $ \tmp -> do
|
destic <- replaceWorkTreeFile dest $ \tmp -> do
|
||||||
ifM (inAnnex key)
|
ifM (inAnnex key)
|
||||||
( do
|
( do
|
||||||
r <- linkFromAnnex' key tmp destmode
|
r <- linkFromAnnex' key tmp destmode
|
||||||
|
|
|
@ -35,6 +35,7 @@ import Remote
|
||||||
import Git.Types (fromConfigKey, fromConfigValue)
|
import Git.Types (fromConfigKey, fromConfigValue)
|
||||||
import Utility.DataUnits
|
import Utility.DataUnits
|
||||||
import qualified Utility.RawFilePath as R
|
import qualified Utility.RawFilePath as R
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
cmd :: Command
|
cmd :: Command
|
||||||
cmd = command "vicfg" SectionSetup "edit configuration in git-annex branch"
|
cmd = command "vicfg" SectionSetup "edit configuration in git-annex branch"
|
||||||
|
@@ -60,7 +61,10 @@ vicfg curcfg f = do
 	-- Allow EDITOR to be processed by the shell, so it can contain options.
 	unlessM (liftIO $ boolSystem "sh" [Param "-c", Param $ unwords [vi, shellEscape f]]) $
 		giveup $ vi ++ " exited nonzero; aborting"
-	r <- parseCfg (defCfg curcfg) <$> liftIO (readFileStrict f)
+	r <- liftIO $ parseCfg (defCfg curcfg)
+		. map decodeBS
+		. fileLines'
+		<$> F.readFile' (toOsPath (toRawFilePath f))
 	liftIO $ removeWhenExistsWith R.removeLink (toRawFilePath f)
 	case r of
 		Left s -> do

@@ -278,8 +282,8 @@ lcom = map (\l -> if "#" `isPrefixOf` l then l else '#' : l)
 
 {- If there's a parse error, returns a new version of the file,
 - with the problem lines noted. -}
-parseCfg :: Cfg -> String -> Either String Cfg
-parseCfg defcfg = go [] defcfg . lines
+parseCfg :: Cfg -> [String] -> Either String Cfg
+parseCfg defcfg = go [] defcfg
  where
 	go c cfg []
 		| null (mapMaybe fst c) = Right cfg
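With parseCfg now taking pre-split lines, reading and decoding the file becomes the caller's job, as the vicfg hunk above shows. A compact sketch pairing the two (loadCfg is a hypothetical helper, not part of the commit):

    -- Sketch only; same pipeline as the vicfg call site above.
    loadCfg :: Cfg -> RawFilePath -> IO (Either String Cfg)
    loadCfg defcfg f =
        parseCfg defcfg . map decodeBS . fileLines' <$> F.readFile' (toOsPath f)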
@ -24,6 +24,7 @@ import Utility.Process as X
|
||||||
import Utility.Path as X
|
import Utility.Path as X
|
||||||
import Utility.Path.AbsRel as X
|
import Utility.Path.AbsRel as X
|
||||||
import Utility.Directory as X
|
import Utility.Directory as X
|
||||||
|
import Utility.SystemDirectory as X
|
||||||
import Utility.MoveFile as X
|
import Utility.MoveFile as X
|
||||||
import Utility.Monad as X
|
import Utility.Monad as X
|
||||||
import Utility.Data as X
|
import Utility.Data as X
|
||||||
|
@ -32,5 +33,6 @@ import Utility.FileSize as X
|
||||||
import Utility.Network as X
|
import Utility.Network as X
|
||||||
import Utility.Split as X
|
import Utility.Split as X
|
||||||
import Utility.FileSystemEncoding as X
|
import Utility.FileSystemEncoding as X
|
||||||
|
import Utility.OsPath as X
|
||||||
|
|
||||||
import Utility.PartialPrelude as X
|
import Utility.PartialPrelude as X
|
||||||
|
|
|
@ -31,7 +31,9 @@ modifyAutoStartFile func = do
|
||||||
f <- autoStartFile
|
f <- autoStartFile
|
||||||
createDirectoryIfMissing True $
|
createDirectoryIfMissing True $
|
||||||
fromRawFilePath (parentDir (toRawFilePath f))
|
fromRawFilePath (parentDir (toRawFilePath f))
|
||||||
viaTmp writeFile f $ unlines dirs'
|
viaTmp (writeFile . fromRawFilePath . fromOsPath)
|
||||||
|
(toOsPath (toRawFilePath f))
|
||||||
|
(unlines dirs')
|
||||||
|
|
||||||
{- Adds a directory to the autostart file. If the directory is already
|
{- Adds a directory to the autostart file. If the directory is already
|
||||||
- present, it's moved to the top, so it will be used as the default
|
- present, it's moved to the top, so it will be used as the default
|
||||||
|
|
|
@ -17,7 +17,9 @@ import Git.Types
|
||||||
import Config
|
import Config
|
||||||
import Utility.Directory.Create
|
import Utility.Directory.Create
|
||||||
import Annex.Version
|
import Annex.Version
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
|
import qualified Data.ByteString as S
|
||||||
import qualified System.FilePath.ByteString as P
|
import qualified System.FilePath.ByteString as P
|
||||||
|
|
||||||
configureSmudgeFilter :: Annex ()
|
configureSmudgeFilter :: Annex ()
|
||||||
|
@ -44,11 +46,12 @@ configureSmudgeFilter = unlessM (fromRepo Git.repoIsLocalBare) $ do
|
||||||
lfs <- readattr lf
|
lfs <- readattr lf
|
||||||
gfs <- readattr gf
|
gfs <- readattr gf
|
||||||
gittop <- Git.localGitDir <$> gitRepo
|
gittop <- Git.localGitDir <$> gitRepo
|
||||||
liftIO $ unless ("filter=annex" `isInfixOf` (lfs ++ gfs)) $ do
|
liftIO $ unless ("filter=annex" `S.isInfixOf` (lfs <> gfs)) $ do
|
||||||
createDirectoryUnder [gittop] (P.takeDirectory lf)
|
createDirectoryUnder [gittop] (P.takeDirectory lf)
|
||||||
writeFile (fromRawFilePath lf) (lfs ++ "\n" ++ unlines stdattr)
|
F.writeFile' (toOsPath lf) $
|
||||||
|
linesFile' (lfs <> encodeBS ("\n" ++ unlines stdattr))
|
||||||
where
|
where
|
||||||
readattr = liftIO . catchDefaultIO "" . readFileStrict . fromRawFilePath
|
readattr = liftIO . catchDefaultIO mempty . F.readFile' . toOsPath
|
||||||
|
|
||||||
configureSmudgeFilterProcess :: Annex ()
|
configureSmudgeFilterProcess :: Annex ()
|
||||||
configureSmudgeFilterProcess =
|
configureSmudgeFilterProcess =
|
||||||
|
@ -65,9 +68,10 @@ stdattr =
|
||||||
-- git-annex does not commit that.
|
-- git-annex does not commit that.
|
||||||
deconfigureSmudgeFilter :: Annex ()
|
deconfigureSmudgeFilter :: Annex ()
|
||||||
deconfigureSmudgeFilter = do
|
deconfigureSmudgeFilter = do
|
||||||
lf <- fromRawFilePath <$> Annex.fromRepo Git.attributesLocal
|
lf <- Annex.fromRepo Git.attributesLocal
|
||||||
ls <- liftIO $ catchDefaultIO [] $ lines <$> readFileStrict lf
|
ls <- liftIO $ catchDefaultIO [] $
|
||||||
liftIO $ writeFile lf $ unlines $
|
map decodeBS . fileLines' <$> F.readFile' (toOsPath lf)
|
||||||
|
liftIO $ writeFile (fromRawFilePath lf) $ unlines $
|
||||||
filter (\l -> l `notElem` stdattr && not (null l)) ls
|
filter (\l -> l `notElem` stdattr && not (null l)) ls
|
||||||
unsetConfig (ConfigKey "filter.annex.smudge")
|
unsetConfig (ConfigKey "filter.annex.smudge")
|
||||||
unsetConfig (ConfigKey "filter.annex.clean")
|
unsetConfig (ConfigKey "filter.annex.clean")
|
||||||
|
|
Creds.hs: 16 lines changed
|
@ -37,9 +37,10 @@ import Remote.Helper.Encryptable (remoteCipher, remoteCipher', embedCreds, Encry
|
||||||
import Utility.Env (getEnv)
|
import Utility.Env (getEnv)
|
||||||
import Utility.Base64
|
import Utility.Base64
|
||||||
import qualified Utility.RawFilePath as R
|
import qualified Utility.RawFilePath as R
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
import qualified Data.ByteString.Lazy.Char8 as L
|
import qualified Data.ByteString.Lazy.Char8 as L8
|
||||||
import qualified Data.ByteString.Char8 as S
|
import qualified Data.ByteString.Char8 as S8
|
||||||
import qualified Data.Map as M
|
import qualified Data.Map as M
|
||||||
import qualified System.FilePath.ByteString as P
|
import qualified System.FilePath.ByteString as P
|
||||||
|
|
||||||
|
@ -99,7 +100,7 @@ setRemoteCredPair' pc encsetup gc storage mcreds = case mcreds of
|
||||||
storeconfig creds key (Just cipher) = do
|
storeconfig creds key (Just cipher) = do
|
||||||
cmd <- gpgCmd <$> Annex.getGitConfig
|
cmd <- gpgCmd <$> Annex.getGitConfig
|
||||||
s <- liftIO $ encrypt cmd (pc, gc) cipher
|
s <- liftIO $ encrypt cmd (pc, gc) cipher
|
||||||
(feedBytes $ L.pack $ encodeCredPair creds)
|
(feedBytes $ L8.pack $ encodeCredPair creds)
|
||||||
(readBytesStrictly return)
|
(readBytesStrictly return)
|
||||||
storeconfig' key (Accepted (decodeBS (toB64 s)))
|
storeconfig' key (Accepted (decodeBS (toB64 s)))
|
||||||
storeconfig creds key Nothing =
|
storeconfig creds key Nothing =
|
||||||
|
@ -135,8 +136,8 @@ getRemoteCredPair c gc storage = maybe fromcache (return . Just) =<< fromenv
|
||||||
fromenccreds enccreds cipher storablecipher = do
|
fromenccreds enccreds cipher storablecipher = do
|
||||||
cmd <- gpgCmd <$> Annex.getGitConfig
|
cmd <- gpgCmd <$> Annex.getGitConfig
|
||||||
mcreds <- liftIO $ catchMaybeIO $ decrypt cmd (c, gc) cipher
|
mcreds <- liftIO $ catchMaybeIO $ decrypt cmd (c, gc) cipher
|
||||||
(feedBytes $ L.fromStrict $ fromB64 enccreds)
|
(feedBytes $ L8.fromStrict $ fromB64 enccreds)
|
||||||
(readBytesStrictly $ return . S.unpack)
|
(readBytesStrictly $ return . S8.unpack)
|
||||||
case mcreds of
|
case mcreds of
|
||||||
Just creds -> fromcreds creds
|
Just creds -> fromcreds creds
|
||||||
Nothing -> do
|
Nothing -> do
|
||||||
|
@@ -202,7 +203,10 @@ writeCreds creds file = do
 	liftIO $ writeFileProtected (d P.</> toRawFilePath file) creds
 
 readCreds :: FilePath -> Annex (Maybe Creds)
-readCreds f = liftIO . catchMaybeIO . readFileStrict =<< credsFile f
+readCreds f = do
+	f' <- toOsPath . toRawFilePath <$> credsFile f
+	liftIO $ catchMaybeIO $ decodeBS . S8.unlines . fileLines'
+		<$> F.readFile' f'
 
 credsFile :: FilePath -> Annex FilePath
 credsFile basefile = do
@ -211,7 +211,7 @@ encrypt gpgcmd c cipher feeder reader = case cipher of
|
||||||
Cipher{} ->
|
Cipher{} ->
|
||||||
let passphrase = cipherPassphrase cipher
|
let passphrase = cipherPassphrase cipher
|
||||||
in case statelessOpenPGPCommand c of
|
in case statelessOpenPGPCommand c of
|
||||||
Just sopcmd -> withTmpDir "sop" $ \d ->
|
Just sopcmd -> withTmpDir (toOsPath "sop") $ \d ->
|
||||||
SOP.encryptSymmetric sopcmd passphrase
|
SOP.encryptSymmetric sopcmd passphrase
|
||||||
(SOP.EmptyDirectory d)
|
(SOP.EmptyDirectory d)
|
||||||
(statelessOpenPGPProfile c)
|
(statelessOpenPGPProfile c)
|
||||||
|
@ -233,7 +233,7 @@ decrypt cmd c cipher feeder reader = case cipher of
|
||||||
Cipher{} ->
|
Cipher{} ->
|
||||||
let passphrase = cipherPassphrase cipher
|
let passphrase = cipherPassphrase cipher
|
||||||
in case statelessOpenPGPCommand c of
|
in case statelessOpenPGPCommand c of
|
||||||
Just sopcmd -> withTmpDir "sop" $ \d ->
|
Just sopcmd -> withTmpDir (toOsPath "sop") $ \d ->
|
||||||
SOP.decryptSymmetric sopcmd passphrase
|
SOP.decryptSymmetric sopcmd passphrase
|
||||||
(SOP.EmptyDirectory d)
|
(SOP.EmptyDirectory d)
|
||||||
feeder reader
|
feeder reader
|
||||||
|
|
|
@ -31,7 +31,7 @@ import qualified System.FilePath.ByteString as P
|
||||||
|
|
||||||
benchmarkDbs :: CriterionMode -> Integer -> Annex ()
|
benchmarkDbs :: CriterionMode -> Integer -> Annex ()
|
||||||
#ifdef WITH_BENCHMARK
|
#ifdef WITH_BENCHMARK
|
||||||
benchmarkDbs mode n = withTmpDirIn "." "benchmark" $ \tmpdir -> do
|
benchmarkDbs mode n = withTmpDirIn "." (toOsPath "benchmark") $ \tmpdir -> do
|
||||||
db <- benchDb (toRawFilePath tmpdir) n
|
db <- benchDb (toRawFilePath tmpdir) n
|
||||||
liftIO $ runMode mode
|
liftIO $ runMode mode
|
||||||
[ bgroup "keys database"
|
[ bgroup "keys database"
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
- Licensed under the GNU AGPL version 3 or higher.
|
- Licensed under the GNU AGPL version 3 or higher.
|
||||||
-}
|
-}
|
||||||
|
|
||||||
{-# LANGUAGE CPP #-}
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
|
||||||
module Git.HashObject where
|
module Git.HashObject where
|
||||||
|
|
||||||
|
@ -82,10 +82,10 @@ instance HashableBlob Builder where
|
||||||
{- Injects a blob into git. Unfortunately, the current git-hash-object
|
{- Injects a blob into git. Unfortunately, the current git-hash-object
|
||||||
- interface does not allow batch hashing without using temp files. -}
|
- interface does not allow batch hashing without using temp files. -}
|
||||||
hashBlob :: HashableBlob b => HashObjectHandle -> b -> IO Sha
|
hashBlob :: HashableBlob b => HashObjectHandle -> b -> IO Sha
|
||||||
hashBlob h b = withTmpFile "hash" $ \tmp tmph -> do
|
hashBlob h b = withTmpFile (toOsPath "hash") $ \tmp tmph -> do
|
||||||
hashableBlobToHandle tmph b
|
hashableBlobToHandle tmph b
|
||||||
hClose tmph
|
hClose tmph
|
||||||
hashFile h (toRawFilePath tmp)
|
hashFile h (fromOsPath tmp)
|
||||||
|
|
||||||
{- Injects some content into git, returning its Sha.
|
{- Injects some content into git, returning its Sha.
|
||||||
-
|
-
|
||||||
|
|
Git/Hook.hs: 32 lines changed
|
@ -6,6 +6,7 @@
|
||||||
-}
|
-}
|
||||||
|
|
||||||
{-# LANGUAGE CPP #-}
|
{-# LANGUAGE CPP #-}
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
|
||||||
module Git.Hook where
|
module Git.Hook where
|
||||||
|
|
||||||
|
@ -14,15 +15,16 @@ import Git
|
||||||
import Utility.Tmp
|
import Utility.Tmp
|
||||||
import Utility.Shell
|
import Utility.Shell
|
||||||
import Utility.FileMode
|
import Utility.FileMode
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
#ifndef mingw32_HOST_OS
|
#ifndef mingw32_HOST_OS
|
||||||
import qualified Utility.RawFilePath as R
|
import qualified Utility.RawFilePath as R
|
||||||
import System.PosixCompat.Files (fileMode)
|
import System.PosixCompat.Files (fileMode)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
import qualified Data.ByteString as B
|
import qualified System.FilePath.ByteString as P
|
||||||
|
|
||||||
data Hook = Hook
|
data Hook = Hook
|
||||||
{ hookName :: FilePath
|
{ hookName :: RawFilePath
|
||||||
, hookScript :: String
|
, hookScript :: String
|
||||||
, hookOldScripts :: [String]
|
, hookOldScripts :: [String]
|
||||||
}
|
}
|
||||||
|
@ -31,8 +33,8 @@ data Hook = Hook
|
||||||
instance Eq Hook where
|
instance Eq Hook where
|
||||||
a == b = hookName a == hookName b
|
a == b = hookName a == hookName b
|
||||||
|
|
||||||
hookFile :: Hook -> Repo -> FilePath
|
hookFile :: Hook -> Repo -> RawFilePath
|
||||||
hookFile h r = fromRawFilePath (localGitDir r) </> "hooks" </> hookName h
|
hookFile h r = localGitDir r P.</> "hooks" P.</> hookName h
|
||||||
|
|
||||||
{- Writes a hook. Returns False if the hook already exists with a different
|
{- Writes a hook. Returns False if the hook already exists with a different
|
||||||
- content. Upgrades old scripts.
|
- content. Upgrades old scripts.
|
||||||
|
@ -48,7 +50,7 @@ hookFile h r = fromRawFilePath (localGitDir r) </> "hooks" </> hookName h
|
||||||
- is run with a bundled bash, so should start with #!/bin/sh
|
- is run with a bundled bash, so should start with #!/bin/sh
|
||||||
-}
|
-}
|
||||||
hookWrite :: Hook -> Repo -> IO Bool
|
hookWrite :: Hook -> Repo -> IO Bool
|
||||||
hookWrite h r = ifM (doesFileExist f)
|
hookWrite h r = ifM (doesFileExist (fromRawFilePath f))
|
||||||
( expectedContent h r >>= \case
|
( expectedContent h r >>= \case
|
||||||
UnexpectedContent -> return False
|
UnexpectedContent -> return False
|
||||||
ExpectedContent -> return True
|
ExpectedContent -> return True
|
||||||
|
@@ -58,15 +60,13 @@ hookWrite h r = ifM (doesFileExist f)
  where
 	f = hookFile h r
 	go = do
-		-- On Windows, using B.writeFile here avoids
-		-- the newline translation done by writeFile.
+		-- On Windows, using a ByteString as the file content
+		-- avoids the newline translation done by writeFile.
 		-- Hook scripts on Windows could use CRLF endings, but
 		-- they typically use unix newlines, which does work there
 		-- and makes the repository more portable.
-		viaTmp B.writeFile f (encodeBS (hookScript h))
-		void $ tryIO $ modifyFileMode
-			(toRawFilePath f)
-			(addModes executeModes)
+		viaTmp F.writeFile' (toOsPath f) (encodeBS (hookScript h))
+		void $ tryIO $ modifyFileMode f (addModes executeModes)
 		return True
 
 {- Removes a hook. Returns False if the hook contained something else, and
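The write path in hookWrite is now the commit's usual write-then-chmod idiom on a RawFilePath. A small sketch isolating just that idiom, using only calls that appear in the hunk above (installScript is a hypothetical name, not part of the commit):

    -- Sketch only; viaTmp writes atomically, then the mode is fixed up.
    installScript :: RawFilePath -> String -> IO ()
    installScript f script = do
        viaTmp F.writeFile' (toOsPath f) (encodeBS script)
        void $ tryIO $ modifyFileMode f (addModes executeModes)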
@ -81,7 +81,7 @@ hookUnWrite h r = ifM (doesFileExist f)
|
||||||
, return True
|
, return True
|
||||||
)
|
)
|
||||||
where
|
where
|
||||||
f = hookFile h r
|
f = fromRawFilePath $ hookFile h r
|
||||||
|
|
||||||
data ExpectedContent = UnexpectedContent | ExpectedContent | OldExpectedContent
|
data ExpectedContent = UnexpectedContent | ExpectedContent | OldExpectedContent
|
||||||
|
|
||||||
|
@ -91,7 +91,7 @@ expectedContent h r = do
|
||||||
-- and so a hook file that has CRLF will be treated the same as one
|
-- and so a hook file that has CRLF will be treated the same as one
|
||||||
-- that has LF. That is intentional, since users may have a reason
|
-- that has LF. That is intentional, since users may have a reason
|
||||||
-- to prefer one or the other.
|
-- to prefer one or the other.
|
||||||
content <- readFile $ hookFile h r
|
content <- readFile $ fromRawFilePath $ hookFile h r
|
||||||
return $ if content == hookScript h
|
return $ if content == hookScript h
|
||||||
then ExpectedContent
|
then ExpectedContent
|
||||||
else if any (content ==) (hookOldScripts h)
|
else if any (content ==) (hookOldScripts h)
|
||||||
|
@ -103,13 +103,13 @@ hookExists h r = do
|
||||||
let f = hookFile h r
|
let f = hookFile h r
|
||||||
catchBoolIO $
|
catchBoolIO $
|
||||||
#ifndef mingw32_HOST_OS
|
#ifndef mingw32_HOST_OS
|
||||||
isExecutable . fileMode <$> R.getFileStatus (toRawFilePath f)
|
isExecutable . fileMode <$> R.getFileStatus f
|
||||||
#else
|
#else
|
||||||
doesFileExist f
|
doesFileExist (fromRawFilePath f)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
runHook :: (FilePath -> [CommandParam] -> IO a) -> Hook -> [CommandParam] -> Repo -> IO a
|
runHook :: (FilePath -> [CommandParam] -> IO a) -> Hook -> [CommandParam] -> Repo -> IO a
|
||||||
runHook runner h ps r = do
|
runHook runner h ps r = do
|
||||||
let f = hookFile h r
|
let f = fromRawFilePath $ hookFile h r
|
||||||
(c, cps) <- findShellCommand f
|
(c, cps) <- findShellCommand f
|
||||||
runner c (cps ++ ps)
|
runner c (cps ++ ps)
|
||||||
|
|
|
@ -373,4 +373,4 @@ inodeCaches locs repo = guardSafeForLsFiles repo $ do
|
||||||
mkInodeCache
|
mkInodeCache
|
||||||
<$> (readish =<< M.lookup "ino:" m)
|
<$> (readish =<< M.lookup "ino:" m)
|
||||||
<*> (readish =<< M.lookup "size:" m)
|
<*> (readish =<< M.lookup "size:" m)
|
||||||
<*> (parsePOSIXTime =<< (replace ":" "." <$> M.lookup "mtime:" m))
|
<*> (parsePOSIXTime =<< (encodeBS . replace ":" "." <$> M.lookup "mtime:" m))
|
||||||
|
|
|
@ -25,14 +25,14 @@ packDir r = objectsDir r P.</> "pack"
|
||||||
packIdxFile :: RawFilePath -> RawFilePath
|
packIdxFile :: RawFilePath -> RawFilePath
|
||||||
packIdxFile = flip P.replaceExtension "idx"
|
packIdxFile = flip P.replaceExtension "idx"
|
||||||
|
|
||||||
listPackFiles :: Repo -> IO [FilePath]
|
listPackFiles :: Repo -> IO [RawFilePath]
|
||||||
listPackFiles r = filter (".pack" `isSuffixOf`)
|
listPackFiles r = filter (".pack" `B.isSuffixOf`)
|
||||||
<$> catchDefaultIO [] (dirContents $ fromRawFilePath $ packDir r)
|
<$> catchDefaultIO [] (dirContents $ packDir r)
|
||||||
|
|
||||||
listLooseObjectShas :: Repo -> IO [Sha]
|
listLooseObjectShas :: Repo -> IO [Sha]
|
||||||
listLooseObjectShas r = catchDefaultIO [] $
|
listLooseObjectShas r = catchDefaultIO [] $
|
||||||
mapMaybe (extractSha . encodeBS . concat . reverse . take 2 . reverse . splitDirectories)
|
mapMaybe (extractSha . encodeBS . concat . reverse . take 2 . reverse . splitDirectories . decodeBS)
|
||||||
<$> emptyWhenDoesNotExist (dirContentsRecursiveSkipping (== "pack") True (fromRawFilePath (objectsDir r)))
|
<$> emptyWhenDoesNotExist (dirContentsRecursiveSkipping (== "pack") True (objectsDir r))
|
||||||
|
|
||||||
looseObjectFile :: Repo -> Sha -> RawFilePath
|
looseObjectFile :: Repo -> Sha -> RawFilePath
|
||||||
looseObjectFile r sha = objectsDir r P.</> prefix P.</> rest
|
looseObjectFile r sha = objectsDir r P.</> prefix P.</> rest
|
||||||
|
|
|
@@ -15,19 +15,22 @@ import Git.Command
 import Git.Sha
 import Git.Types
 import Git.FilePath
+import qualified Utility.FileIO as F
 
 import Data.Char (chr, ord)
 import qualified Data.ByteString as S
 import qualified Data.ByteString.Char8 as S8
+import qualified System.FilePath.ByteString as P
 
 headRef :: Ref
 headRef = Ref "HEAD"
 
-headFile :: Repo -> FilePath
-headFile r = fromRawFilePath (localGitDir r) </> "HEAD"
+headFile :: Repo -> RawFilePath
+headFile r = localGitDir r P.</> "HEAD"
 
 setHeadRef :: Ref -> Repo -> IO ()
-setHeadRef ref r = S.writeFile (headFile r) ("ref: " <> fromRef' ref)
+setHeadRef ref r =
+	F.writeFile' (toOsPath (headFile r)) ("ref: " <> fromRef' ref)
 
 {- Converts a fully qualified git ref into a user-visible string. -}
 describe :: Ref -> String
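headFile now stays a RawFilePath, so reads of HEAD use the same OsPath conversion as the write in setHeadRef above. An illustrative read counterpart (readHead is a hypothetical name, not part of the commit):

    -- Sketch only: read HEAD back with Utility.FileIO.
    readHead :: Repo -> IO S.ByteString
    readHead r = F.readFile' (toOsPath (headFile r))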
@ -44,8 +44,10 @@ import Utility.Tmp.Dir
|
||||||
import Utility.Rsync
|
import Utility.Rsync
|
||||||
import Utility.FileMode
|
import Utility.FileMode
|
||||||
import qualified Utility.RawFilePath as R
|
import qualified Utility.RawFilePath as R
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
import qualified Data.Set as S
|
import qualified Data.Set as S
|
||||||
|
import qualified Data.ByteString as B
|
||||||
import qualified Data.ByteString.Lazy as L
|
import qualified Data.ByteString.Lazy as L
|
||||||
import qualified System.FilePath.ByteString as P
|
import qualified System.FilePath.ByteString as P
|
||||||
|
|
||||||
|
@ -78,29 +80,28 @@ explodePacks :: Repo -> IO Bool
|
||||||
explodePacks r = go =<< listPackFiles r
|
explodePacks r = go =<< listPackFiles r
|
||||||
where
|
where
|
||||||
go [] = return False
|
go [] = return False
|
||||||
go packs = withTmpDir "packs" $ \tmpdir -> do
|
go packs = withTmpDir (toOsPath "packs") $ \tmpdir -> do
|
||||||
r' <- addGitEnv r "GIT_OBJECT_DIRECTORY" tmpdir
|
r' <- addGitEnv r "GIT_OBJECT_DIRECTORY" tmpdir
|
||||||
putStrLn "Unpacking all pack files."
|
putStrLn "Unpacking all pack files."
|
||||||
forM_ packs $ \packfile -> do
|
forM_ packs $ \packfile -> do
|
||||||
-- Just in case permissions are messed up.
|
-- Just in case permissions are messed up.
|
||||||
allowRead (toRawFilePath packfile)
|
allowRead packfile
|
||||||
-- May fail, if pack file is corrupt.
|
-- May fail, if pack file is corrupt.
|
||||||
void $ tryIO $
|
void $ tryIO $
|
||||||
pipeWrite [Param "unpack-objects", Param "-r"] r' $ \h ->
|
pipeWrite [Param "unpack-objects", Param "-r"] r' $ \h ->
|
||||||
L.hPut h =<< L.readFile packfile
|
L.hPut h =<< F.readFile (toOsPath packfile)
|
||||||
objs <- emptyWhenDoesNotExist (dirContentsRecursive tmpdir)
|
objs <- emptyWhenDoesNotExist (dirContentsRecursive (toRawFilePath tmpdir))
|
||||||
forM_ objs $ \objfile -> do
|
forM_ objs $ \objfile -> do
|
||||||
f <- relPathDirToFile
|
f <- relPathDirToFile
|
||||||
(toRawFilePath tmpdir)
|
(toRawFilePath tmpdir)
|
||||||
(toRawFilePath objfile)
|
objfile
|
||||||
let dest = objectsDir r P.</> f
|
let dest = objectsDir r P.</> f
|
||||||
createDirectoryIfMissing True
|
createDirectoryIfMissing True
|
||||||
(fromRawFilePath (parentDir dest))
|
(fromRawFilePath (parentDir dest))
|
||||||
moveFile (toRawFilePath objfile) dest
|
moveFile objfile dest
|
||||||
forM_ packs $ \packfile -> do
|
forM_ packs $ \packfile -> do
|
||||||
let f = toRawFilePath packfile
|
removeWhenExistsWith R.removeLink packfile
|
||||||
removeWhenExistsWith R.removeLink f
|
removeWhenExistsWith R.removeLink (packIdxFile packfile)
|
||||||
removeWhenExistsWith R.removeLink (packIdxFile f)
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
{- Try to retrieve a set of missing objects, from the remotes of a
|
{- Try to retrieve a set of missing objects, from the remotes of a
|
||||||
|
@ -113,13 +114,13 @@ explodePacks r = go =<< listPackFiles r
|
||||||
retrieveMissingObjects :: FsckResults -> Maybe FilePath -> Repo -> IO FsckResults
|
retrieveMissingObjects :: FsckResults -> Maybe FilePath -> Repo -> IO FsckResults
|
||||||
retrieveMissingObjects missing referencerepo r
|
retrieveMissingObjects missing referencerepo r
|
||||||
| not (foundBroken missing) = return missing
|
| not (foundBroken missing) = return missing
|
||||||
| otherwise = withTmpDir "tmprepo" $ \tmpdir -> do
|
| otherwise = withTmpDir (toOsPath "tmprepo") $ \tmpdir -> do
|
||||||
unlessM (boolSystem "git" [Param "init", File tmpdir]) $
|
unlessM (boolSystem "git" [Param "init", File tmpdir]) $
|
||||||
giveup $ "failed to create temp repository in " ++ tmpdir
|
giveup $ "failed to create temp repository in " ++ tmpdir
|
||||||
tmpr <- Config.read =<< Construct.fromPath (toRawFilePath tmpdir)
|
tmpr <- Config.read =<< Construct.fromPath (toRawFilePath tmpdir)
|
||||||
let repoconfig r' = fromRawFilePath (localGitDir r' P.</> "config")
|
let repoconfig r' = toOsPath (localGitDir r' P.</> "config")
|
||||||
whenM (doesFileExist (repoconfig r)) $
|
whenM (doesFileExist (fromRawFilePath (fromOsPath (repoconfig r)))) $
|
||||||
L.readFile (repoconfig r) >>= L.writeFile (repoconfig tmpr)
|
F.readFile (repoconfig r) >>= F.writeFile (repoconfig tmpr)
|
||||||
rs <- Construct.fromRemotes r
|
rs <- Construct.fromRemotes r
|
||||||
stillmissing <- pullremotes tmpr rs fetchrefstags missing
|
stillmissing <- pullremotes tmpr rs fetchrefstags missing
|
||||||
if S.null (knownMissing stillmissing)
|
if S.null (knownMissing stillmissing)
|
||||||
|
@ -248,13 +249,14 @@ badBranches missing r = filterM isbad =<< getAllRefs r
|
||||||
- Relies on packed refs being exploded before it's called.
|
- Relies on packed refs being exploded before it's called.
|
||||||
-}
|
-}
|
||||||
getAllRefs :: Repo -> IO [Ref]
|
getAllRefs :: Repo -> IO [Ref]
|
||||||
getAllRefs r = getAllRefs' (fromRawFilePath (localGitDir r) </> "refs")
|
getAllRefs r = getAllRefs' (localGitDir r P.</> "refs")
|
||||||
|
|
||||||
getAllRefs' :: FilePath -> IO [Ref]
|
getAllRefs' :: RawFilePath -> IO [Ref]
|
||||||
getAllRefs' refdir = do
|
getAllRefs' refdir = do
|
||||||
let topsegs = length (splitPath refdir) - 1
|
let topsegs = length (P.splitPath refdir) - 1
|
||||||
let toref = Ref . toInternalGitPath . encodeBS
|
let toref = Ref . toInternalGitPath . encodeBS
|
||||||
. joinPath . drop topsegs . splitPath
|
. joinPath . drop topsegs . splitPath
|
||||||
|
. decodeBS
|
||||||
map toref <$> emptyWhenDoesNotExist (dirContentsRecursive refdir)
|
map toref <$> emptyWhenDoesNotExist (dirContentsRecursive refdir)
|
||||||
|
|
||||||
explodePackedRefsFile :: Repo -> IO ()
|
explodePackedRefsFile :: Repo -> IO ()
|
||||||
|
@ -262,7 +264,9 @@ explodePackedRefsFile r = do
|
||||||
let f = packedRefsFile r
|
let f = packedRefsFile r
|
||||||
let f' = toRawFilePath f
|
let f' = toRawFilePath f
|
||||||
whenM (doesFileExist f) $ do
|
whenM (doesFileExist f) $ do
|
||||||
rs <- mapMaybe parsePacked . lines
|
rs <- mapMaybe parsePacked
|
||||||
|
. map decodeBS
|
||||||
|
. fileLines'
|
||||||
<$> catchDefaultIO "" (safeReadFile f')
|
<$> catchDefaultIO "" (safeReadFile f')
|
||||||
forM_ rs makeref
|
forM_ rs makeref
|
||||||
removeWhenExistsWith R.removeLink f'
|
removeWhenExistsWith R.removeLink f'
|
||||||
|
@ -473,7 +477,7 @@ displayList items header
|
||||||
-}
|
-}
|
||||||
preRepair :: Repo -> IO ()
|
preRepair :: Repo -> IO ()
|
||||||
preRepair g = do
|
preRepair g = do
|
||||||
unlessM (validhead <$> catchDefaultIO "" (safeReadFile headfile)) $ do
|
unlessM (validhead <$> catchDefaultIO "" (decodeBS <$> safeReadFile headfile)) $ do
|
||||||
removeWhenExistsWith R.removeLink headfile
|
removeWhenExistsWith R.removeLink headfile
|
||||||
writeFile (fromRawFilePath headfile) "ref: refs/heads/master"
|
writeFile (fromRawFilePath headfile) "ref: refs/heads/master"
|
||||||
explodePackedRefsFile g
|
explodePackedRefsFile g
|
||||||
|
@ -651,7 +655,7 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
|
||||||
successfulRepair :: (Bool, [Branch]) -> Bool
|
successfulRepair :: (Bool, [Branch]) -> Bool
|
||||||
successfulRepair = fst
|
successfulRepair = fst
|
||||||
|
|
||||||
safeReadFile :: RawFilePath -> IO String
|
safeReadFile :: RawFilePath -> IO B.ByteString
|
||||||
safeReadFile f = do
|
safeReadFile f = do
|
||||||
allowRead f
|
allowRead f
|
||||||
readFileStrict (fromRawFilePath f)
|
F.readFile' (toOsPath f)
|
||||||
|
|
|
@ -80,5 +80,5 @@ parseAdjustLog l =
|
||||||
"1" -> Just True
|
"1" -> Just True
|
||||||
"0" -> Just False
|
"0" -> Just False
|
||||||
_ -> Nothing
|
_ -> Nothing
|
||||||
t <- parsePOSIXTime ts
|
t <- parsePOSIXTime (encodeBS ts)
|
||||||
return (b, t)
|
return (b, t)
|
||||||
|
|
|
@ -34,6 +34,7 @@ import Logs.File
|
||||||
import qualified Git.LsTree
|
import qualified Git.LsTree
|
||||||
import qualified Git.Tree
|
import qualified Git.Tree
|
||||||
import Annex.UUID
|
import Annex.UUID
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
import qualified Data.Map as M
|
import qualified Data.Map as M
|
||||||
import qualified Data.ByteString as B
|
import qualified Data.ByteString as B
|
||||||
|
@ -129,7 +130,7 @@ getExportExcluded :: UUID -> Annex [Git.Tree.TreeItem]
|
||||||
getExportExcluded u = do
|
getExportExcluded u = do
|
||||||
logf <- fromRepo $ gitAnnexExportExcludeLog u
|
logf <- fromRepo $ gitAnnexExportExcludeLog u
|
||||||
liftIO $ catchDefaultIO [] $ exportExcludedParser
|
liftIO $ catchDefaultIO [] $ exportExcludedParser
|
||||||
<$> L.readFile (fromRawFilePath logf)
|
<$> F.readFile (toOsPath logf)
|
||||||
where
|
where
|
||||||
|
|
||||||
exportExcludedParser :: L.ByteString -> [Git.Tree.TreeItem]
|
exportExcludedParser :: L.ByteString -> [Git.Tree.TreeItem]
|
||||||
|
|
Logs/File.hs: 72 lines changed
|
@ -26,9 +26,8 @@ import Annex.Perms
|
||||||
import Annex.LockFile
|
import Annex.LockFile
|
||||||
import Annex.ReplaceFile
|
import Annex.ReplaceFile
|
||||||
import Utility.Tmp
|
import Utility.Tmp
|
||||||
|
import qualified Utility.FileIO as F
|
||||||
|
|
||||||
import qualified Data.ByteString as S
|
|
||||||
import qualified Data.ByteString.Char8 as S8
|
|
||||||
import qualified Data.ByteString.Lazy as L
|
import qualified Data.ByteString.Lazy as L
|
||||||
import qualified Data.ByteString.Lazy.Char8 as L8
|
import qualified Data.ByteString.Lazy.Char8 as L8
|
||||||
|
|
||||||
|
@@ -36,23 +35,23 @@ import qualified Data.ByteString.Lazy.Char8 as L8
 -- making the new file have whatever permissions the git repository is
 -- configured to use. Creates the parent directory when necessary.
 writeLogFile :: RawFilePath -> String -> Annex ()
-writeLogFile f c = createDirWhenNeeded f $ viaTmp writelog (fromRawFilePath f) c
+writeLogFile f c = createDirWhenNeeded f $ viaTmp writelog (toOsPath f) c
  where
 	writelog tmp c' = do
-		liftIO $ writeFile tmp c'
-		setAnnexFilePerm (toRawFilePath tmp)
+		liftIO $ writeFile (fromRawFilePath (fromOsPath tmp)) c'
+		setAnnexFilePerm (fromOsPath tmp)
 
 -- | Runs the action with a handle connected to a temp file.
 -- The temp file replaces the log file once the action succeeds.
 withLogHandle :: RawFilePath -> (Handle -> Annex a) -> Annex a
 withLogHandle f a = do
 	createAnnexDirectory (parentDir f)
-	replaceGitAnnexDirFile (fromRawFilePath f) $ \tmp ->
+	replaceGitAnnexDirFile f $ \tmp ->
 		bracket (setup tmp) cleanup a
  where
 	setup tmp = do
 		setAnnexFilePerm tmp
-		liftIO $ openFile (fromRawFilePath tmp) WriteMode
+		liftIO $ F.openFile (toOsPath tmp) WriteMode
 	cleanup h = liftIO $ hClose h
 
 -- | Appends a line to a log file, first locking it to prevent

@@ -61,11 +60,9 @@ appendLogFile :: RawFilePath -> RawFilePath -> L.ByteString -> Annex ()
 appendLogFile f lck c =
 	createDirWhenNeeded f $
 		withExclusiveLock lck $ do
-			liftIO $ withFile f' AppendMode $
+			liftIO $ F.withFile (toOsPath f) AppendMode $
 				\h -> L8.hPutStrLn h c
-			setAnnexFilePerm (toRawFilePath f')
-  where
-	f' = fromRawFilePath f
+			setAnnexFilePerm f
 
 -- | Modifies a log file.
 --
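appendLogFile keeps its RawFilePath-based API; only the internals moved to Utility.FileIO. A hedged usage sketch, with log and lock paths taken from repo state as other hunks in this commit do (logExample and the literal entry are hypothetical):

    -- Sketch only; gitAnnexMigrateLog/Lock are used elsewhere in this commit.
    import qualified Data.ByteString.Lazy.Char8 as L8

    logExample :: Annex ()
    logExample = do
        logf <- fromRepo gitAnnexMigrateLog
        lckf <- fromRepo gitAnnexMigrateLock
        appendLogFile logf lckf (L8.pack "example entry")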
@ -78,29 +75,28 @@ appendLogFile f lck c =
|
||||||
modifyLogFile :: RawFilePath -> RawFilePath -> ([L.ByteString] -> [L.ByteString]) -> Annex ()
|
modifyLogFile :: RawFilePath -> RawFilePath -> ([L.ByteString] -> [L.ByteString]) -> Annex ()
|
||||||
modifyLogFile f lck modf = withExclusiveLock lck $ do
|
modifyLogFile f lck modf = withExclusiveLock lck $ do
|
||||||
ls <- liftIO $ fromMaybe []
|
ls <- liftIO $ fromMaybe []
|
||||||
<$> tryWhenExists (fileLines <$> L.readFile f')
|
<$> tryWhenExists (fileLines <$> F.readFile f')
|
||||||
let ls' = modf ls
|
let ls' = modf ls
|
||||||
when (ls' /= ls) $
|
when (ls' /= ls) $
|
||||||
createDirWhenNeeded f $
|
createDirWhenNeeded f $
|
||||||
viaTmp writelog f' (L8.unlines ls')
|
viaTmp writelog f' (L8.unlines ls')
|
||||||
where
|
where
|
||||||
f' = fromRawFilePath f
|
f' = toOsPath f
|
||||||
writelog lf b = do
|
writelog lf b = do
|
||||||
liftIO $ L.writeFile lf b
|
liftIO $ F.writeFile lf b
|
||||||
setAnnexFilePerm (toRawFilePath lf)
|
setAnnexFilePerm (fromOsPath lf)
|
||||||
|
|
||||||
-- | Checks the content of a log file to see if any line matches.
|
-- | Checks the content of a log file to see if any line matches.
|
||||||
checkLogFile :: RawFilePath -> RawFilePath -> (L.ByteString -> Bool) -> Annex Bool
|
checkLogFile :: RawFilePath -> RawFilePath -> (L.ByteString -> Bool) -> Annex Bool
|
||||||
checkLogFile f lck matchf = withSharedLock lck $ bracket setup cleanup go
|
checkLogFile f lck matchf = withSharedLock lck $ bracket setup cleanup go
|
||||||
where
|
where
|
||||||
setup = liftIO $ tryWhenExists $ openFile f' ReadMode
|
setup = liftIO $ tryWhenExists $ F.openFile (toOsPath f) ReadMode
|
||||||
cleanup Nothing = noop
|
cleanup Nothing = noop
|
||||||
cleanup (Just h) = liftIO $ hClose h
|
cleanup (Just h) = liftIO $ hClose h
|
||||||
go Nothing = return False
|
go Nothing = return False
|
||||||
go (Just h) = do
|
go (Just h) = do
|
||||||
!r <- liftIO (any matchf . fileLines <$> L.hGetContents h)
|
!r <- liftIO (any matchf . fileLines <$> L.hGetContents h)
|
||||||
return r
|
return r
|
||||||
f' = fromRawFilePath f
|
|
||||||
|
|
||||||
-- | Folds a function over lines of a log file to calculate a value.
|
-- | Folds a function over lines of a log file to calculate a value.
|
||||||
calcLogFile :: RawFilePath -> RawFilePath -> t -> (L.ByteString -> t -> t) -> Annex t
|
calcLogFile :: RawFilePath -> RawFilePath -> t -> (L.ByteString -> t -> t) -> Annex t
|
||||||
|
@ -111,7 +107,7 @@ calcLogFile f lck start update =
|
||||||
calcLogFileUnsafe :: RawFilePath -> t -> (L.ByteString -> t -> t) -> Annex t
|
calcLogFileUnsafe :: RawFilePath -> t -> (L.ByteString -> t -> t) -> Annex t
|
||||||
calcLogFileUnsafe f start update = bracket setup cleanup go
|
calcLogFileUnsafe f start update = bracket setup cleanup go
|
||||||
where
|
where
|
||||||
setup = liftIO $ tryWhenExists $ openFile f' ReadMode
|
setup = liftIO $ tryWhenExists $ F.openFile (toOsPath f) ReadMode
|
||||||
cleanup Nothing = noop
|
cleanup Nothing = noop
|
||||||
cleanup (Just h) = liftIO $ hClose h
|
cleanup (Just h) = liftIO $ hClose h
|
||||||
go Nothing = return start
|
go Nothing = return start
|
||||||
|
@ -120,7 +116,6 @@ calcLogFileUnsafe f start update = bracket setup cleanup go
|
||||||
go' v (l:ls) = do
|
go' v (l:ls) = do
|
||||||
let !v' = update l v
|
let !v' = update l v
|
||||||
go' v' ls
|
go' v' ls
|
||||||
f' = fromRawFilePath f
|
|
||||||
|
|
||||||
-- | Streams lines from a log file, passing each line to the processor,
|
-- | Streams lines from a log file, passing each line to the processor,
|
||||||
-- and then empties the file at the end.
|
-- and then empties the file at the end.
|
||||||
|
@ -134,19 +129,19 @@ calcLogFileUnsafe f start update = bracket setup cleanup go
|
||||||
--
|
--
|
||||||
-- Locking is used to prevent writes to the log file while this
|
-- Locking is used to prevent writes to the log file while this
|
||||||
-- is running.
|
-- is running.
|
||||||
streamLogFile :: FilePath -> RawFilePath -> Annex () -> (String -> Annex ()) -> Annex ()
|
streamLogFile :: RawFilePath -> RawFilePath -> Annex () -> (String -> Annex ()) -> Annex ()
|
||||||
streamLogFile f lck finalizer processor =
|
streamLogFile f lck finalizer processor =
|
||||||
withExclusiveLock lck $ do
|
withExclusiveLock lck $ do
|
||||||
streamLogFileUnsafe f finalizer processor
|
streamLogFileUnsafe f finalizer processor
|
||||||
liftIO $ writeFile f ""
|
liftIO $ F.writeFile' (toOsPath f) mempty
|
||||||
setAnnexFilePerm (toRawFilePath f)
|
setAnnexFilePerm f
|
||||||
|
|
||||||
-- Unsafe version that does not do locking, and does not empty the file
|
-- Unsafe version that does not do locking, and does not empty the file
|
||||||
-- at the end.
|
-- at the end.
|
||||||
streamLogFileUnsafe :: FilePath -> Annex () -> (String -> Annex ()) -> Annex ()
|
streamLogFileUnsafe :: RawFilePath -> Annex () -> (String -> Annex ()) -> Annex ()
|
||||||
streamLogFileUnsafe f finalizer processor = bracketOnError setup cleanup go
|
streamLogFileUnsafe f finalizer processor = bracketOnError setup cleanup go
|
||||||
where
|
where
|
||||||
setup = liftIO $ tryWhenExists $ openFile f ReadMode
|
setup = liftIO $ tryWhenExists $ F.openFile (toOsPath f) ReadMode
|
||||||
cleanup Nothing = noop
|
cleanup Nothing = noop
|
||||||
cleanup (Just h) = liftIO $ hClose h
|
cleanup (Just h) = liftIO $ hClose h
|
||||||
go Nothing = finalizer
|
go Nothing = finalizer
|
||||||
|
@ -161,32 +156,3 @@ createDirWhenNeeded f a = a `catchNonAsync` \_e -> do
|
||||||
-- done if writing the file fails.
|
-- done if writing the file fails.
|
||||||
createAnnexDirectory (parentDir f)
|
createAnnexDirectory (parentDir f)
|
||||||
a
|
a
|
||||||
|
|
||||||
-- On windows, readFile does NewlineMode translation,
|
|
||||||
-- stripping CR before LF. When converting to ByteString,
|
|
||||||
-- use this to emulate that.
|
|
||||||
fileLines :: L.ByteString -> [L.ByteString]
|
|
||||||
#ifdef mingw32_HOST_OS
|
|
||||||
fileLines = map stripCR . L8.lines
|
|
||||||
where
|
|
||||||
stripCR b = case L8.unsnoc b of
|
|
||||||
Nothing -> b
|
|
||||||
Just (b', e)
|
|
||||||
| e == '\r' -> b'
|
|
||||||
| otherwise -> b
|
|
||||||
#else
|
|
||||||
fileLines = L8.lines
|
|
||||||
#endif
|
|
||||||
|
|
||||||
fileLines' :: S.ByteString -> [S.ByteString]
|
|
||||||
#ifdef mingw32_HOST_OS
|
|
||||||
fileLines' = map stripCR . S8.lines
|
|
||||||
where
|
|
||||||
stripCR b = case S8.unsnoc b of
|
|
||||||
Nothing -> b
|
|
||||||
Just (b', e)
|
|
||||||
| e == '\r' -> b'
|
|
||||||
| otherwise -> b
|
|
||||||
#else
|
|
||||||
fileLines' = S8.lines
|
|
||||||
#endif
|
|
||||||
|
|
|
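The hunk above drops the CR-handling line splitters from this module; fileLines' is still used by later hunks in this commit, so the helpers presumably move elsewhere rather than disappear. As a self-contained sketch of the behaviour the removed comment describes (the name crLines and the imports are illustrative, not git-annex's API): on Windows, text-mode readFile turns "\r\n" into "\n", so when splitting raw ByteStrings a trailing '\r' has to be dropped by hand.

import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8

-- Split strict bytes into lines, dropping a trailing CR from each line,
-- mirroring the fileLines' helper removed above.
crLines :: S.ByteString -> [S.ByteString]
crLines = map stripCR . S8.lines
  where
    stripCR b = case S8.unsnoc b of
        Just (b', '\r') -> b'
        _ -> b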
@@ -79,7 +79,7 @@ logMigration old new = do
 -- | Commits a migration to the git-annex branch.
 commitMigration :: Annex ()
 commitMigration = do
-logf <- fromRawFilePath <$> fromRepo gitAnnexMigrateLog
+logf <- fromRepo gitAnnexMigrateLog
 lckf <- fromRepo gitAnnexMigrateLock
 nv <- liftIO $ newTVarIO (0 :: Integer)
 g <- Annex.gitRepo
@@ -14,6 +14,7 @@ import Git.FilePath
 import Logs.File
 import Utility.InodeCache
 import Annex.LockFile
+import qualified Utility.FileIO as F

 import qualified Data.ByteString as S
 import qualified Data.ByteString.Lazy as L
@@ -48,21 +49,20 @@ streamRestageLog :: Annex () -> (TopFilePath -> InodeCache -> Annex ()) -> Annex
 streamRestageLog finalizer processor = do
 logf <- fromRepo gitAnnexRestageLog
 oldf <- fromRepo gitAnnexRestageLogOld
-let oldf' = fromRawFilePath oldf
 lckf <- fromRepo gitAnnexRestageLock

 withExclusiveLock lckf $ liftIO $
 whenM (R.doesPathExist logf) $
 ifM (R.doesPathExist oldf)
 ( do
-h <- openFile oldf' AppendMode
+h <- F.openFile (toOsPath oldf) AppendMode
 hPutStr h =<< readFile (fromRawFilePath logf)
 hClose h
 liftIO $ removeWhenExistsWith R.removeLink logf
 , moveFile logf oldf
 )

-streamLogFileUnsafe oldf' finalizer $ \l ->
+streamLogFileUnsafe oldf finalizer $ \l ->
 case parseRestageLog l of
 Just (f, ic) -> processor f ic
 Nothing -> noop
@@ -34,7 +34,7 @@ streamSmudged :: (Key -> TopFilePath -> Annex ()) -> Annex ()
 streamSmudged a = do
 logf <- fromRepo gitAnnexSmudgeLog
 lckf <- fromRepo gitAnnexSmudgeLock
-streamLogFile (fromRawFilePath logf) lckf noop $ \l ->
+streamLogFile logf lckf noop $ \l ->
 case parse l of
 Nothing -> noop
 Just (k, f) -> a k f
@@ -22,6 +22,7 @@ import Annex.LockPool
 import Utility.TimeStamp
 import Logs.File
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 #ifndef mingw32_HOST_OS
 import Annex.Perms
 #endif
@@ -29,6 +30,7 @@ import Annex.Perms
 import Data.Time.Clock
 import Data.Time.Clock.POSIX
 import Control.Concurrent.STM
+import qualified Data.ByteString as B
 import qualified Data.ByteString.Char8 as B8
 import qualified System.FilePath.ByteString as P

@@ -118,7 +120,7 @@ checkTransfer t = debugLocks $ do
 (Just oldlck, _) -> getLockStatus oldlck
 case v' of
 StatusLockedBy pid -> liftIO $ catchDefaultIO Nothing $
-readTransferInfoFile (Just pid) (fromRawFilePath tfile)
+readTransferInfoFile (Just pid) tfile
 _ -> do
 mode <- annexFileMode
 -- Ignore failure due to permissions, races, etc.
@@ -139,7 +141,7 @@ checkTransfer t = debugLocks $ do
 v <- liftIO $ lockShared lck
 liftIO $ case v of
 Nothing -> catchDefaultIO Nothing $
-readTransferInfoFile Nothing (fromRawFilePath tfile)
+readTransferInfoFile Nothing tfile
 Just lockhandle -> do
 dropLock lockhandle
 deletestale
@@ -157,7 +159,7 @@ getTransfers' dirs wanted = do
 infos <- mapM checkTransfer transfers
 return $ mapMaybe running $ zip transfers infos
 where
-findfiles = liftIO . mapM (emptyWhenDoesNotExist . dirContentsRecursive . fromRawFilePath)
+findfiles = liftIO . mapM (emptyWhenDoesNotExist . dirContentsRecursive)
 =<< mapM (fromRepo . transferDir) dirs
 running (t, Just i) = Just (t, i)
 running (_, Nothing) = Nothing
@@ -184,7 +186,7 @@ getFailedTransfers u = catMaybes <$> (liftIO . getpairs =<< concat <$> findfiles
 return $ case (mt, mi) of
 (Just t, Just i) -> Just (t, i)
 _ -> Nothing
-findfiles = liftIO . mapM (emptyWhenDoesNotExist . dirContentsRecursive . fromRawFilePath)
+findfiles = liftIO . mapM (emptyWhenDoesNotExist . dirContentsRecursive)
 =<< mapM (fromRepo . failedTransferDir u) [Download, Upload]

 clearFailedTransfers :: UUID -> Annex [(Transfer, TransferInfo)]
@@ -244,17 +246,17 @@ failedTransferFile (Transfer direction u kd) r =
 P.</> keyFile (mkKey (const kd))

 {- Parses a transfer information filename to a Transfer. -}
-parseTransferFile :: FilePath -> Maybe Transfer
+parseTransferFile :: RawFilePath -> Maybe Transfer
 parseTransferFile file
-| "lck." `isPrefixOf` takeFileName file = Nothing
+| "lck." `B.isPrefixOf` P.takeFileName file = Nothing
 | otherwise = case drop (length bits - 3) bits of
 [direction, u, key] -> Transfer
 <$> parseDirection direction
 <*> pure (toUUID u)
-<*> fmap (fromKey id) (fileKey (toRawFilePath key))
+<*> fmap (fromKey id) (fileKey key)
 _ -> Nothing
 where
-bits = splitDirectories file
+bits = P.splitDirectories file

 writeTransferInfoFile :: TransferInfo -> RawFilePath -> Annex ()
 writeTransferInfoFile info tfile = writeLogFile tfile $ writeTransferInfo info
@@ -284,9 +286,9 @@ writeTransferInfo info = unlines
 in maybe "" fromRawFilePath afile
 ]

-readTransferInfoFile :: Maybe PID -> FilePath -> IO (Maybe TransferInfo)
+readTransferInfoFile :: Maybe PID -> RawFilePath -> IO (Maybe TransferInfo)
 readTransferInfoFile mpid tfile = catchDefaultIO Nothing $
-readTransferInfo mpid <$> readFileStrict tfile
+readTransferInfo mpid . decodeBS <$> F.readFile' (toOsPath tfile)

 readTransferInfo :: Maybe PID -> String -> Maybe TransferInfo
 readTransferInfo mpid s = TransferInfo
@@ -303,8 +305,10 @@ readTransferInfo mpid s = TransferInfo
 <*> pure False
 where
 #ifdef mingw32_HOST_OS
-(firstline, otherlines) = separate (== '\n') s
+(firstliner, otherlines) = separate (== '\n') s
-(secondline, rest) = separate (== '\n') otherlines
+(secondliner, rest) = separate (== '\n') otherlines
+firstline = dropWhileEnd (== '\r') firstliner
+secondline = dropWhileEnd (== '\r') secondliner
 mpid' = readish secondline
 #else
 (firstline, rest) = separate (== '\n') s
@@ -315,7 +319,7 @@ readTransferInfo mpid s = TransferInfo
 bits = splitc ' ' firstline
 numbits = length bits
 time = if numbits > 0
-then Just <$> parsePOSIXTime =<< headMaybe bits
+then Just <$> parsePOSIXTime . encodeBS =<< headMaybe bits
 else pure Nothing -- not failure
 bytes = if numbits > 1
 then Just <$> readish =<< headMaybe (drop 1 bits)
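parseTransferFile above now works directly on a RawFilePath, using the ByteString flavours of takeFileName and splitDirectories and then keeping only the last three path components. A standalone sketch of just that component selection, assuming RawFilePath is a strict ByteString and the filepath-bytestring module imported in these hunks (the function name is made up for illustration; the real code feeds the pieces to parseDirection, toUUID and fileKey):

import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import qualified System.FilePath.ByteString as P

-- Pick the (direction, uuid, keyname) components out of a transfer info
-- filename, skipping lock files, in the way parseTransferFile does above.
lastThreeComponents :: B.ByteString -> Maybe (B.ByteString, B.ByteString, B.ByteString)
lastThreeComponents file
    | B8.pack "lck." `B.isPrefixOf` P.takeFileName file = Nothing
    | otherwise = case drop (length bits - 3) bits of
        [direction, u, key] -> Just (direction, u, key)
        _ -> Nothing
  where
    bits = P.splitDirectories file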
@@ -32,6 +32,7 @@ import qualified Data.Map as M
 import qualified Data.Set as S
 import Data.Time.Clock.POSIX
 import Data.Time
+import qualified Utility.FileIO as F

 import Annex.Common
 import qualified Annex
@@ -73,14 +74,14 @@ writeUnusedLog prefix l = do

 readUnusedLog :: RawFilePath -> Annex UnusedLog
 readUnusedLog prefix = do
-f <- fromRawFilePath <$> fromRepo (gitAnnexUnusedLog prefix)
+f <- fromRepo (gitAnnexUnusedLog prefix)
-ifM (liftIO $ doesFileExist f)
+ifM (liftIO $ doesFileExist (fromRawFilePath f))
-( M.fromList . mapMaybe parse . lines
+( M.fromList . mapMaybe (parse . decodeBS) . fileLines'
-<$> liftIO (readFileStrict f)
+<$> liftIO (F.readFile' (toOsPath f))
 , return M.empty
 )
 where
-parse line = case (readish sint, deserializeKey skey, parsePOSIXTime ts) of
+parse line = case (readish sint, deserializeKey skey, parsePOSIXTime (encodeBS ts)) of
 (Just int, Just key, mtimestamp) -> Just (key, (int, mtimestamp))
 _ -> Nothing
 where
@@ -19,6 +19,7 @@ import Annex.Common
 import Utility.TimeStamp
 import Logs.File
 import Types.RepoVersion
+import qualified Utility.FileIO as F

 import Data.Time.Clock.POSIX

@@ -31,14 +32,14 @@ writeUpgradeLog v t = do

 readUpgradeLog :: Annex [(RepoVersion, POSIXTime)]
 readUpgradeLog = do
-logfile <- fromRawFilePath <$> fromRepo gitAnnexUpgradeLog
+logfile <- fromRepo gitAnnexUpgradeLog
-ifM (liftIO $ doesFileExist logfile)
+ifM (liftIO $ doesFileExist (fromRawFilePath logfile))
-( mapMaybe parse . lines
+( mapMaybe (parse . decodeBS) . fileLines'
-<$> liftIO (readFileStrict logfile)
+<$> liftIO (F.readFile' (toOsPath logfile))
 , return []
 )
 where
-parse line = case (readish sint, parsePOSIXTime ts) of
+parse line = case (readish sint, parsePOSIXTime (encodeBS ts)) of
 (Just v, Just t) -> Just (RepoVersion v, t)
 _ -> Nothing
 where
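readUnusedLog and readUpgradeLog above now share the same read pipeline: read the log strictly as bytes, split it with the CR-aware fileLines', decode each line, and hand it to a per-log parser. A rough standalone version of that shape, with plain ByteString reads and unpack standing in for F.readFile' and decodeBS:

import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Data.Maybe (mapMaybe)

-- Read a log file and parse it line by line, dropping unparseable lines,
-- in the same shape as the readUnusedLog/readUpgradeLog hunks above.
readLogWith :: (String -> Maybe a) -> FilePath -> IO [a]
readLogWith parseLine f =
    mapMaybe (parseLine . S8.unpack) . S8.lines <$> S.readFile f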
@@ -35,10 +35,11 @@ import qualified Utility.RawFilePath as R

 import Network.URI
 import qualified System.FilePath.ByteString as P
+import qualified Data.ByteString as S

 #ifdef WITH_TORRENTPARSER
 import Data.Torrent
-import qualified Data.ByteString.Lazy as B
+import qualified Utility.FileIO as F
 #endif

 remote :: RemoteType
@@ -208,31 +209,29 @@ downloadTorrentFile u = do
 let metadir = othertmp P.</> "torrentmeta" P.</> kf
 createAnnexDirectory metadir
 showOutput
-ok <- downloadMagnetLink u
+ok <- downloadMagnetLink u metadir torrent
-(fromRawFilePath metadir)
-(fromRawFilePath torrent)
 liftIO $ removeDirectoryRecursive
 (fromRawFilePath metadir)
 return ok
 else withOtherTmp $ \othertmp -> do
-withTmpFileIn (fromRawFilePath othertmp) "torrent" $ \f h -> do
+withTmpFileIn (toOsPath othertmp) (toOsPath "torrent") $ \f h -> do
 liftIO $ hClose h
-resetAnnexFilePerm (toRawFilePath f)
+resetAnnexFilePerm (fromOsPath f)
 ok <- Url.withUrlOptions $
-Url.download nullMeterUpdate Nothing u f
+Url.download nullMeterUpdate Nothing u (fromRawFilePath (fromOsPath f))
 when ok $
-liftIO $ moveFile (toRawFilePath f) torrent
+liftIO $ moveFile (fromOsPath f) torrent
 return ok
 )

-downloadMagnetLink :: URLString -> FilePath -> FilePath -> Annex Bool
+downloadMagnetLink :: URLString -> RawFilePath -> RawFilePath -> Annex Bool
 downloadMagnetLink u metadir dest = ifM download
 ( liftIO $ do
-ts <- filter (".torrent" `isSuffixOf`)
+ts <- filter (".torrent" `S.isSuffixOf`)
 <$> dirContents metadir
 case ts of
 (t:[]) -> do
-moveFile (toRawFilePath t) (toRawFilePath dest)
+moveFile t dest
 return True
 _ -> return False
 , return False
@@ -245,7 +244,7 @@ downloadMagnetLink u metadir dest = ifM download
 , Param "--seed-time=0"
 , Param "--summary-interval=0"
 , Param "-d"
-, File metadir
+, File (fromRawFilePath metadir)
 ]

 downloadTorrentContent :: Key -> URLString -> FilePath -> Int -> MeterUpdate -> Annex Bool
@@ -367,7 +366,7 @@ torrentFileSizes :: RawFilePath -> IO [(FilePath, Integer)]
 torrentFileSizes torrent = do
 #ifdef WITH_TORRENTPARSER
 let mkfile = joinPath . map (scrub . decodeBL)
-b <- B.readFile (fromRawFilePath torrent)
+b <- F.readFile (toOsPath torrent)
 return $ case readTorrent b of
 Left e -> giveup $ "failed to parse torrent: " ++ e
 Right t -> case tInfo t of
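The downloadMagnetLink change above keeps everything in RawFilePath, including the check that exactly one .torrent file landed in the metadata directory. Isolated, under the assumption that RawFilePath is a strict ByteString (singleTorrent is an illustrative name, not the module's API), that selection is:

import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8

-- Accept the download result only when exactly one *.torrent file was
-- produced, as the case expression in downloadMagnetLink above does.
singleTorrent :: [S.ByteString] -> Maybe S.ByteString
singleTorrent fs = case filter (S8.pack ".torrent" `S.isSuffixOf`) fs of
    [t] -> Just t
    _ -> Nothing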
@@ -15,7 +15,6 @@ module Remote.Directory (
 removeDirGeneric,
 ) where

-import qualified Data.ByteString.Lazy as L
 import qualified Data.Map as M
 import qualified Data.List.NonEmpty as NE
 import qualified System.FilePath.ByteString as P
@@ -52,6 +51,7 @@ import Utility.InodeCache
 import Utility.FileMode
 import Utility.Directory.Create
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
 #ifndef mingw32_HOST_OS
 import Utility.OpenFd
 #endif
@@ -241,12 +241,12 @@ checkDiskSpaceDirectory d k = do
 - down. -}
 finalizeStoreGeneric :: RawFilePath -> RawFilePath -> RawFilePath -> IO ()
 finalizeStoreGeneric d tmp dest = do
-removeDirGeneric False (fromRawFilePath d) dest'
+removeDirGeneric False d dest
 createDirectoryUnder [d] (parentDir dest)
 renameDirectory (fromRawFilePath tmp) dest'
 -- may fail on some filesystems
 void $ tryIO $ do
-mapM_ (preventWrite . toRawFilePath) =<< dirContents dest'
+mapM_ preventWrite =<< dirContents dest
 preventWrite dest
 where
 dest' = fromRawFilePath dest
@@ -257,7 +257,7 @@ retrieveKeyFileM d NoChunks cow = fileRetriever' $ \dest k p iv -> do
 src <- liftIO $ fromRawFilePath <$> getLocation d k
 void $ liftIO $ fileCopier cow src (fromRawFilePath dest) p iv
 retrieveKeyFileM d _ _ = byteRetriever $ \k sink ->
-sink =<< liftIO (L.readFile . fromRawFilePath =<< getLocation d k)
+sink =<< liftIO (F.readFile . toOsPath =<< getLocation d k)

 retrieveKeyFileCheapM :: RawFilePath -> ChunkConfig -> Maybe (Key -> AssociatedFile -> FilePath -> Annex ())
 -- no cheap retrieval possible for chunks
@@ -275,9 +275,7 @@ retrieveKeyFileCheapM _ _ = Nothing
 #endif

 removeKeyM :: RawFilePath -> Remover
-removeKeyM d _proof k = liftIO $ removeDirGeneric True
+removeKeyM d _proof k = liftIO $ removeDirGeneric True d (storeDir d k)
-(fromRawFilePath d)
-(fromRawFilePath (storeDir d k))

 {- Removes the directory, which must be located under the topdir.
 -
@@ -293,28 +291,30 @@ removeKeyM d _proof k = liftIO $ removeDirGeneric True
 - can also be removed. Failure to remove such a directory is not treated
 - as an error.
 -}
-removeDirGeneric :: Bool -> FilePath -> FilePath -> IO ()
+removeDirGeneric :: Bool -> RawFilePath -> RawFilePath -> IO ()
 removeDirGeneric removeemptyparents topdir dir = do
-void $ tryIO $ allowWrite (toRawFilePath dir)
+void $ tryIO $ allowWrite dir
 #ifdef mingw32_HOST_OS
 {- Windows needs the files inside the directory to be writable
 - before it can delete them. -}
-void $ tryIO $ mapM_ (allowWrite . toRawFilePath) =<< dirContents dir
+void $ tryIO $ mapM_ allowWrite =<< dirContents dir
 #endif
-tryNonAsync (removeDirectoryRecursive dir) >>= \case
+tryNonAsync (removeDirectoryRecursive dir') >>= \case
 Right () -> return ()
 Left e ->
-unlessM (doesDirectoryExist topdir <&&> (not <$> doesDirectoryExist dir)) $
+unlessM (doesDirectoryExist topdir' <&&> (not <$> doesDirectoryExist dir')) $
 throwM e
 when removeemptyparents $ do
-subdir <- relPathDirToFile (toRawFilePath topdir) (P.takeDirectory (toRawFilePath dir))
+subdir <- relPathDirToFile topdir (P.takeDirectory dir)
 goparents (Just (P.takeDirectory subdir)) (Right ())
 where
 goparents _ (Left _e) = return ()
 goparents Nothing _ = return ()
 goparents (Just subdir) _ = do
-let d = topdir </> fromRawFilePath subdir
+let d = topdir' </> fromRawFilePath subdir
 goparents (upFrom subdir) =<< tryIO (removeDirectory d)
+dir' = fromRawFilePath dir
+topdir' = fromRawFilePath topdir

 checkPresentM :: RawFilePath -> ChunkConfig -> CheckPresent
 checkPresentM d (LegacyChunks _) k = Legacy.checkKey d locations' k
@@ -338,10 +338,10 @@ storeExportM d cow src _k loc p = do
 liftIO $ createDirectoryUnder [d] (P.takeDirectory dest)
 -- Write via temp file so that checkPresentGeneric will not
 -- see it until it's fully stored.
-viaTmp go (fromRawFilePath dest) ()
+viaTmp go (toOsPath dest) ()
 where
 dest = exportPath d loc
-go tmp () = void $ liftIO $ fileCopier cow src tmp p Nothing
+go tmp () = void $ liftIO $ fileCopier cow src (fromRawFilePath (fromOsPath tmp)) p Nothing

 retrieveExportM :: RawFilePath -> CopyCoWTried -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Verification
 retrieveExportM d cow k loc dest p =
@@ -389,8 +389,7 @@ removeExportLocation topdir loc =

 listImportableContentsM :: IgnoreInodes -> RawFilePath -> Annex (Maybe (ImportableContentsChunkable Annex (ContentIdentifier, ByteSize)))
 listImportableContentsM ii dir = liftIO $ do
-l <- dirContentsRecursiveSkipping (const False) False (fromRawFilePath dir)
+l' <- mapM go =<< dirContentsRecursiveSkipping (const False) False dir
-l' <- mapM (go . toRawFilePath) l
 return $ Just $ ImportableContentsComplete $
 ImportableContents (catMaybes l') []
 where
@@ -542,11 +541,11 @@ retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p =

 storeExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> CopyCoWTried -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
 storeExportWithContentIdentifierM ii dir cow src _k loc overwritablecids p = do
-liftIO $ createDirectoryUnder [dir] (toRawFilePath destdir)
+liftIO $ createDirectoryUnder [dir] destdir
-withTmpFileIn destdir template $ \tmpf tmph -> do
+withTmpFileIn (toOsPath destdir) template $ \tmpf tmph -> do
+let tmpf' = fromOsPath tmpf
 liftIO $ hClose tmph
-void $ liftIO $ fileCopier cow src tmpf p Nothing
+void $ liftIO $ fileCopier cow src (fromRawFilePath tmpf') p Nothing
-let tmpf' = toRawFilePath tmpf
 resetAnnexFilePerm tmpf'
 liftIO (R.getSymbolicLinkStatus tmpf') >>= liftIO . mkContentIdentifier ii tmpf' >>= \case
 Nothing -> giveup "unable to generate content identifier"
@@ -558,8 +557,8 @@ storeExportWithContentIdentifierM ii dir cow src _k loc overwritablecids p = do
 return newcid
 where
 dest = exportPath dir loc
-(destdir, base) = splitFileName (fromRawFilePath dest)
+(destdir, base) = P.splitFileName dest
-template = relatedTemplate (base ++ ".tmp")
+template = relatedTemplate (base <> ".tmp")

 removeExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> Key -> ExportLocation -> [ContentIdentifier] -> Annex ()
 removeExportWithContentIdentifierM ii dir k loc removeablecids =
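storeExportM above keeps the comment's rationale: write through a temporary file so checkPresentGeneric never sees a half-written export. As a generic sketch of that idea in plain FilePath terms (this is not git-annex's viaTmp, just the write-then-rename pattern it relies on):

import qualified Data.ByteString as S
import System.Directory (renameFile)

-- Write to a temporary name in the same directory, then rename into
-- place, so readers only ever see a complete file.
writeViaTmp :: FilePath -> S.ByteString -> IO ()
writeViaTmp dest content = do
    let tmp = dest ++ ".tmp"
    S.writeFile tmp content
    renameFile tmp dest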
@@ -24,6 +24,7 @@ import Annex.Tmp
 import Utility.Metered
 import Utility.Directory.Create
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F

 withCheckedFiles :: (FilePath -> IO Bool) -> FilePath -> (FilePath -> Key -> [FilePath]) -> Key -> ([FilePath] -> IO Bool) -> IO Bool
 withCheckedFiles _ [] _locations _ _ = return False
@@ -101,13 +102,13 @@ retrieve :: (RawFilePath -> Key -> [RawFilePath]) -> RawFilePath -> Retriever
 retrieve locations d basek p _dest miv c = withOtherTmp $ \tmpdir -> do
 showLongNote "This remote uses the deprecated chunksize setting. So this will be quite slow."
 let tmp = tmpdir P.</> keyFile basek <> ".directorylegacy.tmp"
-let tmp' = fromRawFilePath tmp
+let tmp' = toOsPath tmp
 let go = \k sink -> do
 liftIO $ void $ withStoredFiles (fromRawFilePath d) (legacyLocations locations) k $ \fs -> do
 forM_ fs $
-S.appendFile tmp' <=< S.readFile
+F.appendFile' tmp' <=< S.readFile
 return True
-b <- liftIO $ L.readFile tmp'
+b <- liftIO $ F.readFile tmp'
 liftIO $ removeWhenExistsWith R.removeLink tmp
 sink b
 byteRetriever go basek p tmp miv c
@@ -439,8 +439,8 @@ remove' :: Git.Repo -> Remote -> Remote.Rsync.RsyncOpts -> AccessMethod -> Remov
 remove' repo r rsyncopts accessmethod proof k
 | not $ Git.repoIsUrl repo = guardUsable repo (giveup "cannot access remote") $
 liftIO $ Remote.Directory.removeDirGeneric True
-(gCryptTopDir repo)
+(toRawFilePath (gCryptTopDir repo))
-(fromRawFilePath (parentDir (toRawFilePath (gCryptLocation repo k))))
+(parentDir (toRawFilePath (gCryptLocation repo k)))
 | Git.repoIsSsh repo = shellOrRsync r removeshell removersync
 | accessmethod == AccessRsyncOverSsh = removersync
 | otherwise = unsupportedUrl
@@ -529,9 +529,10 @@ getConfigViaRsync r gc = do
 let (rsynctransport, rsyncurl, _) = rsyncTransport r gc
 opts <- rsynctransport
 liftIO $ do
-withTmpFile "tmpconfig" $ \tmpconfig _ -> do
+withTmpFile (toOsPath "tmpconfig") $ \tmpconfig _ -> do
+let tmpconfig' = fromRawFilePath $ fromOsPath tmpconfig
 void $ rsync $ opts ++
 [ Param $ rsyncurl ++ "/config"
-, Param tmpconfig
+, Param tmpconfig'
 ]
-Git.Config.fromFile r tmpconfig
+Git.Config.fromFile r tmpconfig'
@@ -324,9 +324,10 @@ tryGitConfigRead autoinit r hasuuid

 geturlconfig = Url.withUrlOptionsPromptingCreds $ \uo -> do
 let url = Git.repoLocation r ++ "/config"
-v <- withTmpFile "git-annex.tmp" $ \tmpfile h -> do
+v <- withTmpFile (toOsPath "git-annex.tmp") $ \tmpfile h -> do
 liftIO $ hClose h
-Url.download' nullMeterUpdate Nothing url tmpfile uo >>= \case
+let tmpfile' = fromRawFilePath $ fromOsPath tmpfile
+Url.download' nullMeterUpdate Nothing url tmpfile' uo >>= \case
 Right () ->
 pipedconfig Git.Config.ConfigNullList
 False url "git"
@@ -334,7 +335,7 @@ tryGitConfigRead autoinit r hasuuid
 , Param "--null"
 , Param "--list"
 , Param "--file"
-, File tmpfile
+, File tmpfile'
 ] >>= return . \case
 Right r' -> Right r'
 Left exitcode -> Left $ "git config exited " ++ show exitcode
@@ -5,6 +5,8 @@
 - Licensed under the GNU AGPL version 3 or higher.
 -}

+{-# LANGUAGE OverloadedStrings #-}
+
 module Remote.Helper.Git where

 import Annex.Common
@@ -21,6 +23,7 @@ import Data.Time.Clock.POSIX
 import System.PosixCompat.Files (modificationTime)
 import qualified Data.Map as M
 import qualified Data.Set as S
+import qualified System.FilePath.ByteString as P

 repoCheap :: Git.Repo -> Bool
 repoCheap = not . Git.repoIsUrl
@@ -59,9 +62,9 @@ guardUsable r fallback a

 gitRepoInfo :: Remote -> Annex [(String, String)]
 gitRepoInfo r = do
-d <- fromRawFilePath <$> fromRepo Git.localGitDir
+d <- fromRepo Git.localGitDir
-mtimes <- liftIO $ mapM (\p -> modificationTime <$> R.getFileStatus (toRawFilePath p))
+mtimes <- liftIO $ mapM (\p -> modificationTime <$> R.getFileStatus p)
-=<< emptyWhenDoesNotExist (dirContentsRecursive (d </> "refs" </> "remotes" </> Remote.name r))
+=<< emptyWhenDoesNotExist (dirContentsRecursive (d P.</> "refs" P.</> "remotes" P.</> encodeBS (Remote.name r)))
 let lastsynctime = case mtimes of
 [] -> "never"
 _ -> show $ posixSecondsToUTCTime $ realToFrac $ maximum mtimes
@@ -374,7 +374,7 @@ sendParams = ifM crippledFileSystem
 withRsyncScratchDir :: (FilePath -> Annex a) -> Annex a
 withRsyncScratchDir a = do
 t <- fromRawFilePath <$> fromRepo gitAnnexTmpObjectDir
-withTmpDirIn t "rsynctmp" a
+withTmpDirIn t (toOsPath "rsynctmp") a

 rsyncRetrieve :: RsyncOpts -> [RsyncUrl] -> FilePath -> Maybe MeterUpdate -> Annex ()
 rsyncRetrieve o rsyncurls dest meterupdate =

Test.hs | 4
@@ -563,7 +563,7 @@ test_magic = intmpclonerepo $ do
 #endif

 test_import :: Assertion
-test_import = intmpclonerepo $ Utility.Tmp.Dir.withTmpDir "importtest" $ \importdir -> do
+test_import = intmpclonerepo $ Utility.Tmp.Dir.withTmpDir (toOsPath (toRawFilePath "importtest")) $ \importdir -> do
 (toimport1, importf1, imported1) <- mktoimport importdir "import1"
 git_annex "import" [toimport1] "import"
 annexed_present_imported imported1
@@ -1894,7 +1894,7 @@ test_gpg_crypto = do
 testscheme "pubkey"
 where
 gpgcmd = Utility.Gpg.mkGpgCmd Nothing
-testscheme scheme = Utility.Tmp.Dir.withTmpDir "gpgtmp" $ \gpgtmp -> do
+testscheme scheme = Utility.Tmp.Dir.withTmpDir (toOsPath (toRawFilePath "gpgtmp")) $ \gpgtmp -> do
 -- Use the system temp directory as gpg temp directory because
 -- it needs to be able to store the agent socket there,
 -- which can be problematic when testing some filesystems.
@@ -5,6 +5,8 @@
 - Licensed under the GNU AGPL version 3 or higher.
 -}

+{-# LANGUAGE OverloadedStrings #-}
+
 module Test.Framework where

 import Test.Tasty
@@ -302,7 +304,7 @@ ensuredir d = do
 - happen concurrently with a test case running, and would be a problem
 - since setEnv is not thread safe. This is run before tasty. -}
 setTestEnv :: IO a -> IO a
-setTestEnv a = Utility.Tmp.Dir.withTmpDir "testhome" $ \tmphome -> do
+setTestEnv a = Utility.Tmp.Dir.withTmpDir (toOsPath "testhome") $ \tmphome -> do
 tmphomeabs <- fromRawFilePath <$> absPath (toRawFilePath tmphome)
 {- Prevent global git configs from affecting the test suite. -}
 Utility.Env.Set.setEnv "HOME" tmphomeabs True
@@ -339,14 +341,14 @@ removeDirectoryForCleanup = removePathForcibly

 cleanup :: FilePath -> IO ()
 cleanup dir = whenM (doesDirectoryExist dir) $ do
-Command.Uninit.prepareRemoveAnnexDir' dir
+Command.Uninit.prepareRemoveAnnexDir' (toRawFilePath dir)
 -- This can fail if files in the directory are still open by a
 -- subprocess.
 void $ tryIO $ removeDirectoryForCleanup dir

 finalCleanup :: IO ()
 finalCleanup = whenM (doesDirectoryExist tmpdir) $ do
-Command.Uninit.prepareRemoveAnnexDir' tmpdir
+Command.Uninit.prepareRemoveAnnexDir' (toRawFilePath tmpdir)
 catchIO (removeDirectoryForCleanup tmpdir) $ \e -> do
 print e
 putStrLn "sleeping 10 seconds and will retry directory cleanup"
@@ -18,7 +18,7 @@ formatDirection :: Direction -> B.ByteString
 formatDirection Upload = "upload"
 formatDirection Download = "download"

-parseDirection :: String -> Maybe Direction
+parseDirection :: B.ByteString -> Maybe Direction
 parseDirection "upload" = Just Upload
 parseDirection "download" = Just Download
 parseDirection _ = Nothing
@@ -40,10 +40,9 @@ formatInfoFile :: GitAnnexDistribution -> String
 formatInfoFile d = replace "keyVariant = " "keyBackendName = " (show d) ++
 "\n" ++ formatGitAnnexDistribution d

-parseInfoFile :: String -> Maybe GitAnnexDistribution
+parseInfoFile :: [String] -> Maybe GitAnnexDistribution
-parseInfoFile s = case lines s of
+parseInfoFile (_oldformat:rest) = parseGitAnnexDistribution (unlines rest)
-(_oldformat:rest) -> parseGitAnnexDistribution (unlines rest)
+parseInfoFile _ = Nothing
-_ -> Nothing

 formatGitAnnexDistribution :: GitAnnexDistribution -> String
 formatGitAnnexDistribution d = unlines
@@ -15,7 +15,6 @@ import Data.Default
 import Data.ByteString.Builder
 import qualified Data.ByteString as S
 import qualified Data.ByteString.Short as S (toShort, fromShort)
-import qualified Data.ByteString.Lazy as L
 import qualified System.FilePath.ByteString as P
 import System.PosixCompat.Files (isRegularFile)
 import Text.Read
@@ -35,6 +34,7 @@ import Utility.FileMode
 import Utility.Tmp
 import qualified Upgrade.V2
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F

 -- v2 adds hashing of filenames of content and location log files.
 -- Key information is encoded in filenames differently, so
@@ -198,11 +198,13 @@ fileKey1 file = readKey1 $
 replace "&a" "&" $ replace "&s" "%" $ replace "%" "/" file

 writeLog1 :: FilePath -> [LogLine] -> IO ()
-writeLog1 file ls = viaTmp L.writeFile file (toLazyByteString $ buildLog ls)
+writeLog1 file ls = viaTmp F.writeFile
+(toOsPath (toRawFilePath file))
+(toLazyByteString $ buildLog ls)

 readLog1 :: FilePath -> IO [LogLine]
 readLog1 file = catchDefaultIO [] $
-parseLog . encodeBL <$> readFileStrict file
+parseLog <$> F.readFile (toOsPath (toRawFilePath file))

 lookupKey1 :: FilePath -> Annex (Maybe (Key, Backend))
 lookupKey1 file = do
@@ -20,6 +20,7 @@ import Annex.Content
 import Utility.Tmp
 import Logs
 import Messages.Progress
+import qualified Utility.FileIO as F

 olddir :: Git.Repo -> FilePath
 olddir g
@@ -73,14 +74,14 @@ locationLogs = do
 config <- Annex.getGitConfig
 dir <- fromRepo gitStateDir
 liftIO $ do
-levela <- dirContents dir
+levela <- dirContents (toRawFilePath dir)
 levelb <- mapM tryDirContents levela
 files <- mapM tryDirContents (concat levelb)
 return $ mapMaybe (islogfile config) (concat files)
 where
 tryDirContents d = catchDefaultIO [] $ dirContents d
-islogfile config f = maybe Nothing (\k -> Just (k, f)) $
+islogfile config f = maybe Nothing (\k -> Just (k, fromRawFilePath f)) $
-locationLogFileKey config (toRawFilePath f)
+locationLogFileKey config f

 inject :: FilePath -> FilePath -> Annex ()
 inject source dest = do
@@ -135,12 +136,15 @@ attrLines =

 gitAttributesUnWrite :: Git.Repo -> IO ()
 gitAttributesUnWrite repo = do
-let attributes = fromRawFilePath (Git.attributes repo)
+let attributes = Git.attributes repo
-whenM (doesFileExist attributes) $ do
+let attributes' = fromRawFilePath attributes
-c <- readFileStrict attributes
+whenM (doesFileExist attributes') $ do
-liftIO $ viaTmp writeFile attributes $ unlines $
+c <- map decodeBS . fileLines'
-filter (`notElem` attrLines) $ lines c
+<$> F.readFile' (toOsPath attributes)
-Git.Command.run [Param "add", File attributes] repo
+liftIO $ viaTmp (writeFile . fromRawFilePath . fromOsPath)
+(toOsPath attributes)
+(unlines $ filter (`notElem` attrLines) c)
+Git.Command.run [Param "add", File attributes'] repo

 stateDir :: FilePath
 stateDir = addTrailingPathSeparator ".git-annex"
@@ -34,8 +34,7 @@ import Utility.InodeCache
 import Utility.DottedVersion
 import Annex.AdjustedBranch
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
-import qualified Data.ByteString as S

 upgrade :: Bool -> Annex UpgradeResult
 upgrade automatic = flip catchNonAsync onexception $ do
@@ -130,7 +129,7 @@ upgradeDirectWorkTree = do
 Just k -> do
 stagePointerFile f Nothing =<< hashPointerFile k
 ifM (isJust <$> getAnnexLinkTarget f)
-( writepointer (fromRawFilePath f) k
+( writepointer f k
 , fromdirect (fromRawFilePath f) k
 )
 Database.Keys.addAssociatedFile k
@@ -158,8 +157,8 @@ upgradeDirectWorkTree = do
 )

 writepointer f k = liftIO $ do
-removeWhenExistsWith R.removeLink (toRawFilePath f)
+removeWhenExistsWith R.removeLink f
-S.writeFile f (formatPointer k)
+F.writeFile' (toOsPath f) (formatPointer k)

 {- Remove all direct mode bookkeeping files. -}
 removeDirectCruft :: Annex ()
@@ -29,6 +29,7 @@ import Annex.Perms
 import Utility.InodeCache
 import Annex.InodeSentinal
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F

 setIndirect :: Annex ()
 setIndirect = do
@@ -88,8 +89,8 @@ associatedFiles key = do
 - the top of the repo. -}
 associatedFilesRelative :: Key -> Annex [FilePath]
 associatedFilesRelative key = do
-mapping <- fromRawFilePath <$> calcRepo (gitAnnexMapping key)
+mapping <- calcRepo (gitAnnexMapping key)
-liftIO $ catchDefaultIO [] $ withFile mapping ReadMode $ \h ->
+liftIO $ catchDefaultIO [] $ F.withFile (toOsPath mapping) ReadMode $ \h ->
 -- Read strictly to ensure the file is closed promptly
 lines <$> hGetContentsStrict h

@@ -118,8 +119,8 @@ goodContent key file =
 recordedInodeCache :: Key -> Annex [InodeCache]
 recordedInodeCache key = withInodeCacheFile key $ \f ->
 liftIO $ catchDefaultIO [] $
-mapMaybe readInodeCache . lines
+mapMaybe (readInodeCache . decodeBS) . fileLines'
-<$> readFileStrict (fromRawFilePath f)
+<$> F.readFile' (toOsPath f)

 {- Removes an inode cache. -}
 removeInodeCache :: Key -> Annex ()
@@ -22,6 +22,7 @@ import qualified Git
 import Git.FilePath
 import Config
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F

 import qualified System.FilePath.ByteString as P
 import System.PosixCompat.Files (isSymbolicLink)
@@ -127,11 +128,12 @@ populateKeysDb = unlessM isBareRepo $ do
 -- checked into the repository.
 updateSmudgeFilter :: Annex ()
 updateSmudgeFilter = do
-lf <- fromRawFilePath <$> Annex.fromRepo Git.attributesLocal
+lf <- Annex.fromRepo Git.attributesLocal
-ls <- liftIO $ lines <$> catchDefaultIO "" (readFileStrict lf)
+ls <- liftIO $ map decodeBS . fileLines'
+<$> catchDefaultIO "" (F.readFile' (toOsPath lf))
 let ls' = removedotfilter ls
 when (ls /= ls') $
-liftIO $ writeFile lf (unlines ls')
+liftIO $ writeFile (fromRawFilePath lf) (unlines ls')
 where
 removedotfilter ("* filter=annex":".* !filter":rest) =
 "* filter=annex" : removedotfilter rest
@@ -189,6 +189,6 @@ winLockFile pid pidfile = do
 prefix = pidfile ++ "."
 suffix = ".lck"
 cleanstale = mapM_ (void . tryIO . removeFile) =<<
-(filter iswinlockfile <$> dirContents (fromRawFilePath (parentDir (toRawFilePath pidfile))))
+(filter iswinlockfile . map fromRawFilePath <$> dirContents (parentDir (toRawFilePath pidfile)))
 iswinlockfile f = suffix `isSuffixOf` f && prefix `isPrefixOf` f
 #endif
@@ -70,7 +70,8 @@ watchDir dir ignored scanevents hooks = do
 scan d = unless (ignoredPath ignored d) $
 -- Do not follow symlinks when scanning.
 -- This mirrors the inotify startup scan behavior.
-mapM_ go =<< emptyWhenDoesNotExist (dirContentsRecursiveSkipping (const False) False d)
+mapM_ (go . fromRawFilePath) =<< emptyWhenDoesNotExist
+(dirContentsRecursiveSkipping (const False) False (toRawFilePath d))
 where
 go f
 | ignoredPath ignored f = noop
@@ -59,7 +59,7 @@ watchDir i dir ignored scanevents hooks
 void (addWatch i watchevents (toInternalFilePath dir) handler)
 `catchIO` failedaddwatch
 withLock lock $
-mapM_ scan =<< filter (not . dirCruft) <$>
+mapM_ scan =<< filter (not . dirCruft . toRawFilePath) <$>
 getDirectoryContents dir
 where
 recurse d = watchDir i d ignored scanevents hooks
@@ -77,7 +77,7 @@ data DirInfo = DirInfo

 getDirInfo :: FilePath -> IO DirInfo
 getDirInfo dir = do
-l <- filter (not . dirCruft) <$> getDirectoryContents dir
+l <- filter (not . dirCruft . toRawFilePath) <$> getDirectoryContents dir
 contents <- S.fromList . catMaybes <$> mapM getDirEnt l
 return $ DirInfo dir contents
 where
@@ -43,7 +43,8 @@ watchDir dir ignored scanevents hooks = do
 runhook h s = maybe noop (\a -> a (filePath evt) s) (h hooks)

 scan d = unless (ignoredPath ignored d) $
-mapM_ go =<< emptyWhenDoesNotExist (dirContentsRecursiveSkipping (const False) False d)
+mapM_ (go . fromRawFilePath) =<< emptyWhenDoesNotExist
+(dirContentsRecursiveSkipping (const False) False (toRawFilePath d))
 where
 go f
 | ignoredPath ignored f = noop
@@ -1,42 +1,48 @@
 {- directory traversal and manipulation
  -
- - Copyright 2011-2023 Joey Hess <id@joeyh.name>
+ - Copyright 2011-2025 Joey Hess <id@joeyh.name>
  -
  - License: BSD-2-clause
  -}
 
 {-# LANGUAGE CPP #-}
 {-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
 {-# OPTIONS_GHC -fno-warn-tabs #-}
 
-module Utility.Directory (
-	module Utility.Directory,
-	module Utility.SystemDirectory
-) where
+module Utility.Directory where
 
+#ifdef WITH_OSPATH
+import System.Directory.OsPath
+#else
+import Utility.SystemDirectory
+#endif
 import Control.Monad
-import System.FilePath
 import System.PosixCompat.Files (isDirectory, isSymbolicLink)
 import Control.Applicative
 import System.IO.Unsafe (unsafeInterleaveIO)
+import qualified System.FilePath.ByteString as P
 import Data.Maybe
 import Prelude
 
-import Utility.SystemDirectory
+import Utility.OsPath
 import Utility.Exception
 import Utility.Monad
 import Utility.FileSystemEncoding
 import qualified Utility.RawFilePath as R
 
-dirCruft :: FilePath -> Bool
+dirCruft :: R.RawFilePath -> Bool
 dirCruft "." = True
 dirCruft ".." = True
 dirCruft _ = False
 
 {- Lists the contents of a directory.
  - Unlike getDirectoryContents, paths are not relative to the directory. -}
-dirContents :: FilePath -> IO [FilePath]
-dirContents d = map (d </>) . filter (not . dirCruft) <$> getDirectoryContents d
+dirContents :: RawFilePath -> IO [RawFilePath]
+dirContents d =
+	map (\p -> d P.</> fromOsPath p)
+		. filter (not . dirCruft . fromOsPath)
+		<$> getDirectoryContents (toOsPath d)
 
 {- Gets files in a directory, and then its subdirectories, recursively,
  - and lazily.
@@ -48,13 +54,13 @@ dirContents d = map (d </>) . filter (not . dirCruft) <$> getDirectoryContents d
  - be accessed (the use of unsafeInterleaveIO would make it difficult to
  - trap such exceptions).
  -}
-dirContentsRecursive :: FilePath -> IO [FilePath]
+dirContentsRecursive :: RawFilePath -> IO [RawFilePath]
 dirContentsRecursive = dirContentsRecursiveSkipping (const False) True
 
 {- Skips directories whose basenames match the skipdir. -}
-dirContentsRecursiveSkipping :: (FilePath -> Bool) -> Bool -> FilePath -> IO [FilePath]
+dirContentsRecursiveSkipping :: (RawFilePath -> Bool) -> Bool -> RawFilePath -> IO [RawFilePath]
 dirContentsRecursiveSkipping skipdir followsubdirsymlinks topdir
-	| skipdir (takeFileName topdir) = return []
+	| skipdir (P.takeFileName topdir) = return []
 	| otherwise = do
 		-- Get the contents of the top directory outside of
 		-- unsafeInterleaveIO, which allows throwing exceptions if
@@ -66,24 +72,26 @@ dirContentsRecursiveSkipping skipdir followsubdirsymlinks topdir
   where
 	go [] = return []
 	go (dir:dirs)
-		| skipdir (takeFileName dir) = go dirs
+		| skipdir (P.takeFileName dir) = go dirs
 		| otherwise = unsafeInterleaveIO $ do
 			(files, dirs') <- collect [] []
 				=<< catchDefaultIO [] (dirContents dir)
 			files' <- go (dirs' ++ dirs)
 			return (files ++ files')
 
+	collect :: [RawFilePath] -> [RawFilePath] -> [RawFilePath] -> IO ([RawFilePath], [RawFilePath])
 	collect files dirs' [] = return (reverse files, reverse dirs')
 	collect files dirs' (entry:entries)
 		| dirCruft entry = collect files dirs' entries
 		| otherwise = do
 			let skip = collect (entry:files) dirs' entries
 			let recurse = collect files (entry:dirs') entries
-			ms <- catchMaybeIO $ R.getSymbolicLinkStatus (toRawFilePath entry)
+			ms <- catchMaybeIO $ R.getSymbolicLinkStatus entry
 			case ms of
 				(Just s)
 					| isDirectory s -> recurse
 					| isSymbolicLink s && followsubdirsymlinks ->
-						ifM (doesDirectoryExist entry)
+						ifM (doesDirectoryExist (toOsPath entry))
 							( recurse
 							, skip
 							)
@@ -98,22 +106,22 @@ dirContentsRecursiveSkipping skipdir followsubdirsymlinks topdir
  - be accessed (the use of unsafeInterleaveIO would make it difficult to
  - trap such exceptions).
  -}
-dirTreeRecursiveSkipping :: (FilePath -> Bool) -> FilePath -> IO [FilePath]
+dirTreeRecursiveSkipping :: (RawFilePath -> Bool) -> RawFilePath -> IO [RawFilePath]
 dirTreeRecursiveSkipping skipdir topdir
-	| skipdir (takeFileName topdir) = return []
+	| skipdir (P.takeFileName topdir) = return []
 	| otherwise = do
 		subdirs <- filterM isdir =<< dirContents topdir
 		go [] subdirs
   where
 	go c [] = return c
 	go c (dir:dirs)
-		| skipdir (takeFileName dir) = go c dirs
+		| skipdir (P.takeFileName dir) = go c dirs
 		| otherwise = unsafeInterleaveIO $ do
 			subdirs <- go []
 				=<< filterM isdir
 				=<< catchDefaultIO [] (dirContents dir)
 			go (subdirs++dir:c) dirs
-	isdir p = isDirectory <$> R.getSymbolicLinkStatus (toRawFilePath p)
+	isdir p = isDirectory <$> R.getSymbolicLinkStatus p
 
 {- When the action fails due to the directory not existing, returns []. -}
 emptyWhenDoesNotExist :: IO [a] -> IO [a]
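As a rough illustration of the converted API (not part of the commit): callers now pass and receive RawFilePaths, converting to and from FilePath at the boundary. The helper below is hypothetical; it assumes toRawFilePath/fromRawFilePath from Utility.FileSystemEncoding, which other hunks in this commit use the same way.

{-# LANGUAGE OverloadedStrings #-}
-- Hypothetical helper: list everything under a directory tree while
-- skipping any ".git" directory, converting at the String boundary.
import Utility.Directory
import Utility.FileSystemEncoding (toRawFilePath, fromRawFilePath)

listSkippingGit :: FilePath -> IO [FilePath]
listSkippingGit top = do
	fs <- emptyWhenDoesNotExist $
		dirContentsRecursiveSkipping (== ".git") False (toRawFilePath top)
	return (map fromRawFilePath fs)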
@@ -1,6 +1,6 @@
-{- streaming directory traversal
+{- streaming directory reading
  -
- - Copyright 2011-2018 Joey Hess <id@joeyh.name>
+ - Copyright 2011-2025 Joey Hess <id@joeyh.name>
  -
  - License: BSD-2-clause
  -}
@@ -14,23 +14,25 @@ module Utility.Directory.Stream (
 	openDirectory,
 	closeDirectory,
 	readDirectory,
-	isDirectoryEmpty,
+	isDirectoryPopulated,
 ) where
 
 import Control.Monad
-import System.FilePath
 import Control.Concurrent
 import Data.Maybe
 import Prelude
 
 #ifdef mingw32_HOST_OS
 import qualified System.Win32 as Win32
+import System.FilePath
 #else
-import qualified System.Posix as Posix
+import qualified Data.ByteString as B
+import qualified System.Posix.Directory.ByteString as Posix
 #endif
 
 import Utility.Directory
 import Utility.Exception
+import Utility.FileSystemEncoding
 
 #ifndef mingw32_HOST_OS
 data DirectoryHandle = DirectoryHandle IsOpen Posix.DirStream
@@ -40,14 +42,14 @@ data DirectoryHandle = DirectoryHandle IsOpen Win32.HANDLE Win32.FindData (MVar
 
 type IsOpen = MVar () -- full when the handle is open
 
-openDirectory :: FilePath -> IO DirectoryHandle
+openDirectory :: RawFilePath -> IO DirectoryHandle
 openDirectory path = do
 #ifndef mingw32_HOST_OS
 	dirp <- Posix.openDirStream path
 	isopen <- newMVar ()
 	return (DirectoryHandle isopen dirp)
 #else
-	(h, fdat) <- Win32.findFirstFile (path </> "*")
+	(h, fdat) <- Win32.findFirstFile (fromRawFilePath path </> "*")
 	-- Indicate that the fdat contains a filename that readDirectory
 	-- has not yet returned, by making the MVar be full.
 	-- (There's always at least a "." entry.)
@@ -75,11 +77,11 @@ closeDirectory (DirectoryHandle isopen h _ alreadyhave) =
 
 -- | Reads the next entry from the handle. Once the end of the directory
 -- is reached, returns Nothing and automatically closes the handle.
-readDirectory :: DirectoryHandle -> IO (Maybe FilePath)
+readDirectory :: DirectoryHandle -> IO (Maybe RawFilePath)
 #ifndef mingw32_HOST_OS
 readDirectory hdl@(DirectoryHandle _ dirp) = do
 	e <- Posix.readDirStream dirp
-	if null e
+	if B.null e
 		then do
 			closeDirectory hdl
 			return Nothing
@@ -102,18 +104,18 @@ readDirectory hdl@(DirectoryHandle _ h fdat mv) = do
   where
 	getfn = do
 		filename <- Win32.getFindDataFileName fdat
-		return (Just filename)
+		return (Just (toRawFilePath filename))
 #endif
 
--- | True only when directory exists and contains nothing.
--- Throws exception if directory does not exist.
-isDirectoryEmpty :: FilePath -> IO Bool
-isDirectoryEmpty d = bracket (openDirectory d) closeDirectory check
+-- | True only when directory exists and is not empty.
+isDirectoryPopulated :: RawFilePath -> IO Bool
+isDirectoryPopulated d = bracket (openDirectory d) closeDirectory check
+	`catchIO` const (return False)
   where
 	check h = do
 		v <- readDirectory h
 		case v of
-			Nothing -> return True
+			Nothing -> return False
 			Just f
-				| not (dirCruft f) -> return False
+				| not (dirCruft f) -> return True
 				| otherwise -> check h
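Note this is more than a rename: the predicate's sense is inverted, and a missing directory now yields False instead of throwing. A hypothetical call site (not from the commit) showing the simplification this allows:

-- Hypothetical call site. Before: not <$> isDirectoryEmpty d, with the
-- caller responsible for the directory possibly not existing. After:
-- isDirectoryPopulated is simply False for a missing directory.
import Control.Monad (unless)
import Utility.Directory.Stream (isDirectoryPopulated)
import Utility.FileSystemEncoding (toRawFilePath)

removeIfUnused :: FilePath -> IO () -> IO ()
removeIfUnused dir remove = do
	populated <- isDirectoryPopulated (toRawFilePath dir)
	unless populated remove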
Utility/FileIO.hs (new normal file, 107 lines)
@@ -0,0 +1,107 @@
+{- File IO on OsPaths.
+ -
+ - Since Prelude exports many of these as well, this needs to be imported
+ - qualified.
+ -
+ - Copyright 2025 Joey Hess <id@joeyh.name>
+ -
+ - License: BSD-2-clause
+ -}
+
+{-# OPTIONS_GHC -fno-warn-tabs #-}
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+module Utility.FileIO
+(
+	withFile,
+	openFile,
+	readFile,
+	readFile',
+	writeFile,
+	writeFile',
+	appendFile,
+	appendFile',
+	openTempFile,
+) where
+
+#ifdef WITH_OSPATH
+
+#ifndef mingw32_HOST_OS
+import System.File.OsPath
+#else
+-- On Windows, System.File.OsPath does not handle UNC-style conversion itself,
+-- so that has to be done when calling it. See
+-- https://github.com/haskell/file-io/issues/39
+import Utility.Path.Windows
+import Utility.OsPath
+import System.IO (IO, Handle, IOMode)
+import qualified System.File.OsPath as O
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Lazy as L
+import Control.Applicative
+
+withFile :: OsPath -> IOMode -> (Handle -> IO r) -> IO r
+withFile f m a = do
+	f' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath f)
+	O.withFile f' m a
+
+openFile :: OsPath -> IOMode -> IO Handle
+openFile f m = do
+	f' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath f)
+	O.openFile f' m
+
+readFile :: OsPath -> IO L.ByteString
+readFile f = do
+	f' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath f)
+	O.readFile f'
+
+readFile' :: OsPath -> IO B.ByteString
+readFile' f = do
+	f' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath f)
+	O.readFile' f'
+
+writeFile :: OsPath -> L.ByteString -> IO ()
+writeFile f b = do
+	f' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath f)
+	O.writeFile f' b
+
+writeFile' :: OsPath -> B.ByteString -> IO ()
+writeFile' f b = do
+	f' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath f)
+	O.writeFile' f' b
+
+appendFile :: OsPath -> L.ByteString -> IO ()
+appendFile f b = do
+	f' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath f)
+	O.appendFile f' b
+
+appendFile' :: OsPath -> B.ByteString -> IO ()
+appendFile' f b = do
+	f' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath f)
+	O.appendFile' f' b
+
+openTempFile :: OsPath -> OsPath -> IO (OsPath, Handle)
+openTempFile p s = do
+	p' <- toOsPath <$> convertToWindowsNativeNamespace (fromOsPath p)
+	O.openTempFile p' s
+#endif
+
+#else
+-- When not building with OsPath, export FilePath versions
+-- instead. However, functions still use ByteString for the
+-- file content in that case, unlike the Strings used by the Prelude.
+import Utility.OsPath
+import System.IO (withFile, openFile, openTempFile, IO)
+import Data.ByteString.Lazy (readFile, writeFile, appendFile)
+import qualified Data.ByteString as B
+
+readFile' :: OsPath -> IO B.ByteString
+readFile' = B.readFile
+
+writeFile' :: OsPath -> B.ByteString -> IO ()
+writeFile' = B.writeFile
+
+appendFile' :: OsPath -> B.ByteString -> IO ()
+appendFile' = B.appendFile
+#endif
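A sketch of how the rest of the commit consumes this module (the function below is invented): it is imported qualified since it shadows Prelude names, takes OsPath arguments (assumed to be exported by Utility.OsPath), and uses ByteString for file contents; on Windows the UNC-namespace conversion happens inside the module, per the comment above.

-- Hypothetical consumer, mirroring the pattern used elsewhere in this
-- commit: qualified import, OsPath argument, strict ByteString content.
import qualified Data.ByteString as B
import qualified Utility.FileIO as F
import Utility.OsPath (OsPath)

rewriteMarker :: OsPath -> IO ()
rewriteMarker p = do
	old <- F.readFile' p             -- strict read of the whole file
	F.writeFile' p (B.snoc old 10)   -- write it back with a trailing newline byte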
@@ -27,6 +27,8 @@ import Control.Monad.Catch
 import Utility.Exception
 import Utility.FileSystemEncoding
 import qualified Utility.RawFilePath as R
+import qualified Utility.FileIO as F
+import Utility.OsPath
 
 {- Applies a conversion function to a file's mode. -}
 modifyFileMode :: RawFilePath -> (FileMode -> FileMode) -> IO ()
@@ -178,7 +180,7 @@ writeFileProtected' :: RawFilePath -> (Handle -> IO ()) -> IO ()
 writeFileProtected' file writer = bracket setup cleanup writer
   where
 	setup = do
-		h <- protectedOutput $ openFile (fromRawFilePath file) WriteMode
+		h <- protectedOutput $ F.openFile (toOsPath file) WriteMode
 		void $ tryIO $ modifyFileMode file $ removeModes otherGroupModes
 		return h
 	cleanup = hClose
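For orientation, writeFileProtected' keeps the RawFilePath-plus-writer shape shown in the hunk header; only the open now goes through the OsPath-aware F.openFile. A hypothetical caller, assuming the function is exported from Utility.FileMode:

-- Hypothetical caller of writeFileProtected' (signature as in the hunk
-- header above): write a secret with group/other modes stripped.
import System.IO (hPutStr)
import Utility.FileMode (writeFileProtected')
import Utility.FileSystemEncoding (toRawFilePath)

storeCreds :: FilePath -> String -> IO ()
storeCreds f creds =
	writeFileProtected' (toRawFilePath f) (\h -> hPutStr h creds)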
Some files were not shown because too many files have changed in this diff.