{- management of the git-annex branch
-
- Copyright 2011-2016 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Annex.Branch (
	fullname,
	name,
	hasOrigin,
	hasSibling,
	siblingBranches,
	create,
	update,
	forceUpdate,
	updateTo,
	get,
	getHistorical,
	change,
	maybeChange,
	commit,
	forceCommit,
	files,
	withIndex,
	performTransitions,
) where
import qualified Data.ByteString.Lazy as L
import qualified Data.Set as S
import qualified Data.Map as M
import Data.Bits.Utils
import Data.Function
import Control.Concurrent (threadDelay)
import Annex.Common
import Annex.BranchState
import Annex.Journal
import Annex.GitOverlay
import qualified Git
import qualified Git.Command
import qualified Git.Ref
import qualified Git.RefLog
import qualified Git.Sha
import qualified Git.Branch
import qualified Git.UnionMerge
import qualified Git.UpdateIndex
import Git.LsTree (lsTreeParams)
import qualified Git.HashObject
import Annex.HashObject
import Git.Types
import Git.FilePath
import Annex.CatFile
import Annex.Perms
import Logs
import Logs.Transitions
import Logs.Trust.Pure
import Logs.Difference.Pure
import Annex.ReplaceFile
import qualified Annex.Queue
import Annex.Branch.Transitions
import qualified Annex
import Annex.Hook
import Utility.FileSystemEncoding

{- Name of the branch that is used to store git-annex's information. -}
name :: Git.Ref
name = Git.Ref "git-annex"

{- Fully qualified name of the branch. -}
fullname :: Git.Ref
fullname = Git.Ref $ "refs/heads/" ++ fromRef name

{- Branch's name in origin. -}
originname :: Git.Ref
originname = Git.Ref $ "origin/" ++ fromRef name

{- Does origin/git-annex exist? -}
hasOrigin :: Annex Bool
hasOrigin = inRepo $ Git.Ref.exists originname

{- Does the git-annex branch or a sibling foo/git-annex branch exist? -}
hasSibling :: Annex Bool
hasSibling = not . null <$> siblingBranches

{- List of git-annex (shas, branches), including the main one and any
 - from remotes. Duplicates are filtered out. -}
siblingBranches :: Annex [(Git.Sha, Git.Branch)]
siblingBranches = inRepo $ Git.Ref.matchingUniq [name]

{- Creates the branch, if it does not already exist. -}
create :: Annex ()
create = void getBranch

{- Returns the ref of the branch, creating it first if necessary. -}
getBranch :: Annex Git.Ref
getBranch = maybe (hasOrigin >>= go >>= use) return =<< branchsha
  where
	go True = do
		inRepo $ Git.Command.run
			[Param "branch", Param $ fromRef name, Param $ fromRef originname]
		fromMaybe (error $ "failed to create " ++ fromRef name)
			<$> branchsha
	go False = withIndex' True $
		inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit "branch created" fullname []
	use sha = do
		setIndexSha sha
		return sha
	branchsha = inRepo $ Git.Ref.sha fullname

{- Ensures that the branch and index are up-to-date; should be
 - called before data is read from it. Runs only once per git-annex run. -}
update :: Annex ()
update = runUpdateOnce $ void $ updateTo =<< siblingBranches

{- Forces an update even if one has already been run. -}
forceUpdate :: Annex Bool
forceUpdate = updateTo =<< siblingBranches

{- Merges the specified Refs into the index, if they have any changes not
 - already in it. The Branch names are only used in the commit message;
 - it's even possible that the provided Branches have not been updated to
 - point to the Refs yet.
 -
 - The branch is fast-forwarded if possible, otherwise a merge commit is
 - made.
 -
 - Before Refs are merged into the index, it's important to first stage the
 - journal into the index. Otherwise, any changes in the journal would
 - later get staged, and might overwrite changes made during the merge.
 - This is only done if some of the Refs do need to be merged.
 -
 - Also handles performing any Transitions that have not yet been
 - performed, in either the local branch, or the Refs.
 -
 - Returns True if any refs were merged in, False otherwise.
 -}
updateTo :: [(Git.Sha, Git.Branch)] -> Annex Bool
updateTo pairs = do
	-- ensure branch exists, and get its current ref
	branchref <- getBranch
	dirty <- journalDirty
	ignoredrefs <- getIgnoredRefs
	let unignoredrefs = excludeset ignoredrefs pairs
	tomerge <- if null unignoredrefs
		then return []
		else do
			mergedrefs <- getMergedRefs
			filterM isnewer (excludeset mergedrefs unignoredrefs)
	if null tomerge
		{- Even when no refs need to be merged, the index
		 - may still be updated if the branch has gotten ahead
		 - of the index. -}
		then do
			whenM (needUpdateIndex branchref) $ lockJournal $ \jl -> do
				forceUpdateIndex jl branchref
				{- When there are journalled changes
				 - as well as the branch being updated,
				 - a commit needs to be done. -}
				when dirty $
					go branchref True [] jl
			{- Only needed for a while, to populate the
			 - newly added merged refs cache with already
			 - merged refs. Can be safely removed at any time. -}
			addMergedRefs unignoredrefs
		else lockJournal $ go branchref dirty tomerge
	return $ not $ null tomerge
  where
	excludeset s = filter (\(r, _) -> S.notMember r s)
	isnewer (r, _) = inRepo $ Git.Branch.changed fullname r
	go branchref dirty tomerge jl = withIndex $ do
		let (refs, branches) = unzip tomerge
		cleanjournal <- if dirty then stageJournal jl else return noop
		let merge_desc = if null tomerge
			then "update"
			else "merging " ++
				unwords (map Git.Ref.describe branches) ++
				" into " ++ fromRef name
		localtransitions <- parseTransitionsStrictly "local"
			<$> getLocal transitionsLog
		unless (null tomerge) $ do
			showSideAction merge_desc
			mapM_ checkBranchDifferences refs
			mergeIndex jl refs
		let commitrefs = nub $ fullname:refs
		ifM (handleTransitions jl localtransitions commitrefs)
			( runAnnexHook postUpdateAnnexHook
			, do
				ff <- if dirty
					then return False
					else inRepo $ Git.Branch.fastForward fullname refs
				if ff
					then updateIndex jl branchref
					else commitIndex jl branchref merge_desc commitrefs
			)
		addMergedRefs tomerge
		liftIO cleanjournal

{- Gets the content of a file, which may be in the journal, or in the index
 - (and committed to the branch).
 -
 - Updates the branch if necessary, to ensure the most up-to-date available
 - content is returned.
 -
 - Returns an empty string if the file doesn't exist yet. -}
get :: FilePath -> Annex String
get file = do
	update
	getLocal file

{- Like get, but does not merge the branch, so the info returned may not
 - reflect changes in remotes.
 - (Changing the value this returns, and then merging is always the
 - same as using get, and then changing its value.) -}
getLocal :: FilePath -> Annex String
getLocal file = go =<< getJournalFileStale file
  where
	go (Just journalcontent) = return journalcontent
	go Nothing = getRaw file
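
{- Gets the content of a file as recorded on the branch, bypassing
 - the journal. -}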
getRaw :: FilePath -> Annex String
getRaw = getRef fullname
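
{- Gets the content of a file as it stood on the branch at the given
 - date, via git's reflog-based date-qualified refs. -}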
getHistorical :: RefDate -> FilePath -> Annex String
getHistorical date file =
	-- This check avoids some ugly error messages when the reflog
	-- is empty.
	ifM (null <$> inRepo (Git.RefLog.get' [Param (fromRef fullname), Param "-n1"]))
		( giveup ("No reflog for " ++ fromRef fullname)
		, getRef (Git.Ref.dateRef fullname date) file
		)
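
{- Gets the content of a file from the given ref. -}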
getRef :: Ref -> FilePath -> Annex String
getRef ref file = withIndex $ decodeBS <$> catFile ref file

{- Applies a function to modify the content of a file.
 -
 - Note that this does not cause the branch to be merged, it only
 - modifies the current content of the file on the branch.
 -}
change :: FilePath -> (String -> String) -> Annex ()
change file f = lockJournal $ \jl -> f <$> getLocal file >>= set jl file
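
{- For example (illustrative only; real callers go through the Logs.*
 - modules, and the file name here is made up):
 -
 - > change "example.log" (++ "another line\n")
 -
 - appends a line to example.log in the journal; the journalled change
 - is committed to the branch later, by commit or update. -}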

{- Applies a function which can modify the content of a file, or not. -}
maybeChange :: FilePath -> (String -> Maybe String) -> Annex ()
maybeChange file f = lockJournal $ \jl -> do
	v <- getLocal file
	case f v of
		Just v' | v' /= v -> set jl file v'
		_ -> noop

{- Records new content of a file into the journal -}
set :: JournalLocked -> FilePath -> String -> Annex ()
set = setJournalFile

{- Stages the journal, and commits staged changes to the branch. -}
commit :: String -> Annex ()
commit = whenM journalDirty . forceCommit

{- Commits the current index to the branch even without any journalled
 - changes. -}
forceCommit :: String -> Annex ()
forceCommit message = lockJournal $ \jl -> do
	cleanjournal <- stageJournal jl
	ref <- getBranch
	withIndex $ commitIndex jl ref message [fullname]
	liftIO cleanjournal

{- Commits the staged changes in the index to the branch.
 -
 - Ensures that the branch's index file is first updated to merge the state
 - of the branch at branchref, before running the commit action. This
 - is needed because the branch may have had changes pushed to it, that
 - are not yet reflected in the index.
 -
 - The branchref value can have been obtained using getBranch at any
 - previous point, though getting it a long time ago makes the race
 - more likely to occur.
 -
 - Note that changes may be pushed to the branch at any point in time!
 - So, there's a race. If the commit is made using the newly pushed tip of
 - the branch as its parent, and that ref has not yet been merged into the
 - index, then the result is that the commit will revert the pushed
 - changes, since they have not been merged into the index. This race
 - is detected and another commit made to fix it.
 -
 - (It's also possible for the branch to be overwritten,
 - losing the commit made here. But that's ok; the data is still in the
 - index and will get committed again later.)
 -}
commitIndex :: JournalLocked -> Git.Ref -> String -> [Git.Ref] -> Annex ()
commitIndex jl branchref message parents = do
	showStoringStateAction
	commitIndex' jl branchref message message 0 parents
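
{- Does the commit, and handles the race described above by merging in
 - any lost refs and committing again. -}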
commitIndex' :: JournalLocked -> Git.Ref -> String -> String -> Integer -> [Git.Ref] -> Annex ()
commitIndex' jl branchref message basemessage retrynum parents = do
	updateIndex jl branchref
	committedref <- inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit message fullname parents
	setIndexSha committedref
	parentrefs <- commitparents <$> catObject committedref
	when (racedetected branchref parentrefs) $
		fixrace committedref parentrefs
  where
	-- look for "parent ref" lines and return the refs
	commitparents = map (Git.Ref . snd) . filter isparent .
		map (toassoc . decodeBS) . L.split newline
	newline = c2w8 '\n'
	toassoc = separate (== ' ')
	isparent (k,_) = k == "parent"
	{- The race can be detected by checking the commit's
	 - parent, which will be the newly pushed branch,
	 - instead of the expected ref that the index was updated to. -}
	racedetected expectedref parentrefs
		| expectedref `elem` parentrefs = False -- good parent
		| otherwise = True -- race!
	{- To recover from the race, union merge the lost refs
	 - into the index. -}
	fixrace committedref lostrefs = do
		showSideAction "recovering from race"
		let retrynum' = retrynum+1
		-- small sleep to let any activity that caused
		-- the race settle down
		liftIO $ threadDelay (100000 + fromInteger retrynum')
		mergeIndex jl lostrefs
		let racemessage = basemessage ++ " (recovery from race #" ++ show retrynum' ++ "; expected commit parent " ++ show branchref ++ " but found " ++ show lostrefs ++ " )"
		commitIndex' jl committedref racemessage basemessage retrynum' [committedref]

{- Lists all files on the branch. There may be duplicates in the list. -}
files :: Annex [FilePath]
files = do
	update
	(++)
		<$> branchFiles
		<*> getJournalledFilesStale

{- Files in the branch, not including any from journalled changes,
 - and without updating the branch. -}
branchFiles :: Annex [FilePath]
branchFiles = withIndex $ inRepo $ Git.Command.pipeNullSplitZombie $
	lsTreeParams fullname [Param "--name-only"]

{- Populates the branch's index file with the current branch contents.
 -
 - This is only done when the index doesn't yet exist, and the index
 - is used to build up changes to be committed to the branch, and merge
 - in changes from other branches.
 -}
genIndex :: Git.Repo -> IO ()
genIndex g = Git.UpdateIndex.streamUpdateIndex g
	[Git.UpdateIndex.lsTree fullname g]

{- Merges the specified refs into the index.
 - Any changes staged in the index will be preserved. -}
mergeIndex :: JournalLocked -> [Git.Ref] -> Annex ()
mergeIndex jl branches = do
	prepareModifyIndex jl
	hashhandle <- hashObjectHandle
	ch <- catFileHandle
	inRepo $ \g -> Git.UnionMerge.mergeIndex hashhandle ch g branches

{- Removes any stale git lock file, to avoid git falling over when
 - updating the index.
 -
 - Since all modifications of the index are performed inside this module,
 - and only when the journal is locked, the fact that the journal has to be
 - locked when this is called ensures that no other process is currently
 - modifying the index. So any index.lock file must be stale, caused
 - by git running when the system crashed, or the repository's disk was
 - removed, etc.
 -}
prepareModifyIndex :: JournalLocked -> Annex ()
prepareModifyIndex _jl = do
	index <- fromRepo gitAnnexIndex
	void $ liftIO $ tryIO $ removeFile $ index ++ ".lock"

{- Runs an action using the branch's index file. -}
withIndex :: Annex a -> Annex a
withIndex = withIndex' False
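
{- The Bool parameter is used when bootstrapping the branch: then the
 - branch is not created first, and the index is not populated from it. -}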
withIndex' :: Bool -> Annex a -> Annex a
withIndex' bootstrapping a = do
	f <- fromRepo gitAnnexIndex
	withIndexFile f $ do
		checkIndexOnce $ unlessM (liftIO $ doesFileExist f) $ do
			unless bootstrapping create
			createAnnexDirectory $ takeDirectory f
			unless bootstrapping $ inRepo genIndex
		a

{- Updates the branch's index to reflect the current contents of the branch.
 - Any changes staged in the index will be preserved.
 -
 - Compares the ref stored in the lock file with the current
 - ref of the branch to see if an update is needed.
 -}
updateIndex :: JournalLocked -> Git.Ref -> Annex ()
updateIndex jl branchref = whenM (needUpdateIndex branchref) $
	forceUpdateIndex jl branchref

forceUpdateIndex :: JournalLocked -> Git.Ref -> Annex ()
forceUpdateIndex jl branchref = do
	withIndex $ mergeIndex jl [fullname]
	setIndexSha branchref

{- Checks if the index needs to be updated. -}
needUpdateIndex :: Git.Ref -> Annex Bool
needUpdateIndex branchref = do
	f <- fromRepo gitAnnexIndexStatus
	committedref <- Git.Ref . firstLine <$>
		liftIO (catchDefaultIO "" $ readFileStrict f)
	return (committedref /= branchref)

{- Record that the branch's index has been updated to correspond to a
 - given ref of the branch. -}
setIndexSha :: Git.Ref -> Annex ()
setIndexSha ref = do
	f <- fromRepo gitAnnexIndexStatus
	liftIO $ writeFile f $ fromRef ref ++ "\n"
	setAnnexFilePerm f
	runAnnexHook postUpdateAnnexHook

{- Stages the journal into the index and returns an action that will
 - clean up the staged journal files, which should only be run once
 - the index has been committed to the branch.
 -
 - Before staging, this removes any existing git index file lock.
 - This is safe to do because stageJournal is the only thing that
 - modifies this index file, and only one can run at a time, because
 - the journal is locked. So any existing git index file lock must be
 - stale, and the journal must contain any data that was in the process
 - of being written to the index file when it crashed.
 -}
stageJournal :: JournalLocked -> Annex (IO ())
stageJournal jl = withIndex $ do
	prepareModifyIndex jl
	g <- gitRepo
	let dir = gitAnnexJournalDir g
	(jlogf, jlogh) <- openjlog
	h <- hashObjectHandle
	withJournalHandle $ \jh ->
		Git.UpdateIndex.streamUpdateIndex g
			[genstream dir h jh jlogh]
	return $ cleanup dir jlogh jlogf
  where
	genstream dir h jh jlogh streamer = do
		v <- readDirectory jh
		case v of
			Nothing -> return ()
			Just file -> do
				unless (dirCruft file) $ do
					let path = dir </> file
					sha <- Git.HashObject.hashFile h path
					hPutStrLn jlogh file
					streamer $ Git.UpdateIndex.updateIndexLine
						sha FileBlob (asTopFilePath $ fileJournal file)
				genstream dir h jh jlogh streamer
	-- Clean up the staged files, as listed in the temp log file.
	-- The temp file is used to avoid needing to buffer all the
	-- filenames in memory.
	cleanup dir jlogh jlogf = do
		hFlush jlogh
		hSeek jlogh AbsoluteSeek 0
		stagedfs <- lines <$> hGetContents jlogh
		mapM_ (removeFile . (dir </>)) stagedfs
		hClose jlogh
		nukeFile jlogf
	openjlog = do
		tmpdir <- fromRepo gitAnnexTmpMiscDir
		createAnnexDirectory tmpdir
		liftIO $ openTempFile tmpdir "jlog"

{- This is run after the refs have been merged into the index,
 - but before the result is committed to the branch.
 - (Which is why it's passed the contents of the local branch's
 - transition log before that merge took place.)
 -
 - When the refs contain transitions that have not yet been done locally,
 - the transitions are performed on the index, and a new branch
 - is created from the result.
 -
 - When there are transitions recorded locally that have not been done
 - to the remote refs, the transitions are performed in the index,
 - and committed to the existing branch. In this case, the untransitioned
 - remote refs cannot be merged into the branch (since transitions
 - throw away history), so they are added to the list of refs to ignore,
 - to avoid re-merging content from them again.
 -}
handleTransitions :: JournalLocked -> Transitions -> [Git.Ref] -> Annex Bool
handleTransitions jl localts refs = do
	m <- M.fromList <$> mapM getreftransition refs
	let remotets = M.elems m
	if all (localts ==) remotets
		then return False
		else do
			let allts = combineTransitions (localts:remotets)
			let (transitionedrefs, untransitionedrefs) =
				partition (\r -> M.lookup r m == Just allts) refs
			performTransitionsLocked jl allts (localts /= allts) transitionedrefs
			ignoreRefs untransitionedrefs
			return True
  where
	getreftransition ref = do
		ts <- parseTransitionsStrictly "remote" . decodeBS
			<$> catFile ref transitionsLog
		return (ref, ts)

{- Performs the specified transitions on the contents of the index file,
 - commits it to the branch, or creates a new branch.
 -}
performTransitions :: Transitions -> Bool -> [Ref] -> Annex ()
performTransitions ts neednewlocalbranch transitionedrefs = lockJournal $ \jl ->
	performTransitionsLocked jl ts neednewlocalbranch transitionedrefs

performTransitionsLocked :: JournalLocked -> Transitions -> Bool -> [Ref] -> Annex ()
performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
	-- For simplicity & speed, we're going to use the Annex.Queue to
	-- update the git-annex branch, while it usually holds changes
	-- for the head branch. Flush any such changes.
	Annex.Queue.flush
	withIndex $ do
		prepareModifyIndex jl
		run $ mapMaybe getTransitionCalculator tlist
		Annex.Queue.flush
		if neednewlocalbranch
			then do
				committedref <- inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit message fullname transitionedrefs
				setIndexSha committedref
			else do
				ref <- getBranch
				commitIndex jl ref message (nub $ fullname:transitionedrefs)
  where
	message
		| neednewlocalbranch && null transitionedrefs = "new branch for transition " ++ tdesc
		| otherwise = "continuing transition " ++ tdesc
	tdesc = show $ map describeTransition tlist
	tlist = transitionList ts
	{- The changes to make to the branch are calculated and applied to
	 - the branch directly, rather than going through the journal,
	 - which would be inefficient. (And the journal is not designed
	 - to hold changes to every file in the branch at once.)
	 -
	 - When a file in the branch is changed by transition code,
	 - that value is remembered and fed into the code for subsequent
	 - transitions.
	 -}
	run [] = noop
	run changers = do
		trustmap <- calcTrustMap <$> getRaw trustLog
		fs <- branchFiles
		forM_ fs $ \f -> do
			content <- getRaw f
			apply changers f content trustmap
	apply [] _ _ _ = return ()
	apply (changer:rest) file content trustmap =
		case changer file content trustmap of
			RemoveFile -> do
				Annex.Queue.addUpdateIndex
					=<< inRepo (Git.UpdateIndex.unstageFile file)
				-- File is deleted; can't run any other
				-- transitions on it.
				return ()
			ChangeFile content' -> do
				sha <- hashBlob content'
				Annex.Queue.addUpdateIndex $ Git.UpdateIndex.pureStreamer $
					Git.UpdateIndex.updateIndexLine sha FileBlob (asTopFilePath file)
				apply rest file content' trustmap
			PreserveFile ->
				apply rest file content trustmap
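
{- Checks for incompatible tuning differences between the local
 - repository and a remote ref, before it is merged in. -}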
checkBranchDifferences :: Git.Ref -> Annex ()
checkBranchDifferences ref = do
	theirdiffs <- allDifferences . parseDifferencesLog . decodeBS
		<$> catFile ref differenceLog
	mydiffs <- annexDifferences <$> Annex.getGitConfig
	when (theirdiffs /= mydiffs) $
		giveup "Remote repository is tuned in incompatible way; cannot be merged with local repository."
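
{- Adds refs to the persistent set of refs that should not be merged
 - into the branch. -}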
ignoreRefs :: [Git.Sha] -> Annex ()
ignoreRefs rs = do
	old <- getIgnoredRefs
	let s = S.unions [old, S.fromList rs]
	f <- fromRepo gitAnnexIgnoredRefs
	replaceFile f $ \tmp -> liftIO $ writeFile tmp $
		unlines $ map fromRef $ S.elems s
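
{- The set of refs that have been marked as not to be merged. -}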
getIgnoredRefs :: Annex (S.Set Git.Sha)
getIgnoredRefs = S.fromList . mapMaybe Git.Sha.extractSha . lines <$> content
  where
	content = do
		f <- fromRepo gitAnnexIgnoredRefs
		liftIO $ catchDefaultIO "" $ readFile f
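
{- Records that refs have been merged into the branch's index, so they
 - will not be merged again. -}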
addMergedRefs :: [(Git.Sha, Git.Branch)] -> Annex ()
addMergedRefs [] = return ()
addMergedRefs new = do
	old <- getMergedRefs'
	-- Keep only the newest sha for each branch.
	let l = nubBy ((==) `on` snd) (new ++ old)
	f <- fromRepo gitAnnexMergedRefs
	replaceFile f $ \tmp -> liftIO $ writeFile tmp $
		unlines $ map (\(s, b) -> fromRef s ++ '\t' : fromRef b) l
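
{- The set of refs that have already been merged into the branch. -}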
getMergedRefs :: Annex (S.Set Git.Sha)
getMergedRefs = S.fromList . map fst <$> getMergedRefs'

getMergedRefs' :: Annex [(Git.Sha, Git.Branch)]
getMergedRefs' = do
	f <- fromRepo gitAnnexMergedRefs
	s <- liftIO $ catchDefaultIO "" $ readFile f
	return $ map parse $ lines s
  where
	parse l =
		let (s, b) = separate (== '\t') l
		in (Ref s, Ref b)