commit 30665396b6
Record git-annex (5.20140717) in archive suite sid

479 changed files with 10634 additions and 770 deletions
.gitattributes (vendored, 1 deletion)
@@ -1 +0,0 @@
-debian/changelog merge=dpkg-mergechangelogs
.gitignore (vendored, 34 deletions)
@@ -1,34 +0,0 @@
-tags
-Setup
-*.hi
-*.o
-tmp
-test
-build-stamp
-Build/SysConfig.hs
-Build/InstallDesktopFile
-Build/EvilSplicer
-Build/Standalone
-Build/OSXMkLibs
-Build/LinuxMkLibs
-git-annex
-git-annex.1
-git-annex-shell.1
-git-union-merge
-git-union-merge.1
-doc/.ikiwiki
-html
-*.tix
-.hpc
-dist
-# Sandboxed builds
-cabal-dev
-.cabal-sandbox
-cabal.sandbox.config
-cabal.config
-# Project-local emacs configuration
-.dir-locals.el
-# OSX related
-.DS_Store
-.virthualenv
-.tasty-rerun-log
@@ -5,19 +5,23 @@
 - Licensed under the GNU GPL version 3 or higher.
 -}

-module Annex.AutoMerge (autoMergeFrom) where
+module Annex.AutoMerge
+( autoMergeFrom
+, resolveMerge
+, commitResolvedMerge
+) where

 import Common.Annex
 import qualified Annex.Queue
 import Annex.Direct
 import Annex.CatFile
 import Annex.Link
-import qualified Git.Command
 import qualified Git.LsFiles as LsFiles
 import qualified Git.UpdateIndex as UpdateIndex
 import qualified Git.Merge
 import qualified Git.Ref
 import qualified Git
+import qualified Git.Branch
 import Git.Types (BlobType(..))
 import Config
 import Annex.ReplaceFile
@@ -28,18 +32,22 @@ import qualified Data.Set as S

 {- Merges from a branch into the current branch
 - (which may not exist yet),
- - with automatic merge conflict resolution. -}
-autoMergeFrom :: Git.Ref -> (Maybe Git.Ref) -> Annex Bool
-autoMergeFrom branch currbranch = do
+ - with automatic merge conflict resolution.
+ -
+ - Callers should use Git.Branch.changed first, to make sure that
+ - there are changed from the current branch to the branch being merged in.
+ -}
+autoMergeFrom :: Git.Ref -> (Maybe Git.Ref) -> Git.Branch.CommitMode -> Annex Bool
+autoMergeFrom branch currbranch commitmode = do
 showOutput
 case currbranch of
 Nothing -> go Nothing
 Just b -> go =<< inRepo (Git.Ref.sha b)
 where
 go old = ifM isDirect
-( mergeDirect currbranch old branch (resolveMerge old branch)
-, inRepo (Git.Merge.mergeNonInteractive branch)
-<||> (resolveMerge old branch <&&> commitResolvedMerge)
+( mergeDirect currbranch old branch (resolveMerge old branch) commitmode
+, inRepo (Git.Merge.mergeNonInteractive branch commitmode)
+<||> (resolveMerge old branch <&&> commitResolvedMerge commitmode)
 )

 {- Resolves a conflicted merge. It's important that any conflicts be
@@ -106,11 +114,11 @@ resolveMerge' (Just us) them u = do
 makelink keyUs
 -- Our side is annexed file, other side is not.
 (Just keyUs, Nothing) -> resolveby $ do
-graftin them file
+graftin them file LsFiles.valThem LsFiles.valThem
 makelink keyUs
 -- Our side is not annexed file, other side is.
 (Nothing, Just keyThem) -> resolveby $ do
-graftin us file
+graftin us file LsFiles.valUs LsFiles.valUs
 makelink keyThem
 -- Neither side is annexed file; cannot resolve.
 (Nothing, Nothing) -> return Nothing
@@ -127,17 +135,41 @@ resolveMerge' (Just us) them u = do
 makelink key = do
 let dest = variantFile file key
 l <- inRepo $ gitAnnexLink dest key
-ifM isDirect
-( do
-d <- fromRepo gitAnnexMergeDir
-replaceFile (d </> dest) $ makeAnnexLink l
-, replaceFile dest $ makeAnnexLink l
-)
+replacewithlink dest l
 stageSymlink dest =<< hashSymlink l

-{- stage a graft of a directory or file from a branch -}
-graftin b item = Annex.Queue.addUpdateIndex
-=<< fromRepo (UpdateIndex.lsSubTree b item)
+replacewithlink dest link = ifM isDirect
+( do
+d <- fromRepo gitAnnexMergeDir
+replaceFile (d </> dest) $ makeGitLink link
+, replaceFile dest $ makeGitLink link
+)
+
+{- Stage a graft of a directory or file from a branch.
+ -
+ - When there is a conflicted merge where one side is a directory
+ - or file, and the other side is a symlink, git merge always
+ - updates the work tree to contain the non-symlink. So, the
+ - directory or file will already be in the work tree correctly,
+ - and they just need to be staged into place. Do so by copying the
+ - index. (Note that this is also better than calling git-add
+ - because on a crippled filesystem, it preserves any symlink
+ - bits.)
+ -
+ - It's also possible for the branch to have a symlink in it,
+ - which is not a git-annex symlink. In this special case,
+ - git merge does not update the work tree to contain the symlink
+ - from the branch, so we have to do so manually.
+ -}
+graftin b item select select' = do
+Annex.Queue.addUpdateIndex
+=<< fromRepo (UpdateIndex.lsSubTree b item)
+when (select (LsFiles.unmergedBlobType u) == Just SymlinkBlob) $
+case select' (LsFiles.unmergedSha u) of
+Nothing -> noop
+Just sha -> do
+link <- catLink True sha
+replacewithlink item link

 resolveby a = do
 {- Remove conflicted file from index so merge can be resolved. -}
@@ -146,7 +178,7 @@ resolveMerge' (Just us) them u = do
 return (Just file)

 {- git-merge moves conflicting files away to files
- - named something like f~HEAD or f~branch, but the
+ - named something like f~HEAD or f~branch or just f, but the
 - exact name chosen can vary. Once the conflict is resolved,
 - this cruft can be deleted. To avoid deleting legitimate
 - files that look like this, only delete files that are
@@ -163,13 +195,12 @@ cleanConflictCruft resolvedfs top = do
 liftIO $ nukeFile f
 | otherwise = noop
 s = S.fromList resolvedfs
-matchesresolved f = S.member (base f) s
+matchesresolved f = S.member f s || S.member (base f) s
 base f = reverse $ drop 1 $ dropWhile (/= '~') $ reverse f

-commitResolvedMerge :: Annex Bool
-commitResolvedMerge = inRepo $ Git.Command.runBool
-[ Param "commit"
-, Param "--no-verify"
+commitResolvedMerge :: Git.Branch.CommitMode -> Annex Bool
+commitResolvedMerge commitmode = inRepo $ Git.Branch.commitCommand commitmode
+[ Param "--no-verify"
 , Param "-m"
 , Param "git-annex automatic merge conflict fix"
 ]
@@ -92,7 +92,7 @@ getBranch = maybe (hasOrigin >>= go >>= use) return =<< branchsha
 fromMaybe (error $ "failed to create " ++ fromRef name)
 <$> branchsha
 go False = withIndex' True $
-inRepo $ Git.Branch.commitAlways "branch created" fullname []
+inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit "branch created" fullname []
 use sha = do
 setIndexSha sha
 return sha
@@ -252,7 +252,7 @@ commitIndex jl branchref message parents = do
 commitIndex' :: JournalLocked -> Git.Ref -> String -> [Git.Ref] -> Annex ()
 commitIndex' jl branchref message parents = do
 updateIndex jl branchref
-committedref <- inRepo $ Git.Branch.commitAlways message fullname parents
+committedref <- inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit message fullname parents
 setIndexSha committedref
 parentrefs <- commitparents <$> catObject committedref
 when (racedetected branchref parentrefs) $
@@ -389,19 +389,40 @@ stageJournal jl = withIndex $ do
 prepareModifyIndex jl
 g <- gitRepo
 let dir = gitAnnexJournalDir g
-fs <- getJournalFiles jl
-liftIO $ do
+(jlogf, jlogh) <- openjlog
+withJournalHandle $ \jh -> do
 h <- hashObjectStart g
 Git.UpdateIndex.streamUpdateIndex g
-[genstream dir h fs]
+[genstream dir h jh jlogh]
 hashObjectStop h
-return $ liftIO $ mapM_ (removeFile . (dir </>)) fs
+return $ cleanup dir jlogh jlogf
 where
-genstream dir h fs streamer = forM_ fs $ \file -> do
-let path = dir </> file
-sha <- hashFile h path
-streamer $ Git.UpdateIndex.updateIndexLine
-sha FileBlob (asTopFilePath $ fileJournal file)
+genstream dir h jh jlogh streamer = do
+v <- readDirectory jh
+case v of
+Nothing -> return ()
+Just file -> do
+unless (dirCruft file) $ do
+let path = dir </> file
+sha <- hashFile h path
+hPutStrLn jlogh file
+streamer $ Git.UpdateIndex.updateIndexLine
+sha FileBlob (asTopFilePath $ fileJournal file)
+genstream dir h jh jlogh streamer
+ -- Clean up the staged files, as listed in the temp log file.
+ -- The temp file is used to avoid needing to buffer all the
+ -- filenames in memory.
+cleanup dir jlogh jlogf = do
+hFlush jlogh
+hSeek jlogh AbsoluteSeek 0
+stagedfs <- lines <$> hGetContents jlogh
+mapM_ (removeFile . (dir </>)) stagedfs
+hClose jlogh
+nukeFile jlogf
+openjlog = do
+tmpdir <- fromRepo gitAnnexTmpMiscDir
+createAnnexDirectory tmpdir
+liftIO $ openTempFile tmpdir "jlog"

 {- This is run after the refs have been merged into the index,
 - but before the result is committed to the branch.
@@ -471,7 +492,7 @@ performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
 Annex.Queue.flush
 if neednewlocalbranch
 then do
-committedref <- inRepo $ Git.Branch.commitAlways message fullname transitionedrefs
+committedref <- inRepo $ Git.Branch.commitAlways Git.Branch.AutomaticCommit message fullname transitionedrefs
 setIndexSha committedref
 else do
 ref <- getBranch
@@ -15,6 +15,7 @@ module Annex.CatFile (
 catKey,
 catKeyFile,
 catKeyFileHEAD,
+catLink,
 ) where

 import qualified Data.ByteString.Lazy as L
@@ -77,21 +78,25 @@ catFileHandle = do
 catKey :: Ref -> FileMode -> Annex (Maybe Key)
 catKey = catKey' True

-catKey' :: Bool -> Ref -> FileMode -> Annex (Maybe Key)
-catKey' modeguaranteed ref mode
+catKey' :: Bool -> Sha -> FileMode -> Annex (Maybe Key)
+catKey' modeguaranteed sha mode
 | isSymLink mode = do
-l <- fromInternalGitPath . decodeBS <$> get
+l <- catLink modeguaranteed sha
 return $ if isLinkToAnnex l
 then fileKey $ takeFileName l
 else Nothing
 | otherwise = return Nothing
+
+{- Gets a symlink target. -}
+catLink :: Bool -> Sha -> Annex String
+catLink modeguaranteed sha = fromInternalGitPath . decodeBS <$> get
 where
 -- If the mode is not guaranteed to be correct, avoid
 -- buffering the whole file content, which might be large.
 -- 8192 is enough if it really is a symlink.
 get
-| modeguaranteed = catObject ref
-| otherwise = L.take 8192 <$> catObject ref
+| modeguaranteed = catObject sha
+| otherwise = L.take 8192 <$> catObject sha

 {- Looks up the key corresponding to the Ref using the running cat-file.
 -
@@ -218,7 +218,7 @@ getViaTmpUnchecked = finishGetViaTmp (return True)

 getViaTmpChecked :: Annex Bool -> Key -> (FilePath -> Annex Bool) -> Annex Bool
 getViaTmpChecked check key action =
-prepGetViaTmpChecked key $
+prepGetViaTmpChecked key False $
 finishGetViaTmp check key action

 {- Prepares to download a key via a tmp file, and checks that there is
@@ -229,8 +229,8 @@ getViaTmpChecked check key action =
 -
 - Wen there's enough free space, runs the download action.
 -}
-prepGetViaTmpChecked :: Key -> Annex Bool -> Annex Bool
-prepGetViaTmpChecked key getkey = do
+prepGetViaTmpChecked :: Key -> a -> Annex a -> Annex a
+prepGetViaTmpChecked key unabletoget getkey = do
 tmp <- fromRepo $ gitAnnexTmpObjectLocation key

 e <- liftIO $ doesFileExist tmp
@@ -242,7 +242,7 @@ prepGetViaTmpChecked key getkey = do
 -- The tmp file may not have been left writable
 when e $ thawContent tmp
 getkey
-, return False
+, return unabletoget
 )

 finishGetViaTmp :: Annex Bool -> Key -> (FilePath -> Annex Bool) -> Annex Bool
@@ -36,6 +36,7 @@ import Annex.Exception
 import Annex.VariantFile
 import Git.Index
 import Annex.Index
+import Annex.LockFile

 {- Uses git ls-files to find files that need to be committed, and stages
 - them into the index. Returns True if some changes were staged. -}
@@ -150,13 +151,16 @@ addDirect file cache = do
 - directory, and the merge is staged into a copy of the index.
 - Then the work tree is updated to reflect the merge, and
 - finally, the merge is committed and the real index updated.
+ -
+ - A lock file is used to avoid races with any other caller of mergeDirect.
+ -
+ - To avoid other git processes from making change to the index while our
+ - merge is in progress, the index lock file is used as the temp index
+ - file. This is the same as what git does when updating the index
+ - normally.
 -}
-mergeDirect :: Maybe Git.Ref -> Maybe Git.Ref -> Git.Branch -> Annex Bool -> Annex Bool
-mergeDirect startbranch oldref branch resolvemerge = do
- -- Use the index lock file as the temp index file.
- -- This is actually what git does when updating the index,
- -- and so it will prevent other git processes from making
- -- any changes to the index while our merge is in progress.
+mergeDirect :: Maybe Git.Ref -> Maybe Git.Ref -> Git.Branch -> Annex Bool -> Git.Branch.CommitMode -> Annex Bool
+mergeDirect startbranch oldref branch resolvemerge commitmode = exclusively $ do
 reali <- fromRepo indexFile
 tmpi <- fromRepo indexFileLock
 liftIO $ copyFile reali tmpi
@@ -168,19 +172,23 @@ mergeDirect startbranch oldref branch resolvemerge = do
 createDirectoryIfMissing True d

 withIndexFile tmpi $ do
-merged <- stageMerge d branch
+merged <- stageMerge d branch commitmode
 r <- if merged
 then return True
 else resolvemerge
 mergeDirectCleanup d (fromMaybe Git.Sha.emptyTree oldref)
-mergeDirectCommit merged startbranch branch
+mergeDirectCommit merged startbranch branch commitmode

 liftIO $ rename tmpi reali

 return r
+where
+exclusively = withExclusiveLock gitAnnexMergeLock

 {- Stage a merge into the index, avoiding changing HEAD or the current
 - branch. -}
-stageMerge :: FilePath -> Git.Branch -> Annex Bool
-stageMerge d branch = do
+stageMerge :: FilePath -> Git.Branch -> Git.Branch.CommitMode -> Annex Bool
+stageMerge d branch commitmode = do
 -- XXX A bug in git makes stageMerge unsafe to use if the git repo
 -- is configured with core.symlinks=false
 -- Using mergeNonInteractive is not ideal though, since it will
@@ -190,7 +198,7 @@ stageMerge d branch = do
 -- <http://marc.info/?l=git&m=140262402204212&w=2>
 merger <- ifM (coreSymlinks <$> Annex.getGitConfig)
 ( return Git.Merge.stageMerge
-, return Git.Merge.mergeNonInteractive
+, return $ \ref -> Git.Merge.mergeNonInteractive ref commitmode
 )
 inRepo $ \g -> merger branch $
 g { location = Local { gitdir = Git.localGitDir g, worktree = Just d } }
@@ -198,8 +206,8 @@ stageMerge d branch = do
 {- Commits after a direct mode merge is complete, and after the work
 - tree has been updated by mergeDirectCleanup.
 -}
-mergeDirectCommit :: Bool -> Maybe Git.Ref -> Git.Branch -> Annex ()
-mergeDirectCommit allowff old branch = do
+mergeDirectCommit :: Bool -> Maybe Git.Ref -> Git.Branch -> Git.Branch.CommitMode -> Annex ()
+mergeDirectCommit allowff old branch commitmode = do
 void preCommitDirect
 d <- fromRepo Git.localGitDir
 let merge_head = d </> "MERGE_HEAD"
@@ -211,7 +219,7 @@ mergeDirectCommit allowff old branch = do
 msg <- liftIO $
 catchDefaultIO ("merge " ++ fromRef branch) $
 readFile merge_msg
-void $ inRepo $ Git.Branch.commit False msg
+void $ inRepo $ Git.Branch.commit commitmode False msg
 Git.Ref.headRef [Git.Ref.headRef, branch]
 )
 liftIO $ mapM_ nukeFile [merge_head, merge_msg, merge_mode]
@@ -346,7 +354,11 @@ toDirectGen k f = do
 void $ addAssociatedFile k f
 modifyContent loc $ do
 thawContent loc
-replaceFile f $ liftIO . moveFile loc
+replaceFileOr f
+(liftIO . moveFile loc)
+$ \tmp -> do -- rollback
+liftIO (moveFile tmp loc)
+freezeContent loc
 fromdirect loc = do
 replaceFile f $
 liftIO . void . copyFileExternal loc
@@ -17,10 +17,7 @@ import Common.Annex
 import Annex.Exception
 import qualified Git
 import Annex.Perms
-#ifdef mingw32_HOST_OS
-import Utility.WinLock
-#endif
+import Annex.LockFile

 {- Records content for a file in the branch to the journal.
 -
@@ -80,9 +77,18 @@ getJournalFilesStale = do
 getDirectoryContents $ gitAnnexJournalDir g
 return $ filter (`notElem` [".", ".."]) fs

+withJournalHandle :: (DirectoryHandle -> IO a) -> Annex a
+withJournalHandle a = do
+d <- fromRepo gitAnnexJournalDir
+bracketIO (openDirectory d) closeDirectory (liftIO . a)
+
 {- Checks if there are changes in the journal. -}
 journalDirty :: Annex Bool
-journalDirty = not . null <$> getJournalFilesStale
+journalDirty = do
+d <- fromRepo gitAnnexJournalDir
+liftIO $
+(not <$> isDirectoryEmpty d)
+`catchIO` (const $ doesDirectoryExist d)

 {- Produces a filename to use in the journal for a file on the branch.
 -
@@ -112,19 +118,4 @@ data JournalLocked = ProduceJournalLocked
 {- Runs an action that modifies the journal, using locking to avoid
 - contention with other git-annex processes. -}
 lockJournal :: (JournalLocked -> Annex a) -> Annex a
-lockJournal a = do
-lockfile <- fromRepo gitAnnexJournalLock
-createAnnexDirectory $ takeDirectory lockfile
-mode <- annexFileMode
-bracketIO (lock lockfile mode) unlock (const $ a ProduceJournalLocked)
-where
-#ifndef mingw32_HOST_OS
-lock lockfile mode = do
-l <- noUmask mode $ createFile lockfile mode
-waitToSetLock l (WriteLock, AbsoluteSeek, 0, 0)
-return l
-unlock = closeFd
-#else
-lock lockfile _mode = waitToLock $ lockExclusive lockfile
-unlock = dropLock
-#endif
+lockJournal a = withExclusiveLock gitAnnexJournalLock $ a ProduceJournalLocked
@@ -68,6 +68,9 @@ getAnnexLinkTarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
 then ""
 else s

+makeAnnexLink :: LinkTarget -> FilePath -> Annex ()
+makeAnnexLink = makeGitLink
+
 {- Creates a link on disk.
 -
 - On a filesystem that does not support symlinks, writes the link target
@@ -75,8 +78,8 @@ getAnnexLinkTarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
 - it's staged as such, so use addAnnexLink when adding a new file or
 - modified link to git.
 -}
-makeAnnexLink :: LinkTarget -> FilePath -> Annex ()
-makeAnnexLink linktarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
+makeGitLink :: LinkTarget -> FilePath -> Annex ()
+makeGitLink linktarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
 ( liftIO $ do
 void $ tryIO $ removeFile file
 createSymbolicLink linktarget file
Annex/LockFile.hs (new file, 87 lines)
@@ -0,0 +1,87 @@
+{- git-annex lock files.
+ -
+ - Copyright 2012, 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE CPP #-}
+
+module Annex.LockFile (
+lockFileShared,
+unlockFile,
+getLockPool,
+withExclusiveLock,
+) where
+
+import Common.Annex
+import Annex
+import Types.LockPool
+import qualified Git
+import Annex.Exception
+import Annex.Perms
+
+import qualified Data.Map as M
+
+#ifdef mingw32_HOST_OS
+import Utility.WinLock
+#endif
+
+{- Create a specified lock file, and takes a shared lock, which is retained
+ - in the pool. -}
+lockFileShared :: FilePath -> Annex ()
+lockFileShared file = go =<< fromLockPool file
+where
+go (Just _) = noop -- already locked
+go Nothing = do
+#ifndef mingw32_HOST_OS
+mode <- annexFileMode
+lockhandle <- liftIO $ noUmask mode $
+openFd file ReadOnly (Just mode) defaultFileFlags
+liftIO $ waitToSetLock lockhandle (ReadLock, AbsoluteSeek, 0, 0)
+#else
+lockhandle <- liftIO $ waitToLock $ lockShared file
+#endif
+changeLockPool $ M.insert file lockhandle
+
+unlockFile :: FilePath -> Annex ()
+unlockFile file = maybe noop go =<< fromLockPool file
+where
+go lockhandle = do
+#ifndef mingw32_HOST_OS
+liftIO $ closeFd lockhandle
+#else
+liftIO $ dropLock lockhandle
+#endif
+changeLockPool $ M.delete file
+
+getLockPool :: Annex LockPool
+getLockPool = getState lockpool
+
+fromLockPool :: FilePath -> Annex (Maybe LockHandle)
+fromLockPool file = M.lookup file <$> getLockPool
+
+changeLockPool :: (LockPool -> LockPool) -> Annex ()
+changeLockPool a = do
+m <- getLockPool
+changeState $ \s -> s { lockpool = a m }
+
+{- Runs an action with an exclusive lock held. If the lock is already
+ - held, blocks until it becomes free. -}
+withExclusiveLock :: (Git.Repo -> FilePath) -> Annex a -> Annex a
+withExclusiveLock getlockfile a = do
+lockfile <- fromRepo getlockfile
+createAnnexDirectory $ takeDirectory lockfile
+mode <- annexFileMode
+bracketIO (lock lockfile mode) unlock (const a)
+where
+#ifndef mingw32_HOST_OS
+lock lockfile mode = do
+l <- noUmask mode $ createFile lockfile mode
+waitToSetLock l (WriteLock, AbsoluteSeek, 0, 0)
+return l
+unlock = closeFd
+#else
+lock lockfile _mode = waitToLock $ lockExclusive lockfile
+unlock = dropLock
+#endif
@@ -1,60 +0,0 @@
-{- git-annex lock pool
- -
- - Copyright 2012, 2014 Joey Hess <joey@kitenet.net>
- -
- - Licensed under the GNU GPL version 3 or higher.
- -}
-
-{-# LANGUAGE CPP #-}
-
-module Annex.LockPool where
-
-import Common.Annex
-import Annex
-import Types.LockPool
-
-import qualified Data.Map as M
-
-#ifndef mingw32_HOST_OS
-import Annex.Perms
-#else
-import Utility.WinLock
-#endif
-
-{- Create a specified lock file, and takes a shared lock. -}
-lockFile :: FilePath -> Annex ()
-lockFile file = go =<< fromPool file
-where
-go (Just _) = noop -- already locked
-go Nothing = do
-#ifndef mingw32_HOST_OS
-mode <- annexFileMode
-lockhandle <- liftIO $ noUmask mode $
-openFd file ReadOnly (Just mode) defaultFileFlags
-liftIO $ waitToSetLock lockhandle (ReadLock, AbsoluteSeek, 0, 0)
-#else
-lockhandle <- liftIO $ waitToLock $ lockShared file
-#endif
-changePool $ M.insert file lockhandle
-
-unlockFile :: FilePath -> Annex ()
-unlockFile file = maybe noop go =<< fromPool file
-where
-go lockhandle = do
-#ifndef mingw32_HOST_OS
-liftIO $ closeFd lockhandle
-#else
-liftIO $ dropLock lockhandle
-#endif
-changePool $ M.delete file
-
-getPool :: Annex LockPool
-getPool = getState lockpool
-
-fromPool :: FilePath -> Annex (Maybe LockHandle)
-fromPool file = M.lookup file <$> getPool
-
-changePool :: (LockPool -> LockPool) -> Annex ()
-changePool a = do
-m <- getPool
-changeState $ \s -> s { lockpool = a m }
Annex/MakeRepo.hs (new file, 88 lines)
@@ -0,0 +1,88 @@
+{- making local repositories (used by webapp mostly)
+ -
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Annex.MakeRepo where
+
+import Assistant.WebApp.Common
+import Annex.Init
+import qualified Git.Construct
+import qualified Git.Config
+import qualified Git.Command
+import qualified Git.Branch
+import qualified Annex
+import Annex.UUID
+import Annex.Direct
+import Types.StandardGroups
+import Logs.PreferredContent
+import qualified Annex.Branch
+
+{- Makes a new git repository. Or, if a git repository already
+ - exists, returns False. -}
+makeRepo :: FilePath -> Bool -> IO Bool
+makeRepo path bare = ifM (probeRepoExists path)
+( return False
+, do
+(transcript, ok) <-
+processTranscript "git" (toCommand params) Nothing
+unless ok $
+error $ "git init failed!\nOutput:\n" ++ transcript
+return True
+)
+where
+baseparams = [Param "init", Param "--quiet"]
+params
+| bare = baseparams ++ [Param "--bare", File path]
+| otherwise = baseparams ++ [File path]
+
+{- Runs an action in the git repository in the specified directory. -}
+inDir :: FilePath -> Annex a -> IO a
+inDir dir a = do
+state <- Annex.new =<< Git.Config.read =<< Git.Construct.fromPath dir
+Annex.eval state a
+
+{- Creates a new repository, and returns its UUID. -}
+initRepo :: Bool -> Bool -> FilePath -> Maybe String -> Maybe StandardGroup -> IO UUID
+initRepo True primary_assistant_repo dir desc mgroup = inDir dir $ do
+initRepo' desc mgroup
+{- Initialize the master branch, so things that expect
+ - to have it will work, before any files are added. -}
+unlessM (Git.Config.isBare <$> gitRepo) $
+void $ inRepo $ Git.Branch.commitCommand Git.Branch.AutomaticCommit
+[ Param "--quiet"
+, Param "--allow-empty"
+, Param "-m"
+, Param "created repository"
+]
+{- Repositories directly managed by the assistant use direct mode.
+ -
+ - Automatic gc is disabled, as it can be slow. Insted, gc is done
+ - once a day.
+ -}
+when primary_assistant_repo $ do
+setDirect True
+inRepo $ Git.Command.run
+[Param "config", Param "gc.auto", Param "0"]
+getUUID
+{- Repo already exists, could be a non-git-annex repo though so
+ - still initialize it. -}
+initRepo False _ dir desc mgroup = inDir dir $ do
+initRepo' desc mgroup
+getUUID
+
+initRepo' :: Maybe String -> Maybe StandardGroup -> Annex ()
+initRepo' desc mgroup = unlessM isInitialized $ do
+initialize desc
+u <- getUUID
+maybe noop (defaultStandardGroup u) mgroup
+{- Ensure branch gets committed right away so it is
+ - available for merging immediately. -}
+Annex.Branch.commit "update"
+
+{- Checks if a git repo exists at a location. -}
+probeRepoExists :: FilePath -> IO Bool
+probeRepoExists dir = isJust <$>
+catchDefaultIO Nothing (Git.Construct.checkForRepo dir)
@@ -7,6 +7,7 @@

 module Annex.MetaData (
 genMetaData,
+dateMetaData,
 module X
 ) where

@@ -37,20 +38,18 @@ genMetaData :: Key -> FilePath -> FileStatus -> Annex ()
 genMetaData key file status = do
 maybe noop (flip copyMetaData key) =<< catKeyFileHEAD file
 whenM (annexGenMetaData <$> Annex.getGitConfig) $ do
-metadata <- getCurrentMetaData key
-let metadata' = genMetaData' status metadata
-unless (metadata' == emptyMetaData) $
-addMetaData key metadata'
+curr <- getCurrentMetaData key
+addMetaData key (dateMetaData mtime curr)
+where
+mtime = posixSecondsToUTCTime $ realToFrac $ modificationTime status

-{- Generates metadata from the FileStatus.
+{- Generates metadata for a file's date stamp.
 - Does not overwrite any existing metadata values. -}
-genMetaData' :: FileStatus -> MetaData -> MetaData
-genMetaData' status old = MetaData $ M.fromList $ filter isnew
+dateMetaData :: UTCTime -> MetaData -> MetaData
+dateMetaData mtime old = MetaData $ M.fromList $ filter isnew
 [ (yearMetaField, S.singleton $ toMetaValue $ show y)
 , (monthMetaField, S.singleton $ toMetaValue $ show m)
 ]
 where
 isnew (f, _) = S.null (currentMetaDataValues f old)
-(y, m, _d) = toGregorian $ utctDay $
-posixSecondsToUTCTime $ realToFrac $
-modificationTime status
+(y, m, _d) = toGregorian $ utctDay $ mtime
@@ -23,11 +23,16 @@ import Annex.Exception
 - Throws an IO exception when it was unable to replace the file.
 -}
 replaceFile :: FilePath -> (FilePath -> Annex ()) -> Annex ()
-replaceFile file a = do
+replaceFile file action = replaceFileOr file action (liftIO . nukeFile)
+
+{- If unable to replace the file with the temp file, runs the
+ - rollback action, which is responsible for cleaning up the temp file. -}
+replaceFileOr :: FilePath -> (FilePath -> Annex ()) -> (FilePath -> Annex ()) -> Annex ()
+replaceFileOr file action rollback = do
 tmpdir <- fromRepo gitAnnexTmpMiscDir
 void $ createAnnexDirectory tmpdir
-bracketIO (setup tmpdir) nukeFile $ \tmpfile -> do
-a tmpfile
+bracketAnnex (liftIO $ setup tmpdir) rollback $ \tmpfile -> do
+action tmpfile
 liftIO $ catchIO (rename tmpfile file) (fallback tmpfile)
 where
 setup tmpdir = do
@@ -25,7 +25,7 @@ import Data.Hash.MD5
 import System.Exit

 import Common.Annex
-import Annex.LockPool
+import Annex.LockFile
 import qualified Build.SysConfig as SysConfig
 import qualified Annex
 import qualified Git
@@ -119,13 +119,13 @@ prepSocket socketfile = do
 -- If the lock pool is empty, this is the first ssh of this
 -- run. There could be stale ssh connections hanging around
 -- from a previous git-annex run that was interrupted.
-whenM (not . any isLock . M.keys <$> getPool)
+whenM (not . any isLock . M.keys <$> getLockPool)
 sshCleanup
 -- Cleanup at end of this run.
 Annex.addCleanup SshCachingCleanup sshCleanup

 liftIO $ createDirectoryIfMissing True $ parentDir socketfile
-lockFile $ socket2lock socketfile
+lockFileShared $ socket2lock socketfile

 enumSocketFiles :: Annex [FilePath]
 enumSocketFiles = go =<< sshCacheDir
@@ -433,7 +433,7 @@ genViewBranch :: View -> Annex () -> Annex Git.Branch
 genViewBranch view a = withIndex $ do
 a
 let branch = branchView view
-void $ inRepo $ Git.Branch.commit True (fromRef branch) branch []
+void $ inRepo $ Git.Branch.commit Git.Branch.AutomaticCommit True (fromRef branch) branch []
 return branch

 {- Runs an action using the view index file.
Assistant.hs (44 changed lines)
@@ -52,9 +52,12 @@ import qualified Utility.Daemon
 import Utility.ThreadScheduler
 import Utility.HumanTime
 import qualified Build.SysConfig as SysConfig
-#ifndef mingw32_HOST_OS
-import Utility.LogFile
 import Annex.Perms
+import Utility.LogFile
+#ifdef mingw32_HOST_OS
+import Utility.Env
+import Config.Files
+import System.Environment (getArgs)
 #endif

 import System.Log.Logger
@@ -72,19 +75,18 @@ startDaemon :: Bool -> Bool -> Maybe Duration -> Maybe String -> Maybe HostName
 startDaemon assistant foreground startdelay cannotrun listenhost startbrowser = do
 Annex.changeState $ \s -> s { Annex.daemon = True }
 pidfile <- fromRepo gitAnnexPidFile
-#ifndef mingw32_HOST_OS
 logfile <- fromRepo gitAnnexLogFile
+liftIO $ debugM desc $ "logging to " ++ logfile
+#ifndef mingw32_HOST_OS
 createAnnexDirectory (parentDir logfile)
-logfd <- liftIO $ openLog logfile
+logfd <- liftIO $ handleToFd =<< openLog logfile
 if foreground
 then do
 origout <- liftIO $ catchMaybeIO $
 fdToHandle =<< dup stdOutput
 origerr <- liftIO $ catchMaybeIO $
 fdToHandle =<< dup stdError
-let undaemonize a = do
-debugM desc $ "logging to " ++ logfile
-Utility.Daemon.foreground logfd (Just pidfile) a
+let undaemonize = Utility.Daemon.foreground logfd (Just pidfile)
 start undaemonize $
 case startbrowser of
 Nothing -> Nothing
@@ -92,13 +94,29 @@ startDaemon assistant foreground startdelay cannotrun listenhost startbrowser =
 else
 start (Utility.Daemon.daemonize logfd (Just pidfile) False) Nothing
 #else
- -- Windows is always foreground, and has no log file.
+ -- Windows doesn't daemonize, but does redirect output to the
+ -- log file. The only way to do so is to restart the program.
 when (foreground || not foreground) $ do
-liftIO $ Utility.Daemon.lockPidFile pidfile
-start id $ do
-case startbrowser of
-Nothing -> Nothing
-Just a -> Just $ a Nothing Nothing
+let flag = "GIT_ANNEX_OUTPUT_REDIR"
+createAnnexDirectory (parentDir logfile)
+ifM (liftIO $ isNothing <$> getEnv flag)
+( liftIO $ withFile devNull WriteMode $ \nullh -> do
+loghandle <- openLog logfile
+e <- getEnvironment
+cmd <- readProgramFile
+ps <- getArgs
+(_, _, _, pid) <- createProcess (proc cmd ps)
+{ env = Just (addEntry flag "1" e)
+, std_in = UseHandle nullh
+, std_out = UseHandle loghandle
+, std_err = UseHandle loghandle
+}
+exitWith =<< waitForProcess pid
+, start (Utility.Daemon.foreground (Just pidfile)) $
+case startbrowser of
+Nothing -> Nothing
+Just a -> Just $ a Nothing Nothing
+)
 #endif
 where
 desc
@@ -92,9 +92,9 @@ installNautilus :: FilePath -> IO ()
 #ifdef linux_HOST_OS
 installNautilus program = do
 scriptdir <- (\d -> d </> "nautilus" </> "scripts") <$> userDataDir
-whenM (doesDirectoryExist scriptdir) $ do
+createDirectoryIfMissing True scriptdir
 genscript scriptdir "get"
 genscript scriptdir "drop"
 where
 genscript scriptdir action =
 installscript (scriptdir </> scriptname action) $ unlines
@@ -96,7 +96,7 @@ reconnectRemotes notifypushes rs = void $ do
 =<< fromMaybe [] . M.lookup (Remote.uuid r) . connectRemoteNotifiers
 <$> getDaemonStatus

-{- Updates the local sync branch, then pushes it to all remotes, in
+{- Pushes the local sync branch to all remotes, in
 - parallel, along with the git-annex branch. This is the same
 - as "git annex sync", except in parallel, and will co-exist with use of
 - "git annex sync".
@@ -148,7 +148,6 @@ pushToRemotes' now notifypushes remotes = do
 go _ _ _ _ [] = return [] -- no remotes, so nothing to do
 go shouldretry (Just branch) g u rs = do
 debug ["pushing to", show rs]
-liftIO $ Command.Sync.updateBranch (Command.Sync.syncBranch branch) g
 (succeeded, failed) <- liftIO $ inParallel (push g branch) rs
 updatemap succeeded []
 if null failed
@@ -35,6 +35,7 @@ import qualified Annex
 import Utility.InodeCache
 import Annex.Content.Direct
 import qualified Command.Sync
+import qualified Git.Branch

 import Data.Time.Clock
 import Data.Tuple.Utils
@@ -219,7 +220,11 @@ commitStaged = do
 v <- tryAnnex Annex.Queue.flush
 case v of
 Left _ -> return False
-Right _ -> Command.Sync.commitStaged ""
+Right _ -> do
+ok <- Command.Sync.commitStaged Git.Branch.AutomaticCommit ""
+when ok $
+Command.Sync.updateSyncBranch =<< inRepo Git.Branch.current
+return ok

 {- OSX needs a short delay after a file is added before locking it down,
 - when using a non-direct mode repository, as pasting a file seems to
@@ -78,12 +78,13 @@ onChange file
 changedbranch = fileToBranch file

 mergecurrent (Just current)
-| equivBranches changedbranch current = do
-debug
-[ "merging", Git.fromRef changedbranch
-, "into", Git.fromRef current
-]
-void $ liftAnnex $ autoMergeFrom changedbranch (Just current)
+| equivBranches changedbranch current =
+whenM (liftAnnex $ inRepo $ Git.Branch.changed current changedbranch) $ do
+debug
+[ "merging", Git.fromRef changedbranch
+, "into", Git.fromRef current
+]
+void $ liftAnnex $ autoMergeFrom changedbranch (Just current) Git.Branch.AutomaticCommit
 mergecurrent _ = noop

 handleDesynced = case fromTaggedBranch changedbranch of
@@ -23,7 +23,7 @@ import Assistant.TransferQueue
 import Assistant.Types.UrlRenderer
 import qualified Annex.Branch
 import qualified Git.LsFiles
-import qualified Git.Command
+import qualified Git.Command.Batch
 import qualified Git.Config
 import Utility.ThreadScheduler
 import qualified Assistant.Threads.Watcher as Watcher
@@ -167,7 +167,7 @@ dailyCheck urlrenderer = do
 - to have a lot of small objects and they should not be a
 - significant size. -}
 when (Git.Config.getMaybe "gc.auto" g == Just "0") $
-liftIO $ void $ Git.Command.runBatch batchmaker
+liftIO $ void $ Git.Command.Batch.run batchmaker
 [ Param "-c", Param "gc.auto=670000"
 , Param "gc"
 , Param "--auto"
@@ -224,7 +224,7 @@ checkLogSize n = do
 totalsize <- liftIO $ sum <$> mapM filesize logs
 when (totalsize > 2 * oneMegabyte) $ do
 notice ["Rotated logs due to size:", show totalsize]
-liftIO $ openLog f >>= redirLog
+liftIO $ openLog f >>= handleToFd >>= redirLog
 when (n < maxLogs + 1) $ do
 df <- liftIO $ getDiskFree $ takeDirectory f
 case df of
@@ -33,6 +33,7 @@ import Utility.ThreadScheduler
 import Utility.Tmp
 import Utility.UserInfo
 import Utility.Gpg
+import Utility.FileMode
 import qualified Utility.Lsof as Lsof
 import qualified Build.SysConfig
 import qualified Utility.Url as Url
@@ -348,7 +349,7 @@ verifyDistributionSig :: FilePath -> IO Bool
 verifyDistributionSig sig = do
 p <- readProgramFile
 if isAbsolute p
-then withTmpDir "git-annex-gpg.tmp" $ \gpgtmp -> do
+then withUmask 0o0077 $ withTmpDir "git-annex-gpg.tmp" $ \gpgtmp -> do
 let trustedkeys = takeDirectory p </> "trustedkeys.gpg"
 boolSystem gpgcmd
 [ Param "--no-default-keyring"
@ -14,13 +14,11 @@ import Assistant.WebApp.Gpg
|
||||||
import Assistant.WebApp.MakeRemote
|
import Assistant.WebApp.MakeRemote
|
||||||
import Assistant.Sync
|
import Assistant.Sync
|
||||||
import Assistant.Restart
|
import Assistant.Restart
|
||||||
import Annex.Init
|
import Annex.MakeRepo
|
||||||
import qualified Git
|
import qualified Git
|
||||||
import qualified Git.Construct
|
|
||||||
import qualified Git.Config
|
import qualified Git.Config
|
||||||
import qualified Git.Command
|
import qualified Git.Command
|
||||||
import qualified Git.Branch
|
import qualified Git.Branch
|
||||||
import qualified Annex
|
|
||||||
import Config.Files
|
import Config.Files
|
||||||
import Utility.FreeDesktop
|
import Utility.FreeDesktop
|
||||||
import Utility.DiskFree
|
import Utility.DiskFree
|
||||||
|
@ -30,14 +28,12 @@ import Utility.Mounts
|
||||||
import Utility.DataUnits
|
import Utility.DataUnits
|
||||||
import Remote (prettyUUID)
|
import Remote (prettyUUID)
|
||||||
import Annex.UUID
|
import Annex.UUID
|
||||||
import Annex.Direct
|
|
||||||
import Types.StandardGroups
|
import Types.StandardGroups
|
||||||
import Logs.PreferredContent
|
import Logs.PreferredContent
|
||||||
import Logs.UUID
|
import Logs.UUID
|
||||||
import Utility.UserInfo
|
import Utility.UserInfo
|
||||||
import Config
|
import Config
|
||||||
import Utility.Gpg
|
import Utility.Gpg
|
||||||
import qualified Annex.Branch
|
|
||||||
import qualified Remote.GCrypt as GCrypt
|
import qualified Remote.GCrypt as GCrypt
|
||||||
import qualified Types.Remote
|
import qualified Types.Remote
|
||||||
|
|
||||||
|
@@ -413,69 +409,6 @@ startFullAssistant path repogroup setup = do
 fromJust $ postFirstRun webapp
 redirect $ T.pack url
 
-{- Makes a new git repository. Or, if a git repository already
-- exists, returns False. -}
-makeRepo :: FilePath -> Bool -> IO Bool
-makeRepo path bare = ifM (probeRepoExists path)
-( return False
-, do
-(transcript, ok) <-
-processTranscript "git" (toCommand params) Nothing
-unless ok $
-error $ "git init failed!\nOutput:\n" ++ transcript
-return True
-)
-where
-baseparams = [Param "init", Param "--quiet"]
-params
-| bare = baseparams ++ [Param "--bare", File path]
-| otherwise = baseparams ++ [File path]
-
-{- Runs an action in the git repository in the specified directory. -}
-inDir :: FilePath -> Annex a -> IO a
-inDir dir a = do
-state <- Annex.new =<< Git.Config.read =<< Git.Construct.fromPath dir
-Annex.eval state a
-
-{- Creates a new repository, and returns its UUID. -}
-initRepo :: Bool -> Bool -> FilePath -> Maybe String -> Maybe StandardGroup -> IO UUID
-initRepo True primary_assistant_repo dir desc mgroup = inDir dir $ do
-initRepo' desc mgroup
-{- Initialize the master branch, so things that expect
-- to have it will work, before any files are added. -}
-unlessM (Git.Config.isBare <$> gitRepo) $
-void $ inRepo $ Git.Command.runBool
-[ Param "commit"
-, Param "--quiet"
-, Param "--allow-empty"
-, Param "-m"
-, Param "created repository"
-]
-{- Repositories directly managed by the assistant use direct mode.
--
-- Automatic gc is disabled, as it can be slow. Insted, gc is done
-- once a day.
--}
-when primary_assistant_repo $ do
-setDirect True
-inRepo $ Git.Command.run
-[Param "config", Param "gc.auto", Param "0"]
-getUUID
-{- Repo already exists, could be a non-git-annex repo though so
-- still initialize it. -}
-initRepo False _ dir desc mgroup = inDir dir $ do
-initRepo' desc mgroup
-getUUID
-
-initRepo' :: Maybe String -> Maybe StandardGroup -> Annex ()
-initRepo' desc mgroup = unlessM isInitialized $ do
-initialize desc
-u <- getUUID
-maybe noop (defaultStandardGroup u) mgroup
-{- Ensure branch gets committed right away so it is
-- available for merging immediately. -}
-Annex.Branch.commit "update"
-
 {- Checks if the user can write to a directory.
 -
 - The directory may be in the process of being created; if so
@@ -486,11 +419,6 @@ canWrite dir = do
 (return dir, return $ parentDir dir)
 catchBoolIO $ fileAccess tocheck False True False
 
-{- Checks if a git repo exists at a location. -}
-probeRepoExists :: FilePath -> IO Bool
-probeRepoExists dir = isJust <$>
-catchDefaultIO Nothing (Git.Construct.checkForRepo dir)
-
 {- Gets the UUID of the git repo at a location, which may not exist, or
 - not be a git-annex repo. -}
 probeUUID :: FilePath -> IO (Maybe UUID)
@@ -118,20 +118,22 @@ openFileBrowser = do
 path <- liftAnnex $ fromRepo Git.repoPath
 #ifdef darwin_HOST_OS
 let cmd = "open"
-let params = [Param path]
+let p = proc cmd [path]
 #else
 #ifdef mingw32_HOST_OS
+{- Changing to the directory and then opening . works around
+- spaces in directory name, etc. -}
 let cmd = "cmd"
-let params = [Param $ "/c start " ++ path]
+let p = (proc cmd ["/c start ."]) { cwd = Just path }
 #else
 let cmd = "xdg-open"
-let params = [Param path]
+let p = proc cmd [path]
 #endif
 #endif
 ifM (liftIO $ inPath cmd)
 ( do
 let run = void $ liftIO $ forkIO $ void $
-boolSystem cmd params
+createProcess p
 run
 #ifdef mingw32_HOST_OS
 {- On windows, if the file browser is not
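The boolSystem-to-createProcess switch above matters mostly on Windows, where starting cmd inside the repository directory and opening "." avoids quoting problems with spaces in the path. A small standalone sketch of the proc/cwd pattern, using xdg-open and an example directory rather than the commit's exact commands:

import Control.Monad (void)
import System.Process

-- Run a file browser from inside the given directory, the way the patch
-- does for Windows. "xdg-open" and "/tmp" are illustrative choices only.
openBrowserOn :: FilePath -> IO ()
openBrowserOn dir = void $ createProcess ((proc "xdg-open" ["."]) { cwd = Just dir })

main :: IO ()
main = openBrowserOn "/tmp"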
@@ -154,9 +154,11 @@ data RemovableDrive = RemovableDrive
 data RepoKey = RepoKey KeyId | NoRepoKey
 deriving (Read, Show, Eq, Ord)
 
+#if ! MIN_VERSION_path_pieces(0,1,4)
 instance PathPiece Bool where
 toPathPiece = pack . show
 fromPathPiece = readish . unpack
+#endif
 
 instance PathPiece RemovableDrive where
 toPathPiece = pack . show
@@ -44,6 +44,7 @@ genBackend hash = Just Backend
 , getKey = keyValue hash
 , fsckKey = Just $ checkKeyChecksum hash
 , canUpgradeKey = Just needsUpgrade
+, fastMigrate = Just trivialMigrate
 }
 
 genBackendE :: Hash -> Maybe Backend
@@ -129,6 +130,15 @@ needsUpgrade :: Key -> Bool
 needsUpgrade key = "\\" `isPrefixOf` keyHash key ||
 any (not . validExtension) (takeExtensions $ keyName key)
 
+{- Fast migration from hashE to hash backend. (Optimisation) -}
+trivialMigrate :: Key -> Backend -> Maybe Key
+trivialMigrate oldkey newbackend
+| keyBackendName oldkey == name newbackend ++ "E" = Just $ oldkey
+{ keyName = keyHash oldkey
+, keyBackendName = name newbackend
+}
+| otherwise = Nothing
+
 hashFile :: Hash -> FilePath -> Integer -> Annex String
 hashFile hash file filesize = liftIO $ go hash
 where
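A minimal standalone sketch of the trivialMigrate idea above (not part of the commit): the Key type is reduced to the two fields the function touches, keyHash is simplified to dropping the file extension, and the target backend is named by a plain String. Only the guard-plus-record-update shape mirrors the patch.

-- Two-field stand-in for git-annex's Key; the real type has more fields.
data Key = Key { keyName :: String, keyBackendName :: String }
    deriving (Show)

-- Simplified stand-in for keyHash: drop the extension from an *E key's name.
keyHash :: Key -> String
keyHash = takeWhile (/= '.') . keyName

-- Same shape as the trivialMigrate added above: a SHA256E key migrates to
-- SHA256 by dropping the extension; anything else has no fast path.
trivialMigrate :: Key -> String -> Maybe Key
trivialMigrate oldkey newbackend
    | keyBackendName oldkey == newbackend ++ "E" = Just $ oldkey
        { keyName = keyHash oldkey
        , keyBackendName = newbackend
        }
    | otherwise = Nothing

main :: IO ()
main = print $ trivialMigrate (Key "c0ffeedeadbeef.mp3" "SHA256E") "SHA256"
-- Just (Key {keyName = "c0ffeedeadbeef", keyBackendName = "SHA256"})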
@@ -24,6 +24,7 @@ backend = Backend
 , getKey = const $ return Nothing
 , fsckKey = Nothing
 , canUpgradeKey = Nothing
+, fastMigrate = Nothing
 }
 
 {- Every unique url has a corresponding key. -}
@@ -22,6 +22,7 @@ backend = Backend
 , getKey = keyValue
 , fsckKey = Nothing
 , canUpgradeKey = Nothing
+, fastMigrate = Nothing
 }
 
 {- The key includes the file size, modification time, and the
Build/BuildVersion.hs (new file, 6 additions)
@@ -0,0 +1,6 @@
+{- Outputs the version of git-annex that was built, for use by
+- autobuilders. Note that this includes the git rev. -}
+
+import Build.Version
+
+main = putStr =<< getVersion
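Since the version printed by Build/BuildVersion.hs includes the git rev, consumers that want a plain version strip everything from the first '-', as the distribution updater later in this commit does with takeWhile (/= '-'). A tiny illustration with made-up version strings:

-- Drop the "-g<rev>" style suffix that a non-release build appends to the
-- version (the example strings below are fabricated).
stripGitRev :: String -> String
stripGitRev = takeWhile (/= '-')

main :: IO ()
main = do
    putStrLn (stripGitRev "5.20140717-g0a1b2c3") -- prints 5.20140717
    putStrLn (stripGitRev "5.20140717")          -- release builds are unchanged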
@@ -17,7 +17,7 @@ import qualified Git.Version
 
 tests :: [TestCase]
 tests =
-[ TestCase "version" getVersion
+[ TestCase "version" (Config "packageversion" . StringConfig <$> getVersion)
 , TestCase "UPGRADE_LOCATION" getUpgradeLocation
 , TestCase "git" $ requireCmd "git" "git --version >/dev/null"
 , TestCase "git version" getGitVersion
@@ -60,7 +60,7 @@ shaTestCases l = map make l
 Config key . MaybeStringConfig <$> search (shacmds n)
 where
 key = "sha" ++ show n
 search [] = return Nothing
 search (c:cmds) = do
 sha <- externalSHA c n "/dev/null"
 if sha == Right knowngood
@@ -1,6 +1,9 @@
-{- Builds distributon info files for each git-annex release in a directory
-- tree, which must itself be part of a git-annex repository. Only files
-- that are present have their info file created.
+{- Downloads git-annex autobuilds and installs them into the git-annex
+- repository in ~/lib/downloads that is used to distribute git-annex
+- releases.
+-
+- Generates info files, containing the version (of the corresponding file
+- from the autobuild).
 -
 - Also gpg signs the files.
 -}
@ -9,25 +12,87 @@ import Common.Annex
|
||||||
import Types.Distribution
|
import Types.Distribution
|
||||||
import Build.Version
|
import Build.Version
|
||||||
import Utility.UserInfo
|
import Utility.UserInfo
|
||||||
import Utility.Path
|
import Utility.Url
|
||||||
import qualified Git.Construct
|
import qualified Git.Construct
|
||||||
import qualified Annex
|
import qualified Annex
|
||||||
import Annex.Content
|
import Annex.Content
|
||||||
import Backend
|
import Backend
|
||||||
import Git.Command
|
import Git.Command
|
||||||
|
|
||||||
|
import Data.Default
|
||||||
import Data.Time.Clock
|
import Data.Time.Clock
|
||||||
|
import Data.Char
|
||||||
|
|
||||||
-- git-annex distribution signing key (for Joey Hess)
|
-- git-annex distribution signing key (for Joey Hess)
|
||||||
signingKey :: String
|
signingKey :: String
|
||||||
signingKey = "89C809CB"
|
signingKey = "89C809CB"
|
||||||
|
|
||||||
main = do
|
-- URL to an autobuilt git-annex file, and the place to install
|
||||||
state <- Annex.new =<< Git.Construct.fromPath =<< getRepoDir
|
-- it in the repository.
|
||||||
Annex.eval state makeinfos
|
autobuilds :: [(URLString, FilePath)]
|
||||||
|
autobuilds =
|
||||||
|
(map linuxarch ["i386", "amd64", "armel"]) ++
|
||||||
|
(map androidversion ["4.0", "4.3"]) ++
|
||||||
|
[ (autobuild "x86_64-apple-mavericks/git-annex.dmg", "git-annex/OSX/current/10.9_Mavericks/git-annex.dmg")
|
||||||
|
, (autobuild "windows/git-annex-installer.exe", "git-annex/windows/current/git-annex-installer.exe")
|
||||||
|
]
|
||||||
|
where
|
||||||
|
linuxarch a =
|
||||||
|
( autobuild (a ++ "/git-annex-standalone-" ++ a ++ ".tar.gz")
|
||||||
|
, "git-annex/linux/current/git-annex-standalone-" ++ a ++ ".tar.gz"
|
||||||
|
)
|
||||||
|
androidversion v =
|
||||||
|
( autobuild ("android/" ++ v ++ "/git-annex.apk")
|
||||||
|
, "git-annex/android/current/" ++ v ++ "/git-annex.apk"
|
||||||
|
)
|
||||||
|
autobuild f = "https://downloads.kitenet.net/git-annex/autobuild/" ++ f
|
||||||
|
|
||||||
makeinfos :: Annex ()
|
main :: IO ()
|
||||||
makeinfos = do
|
main = do
|
||||||
|
repodir <- getRepoDir
|
||||||
|
updated <- catMaybes <$> mapM (getbuild repodir) autobuilds
|
||||||
|
state <- Annex.new =<< Git.Construct.fromPath repodir
|
||||||
|
Annex.eval state (makeinfos updated)
|
||||||
|
|
||||||
|
-- Download a build from the autobuilder, and return its version.
|
||||||
|
-- It's very important that the version matches the build, otherwise
|
||||||
|
-- auto-upgrades can loop reatedly. So, check build-version before
|
||||||
|
-- and after downloading the file.
|
||||||
|
getbuild :: FilePath -> (URLString, FilePath) -> IO (Maybe (FilePath, Version))
|
||||||
|
getbuild repodir (url, f) = do
|
||||||
|
bv1 <- getbv
|
||||||
|
let dest = repodir </> f
|
||||||
|
let tmp = dest ++ ".tmp"
|
||||||
|
nukeFile tmp
|
||||||
|
createDirectoryIfMissing True (parentDir dest)
|
||||||
|
let oops s = do
|
||||||
|
nukeFile tmp
|
||||||
|
putStrLn $ "*** " ++ s
|
||||||
|
return Nothing
|
||||||
|
ifM (download url tmp def)
|
||||||
|
( do
|
||||||
|
bv2 <- getbv
|
||||||
|
case bv2 of
|
||||||
|
Nothing -> oops $ "no build-version file for " ++ url
|
||||||
|
(Just v)
|
||||||
|
| bv2 == bv1 -> do
|
||||||
|
nukeFile dest
|
||||||
|
renameFile tmp dest
|
||||||
|
-- remove git rev part of version
|
||||||
|
let v' = takeWhile (/= '-') v
|
||||||
|
return $ Just (f, v')
|
||||||
|
| otherwise -> oops $ "build version changed while downloading " ++ url ++ " " ++ show (bv1, bv2)
|
||||||
|
, oops $ "failed to download " ++ url
|
||||||
|
)
|
||||||
|
where
|
||||||
|
bvurl = takeDirectory url ++ "/build-version"
|
||||||
|
getbv = do
|
||||||
|
bv <- catchDefaultIO "" $ readProcess "curl" ["--silent", bvurl]
|
||||||
|
return $ if null bv || any (not . versionchar) bv then Nothing else Just bv
|
||||||
|
versionchar c = isAlphaNum c || c == '.' || c == '-'
|
||||||
|
|
||||||
|
makeinfos :: [(FilePath, Version)] -> Annex ()
|
||||||
|
makeinfos updated = do
|
||||||
version <- liftIO getChangelogVersion
|
version <- liftIO getChangelogVersion
|
||||||
void $ inRepo $ runBool
|
void $ inRepo $ runBool
|
||||||
[ Param "commit"
|
[ Param "commit"
|
||||||
|
@ -37,25 +102,24 @@ makeinfos = do
|
||||||
]
|
]
|
||||||
basedir <- liftIO getRepoDir
|
basedir <- liftIO getRepoDir
|
||||||
now <- liftIO getCurrentTime
|
now <- liftIO getCurrentTime
|
||||||
liftIO $ putStrLn $ "building info files for version " ++ version ++ " in " ++ basedir
|
liftIO $ putStrLn $ "building info files in " ++ basedir
|
||||||
fs <- liftIO $ dirContentsRecursiveSkipping (const False) True (basedir </> "git-annex")
|
forM_ updated $ \(f, bv) -> do
|
||||||
forM_ fs $ \f -> do
|
v <- lookupFile (basedir </> f)
|
||||||
v <- lookupFile f
|
|
||||||
case v of
|
case v of
|
||||||
Nothing -> noop
|
Nothing -> noop
|
||||||
Just k -> whenM (inAnnex k) $ do
|
Just k -> whenM (inAnnex k) $ do
|
||||||
liftIO $ putStrLn f
|
liftIO $ putStrLn f
|
||||||
let infofile = f ++ ".info"
|
let infofile = basedir </> f ++ ".info"
|
||||||
liftIO $ writeFile infofile $ show $ GitAnnexDistribution
|
liftIO $ writeFile infofile $ show $ GitAnnexDistribution
|
||||||
{ distributionUrl = mkUrl basedir f
|
{ distributionUrl = mkUrl f
|
||||||
, distributionKey = k
|
, distributionKey = k
|
||||||
, distributionVersion = version
|
, distributionVersion = bv
|
||||||
, distributionReleasedate = now
|
, distributionReleasedate = now
|
||||||
, distributionUrgentUpgrade = Nothing
|
, distributionUrgentUpgrade = Nothing
|
||||||
}
|
}
|
||||||
void $ inRepo $ runBool [Param "add", File infofile]
|
void $ inRepo $ runBool [Param "add", File infofile]
|
||||||
signFile infofile
|
signFile infofile
|
||||||
signFile f
|
signFile (basedir </> f)
|
||||||
void $ inRepo $ runBool
|
void $ inRepo $ runBool
|
||||||
[ Param "commit"
|
[ Param "commit"
|
||||||
, Param "-m"
|
, Param "-m"
|
||||||
|
@ -70,7 +134,7 @@ makeinfos = do
|
||||||
, Params "sync"
|
, Params "sync"
|
||||||
]
|
]
|
||||||
|
|
||||||
{- Check for out of date info files. -}
|
-- Check for out of date info files.
|
||||||
infos <- liftIO $ filter (".info" `isSuffixOf`)
|
infos <- liftIO $ filter (".info" `isSuffixOf`)
|
||||||
<$> dirContentsRecursive (basedir </> "git-annex")
|
<$> dirContentsRecursive (basedir </> "git-annex")
|
||||||
ds <- liftIO $ forM infos (readish <$$> readFile)
|
ds <- liftIO $ forM infos (readish <$$> readFile)
|
||||||
|
@ -88,8 +152,8 @@ getRepoDir = do
|
||||||
home <- liftIO myHomeDir
|
home <- liftIO myHomeDir
|
||||||
return $ home </> "lib" </> "downloads"
|
return $ home </> "lib" </> "downloads"
|
||||||
|
|
||||||
mkUrl :: FilePath -> FilePath -> String
|
mkUrl :: FilePath -> String
|
||||||
mkUrl basedir f = "https://downloads.kitenet.net/" ++ relPathDirToFile basedir f
|
mkUrl f = "https://downloads.kitenet.net/" ++ f
|
||||||
|
|
||||||
signFile :: FilePath -> Annex ()
|
signFile :: FilePath -> Annex ()
|
||||||
signFile f = do
|
signFile f = do
|
||||||
|
|
|
@ -20,7 +20,7 @@ import Data.Maybe
|
||||||
import Data.List
|
import Data.List
|
||||||
|
|
||||||
import Utility.Monad
|
import Utility.Monad
|
||||||
import Utility.Process
|
import Utility.Process hiding (env)
|
||||||
import Utility.Env
|
import Utility.Env
|
||||||
|
|
||||||
data CmdParams = CmdParams
|
data CmdParams = CmdParams
|
||||||
|
|
|
@ -37,13 +37,16 @@ main = do
|
||||||
mustSucceed "ln" [File "dist/build/git-annex/git-annex.exe", File gitannex]
|
mustSucceed "ln" [File "dist/build/git-annex/git-annex.exe", File gitannex]
|
||||||
let license = tmpdir </> licensefile
|
let license = tmpdir </> licensefile
|
||||||
mustSucceed "sh" [Param "-c", Param $ "zcat standalone/licences.gz > '" ++ license ++ "'"]
|
mustSucceed "sh" [Param "-c", Param $ "zcat standalone/licences.gz > '" ++ license ++ "'"]
|
||||||
extrafiles <- forM (cygwinPrograms ++ cygwinDlls) $ \f -> do
|
extrabins <- forM (cygwinPrograms ++ cygwinDlls) $ \f -> do
|
||||||
p <- searchPath f
|
p <- searchPath f
|
||||||
when (isNothing p) $
|
when (isNothing p) $
|
||||||
print ("unable to find in PATH", f)
|
print ("unable to find in PATH", f)
|
||||||
return p
|
return p
|
||||||
writeFile nsifile $ makeInstaller gitannex license $
|
webappscript <- vbsLauncher tmpdir "git-annex-webapp" "git-annex webapp"
|
||||||
catMaybes extrafiles
|
autostartscript <- vbsLauncher tmpdir "git-annex-autostart" "git annex assistant --autostart"
|
||||||
|
writeFile nsifile $ makeInstaller gitannex license
|
||||||
|
(catMaybes extrabins)
|
||||||
|
[ webappscript, autostartscript ]
|
||||||
mustSucceed "makensis" [File nsifile]
|
mustSucceed "makensis" [File nsifile]
|
||||||
removeFile nsifile -- left behind if makensis fails
|
removeFile nsifile -- left behind if makensis fails
|
||||||
where
|
where
|
||||||
|
@ -54,6 +57,17 @@ main = do
|
||||||
True -> return ()
|
True -> return ()
|
||||||
False -> error $ cmd ++ " failed"
|
False -> error $ cmd ++ " failed"
|
||||||
|
|
||||||
|
{- Generates a .vbs launcher which runs a command without any visible DOS
|
||||||
|
- box. -}
|
||||||
|
vbsLauncher :: FilePath -> String -> String -> IO String
|
||||||
|
vbsLauncher tmpdir basename cmd = do
|
||||||
|
let f = tmpdir </> basename ++ ".vbs"
|
||||||
|
writeFile f $ unlines
|
||||||
|
[ "Set objshell=CreateObject(\"Wscript.Shell\")"
|
||||||
|
, "objShell.Run(\"" ++ cmd ++ "\"), 0, False"
|
||||||
|
]
|
||||||
|
return f
|
||||||
|
|
||||||
gitannexprogram :: FilePath
|
gitannexprogram :: FilePath
|
||||||
gitannexprogram = "git-annex.exe"
|
gitannexprogram = "git-annex.exe"
|
||||||
|
|
||||||
|
@ -67,11 +81,14 @@ uninstaller :: FilePath
|
||||||
uninstaller = "git-annex-uninstall.exe"
|
uninstaller = "git-annex-uninstall.exe"
|
||||||
|
|
||||||
gitInstallDir :: Exp FilePath
|
gitInstallDir :: Exp FilePath
|
||||||
gitInstallDir = fromString "$PROGRAMFILES\\Git\\bin"
|
gitInstallDir = fromString "$PROGRAMFILES\\Git"
|
||||||
|
|
||||||
startMenuItem :: Exp FilePath
|
startMenuItem :: Exp FilePath
|
||||||
startMenuItem = "$SMPROGRAMS/git-annex.lnk"
|
startMenuItem = "$SMPROGRAMS/git-annex.lnk"
|
||||||
|
|
||||||
|
autoStartItem :: Exp FilePath
|
||||||
|
autoStartItem = "$SMSTARTUP/git-annex-autostart.lnk"
|
||||||
|
|
||||||
needGit :: Exp String
|
needGit :: Exp String
|
||||||
needGit = strConcat
|
needGit = strConcat
|
||||||
[ fromString "You need git installed to use git-annex. Looking at "
|
[ fromString "You need git installed to use git-annex. Looking at "
|
||||||
|
@ -81,8 +98,8 @@ needGit = strConcat
|
||||||
, fromString "You can install git from http:////git-scm.com//"
|
, fromString "You can install git from http:////git-scm.com//"
|
||||||
]
|
]
|
||||||
|
|
||||||
makeInstaller :: FilePath -> FilePath -> [FilePath] -> String
|
makeInstaller :: FilePath -> FilePath -> [FilePath] -> [FilePath] -> String
|
||||||
makeInstaller gitannex license extrafiles = nsis $ do
|
makeInstaller gitannex license extrabins launchers = nsis $ do
|
||||||
name "git-annex"
|
name "git-annex"
|
||||||
outFile $ str installer
|
outFile $ str installer
|
||||||
{- Installing into the same directory as git avoids needing to modify
|
{- Installing into the same directory as git avoids needing to modify
|
||||||
|
@ -101,30 +118,46 @@ makeInstaller gitannex license extrafiles = nsis $ do
|
||||||
-- Start menu shortcut
|
-- Start menu shortcut
|
||||||
Development.NSIS.createDirectory "$SMPROGRAMS"
|
Development.NSIS.createDirectory "$SMPROGRAMS"
|
||||||
createShortcut startMenuItem
|
createShortcut startMenuItem
|
||||||
[ Target "$INSTDIR/git-annex.exe"
|
[ Target "wscript.exe"
|
||||||
, Parameters "webapp"
|
, Parameters "\"$INSTDIR/git-annex-webapp.vbs\""
|
||||||
, IconFile "$INSTDIR/git-annex.exe"
|
, StartOptions "SW_SHOWNORMAL"
|
||||||
|
, IconFile "$INSTDIR/cmd/git-annex.exe"
|
||||||
, IconIndex 2
|
, IconIndex 2
|
||||||
, StartOptions "SW_SHOWMINIMIZED"
|
|
||||||
, KeyboardShortcut "ALT|CONTROL|a"
|
, KeyboardShortcut "ALT|CONTROL|a"
|
||||||
, Description "git-annex webapp"
|
, Description "git-annex webapp"
|
||||||
]
|
]
|
||||||
-- Groups of files to install
|
createShortcut autoStartItem
|
||||||
section "main" [] $ do
|
[ Target "wscript.exe"
|
||||||
setOutPath "$INSTDIR"
|
, Parameters "\"$INSTDIR/git-annex-autostart.vbs\""
|
||||||
|
, StartOptions "SW_SHOWNORMAL"
|
||||||
|
, IconFile "$INSTDIR/cmd/git-annex.exe"
|
||||||
|
, IconIndex 2
|
||||||
|
, Description "git-annex autostart"
|
||||||
|
]
|
||||||
|
section "bins" [] $ do
|
||||||
|
setOutPath "$INSTDIR\\bin"
|
||||||
|
mapM_ addfile extrabins
|
||||||
|
section "cmd" [] $ do
|
||||||
|
setOutPath "$INSTDIR\\cmd"
|
||||||
addfile gitannex
|
addfile gitannex
|
||||||
|
section "meta" [] $ do
|
||||||
|
setOutPath "$INSTDIR"
|
||||||
addfile license
|
addfile license
|
||||||
mapM_ addfile extrafiles
|
mapM_ addfile launchers
|
||||||
writeUninstaller $ str uninstaller
|
writeUninstaller $ str uninstaller
|
||||||
uninstall $ do
|
uninstall $ do
|
||||||
delete [RebootOK] $ startMenuItem
|
delete [RebootOK] $ startMenuItem
|
||||||
mapM_ (\f -> delete [RebootOK] $ fromString $ "$INSTDIR/" ++ f) $
|
delete [RebootOK] $ autoStartItem
|
||||||
[ gitannexprogram
|
removefilesFrom "$INSTDIR/bin" extrabins
|
||||||
, licensefile
|
removefilesFrom "$INSTDIR/cmd" [gitannex]
|
||||||
|
removefilesFrom "$INSTDIR" $
|
||||||
|
launchers ++
|
||||||
|
[ license
|
||||||
, uninstaller
|
, uninstaller
|
||||||
] ++ cygwinPrograms ++ cygwinDlls
|
]
|
||||||
where
|
where
|
||||||
addfile f = file [] (str f)
|
addfile f = file [] (str f)
|
||||||
|
removefilesFrom d = mapM_ (\f -> delete [RebootOK] $ fromString $ d ++ "/" ++ takeFileName f)
|
||||||
|
|
||||||
cygwinPrograms :: [FilePath]
|
cygwinPrograms :: [FilePath]
|
||||||
cygwinPrograms = map (\p -> p ++ ".exe") bundledPrograms
|
cygwinPrograms = map (\p -> p ++ ".exe") bundledPrograms
|
||||||
|
|
|
@@ -10,10 +10,11 @@ import System.Directory
 import Data.Char
 import System.Process
 
-import Build.TestConfig
 import Utility.Monad
 import Utility.Exception
 
+type Version = String
+
 {- Set when making an official release. (Distribution vendors should set
 - this too.) -}
 isReleaseBuild :: IO Bool
@@ -25,10 +26,10 @@ isReleaseBuild = isJust <$> catchMaybeIO (getEnv "RELEASE_BUILD")
 -
 - If git or a git repo is not available, or something goes wrong,
 - or this is a release build, just use the version from the changelog. -}
-getVersion :: Test
+getVersion :: IO Version
 getVersion = do
 changelogversion <- getChangelogVersion
-version <- ifM (isReleaseBuild)
+ifM (isReleaseBuild)
 ( return changelogversion
 , catchDefaultIO changelogversion $ do
 let major = takeWhile (/= '.') changelogversion
@@ -40,9 +41,8 @@ getVersion = do
 then return changelogversion
 else return $ concat [ major, ".", autoversion ]
 )
-return $ Config "packageversion" (StringConfig version)
 
-getChangelogVersion :: IO String
+getChangelogVersion :: IO Version
 getChangelogVersion = do
 changelog <- readFile "debian/changelog"
 let verline = takeWhile (/= '\n') changelog
|
|
@ -54,6 +54,7 @@ import qualified Command.Whereis
|
||||||
import qualified Command.List
|
import qualified Command.List
|
||||||
import qualified Command.Log
|
import qualified Command.Log
|
||||||
import qualified Command.Merge
|
import qualified Command.Merge
|
||||||
|
import qualified Command.ResolveMerge
|
||||||
import qualified Command.Info
|
import qualified Command.Info
|
||||||
import qualified Command.Status
|
import qualified Command.Status
|
||||||
import qualified Command.Migrate
|
import qualified Command.Migrate
|
||||||
|
@ -164,6 +165,7 @@ cmds = concat
|
||||||
, Command.List.def
|
, Command.List.def
|
||||||
, Command.Log.def
|
, Command.Log.def
|
||||||
, Command.Merge.def
|
, Command.Merge.def
|
||||||
|
, Command.ResolveMerge.def
|
||||||
, Command.Info.def
|
, Command.Info.def
|
||||||
, Command.Status.def
|
, Command.Status.def
|
||||||
, Command.Migrate.def
|
, Command.Migrate.def
|
||||||
|
|
|
@ -97,15 +97,17 @@ performQuvi relaxed pageurl videourl file = ifAnnexed file addurl geturl
|
||||||
where
|
where
|
||||||
quviurl = setDownloader pageurl QuviDownloader
|
quviurl = setDownloader pageurl QuviDownloader
|
||||||
addurl key = next $ cleanup quviurl file key Nothing
|
addurl key = next $ cleanup quviurl file key Nothing
|
||||||
geturl = next $ addUrlFileQuvi relaxed quviurl videourl file
|
geturl = next $ isJust <$> addUrlFileQuvi relaxed quviurl videourl file
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef WITH_QUVI
|
#ifdef WITH_QUVI
|
||||||
addUrlFileQuvi :: Bool -> URLString -> URLString -> FilePath -> Annex Bool
|
addUrlFileQuvi :: Bool -> URLString -> URLString -> FilePath -> Annex (Maybe Key)
|
||||||
addUrlFileQuvi relaxed quviurl videourl file = do
|
addUrlFileQuvi relaxed quviurl videourl file = do
|
||||||
key <- Backend.URL.fromUrl quviurl Nothing
|
key <- Backend.URL.fromUrl quviurl Nothing
|
||||||
ifM (pure relaxed <||> Annex.getState Annex.fast)
|
ifM (pure relaxed <||> Annex.getState Annex.fast)
|
||||||
( cleanup quviurl file key Nothing
|
( do
|
||||||
|
cleanup' quviurl file key Nothing
|
||||||
|
return (Just key)
|
||||||
, do
|
, do
|
||||||
{- Get the size, and use that to check
|
{- Get the size, and use that to check
|
||||||
- disk space. However, the size info is not
|
- disk space. However, the size info is not
|
||||||
|
@ -113,7 +115,7 @@ addUrlFileQuvi relaxed quviurl videourl file = do
|
||||||
- might change and we want to be able to download
|
- might change and we want to be able to download
|
||||||
- it later. -}
|
- it later. -}
|
||||||
sizedkey <- addSizeUrlKey videourl key
|
sizedkey <- addSizeUrlKey videourl key
|
||||||
prepGetViaTmpChecked sizedkey $ do
|
prepGetViaTmpChecked sizedkey Nothing $ do
|
||||||
tmp <- fromRepo $ gitAnnexTmpObjectLocation key
|
tmp <- fromRepo $ gitAnnexTmpObjectLocation key
|
||||||
showOutput
|
showOutput
|
||||||
ok <- Transfer.notifyTransfer Transfer.Download (Just file) $
|
ok <- Transfer.notifyTransfer Transfer.Download (Just file) $
|
||||||
|
@ -121,15 +123,17 @@ addUrlFileQuvi relaxed quviurl videourl file = do
|
||||||
liftIO $ createDirectoryIfMissing True (parentDir tmp)
|
liftIO $ createDirectoryIfMissing True (parentDir tmp)
|
||||||
downloadUrl [videourl] tmp
|
downloadUrl [videourl] tmp
|
||||||
if ok
|
if ok
|
||||||
then cleanup quviurl file key (Just tmp)
|
then do
|
||||||
else return False
|
cleanup' quviurl file key (Just tmp)
|
||||||
|
return (Just key)
|
||||||
|
else return Nothing
|
||||||
)
|
)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
perform :: Bool -> URLString -> FilePath -> CommandPerform
|
perform :: Bool -> URLString -> FilePath -> CommandPerform
|
||||||
perform relaxed url file = ifAnnexed file addurl geturl
|
perform relaxed url file = ifAnnexed file addurl geturl
|
||||||
where
|
where
|
||||||
geturl = next $ addUrlFile relaxed url file
|
geturl = next $ isJust <$> addUrlFile relaxed url file
|
||||||
addurl key
|
addurl key
|
||||||
| relaxed = do
|
| relaxed = do
|
||||||
setUrlPresent key url
|
setUrlPresent key url
|
||||||
|
@ -149,7 +153,7 @@ perform relaxed url file = ifAnnexed file addurl geturl
|
||||||
stop
|
stop
|
||||||
)
|
)
|
||||||
|
|
||||||
addUrlFile :: Bool -> URLString -> FilePath -> Annex Bool
|
addUrlFile :: Bool -> URLString -> FilePath -> Annex (Maybe Key)
|
||||||
addUrlFile relaxed url file = do
|
addUrlFile relaxed url file = do
|
||||||
liftIO $ createDirectoryIfMissing True (parentDir file)
|
liftIO $ createDirectoryIfMissing True (parentDir file)
|
||||||
ifM (Annex.getState Annex.fast <||> pure relaxed)
|
ifM (Annex.getState Annex.fast <||> pure relaxed)
|
||||||
|
@ -159,13 +163,13 @@ addUrlFile relaxed url file = do
|
||||||
download url file
|
download url file
|
||||||
)
|
)
|
||||||
|
|
||||||
download :: URLString -> FilePath -> Annex Bool
|
download :: URLString -> FilePath -> Annex (Maybe Key)
|
||||||
download url file = do
|
download url file = do
|
||||||
{- Generate a dummy key to use for this download, before we can
|
{- Generate a dummy key to use for this download, before we can
|
||||||
- examine the file and find its real key. This allows resuming
|
- examine the file and find its real key. This allows resuming
|
||||||
- downloads, as the dummy key for a given url is stable. -}
|
- downloads, as the dummy key for a given url is stable. -}
|
||||||
dummykey <- addSizeUrlKey url =<< Backend.URL.fromUrl url Nothing
|
dummykey <- addSizeUrlKey url =<< Backend.URL.fromUrl url Nothing
|
||||||
prepGetViaTmpChecked dummykey $ do
|
prepGetViaTmpChecked dummykey Nothing $ do
|
||||||
tmp <- fromRepo $ gitAnnexTmpObjectLocation dummykey
|
tmp <- fromRepo $ gitAnnexTmpObjectLocation dummykey
|
||||||
showOutput
|
showOutput
|
||||||
ifM (runtransfer dummykey tmp)
|
ifM (runtransfer dummykey tmp)
|
||||||
|
@ -178,9 +182,11 @@ download url file = do
|
||||||
}
|
}
|
||||||
k <- genKey source backend
|
k <- genKey source backend
|
||||||
case k of
|
case k of
|
||||||
Nothing -> return False
|
Nothing -> return Nothing
|
||||||
Just (key, _) -> cleanup url file key (Just tmp)
|
Just (key, _) -> do
|
||||||
, return False
|
cleanup' url file key (Just tmp)
|
||||||
|
return (Just key)
|
||||||
|
, return Nothing
|
||||||
)
|
)
|
||||||
where
|
where
|
||||||
runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
|
runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
|
||||||
|
@ -200,6 +206,11 @@ addSizeUrlKey url key = do
|
||||||
|
|
||||||
cleanup :: URLString -> FilePath -> Key -> Maybe FilePath -> Annex Bool
|
cleanup :: URLString -> FilePath -> Key -> Maybe FilePath -> Annex Bool
|
||||||
cleanup url file key mtmp = do
|
cleanup url file key mtmp = do
|
||||||
|
cleanup' url file key mtmp
|
||||||
|
return True
|
||||||
|
|
||||||
|
cleanup' :: URLString -> FilePath -> Key -> Maybe FilePath -> Annex ()
|
||||||
|
cleanup' url file key mtmp = do
|
||||||
when (isJust mtmp) $
|
when (isJust mtmp) $
|
||||||
logStatus key InfoPresent
|
logStatus key InfoPresent
|
||||||
setUrlPresent key url
|
setUrlPresent key url
|
||||||
|
@ -210,9 +221,8 @@ cleanup url file key mtmp = do
|
||||||
- must already exist, so flush the queue. -}
|
- must already exist, so flush the queue. -}
|
||||||
Annex.Queue.flush
|
Annex.Queue.flush
|
||||||
maybe noop (moveAnnex key) mtmp
|
maybe noop (moveAnnex key) mtmp
|
||||||
return True
|
|
||||||
|
|
||||||
nodownload :: Bool -> URLString -> FilePath -> Annex Bool
|
nodownload :: Bool -> URLString -> FilePath -> Annex (Maybe Key)
|
||||||
nodownload relaxed url file = do
|
nodownload relaxed url file = do
|
||||||
(exists, size) <- if relaxed
|
(exists, size) <- if relaxed
|
||||||
then pure (True, Nothing)
|
then pure (True, Nothing)
|
||||||
|
@ -220,10 +230,11 @@ nodownload relaxed url file = do
|
||||||
if exists
|
if exists
|
||||||
then do
|
then do
|
||||||
key <- Backend.URL.fromUrl url size
|
key <- Backend.URL.fromUrl url size
|
||||||
cleanup url file key Nothing
|
cleanup' url file key Nothing
|
||||||
|
return (Just key)
|
||||||
else do
|
else do
|
||||||
warning $ "unable to access url: " ++ url
|
warning $ "unable to access url: " ++ url
|
||||||
return False
|
return Nothing
|
||||||
|
|
||||||
url2file :: URI -> Maybe Int -> Int -> FilePath
|
url2file :: URI -> Maybe Int -> Int -> FilePath
|
||||||
url2file url pathdepth pathmax = case pathdepth of
|
url2file url pathdepth pathmax = case pathdepth of
|
||||||
|
|
|
@ -14,6 +14,7 @@ import Annex.Init
|
||||||
import Config.Files
|
import Config.Files
|
||||||
import qualified Build.SysConfig
|
import qualified Build.SysConfig
|
||||||
import Utility.HumanTime
|
import Utility.HumanTime
|
||||||
|
import Assistant.Install
|
||||||
|
|
||||||
import System.Environment
|
import System.Environment
|
||||||
|
|
||||||
|
@ -50,6 +51,7 @@ start foreground stopdaemon autostart startdelay
|
||||||
liftIO $ autoStart startdelay
|
liftIO $ autoStart startdelay
|
||||||
stop
|
stop
|
||||||
| otherwise = do
|
| otherwise = do
|
||||||
|
liftIO ensureInstalled
|
||||||
ensureInitialized
|
ensureInitialized
|
||||||
Command.Watch.start True foreground stopdaemon startdelay
|
Command.Watch.start True foreground stopdaemon startdelay
|
||||||
|
|
||||||
|
|
|
@ -12,8 +12,8 @@ import Control.Exception.Extensible
|
||||||
import Common.Annex
|
import Common.Annex
|
||||||
import Command
|
import Command
|
||||||
import qualified Git
|
import qualified Git
|
||||||
import qualified Git.Command
|
|
||||||
import qualified Git.LsFiles
|
import qualified Git.LsFiles
|
||||||
|
import qualified Git.Branch
|
||||||
import Config
|
import Config
|
||||||
import Annex.Direct
|
import Annex.Direct
|
||||||
import Annex.Exception
|
import Annex.Exception
|
||||||
|
@ -33,9 +33,8 @@ perform :: CommandPerform
|
||||||
perform = do
|
perform = do
|
||||||
showStart "commit" ""
|
showStart "commit" ""
|
||||||
showOutput
|
showOutput
|
||||||
_ <- inRepo $ Git.Command.runBool
|
_ <- inRepo $ Git.Branch.commitCommand Git.Branch.ManualCommit
|
||||||
[ Param "commit"
|
[ Param "-a"
|
||||||
, Param "-a"
|
|
||||||
, Param "-m"
|
, Param "-m"
|
||||||
, Param "commit before switching to direct mode"
|
, Param "commit before switching to direct mode"
|
||||||
]
|
]
|
||||||
|
|
|
@ -33,6 +33,9 @@ import Annex.Quvi
|
||||||
import qualified Utility.Quvi as Quvi
|
import qualified Utility.Quvi as Quvi
|
||||||
import Command.AddUrl (addUrlFileQuvi)
|
import Command.AddUrl (addUrlFileQuvi)
|
||||||
#endif
|
#endif
|
||||||
|
import Types.MetaData
|
||||||
|
import Logs.MetaData
|
||||||
|
import Annex.MetaData
|
||||||
|
|
||||||
def :: [Command]
|
def :: [Command]
|
||||||
def = [notBareRepo $ withOptions [templateOption, relaxedOption] $
|
def = [notBareRepo $ withOptions [templateOption, relaxedOption] $
|
||||||
|
@ -165,12 +168,14 @@ performDownload relaxed cache todownload = case location todownload of
|
||||||
Nothing -> return True
|
Nothing -> return True
|
||||||
Just f -> do
|
Just f -> do
|
||||||
showStart "addurl" f
|
showStart "addurl" f
|
||||||
ok <- getter f
|
mk <- getter f
|
||||||
if ok
|
case mk of
|
||||||
then do
|
Just key -> do
|
||||||
|
whenM (annexGenMetaData <$> Annex.getGitConfig) $
|
||||||
|
addMetaData key $ extractMetaData todownload
|
||||||
showEndOk
|
showEndOk
|
||||||
return True
|
return True
|
||||||
else do
|
Nothing -> do
|
||||||
showEndFail
|
showEndFail
|
||||||
checkFeedBroken (feedurl todownload)
|
checkFeedBroken (feedurl todownload)
|
||||||
|
|
||||||
|
@ -205,25 +210,12 @@ defaultTemplate = "${feedtitle}/${itemtitle}${extension}"
|
||||||
{- Generates a filename to use for a feed item by filling out the template.
|
{- Generates a filename to use for a feed item by filling out the template.
|
||||||
- The filename may not be unique. -}
|
- The filename may not be unique. -}
|
||||||
feedFile :: Utility.Format.Format -> ToDownload -> String -> FilePath
|
feedFile :: Utility.Format.Format -> ToDownload -> String -> FilePath
|
||||||
feedFile tmpl i extension = Utility.Format.format tmpl $ M.fromList
|
feedFile tmpl i extension = Utility.Format.format tmpl $
|
||||||
[ field "feedtitle" $ getFeedTitle $ feed i
|
M.map sanitizeFilePath $ M.fromList $ extractFields i ++
|
||||||
, fieldMaybe "itemtitle" $ getItemTitle $ item i
|
[ ("extension", extension)
|
||||||
, fieldMaybe "feedauthor" $ getFeedAuthor $ feed i
|
, extractField "itempubdate" [pubdate $ item i]
|
||||||
, fieldMaybe "itemauthor" $ getItemAuthor $ item i
|
]
|
||||||
, fieldMaybe "itemsummary" $ getItemSummary $ item i
|
|
||||||
, fieldMaybe "itemdescription" $ getItemDescription $ item i
|
|
||||||
, fieldMaybe "itemrights" $ getItemRights $ item i
|
|
||||||
, fieldMaybe "itemid" $ snd <$> getItemId (item i)
|
|
||||||
, fieldMaybe "itempubdate" $ pubdate $ item i
|
|
||||||
, ("extension", sanitizeFilePath extension)
|
|
||||||
]
|
|
||||||
where
|
where
|
||||||
field k v =
|
|
||||||
let s = sanitizeFilePath v in
|
|
||||||
if null s then (k, "none") else (k, s)
|
|
||||||
fieldMaybe k Nothing = (k, "none")
|
|
||||||
fieldMaybe k (Just v) = field k v
|
|
||||||
|
|
||||||
#if MIN_VERSION_feed(0,3,9)
|
#if MIN_VERSION_feed(0,3,9)
|
||||||
pubdate itm = case getItemPublishDate itm :: Maybe (Maybe UTCTime) of
|
pubdate itm = case getItemPublishDate itm :: Maybe (Maybe UTCTime) of
|
||||||
Just (Just d) -> Just $
|
Just (Just d) -> Just $
|
||||||
|
@ -234,11 +226,46 @@ feedFile tmpl i extension = Utility.Format.format tmpl $ M.fromList
|
||||||
pubdate _ = Nothing
|
pubdate _ = Nothing
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
extractMetaData :: ToDownload -> MetaData
|
||||||
|
extractMetaData i = case getItemPublishDate (item i) :: Maybe (Maybe UTCTime) of
|
||||||
|
Just (Just d) -> unionMetaData meta (dateMetaData d meta)
|
||||||
|
_ -> meta
|
||||||
|
where
|
||||||
|
tometa (k, v) = (mkMetaFieldUnchecked k, S.singleton (toMetaValue v))
|
||||||
|
meta = MetaData $ M.fromList $ map tometa $ extractFields i
|
||||||
|
|
||||||
|
{- Extract fields from the feed and item, that are both used as metadata,
|
||||||
|
- and to generate the filename. -}
|
||||||
|
extractFields :: ToDownload -> [(String, String)]
|
||||||
|
extractFields i = map (uncurry extractField)
|
||||||
|
[ ("feedtitle", [feedtitle])
|
||||||
|
, ("itemtitle", [itemtitle])
|
||||||
|
, ("feedauthor", [feedauthor])
|
||||||
|
, ("itemauthor", [itemauthor])
|
||||||
|
, ("itemsummary", [getItemSummary $ item i])
|
||||||
|
, ("itemdescription", [getItemDescription $ item i])
|
||||||
|
, ("itemrights", [getItemRights $ item i])
|
||||||
|
, ("itemid", [snd <$> getItemId (item i)])
|
||||||
|
, ("title", [itemtitle, feedtitle])
|
||||||
|
, ("author", [itemauthor, feedauthor])
|
||||||
|
]
|
||||||
|
where
|
||||||
|
feedtitle = Just $ getFeedTitle $ feed i
|
||||||
|
itemtitle = getItemTitle $ item i
|
||||||
|
feedauthor = getFeedAuthor $ feed i
|
||||||
|
itemauthor = getItemAuthor $ item i
|
||||||
|
|
||||||
|
extractField :: String -> [Maybe String] -> (String, String)
|
||||||
|
extractField k [] = (k, "none")
|
||||||
|
extractField k (Just v:_)
|
||||||
|
| not (null v) = (k, v)
|
||||||
|
extractField k (_:rest) = extractField k rest
|
||||||
|
|
||||||
{- Called when there is a problem with a feed.
|
{- Called when there is a problem with a feed.
|
||||||
- Throws an error if the feed is broken, otherwise shows a warning. -}
|
- Throws an error if the feed is broken, otherwise shows a warning. -}
|
||||||
feedProblem :: URLString -> String -> Annex ()
|
feedProblem :: URLString -> String -> Annex ()
|
||||||
feedProblem url message = ifM (checkFeedBroken url)
|
feedProblem url message = ifM (checkFeedBroken url)
|
||||||
( error $ message ++ " (having repeated problems with this feed!)"
|
( error $ message ++ " (having repeated problems with feed: " ++ url ++ ")"
|
||||||
, warning $ "warning: " ++ message
|
, warning $ "warning: " ++ message
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -12,7 +12,7 @@ import Control.Exception.Extensible
|
||||||
import Common.Annex
|
import Common.Annex
|
||||||
import Command
|
import Command
|
||||||
import qualified Git
|
import qualified Git
|
||||||
import qualified Git.Command
|
import qualified Git.Branch
|
||||||
import qualified Git.LsFiles
|
import qualified Git.LsFiles
|
||||||
import Git.FileMode
|
import Git.FileMode
|
||||||
import Config
|
import Config
|
||||||
|
@ -49,9 +49,8 @@ perform = do
|
||||||
showStart "commit" ""
|
showStart "commit" ""
|
||||||
whenM stageDirect $ do
|
whenM stageDirect $ do
|
||||||
showOutput
|
showOutput
|
||||||
void $ inRepo $ Git.Command.runBool
|
void $ inRepo $ Git.Branch.commitCommand Git.Branch.ManualCommit
|
||||||
[ Param "commit"
|
[ Param "-m"
|
||||||
, Param "-m"
|
|
||||||
, Param "commit before switching to indirect mode"
|
, Param "commit before switching to indirect mode"
|
||||||
]
|
]
|
||||||
showEndOk
|
showEndOk
|
||||||
|
|
|
@@ -11,7 +11,7 @@ import Common.Annex
 import Command
 import Backend
 import qualified Types.Key
-import qualified Types.Backend
+import Types.Backend (canUpgradeKey, fastMigrate)
 import Types.KeySource
 import Annex.Content
 import qualified Command.ReKey
@@ -51,8 +51,7 @@ start file key = do
 upgradableKey :: Backend -> Key -> Bool
 upgradableKey backend key = isNothing (Types.Key.keySize key) || backendupgradable
 where
-backendupgradable = maybe False (\a -> a key)
-(Types.Backend.canUpgradeKey backend)
+backendupgradable = maybe False (\a -> a key) (canUpgradeKey backend)
 
 {- Store the old backend's key in the new backend
 - The old backend's key is not dropped from it, because there may
@@ -67,15 +66,22 @@ perform :: FilePath -> Key -> Backend -> Backend -> CommandPerform
 perform file oldkey oldbackend newbackend = go =<< genkey
 where
 go Nothing = stop
-go (Just newkey) = stopUnless checkcontent $ finish newkey
+go (Just (newkey, knowngoodcontent))
+| knowngoodcontent = finish newkey
+| otherwise = stopUnless checkcontent $ finish newkey
 checkcontent = Command.Fsck.checkBackend oldbackend oldkey $ Just file
 finish newkey = stopUnless (Command.ReKey.linkKey oldkey newkey) $
 next $ Command.ReKey.cleanup file oldkey newkey
-genkey = do
-content <- calcRepo $ gitAnnexLocation oldkey
-let source = KeySource
-{ keyFilename = file
-, contentLocation = content
-, inodeCache = Nothing
-}
-liftM fst <$> genKey source (Just newbackend)
+genkey = case maybe Nothing (\fm -> fm oldkey newbackend) (fastMigrate oldbackend) of
+Just newkey -> return $ Just (newkey, True)
+Nothing -> do
+content <- calcRepo $ gitAnnexLocation oldkey
+let source = KeySource
+{ keyFilename = file
+, contentLocation = content
+, inodeCache = Nothing
+}
+v <- genKey source (Just newbackend)
+return $ case v of
+Just (newkey, _) -> Just (newkey, False)
+_ -> Nothing
Command/ResolveMerge.hs (new file, 40 additions)
@@ -0,0 +1,40 @@
+{- git-annex command
+-
+- Copyright 2014 Joey Hess <joey@kitenet.net>
+-
+- Licensed under the GNU GPL version 3 or higher.
+-}
+
+module Command.ResolveMerge where
+
+import Common.Annex
+import Command
+import qualified Git
+import Git.Sha
+import qualified Git.Branch
+import Annex.AutoMerge
+
+def :: [Command]
+def = [command "resolvemerge" paramNothing seek SectionPlumbing
+"resolve merge conflicts"]
+
+seek :: CommandSeek
+seek ps = withNothing start ps
+
+start :: CommandStart
+start = do
+showStart "resolvemerge" ""
+us <- fromMaybe nobranch <$> inRepo Git.Branch.current
+d <- fromRepo Git.localGitDir
+let merge_head = d </> "MERGE_HEAD"
+them <- fromMaybe (error nomergehead) . extractSha
+<$> liftIO (readFile merge_head)
+ifM (resolveMerge (Just us) them)
+( do
+void $ commitResolvedMerge Git.Branch.ManualCommit
+next $ next $ return True
+, error "Merge conflict could not be automatically resolved."
+)
+where
+nobranch = error "No branch is currently checked out."
+nomergehead = error "No SHA found in .git/merge_head"
@ -127,14 +127,12 @@ commit = next $ next $ ifM isDirect
|
||||||
showStart "commit" ""
|
showStart "commit" ""
|
||||||
void stageDirect
|
void stageDirect
|
||||||
void preCommitDirect
|
void preCommitDirect
|
||||||
commitStaged commitmessage
|
commitStaged Git.Branch.ManualCommit commitmessage
|
||||||
, do
|
, do
|
||||||
showStart "commit" ""
|
showStart "commit" ""
|
||||||
Annex.Branch.commit "update"
|
Annex.Branch.commit "update"
|
||||||
-- Commit will fail when the tree is clean, so ignore failure.
|
inRepo $ Git.Branch.commitQuiet Git.Branch.ManualCommit
|
||||||
_ <- inRepo $ tryIO . Git.Command.runQuiet
|
[ Param "-a"
|
||||||
[ Param "commit"
|
|
||||||
, Param "-a"
|
|
||||||
, Param "-m"
|
, Param "-m"
|
||||||
, Param commitmessage
|
, Param commitmessage
|
||||||
]
|
]
|
||||||
|
@ -143,14 +141,14 @@ commit = next $ next $ ifM isDirect
|
||||||
where
|
where
|
||||||
commitmessage = "git-annex automatic sync"
|
commitmessage = "git-annex automatic sync"
|
||||||
|
|
||||||
commitStaged :: String -> Annex Bool
|
commitStaged :: Git.Branch.CommitMode -> String -> Annex Bool
|
||||||
commitStaged commitmessage = go =<< inRepo Git.Branch.currentUnsafe
|
commitStaged commitmode commitmessage = go =<< inRepo Git.Branch.currentUnsafe
|
||||||
where
|
where
|
||||||
go Nothing = return False
|
go Nothing = return False
|
||||||
go (Just branch) = do
|
go (Just branch) = do
|
||||||
runAnnexHook preCommitAnnexHook
|
runAnnexHook preCommitAnnexHook
|
||||||
parent <- inRepo $ Git.Ref.sha branch
|
parent <- inRepo $ Git.Ref.sha branch
|
||||||
void $ inRepo $ Git.Branch.commit False commitmessage branch
|
void $ inRepo $ Git.Branch.commit commitmode False commitmessage branch
|
||||||
(maybeToList parent)
|
(maybeToList parent)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@ -169,11 +167,16 @@ mergeLocal (Just branch) = go =<< needmerge
|
||||||
go False = stop
|
go False = stop
|
||||||
go True = do
|
go True = do
|
||||||
showStart "merge" $ Git.Ref.describe syncbranch
|
showStart "merge" $ Git.Ref.describe syncbranch
|
||||||
next $ next $ autoMergeFrom syncbranch (Just branch)
|
next $ next $ autoMergeFrom syncbranch (Just branch) Git.Branch.ManualCommit
|
||||||
|
|
||||||
pushLocal :: Maybe Git.Ref -> CommandStart
|
pushLocal :: Maybe Git.Ref -> CommandStart
|
||||||
pushLocal Nothing = stop
|
pushLocal b = do
|
||||||
pushLocal (Just branch) = do
|
updateSyncBranch b
|
||||||
|
stop
|
||||||
|
|
||||||
|
updateSyncBranch :: Maybe Git.Ref -> Annex ()
|
||||||
|
updateSyncBranch Nothing = noop
|
||||||
|
updateSyncBranch (Just branch) = do
|
||||||
-- Update the sync branch to match the new state of the branch
|
-- Update the sync branch to match the new state of the branch
|
||||||
inRepo $ updateBranch $ syncBranch branch
|
inRepo $ updateBranch $ syncBranch branch
|
||||||
-- In direct mode, we're operating on some special direct mode
|
-- In direct mode, we're operating on some special direct mode
|
||||||
|
@ -181,7 +184,6 @@ pushLocal (Just branch) = do
|
||||||
-- branch.
|
-- branch.
|
||||||
whenM isDirect $
|
whenM isDirect $
|
||||||
inRepo $ updateBranch $ fromDirectBranch branch
|
inRepo $ updateBranch $ fromDirectBranch branch
|
||||||
stop
|
|
||||||
|
|
||||||
updateBranch :: Git.Ref -> Git.Repo -> IO ()
|
updateBranch :: Git.Ref -> Git.Repo -> IO ()
|
||||||
updateBranch syncbranch g =
|
updateBranch syncbranch g =
|
||||||
|
@ -217,7 +219,7 @@ mergeRemote remote b = case b of
|
||||||
Just thisbranch ->
|
Just thisbranch ->
|
||||||
and <$> (mapM (merge (Just thisbranch)) =<< tomerge (branchlist b))
|
and <$> (mapM (merge (Just thisbranch)) =<< tomerge (branchlist b))
|
||||||
where
|
where
|
||||||
merge thisbranch = flip autoMergeFrom thisbranch . remoteBranch remote
|
merge thisbranch br = autoMergeFrom (remoteBranch remote br) thisbranch Git.Branch.ManualCommit
|
||||||
tomerge = filterM (changed remote)
|
tomerge = filterM (changed remote)
|
||||||
branchlist Nothing = []
|
branchlist Nothing = []
|
||||||
branchlist (Just branch) = [branch, syncBranch branch]
|
branchlist (Just branch) = [branch, syncBranch branch]
|
||||||
|
|
|
@ -16,6 +16,7 @@ import qualified Annex
|
||||||
import Annex.Content
|
import Annex.Content
|
||||||
import Annex.Content.Direct
|
import Annex.Content.Direct
|
||||||
import qualified Git.Command
|
import qualified Git.Command
|
||||||
|
import qualified Git.Branch
|
||||||
import qualified Git.Ref
|
import qualified Git.Ref
|
||||||
import qualified Git.DiffTree as DiffTree
|
import qualified Git.DiffTree as DiffTree
|
||||||
import Utility.CopyFile
|
import Utility.CopyFile
|
||||||
|
@ -45,9 +46,8 @@ wrapUnannex a = ifM isDirect
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
where
|
where
|
||||||
commit = inRepo $ Git.Command.run
|
commit = inRepo $ Git.Branch.commitCommand Git.Branch.ManualCommit
|
||||||
[ Param "commit"
|
[ Param "-q"
|
||||||
, Param "-q"
|
|
||||||
, Param "--allow-empty"
|
, Param "--allow-empty"
|
||||||
, Param "--no-verify"
|
, Param "--no-verify"
|
||||||
, Param "-m", Param "content removed from git annex"
|
, Param "-m", Param "content removed from git annex"
|
||||||
|
|
|
@ -16,6 +16,10 @@ import qualified Command.Unannex
|
||||||
import qualified Annex.Branch
|
import qualified Annex.Branch
|
||||||
import Annex.Content
|
import Annex.Content
|
||||||
import Annex.Init
|
import Annex.Init
|
||||||
|
import Utility.FileMode
|
||||||
|
|
||||||
|
import System.IO.HVFS
|
||||||
|
import System.IO.HVFS.Utils
|
||||||
|
|
||||||
def :: [Command]
|
def :: [Command]
|
||||||
def = [addCheck check $ command "uninit" paramPaths seek
|
def = [addCheck check $ command "uninit" paramPaths seek
|
||||||
|
@ -56,6 +60,7 @@ finish = do
|
||||||
annexdir <- fromRepo gitAnnexDir
|
annexdir <- fromRepo gitAnnexDir
|
||||||
annexobjectdir <- fromRepo gitAnnexObjectDir
|
annexobjectdir <- fromRepo gitAnnexObjectDir
|
||||||
leftovers <- removeUnannexed =<< getKeysPresent InAnnex
|
leftovers <- removeUnannexed =<< getKeysPresent InAnnex
|
||||||
|
liftIO $ prepareRemoveAnnexDir annexdir
|
||||||
if null leftovers
|
if null leftovers
|
||||||
then liftIO $ removeDirectoryRecursive annexdir
|
then liftIO $ removeDirectoryRecursive annexdir
|
||||||
else error $ unlines
|
else error $ unlines
|
||||||
|
@ -82,6 +87,12 @@ finish = do
|
||||||
[Param "branch", Param "-D", Param $ Git.fromRef Annex.Branch.name]
|
[Param "branch", Param "-D", Param $ Git.fromRef Annex.Branch.name]
|
||||||
liftIO exitSuccess
|
liftIO exitSuccess
|
||||||
|
|
||||||
|
{- Turn on write bits in all remaining files in the annex directory, in
|
||||||
|
- preparation for removal. -}
|
||||||
|
prepareRemoveAnnexDir :: FilePath -> IO ()
|
||||||
|
prepareRemoveAnnexDir annexdir =
|
||||||
|
recurseDir SystemFS annexdir >>= mapM_ (void . tryIO . allowWrite)
|
||||||
|
|
||||||
{- Keys that were moved out of the annex have a hard link still in the
|
{- Keys that were moved out of the annex have a hard link still in the
|
||||||
- annex, with > 1 link count, and those can be removed.
|
- annex, with > 1 link count, and those can be removed.
|
||||||
-
|
-
|
||||||
|
|
|
@ -10,9 +10,6 @@
|
||||||
module Command.Unused where
|
module Command.Unused where
|
||||||
|
|
||||||
import qualified Data.Set as S
|
import qualified Data.Set as S
|
||||||
import Data.BloomFilter
|
|
||||||
import Data.BloomFilter.Easy
|
|
||||||
import Data.BloomFilter.Hash
|
|
||||||
import Control.Monad.ST
|
import Control.Monad.ST
|
||||||
import qualified Data.Map as M
|
import qualified Data.Map as M
|
||||||
|
|
||||||
|
@ -36,6 +33,7 @@ import Annex.CatFile
|
||||||
import Types.Key
|
import Types.Key
|
||||||
import Git.FilePath
|
import Git.FilePath
|
||||||
import Logs.View (is_branchView)
|
import Logs.View (is_branchView)
|
||||||
|
import Utility.Bloom
|
||||||
|
|
||||||
def :: [Command]
|
def :: [Command]
|
||||||
def = [withOptions [unusedFromOption] $ command "unused" paramNothing seek
|
def = [withOptions [unusedFromOption] $ command "unused" paramNothing seek
|
||||||
|
|
|
@ -139,9 +139,11 @@ genCfg cfg descs = unlines $ intercalate [""]
|
||||||
grouplist = unwords $ map fromStandardGroup [minBound..]
|
grouplist = unwords $ map fromStandardGroup [minBound..]
|
||||||
|
|
||||||
preferredcontent = settings cfg descs cfgPreferredContentMap
|
preferredcontent = settings cfg descs cfgPreferredContentMap
|
||||||
[ com "Repository preferred contents" ]
|
[ com "Repository preferred contents"
|
||||||
|
, com "(Set to \"standard\" to use a repository's group's preferred contents)"
|
||||||
|
]
|
||||||
(\(s, u) -> line "wanted" u s)
|
(\(s, u) -> line "wanted" u s)
|
||||||
(\u -> line "wanted" u "standard")
|
(\u -> line "wanted" u "")
|
||||||
|
|
||||||
requiredcontent = settings cfg descs cfgRequiredContentMap
|
requiredcontent = settings cfg descs cfgRequiredContentMap
|
||||||
[ com "Repository required contents" ]
|
[ com "Repository required contents" ]
|
||||||
|
@ -153,7 +155,7 @@ genCfg cfg descs = unlines $ intercalate [""]
|
||||||
, com "(Used by repositories with \"groupwanted\" in their preferred contents)"
|
, com "(Used by repositories with \"groupwanted\" in their preferred contents)"
|
||||||
]
|
]
|
||||||
(\(s, g) -> gline g s)
|
(\(s, g) -> gline g s)
|
||||||
(\g -> gline g "standard")
|
(\g -> gline g "")
|
||||||
where
|
where
|
||||||
gline g value = [ unwords ["groupwanted", g, "=", value] ]
|
gline g value = [ unwords ["groupwanted", g, "=", value] ]
|
||||||
allgroups = S.unions $ stdgroups : M.elems (cfgGroupMap cfg)
|
allgroups = S.unions $ stdgroups : M.elems (cfgGroupMap cfg)
|
||||||
|
|
|
@@ -14,6 +14,7 @@ import Git
 import Git.Sha
 import Git.Command
 import qualified Git.Ref
+import qualified Git.BuildVersion
 
 {- The currently checked out branch.
 -
@@ -103,6 +104,31 @@ fastForward branch (first:rest) repo =
 (False, True) -> findbest c rs -- worse
 (False, False) -> findbest c rs -- same
 
+{- The user may have set commit.gpgsign, indending all their manual
+- commits to be signed. But signing automatic/background commits could
+- easily lead to unwanted gpg prompts or failures.
+-}
+data CommitMode = ManualCommit | AutomaticCommit
+deriving (Eq)
+
+applyCommitMode :: CommitMode -> [CommandParam] -> [CommandParam]
+applyCommitMode commitmode ps
+| commitmode == AutomaticCommit && not (Git.BuildVersion.older "2.0.0") =
+Param "--no-gpg-sign" : ps
+| otherwise = ps
+
+{- Commit via the usual git command. -}
+commitCommand :: CommitMode -> [CommandParam] -> Repo -> IO Bool
+commitCommand = commitCommand' runBool
+
+{- Commit will fail when the tree is clean. This suppresses that error. -}
+commitQuiet :: CommitMode -> [CommandParam] -> Repo -> IO ()
+commitQuiet commitmode ps = void . tryIO . commitCommand' runQuiet commitmode ps
+
+commitCommand' :: ([CommandParam] -> Repo -> IO a) -> CommitMode -> [CommandParam] -> Repo -> IO a
+commitCommand' runner commitmode ps = runner $
+Param "commit" : applyCommitMode commitmode ps
+
 {- Commits the index into the specified branch (or other ref),
 - with the specified parent refs, and returns the committed sha.
 -
||||||
- Unlike git-commit, does not run any hooks, or examine the work tree
|
- Unlike git-commit, does not run any hooks, or examine the work tree
|
||||||
- in any way.
|
- in any way.
|
||||||
-}
|
-}
|
||||||
commit :: Bool -> String -> Branch -> [Ref] -> Repo -> IO (Maybe Sha)
|
commit :: CommitMode -> Bool -> String -> Branch -> [Ref] -> Repo -> IO (Maybe Sha)
|
||||||
commit allowempty message branch parentrefs repo = do
|
commit commitmode allowempty message branch parentrefs repo = do
|
||||||
tree <- getSha "write-tree" $
|
tree <- getSha "write-tree" $
|
||||||
pipeReadStrict [Param "write-tree"] repo
|
pipeReadStrict [Param "write-tree"] repo
|
||||||
ifM (cancommit tree)
|
ifM (cancommit tree)
|
||||||
( do
|
( do
|
||||||
sha <- getSha "commit-tree" $ pipeWriteRead
|
sha <- getSha "commit-tree" $
|
||||||
(map Param $ ["commit-tree", fromRef tree] ++ ps)
|
pipeWriteRead ([Param "commit-tree", Param (fromRef tree)] ++ ps) sendmsg repo
|
||||||
(Just $ flip hPutStr message) repo
|
|
||||||
update branch sha repo
|
update branch sha repo
|
||||||
return $ Just sha
|
return $ Just sha
|
||||||
, return Nothing
|
, return Nothing
|
||||||
)
|
)
|
||||||
where
|
where
|
||||||
ps = concatMap (\r -> ["-p", fromRef r]) parentrefs
|
ps = applyCommitMode commitmode $
|
||||||
|
map Param $ concatMap (\r -> ["-p", fromRef r]) parentrefs
|
||||||
cancommit tree
|
cancommit tree
|
||||||
| allowempty = return True
|
| allowempty = return True
|
||||||
| otherwise = case parentrefs of
|
| otherwise = case parentrefs of
|
||||||
[p] -> maybe False (tree /=) <$> Git.Ref.tree p repo
|
[p] -> maybe False (tree /=) <$> Git.Ref.tree p repo
|
||||||
_ -> return True
|
_ -> return True
|
||||||
|
sendmsg = Just $ flip hPutStr message
|
||||||
|
|
||||||
commitAlways :: String -> Branch -> [Ref] -> Repo -> IO Sha
|
commitAlways :: CommitMode -> String -> Branch -> [Ref] -> Repo -> IO Sha
|
||||||
commitAlways message branch parentrefs repo = fromJust
|
commitAlways commitmode message branch parentrefs repo = fromJust
|
||||||
<$> commit True message branch parentrefs repo
|
<$> commit commitmode True message branch parentrefs repo
|
||||||
|
|
||||||
{- A leading + makes git-push force pushing a branch. -}
|
{- A leading + makes git-push force pushing a branch. -}
|
||||||
forcePush :: String -> String
|
forcePush :: String -> String
|
||||||
|
|
|
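Note: the new CommitMode type and applyCommitMode above are what let automatic/background commits skip gpg signing. A minimal standalone sketch of the same idea, using plain strings in place of git-annex's CommandParam type and a boolean stand-in for the Git.BuildVersion.older "2.0.0" check (both simplifications are assumptions for illustration only):

-- Sketch: automatic commits get --no-gpg-sign when git is new enough to
-- support it; manual commits are left alone so commit.gpgsign applies.
data CommitMode = ManualCommit | AutomaticCommit
    deriving (Eq, Show)

applyCommitMode :: Bool -> CommitMode -> [String] -> [String]
applyCommitMode gitSupportsNoGpgSign commitmode ps
    | commitmode == AutomaticCommit && gitSupportsNoGpgSign = "--no-gpg-sign" : ps
    | otherwise = ps

main :: IO ()
main = do
    -- automatic commit: gets --no-gpg-sign added
    putStrLn $ unwords $ "git" : "commit" : applyCommitMode True AutomaticCommit ["-m", "auto"]
    -- manual commit: untouched, so the user's own configuration applies
    putStrLn $ unwords $ "git" : "commit" : applyCommitMode True ManualCommit ["-m", "manual"]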
@@ -13,7 +13,6 @@ import Common
 import Git
 import Git.Types
 import qualified Utility.CoProcess as CoProcess
-import Utility.Batch
 
 {- Constructs a git command line operating on the specified repo. -}
 gitCommandLine :: [CommandParam] -> Repo -> [CommandParam]
@@ -31,12 +30,6 @@ runBool :: [CommandParam] -> Repo -> IO Bool
 runBool params repo = assertLocal repo $
 	boolSystemEnv "git" (gitCommandLine params repo) (gitEnv repo)
 
-{- Runs git in batch mode. -}
-runBatch :: BatchCommandMaker -> [CommandParam] -> Repo -> IO Bool
-runBatch batchmaker params repo = assertLocal repo $ do
-	let (cmd, params') = batchmaker ("git", gitCommandLine params repo)
-	boolSystemEnv cmd params' (gitEnv repo)
-
 {- Runs git in the specified repo, throwing an error if it fails. -}
 run :: [CommandParam] -> Repo -> IO ()
 run params repo = assertLocal repo $
Git/Command/Batch.hs | 19 (new file)

@@ -0,0 +1,19 @@
+{- running batch git commands
+ -
+ - Copyright 2010-2013 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Git.Command.Batch where
+
+import Common
+import Git
+import Git.Command
+import Utility.Batch
+
+{- Runs git in batch mode. -}
+run :: BatchCommandMaker -> [CommandParam] -> Repo -> IO Bool
+run batchmaker params repo = assertLocal repo $ do
+	let (cmd, params') = batchmaker ("git", gitCommandLine params repo)
+	boolSystemEnv cmd params' (gitEnv repo)
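For reference, a rough standalone sketch of what the batch wrapping in Git/Command/Batch.hs amounts to. BatchCommandMaker is modeled here as a plain function over (command, arguments), and the nice-based wrapper is only an assumed example, not git-annex's actual Utility.Batch implementation:

-- BatchCommandMaker rewrites a command line before it is run.
type BatchCommandMaker = (String, [String]) -> (String, [String])

-- Hypothetical example wrapper: run the command under nice.
niceBatch :: BatchCommandMaker
niceBatch (cmd, args) = ("nice", cmd : args)

-- Analogue of Git.Command.Batch.run: wrap the git command line, then run it.
-- Here it is only printed instead of executed.
run :: BatchCommandMaker -> [String] -> IO ()
run batchmaker gitargs = do
    let (cmd, args) = batchmaker ("git", gitargs)
    putStrLn $ unwords (cmd : args)

main :: IO ()
main = run niceBatch ["gc", "--auto"]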
@@ -99,6 +99,9 @@ getParticiantList globalconfigrepo repo remotename = KeyIds $ parse $ firstJust
 remoteParticipantConfigKey :: RemoteName -> String
 remoteParticipantConfigKey = remoteConfigKey "gcrypt-participants"
 
+remotePublishParticipantConfigKey :: RemoteName -> String
+remotePublishParticipantConfigKey = remoteConfigKey "gcrypt-publish-participants"
+
 remoteSigningKey :: RemoteName -> String
 remoteSigningKey = remoteConfigKey "gcrypt-signingkey"
 
Git/Merge.hs | 13

@@ -11,14 +11,19 @@ import Common
 import Git
 import Git.Command
 import Git.BuildVersion
+import Git.Branch (CommitMode(..))
 
 {- Avoids recent git's interactive merge. -}
-mergeNonInteractive :: Ref -> Repo -> IO Bool
-mergeNonInteractive branch
+mergeNonInteractive :: Ref -> CommitMode -> Repo -> IO Bool
+mergeNonInteractive branch commitmode
 	| older "1.7.7.6" = merge [Param $ fromRef branch]
-	| otherwise = merge [Param "--no-edit", Param $ fromRef branch]
+	| otherwise = merge $ [Param "--no-edit", Param $ fromRef branch]
   where
-	merge ps = runBool $ Param "merge" : ps
+	merge ps = runBool $ cp ++ [Param "merge"] ++ ps
+	cp
+		| commitmode == AutomaticCommit =
+			[Param "-c", Param "commit.gpgsign=false"]
+		| otherwise = []
 
 {- Stage the merge into the index, but do not commit it.-}
 stageMerge :: Ref -> Repo -> IO Bool
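A small standalone sketch of the argument construction above: for automatic merges, -c commit.gpgsign=false is prepended so the merge commit is not signed. Plain strings stand in for CommandParam:

data CommitMode = ManualCommit | AutomaticCommit
    deriving (Eq)

-- Arguments for a non-interactive merge; automatic merges disable commit
-- signing via git's -c option.
mergeArgs :: CommitMode -> String -> [String]
mergeArgs commitmode branch = cp ++ ["merge", "--no-edit", branch]
  where
    cp
        | commitmode == AutomaticCommit = ["-c", "commit.gpgsign=false"]
        | otherwise = []

main :: IO ()
main = do
    putStrLn $ unwords $ "git" : mergeArgs AutomaticCommit "refs/heads/synced/master"
    putStrLn $ unwords $ "git" : mergeArgs ManualCommit "refs/heads/synced/master"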
@@ -82,15 +82,15 @@ new lim = Queue 0 (fromMaybe defaultLimit lim) M.empty
 -}
 addCommand :: String -> [CommandParam] -> [FilePath] -> Queue -> Repo -> IO Queue
 addCommand subcommand params files q repo =
-	updateQueue action different (length newfiles) q repo
+	updateQueue action different (length files) q repo
   where
 	key = actionKey action
 	action = CommandAction
 		{ getSubcommand = subcommand
 		, getParams = params
-		, getFiles = newfiles
+		, getFiles = allfiles
 		}
-	newfiles = map File files ++ maybe [] getFiles (M.lookup key $ items q)
+	allfiles = map File files ++ maybe [] getFiles (M.lookup key $ items q)
 
 	different (CommandAction { getSubcommand = s }) = s /= subcommand
 	different _ = True
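The accounting change above is the annex.queuesize fix mentioned in the changelog: the size counter should grow only by the files added in this call, while the stored action still accumulates every file queued for that subcommand. A simplified sketch, with the queue modeled as a size counter plus a map from subcommand to files (not git-annex's real Queue type):

import qualified Data.Map as M

-- Queue modeled as (number of queued files, files per subcommand).
type Queue = (Int, M.Map String [FilePath])

-- The counter grows by the files added now; the stored list still carries
-- everything accumulated for that subcommand.
addCommand :: String -> [FilePath] -> Queue -> Queue
addCommand subcommand files (size, items) =
    (size + length files, M.insert subcommand allfiles items)
  where
    allfiles = files ++ M.findWithDefault [] subcommand items

main :: IO ()
main = print $ addCommand "add" ["b"] $ addCommand "add" ["a"] (0, M.empty)
-- prints (2,fromList [("add",["b","a"])]) rather than counting "a" twice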
@@ -42,6 +42,7 @@ module Locations (
 	gitAnnexJournalDir,
 	gitAnnexJournalLock,
 	gitAnnexPreCommitLock,
+	gitAnnexMergeLock,
 	gitAnnexIndex,
 	gitAnnexIndexStatus,
 	gitAnnexViewIndex,
@@ -262,6 +263,10 @@ gitAnnexJournalLock r = gitAnnexDir r </> "journal.lck"
 gitAnnexPreCommitLock :: Git.Repo -> FilePath
 gitAnnexPreCommitLock r = gitAnnexDir r </> "precommit.lck"
 
+{- Lock file for direct mode merge. -}
+gitAnnexMergeLock :: Git.Repo -> FilePath
+gitAnnexMergeLock r = gitAnnexDir r </> "merge.lck"
+
 {- .git/annex/index is used to stage changes to the git-annex branch -}
 gitAnnexIndex :: Git.Repo -> FilePath
 gitAnnexIndex r = gitAnnexDir r </> "index"
@@ -95,10 +95,12 @@ addMetaData k metadata = addMetaData' k metadata =<< liftIO getPOSIXTime
 - will tend to be generated across the different log files, and so
 - git will be able to pack the data more efficiently. -}
 addMetaData' :: Key -> MetaData -> POSIXTime -> Annex ()
-addMetaData' k (MetaData m) now = Annex.Branch.change (metaDataLogFile k) $
-	showLog . simplifyLog
-		. S.insert (LogEntry now metadata)
-		. parseLog
+addMetaData' k d@(MetaData m) now
+	| d == emptyMetaData = noop
+	| otherwise = Annex.Branch.change (metaDataLogFile k) $
+		showLog . simplifyLog
+			. S.insert (LogEntry now metadata)
+			. parseLog
   where
 	metadata = MetaData $ M.filterWithKey (\f _ -> not (isLastChangedField f)) m
 
Makefile | 7

@@ -59,7 +59,7 @@ retest: git-annex
 
 # hothasktags chokes on some template haskell etc, so ignore errors
 tags:
-	find . | grep -v /.git/ | grep -v /tmp/ | grep -v /dist/ | grep -v /doc/ | egrep '\.hs$$' | xargs hothasktags > tags 2>/dev/null
+	(for f in $$(find . | grep -v /.git/ | grep -v /tmp/ | grep -v /dist/ | grep -v /doc/ | egrep '\.hs$$'); do hothasktags -c --cpp -c -traditional -c --include=dist/build/autogen/cabal_macros.h $$f; done) 2>/dev/null | sort > tags
 
 # If ikiwiki is available, build static html docs suitable for being
 # shipped in the software package.
@@ -83,7 +83,8 @@ clean:
 	rm -rf tmp dist git-annex $(mans) configure *.tix .hpc \
 		doc/.ikiwiki html dist tags Build/SysConfig.hs build-stamp \
 		Setup Build/InstallDesktopFile Build/EvilSplicer \
-		Build/Standalone Build/OSXMkLibs Build/LinuxMkLibs Build/DistributionUpdate \
+		Build/Standalone Build/OSXMkLibs Build/LinuxMkLibs \
+		Build/DistributionUpdate Build/BuildVersion \
 		git-union-merge .tasty-rerun-log
 	find . -name \*.o -exec rm {} \;
 	find . -name \*.hi -exec rm {} \;
@@ -255,7 +256,7 @@ hdevtools:
 distributionupdate:
 	git pull
 	cabal configure
-	ghc --make Build/DistributionUpdate -XPackageImports -optP-include -optPdist/build/autogen/cabal_macros.h
+	ghc -Wall --make Build/DistributionUpdate -XPackageImports -optP-include -optPdist/build/autogen/cabal_macros.h
 	./Build/DistributionUpdate
 
 .PHONY: git-annex git-union-merge git-recover-repository tags build-stamp
@@ -263,10 +263,14 @@ shellOrRsync r ashell arsync = case method of
 - participants, which gcrypt requires is the case, and may not be
 - depending on system configuration.
 -
-- (For shared encryption, gcrypt's default behavior is used.) -}
+- (For shared encryption, gcrypt's default behavior is used.)
+-
+- Also, sets gcrypt-publish-participants to avoid unncessary gpg
+- passphrase prompts.
+-}
 setGcryptEncryption :: RemoteConfig -> String -> Annex ()
 setGcryptEncryption c remotename = do
-	let participants = ConfigKey $ Git.GCrypt.remoteParticipantConfigKey remotename
+	let participants = remoteconfig Git.GCrypt.remoteParticipantConfigKey
 	case extractCipher c of
 		Nothing -> noCrypto
 		Just (EncryptedCipher _ _ (KeyIds { keyIds = ks})) -> do
@@ -278,6 +282,10 @@ setGcryptEncryption c remotename = do
 			(k:_) -> setConfig signingkey k
 		Just (SharedCipher _) ->
 			unsetConfig participants
+	setConfig (remoteconfig Git.GCrypt.remotePublishParticipantConfigKey)
+		(Git.Config.boolConfig True)
+  where
+	remoteconfig n = ConfigKey $ n remotename
 
 store :: Remote -> Remote.Rsync.RsyncOpts -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
 store r rsyncopts (cipher, enck) k p
@@ -191,20 +191,11 @@ tryGitConfigRead r
 	| Git.repoIsHttp r = store geturlconfig
 	| Git.GCrypt.isEncrypted r = handlegcrypt =<< getConfigMaybe (remoteConfig r "uuid")
 	| Git.repoIsUrl r = return r
-	| otherwise = store $ safely $ do
-		s <- Annex.new r
-		Annex.eval s $ do
-			Annex.BranchState.disableUpdate
-			ensureInitialized
-			Annex.getState Annex.repo
+	| otherwise = store $ liftIO $
+		readlocalannexconfig `catchNonAsync` (const $ return r)
   where
 	haveconfig = not . M.null . Git.config
 
-	-- Reading config can fail due to IO error or
-	-- for other reasons; catch all possible exceptions.
-	safely a = either (const $ return r) return
-		=<< liftIO (try a :: IO (Either SomeException Git.Repo))
-
 	pipedconfig cmd params = do
 		v <- Git.Config.fromPipe r cmd params
 		case v of
@@ -283,6 +274,16 @@ tryGitConfigRead r
 		Just v -> store $ liftIO $ setUUID r $
 			genUUIDInNameSpace gCryptNameSpace v
 
+	{- The local repo may not yet be initialized, so try to initialize
+	 - it if allowed. However, if that fails, still return the read
+	 - git config. -}
+	readlocalannexconfig = do
+		s <- Annex.new r
+		Annex.eval s $ do
+			Annex.BranchState.disableUpdate
+			void $ tryAnnex $ ensureInitialized
+			Annex.getState Annex.repo
+
 {- Checks if a given remote has the content for a key inAnnex.
 - If the remote cannot be accessed, or if it cannot determine
 - whether it has the content, returns a Left error message.
@@ -15,7 +15,7 @@ import Common.Annex
 import Types.Remote
 import Types.CleanupActions
 import qualified Annex
-import Annex.LockPool
+import Annex.LockFile
 #ifndef mingw32_HOST_OS
 import Annex.Perms
 #else
@@ -48,7 +48,7 @@ runHooks :: Remote -> Maybe String -> Maybe String -> Annex a -> Annex a
 runHooks r starthook stophook a = do
 	dir <- fromRepo gitAnnexRemotesDir
 	let lck = dir </> remoteid ++ ".lck"
-	whenM (notElem lck . M.keys <$> getPool) $ do
+	whenM (notElem lck . M.keys <$> getLockPool) $ do
 		liftIO $ createDirectoryIfMissing True dir
 		firstrun lck
 	a
@@ -63,7 +63,7 @@ runHooks r starthook stophook a = do
 			-- of it from running the stophook. If another
 			-- instance is shutting down right now, this
 			-- will block waiting for its exclusive lock to clear.
-			lockFile lck
+			lockFileShared lck
 
 			-- The starthook is run even if some other git-annex
 			-- is already running, and ran it before.
Remote/S3.hs | 62

@@ -255,20 +255,28 @@ iaMunge = (>>= munge)
 	| isSpace c = []
 	| otherwise = "&" ++ show (ord c) ++ ";"
 
+{- Generate the bucket if it does not already exist, including creating the
+ - UUID file within the bucket.
+ -
+ - To check if the bucket exists, ask for its location. However, some ACLs
+ - can allow read/write to buckets, but not querying location, so first
+ - check if the UUID file already exists and we can skip doing anything.
+ -}
 genBucket :: RemoteConfig -> UUID -> Annex ()
 genBucket c u = do
 	conn <- s3ConnectionRequired c u
 	showAction "checking bucket"
-	loc <- liftIO $ getBucketLocation conn bucket
-	case loc of
-		Right _ -> writeUUIDFile c u
-		Left err@(NetworkError _) -> s3Error err
-		Left (AWSError _ _) -> do
-			showAction $ "creating bucket in " ++ datacenter
-			res <- liftIO $ createBucketIn conn bucket datacenter
-			case res of
-				Right _ -> writeUUIDFile c u
-				Left err -> s3Error err
+	unlessM ((== Right True) <$> checkUUIDFile c u conn) $ do
+		loc <- liftIO $ getBucketLocation conn bucket
+		case loc of
+			Right _ -> writeUUIDFile c u
+			Left err@(NetworkError _) -> s3Error err
+			Left (AWSError _ _) -> do
+				showAction $ "creating bucket in " ++ datacenter
+				res <- liftIO $ createBucketIn conn bucket datacenter
+				case res of
+					Right _ -> writeUUIDFile c u
+					Left err -> s3Error err
   where
 	bucket = fromJust $ getBucket c
 	datacenter = fromJust $ M.lookup "datacenter" c
@@ -284,20 +292,38 @@ genBucket c u = do
 writeUUIDFile :: RemoteConfig -> UUID -> Annex ()
 writeUUIDFile c u = do
 	conn <- s3ConnectionRequired c u
-	go conn =<< liftIO (tryNonAsync $ getObject conn $ mkobject L.empty)
+	v <- checkUUIDFile c u conn
+	case v of
+		Left e -> error e
+		Right True -> return ()
+		Right False -> do
+			let object = setStorageClass (getStorageClass c) (mkobject uuidb)
+			either s3Error return =<< liftIO (sendObject conn object)
   where
-	go _conn (Right (Right o)) = unless (obj_data o == uuidb) $
-		error $ "This bucket is already in use by a different S3 special remote, with UUID: " ++ show (obj_data o)
-	go conn _ = do
-		let object = setStorageClass (getStorageClass c) (mkobject uuidb)
-		either s3Error return =<< liftIO (sendObject conn object)
-
-	file = filePrefix c ++ "annex-uuid"
+	file = uuidFile c
 	uuidb = L.fromChunks [T.encodeUtf8 $ T.pack $ fromUUID u]
 	bucket = fromJust $ getBucket c
 
 	mkobject = S3Object bucket file "" (getXheaders c)
 
+{- Checks if the UUID file exists in the bucket and has the specified UUID already. -}
+checkUUIDFile :: RemoteConfig -> UUID -> AWSConnection -> Annex (Either String Bool)
+checkUUIDFile c u conn = check <$> liftIO (tryNonAsync $ getObject conn $ mkobject L.empty)
+  where
+	check (Right (Right o))
+		| obj_data o == uuidb = Right True
+		| otherwise = Left $ "This bucket is already in use by a different S3 special remote, with UUID: " ++ show (obj_data o)
+	check _ = Right False
+
+	uuidb = L.fromChunks [T.encodeUtf8 $ T.pack $ fromUUID u]
+	bucket = fromJust $ getBucket c
+	file = uuidFile c
+
+	mkobject = S3Object bucket file "" (getXheaders c)
+
+uuidFile :: RemoteConfig -> FilePath
+uuidFile c = filePrefix c ++ "annex-uuid"
+
 s3ConnectionRequired :: RemoteConfig -> UUID -> Annex AWSConnection
 s3ConnectionRequired c u =
 	maybe (error "Cannot connect to S3") return =<< s3Connection c u
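A standalone sketch of the control flow genBucket gains above: the bucket-location query is skipped entirely when the annex-uuid file already checks out, which is what makes restricted ACLs workable. The IO actions are passed in as stand-ins, not the hS3 API that Remote/S3.hs actually uses, and the error handling is simplified (the real code aborts on network errors instead of creating the bucket):

genBucketSketch
    :: IO (Either String Bool)  -- check the annex-uuid file
    -> IO (Either String ())    -- query bucket location
    -> IO (Either String ())    -- create the bucket
    -> IO ()                    -- write the annex-uuid file
    -> IO ()
genBucketSketch checkuuid getlocation createbucket writeuuid = do
    ok <- checkuuid
    if ok == Right True
        then putStrLn "bucket already set up for this remote"
        else do
            loc <- getlocation
            case loc of
                Right _ -> writeuuid
                Left _ -> do
                    r <- createbucket
                    either error (const writeuuid) r

main :: IO ()
main = genBucketSketch
    (return (Right False))          -- uuid file not present yet
    (return (Left "AccessDenied"))  -- ACL forbids location queries
    (return (Right ()))
    (putStrLn "writing annex-uuid file")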
Test.hs | 100

@@ -22,9 +22,7 @@ import qualified Options.Applicative.Types as Opt
 #endif
 import Control.Exception.Extensible
 import qualified Data.Map as M
-import System.IO.HVFS (SystemFS(..))
 import qualified Text.JSON
-import System.Path
 
 import Common
 
@@ -78,6 +76,7 @@ import qualified Utility.Hash
 import qualified Utility.Scheduled
 import qualified Utility.HumanTime
 import qualified Utility.ThreadScheduler
+import qualified Command.Uninit
 #ifndef mingw32_HOST_OS
 import qualified CmdLine.GitAnnex as GitAnnex
 import qualified Remote.Helper.Encryptable
@@ -218,10 +217,11 @@ unitTests note gettestenv = testGroup ("Unit Tests " ++ note)
 	, check "conflict resolution" test_conflict_resolution
 	, check "conflict resolution movein regression" test_conflict_resolution_movein_regression
 	, check "conflict resolution (mixed directory and file)" test_mixed_conflict_resolution
-	, check "conflict resolution symlinks" test_conflict_resolution_symlinks
+	, check "conflict resolution symlink bit" test_conflict_resolution_symlink_bit
 	, check "conflict resolution (uncommitted local file)" test_uncommitted_conflict_resolution
 	, check "conflict resolution (removed file)" test_remove_conflict_resolution
-	, check "conflict resolution (nonannexed)" test_nonannexed_conflict_resolution
+	, check "conflict resolution (nonannexed file)" test_nonannexed_file_conflict_resolution
+	, check "conflict resolution (nonannexed symlink)" test_nonannexed_symlink_conflict_resolution
 	, check "map" test_map
 	, check "uninit" test_uninit
 	, check "uninit (in git-annex branch)" test_uninit_inbranch
@@ -857,6 +857,7 @@ test_conflict_resolution testenv =
 		let v = filter (variantprefix `isPrefixOf`) l
 		length v == 2
 			@? (what ++ " not exactly 2 variant files in: " ++ show l)
+		conflictor `notElem` l @? ("conflictor still present after conflict resolution")
 		indir testenv d $ do
 			git_annex testenv "get" v @? "get failed"
 			git_annex_expectoutput testenv "find" v v
@@ -946,14 +947,14 @@ test_remove_conflict_resolution testenv = do
 		length v == 1
 			@? (what ++ " too many variant files in: " ++ show v)
 
 {- Check merge confalict resolution when a file is annexed in one repo,
 - and checked directly into git in the other repo.
 -
 - This test requires indirect mode to set it up, but tests both direct and
 - indirect mode.
 -}
-test_nonannexed_conflict_resolution :: TestEnv -> Assertion
-test_nonannexed_conflict_resolution testenv = do
+test_nonannexed_file_conflict_resolution :: TestEnv -> Assertion
+test_nonannexed_file_conflict_resolution testenv = do
 	check True False
 	check False False
 	check True True
@@ -995,6 +996,57 @@ test_nonannexed_conflict_resolution testenv = do
 		s == Just nonannexed_content
 			@? (what ++ " wrong content for nonannexed file: " ++ show s)
 
+
+{- Check merge confalict resolution when a file is annexed in one repo,
+ - and is a non-git-annex symlink in the other repo.
+ -
+ - Test can only run when coreSymlinks is supported, because git needs to
+ - be able to check out the non-git-annex symlink.
+ -}
+test_nonannexed_symlink_conflict_resolution :: TestEnv -> Assertion
+test_nonannexed_symlink_conflict_resolution testenv = do
+	check True False
+	check False False
+	check True True
+	check False True
+  where
+	check inr1 switchdirect = withtmpclonerepo testenv False $ \r1 ->
+		withtmpclonerepo testenv False $ \r2 -> do
+			whenM (checkRepo (Types.coreSymlinks <$> Annex.getGitConfig) r1
+				<&&> isInDirect r1 <&&> isInDirect r2) $ do
+				indir testenv r1 $ do
+					disconnectOrigin
+					writeFile conflictor "conflictor"
+					git_annex testenv "add" [conflictor] @? "add conflicter failed"
+					git_annex testenv "sync" [] @? "sync failed in r1"
+				indir testenv r2 $ do
+					disconnectOrigin
+					createSymbolicLink symlinktarget "conflictor"
+					boolSystem "git" [Params "add", File conflictor] @? "git add conflictor failed"
+					git_annex testenv "sync" [] @? "sync failed in r2"
+				pair testenv r1 r2
+				let l = if inr1 then [r1, r2] else [r2, r1]
+				forM_ l $ \r -> indir testenv r $ do
+					when switchdirect $
+						git_annex testenv "direct" [] @? "failed switching to direct mode"
+					git_annex testenv "sync" [] @? "sync failed"
+				checkmerge ("r1" ++ show switchdirect) r1
+				checkmerge ("r2" ++ show switchdirect) r2
+	conflictor = "conflictor"
+	symlinktarget = "dummy-target"
+	variantprefix = conflictor ++ ".variant"
+	checkmerge what d = do
+		l <- getDirectoryContents d
+		let v = filter (variantprefix `isPrefixOf`) l
+		not (null v)
+			@? (what ++ " conflictor variant file missing in: " ++ show l )
+		length v == 1
+			@? (what ++ " too many variant files in: " ++ show v)
+		conflictor `elem` l @? (what ++ " conflictor file missing in: " ++ show l)
+		s <- catchMaybeIO (readSymbolicLink (d </> conflictor))
+		s == Just symlinktarget
+			@? (what ++ " wrong target for nonannexed symlink: " ++ show s)
+
 {- Check merge conflict resolution when there is a local file,
 - that is not staged or committed, that conflicts with what's being added
 - from the remmote.
@@ -1045,8 +1097,8 @@ test_uncommitted_conflict_resolution testenv = do
 {- On Windows/FAT, repeated conflict resolution sometimes
 - lost track of whether a file was a symlink.
 -}
-test_conflict_resolution_symlinks :: TestEnv -> Assertion
-test_conflict_resolution_symlinks testenv = do
+test_conflict_resolution_symlink_bit :: TestEnv -> Assertion
+test_conflict_resolution_symlink_bit testenv = do
 	withtmpclonerepo testenv False $ \r1 ->
 		withtmpclonerepo testenv False $ \r2 -> do
 			withtmpclonerepo testenv False $ \r3 -> do
@@ -1360,10 +1412,13 @@ intmpclonerepoInDirect testenv a = intmpclonerepo testenv $
 		Annex.Init.initialize Nothing
 		Config.isDirect
 
-isInDirect :: FilePath -> IO Bool
-isInDirect d = do
+checkRepo :: Types.Annex a -> FilePath -> IO a
+checkRepo getval d = do
 	s <- Annex.new =<< Git.Construct.fromPath d
-	not <$> Annex.eval s Config.isDirect
+	Annex.eval s getval
+
+isInDirect :: FilePath -> IO Bool
+isInDirect = checkRepo (not <$> Config.isDirect)
 
 intmpbareclonerepo :: TestEnv -> Assertion -> Assertion
 intmpbareclonerepo testenv a = withtmpclonerepo testenv True $ \r -> indir testenv r a
@@ -1406,9 +1461,9 @@ clonerepo testenv old new bare = do
 	ensuretmpdir
 	let b = if bare then " --bare" else ""
 	boolSystem "git" [Params ("clone -q" ++ b), File old, File new] @? "git clone failed"
+	configrepo testenv new
 	indir testenv new $
 		git_annex testenv "init" ["-q", new] @? "git annex init failed"
-	configrepo testenv new
 	unless bare $
 		indir testenv new $
 			handleforcedirect testenv
@@ -1416,8 +1471,11 @@ clonerepo testenv old new bare = do
 
 configrepo :: TestEnv -> FilePath -> IO ()
 configrepo testenv dir = indir testenv dir $ do
+	-- ensure git is set up to let commits happen
 	boolSystem "git" [Params "config user.name", Param "Test User"] @? "git config failed"
 	boolSystem "git" [Params "config user.email test@example.com"] @? "git config failed"
+	-- avoid signed commits by test suite
+	boolSystem "git" [Params "config commit.gpgsign false"] @? "git config failed"
 
 handleforcedirect :: TestEnv -> IO ()
 handleforcedirect testenv = when (M.lookup "FORCEDIRECT" testenv == Just "1") $
@@ -1434,11 +1492,7 @@ cleanup = cleanup' False
 
 cleanup' :: Bool -> FilePath -> IO ()
 cleanup' final dir = whenM (doesDirectoryExist dir) $ do
-	-- Allow all files and directories to be written to, so
-	-- they can be deleted. Both git and git-annex use file
-	-- permissions to prevent deletion.
-	recurseDir SystemFS dir >>=
-		mapM_ (void . tryIO . Utility.FileMode.allowWrite)
+	Command.Uninit.prepareRemoveAnnexDir dir
 	-- This sometimes fails on Windows, due to some files
 	-- being still opened by a subprocess.
 	catchIO (removeDirectoryRecursive dir) $ \e ->
@@ -17,6 +17,7 @@ data BackendA a = Backend
 	, getKey :: KeySource -> a (Maybe Key)
 	, fsckKey :: Maybe (Key -> FilePath -> a Bool)
 	, canUpgradeKey :: Maybe (Key -> Bool)
+	, fastMigrate :: Maybe (Key -> BackendA a -> Maybe Key)
 	}
 
 instance Show (BackendA a) where
Utility/Bloom.hs | 60 (new file)

@@ -0,0 +1,60 @@
+{- bloomfilter compatability wrapper
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - License: BSD-2-clause
+ -}
+
+{-# LANGUAGE CPP #-}
+
+module Utility.Bloom (
+	Bloom,
+	suggestSizing,
+	Hashable,
+	cheapHashes,
+	notElemB,
+
+	newMB,
+	insertMB,
+	unsafeFreezeMB,
+) where
+
+#if MIN_VERSION_bloomfilter(2,0,0)
+import qualified Data.BloomFilter.Mutable as MBloom
+import qualified Data.BloomFilter as Bloom
+#else
+import qualified Data.BloomFilter as Bloom
+#endif
+import Data.BloomFilter.Easy (suggestSizing, Bloom)
+import Data.BloomFilter.Hash (Hashable, cheapHashes)
+import Control.Monad.ST.Safe (ST)
+
+#if MIN_VERSION_bloomfilter(2,0,0)
+
+notElemB :: a -> Bloom a -> Bool
+notElemB = Bloom.notElem
+
+newMB :: (a -> [Bloom.Hash]) -> Int -> ST s (MBloom.MBloom s a)
+newMB = MBloom.new
+
+insertMB :: MBloom.MBloom s a -> a -> ST s ()
+insertMB = MBloom.insert
+
+unsafeFreezeMB :: MBloom.MBloom s a -> ST s (Bloom a)
+unsafeFreezeMB = Bloom.unsafeFreeze
+
+#else
+
+notElemB :: a -> Bloom a -> Bool
+notElemB = Bloom.notElemB
+
+newMB :: (a -> [Bloom.Hash]) -> Int -> ST s (Bloom.MBloom s a)
+newMB = Bloom.newMB
+
+insertMB :: Bloom.MBloom s a -> a -> ST s ()
+insertMB = Bloom.insertMB
+
+unsafeFreezeMB :: Bloom.MBloom s a -> ST s (Bloom a)
+unsafeFreezeMB = Bloom.unsafeFreezeMB
+
+#endif
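A usage sketch for the Utility.Bloom wrapper above, building a filter in ST and querying it after freezing. It assumes the bloomfilter package is installed and that Data.BloomFilter.Easy.suggestSizing returns a (bits, hashes) pair; the capacity and false-positive rate are arbitrary example values:

import Control.Monad (forM_)
import Control.Monad.ST (runST)
import Utility.Bloom

buildBloom :: [String] -> Bloom String
buildBloom xs = runST $ do
    -- size the filter for ~100000 items at an assumed 0.01% false positive rate
    let (numbits, numhashes) = suggestSizing 100000 0.0001
    mb <- newMB (cheapHashes numhashes) numbits
    forM_ xs $ insertMB mb
    unsafeFreezeMB mb

main :: IO ()
main = do
    let b = buildBloom ["known-key-1", "known-key-2"]
    print (notElemB "unknown-key" b)  -- almost always True
    print (notElemB "known-key-1" b)  -- False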
@@ -21,6 +21,8 @@ import Utility.WinLock
 #ifndef mingw32_HOST_OS
 import System.Posix
 import Control.Concurrent.Async
+#else
+import System.Exit
 #endif
 
 #ifndef mingw32_HOST_OS
@@ -54,15 +56,26 @@ daemonize logfd pidfile changedirectory a = do
 		wait =<< asyncWithUnmask (\unmask -> unmask a)
 		out
 	out = exitImmediately ExitSuccess
+#endif
 
 {- To run an action that is normally daemonized in the forground. -}
+#ifndef mingw32_HOST_OS
 foreground :: Fd -> Maybe FilePath -> IO () -> IO ()
 foreground logfd pidfile a = do
+#else
+foreground :: Maybe FilePath -> IO () -> IO ()
+foreground pidfile a = do
+#endif
 	maybe noop lockPidFile pidfile
+#ifndef mingw32_HOST_OS
 	_ <- tryIO createSession
 	redirLog logfd
+#endif
 	a
+#ifndef mingw32_HOST_OS
 	exitImmediately ExitSuccess
+#else
+	exitWith ExitSuccess
 #endif
 
 {- Locks the pid file, with an exclusive, non-blocking lock,
@@ -1,4 +1,4 @@
-{- directory manipulation
+{- directory traversal and manipulation
 -
 - Copyright 2011-2014 Joey Hess <joey@kitenet.net>
 -
@@ -11,12 +11,20 @@ module Utility.Directory where
 
 import System.IO.Error
 import System.Directory
-import Control.Exception (throw)
+import Control.Exception (throw, bracket)
 import Control.Monad
 import Control.Monad.IfElse
 import System.FilePath
 import Control.Applicative
+import Control.Concurrent
 import System.IO.Unsafe (unsafeInterleaveIO)
+import Data.Maybe
+
+#ifdef mingw32_HOST_OS
+import qualified System.Win32 as Win32
+#else
+import qualified System.Posix as Posix
+#endif
 
 import Utility.PosixFiles
 import Utility.SafeCommand
@@ -133,3 +141,90 @@ nukeFile file = void $ tryWhenExists go
 #else
 	go = removeFile file
 #endif
+
+#ifndef mingw32_HOST_OS
+data DirectoryHandle = DirectoryHandle IsOpen Posix.DirStream
+#else
+data DirectoryHandle = DirectoryHandle IsOpen Win32.HANDLE Win32.FindData (MVar ())
+#endif
+
+type IsOpen = MVar () -- full when the handle is open
+
+openDirectory :: FilePath -> IO DirectoryHandle
+openDirectory path = do
+#ifndef mingw32_HOST_OS
+	dirp <- Posix.openDirStream path
+	isopen <- newMVar ()
+	return (DirectoryHandle isopen dirp)
+#else
+	(h, fdat) <- Win32.findFirstFile (path </> "*")
+	-- Indicate that the fdat contains a filename that readDirectory
+	-- has not yet returned, by making the MVar be full.
+	-- (There's always at least a "." entry.)
+	alreadyhave <- newMVar ()
+	isopen <- newMVar ()
+	return (DirectoryHandle isopen h fdat alreadyhave)
+#endif
+
+closeDirectory :: DirectoryHandle -> IO ()
+#ifndef mingw32_HOST_OS
+closeDirectory (DirectoryHandle isopen dirp) =
+	whenOpen isopen $
+		Posix.closeDirStream dirp
+#else
+closeDirectory (DirectoryHandle isopen h _ alreadyhave) =
+	whenOpen isopen $ do
+		_ <- tryTakeMVar alreadyhave
+		Win32.findClose h
+#endif
+  where
+	whenOpen :: IsOpen -> IO () -> IO ()
+	whenOpen mv f = do
+		v <- tryTakeMVar mv
+		when (isJust v) f
+
+{- |Reads the next entry from the handle. Once the end of the directory
+is reached, returns Nothing and automatically closes the handle.
+-}
+readDirectory :: DirectoryHandle -> IO (Maybe FilePath)
+#ifndef mingw32_HOST_OS
+readDirectory hdl@(DirectoryHandle _ dirp) = do
+	e <- Posix.readDirStream dirp
+	if null e
+		then do
+			closeDirectory hdl
+			return Nothing
+		else return (Just e)
+#else
+readDirectory hdl@(DirectoryHandle _ h fdat mv) = do
+	-- If the MVar is full, then the filename in fdat has
+	-- not yet been returned. Otherwise, need to find the next
+	-- file.
+	r <- tryTakeMVar mv
+	case r of
+		Just () -> getfn
+		Nothing -> do
+			more <- Win32.findNextFile h fdat
+			if more
+				then getfn
+				else do
+					closeDirectory hdl
+					return Nothing
+  where
+	getfn = do
+		filename <- Win32.getFindDataFileName fdat
+		return (Just filename)
+#endif
+
+-- True only when directory exists and contains nothing.
+-- Throws exception if directory does not exist.
+isDirectoryEmpty :: FilePath -> IO Bool
+isDirectoryEmpty d = bracket (openDirectory d) closeDirectory check
+  where
+	check h = do
+		v <- readDirectory h
+		case v of
+			Nothing -> return True
+			Just f
+				| not (dirCruft f) -> return False
+				| otherwise -> check h
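A usage sketch for the streaming directory API added above: walking a directory one entry at a time without reading the whole listing into memory. It relies only on openDirectory/readDirectory/closeDirectory as defined here; the "." and ".." filtering is inlined rather than using dirCruft:

import Control.Applicative ((<$>))
import Control.Exception (bracket)
import Utility.Directory

-- Stream a directory's entries; readDirectory closes the handle itself at
-- end-of-directory, and the extra closeDirectory from bracket is harmless
-- because the IsOpen MVar makes closing idempotent.
listEntries :: FilePath -> IO [FilePath]
listEntries dir = bracket (openDirectory dir) closeDirectory go
  where
    go h = do
        v <- readDirectory h
        case v of
            Nothing -> return []
            Just f
                | f `elem` [".", ".."] -> go h
                | otherwise -> (f :) <$> go h

main :: IO ()
main = mapM_ putStrLn =<< listEntries "."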
@@ -14,6 +14,7 @@ import Utility.SafeCommand
 import Utility.Process
 import Utility.FileSystemEncoding
 import Utility.Misc
+import Utility.Exception
 
 import Data.List
 import Data.Char
@@ -22,7 +23,7 @@ import System.IO
 
 externalSHA :: String -> Int -> FilePath -> IO (Either String String)
 externalSHA command shasize file = do
-	ls <- lines <$> readsha (toCommand [File file])
+	ls <- lines <$> catchDefaultIO "" (readsha (toCommand [File file]))
 	return $ sanitycheck =<< parse ls
   where
 	{- sha commands output the filename, so need to set fileEncoding -}
@@ -15,13 +15,10 @@ import Common
 import System.Posix.Types
 #endif
 
-#ifndef mingw32_HOST_OS
-openLog :: FilePath -> IO Fd
+openLog :: FilePath -> IO Handle
 openLog logfile = do
 	rotateLog logfile
-	openFd logfile WriteOnly (Just stdFileMode)
-		defaultFileFlags { append = True }
-#endif
+	openFile logfile AppendMode
 
 rotateLog :: FilePath -> IO ()
 rotateLog logfile = go 0
@@ -57,8 +57,7 @@ unboundDelay time = do
 waitForTermination :: IO ()
 waitForTermination = do
 #ifdef mingw32_HOST_OS
-	runEvery (Seconds 600) $
-		void getLine
+	forever $ threadDelaySeconds (Seconds 6000)
 #else
 	lock <- newEmptyMVar
 	let check sig = void $
debian/changelog | 71 (vendored)

@@ -1,3 +1,74 @@
+git-annex (5.20140717) unstable; urgency=high
+
+  * Fix minor FD leak in journal code. Closes: #754608
+  * direct: Fix handling of case where a work tree subdirectory cannot
+    be written to due to permissions.
+  * migrate: Avoid re-checksumming when migrating from hashE to hash backend.
+  * uninit: Avoid failing final removal in some direct mode repositories
+    due to file modes.
+  * S3: Deal with AWS ACL configurations that do not allow creating or
+    checking the location of a bucket, but only reading and writing content to
+    it.
+  * resolvemerge: New plumbing command that runs the automatic merge conflict
+    resolver.
+  * Deal with change in git 2.0 that made indirect mode merge conflict
+    resolution leave behind old files.
+  * sync: Fix git sync with local git remotes even when they don't have an
+    annex.uuid set. (The assistant already did so.)
+  * Set gcrypt-publish-participants when setting up a gcrypt repository,
+    to avoid unncessary passphrase prompts.
+    This is a security/usability tradeoff. To avoid exposing the gpg key
+    ids who can decrypt the repository, users can unset
+    gcrypt-publish-participants.
+  * Install nautilus hooks even when ~/.local/share/nautilus/ does not yet
+    exist, since it is not automatically created for Gnome 3 users.
+  * Windows: Move .vbs files out of git\bin, to avoid that being in the
+    PATH, which caused some weird breakage. (Thanks, divB)
+  * Windows: Fix locking issue that prevented the webapp starting
+    (since 5.20140707).
+
+ -- Joey Hess <joeyh@debian.org>  Thu, 17 Jul 2014 11:27:25 -0400
+
+git-annex (5.20140709) unstable; urgency=medium
+
+  * Fix race in direct mode merge code that could cause all files in the
+    repository to be removed. It should be able to recover repositories
+    experiencing this bug without data loss. See:
+    http://git-annex.branchable.com/bugs/bad_merge_commit_deleting_all_files/
+  * Fix git version that supported --no-gpg-sign.
+  * Fix bug in automatic merge conflict resolution, when one side is an
+    annexed symlink, and the other side is a non-annexed symlink.
+  * Really fix bug that caused the assistant to make many unncessary
+    empty merge commits.
+
+ -- Joey Hess <joeyh@debian.org>  Wed, 09 Jul 2014 15:28:03 -0400
+
+git-annex (5.20140707) unstable; urgency=medium
+
+  * assistant: Fix bug, introduced in last release, that caused the assistant
+    to make many unncessary empty merge commits.
+  * assistant: Fix one-way assistant->assistant sync in direct mode.
+  * Fix bug in annex.queuesize calculation that caused much more
+    queue flushing than necessary.
+  * importfeed: When annex.genmetadata is set, metadata from the feed
+    is added to files that are imported from it.
+  * Support users who have set commit.gpgsign, by disabling gpg signatures
+    for git-annex branch commits and commits made by the assistant.
+  * Fix memory leak when committing millions of changes to the git-annex
+    branch, eg after git-annex add has run on 2 million files in one go.
+  * Support building with bloomfilter 2.0.0.
+  * Run standalone install process when the assistant is started
+    (was only being run when the webapp was opened).
+  * Android: patch git to avoid fchmod, which fails on /sdcard.
+  * Windows: Got rid of that pesky DOS box when starting the webapp.
+  * Windows: Added Startup menu item so assistant starts automatically
+    on login.
+  * Windows: Fix opening file browser from webapp when repo is in a
+    directory with spaces.
+  * Windows: Assistant now logs to daemon.log.
+
+ -- Joey Hess <joeyh@debian.org>  Mon, 07 Jul 2014 12:24:13 -0400
+
 git-annex (5.20140613) unstable; urgency=medium
 
   * Ignore setsid failures.
@@ -5,8 +5,8 @@ to retrieve the file's content (its value).
 Multiple pluggable key-value backends are supported, and a single repository
 can use different ones for different files.
 
-* `SHA256E` -- The default backend for new files, combines a SHA256 hash of
-  the file's content with the file's extension. This allows
+* `SHA256E` -- The default backend for new files, combines a 256 bit SHA-2
+  hash of the file's content with the file's extension. This allows
   verifying that the file content is right, and can avoid duplicates of
   files with the same content. Its need to generate checksums
   can make it slower for large files.
@@ -16,12 +16,13 @@ can use different ones for different files.
   the same basename, size, and modification time has the same content.
   This is the least expensive backend, recommended for really large
   files or slow systems.
-* `SHA512`, `SHA512E` -- Best currently available hash, for the very paranoid.
+* `SHA512`, `SHA512E` -- Best SHA-2 hash, for the very paranoid.
 * `SHA1`, `SHA1E` -- Smaller hash than `SHA256` for those who want a checksum
   but are not concerned about security.
 * `SHA384`, `SHA384E`, `SHA224`, `SHA224E` -- Hashes for people who like
   unusual sizes.
-* `SKEIN512`, `SKEIN256` -- [Skein hash](http://en.wikipedia.org/wiki/Skein_hash),
+* `SKEIN512`, `SKEIN512E`, `SKEIN256`, `SKEIN256E`
+  -- [Skein hash](http://en.wikipedia.org/wiki/Skein_hash),
   a well-regarded SHA3 hash competition finalist.
 
 The `annex.backends` git-config setting can be used to list the backends
166 doc/bugs/Android_fails_on_Google_Nexus_10_Jellybean.mdwn Normal file
@ -0,0 +1,166 @@
### Please describe the problem.

Install seems to die because /data/app-lib not found. Sorry, I did not copy. Git-annex log is below.

I tried To run git-annex second time, here's what terminal says.

Falling back to hardcoded app location; cannot find expected files in /data/app-lib
git annex webapp
u0_a36@manta:/sdcard/git-annex.home $ git annex webapp
WARNING: linker: git-annex has text relocations. This is wasting memory and is a security risk. Please fix.
error: fchmod on /sdcard/mediashare/.git/config.lock failed: Operation not permitted
error: fchmod on /sdcard/mediashare/.git/config.lock failed: Operation not permitted

From git terminal, can start web viewer, it offers to make repo. I chose /sdcard/mediashare, result is browser fail:

git [Param "config",Param "annex.uuid",Param "380f6ec2-a7b0-43db-9447-f0de1b5a1b5b"] failed

The install did create /sdcard/mediashare. I did have the sdcard directory all along.

I can't say for sure what else is in file system. ES File manager shows /data exists, but it is empty. But tablet not easy to diagnose

### What steps will reproduce the problem?

Install git-annex.apk from website. I downloaded 20140620.

### What version of git-annex are you using? On what operating system?

Android 4.4.2 on Nexus tablet.

### Please provide any additional information below.

Git-anex-install.log. it is only file in /sdcard/git-annex.home. note it says it is installing to /data/data/. I may manually create that structure and see if a reinstall ends differently.

[[!format sh """
# If you can, paste a complete transcript of the problem occurring here.
# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
Installation starting to /data/data/ga.androidterm
1bebb0d66f3f7c5ac4889b86669cab04ebee9bba
installing busybox
installing git-annex
installing git-shell
installing git-upload-pack
installing git
installing gpg
installing rsync
installing ssh
installing ssh-keygen
linking ./bin/git-upload-archive to git
linking ./bin/git-receive-pack to git
linking ./libexec/git-core/git-help to git
linking ./libexec/git-core/git-fsck to git
linking ./libexec/git-core/git-cat-file to git
linking ./libexec/git-core/git-init to git
linking ./libexec/git-core/git-checkout-index to git
linking ./libexec/git-core/git-notes to git
linking ./libexec/git-core/git-grep to git
linking ./libexec/git-core/git-blame to git
linking ./libexec/git-core/git-verify-tag to git
linking ./libexec/git-core/git-write-tree to git
linking ./libexec/git-core/git-log to git
linking ./libexec/git-core/git-stage to git
linking ./libexec/git-core/git-update-ref to git
linking ./libexec/git-core/git-status to git
linking ./libexec/git-core/git-show-branch to git
linking ./libexec/git-core/git-merge-file to git
linking ./libexec/git-core/git-for-each-ref to git
linking ./libexec/git-core/git to git
linking ./libexec/git-core/git-replace to git
linking ./libexec/git-core/git-update-index to git
linking ./libexec/git-core/git-annotate to git
linking ./libexec/git-core/git-patch-id to git
linking ./libexec/git-core/git-merge-recursive to git
linking ./libexec/git-core/git-rm to git
linking ./libexec/git-core/git-ls-tree to git
linking ./libexec/git-core/git-update-server-info to git
linking ./libexec/git-core/git-diff-tree to git
linking ./libexec/git-core/git-merge-tree to git
linking ./libexec/git-core/git-mktag to git
linking ./libexec/git-core/git-rev-list to git
linking ./libexec/git-core/git-column to git
linking ./libexec/git-core/git-apply to git
linking ./libexec/git-core/git-var to git
linking ./libexec/git-core/git-rev-parse to git
linking ./libexec/git-core/git-archive to git
linking ./libexec/git-core/git-verify-pack to git
linking ./libexec/git-core/git-push to git
linking ./libexec/git-core/git-commit to git
linking ./libexec/git-core/git-tag to git
linking ./libexec/git-core/git-pack-refs to git
linking ./libexec/git-core/git-fmt-merge-msg to git
linking ./libexec/git-core/git-fast-export to git
linking ./libexec/git-core/git-remote-ext to git
linking ./libexec/git-core/git-mailsplit to git
linking ./libexec/git-core/git-send-pack to git
linking ./libexec/git-core/git-diff-index to git
linking ./libexec/git-core/git-mailinfo to git
linking ./libexec/git-core/git-revert to git
linking ./libexec/git-core/git-diff-files to git
linking ./libexec/git-core/git-merge-ours to git
linking ./libexec/git-core/git-show-ref to git
linking ./libexec/git-core/git-diff to git
linking ./libexec/git-core/git-clean to git
linking ./libexec/git-core/git-bundle to git
linking ./libexec/git-core/git-check-mailmap to git
linking ./libexec/git-core/git-describe to git
linking ./libexec/git-core/git-branch to git
linking ./libexec/git-core/git-checkout to git
linking ./libexec/git-core/git-name-rev to git
linking ./libexec/git-core/git-gc to git
linking ./libexec/git-core/git-fetch to git
linking ./libexec/git-core/git-whatchanged to git
linking ./libexec/git-core/git-cherry to git
linking ./libexec/git-core/git-reflog to git
linking ./libexec/git-core/git-hash-object to git
linking ./libexec/git-core/git-init-db to git
linking ./libexec/git-core/git-rerere to git
linking ./libexec/git-core/git-reset to git
linking ./libexec/git-core/git-stripspace to git
linking ./libexec/git-core/git-prune to git
linking ./libexec/git-core/git-mktree to git
linking ./libexec/git-core/git-unpack-file to git
linking ./libexec/git-core/git-remote to git
linking ./libexec/git-core/git-commit-tree to git
linking ./libexec/git-core/git-symbolic-ref to git
linking ./libexec/git-core/git-credential to git
linking ./libexec/git-core/git-check-ignore to git
linking ./libexec/git-core/git-shortlog to git
linking ./libexec/git-core/git-fetch-pack to git
linking ./libexec/git-core/git-clone to git
linking ./libexec/git-core/git-mv to git
linking ./libexec/git-core/git-read-tree to git
linking ./libexec/git-core/git-merge-subtree to git
linking ./libexec/git-core/git-ls-remote to git
linking ./libexec/git-core/git-config to git
linking ./libexec/git-core/git-cherry-pick to git
linking ./libexec/git-core/git-merge to git
linking ./libexec/git-core/git-prune-packed to git
linking ./libexec/git-core/git-count-objects to git
linking ./libexec/git-core/git-merge-base to git
linking ./libexec/git-core/git-index-pack to git
linking ./libexec/git-core/git-repack to git
linking ./libexec/git-core/git-show to git
linking ./libexec/git-core/git-fsck-objects to git
linking ./libexec/git-core/git-format-patch to git
linking ./libexec/git-core/git-bisect--helper to git
linking ./libexec/git-core/git-upload-archive to git
linking ./libexec/git-core/git-ls-files to git
linking ./libexec/git-core/git-check-attr to git
linking ./libexec/git-core/git-get-tar-commit-id to git
linking ./libexec/git-core/git-remote-fd to git
linking ./libexec/git-core/git-unpack-objects to git
linking ./libexec/git-core/git-add to git
linking ./libexec/git-core/git-check-ref-format to git
linking ./libexec/git-core/git-merge-index to git
linking ./libexec/git-core/git-pack-objects to git
linking ./libexec/git-core/git-receive-pack to git
linking ./libexec/git-core/git-pack-redundant to git
linking ./libexec/git-core/git-shell to git-shell
linking ./libexec/git-core/git-upload-pack to git-upload-pack
Installation complete
# End of transcript or log.
"""]]

> [[fixed|done]] --[[Joey]]

@ -0,0 +1,25 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.55"
subject="comment 1"
date="2014-07-03T20:10:41Z"
content="""
This is not an installation problem; the /data/app-lib message is a red herring.

Is /sdcard/mediashare a directory that already existed? If so, perhaps it's some \"mediashare\" thing that has a even more crippled filesystem than usual. Seems possible, but I don't know. Want to rule it out..

The actual failure seems to be when git tries to write to its config.lock file, and changes its permissions. This is a recent change in git, commit daa22c6f8da466bd7a438f1bc27375fd737ffcf3, \"config: preserve config file permissions on edits\".

[[!language C \"\"\"
+    if (fchmod(fd, st.st_mode & 07777) < 0) {
+        error(\"fchmod on %s failed: %s\",
+            lock->filename, strerror(errno));
+        ret = CONFIG_NO_WRITE;
+        goto out_free;
+    }
\"\"\"]]

This seems utterly innocuous; the config file has some mode, and this just sets that same mode back (excluding some high bit flags). But Android goes out of its way to make /sdcard the most craptacular filesystem in use on any Linux system, so I'm not really surprised that it might just refuse all fchmod even when it's a no-op. (This is the only fchmod call currently in git.)

I've patched the bundled git to work around this. Will be a while until there is an updated autobuild..
"""]]
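A rough way to check that theory from a terminal on the device (the path and filename are only examples; any location under the /sdcard mount should behave the same):

    cd /sdcard
    touch chmod-test
    # expected to fail with "Operation not permitted" if the FUSE-backed
    # /sdcard filesystem rejects mode changes outright
    chmod 644 chmod-test
    rm chmod-test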
@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.55"
subject="comment 2"
date="2014-07-04T17:52:35Z"
content="""
This problem should be fixed in the most recent daily build of git-annex for android. Testing appreciated.
"""]]

@ -0,0 +1,64 @@
### Please describe the problem.

The assistant/webapp doesn't drop files from the local (source) repository after transferring it to the 2 backup repositories (numcopies 2), but they are listed with:

    git annex find --want-drop

### What steps will reproduce the problem?

#### mintcream ####

    git init annex
    cd ~/annex
    git commit -m "create" --allow-empty
    git annex init mintcream
    git annex numcopies 2
    git annex group here source
    git config annex.autocommit false
    git annex webapp

#### liquorice ####

    git init annex
    cd ~/annex
    git annex init liquorice
    git annex group here backup

#### candyfloss ####

    git init annex
    cd ~/annex
    git annex init candyfloss
    git annex group here backup

#### mintcream ####

    (add both backup repositories in webapp as "remote repositories")
    (copy files into ~/annex directory)
    git annex add
    git commit -m "add some files"
    (use "sync now" to prod assistant into noticing the commit)

### What was I expecting to happen? ###

The assistant to transfer the files to liquorice and candyfloss, then for the assistant to drop the files from mintcream.

### What actually happened? ###

The assistant transfers the files to liquorice and candyfloss. No files are dropped from mintcream.

### What version of git-annex are you using? On what operating system?

git-annex version: 5.20140707-g923b436

Arch Linux (git-annex-bin from AUR)

### Please provide any additional information below.

I wish to retain control of the commits on "master" (annex.autocommit false) but want the assistant to handle moving/dropping the files as required in the background.

    git annex drop --auto

works as expected.

> [[done]]; user misconfiguration. --[[Joey]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.2"
subject="comment 1"
date="2014-07-10T18:48:17Z"
content="""
Reproduced. `git annex sync --content` has the same problem.

Of course, both it and the assistant *do* check if files can be dropped. For some reason, it is deciding it is not safe to drop the file.
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.2"
subject="user misconfiguration"
date="2014-07-10T19:02:00Z"
content="""
Reason is simple: You manually put the repository into the source group, but its preferred content is not set to \"standard\". No matter what group a repository is in, you have to set its preferred content to something, or git-annex will default to assuming you want the repo to retain all files.

So, `git annex wanted mintcream standard` and away you go. You'll also want to set that for the other 2 repos probably..
"""]]
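Spelled out for this particular setup, a minimal sketch of that fix (repository names as used in this bug report):

    git annex wanted mintcream standard
    git annex wanted liquorice standard
    git annex wanted candyfloss standard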
@ -0,0 +1,40 @@
[[!comment format=mdwn
username="CandyAngel"
ip="81.111.193.130"
subject="comment 3"
date="2014-07-11T10:41:23Z"
content="""
Ohhh I see!

I was expecting \"standard\" to be the default because of what vicfg shows..

    # Repository preferred contents
    # (for web)
    #wanted 00000000-0000-0000-0000-000000000001 = standard
    # (for test)
    #wanted 025d4d21-7648-426c-a406-bb7f27688afe = standard

    # Group preferred contents
    # (Used by repositories with \"groupwanted\" in their preferred contents)
    #groupwanted archive = standard
    #groupwanted backup = standard
    #groupwanted client = standard
    #groupwanted incrementalbackup = standard
    #groupwanted manual = standard
    #groupwanted public = standard
    #groupwanted smallarchive = standard
    #groupwanted source = standard
    #groupwanted transfer = standard

In my experience with configuration files, a commented out line like this:

    #wanted 025d4d21-7648-426c-a406-bb7f27688afe = standard

without any \"this is an example\" text above it means \"this is the default setting\". Everything in vicfg looks like it is the current settings, rather than just placeholders..

I understand why you need to set the wanted explicitly (at least from the command line), but the way information is shown in vicfg led me to interact with it incorrectly. Would it be worth adding a disclaimer that commented lines are examples, not defaults? As far as I am aware, the logic I explained above (commented line == default) is the \"norm\" in *nix configuration files, which would make vicfg non-intuitive.

All I need to do now is not be so bothered by how messy the git-annex branch looks when the assistant is running things! :D

Thankies
"""]]

@ -0,0 +1,14 @@
[[!comment format=mdwn
username="CandyAngel"
ip="81.111.193.130"
subject="comment 4"
date="2014-07-11T10:57:58Z"
content="""
Actually, I'm still a little confused.

If git-annex was presuming I wanted to keep all the files as you say, why were they listed in `git annex find --want-drop` and dropped by `git annex drop --auto`?

Shouldn't they have been empty and a no-op respectively?

There seems to be a difference in the behaviour between the command line (wanting to and actually dropping the files) and the assistant (wanting to keep them) for the same settings.
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.2"
subject="comment 5"
date="2014-07-11T18:27:44Z"
content="""
The assistant defaults to assuming all files are wanted if there's no preferred content settings, while command-line dropping stuff defaults to assuming no files are wanted (or more accurately, that you'll drop anything you don't want and get anything you do) when there's no preferred content settings. So the default differs, but this only matters when not setting preferred contents.

I agree that the vicfg could be misread, so have changed it.
"""]]
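To double-check what is actually in effect after setting preferred content (repository name from this report; this assumes `git annex wanted` with no expression prints the current setting):

    git annex wanted mintcream   # should now print: standard
    git annex vicfg              # only uncommented wanted/groupwanted lines are active settings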
19 doc/bugs/Assistant_merge_loop.mdwn Normal file
@ -0,0 +1,19 @@
The assistant appears to be in a merge loop with at least two of my repos. It's creating thousands of merge commits without any changes. One repository that contains around 600 files that change very very rarely now has 63528 commits.

Here's a screenshot from tig: [[https://ssl.zerodogg.org/~zerodogg/private/tmp/Skjermdump_fra_2014-07-05_07:09:22-2014-07-05.png]]

I can privately provide a copy of the git repo itself if needed.

Using the standalone build, 64bit, on ArchLinux, Fedora 20 and Ubuntu 14.04.

    $ git annex version
    git-annex version: 5.20140610-g5ec8bcf
    build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
    key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
    remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
    local repository version: 5
    supported repository version: 5
    upgrade supported from repository versions: 0 1 2 4

> [[fixed|done]]. Note that 5.20140708 contained an incomplete fix for this
> bug. --[[Joey]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawk9nck8WX8-ADF3Fdh5vFo4Qrw1I_bJcR8"
nickname="Jon Ander"
subject="comment 10"
date="2014-07-16T20:32:57Z"
content="""
I have two computers with Debian testing (5.20140529) that aren't having the issue, and one with Debian Sid (5.20140709) that is still creating the empty merge commits
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.2"
subject="comment 11"
date="2014-07-16T20:36:32Z"
content="""
Has the assistant been restarted since git-annex was upgraded to the fixed version?

Can you post a debug.log?
"""]]
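One way to gather that log (the repository path is just an example; `--debug` is git-annex's global debug switch, and the assistant writes its log to .git/annex/daemon.log, as the bug-report template elsewhere in this changeset notes):

    cd ~/annex                    # the repository the assistant is watching
    git annex assistant --stop
    git annex assistant --debug   # restart with debug logging enabled
    # once the empty merges reappear, attach this file:
    cat .git/annex/daemon.log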
@ -0,0 +1,14 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.55"
subject="comment 1"
date="2014-07-05T20:34:39Z"
content="""
I am seeing some evidence of this in my own family's repo, where one node updated to 5.20140613 and started making series of empty commits with message \"merge refs/heads/synced/master\" and only 1 parent (so not really a merge).

Quite likely [[!commit d6711800ad261fb4c37fc361bc84918d1e296bc4]] is at fault. Probably the fastForwardable check isn't quite right.

This should only affect direct mode repositories. When only one node has the problem, it won't be bad, but if multiple nodes are doing this, their repos never converge and keep growing.

Hmm, I think I have partially reproduced it with 2 direct mode repos, each having the other as a remote. `git annex sync` repeatedly in each does not add unncessary commits, but running the assistant in each does. In this particular case, it manages to converge eventually after several commits.
"""]]

@ -0,0 +1,12 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.55"
subject="comment 2"
date="2014-07-05T21:17:17Z"
content="""
Well, it looks like this is as simple as the assistant trying to merge refs/remotes/$foo/synced/master into the current branch, when that ref is behind or the same as the current branch. Nothing to merge, so it does some pointless work and then the fastForwardable check runs -- and that check looks for refs between the \"old\" and \"new\" refs. Since the \"new\" is behind the \"old\", there are no such commits, and the unnecessary empty commit results.

The reason only the assistant is affected is because `git-annex sync` already checked Git.Branch.changed before trying to do any merging, which avoids the problem.

Fix committed.
"""]]
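The condition described above can be expressed with plain git (the remote name is only an example, taken from a transcript later in this thread; this illustrates the check, not git-annex's actual code):

    # exits 0 when the remote's synced branch is already contained in HEAD,
    # i.e. the case in which there is nothing to merge and no commit should be made
    if git merge-base --is-ancestor refs/remotes/serenity/synced/master HEAD; then
        echo "nothing to merge"
    fi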
@ -0,0 +1,62 @@
[[!comment format=mdwn
username="EskildHustvedt"
ip="80.202.103.55"
subject="comment 3"
date="2014-07-08T08:13:40Z"
content="""
I'm still seeing this problem in 5.20140707-g923b436

    [0 zerodogg@firefly annexed]$ git annex version
    git-annex version: 5.20140707-g923b436
    build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
    key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
    remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
    local repository version: 5
    supported repository version: 5
    upgrade supported from repository versions: 0 1 2 4
    [0 zerodogg@firefly annexed]$ git graph | head -n20
    * d4bf68f - (HEAD, annex/direct/master) merge refs/remotes/serenity/synced/master (3 minutes ago) <Eskild Hustvedt>
    |\
    * | d03f280 - merge refs/remotes/browncoats/synced/master (3 minutes ago) <Eskild Hustvedt>
    |/
    * e6863b8 - (serenity/synced/master, browncoats/synced/master) merge refs/remotes/serenity/synced/master (3 minutes ago) <Eskild Hustvedt>
    |\
    * \ 616d985 - merge refs/remotes/browncoats/synced/master (3 minutes ago) <Eskild Hustvedt>
    |\ \
    | |/
    * | 3b39706 - merge refs/remotes/serenity/synced/master (3 minutes ago) <Eskild Hustvedt>
    |\ \
    | |/
    * | 6d354cc - merge refs/remotes/browncoats/synced/master (4 minutes ago) <Eskild Hustvedt>
    |\ \
    | |/
    * | 710c3c1 - merge refs/remotes/serenity/synced/master (4 minutes ago) <Eskild Hustvedt>
    |\ \
    | |/
    * | 763930f - merge refs/remotes/browncoats/synced/master (4 minutes ago) <Eskild Hustvedt>
    |/
    [0 zerodogg@firefly annexed]$ git annex assistant --stop
    [0 zerodogg@firefly annexed]$ git annex assistant
    [0 zerodogg@firefly annexed]$ git graph | head -n20
    * 947f1a2 - (HEAD, annex/direct/master) merge refs/remotes/serenity/synced/master (15 seconds ago) <Eskild Hustvedt>
    |\
    * | 19c6043 - merge refs/remotes/browncoats/synced/master (18 seconds ago) <Eskild Hustvedt>
    |/
    * b453741 - (serenity/synced/master, browncoats/synced/master) merge refs/remotes/serenity/synced/master (18 seconds ago) <Eskild Hustvedt>
    |\
    * \ 6baaebd - merge refs/remotes/browncoats/synced/master (18 seconds ago) <Eskild Hustvedt>
    |\ \
    | |/
    * | 03e4fa2 - merge refs/remotes/serenity/synced/master (24 seconds ago) <Eskild Hustvedt>
    |\ \
    | |/
    * | 33302d8 - merge refs/remotes/browncoats/synced/master (24 seconds ago) <Eskild Hustvedt>
    |\ \
    | |/
    * | d4bf68f - merge refs/remotes/serenity/synced/master (4 minutes ago) <Eskild Hustvedt>
    |\ \
    | |/
    * | d03f280 - merge refs/remotes/browncoats/synced/master (4 minutes ago) <Eskild Hustvedt>
    |/
    [0 zerodogg@firefly annexed]$
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="EskildHustvedt"
ip="80.202.103.55"
subject="comment 4"
date="2014-07-09T00:24:36Z"
content="""
As far as I can see in my repo, the empty merges started on 2014-05-27, but then appear to resolve themselves after 40-50 commits on that day. They reappear again on 2014-06-03, and appears to have kept going daily ever since.
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.55"
subject="comment 5"
date="2014-07-09T19:00:35Z"
content="""
I have confirmed this is still happening, though I had certianly thought I had reproduced and fixed it.
"""]]
@ -0,0 +1,80 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawmN5jDf53oRJZsTo8Ahj2uXzCzq6HcvEro"
nickname="Gregory"
subject="confirmed?"
date="2014-07-15T01:29:00Z"
content="""
I seem to be getting this behavior, in tandem with the [bad merge commit deleting all files](http://git-annex.branchable.com/bugs/bad_merge_commit_deleting_all_files/) on

    git-annex version: 5.20140709-gf15d2aa
    build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
    key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
    remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
    local repository version: unknown
    supported repository version: 5
    upgrade supported from repository versions: 0 1 2 4

Here is my log over the past couple weeks when I basically made no changes to the filesystem.

    git log --oneline --decorate --color --graph

<pre>
* b304ad7 (HEAD, origin/synced/master, origin/master, origin/HEAD, master)
* 568cf6c merge refs/remotes/diskb/synced/master
* 5e426d0 merge refs/remotes/diskb/synced/master
* b2fa076 merge refs/remotes/diskb/synced/master
* b66a37d merge refs/remotes/diskb/synced/master
|\
* | 910cba5 merge refs/remotes/diskb/synced/master
|/
* 60736c3 merge refs/remotes/diskb/synced/master
* a957439 merge refs/remotes/diskb/synced/master
|\
* \ 5c135c0 merge refs/remotes/diskb/synced/master
|\ \
| |/
* | 52d8b66 merge refs/heads/synced/master
|\ \
* | | d77f3a2 merge refs/remotes/diskb/synced/master
| |/
|/|
* | 03bb56a merge refs/remotes/diskb/synced/master
|\ \
* \ \ bb000db merge refs/heads/synced/master
|\ \ \
| |/ /
|/| /
| |/
* | 3bc8520 merge refs/heads/synced/master
|/
* 1c3ee7e
* d3b096a merge refs/remotes/diskb/synced/master
|\
* \ 0fa0f6d merge refs/heads/synced/master
|\ \
| |/
* | 173592c merge refs/remotes/diskb/synced/master
|\ \
| |/
* | 3dd8086 merge refs/remotes/diskb/synced/master
|\ \
| |/
* | 68be2a1 merge refs/heads/synced/master
|\ \
| |/
* | bb304f4 merge refs/remotes/diskb/synced/master
|\ \
| |/
|/|
* | 1c9a2cd
* | 298b362 merge refs/heads/synced/master
|/
* 4c23257 merge refs/remotes/diskb/synced/master
|\
* | b709997 merge refs/remotes/diskb/synced/master
|/
* 215f061 merge refs/remotes/diskb/synced/master
|\
* \ e0f75b4 merge refs/heads/synced/master
</pre>
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.2"
subject="comment 7"
date="2014-07-15T19:15:13Z"
content="""
This bug and the other one are fixed in 5.20140709. I assume that your `git log` dates from an earlier version.
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawk9nck8WX8-ADF3Fdh5vFo4Qrw1I_bJcR8"
nickname="Jon Ander"
subject="comment 8"
date="2014-07-16T13:42:16Z"
content="""
I'm still having this issue in 5.20140709
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.2"
subject="comment 9"
date="2014-07-16T18:08:26Z"
content="""
Are you sure that you have upgraded git-annex on every machine that uses that repository? You could have one old un-upgraded one still causing commits that would of course be visible on the rest.

Also, what version exactly does `git-annex version` show?
"""]]

@ -54,3 +54,5 @@ Linux quad 3.8.0-34-generic #49~precise1-Ubuntu SMP Wed Nov 13 18:05:00 UTC 2013
"""]]

> Ancient git-annex version. Doubt it affects current version. [[!tag moreinfo]] --[[Joey]]

>> Actually, this is a dup of [[runs_of_of_memory_adding_2_million_files]] so [[done]] --[[Joey]]
@ -0,0 +1,35 @@
### Please describe the problem.

    [380 of 462] Compiling Assistant.WebApp.Types ( Assistant/WebApp/Types.hs, dist/build/git-annex/git-annex-tmp/Assistant/WebApp/Types.o )

    Assistant/WebApp/Types.hs:157:10:
    Duplicate instance declarations:
    instance PathPiece Bool
    -- Defined at Assistant/WebApp/Types.hs:157:10
    instance PathPiece Bool
    -- Defined in `path-pieces-0.1.4:Web.PathPieces'
    cabal: Error: some packages failed to install:
    git-annex-5.20140709 failed during the building phase. The exception was:
    ExitFailure 1

### What steps will reproduce the problem?

    cabal install git-annex --bindir=$HOME/bin

### What version of git-annex are you using? On what operating system?

git-annex-5.20140709, Fedora 20

### Please provide any additional information below.

[[!format sh """
# If you can, paste a complete transcript of the problem occurring here.
# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log


# End of transcript or log.
"""]]

> Already fixed in git yesterday. [[done]] --[[Joey]]
@ -0,0 +1,51 @@
### Please describe the problem.

When creating a symlink in repository A, and creating a regular file under the same name in repository B, syncing B will yield the result that the symlink is lost, and both the original filename and the .variant file will point to the same annex object containing the original content from B.

Both A and B are indirect mode repos.

### What steps will reproduce the problem?

[[!format sh """

#Initial state:

repo-A$ echo file1
This is file 1.
repo-B$ echo file1
This is file 1.

#Make conflicting changes:

repo-A$ ln -s file1 file2; git add file2; git commit -m "Add file2 as symlink."
repo-B$ echo "This is file 2." > file2; git annex add file2; git commit -m "Add file2 as regular file."

#Sync it:

repo-A$ git annex sync
repo-B$ git annex sync

#Strange result in repo-B:

repo-B$ ls -l file2*
file2 -> .git/annex/objects/$HASH1
file2.variant1234 -> .git/annex/objects/$HASH1
repo-B$ cat file2 file2.variantXXXX
This is file 2.
This is file 2.

#Repo-A leaves the symlink change untouched and adds a .variant containing the new regular file data.

repo-A$ ls -l file*
file2 -> file1
file2.variant1234 -> .git/annex/objects/$HASH1
repo-A$ cat file.variant1234
This is file 2.
"""]]

### What version of git-annex are you using? On what operating system?

Linux 3.15.3
git-annex 5.20140613

[[!tag confirmed]]

> [[fixed|done]] --[[Joey]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="zardoz"
ip="134.147.14.84"
subject="comment 1"
date="2014-07-07T14:09:33Z"
content="""
Sorry, the initial «echos» should have been «cat» of course.
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.55"
subject="comment 2"
date="2014-07-07T17:17:49Z"
content="""
Drat, so many bug fixes and test cases and this still got through?
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.55"
subject="comment 3"
date="2014-07-07T17:19:03Z"
content="""
Ah, I see, it's explicitly because the non-git-annex symlink is involved. Whew!
"""]]

@ -0,0 +1,15 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.55"
subject="analysis"
date="2014-07-08T17:52:59Z"
content="""
When resolveMerge' calls graftin to add A's file2 symlink to B's tree, it actually stages the right symlink (the non-annexed one).

However, the work tree is left as-is, so it still has the annexed symlink in it. So git status shows file2 as modified. Later syncs will commit that.

This is why the sync in A doesn't have the problem, as there things are the other way around, and git-annex makes the git-annex symlink, leaving the non-annexed symlink as-is in the work tree.

So, graftin needs to update the work tree. But it's tricky because graftin is called in 3 situations: non-symlink file, directory, and non-annexed symlink.
Interestingly, in the other 2 cases, git-merge already takes care of updating the work tree -- it deletes the annexed symlink and puts in place either the non-symlink file or the directory. It's only the the case of a merge conflict involving 2 symlinks that git merge doesn't update the tree in this way. It's nice to be able to rely on git-merge in the other 2 cases, especially the directory case (avoids having to manually check out the directory).
"""]]
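A quick way to observe the index/work-tree mismatch described in the analysis (run in repo-B right after the conflicted sync; file name as in this report):

    repo-B$ git status --short file2   # shows " M file2": the index holds the grafted symlink, the work tree still has the annexed one
    repo-B$ git diff -- file2          # the stray change that a later sync would commit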
@ -0,0 +1,10 @@
[[!comment format=mdwn
username="zardoz"
ip="78.49.247.112"
subject="comment 5"
date="2014-07-08T19:17:29Z"
content="""
Thanks for the swift fix + analysis! If I’m ever around, I’ll lend you a hand on your next truckload of firewood! ;>

Cheers!
"""]]
Some files were not shown because too many files have changed in this diff