Merge branch 'bs'
commit 37db1fa5a0
230 changed files with 2045 additions and 1413 deletions
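This merge converts many FilePath values to ByteString-backed RawFilePath throughout the tree. As orientation only, below is a minimal, assumed sketch of the conversion helpers the hunks keep calling (fromRawFilePath, toRawFilePath); the real definitions live in git-annex's utility modules and handle the filesystem encoding more carefully than this Char8 approximation.

-- Hedged sketch, not the repository's actual code: RawFilePath is a
-- ByteString path (as in System.Posix.ByteString.FilePath), and the
-- two helpers bridge it to the traditional String-based FilePath.
module RawFilePathSketch (fromRawFilePath, toRawFilePath) where

import qualified Data.ByteString.Char8 as S8
import System.Posix.ByteString.FilePath (RawFilePath)

-- Approximation: the real helpers decode and encode using the
-- filesystem encoding rather than assuming 8-bit characters.
fromRawFilePath :: RawFilePath -> FilePath
fromRawFilePath = S8.unpack

toRawFilePath :: FilePath -> RawFilePath
toRawFilePath = S8.pack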
@@ -112,8 +112,8 @@ adjustToSymlink = adjustToSymlink' gitAnnexLink
 adjustToSymlink' :: (FilePath -> Key -> Git.Repo -> GitConfig -> IO FilePath) -> TreeItem -> Annex (Maybe TreeItem)
 adjustToSymlink' gitannexlink ti@(TreeItem f _m s) = catKey s >>= \case
 	Just k -> do
-		absf <- inRepo $ \r -> absPath $
-			fromTopFilePath f r
+		absf <- inRepo $ \r -> absPath $
+			fromRawFilePath $ fromTopFilePath f r
 		linktarget <- calcRepo $ gitannexlink absf k
 		Just . TreeItem f (fromTreeItemType TreeSymlink)
 			<$> hashSymlink linktarget
@@ -376,7 +376,7 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm
 	-}
 	changestomerge (Just updatedorig) = withOtherTmp $ \othertmpdir -> do
 		tmpwt <- fromRepo gitAnnexMergeDir
-		git_dir <- fromRepo Git.localGitDir
+		git_dir <- fromRawFilePath <$> fromRepo Git.localGitDir
 		withTmpDirIn othertmpdir "git" $ \tmpgit -> withWorkTreeRelated tmpgit $
 			withemptydir tmpwt $ withWorkTree tmpwt $ do
 				liftIO $ writeFile (tmpgit </> "HEAD") (fromRef updatedorig)
@@ -580,7 +580,7 @@ reverseAdjustedTree basis adj csha = do
   where
 	m = M.fromList $ map (\i@(TreeItem f' _ _) -> (norm f', i)) $
 		map diffTreeToTreeItem changes
-	norm = normalise . getTopFilePath
+	norm = normalise . fromRawFilePath . getTopFilePath
 
 diffTreeToTreeItem :: Git.DiffTree.DiffTreeItem -> TreeItem
 diffTreeToTreeItem dti = TreeItem

@@ -5,6 +5,8 @@
  - Licensed under the GNU AGPL version 3 or higher.
  -}
 
+{-# LANGUAGE OverloadedStrings #-}
+
 module Annex.AutoMerge
 	( autoMergeFrom
 	, resolveMerge
@@ -122,7 +124,7 @@ resolveMerge us them inoverlay = do
 	unless (null deleted) $
 		Annex.Queue.addCommand "rm"
 			[Param "--quiet", Param "-f", Param "--"]
-			deleted
+			(map fromRawFilePath deleted)
 	void $ liftIO cleanup2
 
 	when merged $ do
@@ -169,7 +171,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
 		-- Neither side is annexed file; cannot resolve.
 		(Nothing, Nothing) -> return ([], Nothing)
   where
-	file = LsFiles.unmergedFile u
+	file = fromRawFilePath $ LsFiles.unmergedFile u
 
 	getkey select =
 		case select (LsFiles.unmergedSha u) of
@@ -196,30 +198,30 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
 
 	stagefile :: FilePath -> Annex FilePath
 	stagefile f
-		| inoverlay = (</> f) <$> fromRepo Git.repoPath
+		| inoverlay = (</> f) . fromRawFilePath <$> fromRepo Git.repoPath
 		| otherwise = pure f
 
 	makesymlink key dest = do
 		l <- calcRepo $ gitAnnexLink dest key
 		unless inoverlay $ replacewithsymlink dest l
-		dest' <- stagefile dest
+		dest' <- toRawFilePath <$> stagefile dest
 		stageSymlink dest' =<< hashSymlink l
 
 	replacewithsymlink dest link = withworktree dest $ \f ->
-		replaceFile f $ makeGitLink link
+		replaceFile f $ makeGitLink link . toRawFilePath
 
 	makepointer key dest destmode = do
 		unless inoverlay $
 			unlessM (reuseOldFile unstagedmap key file dest) $
 				linkFromAnnex key dest destmode >>= \case
 					LinkAnnexFailed -> liftIO $
-						writePointerFile dest key destmode
+						writePointerFile (toRawFilePath dest) key destmode
 					_ -> noop
-		dest' <- stagefile dest
+		dest' <- toRawFilePath <$> stagefile dest
 		stagePointerFile dest' destmode =<< hashPointerFile key
 		unless inoverlay $
 			Database.Keys.addAssociatedFile key
-				=<< inRepo (toTopFilePath dest)
+				=<< inRepo (toTopFilePath (toRawFilePath dest))
 
 	withworktree f a = a f
 
@@ -239,7 +241,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
 				Nothing -> noop
 				Just sha -> do
 					link <- catSymLinkTarget sha
-					replacewithsymlink item link
+					replacewithsymlink item (fromRawFilePath link)
 		-- And when grafting in anything else vs a symlink,
 		-- the work tree already contains what we want.
 		(_, Just TreeSymlink) -> noop
@@ -290,8 +292,8 @@ cleanConflictCruft resolvedks resolvedfs unstagedmap = do
 	matchesresolved is i f
 		| S.member f fs || S.member (conflictCruftBase f) fs = anyM id
 			[ pure (S.member i is)
-			, inks <$> isAnnexLink f
-			, inks <$> liftIO (isPointerFile f)
+			, inks <$> isAnnexLink (toRawFilePath f)
+			, inks <$> liftIO (isPointerFile (toRawFilePath f))
 			]
 		| otherwise = return False
 
@@ -328,13 +330,13 @@ commitResolvedMerge commitmode = inRepo $ Git.Branch.commitCommand commitmode
 
 type InodeMap = M.Map InodeCacheKey FilePath
 
-inodeMap :: Annex ([FilePath], IO Bool) -> Annex InodeMap
+inodeMap :: Annex ([RawFilePath], IO Bool) -> Annex InodeMap
 inodeMap getfiles = do
 	(fs, cleanup) <- getfiles
 	fsis <- forM fs $ \f -> do
 		mi <- withTSDelta (liftIO . genInodeCache f)
 		return $ case mi of
 			Nothing -> Nothing
-			Just i -> Just (inodeCacheToKey Strongly i, f)
+			Just i -> Just (inodeCacheToKey Strongly i, fromRawFilePath f)
 	void $ liftIO cleanup
 	return $ M.fromList $ catMaybes fsis

@@ -215,7 +215,7 @@ updateTo' pairs = do
  - content is returned.
  -
  - Returns an empty string if the file doesn't exist yet. -}
-get :: FilePath -> Annex L.ByteString
+get :: RawFilePath -> Annex L.ByteString
 get file = do
 	update
 	getLocal file
@@ -224,21 +224,21 @@ get file = do
  - reflect changes in remotes.
  - (Changing the value this returns, and then merging is always the
  - same as using get, and then changing its value.) -}
-getLocal :: FilePath -> Annex L.ByteString
+getLocal :: RawFilePath -> Annex L.ByteString
 getLocal file = go =<< getJournalFileStale file
   where
 	go (Just journalcontent) = return journalcontent
 	go Nothing = getRef fullname file
 
 {- Gets the content of a file as staged in the branch's index. -}
-getStaged :: FilePath -> Annex L.ByteString
+getStaged :: RawFilePath -> Annex L.ByteString
 getStaged = getRef indexref
   where
 	-- This makes git cat-file be run with ":file",
 	-- so it looks at the index.
 	indexref = Ref ""
 
-getHistorical :: RefDate -> FilePath -> Annex L.ByteString
+getHistorical :: RefDate -> RawFilePath -> Annex L.ByteString
 getHistorical date file =
 	-- This check avoids some ugly error messages when the reflog
 	-- is empty.
@@ -247,7 +247,7 @@ getHistorical date file =
 	, getRef (Git.Ref.dateRef fullname date) file
 	)
 
-getRef :: Ref -> FilePath -> Annex L.ByteString
+getRef :: Ref -> RawFilePath -> Annex L.ByteString
 getRef ref file = withIndex $ catFile ref file
 
 {- Applies a function to modify the content of a file.
@@ -255,11 +255,11 @@ getRef ref file = withIndex $ catFile ref file
  - Note that this does not cause the branch to be merged, it only
  - modifes the current content of the file on the branch.
  -}
-change :: Journalable content => FilePath -> (L.ByteString -> content) -> Annex ()
+change :: Journalable content => RawFilePath -> (L.ByteString -> content) -> Annex ()
 change file f = lockJournal $ \jl -> f <$> getLocal file >>= set jl file
 
 {- Applies a function which can modify the content of a file, or not. -}
-maybeChange :: Journalable content => FilePath -> (L.ByteString -> Maybe content) -> Annex ()
+maybeChange :: Journalable content => RawFilePath -> (L.ByteString -> Maybe content) -> Annex ()
 maybeChange file f = lockJournal $ \jl -> do
 	v <- getLocal file
 	case f v of
@@ -269,7 +269,7 @@ maybeChange file f = lockJournal $ \jl -> do
 		_ -> noop
 
 {- Records new content of a file into the journal -}
-set :: Journalable content => JournalLocked -> FilePath -> content -> Annex ()
+set :: Journalable content => JournalLocked -> RawFilePath -> content -> Annex ()
 set = setJournalFile
 
 {- Commit message used when making a commit of whatever data has changed
@@ -353,23 +353,23 @@ commitIndex' jl branchref message basemessage retrynum parents = do
 
 {- Lists all files on the branch. including ones in the journal
  - that have not been committed yet. There may be duplicates in the list. -}
-files :: Annex [FilePath]
+files :: Annex [RawFilePath]
 files = do
 	update
 	-- ++ forces the content of the first list to be buffered in memory,
 	-- so use getJournalledFilesStale which should be much smaller most
 	-- of the time. branchFiles will stream as the list is consumed.
 	(++)
-		<$> getJournalledFilesStale
+		<$> (map toRawFilePath <$> getJournalledFilesStale)
 		<*> branchFiles
 
 {- Files in the branch, not including any from journalled changes,
  - and without updating the branch. -}
-branchFiles :: Annex [FilePath]
+branchFiles :: Annex [RawFilePath]
 branchFiles = withIndex $ inRepo branchFiles'
 
-branchFiles' :: Git.Repo -> IO [FilePath]
-branchFiles' = Git.Command.pipeNullSplitZombie
+branchFiles' :: Git.Repo -> IO [RawFilePath]
+branchFiles' = Git.Command.pipeNullSplitZombie'
 	(lsTreeParams Git.LsTree.LsTreeRecursive fullname [Param "--name-only"])
 
 {- Populates the branch's index file with the current branch contents.
@@ -482,7 +482,7 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do
 			sha <- Git.HashObject.hashFile h path
 			hPutStrLn jlogh file
 			streamer $ Git.UpdateIndex.updateIndexLine
-				sha TreeFile (asTopFilePath $ fileJournal file)
+				sha TreeFile (asTopFilePath $ fileJournal $ toRawFilePath file)
 			genstream dir h jh jlogh streamer
 		-- Clean up the staged files, as listed in the temp log file.
 		-- The temp file is used to avoid needing to buffer all the
@@ -593,7 +593,7 @@ performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
 				if L.null content'
 					then do
 						Annex.Queue.addUpdateIndex
-							=<< inRepo (Git.UpdateIndex.unstageFile file)
+							=<< inRepo (Git.UpdateIndex.unstageFile (fromRawFilePath file))
 						-- File is deleted; can't run any other
 						-- transitions on it.
 						return ()

@ -34,7 +34,7 @@ data FileTransition
|
|||
= ChangeFile Builder
|
||||
| PreserveFile
|
||||
|
||||
type TransitionCalculator = TrustMap -> M.Map UUID RemoteConfig -> FilePath -> L.ByteString -> FileTransition
|
||||
type TransitionCalculator = TrustMap -> M.Map UUID RemoteConfig -> RawFilePath -> L.ByteString -> FileTransition
|
||||
|
||||
getTransitionCalculator :: Transition -> Maybe TransitionCalculator
|
||||
getTransitionCalculator ForgetGitHistory = Nothing
|
||||
|
|
|
@ -39,12 +39,12 @@ import Annex.Link
|
|||
import Annex.CurrentBranch
|
||||
import Types.AdjustedBranch
|
||||
|
||||
catFile :: Git.Branch -> FilePath -> Annex L.ByteString
|
||||
catFile :: Git.Branch -> RawFilePath -> Annex L.ByteString
|
||||
catFile branch file = do
|
||||
h <- catFileHandle
|
||||
liftIO $ Git.CatFile.catFile h branch file
|
||||
|
||||
catFileDetails :: Git.Branch -> FilePath -> Annex (Maybe (L.ByteString, Sha, ObjectType))
|
||||
catFileDetails :: Git.Branch -> RawFilePath -> Annex (Maybe (L.ByteString, Sha, ObjectType))
|
||||
catFileDetails branch file = do
|
||||
h <- catFileHandle
|
||||
liftIO $ Git.CatFile.catFileDetails h branch file
|
||||
|
@ -109,8 +109,8 @@ catKey ref = go =<< catObjectMetaData ref
|
|||
go _ = return Nothing
|
||||
|
||||
{- Gets a symlink target. -}
|
||||
catSymLinkTarget :: Sha -> Annex String
|
||||
catSymLinkTarget sha = fromInternalGitPath . decodeBL <$> get
|
||||
catSymLinkTarget :: Sha -> Annex RawFilePath
|
||||
catSymLinkTarget sha = fromInternalGitPath . L.toStrict <$> get
|
||||
where
|
||||
-- Avoid buffering the whole file content, which might be large.
|
||||
-- 8192 is enough if it really is a symlink.
|
||||
|
@ -137,24 +137,24 @@ catSymLinkTarget sha = fromInternalGitPath . decodeBL <$> get
|
|||
-
|
||||
- So, this gets info from the index, unless running as a daemon.
|
||||
-}
|
||||
catKeyFile :: FilePath -> Annex (Maybe Key)
|
||||
catKeyFile :: RawFilePath -> Annex (Maybe Key)
|
||||
catKeyFile f = ifM (Annex.getState Annex.daemon)
|
||||
( catKeyFileHEAD f
|
||||
, catKey $ Git.Ref.fileRef f
|
||||
)
|
||||
|
||||
catKeyFileHEAD :: FilePath -> Annex (Maybe Key)
|
||||
catKeyFileHEAD :: RawFilePath -> Annex (Maybe Key)
|
||||
catKeyFileHEAD f = catKey $ Git.Ref.fileFromRef Git.Ref.headRef f
|
||||
|
||||
{- Look in the original branch from whence an adjusted branch is based
|
||||
- to find the file. But only when the adjustment hides some files. -}
|
||||
catKeyFileHidden :: FilePath -> CurrBranch -> Annex (Maybe Key)
|
||||
catKeyFileHidden :: RawFilePath -> CurrBranch -> Annex (Maybe Key)
|
||||
catKeyFileHidden = hiddenCat catKey
|
||||
|
||||
catObjectMetaDataHidden :: FilePath -> CurrBranch -> Annex (Maybe (Integer, ObjectType))
|
||||
catObjectMetaDataHidden :: RawFilePath -> CurrBranch -> Annex (Maybe (Integer, ObjectType))
|
||||
catObjectMetaDataHidden = hiddenCat catObjectMetaData
|
||||
|
||||
hiddenCat :: (Ref -> Annex (Maybe a)) -> FilePath -> CurrBranch -> Annex (Maybe a)
|
||||
hiddenCat :: (Ref -> Annex (Maybe a)) -> RawFilePath -> CurrBranch -> Annex (Maybe a)
|
||||
hiddenCat a f (Just origbranch, Just adj)
|
||||
| adjustmentHidesFiles adj = a (Git.Ref.fileFromRef origbranch f)
|
||||
hiddenCat _ _ _ = return Nothing
|
||||
|
|
|
@ -76,7 +76,7 @@ watchChangedRefs = do
|
|||
chan <- liftIO $ newTBMChanIO 100
|
||||
|
||||
g <- gitRepo
|
||||
let refdir = Git.localGitDir g </> "refs"
|
||||
let refdir = fromRawFilePath (Git.localGitDir g) </> "refs"
|
||||
liftIO $ createDirectoryIfMissing True refdir
|
||||
|
||||
let notifyhook = Just $ notifyHook chan
|
||||
|
|
|
@ -89,17 +89,20 @@ import Annex.Content.LowLevel
|
|||
import Annex.Content.PointerFile
|
||||
import Annex.Concurrent
|
||||
import Types.WorkerPool
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
import qualified System.FilePath.ByteString as P
|
||||
|
||||
{- Checks if a given key's content is currently present. -}
|
||||
inAnnex :: Key -> Annex Bool
|
||||
inAnnex key = inAnnexCheck key $ liftIO . doesFileExist
|
||||
inAnnex key = inAnnexCheck key $ liftIO . R.doesPathExist
|
||||
|
||||
{- Runs an arbitrary check on a key's content. -}
|
||||
inAnnexCheck :: Key -> (FilePath -> Annex Bool) -> Annex Bool
|
||||
inAnnexCheck :: Key -> (RawFilePath -> Annex Bool) -> Annex Bool
|
||||
inAnnexCheck key check = inAnnex' id False check key
|
||||
|
||||
{- inAnnex that performs an arbitrary check of the key's content. -}
|
||||
inAnnex' :: (a -> Bool) -> a -> (FilePath -> Annex a) -> Key -> Annex a
|
||||
inAnnex' :: (a -> Bool) -> a -> (RawFilePath -> Annex a) -> Key -> Annex a
|
||||
inAnnex' isgood bad check key = withObjectLoc key $ \loc -> do
|
||||
r <- check loc
|
||||
if isgood r
|
||||
|
@ -120,12 +123,15 @@ inAnnex' isgood bad check key = withObjectLoc key $ \loc -> do
|
|||
{- Like inAnnex, checks if the object file for a key exists,
|
||||
- but there are no guarantees it has the right content. -}
|
||||
objectFileExists :: Key -> Annex Bool
|
||||
objectFileExists key = calcRepo (gitAnnexLocation key) >>= liftIO . doesFileExist
|
||||
objectFileExists key =
|
||||
calcRepo (gitAnnexLocation key)
|
||||
>>= liftIO . R.doesPathExist
|
||||
|
||||
{- A safer check; the key's content must not only be present, but
|
||||
- is not in the process of being removed. -}
|
||||
inAnnexSafe :: Key -> Annex (Maybe Bool)
|
||||
inAnnexSafe key = inAnnex' (fromMaybe True) (Just False) go key
|
||||
inAnnexSafe key =
|
||||
inAnnex' (fromMaybe True) (Just False) (go . fromRawFilePath) key
|
||||
where
|
||||
is_locked = Nothing
|
||||
is_unlocked = Just True
|
||||
|
@ -246,7 +252,7 @@ winLocker _ _ Nothing = return Nothing
|
|||
|
||||
lockContentUsing :: ContentLocker -> Key -> Annex a -> Annex a
|
||||
lockContentUsing locker key a = do
|
||||
contentfile <- calcRepo $ gitAnnexLocation key
|
||||
contentfile <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
lockfile <- contentLockFile key
|
||||
bracket
|
||||
(lock contentfile lockfile)
|
||||
|
@ -474,11 +480,11 @@ moveAnnex key src = ifM (checkSecureHashes key)
|
|||
, return False
|
||||
)
|
||||
where
|
||||
storeobject dest = ifM (liftIO $ doesFileExist dest)
|
||||
storeobject dest = ifM (liftIO $ R.doesPathExist dest)
|
||||
( alreadyhave
|
||||
, modifyContent dest $ do
|
||||
, modifyContent dest' $ do
|
||||
freezeContent src
|
||||
liftIO $ moveFile src dest
|
||||
liftIO $ moveFile src dest'
|
||||
g <- Annex.gitRepo
|
||||
fs <- map (`fromTopFilePath` g)
|
||||
<$> Database.Keys.getAssociatedFiles key
|
||||
|
@ -486,6 +492,8 @@ moveAnnex key src = ifM (checkSecureHashes key)
|
|||
ics <- mapM (populatePointerFile (Restage True) key dest) fs
|
||||
Database.Keys.storeInodeCaches' key [dest] (catMaybes ics)
|
||||
)
|
||||
where
|
||||
dest' = fromRawFilePath dest
|
||||
alreadyhave = liftIO $ removeFile src
|
||||
|
||||
checkSecureHashes :: Key -> Annex Bool
|
||||
|
@ -505,7 +513,7 @@ data LinkAnnexResult = LinkAnnexOk | LinkAnnexFailed | LinkAnnexNoop
|
|||
linkToAnnex :: Key -> FilePath -> Maybe InodeCache -> Annex LinkAnnexResult
|
||||
linkToAnnex key src srcic = ifM (checkSecureHashes key)
|
||||
( do
|
||||
dest <- calcRepo (gitAnnexLocation key)
|
||||
dest <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
modifyContent dest $ linkAnnex To key src srcic dest Nothing
|
||||
, return LinkAnnexFailed
|
||||
)
|
||||
|
@ -515,7 +523,7 @@ linkFromAnnex :: Key -> FilePath -> Maybe FileMode -> Annex LinkAnnexResult
|
|||
linkFromAnnex key dest destmode = do
|
||||
src <- calcRepo (gitAnnexLocation key)
|
||||
srcic <- withTSDelta (liftIO . genInodeCache src)
|
||||
linkAnnex From key src srcic dest destmode
|
||||
linkAnnex From key (fromRawFilePath src) srcic dest destmode
|
||||
|
||||
data FromTo = From | To
|
||||
|
||||
|
@ -534,7 +542,7 @@ data FromTo = From | To
|
|||
linkAnnex :: FromTo -> Key -> FilePath -> Maybe InodeCache -> FilePath -> Maybe FileMode -> Annex LinkAnnexResult
|
||||
linkAnnex _ _ _ Nothing _ _ = return LinkAnnexFailed
|
||||
linkAnnex fromto key src (Just srcic) dest destmode =
|
||||
withTSDelta (liftIO . genInodeCache dest) >>= \case
|
||||
withTSDelta (liftIO . genInodeCache dest') >>= \case
|
||||
Just destic -> do
|
||||
cs <- Database.Keys.getInodeCaches key
|
||||
if null cs
|
||||
|
@ -551,12 +559,13 @@ linkAnnex fromto key src (Just srcic) dest destmode =
|
|||
Linked -> noop
|
||||
checksrcunchanged
|
||||
where
|
||||
dest' = toRawFilePath dest
|
||||
failed = do
|
||||
Database.Keys.addInodeCaches key [srcic]
|
||||
return LinkAnnexFailed
|
||||
checksrcunchanged = withTSDelta (liftIO . genInodeCache src) >>= \case
|
||||
checksrcunchanged = withTSDelta (liftIO . genInodeCache (toRawFilePath src)) >>= \case
|
||||
Just srcic' | compareStrong srcic srcic' -> do
|
||||
destic <- withTSDelta (liftIO . genInodeCache dest)
|
||||
destic <- withTSDelta (liftIO . genInodeCache dest')
|
||||
Database.Keys.addInodeCaches key $
|
||||
catMaybes [destic, Just srcic]
|
||||
return LinkAnnexOk
|
||||
|
@ -567,7 +576,7 @@ linkAnnex fromto key src (Just srcic) dest destmode =
|
|||
{- Removes the annex object file for a key. Lowlevel. -}
|
||||
unlinkAnnex :: Key -> Annex ()
|
||||
unlinkAnnex key = do
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
obj <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
modifyContent obj $ do
|
||||
secureErase obj
|
||||
liftIO $ nukeFile obj
|
||||
|
@ -616,15 +625,15 @@ prepSendAnnex key = withObjectLoc key $ \f -> do
|
|||
else pure cache
|
||||
return $ if null cache'
|
||||
then Nothing
|
||||
else Just (f, sameInodeCache f cache')
|
||||
else Just (fromRawFilePath f, sameInodeCache f cache')
|
||||
|
||||
{- Performs an action, passing it the location to use for a key's content. -}
|
||||
withObjectLoc :: Key -> (FilePath -> Annex a) -> Annex a
|
||||
withObjectLoc :: Key -> (RawFilePath -> Annex a) -> Annex a
|
||||
withObjectLoc key a = a =<< calcRepo (gitAnnexLocation key)
|
||||
|
||||
cleanObjectLoc :: Key -> Annex () -> Annex ()
|
||||
cleanObjectLoc key cleaner = do
|
||||
file <- calcRepo $ gitAnnexLocation key
|
||||
file <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
void $ tryIO $ thawContentDir file
|
||||
cleaner
|
||||
liftIO $ removeparents file (3 :: Int)
|
||||
|
@ -640,8 +649,9 @@ cleanObjectLoc key cleaner = do
|
|||
removeAnnex :: ContentRemovalLock -> Annex ()
|
||||
removeAnnex (ContentRemovalLock key) = withObjectLoc key $ \file ->
|
||||
cleanObjectLoc key $ do
|
||||
secureErase file
|
||||
liftIO $ nukeFile file
|
||||
let file' = fromRawFilePath file
|
||||
secureErase file'
|
||||
liftIO $ nukeFile file'
|
||||
g <- Annex.gitRepo
|
||||
mapM_ (\f -> void $ tryIO $ resetpointer $ fromTopFilePath f g)
|
||||
=<< Database.Keys.getAssociatedFiles key
|
||||
|
@ -655,7 +665,7 @@ removeAnnex (ContentRemovalLock key) = withObjectLoc key $ \file ->
|
|||
-- If it was a hard link to the annex object,
|
||||
-- that object might have been frozen as part of the
|
||||
-- removal process, so thaw it.
|
||||
, void $ tryIO $ thawContent file
|
||||
, void $ tryIO $ thawContent $ fromRawFilePath file
|
||||
)
|
||||
|
||||
{- Check if a file contains the unmodified content of the key.
|
||||
|
@ -663,12 +673,12 @@ removeAnnex (ContentRemovalLock key) = withObjectLoc key $ \file ->
|
|||
- The expensive way to tell is to do a verification of its content.
|
||||
- The cheaper way is to see if the InodeCache for the key matches the
|
||||
- file. -}
|
||||
isUnmodified :: Key -> FilePath -> Annex Bool
|
||||
isUnmodified :: Key -> RawFilePath -> Annex Bool
|
||||
isUnmodified key f = go =<< geti
|
||||
where
|
||||
go Nothing = return False
|
||||
go (Just fc) = isUnmodifiedCheap' key fc <||> expensivecheck fc
|
||||
expensivecheck fc = ifM (verifyKeyContent RetrievalAllKeysSecure AlwaysVerify UnVerified key f)
|
||||
expensivecheck fc = ifM (verifyKeyContent RetrievalAllKeysSecure AlwaysVerify UnVerified key (fromRawFilePath f))
|
||||
( do
|
||||
-- The file could have been modified while it was
|
||||
-- being verified. Detect that.
|
||||
|
@ -691,7 +701,7 @@ isUnmodified key f = go =<< geti
|
|||
- this may report a false positive when repeated edits are made to a file
|
||||
- within a small time window (eg 1 second).
|
||||
-}
|
||||
isUnmodifiedCheap :: Key -> FilePath -> Annex Bool
|
||||
isUnmodifiedCheap :: Key -> RawFilePath -> Annex Bool
|
||||
isUnmodifiedCheap key f = maybe (return False) (isUnmodifiedCheap' key)
|
||||
=<< withTSDelta (liftIO . genInodeCache f)
|
||||
|
||||
|
@ -703,7 +713,7 @@ isUnmodifiedCheap' key fc =
|
|||
- returns the file it was moved to. -}
|
||||
moveBad :: Key -> Annex FilePath
|
||||
moveBad key = do
|
||||
src <- calcRepo $ gitAnnexLocation key
|
||||
src <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
bad <- fromRepo gitAnnexBadDir
|
||||
let dest = bad </> takeFileName src
|
||||
createAnnexDirectory (parentDir dest)
|
||||
|
@ -734,7 +744,7 @@ listKeys keyloc = do
|
|||
if depth < 2
|
||||
then do
|
||||
contents' <- filterM (present s) contents
|
||||
let keys = mapMaybe (fileKey . takeFileName) contents'
|
||||
let keys = mapMaybe (fileKey . P.takeFileName . toRawFilePath) contents'
|
||||
continue keys []
|
||||
else do
|
||||
let deeper = walk s (depth - 1)
|
||||
|
@ -791,7 +801,7 @@ preseedTmp key file = go =<< inAnnex key
|
|||
copy = ifM (liftIO $ doesFileExist file)
|
||||
( return True
|
||||
, do
|
||||
s <- calcRepo $ gitAnnexLocation key
|
||||
s <- fromRawFilePath <$> (calcRepo $ gitAnnexLocation key)
|
||||
liftIO $ ifM (doesFileExist s)
|
||||
( copyFileExternal CopyTimeStamps s file
|
||||
, return False
|
||||
|
@ -808,7 +818,7 @@ dirKeys dirspec = do
|
|||
contents <- liftIO $ getDirectoryContents dir
|
||||
files <- liftIO $ filterM doesFileExist $
|
||||
map (dir </>) contents
|
||||
return $ mapMaybe (fileKey . takeFileName) files
|
||||
return $ mapMaybe (fileKey . P.takeFileName . toRawFilePath) files
|
||||
, return []
|
||||
)
|
||||
|
||||
|
@ -827,7 +837,8 @@ staleKeysPrune dirspec nottransferred = do
|
|||
|
||||
dir <- fromRepo dirspec
|
||||
forM_ dups $ \k ->
|
||||
pruneTmpWorkDirBefore (dir </> keyFile k) (liftIO . removeFile)
|
||||
pruneTmpWorkDirBefore (dir </> fromRawFilePath (keyFile k))
|
||||
(liftIO . removeFile)
|
||||
|
||||
if nottransferred
|
||||
then do
|
||||
|
|
|
@ -128,7 +128,7 @@ checkDiskSpace' need destdir key alreadythere samefilesystem = ifM (Annex.getSta
|
|||
_ -> return True
|
||||
)
|
||||
where
|
||||
dir = maybe (fromRepo gitAnnexDir) return destdir
|
||||
dir = maybe (fromRawFilePath <$> fromRepo gitAnnexDir) return destdir
|
||||
|
||||
needMoreDiskSpace :: Integer -> String
|
||||
needMoreDiskSpace n = "not enough free space, need " ++
|
||||
|
|
|
@ -30,17 +30,19 @@ import Utility.Touch
|
|||
-
|
||||
- Returns an InodeCache if it populated the pointer file.
|
||||
-}
|
||||
populatePointerFile :: Restage -> Key -> FilePath -> FilePath -> Annex (Maybe InodeCache)
|
||||
populatePointerFile :: Restage -> Key -> RawFilePath -> RawFilePath -> Annex (Maybe InodeCache)
|
||||
populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
|
||||
where
|
||||
go (Just k') | k == k' = do
|
||||
destmode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus f
|
||||
liftIO $ nukeFile f
|
||||
(ic, populated) <- replaceFile f $ \tmp -> do
|
||||
ok <- linkOrCopy k obj tmp destmode >>= \case
|
||||
let f' = fromRawFilePath f
|
||||
destmode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus f'
|
||||
liftIO $ nukeFile f'
|
||||
(ic, populated) <- replaceFile f' $ \tmp -> do
|
||||
let tmp' = toRawFilePath tmp
|
||||
ok <- linkOrCopy k (fromRawFilePath obj) tmp destmode >>= \case
|
||||
Just _ -> thawContent tmp >> return True
|
||||
Nothing -> liftIO (writePointerFile tmp k destmode) >> return False
|
||||
ic <- withTSDelta (liftIO . genInodeCache tmp)
|
||||
Nothing -> liftIO (writePointerFile tmp' k destmode) >> return False
|
||||
ic <- withTSDelta (liftIO . genInodeCache tmp')
|
||||
return (ic, ok)
|
||||
maybe noop (restagePointerFile restage f) ic
|
||||
if populated
|
||||
|
@ -51,14 +53,15 @@ populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
|
|||
{- Removes the content from a pointer file, replacing it with a pointer.
|
||||
-
|
||||
- Does not check if the pointer file is modified. -}
|
||||
depopulatePointerFile :: Key -> FilePath -> Annex ()
|
||||
depopulatePointerFile :: Key -> RawFilePath -> Annex ()
|
||||
depopulatePointerFile key file = do
|
||||
st <- liftIO $ catchMaybeIO $ getFileStatus file
|
||||
let file' = fromRawFilePath file
|
||||
st <- liftIO $ catchMaybeIO $ getFileStatus file'
|
||||
let mode = fmap fileMode st
|
||||
secureErase file
|
||||
liftIO $ nukeFile file
|
||||
ic <- replaceFile file $ \tmp -> do
|
||||
liftIO $ writePointerFile tmp key mode
|
||||
secureErase file'
|
||||
liftIO $ nukeFile file'
|
||||
ic <- replaceFile file' $ \tmp -> do
|
||||
liftIO $ writePointerFile (toRawFilePath tmp) key mode
|
||||
#if ! defined(mingw32_HOST_OS)
|
||||
-- Don't advance mtime; this avoids unncessary re-smudging
|
||||
-- by git in some cases.
|
||||
|
@ -66,5 +69,5 @@ depopulatePointerFile key file = do
|
|||
(\t -> touch tmp t False)
|
||||
(fmap modificationTimeHiRes st)
|
||||
#endif
|
||||
withTSDelta (liftIO . genInodeCache tmp)
|
||||
withTSDelta (liftIO . genInodeCache (toRawFilePath tmp))
|
||||
maybe noop (restagePointerFile (Restage True) file) ic
|
||||
|
|
|
@ -54,5 +54,5 @@ setDifferences = do
|
|||
else return ds
|
||||
)
|
||||
forM_ (listDifferences ds') $ \d ->
|
||||
setConfig (ConfigKey $ differenceConfigKey d) (differenceConfigVal d)
|
||||
setConfig (differenceConfigKey d) (differenceConfigVal d)
|
||||
recordDifferences ds' u
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{- git-annex file locations
|
||||
-
|
||||
- Copyright 2010-2017 Joey Hess <id@joeyh.name>
|
||||
- Copyright 2010-2019 Joey Hess <id@joeyh.name>
|
||||
-
|
||||
- Licensed under the GNU AGPL version 3 or higher.
|
||||
-}
|
||||
|
@ -19,7 +19,10 @@ module Annex.DirHashes (
|
|||
|
||||
import Data.Default
|
||||
import Data.Bits
|
||||
import qualified Data.ByteArray
|
||||
import qualified Data.ByteArray as BA
|
||||
import qualified Data.ByteArray.Encoding as BA
|
||||
import qualified Data.ByteString as S
|
||||
import qualified System.FilePath.ByteString as P
|
||||
|
||||
import Common
|
||||
import Key
|
||||
|
@ -28,7 +31,7 @@ import Types.Difference
|
|||
import Utility.Hash
|
||||
import Utility.MD5
|
||||
|
||||
type Hasher = Key -> FilePath
|
||||
type Hasher = Key -> RawFilePath
|
||||
|
||||
-- Number of hash levels to use. 2 is the default.
|
||||
newtype HashLevels = HashLevels Int
|
||||
|
@ -47,7 +50,7 @@ configHashLevels d config
|
|||
| hasDifference d (annexDifferences config) = HashLevels 1
|
||||
| otherwise = def
|
||||
|
||||
branchHashDir :: GitConfig -> Key -> String
|
||||
branchHashDir :: GitConfig -> Key -> S.ByteString
|
||||
branchHashDir = hashDirLower . branchHashLevels
|
||||
|
||||
{- Two different directory hashes may be used. The mixed case hash
|
||||
|
@ -60,19 +63,26 @@ branchHashDir = hashDirLower . branchHashLevels
|
|||
dirHashes :: [HashLevels -> Hasher]
|
||||
dirHashes = [hashDirLower, hashDirMixed]
|
||||
|
||||
hashDirs :: HashLevels -> Int -> String -> FilePath
|
||||
hashDirs (HashLevels 1) sz s = addTrailingPathSeparator $ take sz s
|
||||
hashDirs _ sz s = addTrailingPathSeparator $ take sz s </> drop sz s
|
||||
hashDirs :: HashLevels -> Int -> S.ByteString -> RawFilePath
|
||||
hashDirs (HashLevels 1) sz s = P.addTrailingPathSeparator $ S.take sz s
|
||||
hashDirs _ sz s = P.addTrailingPathSeparator $ h P.</> t
|
||||
where
|
||||
(h, t) = S.splitAt sz s
|
||||
|
||||
hashDirLower :: HashLevels -> Hasher
|
||||
hashDirLower n k = hashDirs n 3 $ take 6 $ show $ md5s $ serializeKey' $ nonChunkKey k
|
||||
hashDirLower n k = hashDirs n 3 $ S.pack $ take 6 $ conv $
|
||||
md5s $ serializeKey' $ nonChunkKey k
|
||||
where
|
||||
conv v = BA.unpack $
|
||||
(BA.convertToBase BA.Base16 v :: BA.Bytes)
|
||||
|
||||
{- This was originally using Data.Hash.MD5 from MissingH. This new version
|
||||
- is faster, but ugly as it has to replicate the 4 Word32's that produced. -}
|
||||
hashDirMixed :: HashLevels -> Hasher
|
||||
hashDirMixed n k = hashDirs n 2 $ take 4 $ concatMap display_32bits_as_dir $
|
||||
encodeWord32 $ map fromIntegral $ Data.ByteArray.unpack $
|
||||
Utility.Hash.md5s $ serializeKey' $ nonChunkKey k
|
||||
hashDirMixed n k = hashDirs n 2 $ S.pack $ take 4 $
|
||||
concatMap display_32bits_as_dir $
|
||||
encodeWord32 $ map fromIntegral $ BA.unpack $
|
||||
Utility.Hash.md5s $ serializeKey' $ nonChunkKey k
|
||||
where
|
||||
encodeWord32 (b1:b2:b3:b4:rest) =
|
||||
(shiftL b4 24 .|. shiftL b3 16 .|. shiftL b2 8 .|. b1)
|
||||
|
|
|
@ -49,7 +49,8 @@ type Reason = String
|
|||
handleDropsFrom :: [UUID] -> [Remote] -> Reason -> Bool -> Key -> AssociatedFile -> [VerifiedCopy] -> (CommandStart -> CommandCleanup) -> Annex ()
|
||||
handleDropsFrom locs rs reason fromhere key afile preverified runner = do
|
||||
g <- Annex.gitRepo
|
||||
l <- map (`fromTopFilePath` g) <$> Database.Keys.getAssociatedFiles key
|
||||
l <- map (`fromTopFilePath` g)
|
||||
<$> Database.Keys.getAssociatedFiles key
|
||||
let fs = case afile of
|
||||
AssociatedFile (Just f) -> nub (f : l)
|
||||
AssociatedFile Nothing -> l
|
||||
|
@ -62,7 +63,7 @@ handleDropsFrom locs rs reason fromhere key afile preverified runner = do
|
|||
(untrusted, have) <- trustPartition UnTrusted locs
|
||||
numcopies <- if null fs
|
||||
then getNumCopies
|
||||
else maximum <$> mapM getFileNumCopies fs
|
||||
else maximum <$> mapM (getFileNumCopies . fromRawFilePath) fs
|
||||
return (NumCopies (length have), numcopies, S.fromList untrusted)
|
||||
|
||||
{- Check that we have enough copies still to drop the content.
|
||||
|
@ -107,7 +108,7 @@ handleDropsFrom locs rs reason fromhere key afile preverified runner = do
|
|||
[ "dropped"
|
||||
, case afile of
|
||||
AssociatedFile Nothing -> serializeKey key
|
||||
AssociatedFile (Just af) -> af
|
||||
AssociatedFile (Just af) -> fromRawFilePath af
|
||||
, "(from " ++ maybe "here" show u ++ ")"
|
||||
, "(copies now " ++ show (fromNumCopies have - 1) ++ ")"
|
||||
, ": " ++ reason
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
- Licensed under the GNU AGPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Annex.Environment where
|
||||
|
||||
import Annex.Common
|
||||
|
@ -45,6 +47,6 @@ ensureCommit a = either retry return =<< tryNonAsync a
|
|||
where
|
||||
retry _ = do
|
||||
name <- liftIO $ either (const "unknown") id <$> myUserName
|
||||
setConfig (ConfigKey "user.name") name
|
||||
setConfig (ConfigKey "user.email") name
|
||||
setConfig "user.name" name
|
||||
setConfig "user.email" name
|
||||
a
|
||||
|
|
|
@ -54,7 +54,7 @@ checkFileMatcher' getmatcher file notconfigured = do
|
|||
matcher <- getmatcher file
|
||||
checkMatcher matcher Nothing afile S.empty notconfigured d
|
||||
where
|
||||
afile = AssociatedFile (Just file)
|
||||
afile = AssociatedFile (Just (toRawFilePath file))
|
||||
-- checkMatcher will never use this, because afile is provided.
|
||||
d = return True
|
||||
|
||||
|
@ -72,7 +72,7 @@ checkMatcher' :: FileMatcher Annex -> MatchInfo -> AssumeNotPresent -> Annex Boo
|
|||
checkMatcher' matcher mi notpresent =
|
||||
matchMrun matcher $ \a -> a notpresent mi
|
||||
|
||||
fileMatchInfo :: FilePath -> Annex MatchInfo
|
||||
fileMatchInfo :: RawFilePath -> Annex MatchInfo
|
||||
fileMatchInfo file = do
|
||||
matchfile <- getTopFilePath <$> inRepo (toTopFilePath file)
|
||||
return $ MatchingFile FileInfo
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
- Licensed under the GNU AGPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Annex.Fixup where
|
||||
|
||||
import Git.Types
|
||||
|
@ -17,6 +19,7 @@ import Utility.SafeCommand
|
|||
import Utility.Directory
|
||||
import Utility.Exception
|
||||
import Utility.Monad
|
||||
import Utility.FileSystemEncoding
|
||||
import Utility.PartialPrelude
|
||||
|
||||
import System.IO
|
||||
|
@ -27,6 +30,8 @@ import Data.Maybe
|
|||
import Control.Monad
|
||||
import Control.Monad.IfElse
|
||||
import qualified Data.Map as M
|
||||
import qualified System.FilePath.ByteString as P
|
||||
import qualified Data.ByteString as S
|
||||
import Control.Applicative
|
||||
import Prelude
|
||||
|
||||
|
@ -50,10 +55,10 @@ disableWildcardExpansion r = r
|
|||
fixupDirect :: Repo -> Repo
|
||||
fixupDirect r@(Repo { location = l@(Local { gitdir = d, worktree = Nothing }) }) = do
|
||||
r
|
||||
{ location = l { worktree = Just (parentDir d) }
|
||||
{ location = l { worktree = Just (toRawFilePath (parentDir (fromRawFilePath d))) }
|
||||
, gitGlobalOpts = gitGlobalOpts r ++
|
||||
[ Param "-c"
|
||||
, Param $ coreBare ++ "=" ++ boolConfig False
|
||||
, Param $ fromConfigKey coreBare ++ "=" ++ boolConfig False
|
||||
]
|
||||
}
|
||||
fixupDirect r = r
|
||||
|
@ -108,12 +113,13 @@ fixupUnusualRepos r@(Repo { location = l@(Local { worktree = Just w, gitdir = d
|
|||
, return r
|
||||
)
|
||||
where
|
||||
dotgit = w </> ".git"
|
||||
dotgit = w P.</> ".git"
|
||||
dotgit' = fromRawFilePath dotgit
|
||||
|
||||
replacedotgit = whenM (doesFileExist dotgit) $ do
|
||||
linktarget <- relPathDirToFile w d
|
||||
nukeFile dotgit
|
||||
createSymbolicLink linktarget dotgit
|
||||
replacedotgit = whenM (doesFileExist dotgit') $ do
|
||||
linktarget <- relPathDirToFile (fromRawFilePath w) (fromRawFilePath d)
|
||||
nukeFile dotgit'
|
||||
createSymbolicLink linktarget dotgit'
|
||||
|
||||
unsetcoreworktree =
|
||||
maybe (error "unset core.worktree failed") (\_ -> return ())
|
||||
|
@ -123,13 +129,13 @@ fixupUnusualRepos r@(Repo { location = l@(Local { worktree = Just w, gitdir = d
|
|||
-- git-worktree sets up a "commondir" file that contains
|
||||
-- the path to the main git directory.
|
||||
-- Using --separate-git-dir does not.
|
||||
catchDefaultIO Nothing (headMaybe . lines <$> readFile (d </> "commondir")) >>= \case
|
||||
catchDefaultIO Nothing (headMaybe . lines <$> readFile (fromRawFilePath (d P.</> "commondir"))) >>= \case
|
||||
Just gd -> do
|
||||
-- Make the worktree's git directory
|
||||
-- contain an annex symlink to the main
|
||||
-- repository's annex directory.
|
||||
let linktarget = gd </> "annex"
|
||||
createSymbolicLink linktarget (dotgit </> "annex")
|
||||
createSymbolicLink linktarget (dotgit' </> "annex")
|
||||
Nothing -> return ()
|
||||
|
||||
-- Repo adjusted, so that symlinks to objects that get checked
|
||||
|
@ -139,12 +145,12 @@ fixupUnusualRepos r@(Repo { location = l@(Local { worktree = Just w, gitdir = d
|
|||
| coreSymlinks c = r { location = l { gitdir = dotgit } }
|
||||
| otherwise = r
|
||||
|
||||
notnoannex = isNothing <$> noAnnexFileContent (Git.repoWorkTree r)
|
||||
notnoannex = isNothing <$> noAnnexFileContent (fmap fromRawFilePath (Git.repoWorkTree r))
|
||||
fixupUnusualRepos r _ = return r
|
||||
|
||||
needsSubmoduleFixup :: Repo -> Bool
|
||||
needsSubmoduleFixup (Repo { location = (Local { worktree = Just _, gitdir = d }) }) =
|
||||
(".git" </> "modules") `isInfixOf` d
|
||||
(".git" P.</> "modules") `S.isInfixOf` d
|
||||
needsSubmoduleFixup _ = False
|
||||
|
||||
needsGitLinkFixup :: Repo -> IO Bool
|
||||
|
@ -152,6 +158,6 @@ needsGitLinkFixup (Repo { location = (Local { worktree = Just wt, gitdir = d })
|
|||
-- Optimization: Avoid statting .git in the common case; only
|
||||
-- when the gitdir is not in the usual place inside the worktree
|
||||
-- might .git be a file.
|
||||
| wt </> ".git" == d = return False
|
||||
| otherwise = doesFileExist (wt </> ".git")
|
||||
| wt P.</> ".git" == d = return False
|
||||
| otherwise = doesFileExist (fromRawFilePath (wt P.</> ".git"))
|
||||
needsGitLinkFixup _ = return False
|
||||
|
|
|
@ -54,7 +54,7 @@ withWorkTree d = withAltRepo
|
|||
(\g -> return $ g { location = modlocation (location g), gitGlobalOpts = gitGlobalOpts g ++ disableSmudgeConfig })
|
||||
(\g g' -> g' { location = location g, gitGlobalOpts = gitGlobalOpts g })
|
||||
where
|
||||
modlocation l@(Local {}) = l { worktree = Just d }
|
||||
modlocation l@(Local {}) = l { worktree = Just (toRawFilePath d) }
|
||||
modlocation _ = error "withWorkTree of non-local git repo"
|
||||
disableSmudgeConfig = map Param
|
||||
[ "-c", "filter.annex.smudge="
|
||||
|
@ -73,7 +73,8 @@ withWorkTreeRelated :: FilePath -> Annex a -> Annex a
|
|||
withWorkTreeRelated d = withAltRepo modrepo unmodrepo
|
||||
where
|
||||
modrepo g = liftIO $ do
|
||||
g' <- addGitEnv g "GIT_COMMON_DIR" =<< absPath (localGitDir g)
|
||||
g' <- addGitEnv g "GIT_COMMON_DIR"
|
||||
=<< absPath (fromRawFilePath (localGitDir g))
|
||||
g'' <- addGitEnv g' "GIT_DIR" d
|
||||
return (g'' { gitEnvOverridesGitDir = True })
|
||||
unmodrepo g g' = g'
|
||||
|
|
|
@ -57,6 +57,7 @@ import Control.Concurrent.STM
|
|||
import qualified Data.Map.Strict as M
|
||||
import qualified Data.Set as S
|
||||
import qualified System.FilePath.Posix as Posix
|
||||
import qualified System.FilePath.ByteString as P
|
||||
|
||||
{- Configures how to build an import tree. -}
|
||||
data ImportTreeConfig
|
||||
|
@ -123,7 +124,7 @@ buildImportCommit remote importtreeconfig importcommitconfig importable =
|
|||
Nothing -> pure committedtree
|
||||
Just dir ->
|
||||
let subtreeref = Ref $
|
||||
fromRef committedtree ++ ":" ++ getTopFilePath dir
|
||||
fromRef committedtree ++ ":" ++ fromRawFilePath (getTopFilePath dir)
|
||||
in fromMaybe emptyTree
|
||||
<$> inRepo (Git.Ref.tree subtreeref)
|
||||
updateexportdb importedtree
|
||||
|
@ -267,9 +268,9 @@ buildImportTrees basetree msubdir importable = History
|
|||
let lf = fromImportLocation loc
|
||||
let treepath = asTopFilePath lf
|
||||
let topf = asTopFilePath $
|
||||
maybe lf (\sd -> getTopFilePath sd </> lf) msubdir
|
||||
maybe lf (\sd -> getTopFilePath sd P.</> lf) msubdir
|
||||
relf <- fromRepo $ fromTopFilePath topf
|
||||
symlink <- calcRepo $ gitAnnexLink relf k
|
||||
symlink <- calcRepo $ gitAnnexLink (fromRawFilePath relf) k
|
||||
linksha <- hashSymlink symlink
|
||||
return $ TreeItem treepath (fromTreeItemType TreeSymlink) linksha
|
||||
|
||||
|
@ -327,7 +328,7 @@ downloadImport remote importtreeconfig importablecontents = do
|
|||
(k:_) -> return $ Left $ Just (loc, k)
|
||||
[] -> do
|
||||
job <- liftIO $ newEmptyTMVarIO
|
||||
let ai = ActionItemOther (Just (fromImportLocation loc))
|
||||
let ai = ActionItemOther (Just (fromRawFilePath (fromImportLocation loc)))
|
||||
let downloadaction = starting ("import " ++ Remote.name remote) ai $ do
|
||||
when oldversion $
|
||||
showNote "old version"
|
||||
|
@ -368,9 +369,9 @@ downloadImport remote importtreeconfig importablecontents = do
|
|||
|
||||
mkkey loc tmpfile = do
|
||||
f <- fromRepo $ fromTopFilePath $ locworktreefilename loc
|
||||
backend <- chooseBackend f
|
||||
backend <- chooseBackend (fromRawFilePath f)
|
||||
let ks = KeySource
|
||||
{ keyFilename = f
|
||||
{ keyFilename = (fromRawFilePath f)
|
||||
, contentLocation = tmpfile
|
||||
, inodeCache = Nothing
|
||||
}
|
||||
|
@ -379,7 +380,7 @@ downloadImport remote importtreeconfig importablecontents = do
|
|||
locworktreefilename loc = asTopFilePath $ case importtreeconfig of
|
||||
ImportTree -> fromImportLocation loc
|
||||
ImportSubTree subdir _ ->
|
||||
getTopFilePath subdir </> fromImportLocation loc
|
||||
getTopFilePath subdir P.</> fromImportLocation loc
|
||||
|
||||
getcidkey cidmap db cid = liftIO $
|
||||
CIDDb.getContentIdentifierKeys db rs cid >>= \case
|
||||
|
@ -450,7 +451,7 @@ wantImport :: FileMatcher Annex -> ImportLocation -> ByteSize -> Annex Bool
|
|||
wantImport matcher loc sz = checkMatcher' matcher mi mempty
|
||||
where
|
||||
mi = MatchingInfo $ ProvidedInfo
|
||||
{ providedFilePath = Right $ fromImportLocation loc
|
||||
{ providedFilePath = Right $ fromRawFilePath $ fromImportLocation loc
|
||||
, providedKey = unavail "key"
|
||||
, providedFileSize = Right sz
|
||||
, providedMimeType = unavail "mime"
|
||||
|
@ -503,4 +504,4 @@ listImportableContents r = fmap removegitspecial
|
|||
, importableHistory =
|
||||
map removegitspecial (importableHistory ic)
|
||||
}
|
||||
gitspecial l = ".git" `elem` Posix.splitDirectories (fromImportLocation l)
|
||||
gitspecial l = ".git" `elem` Posix.splitDirectories (fromRawFilePath (fromImportLocation l))
|
||||
|
|
|
@ -92,7 +92,7 @@ lockDown' cfg file = tryIO $ ifM crippledFileSystem
|
|||
nohardlink = withTSDelta $ liftIO . nohardlink'
|
||||
|
||||
nohardlink' delta = do
|
||||
cache <- genInodeCache file delta
|
||||
cache <- genInodeCache (toRawFilePath file) delta
|
||||
return $ LockedDown cfg $ KeySource
|
||||
{ keyFilename = file
|
||||
, contentLocation = file
|
||||
|
@ -112,7 +112,7 @@ lockDown' cfg file = tryIO $ ifM crippledFileSystem
|
|||
|
||||
withhardlink' delta tmpfile = do
|
||||
createLink file tmpfile
|
||||
cache <- genInodeCache tmpfile delta
|
||||
cache <- genInodeCache (toRawFilePath tmpfile) delta
|
||||
return $ LockedDown cfg $ KeySource
|
||||
{ keyFilename = file
|
||||
, contentLocation = tmpfile
|
||||
|
@ -136,7 +136,7 @@ ingestAdd' meterupdate ld@(Just (LockedDown cfg source)) mk = do
|
|||
then addLink f k mic
|
||||
else do
|
||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus (contentLocation source)
|
||||
stagePointerFile f mode =<< hashPointerFile k
|
||||
stagePointerFile (toRawFilePath f) mode =<< hashPointerFile k
|
||||
return (Just k)
|
||||
|
||||
{- Ingests a locked down file into the annex. Does not update the working
|
||||
|
@ -187,7 +187,7 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
|
|||
gounlocked _ _ _ = failure "failed statting file"
|
||||
|
||||
success k mcache s = do
|
||||
genMetaData k (keyFilename source) s
|
||||
genMetaData k (toRawFilePath (keyFilename source)) s
|
||||
return (Just k, mcache)
|
||||
|
||||
failure msg = do
|
||||
|
@ -202,7 +202,8 @@ finishIngestUnlocked key source = do
|
|||
|
||||
finishIngestUnlocked' :: Key -> KeySource -> Restage -> Annex ()
|
||||
finishIngestUnlocked' key source restage = do
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath (keyFilename source))
|
||||
Database.Keys.addAssociatedFile key
|
||||
=<< inRepo (toTopFilePath (toRawFilePath (keyFilename source)))
|
||||
populateAssociatedFiles key source restage
|
||||
|
||||
{- Copy to any other locations using the same key. -}
|
||||
|
@ -211,7 +212,7 @@ populateAssociatedFiles key source restage = do
|
|||
obj <- calcRepo (gitAnnexLocation key)
|
||||
g <- Annex.gitRepo
|
||||
ingestedf <- flip fromTopFilePath g
|
||||
<$> inRepo (toTopFilePath (keyFilename source))
|
||||
<$> inRepo (toTopFilePath (toRawFilePath (keyFilename source)))
|
||||
afs <- map (`fromTopFilePath` g) <$> Database.Keys.getAssociatedFiles key
|
||||
forM_ (filter (/= ingestedf) afs) $
|
||||
populatePointerFile restage key obj
|
||||
|
@ -226,8 +227,8 @@ cleanCruft source = when (contentLocation source /= keyFilename source) $
|
|||
cleanOldKeys :: FilePath -> Key -> Annex ()
|
||||
cleanOldKeys file newkey = do
|
||||
g <- Annex.gitRepo
|
||||
ingestedf <- flip fromTopFilePath g <$> inRepo (toTopFilePath file)
|
||||
topf <- inRepo (toTopFilePath file)
|
||||
topf <- inRepo (toTopFilePath (toRawFilePath file))
|
||||
ingestedf <- fromRepo $ fromTopFilePath topf
|
||||
oldkeys <- filter (/= newkey)
|
||||
<$> Database.Keys.getAssociatedKey topf
|
||||
forM_ oldkeys $ \key ->
|
||||
|
@ -243,7 +244,7 @@ cleanOldKeys file newkey = do
|
|||
-- so no need for any recovery.
|
||||
(f:_) -> do
|
||||
ic <- withTSDelta (liftIO . genInodeCache f)
|
||||
void $ linkToAnnex key f ic
|
||||
void $ linkToAnnex key (fromRawFilePath f) ic
|
||||
_ -> logStatus key InfoMissing
|
||||
|
||||
{- On error, put the file back so it doesn't seem to have vanished.
|
||||
|
@ -254,7 +255,7 @@ restoreFile file key e = do
|
|||
liftIO $ nukeFile file
|
||||
-- The key could be used by other files too, so leave the
|
||||
-- content in the annex, and make a copy back to the file.
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
obj <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
unlessM (liftIO $ copyFileExternal CopyTimeStamps obj file) $
|
||||
warning $ "Unable to restore content of " ++ file ++ "; it should be located in " ++ obj
|
||||
thawContent file
|
||||
|
@ -264,7 +265,7 @@ restoreFile file key e = do
|
|||
makeLink :: FilePath -> Key -> Maybe InodeCache -> Annex String
|
||||
makeLink file key mcache = flip catchNonAsync (restoreFile file key) $ do
|
||||
l <- calcRepo $ gitAnnexLink file key
|
||||
replaceFile file $ makeAnnexLink l
|
||||
replaceFile file $ makeAnnexLink l . toRawFilePath
|
||||
|
||||
-- touch symlink to have same time as the original file,
|
||||
-- as provided in the InodeCache
|
||||
|
@ -291,7 +292,7 @@ addLink file key mcache = ifM (coreSymlinks <$> Annex.getGitConfig)
|
|||
Annex.Queue.addCommand "add" (ps++[Param "--"]) [file]
|
||||
, do
|
||||
l <- makeLink file key mcache
|
||||
addAnnexLink l file
|
||||
addAnnexLink l (toRawFilePath file)
|
||||
)
|
||||
|
||||
{- Parameters to pass to git add, forcing addition of ignored files. -}
|
||||
|
@ -329,8 +330,8 @@ addAnnexedFile file key mtmp = ifM addUnlocked
|
|||
(pure Nothing)
|
||||
(\tmp -> liftIO $ catchMaybeIO $ fileMode <$> getFileStatus tmp)
|
||||
mtmp
|
||||
stagePointerFile file mode =<< hashPointerFile key
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath file)
|
||||
stagePointerFile (toRawFilePath file) mode =<< hashPointerFile key
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath (toRawFilePath file))
|
||||
case mtmp of
|
||||
Just tmp -> ifM (moveAnnex key tmp)
|
||||
( linkunlocked mode >> return True
|
||||
|
@ -349,6 +350,6 @@ addAnnexedFile file key mtmp = ifM addUnlocked
|
|||
where
|
||||
linkunlocked mode = linkFromAnnex key file mode >>= \case
|
||||
LinkAnnexFailed -> liftIO $
|
||||
writePointerFile file key mode
|
||||
writePointerFile (toRawFilePath file) key mode
|
||||
_ -> return ()
|
||||
writepointer mode = liftIO $ writePointerFile file key mode
|
||||
writepointer mode = liftIO $ writePointerFile (toRawFilePath file) key mode
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
-}
|
||||
|
||||
{-# LANGUAGE CPP #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Annex.Init (
|
||||
ensureInitialized,
|
||||
|
@ -22,6 +23,7 @@ import qualified Annex
|
|||
import qualified Git
|
||||
import qualified Git.Config
|
||||
import qualified Git.Objects
|
||||
import Git.Types (fromConfigValue)
|
||||
import qualified Annex.Branch
|
||||
import Logs.UUID
|
||||
import Logs.Trust.Basic
|
||||
|
@ -54,7 +56,7 @@ import Data.Either
|
|||
import qualified Data.Map as M
|
||||
|
||||
checkCanInitialize :: Annex a -> Annex a
|
||||
checkCanInitialize a = inRepo (noAnnexFileContent . Git.repoWorkTree) >>= \case
|
||||
checkCanInitialize a = inRepo (noAnnexFileContent . fmap fromRawFilePath . Git.repoWorkTree) >>= \case
|
||||
Nothing -> a
|
||||
Just noannexmsg -> do
|
||||
warning "Initialization prevented by .noannex file (remove the file to override)"
|
||||
|
@ -65,7 +67,9 @@ checkCanInitialize a = inRepo (noAnnexFileContent . Git.repoWorkTree) >>= \case
|
|||
genDescription :: Maybe String -> Annex UUIDDesc
|
||||
genDescription (Just d) = return $ UUIDDesc $ encodeBS d
|
||||
genDescription Nothing = do
|
||||
reldir <- liftIO . relHome =<< liftIO . absPath =<< fromRepo Git.repoPath
|
||||
reldir <- liftIO . relHome
|
||||
=<< liftIO . absPath . fromRawFilePath
|
||||
=<< fromRepo Git.repoPath
|
||||
hostname <- fromMaybe "" <$> liftIO getHostname
|
||||
let at = if null hostname then "" else "@"
|
||||
v <- liftIO myUserName
|
||||
|
@ -204,7 +208,7 @@ checkCrippledFileSystem = whenM probeCrippledFileSystem $ do
|
|||
- filesystem. -}
|
||||
whenM (coreSymlinks <$> Annex.getGitConfig) $ do
|
||||
warning "Disabling core.symlinks."
|
||||
setConfig (ConfigKey "core.symlinks")
|
||||
setConfig "core.symlinks"
|
||||
(Git.Config.boolConfig False)
|
||||
|
||||
probeLockSupport :: Annex Bool
|
||||
|
@ -274,5 +278,5 @@ initSharedClone True = do
|
|||
- affect it. -}
|
||||
propigateSecureHashesOnly :: Annex ()
|
||||
propigateSecureHashesOnly =
|
||||
maybe noop (setConfig (ConfigKey "annex.securehashesonly"))
|
||||
maybe noop (setConfig "annex.securehashesonly" . fromConfigValue)
|
||||
=<< getGlobalConfig "annex.securehashesonly"
|
||||
|
|
|
@ -29,7 +29,7 @@ compareInodeCachesWith = ifM inodesChanged ( return Weakly, return Strongly )
|
|||
|
||||
{- Checks if one of the provided old InodeCache matches the current
|
||||
- version of a file. -}
|
||||
sameInodeCache :: FilePath -> [InodeCache] -> Annex Bool
|
||||
sameInodeCache :: RawFilePath -> [InodeCache] -> Annex Bool
|
||||
sameInodeCache _ [] = return False
|
||||
sameInodeCache file old = go =<< withTSDelta (liftIO . genInodeCache file)
|
||||
where
|
||||
|
@ -78,7 +78,7 @@ createInodeSentinalFile :: Bool -> Annex ()
|
|||
createInodeSentinalFile evenwithobjects =
|
||||
unlessM (alreadyexists <||> hasobjects) $ do
|
||||
s <- annexSentinalFile
|
||||
createAnnexDirectory (parentDir (sentinalFile s))
|
||||
createAnnexDirectory (parentDir (fromRawFilePath (sentinalFile s)))
|
||||
liftIO $ writeSentinalFile s
|
||||
where
|
||||
alreadyexists = liftIO. sentinalFileExists =<< annexSentinalFile
|
||||
|
|
|
@ -20,7 +20,9 @@ import Utility.Directory.Stream
|
|||
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
import qualified Data.ByteString as S
|
||||
import qualified System.FilePath.ByteString as P
|
||||
import Data.ByteString.Builder
|
||||
import Data.Char
|
||||
|
||||
class Journalable t where
|
||||
writeJournalHandle :: Handle -> t -> IO ()
|
||||
|
@ -44,18 +46,18 @@ instance Journalable Builder where
|
|||
- getJournalFileStale to always return a consistent journal file
|
||||
- content, although possibly not the most current one.
|
||||
-}
|
||||
setJournalFile :: Journalable content => JournalLocked -> FilePath -> content -> Annex ()
|
||||
setJournalFile :: Journalable content => JournalLocked -> RawFilePath -> content -> Annex ()
|
||||
setJournalFile _jl file content = withOtherTmp $ \tmp -> do
|
||||
createAnnexDirectory =<< fromRepo gitAnnexJournalDir
|
||||
-- journal file is written atomically
|
||||
jfile <- fromRepo $ journalFile file
|
||||
jfile <- fromRawFilePath <$> fromRepo (journalFile file)
|
||||
let tmpfile = tmp </> takeFileName jfile
|
||||
liftIO $ do
|
||||
withFile tmpfile WriteMode $ \h -> writeJournalHandle h content
|
||||
moveFile tmpfile jfile
|
||||
|
||||
{- Gets any journalled content for a file in the branch. -}
getJournalFile :: JournalLocked -> FilePath -> Annex (Maybe L.ByteString)
getJournalFile :: JournalLocked -> RawFilePath -> Annex (Maybe L.ByteString)
getJournalFile _jl = getJournalFileStale

{- Without locking, this is not guaranteed to be the most recent
@@ -69,9 +71,9 @@ getJournalFile _jl = getJournalFileStale
- concurrency or other issues with a lazy read, and the minor loss of
- laziness doesn't matter much, as the files are not very large.
-}
getJournalFileStale :: FilePath -> Annex (Maybe L.ByteString)
getJournalFileStale :: RawFilePath -> Annex (Maybe L.ByteString)
getJournalFileStale file = inRepo $ \g -> catchMaybeIO $
L.fromStrict <$> S.readFile (journalFile file g)
L.fromStrict <$> S.readFile (fromRawFilePath $ journalFile file g)

{- List of existing journal files, but without locking, may miss new ones
- just being added, or may have false positives if the journal is staged
@@ -81,7 +83,8 @@ getJournalledFilesStale = do
g <- gitRepo
fs <- liftIO $ catchDefaultIO [] $
getDirectoryContents $ gitAnnexJournalDir g
return $ filter (`notElem` [".", ".."]) $ map fileJournal fs
return $ filter (`notElem` [".", ".."]) $
map (fromRawFilePath . fileJournal . toRawFilePath) fs

withJournalHandle :: (DirectoryHandle -> IO a) -> Annex a
withJournalHandle a = do
@@ -102,19 +105,33 @@ journalDirty = do
- used in the branch is not necessary, and all the files are put directly
- in the journal directory.
-}
journalFile :: FilePath -> Git.Repo -> FilePath
journalFile file repo = gitAnnexJournalDir repo </> concatMap mangle file
journalFile :: RawFilePath -> Git.Repo -> RawFilePath
journalFile file repo = gitAnnexJournalDir' repo P.</> S.concatMap mangle file
where
mangle c
| c == pathSeparator = "_"
| c == '_' = "__"
| otherwise = [c]
| P.isPathSeparator c = S.singleton underscore
| c == underscore = S.pack [underscore, underscore]
| otherwise = S.singleton c
underscore = fromIntegral (ord '_')

{- Converts a journal file (relative to the journal dir) back to the
- filename on the branch. -}
fileJournal :: FilePath -> FilePath
fileJournal = replace [pathSeparator, pathSeparator] "_" .
replace "_" [pathSeparator]
fileJournal :: RawFilePath -> RawFilePath
fileJournal = go
where
go b =
let (h, t) = S.break (== underscore) b
in h <> case S.uncons t of
Nothing -> t
Just (_u, t') -> case S.uncons t' of
Nothing -> t'
Just (w, t'')
| w == underscore ->
S.cons underscore (go t'')
| otherwise ->
S.cons P.pathSeparator (go t')

underscore = fromIntegral (ord '_')

{- Sentinal value, only produced by lockJournal; required
- as a parameter by things that need to ensure the journal is
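Both the old String code and the new ByteString code above implement the same flat naming scheme for journal files: a path separator becomes "_" and a literal "_" is doubled, so fileJournal can reverse the mapping. A String-only sketch of the scheme and its round trip (illustrative, not the code from the diff):

toJournalName :: FilePath -> FilePath
toJournalName = concatMap esc
  where
    esc '/' = "_"   -- path separators flatten to a single underscore
    esc '_' = "__"  -- a real underscore is escaped by doubling it
    esc c = [c]

fromJournalName :: FilePath -> FilePath
fromJournalName ('_':'_':rest) = '_' : fromJournalName rest
fromJournalName ('_':rest) = '/' : fromJournalName rest
fromJournalName (c:rest) = c : fromJournalName rest
fromJournalName [] = []

-- fromJournalName (toJournalName "remote_a/uuid.log") == "remote_a/uuid.log"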
@@ -39,11 +39,12 @@ import qualified Utility.RawFilePath as R
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import qualified System.FilePath.ByteString as P

type LinkTarget = String

{- Checks if a file is a link to a key. -}
isAnnexLink :: FilePath -> Annex (Maybe Key)
isAnnexLink :: RawFilePath -> Annex (Maybe Key)
isAnnexLink file = maybe Nothing parseLinkTargetOrPointer <$> getAnnexLinkTarget file

{- Gets the link target of a symlink.
@@ -54,13 +55,13 @@ isAnnexLink file = maybe Nothing parseLinkTargetOrPointer <$> getAnnexLinkTarget
- Returns Nothing if the file is not a symlink, or not a link to annex
- content.
-}
getAnnexLinkTarget :: FilePath -> Annex (Maybe S.ByteString)
getAnnexLinkTarget :: RawFilePath -> Annex (Maybe S.ByteString)
getAnnexLinkTarget f = getAnnexLinkTarget' f
=<< (coreSymlinks <$> Annex.getGitConfig)

{- Pass False to force looking inside file, for when git checks out
- symlinks as plain files. -}
getAnnexLinkTarget' :: FilePath -> Bool -> Annex (Maybe S.ByteString)
getAnnexLinkTarget' :: RawFilePath -> Bool -> Annex (Maybe S.ByteString)
getAnnexLinkTarget' file coresymlinks = if coresymlinks
then check probesymlink $
return Nothing
@@ -75,9 +76,9 @@ getAnnexLinkTarget' file coresymlinks = if coresymlinks
| otherwise -> return Nothing
Nothing -> fallback

probesymlink = R.readSymbolicLink $ toRawFilePath file
probesymlink = R.readSymbolicLink file

probefilecontent = withFile file ReadMode $ \h -> do
probefilecontent = withFile (fromRawFilePath file) ReadMode $ \h -> do
s <- S.hGet h unpaddedMaxPointerSz
-- If we got the full amount, the file is too large
-- to be a symlink target.
@@ -92,7 +93,7 @@ getAnnexLinkTarget' file coresymlinks = if coresymlinks
then mempty
else s

makeAnnexLink :: LinkTarget -> FilePath -> Annex ()
makeAnnexLink :: LinkTarget -> RawFilePath -> Annex ()
makeAnnexLink = makeGitLink

{- Creates a link on disk.
@@ -102,48 +103,48 @@ makeAnnexLink = makeGitLink
- it's staged as such, so use addAnnexLink when adding a new file or
- modified link to git.
-}
makeGitLink :: LinkTarget -> FilePath -> Annex ()
makeGitLink :: LinkTarget -> RawFilePath -> Annex ()
makeGitLink linktarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
( liftIO $ do
void $ tryIO $ removeFile file
createSymbolicLink linktarget file
, liftIO $ writeFile file linktarget
void $ tryIO $ removeFile (fromRawFilePath file)
createSymbolicLink linktarget (fromRawFilePath file)
, liftIO $ writeFile (fromRawFilePath file) linktarget
)

{- Creates a link on disk, and additionally stages it in git. -}
addAnnexLink :: LinkTarget -> FilePath -> Annex ()
addAnnexLink :: LinkTarget -> RawFilePath -> Annex ()
addAnnexLink linktarget file = do
makeAnnexLink linktarget file
stageSymlink file =<< hashSymlink linktarget

{- Injects a symlink target into git, returning its Sha. -}
hashSymlink :: LinkTarget -> Annex Sha
hashSymlink linktarget = hashBlob $ toRawFilePath $ toInternalGitPath linktarget
hashSymlink = hashBlob . toInternalGitPath . toRawFilePath

{- Stages a symlink to an annexed object, using a Sha of its target. -}
stageSymlink :: FilePath -> Sha -> Annex ()
stageSymlink :: RawFilePath -> Sha -> Annex ()
stageSymlink file sha =
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.stageSymlink file sha)
inRepo (Git.UpdateIndex.stageSymlink (fromRawFilePath file) sha)

{- Injects a pointer file content into git, returning its Sha. -}
hashPointerFile :: Key -> Annex Sha
hashPointerFile key = hashBlob $ formatPointer key

{- Stages a pointer file, using a Sha of its content -}
stagePointerFile :: FilePath -> Maybe FileMode -> Sha -> Annex ()
stagePointerFile :: RawFilePath -> Maybe FileMode -> Sha -> Annex ()
stagePointerFile file mode sha =
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.stageFile sha treeitemtype file)
inRepo (Git.UpdateIndex.stageFile sha treeitemtype $ fromRawFilePath file)
where
treeitemtype
| maybe False isExecutable mode = TreeExecutable
| otherwise = TreeFile

writePointerFile :: FilePath -> Key -> Maybe FileMode -> IO ()
writePointerFile :: RawFilePath -> Key -> Maybe FileMode -> IO ()
writePointerFile file k mode = do
S.writeFile file (formatPointer k)
maybe noop (setFileMode file) mode
S.writeFile (fromRawFilePath file) (formatPointer k)
maybe noop (setFileMode $ fromRawFilePath file) mode

newtype Restage = Restage Bool

@@ -172,14 +173,14 @@ newtype Restage = Restage Bool
- the worktree file is changed by something else before git update-index
- gets to look at it.
-}
restagePointerFile :: Restage -> FilePath -> InodeCache -> Annex ()
restagePointerFile :: Restage -> RawFilePath -> InodeCache -> Annex ()
restagePointerFile (Restage False) f _ =
toplevelWarning True $ unableToRestage (Just f)
toplevelWarning True $ unableToRestage $ Just $ fromRawFilePath f
restagePointerFile (Restage True) f orig = withTSDelta $ \tsd -> do
-- update-index is documented as picky about "./file" and it
-- fails on "../../repo/path/file" when cwd is not in the repo
-- being acted on. Avoid these problems with an absolute path.
absf <- liftIO $ absPath f
absf <- liftIO $ absPath $ fromRawFilePath f
Annex.Queue.addInternalAction runner [(absf, isunmodified tsd)]
where
isunmodified tsd = genInodeCache f tsd >>= return . \case
@@ -200,7 +201,7 @@ restagePointerFile (Restage True) f orig = withTSDelta $ \tsd -> do
unlockindex = liftIO . maybe noop Git.LockFile.closeLock
showwarning = warning $ unableToRestage Nothing
go Nothing = showwarning
go (Just _) = withTmpDirIn (Git.localGitDir r) "annexindex" $ \tmpdir -> do
go (Just _) = withTmpDirIn (fromRawFilePath $ Git.localGitDir r) "annexindex" $ \tmpdir -> do
let tmpindex = tmpdir </> "index"
let updatetmpindex = do
r' <- Git.Env.addGitEnv r Git.Index.indexEnv
@@ -252,7 +253,7 @@ parseLinkTargetOrPointerLazy b =
{- Parses a symlink target to a Key. -}
parseLinkTarget :: S.ByteString -> Maybe Key
parseLinkTarget l
| isLinkToAnnex l = fileKey' $ snd $ S8.breakEnd pathsep l
| isLinkToAnnex l = fileKey $ snd $ S8.breakEnd pathsep l
| otherwise = Nothing
where
pathsep '/' = True
@@ -262,9 +263,9 @@ parseLinkTarget l
pathsep _ = False

formatPointer :: Key -> S.ByteString
formatPointer k = prefix <> keyFile' k <> nl
formatPointer k = prefix <> keyFile k <> nl
where
prefix = toRawFilePath $ toInternalGitPath (pathSeparator:objectDir)
prefix = toInternalGitPath $ P.pathSeparator `S.cons` objectDir'
nl = S8.singleton '\n'

{- Maximum size of a file that could be a pointer to a key.
@@ -283,8 +284,8 @@ unpaddedMaxPointerSz = 8192
{- Checks if a worktree file is a pointer to a key.
-
- Unlocked files whose content is present are not detected by this. -}
isPointerFile :: FilePath -> IO (Maybe Key)
isPointerFile f = catchDefaultIO Nothing $ withFile f ReadMode $ \h ->
isPointerFile :: RawFilePath -> IO (Maybe Key)
isPointerFile f = catchDefaultIO Nothing $ withFile (fromRawFilePath f) ReadMode $ \h ->
parseLinkTargetOrPointer <$> S.hGet h unpaddedMaxPointerSz

{- Checks a symlink target or pointer file first line to see if it
@@ -301,8 +302,7 @@ isLinkToAnnex s = p `S.isInfixOf` s
|| p' `S.isInfixOf` s
#endif
where
sp = (pathSeparator:objectDir)
p = toRawFilePath sp
p = P.pathSeparator `S.cons` objectDir'
#ifdef mingw32_HOST_OS
p' = toRawFilePath (toInternalGitPath sp)
p' = toInternalGitPath p
#endif
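The formatPointer and parseLinkTarget hunks above show the shape of a pointer: its first line is a path containing the annex objects directory, ending in the key's filename. A self-contained sketch of pulling the key name back out of such a line (illustrative only; the real code goes on to decode the escaped key with fileKey):

import qualified Data.ByteString.Char8 as B8

-- Take the first line, require the annex objects directory to appear in it,
-- and return the final path component, which names the key.
keyNameFromPointer :: B8.ByteString -> Maybe B8.ByteString
keyNameFromPointer s
    | B8.pack "/annex/objects/" `B8.isInfixOf` firstline =
        Just (snd (B8.breakEnd (== '/') firstline))
    | otherwise = Nothing
  where
    firstline = B8.takeWhile (/= '\n') s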
@@ -9,13 +9,12 @@
module Annex.Locations (
keyFile,
keyFile',
fileKey,
fileKey',
keyPaths,
keyPath,
annexDir,
objectDir,
objectDir',
gitAnnexLocation,
gitAnnexLocationDepth,
gitAnnexLink,
@@ -62,6 +61,7 @@ module Annex.Locations (
gitAnnexFeedState,
gitAnnexMergeDir,
gitAnnexJournalDir,
gitAnnexJournalDir',
gitAnnexJournalLock,
gitAnnexGitQueueLock,
gitAnnexPreCommitLock,
@@ -93,6 +93,7 @@ module Annex.Locations (
import Data.Char
import Data.Default
import qualified Data.ByteString.Char8 as S8
import qualified System.FilePath.ByteString as P

import Common
import Key
@@ -104,6 +105,7 @@ import qualified Git.Types as Git
import Git.FilePath
import Annex.DirHashes
import Annex.Fixup
import qualified Utility.RawFilePath as R

{- Conventions:
-
@@ -120,24 +122,27 @@ import Annex.Fixup

{- The directory git annex uses for local state, relative to the .git
- directory -}
annexDir :: FilePath
annexDir = addTrailingPathSeparator "annex"
annexDir :: RawFilePath
annexDir = P.addTrailingPathSeparator "annex"

{- The directory git annex uses for locally available object content,
- relative to the .git directory -}
objectDir :: FilePath
objectDir = addTrailingPathSeparator $ annexDir </> "objects"
objectDir = fromRawFilePath objectDir'

objectDir' :: RawFilePath
objectDir' = P.addTrailingPathSeparator $ annexDir P.</> "objects"

{- Annexed file's possible locations relative to the .git directory.
- There are two different possibilities, using different hashes.
-
- Also, some repositories have a Difference in hash directory depth.
-}
annexLocations :: GitConfig -> Key -> [FilePath]
annexLocations :: GitConfig -> Key -> [RawFilePath]
annexLocations config key = map (annexLocation config key) dirHashes

annexLocation :: GitConfig -> Key -> (HashLevels -> Hasher) -> FilePath
annexLocation config key hasher = objectDir </> keyPath key (hasher $ objectHashLevels config)
annexLocation :: GitConfig -> Key -> (HashLevels -> Hasher) -> RawFilePath
annexLocation config key hasher = objectDir' P.</> keyPath key (hasher $ objectHashLevels config)

{- Number of subdirectories from the gitAnnexObjectDir
- to the gitAnnexLocation. -}
@@ -157,9 +162,14 @@ gitAnnexLocationDepth config = hashlevels + 1
- This does not take direct mode into account, so in direct mode it is not
- the actual location of the file's content.
-}
gitAnnexLocation :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexLocation key r config = gitAnnexLocation' key r config (annexCrippledFileSystem config) (coreSymlinks config) doesFileExist (Git.localGitDir r)
gitAnnexLocation' :: Key -> Git.Repo -> GitConfig -> Bool -> Bool -> (FilePath -> IO Bool) -> FilePath -> IO FilePath
gitAnnexLocation :: Key -> Git.Repo -> GitConfig -> IO RawFilePath
gitAnnexLocation key r config = gitAnnexLocation' key r config
(annexCrippledFileSystem config)
(coreSymlinks config)
R.doesPathExist
(Git.localGitDir r)

gitAnnexLocation' :: Key -> Git.Repo -> GitConfig -> Bool -> Bool -> (RawFilePath -> IO Bool) -> RawFilePath -> IO RawFilePath
gitAnnexLocation' key r config crippled symlinkssupported checker gitdir
{- Bare repositories default to hashDirLower for new
- content, as it's more portable. But check all locations. -}
@@ -181,7 +191,7 @@ gitAnnexLocation' key r config crippled symlinkssupported checker gitdir
only = return . inrepo . annexLocation config key
checkall = check $ map inrepo $ annexLocations config key

inrepo d = gitdir </> d
inrepo d = gitdir P.</> d
check locs@(l:_) = fromMaybe l <$> firstM checker locs
check [] = error "internal"

@@ -192,17 +202,22 @@ gitAnnexLink file key r config = do
let absfile = absNormPathUnix currdir file
let gitdir = getgitdir currdir
loc <- gitAnnexLocation' key r config False False (\_ -> return True) gitdir
toInternalGitPath <$> relPathDirToFile (parentDir absfile) loc
fromRawFilePath . toInternalGitPath . toRawFilePath
<$> relPathDirToFile (parentDir absfile) (fromRawFilePath loc)
where
getgitdir currdir
{- This special case is for git submodules on filesystems not
- supporting symlinks; generate link target that will
- work portably. -}
| not (coreSymlinks config) && needsSubmoduleFixup r =
absNormPathUnix currdir $ Git.repoPath r </> ".git"
toRawFilePath $
absNormPathUnix currdir $ fromRawFilePath $
Git.repoPath r P.</> ".git"
| otherwise = Git.localGitDir r
absNormPathUnix d p = toInternalGitPath $
absPathFrom (toInternalGitPath d) (toInternalGitPath p)
absNormPathUnix d p = fromRawFilePath $ toInternalGitPath $ toRawFilePath $
absPathFrom
(fromRawFilePath $ toInternalGitPath $ toRawFilePath d)
(fromRawFilePath $ toInternalGitPath $ toRawFilePath p)

{- Calculates a symlink target as would be used in a typical git
- repository, with .git in the top of the work tree. -}
@@ -211,7 +226,7 @@ gitAnnexLinkCanonical file key r config = gitAnnexLink file key r' config'
where
r' = case r of
Git.Repo { Git.location = l@Git.Local { Git.worktree = Just wt } } ->
r { Git.location = l { Git.gitdir = wt </> ".git" } }
r { Git.location = l { Git.gitdir = wt P.</> ".git" } }
_ -> r
config' = config
{ annexCrippledFileSystem = False
@@ -222,61 +237,69 @@ gitAnnexLinkCanonical file key r config = gitAnnexLink file key r' config'
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexContentLock key r config = do
loc <- gitAnnexLocation key r config
return $ loc ++ ".lck"
return $ fromRawFilePath loc ++ ".lck"

{- File that maps from a key to the file(s) in the git repository.
- Used in direct mode. -}
gitAnnexMapping :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexMapping key r config = do
loc <- gitAnnexLocation key r config
return $ loc ++ ".map"
return $ fromRawFilePath loc ++ ".map"

{- File that caches information about a key's content, used to determine
- if a file has changed.
- Used in direct mode. -}
gitAnnexInodeCache :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexInodeCache key r config = do
gitAnnexInodeCache key r config = do
loc <- gitAnnexLocation key r config
return $ loc ++ ".cache"
return $ fromRawFilePath loc ++ ".cache"

gitAnnexInodeSentinal :: Git.Repo -> FilePath
gitAnnexInodeSentinal r = gitAnnexDir r </> "sentinal"
gitAnnexInodeSentinal :: Git.Repo -> RawFilePath
gitAnnexInodeSentinal r = gitAnnexDir r P.</> "sentinal"

gitAnnexInodeSentinalCache :: Git.Repo -> FilePath
gitAnnexInodeSentinalCache r = gitAnnexInodeSentinal r ++ ".cache"
gitAnnexInodeSentinalCache :: Git.Repo -> RawFilePath
gitAnnexInodeSentinalCache r = gitAnnexInodeSentinal r <> ".cache"

{- The annex directory of a repository. -}
gitAnnexDir :: Git.Repo -> FilePath
gitAnnexDir r = addTrailingPathSeparator $ Git.localGitDir r </> annexDir
gitAnnexDir :: Git.Repo -> RawFilePath
gitAnnexDir r = P.addTrailingPathSeparator $ Git.localGitDir r P.</> annexDir

{- The part of the annex directory where file contents are stored. -}
gitAnnexObjectDir :: Git.Repo -> FilePath
gitAnnexObjectDir r = addTrailingPathSeparator $ Git.localGitDir r </> objectDir
gitAnnexObjectDir r = fromRawFilePath $
P.addTrailingPathSeparator $ Git.localGitDir r P.</> objectDir'

{- .git/annex/tmp/ is used for temp files for key's contents -}
gitAnnexTmpObjectDir :: Git.Repo -> FilePath
gitAnnexTmpObjectDir r = addTrailingPathSeparator $ gitAnnexDir r </> "tmp"
gitAnnexTmpObjectDir = fromRawFilePath . gitAnnexTmpObjectDir'

gitAnnexTmpObjectDir' :: Git.Repo -> RawFilePath
gitAnnexTmpObjectDir' r = P.addTrailingPathSeparator $ gitAnnexDir r P.</> "tmp"

{- .git/annex/othertmp/ is used for other temp files -}
gitAnnexTmpOtherDir :: Git.Repo -> FilePath
gitAnnexTmpOtherDir r = addTrailingPathSeparator $ gitAnnexDir r </> "othertmp"
gitAnnexTmpOtherDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "othertmp"

{- Lock file for gitAnnexTmpOtherDir. -}
gitAnnexTmpOtherLock :: Git.Repo -> FilePath
gitAnnexTmpOtherLock r = gitAnnexDir r </> "othertmp.lck"
gitAnnexTmpOtherLock r = fromRawFilePath $ gitAnnexDir r P.</> "othertmp.lck"

{- .git/annex/misctmp/ was used by old versions of git-annex and is still
- used during initialization -}
gitAnnexTmpOtherDirOld :: Git.Repo -> FilePath
gitAnnexTmpOtherDirOld r = addTrailingPathSeparator $ gitAnnexDir r </> "misctmp"
gitAnnexTmpOtherDirOld r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "misctmp"

{- .git/annex/watchtmp/ is used by the watcher and assistant -}
gitAnnexTmpWatcherDir :: Git.Repo -> FilePath
gitAnnexTmpWatcherDir r = addTrailingPathSeparator $ gitAnnexDir r </> "watchtmp"
gitAnnexTmpWatcherDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "watchtmp"

{- The temp file to use for a given key's content. -}
gitAnnexTmpObjectLocation :: Key -> Git.Repo -> FilePath
gitAnnexTmpObjectLocation key r = gitAnnexTmpObjectDir r </> keyFile key
gitAnnexTmpObjectLocation key r = fromRawFilePath $
gitAnnexTmpObjectDir' r P.</> keyFile key

{- Given a temp file such as gitAnnexTmpObjectLocation, makes a name for a
- subdirectory in the same location, that can be used as a work area
@@ -293,19 +316,21 @@ gitAnnexTmpWorkDir p =

{- .git/annex/bad/ is used for bad files found during fsck -}
gitAnnexBadDir :: Git.Repo -> FilePath
gitAnnexBadDir r = addTrailingPathSeparator $ gitAnnexDir r </> "bad"
gitAnnexBadDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "bad"

{- The bad file to use for a given key. -}
gitAnnexBadLocation :: Key -> Git.Repo -> FilePath
gitAnnexBadLocation key r = gitAnnexBadDir r </> keyFile key
gitAnnexBadLocation key r = gitAnnexBadDir r </> fromRawFilePath (keyFile key)

{- .git/annex/foounused is used to number possibly unused keys -}
gitAnnexUnusedLog :: FilePath -> Git.Repo -> FilePath
gitAnnexUnusedLog prefix r = gitAnnexDir r </> (prefix ++ "unused")
gitAnnexUnusedLog prefix r =
fromRawFilePath (gitAnnexDir r) </> (prefix ++ "unused")

{- .git/annex/keys/ contains a database of information about keys. -}
gitAnnexKeysDb :: Git.Repo -> FilePath
gitAnnexKeysDb r = gitAnnexDir r </> "keys"
gitAnnexKeysDb r = fromRawFilePath $ gitAnnexDir r P.</> "keys"

{- Lock file for the keys database. -}
gitAnnexKeysDbLock :: Git.Repo -> FilePath
@@ -319,7 +344,8 @@ gitAnnexKeysDbIndexCache r = gitAnnexKeysDb r ++ ".cache"
{- .git/annex/fsck/uuid/ is used to store information about incremental
- fscks. -}
gitAnnexFsckDir :: UUID -> Git.Repo -> FilePath
gitAnnexFsckDir u r = gitAnnexDir r </> "fsck" </> fromUUID u
gitAnnexFsckDir u r = fromRawFilePath $
gitAnnexDir r P.</> "fsck" P.</> fromUUID u

{- used to store information about incremental fscks. -}
gitAnnexFsckState :: UUID -> Git.Repo -> FilePath
@@ -335,20 +361,21 @@ gitAnnexFsckDbLock u r = gitAnnexFsckDir u r </> "fsck.lck"

{- .git/annex/fsckresults/uuid is used to store results of git fscks -}
gitAnnexFsckResultsLog :: UUID -> Git.Repo -> FilePath
gitAnnexFsckResultsLog u r = gitAnnexDir r </> "fsckresults" </> fromUUID u
gitAnnexFsckResultsLog u r = fromRawFilePath $
gitAnnexDir r P.</> "fsckresults" P.</> fromUUID u

{- .git/annex/smudge.log is used to log smudges worktree files that need to
- be updated. -}
gitAnnexSmudgeLog :: Git.Repo -> FilePath
gitAnnexSmudgeLog r = gitAnnexDir r </> "smudge.log"
gitAnnexSmudgeLog r = fromRawFilePath $ gitAnnexDir r P.</> "smudge.log"

gitAnnexSmudgeLock :: Git.Repo -> FilePath
gitAnnexSmudgeLock r = gitAnnexDir r </> "smudge.lck"
gitAnnexSmudgeLock r = fromRawFilePath $ gitAnnexDir r P.</> "smudge.lck"

{- .git/annex/export/uuid/ is used to store information about
- exports to special remotes. -}
gitAnnexExportDir :: UUID -> Git.Repo -> FilePath
gitAnnexExportDir u r = gitAnnexDir r </> "export" </> fromUUID u
gitAnnexExportDir u r = fromRawFilePath (gitAnnexDir r) </> "export" </> fromUUID u

{- Directory containing database used to record export info. -}
gitAnnexExportDbDir :: UUID -> Git.Repo -> FilePath
@@ -365,7 +392,8 @@ gitAnnexExportUpdateLock u r = gitAnnexExportDbDir u r ++ ".upl"
{- Log file used to keep track of files that were in the tree exported to a
- remote, but were excluded by its preferred content settings. -}
gitAnnexExportExcludeLog :: UUID -> Git.Repo -> FilePath
gitAnnexExportExcludeLog u r = gitAnnexDir r </> "export.ex" </> fromUUID u
gitAnnexExportExcludeLog u r = fromRawFilePath $
gitAnnexDir r P.</> "export.ex" P.</> fromUUID u

{- Directory containing database used to record remote content ids.
-
@@ -373,7 +401,7 @@ gitAnnexExportExcludeLog u r = gitAnnexDir r </> "export.ex" </> fromUUID u
- need to be rebuilt with a new name.)
-}
gitAnnexContentIdentifierDbDir :: Git.Repo -> FilePath
gitAnnexContentIdentifierDbDir r = gitAnnexDir r </> "cids"
gitAnnexContentIdentifierDbDir r = fromRawFilePath $ gitAnnexDir r P.</> "cids"

{- Lock file for writing to the content id database. -}
gitAnnexContentIdentifierLock :: Git.Repo -> FilePath
@@ -382,125 +410,137 @@ gitAnnexContentIdentifierLock r = gitAnnexContentIdentifierDbDir r ++ ".lck"
{- .git/annex/schedulestate is used to store information about when
- scheduled jobs were last run. -}
gitAnnexScheduleState :: Git.Repo -> FilePath
gitAnnexScheduleState r = gitAnnexDir r </> "schedulestate"
gitAnnexScheduleState r = fromRawFilePath $ gitAnnexDir r P.</> "schedulestate"

{- .git/annex/creds/ is used to store credentials to access some special
- remotes. -}
gitAnnexCredsDir :: Git.Repo -> FilePath
gitAnnexCredsDir r = addTrailingPathSeparator $ gitAnnexDir r </> "creds"
gitAnnexCredsDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "creds"

{- .git/annex/certificate.pem and .git/annex/key.pem are used by the webapp
- when HTTPS is enabled -}
gitAnnexWebCertificate :: Git.Repo -> FilePath
gitAnnexWebCertificate r = gitAnnexDir r </> "certificate.pem"
gitAnnexWebCertificate r = fromRawFilePath $ gitAnnexDir r P.</> "certificate.pem"
gitAnnexWebPrivKey :: Git.Repo -> FilePath
gitAnnexWebPrivKey r = gitAnnexDir r </> "privkey.pem"
gitAnnexWebPrivKey r = fromRawFilePath $ gitAnnexDir r P.</> "privkey.pem"

{- .git/annex/feeds/ is used to record per-key (url) state by importfeeds -}
gitAnnexFeedStateDir :: Git.Repo -> FilePath
gitAnnexFeedStateDir r = addTrailingPathSeparator $ gitAnnexDir r </> "feedstate"
gitAnnexFeedStateDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "feedstate"

gitAnnexFeedState :: Key -> Git.Repo -> FilePath
gitAnnexFeedState k r = gitAnnexFeedStateDir r </> keyFile k
gitAnnexFeedState k r = gitAnnexFeedStateDir r </> fromRawFilePath (keyFile k)

{- .git/annex/merge/ is used as a empty work tree for direct mode merges and
- merges in adjusted branches. -}
gitAnnexMergeDir :: Git.Repo -> FilePath
gitAnnexMergeDir r = addTrailingPathSeparator $ gitAnnexDir r </> "merge"
gitAnnexMergeDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "merge"

{- .git/annex/transfer/ is used to record keys currently
- being transferred, and other transfer bookkeeping info. -}
gitAnnexTransferDir :: Git.Repo -> FilePath
gitAnnexTransferDir r = addTrailingPathSeparator $ gitAnnexDir r </> "transfer"
gitAnnexTransferDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "transfer"

{- .git/annex/journal/ is used to journal changes made to the git-annex
- branch -}
gitAnnexJournalDir :: Git.Repo -> FilePath
gitAnnexJournalDir r = addTrailingPathSeparator $ gitAnnexDir r </> "journal"
gitAnnexJournalDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "journal"

gitAnnexJournalDir' :: Git.Repo -> RawFilePath
gitAnnexJournalDir' r = P.addTrailingPathSeparator $ gitAnnexDir r P.</> "journal"

{- Lock file for the journal. -}
gitAnnexJournalLock :: Git.Repo -> FilePath
gitAnnexJournalLock r = gitAnnexDir r </> "journal.lck"
gitAnnexJournalLock r = fromRawFilePath $ gitAnnexDir r P.</> "journal.lck"

{- Lock file for flushing a git queue that writes to the git index or
- other git state that should only have one writer at a time. -}
gitAnnexGitQueueLock :: Git.Repo -> FilePath
gitAnnexGitQueueLock r = gitAnnexDir r </> "gitqueue.lck"
gitAnnexGitQueueLock r = fromRawFilePath $ gitAnnexDir r P.</> "gitqueue.lck"

{- Lock file for the pre-commit hook. -}
gitAnnexPreCommitLock :: Git.Repo -> FilePath
gitAnnexPreCommitLock r = gitAnnexDir r </> "precommit.lck"
gitAnnexPreCommitLock r = fromRawFilePath $ gitAnnexDir r P.</> "precommit.lck"

{- Lock file for direct mode merge. -}
gitAnnexMergeLock :: Git.Repo -> FilePath
gitAnnexMergeLock r = gitAnnexDir r </> "merge.lck"
gitAnnexMergeLock r = fromRawFilePath $ gitAnnexDir r P.</> "merge.lck"

{- .git/annex/index is used to stage changes to the git-annex branch -}
gitAnnexIndex :: Git.Repo -> FilePath
gitAnnexIndex r = gitAnnexDir r </> "index"
gitAnnexIndex r = fromRawFilePath $ gitAnnexDir r P.</> "index"

{- Holds the ref of the git-annex branch that the index was last updated to.
-
- The .lck in the name is a historical accident; this is not used as a
- lock. -}
gitAnnexIndexStatus :: Git.Repo -> FilePath
gitAnnexIndexStatus r = gitAnnexDir r </> "index.lck"
gitAnnexIndexStatus r = fromRawFilePath $ gitAnnexDir r P.</> "index.lck"

{- The index file used to generate a filtered branch view._-}
gitAnnexViewIndex :: Git.Repo -> FilePath
gitAnnexViewIndex r = gitAnnexDir r </> "viewindex"
gitAnnexViewIndex r = fromRawFilePath $ gitAnnexDir r P.</> "viewindex"

{- File containing a log of recently accessed views. -}
gitAnnexViewLog :: Git.Repo -> FilePath
gitAnnexViewLog r = gitAnnexDir r </> "viewlog"
gitAnnexViewLog r = fromRawFilePath $ gitAnnexDir r P.</> "viewlog"

{- List of refs that have already been merged into the git-annex branch. -}
gitAnnexMergedRefs :: Git.Repo -> FilePath
gitAnnexMergedRefs r = gitAnnexDir r </> "mergedrefs"
gitAnnexMergedRefs r = fromRawFilePath $ gitAnnexDir r P.</> "mergedrefs"

{- List of refs that should not be merged into the git-annex branch. -}
gitAnnexIgnoredRefs :: Git.Repo -> FilePath
gitAnnexIgnoredRefs r = gitAnnexDir r </> "ignoredrefs"
gitAnnexIgnoredRefs r = fromRawFilePath $ gitAnnexDir r P.</> "ignoredrefs"

{- Pid file for daemon mode. -}
gitAnnexPidFile :: Git.Repo -> FilePath
gitAnnexPidFile r = gitAnnexDir r </> "daemon.pid"
gitAnnexPidFile r = fromRawFilePath $ gitAnnexDir r P.</> "daemon.pid"

{- Pid lock file for pidlock mode -}
gitAnnexPidLockFile :: Git.Repo -> FilePath
gitAnnexPidLockFile r = gitAnnexDir r </> "pidlock"
gitAnnexPidLockFile r = fromRawFilePath $ gitAnnexDir r P.</> "pidlock"

{- Status file for daemon mode. -}
gitAnnexDaemonStatusFile :: Git.Repo -> FilePath
gitAnnexDaemonStatusFile r = gitAnnexDir r </> "daemon.status"
gitAnnexDaemonStatusFile r = fromRawFilePath $
gitAnnexDir r P.</> "daemon.status"

{- Log file for daemon mode. -}
gitAnnexLogFile :: Git.Repo -> FilePath
gitAnnexLogFile r = gitAnnexDir r </> "daemon.log"
gitAnnexLogFile r = fromRawFilePath $ gitAnnexDir r P.</> "daemon.log"

{- Log file for fuzz test. -}
gitAnnexFuzzTestLogFile :: Git.Repo -> FilePath
gitAnnexFuzzTestLogFile r = gitAnnexDir r </> "fuzztest.log"
gitAnnexFuzzTestLogFile r = fromRawFilePath $
gitAnnexDir r P.</> "fuzztest.log"

{- Html shim file used to launch the webapp. -}
gitAnnexHtmlShim :: Git.Repo -> FilePath
gitAnnexHtmlShim r = gitAnnexDir r </> "webapp.html"
gitAnnexHtmlShim r = fromRawFilePath $ gitAnnexDir r P.</> "webapp.html"

{- File containing the url to the webapp. -}
gitAnnexUrlFile :: Git.Repo -> FilePath
gitAnnexUrlFile r = gitAnnexDir r </> "url"
gitAnnexUrlFile r = fromRawFilePath $ gitAnnexDir r P.</> "url"

{- Temporary file used to edit configuriation from the git-annex branch. -}
gitAnnexTmpCfgFile :: Git.Repo -> FilePath
gitAnnexTmpCfgFile r = gitAnnexDir r </> "config.tmp"
gitAnnexTmpCfgFile r = fromRawFilePath $ gitAnnexDir r P.</> "config.tmp"

{- .git/annex/ssh/ is used for ssh connection caching -}
gitAnnexSshDir :: Git.Repo -> FilePath
gitAnnexSshDir r = addTrailingPathSeparator $ gitAnnexDir r </> "ssh"
gitAnnexSshDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "ssh"

{- .git/annex/remotes/ is used for remote-specific state. -}
gitAnnexRemotesDir :: Git.Repo -> FilePath
gitAnnexRemotesDir r = addTrailingPathSeparator $ gitAnnexDir r </> "remotes"
gitAnnexRemotesDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "remotes"

{- This is the base directory name used by the assistant when making
- repositories, by default. -}
@@ -557,11 +597,8 @@ reSanitizeKeyName = preSanitizeKeyName' True
- Changing what this function escapes and how is not a good idea, as it
- can cause existing objects to get lost.
-}
keyFile :: Key -> FilePath
keyFile = fromRawFilePath . keyFile'

keyFile' :: Key -> RawFilePath
keyFile' k =
keyFile :: Key -> RawFilePath
keyFile k =
let b = serializeKey' k
in if S8.any (`elem` ['&', '%', ':', '/']) b
then S8.concatMap esc b
@@ -576,11 +613,8 @@ keyFile' k =

{- Reverses keyFile, converting a filename fragment (ie, the basename of
- the symlink target) into a key. -}
fileKey :: FilePath -> Maybe Key
fileKey = fileKey' . toRawFilePath

fileKey' :: RawFilePath -> Maybe Key
fileKey' = deserializeKey' . S8.intercalate "/" . map go . S8.split '%'
fileKey :: RawFilePath -> Maybe Key
fileKey = deserializeKey' . S8.intercalate "/" . map go . S8.split '%'
where
go = S8.concat . unescafterfirst . S8.split '&'
unescafterfirst [] = []
@@ -599,8 +633,8 @@ fileKey' = deserializeKey' . S8.intercalate "/" . map go . S8.split '%'
- The file is put in a directory with the same name, this allows
- write-protecting the directory to avoid accidental deletion of the file.
-}
keyPath :: Key -> Hasher -> FilePath
keyPath key hasher = hasher key </> f </> f
keyPath :: Key -> Hasher -> RawFilePath
keyPath key hasher = hasher key P.</> f P.</> f
where
f = keyFile key

@@ -610,5 +644,5 @@ keyPath key hasher = hasher key </> f </> f
- This is compatible with the annexLocations, for interoperability between
- special remotes and git-annex repos.
-}
keyPaths :: Key -> [FilePath]
keyPaths :: Key -> [RawFilePath]
keyPaths key = map (\h -> keyPath key (h def)) dirHashes
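keyPath above places the object file inside a directory of the same name, under a hash-derived prefix, which is what allows the containing directory to be write-protected. A tiny sketch of the resulting layout (the hash prefix is shown as a placeholder argument, not the real hasher):

import System.FilePath ((</>))

-- objectPath "f1/2a" "SHA256E-s3--example"
--   == "f1/2a/SHA256E-s3--example/SHA256E-s3--example"
objectPath :: FilePath -> FilePath -> FilePath
objectPath hashdir keyfile = hashdir </> keyfile </> keyfile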
@@ -37,7 +37,7 @@ import Data.Time.Clock.POSIX
-
- Also, can generate new metadata, if configured to do so.
-}
genMetaData :: Key -> FilePath -> FileStatus -> Annex ()
genMetaData :: Key -> RawFilePath -> FileStatus -> Annex ()
genMetaData key file status = do
catKeyFileHEAD file >>= \case
Nothing -> noop
@@ -53,8 +53,8 @@ genMetaData key file status = do
where
mtime = posixSecondsToUTCTime $ realToFrac $ modificationTime status
warncopied = warning $
"Copied metadata from old version of " ++ file ++ " to new version. " ++
"If you don't want this copied metadata, run: git annex metadata --remove-all " ++ file
"Copied metadata from old version of " ++ fromRawFilePath file ++ " to new version. " ++
"If you don't want this copied metadata, run: git annex metadata --remove-all " ++ fromRawFilePath file
-- If the only fields copied were date metadata, and they'll
-- be overwritten with the current mtime, no need to warn about
-- copying.

@@ -60,7 +60,7 @@ notifyDrop (AssociatedFile (Just f)) ok = do
wanted <- Annex.getState Annex.desktopnotify
when (notifyFinish wanted) $ liftIO $ do
client <- DBus.Client.connectSession
void $ Notify.notify client (droppedNote ok f)
void $ Notify.notify client (droppedNote ok (fromRawFilePath f))
#else
notifyDrop (AssociatedFile (Just _)) _ = noop
#endif

@@ -72,7 +72,7 @@ getFileNumCopies f = fromSources

getAssociatedFileNumCopies :: AssociatedFile -> Annex NumCopies
getAssociatedFileNumCopies (AssociatedFile afile) =
maybe getNumCopies getFileNumCopies afile
maybe getNumCopies getFileNumCopies (fromRawFilePath <$> afile)

{- This is the globally visible numcopies value for a file. So it does
- not include local configuration in the git config or command line

@@ -70,7 +70,7 @@ annexFileMode = withShared $ return . go
createAnnexDirectory :: FilePath -> Annex ()
createAnnexDirectory dir = walk dir [] =<< top
where
top = parentDir <$> fromRepo gitAnnexDir
top = parentDir . fromRawFilePath <$> fromRepo gitAnnexDir
walk d below stop
| d `equalFilePath` stop = done
| otherwise = ifM (liftIO $ doesDirectoryExist d)

@@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}

{-# LANGUAGE OverloadedStrings #-}

module Annex.SpecialRemote (
module Annex.SpecialRemote,
module Annex.SpecialRemote.Config

@@ -43,6 +43,7 @@ import Annex.LockPool
#endif

import Control.Concurrent.STM
import qualified Data.ByteString as S

{- Some ssh commands are fed stdin on a pipe and so should be allowed to
- consume it. But ssh commands that are not piped stdin should generally
@@ -325,7 +326,7 @@ sizeof_sockaddr_un_sun_path = 100
{- Note that this looks at the true length of the path in bytes, as it will
- appear on disk. -}
valid_unix_socket_path :: FilePath -> Bool
valid_unix_socket_path f = length (decodeW8 f) < sizeof_sockaddr_un_sun_path
valid_unix_socket_path f = S.length (encodeBS f) < sizeof_sockaddr_un_sun_path

{- Parses the SSH port, and returns the other OpenSSH options. If
- several ports are found, the last one takes precedence. -}
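The valid_unix_socket_path change above swaps a Char-count check for a byte-count check, because sun_path is limited to a number of bytes and non-ASCII characters take more than one byte once encoded. A sketch of the same check using the text package's UTF-8 encoder in place of git-annex's encodeBS (that substitution is an assumption for the sake of a self-contained example):

import qualified Data.ByteString as B
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE

-- Measure the on-disk (UTF-8) length of the path, not the number of Chars.
validSocketPath :: FilePath -> Bool
validSocketPath f = B.length (TE.encodeUtf8 (T.pack f)) < 100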
@@ -11,7 +11,10 @@
- Licensed under the GNU AGPL version 3 or higher.
-}

{-# LANGUAGE OverloadedStrings #-}

module Annex.UUID (
configkeyUUID,
getUUID,
getRepoUUID,
getUncachedUUID,
@@ -32,6 +35,7 @@ import Annex.Common
import qualified Annex
import qualified Git
import qualified Git.Config
import Git.Types
import Config

import qualified Data.UUID as U
@@ -39,8 +43,8 @@ import qualified Data.UUID.V4 as U4
import qualified Data.UUID.V5 as U5
import Data.String

configkey :: ConfigKey
configkey = annexConfig "uuid"
configkeyUUID :: ConfigKey
configkeyUUID = annexConfig "uuid"

{- Generates a random UUID, that does not include the MAC address. -}
genUUID :: IO UUID
@@ -81,20 +85,16 @@ getRepoUUID r = do

removeRepoUUID :: Annex ()
removeRepoUUID = do
unsetConfig configkey
unsetConfig configkeyUUID
storeUUID NoUUID

getUncachedUUID :: Git.Repo -> UUID
getUncachedUUID = toUUID . Git.Config.get key ""
where
(ConfigKey key) = configkey
getUncachedUUID = toUUID . Git.Config.get configkeyUUID ""

-- Does the repo's config have a key for the UUID?
-- True even when the key has no value.
isUUIDConfigured :: Git.Repo -> Bool
isUUIDConfigured = isJust . Git.Config.getMaybe key
where
(ConfigKey key) = configkey
isUUIDConfigured = isJust . Git.Config.getMaybe configkeyUUID

{- Make sure that the repo has an annex.uuid setting. -}
prepUUID :: Annex ()
@@ -104,7 +104,7 @@ prepUUID = whenM ((==) NoUUID <$> getUUID) $
storeUUID :: UUID -> Annex ()
storeUUID u = do
Annex.changeGitConfig $ \c -> c { annexUUID = u }
storeUUIDIn configkey u
storeUUIDIn configkeyUUID u

storeUUIDIn :: ConfigKey -> UUID -> Annex ()
storeUUIDIn configfield = setConfig configfield . fromUUID
@@ -112,7 +112,7 @@ storeUUIDIn configfield = setConfig configfield . fromUUID
{- Only sets the configkey in the Repo; does not change .git/config -}
setUUID :: Git.Repo -> UUID -> IO Git.Repo
setUUID r u = do
let s = show configkey ++ "=" ++ fromUUID u
let s = encodeBS' $ show configkeyUUID ++ "=" ++ fromUUID u
Git.Config.store s r

-- Dummy uuid for the whole web. Do not alter.

@@ -36,7 +36,7 @@ mkVariant file variant = takeDirectory file
-}
variantFile :: FilePath -> Key -> FilePath
variantFile file key
| doubleconflict = mkVariant file (keyFile key)
| doubleconflict = mkVariant file (fromRawFilePath (keyFile key))
| otherwise = mkVariant file (shortHash $ serializeKey' key)
where
doubleconflict = variantMarker `isInfixOf` file

@@ -6,11 +6,13 @@
-}

{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}

module Annex.Version where

import Annex.Common
import Config
import Git.Types
import Types.RepoVersion
import qualified Annex

@@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}

{-# LANGUAGE OverloadedStrings #-}

module Annex.View where

import Annex.Common
@@ -80,7 +82,7 @@ parseViewParam s = case separate (== '=') s of
)
where
mkFilterValues v
| any (`elem` v) "*?" = FilterGlob v
| any (`elem` v) ['*', '?'] = FilterGlob v
| otherwise = FilterValues $ S.singleton $ toMetaValue $ encodeBS v
mkExcludeValues = ExcludeValues . S.singleton . toMetaValue . encodeBS

@@ -358,13 +360,14 @@ applyView' mkviewedfile getfilemetadata view = do

go uh topf _sha _mode (Just k) = do
metadata <- getCurrentMetaData k
let f = getTopFilePath topf
let f = fromRawFilePath $ getTopFilePath topf
let metadata' = getfilemetadata f `unionMetaData` metadata
forM_ (genviewedfiles f metadata') $ \fv -> do
f' <- fromRepo $ fromTopFilePath $ asTopFilePath fv
f' <- fromRawFilePath <$>
fromRepo (fromTopFilePath $ asTopFilePath $ toRawFilePath fv)
stagesymlink uh f' =<< calcRepo (gitAnnexLink f' k)
go uh topf (Just sha) (Just treeitemtype) Nothing
| "." `isPrefixOf` getTopFilePath topf =
| "." `B.isPrefixOf` getTopFilePath topf =
liftIO $ Git.UpdateIndex.streamUpdateIndex' uh $
pureStreamer $ updateIndexLine sha treeitemtype topf
go _ _ _ _ _ = noop
@@ -403,7 +406,7 @@ withViewChanges addmeta removemeta = do
=<< catKey (DiffTree.dstsha item)
| otherwise = noop
handlechange item a = maybe noop
(void . commandAction . a (getTopFilePath $ DiffTree.file item))
(void . commandAction . a (fromRawFilePath $ getTopFilePath $ DiffTree.file item))

{- Runs an action using the view index file.
- Note that the file does not necessarily exist, or can contain
@@ -23,6 +23,7 @@ import Database.Types
import qualified Database.Keys
import qualified Database.Keys.SQL
import Config
import qualified Utility.RawFilePath as R

{- Looks up the key corresponding to an annexed file in the work tree,
- by examining what the file links to.
@@ -33,35 +34,35 @@ import Config
- When in an adjusted branch that may have hidden the file, looks for a
- pointer to a key in the original branch.
-}
lookupFile :: FilePath -> Annex (Maybe Key)
lookupFile :: RawFilePath -> Annex (Maybe Key)
lookupFile = lookupFile' catkeyfile
where
catkeyfile file =
ifM (liftIO $ doesFileExist file)
ifM (liftIO $ doesFileExist $ fromRawFilePath file)
( catKeyFile file
, catKeyFileHidden file =<< getCurrentBranch
)

lookupFileNotHidden :: FilePath -> Annex (Maybe Key)
lookupFileNotHidden :: RawFilePath -> Annex (Maybe Key)
lookupFileNotHidden = lookupFile' catkeyfile
where
catkeyfile file =
ifM (liftIO $ doesFileExist file)
ifM (liftIO $ doesFileExist $ fromRawFilePath file)
( catKeyFile file
, return Nothing
)

lookupFile' :: (FilePath -> Annex (Maybe Key)) -> FilePath -> Annex (Maybe Key)
lookupFile' :: (RawFilePath -> Annex (Maybe Key)) -> RawFilePath -> Annex (Maybe Key)
lookupFile' catkeyfile file = isAnnexLink file >>= \case
Just key -> return (Just key)
Nothing -> catkeyfile file

{- Modifies an action to only act on files that are already annexed,
- and passes the key on to it. -}
whenAnnexed :: (FilePath -> Key -> Annex (Maybe a)) -> FilePath -> Annex (Maybe a)
whenAnnexed :: (RawFilePath -> Key -> Annex (Maybe a)) -> RawFilePath -> Annex (Maybe a)
whenAnnexed a file = ifAnnexed file (a file) (return Nothing)

ifAnnexed :: FilePath -> (Key -> Annex a) -> Annex a -> Annex a
ifAnnexed :: RawFilePath -> (Key -> Annex a) -> Annex a -> Annex a
ifAnnexed file yes no = maybe no yes =<< lookupFile file

{- Find all unlocked files and update the keys database for them.
@@ -98,14 +99,16 @@ scanUnlockedFiles = whenM (inRepo Git.Ref.headExists <&&> not <$> isBareRepo) $
f <- fromRepo $ fromTopFilePath tf
liftIO (isPointerFile f) >>= \case
Just k' | k' == k -> do
destmode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus f
ic <- replaceFile f $ \tmp ->
destmode <- liftIO $ catchMaybeIO $
fileMode <$> R.getFileStatus f
ic <- replaceFile (fromRawFilePath f) $ \tmp -> do
let tmp' = toRawFilePath tmp
linkFromAnnex k tmp destmode >>= \case
LinkAnnexOk ->
withTSDelta (liftIO . genInodeCache tmp)
withTSDelta (liftIO . genInodeCache tmp')
LinkAnnexNoop -> return Nothing
LinkAnnexFailed -> liftIO $ do
writePointerFile tmp k destmode
writePointerFile tmp' k destmode
return Nothing
maybe noop (restagePointerFile (Restage True) f) ic
_ -> noop

@@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}

{-# LANGUAGE OverloadedStrings #-}

module Assistant.MakeRemote where

import Assistant.Common

@@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}

{-# LANGUAGE OverloadedStrings #-}

module Assistant.MakeRepo where

import Assistant.WebApp.Common

@@ -91,7 +91,7 @@ runRepair u mrmt destructiverepair = do
remoterepair fsckresults = case Remote.repairRepo =<< mrmt of
Nothing -> return False
Just mkrepair -> do
thisrepopath <- liftIO . absPath
thisrepopath <- liftIO . absPath . fromRawFilePath
=<< liftAnnex (fromRepo Git.repoPath)
a <- liftAnnex $ mkrepair $
repair fsckresults (Just thisrepopath)
@@ -130,7 +130,7 @@ repairStaleGitLocks r = do
repairStaleLocks lockfiles
return $ not $ null lockfiles
where
findgitfiles = dirContentsRecursiveSkipping (== dropTrailingPathSeparator annexDir) True . Git.localGitDir
findgitfiles = dirContentsRecursiveSkipping (== dropTrailingPathSeparator (fromRawFilePath annexDir)) True . fromRawFilePath . Git.localGitDir
islock f
| "gc.pid" `isInfixOf` f = False
| ".lock" `isSuffixOf` f = True

@@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}

{-# LANGUAGE OverloadedStrings #-}

module Assistant.Sync where

import Assistant.Common

@@ -286,7 +286,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
ks = keySource ld
doadd = sanitycheck ks $ do
(mkey, _mcache) <- liftAnnex $ do
showStart "add" $ keyFilename ks
showStart "add" $ toRawFilePath $ keyFilename ks
ingest nullMeterUpdate (Just $ LockedDown lockdownconfig ks) Nothing
maybe (failedingest change) (done change $ keyFilename ks) mkey
add _ _ = return Nothing
@@ -308,7 +308,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
if M.null m
then forM toadd (add cfg)
else forM toadd $ \c -> do
mcache <- liftIO $ genInodeCache (changeFile c) delta
mcache <- liftIO $ genInodeCache (toRawFilePath (changeFile c)) delta
case mcache of
Nothing -> add cfg c
Just cache ->
@@ -325,7 +325,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
removedKeysMap :: InodeComparisonType -> [Change] -> Annex (M.Map InodeCacheKey Key)
removedKeysMap ct l = do
mks <- forM (filter isRmChange l) $ \c ->
catKeyFile $ changeFile c
catKeyFile $ toRawFilePath $ changeFile c
M.fromList . concat <$> mapM mkpairs (catMaybes mks)
where
mkpairs k = map (\c -> (inodeCacheToKey ct c, k)) <$>
@@ -339,7 +339,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
done change file key = liftAnnex $ do
logStatus key InfoPresent
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
stagePointerFile file mode =<< hashPointerFile key
stagePointerFile (toRawFilePath file) mode =<< hashPointerFile key
showEndOk
return $ Just $ finishedChange change key

@@ -457,5 +457,5 @@ checkChangeContent change@(Change { changeInfo = i }) =
handleDrops "file renamed" present k af []
where
f = changeFile change
af = AssociatedFile (Just f)
af = AssociatedFile (Just (toRawFilePath f))
checkChangeContent _ = noop
@@ -44,7 +44,8 @@ configMonitorThread = namedThread "ConfigMonitor" $ loop =<< getConfigs
when (old /= new) $ do
let changedconfigs = new `S.difference` old
debug $ "reloading config" :
map fst (S.toList changedconfigs)
map (fromRawFilePath . fst)
(S.toList changedconfigs)
reloadConfigs new
{- Record a commit to get this config
- change pushed out to remotes. -}
@@ -53,10 +54,10 @@ configMonitorThread = namedThread "ConfigMonitor" $ loop =<< getConfigs
loop new

{- Config files, and their checksums. -}
type Configs = S.Set (FilePath, Sha)
type Configs = S.Set (RawFilePath, Sha)

{- All git-annex's config files, and actions to run when they change. -}
configFilesActions :: [(FilePath, Assistant ())]
configFilesActions :: [(RawFilePath, Assistant ())]
configFilesActions =
[ (uuidLog, void $ liftAnnex uuidDescMapLoad)
, (remoteLog, void $ liftAnnex remoteListRefresh)
@@ -89,5 +90,5 @@ getConfigs :: Assistant Configs
getConfigs = S.fromList . map extract
<$> liftAnnex (inRepo $ LsTree.lsTreeFiles Annex.Branch.fullname files)
where
files = map fst configFilesActions
files = map (fromRawFilePath . fst) configFilesActions
extract treeitem = (getTopFilePath $ LsTree.file treeitem, LsTree.sha treeitem)

@@ -26,7 +26,7 @@ import qualified Command.Sync
mergeThread :: NamedThread
mergeThread = namedThread "Merger" $ do
g <- liftAnnex gitRepo
let dir = Git.localGitDir g </> "refs"
let dir = fromRawFilePath (Git.localGitDir g) </> "refs"
liftIO $ createDirectoryIfMissing True dir
let hook a = Just <$> asIO2 (runHandler a)
changehook <- hook onChange

@@ -159,7 +159,7 @@ handleMount urlrenderer dir = do
-}
remotesUnder :: FilePath -> Assistant [Remote]
remotesUnder dir = do
repotop <- liftAnnex $ fromRepo Git.repoPath
repotop <- liftAnnex $ fromRawFilePath <$> fromRepo Git.repoPath
rs <- liftAnnex remoteList
pairs <- liftAnnex $ mapM (checkremote repotop) rs
let (waschanged, rs') = unzip pairs

@@ -119,7 +119,7 @@ pairReqReceived False urlrenderer msg = do
pairAckReceived :: Bool -> Maybe PairingInProgress -> PairMsg -> [PairingInProgress] -> Assistant [PairingInProgress]
pairAckReceived True (Just pip) msg cache = do
stopSending pip
repodir <- repoPath <$> liftAnnex gitRepo
repodir <- fromRawFilePath . repoPath <$> liftAnnex gitRepo
liftIO $ setupAuthorizedKeys msg repodir
finishedLocalPairing msg (inProgressSshKeyPair pip)
startSending pip PairDone $ multicastPairMsg

@@ -155,10 +155,11 @@ dailyCheck urlrenderer = do
(unstaged, cleanup) <- liftIO $ Git.LsFiles.notInRepo False ["."] g
now <- liftIO getPOSIXTime
forM_ unstaged $ \file -> do
ms <- liftIO $ catchMaybeIO $ getSymbolicLinkStatus file
let file' = fromRawFilePath file
ms <- liftIO $ catchMaybeIO $ getSymbolicLinkStatus file'
case ms of
Just s | toonew (statusChangeTime s) now -> noop
| isSymbolicLink s -> addsymlink file ms
| isSymbolicLink s -> addsymlink file' ms
_ -> noop
liftIO $ void cleanup

@@ -268,5 +269,5 @@ checkOldUnused urlrenderer = go =<< annexExpireUnused <$> liftAnnex Annex.getGit
checkRepoExists :: Assistant ()
checkRepoExists = do
g <- liftAnnex gitRepo
liftIO $ unlessM (doesDirectoryExist $ Git.repoPath g) $
liftIO $ unlessM (doesDirectoryExist $ fromRawFilePath $ Git.repoPath g) $
terminateSelf

@@ -138,8 +138,9 @@ startupScan scanner = do
top <- liftAnnex $ fromRepo Git.repoPath
(fs, cleanup) <- liftAnnex $ inRepo $ LsFiles.deleted [top]
forM_ fs $ \f -> do
liftAnnex $ onDel' f
maybe noop recordChange =<< madeChange f RmChange
let f' = fromRawFilePath f
liftAnnex $ onDel' f'
maybe noop recordChange =<< madeChange f' RmChange
void $ liftIO cleanup

liftAnnex $ showAction "started"
@@ -206,14 +207,14 @@ shouldRestage ds = scanComplete ds || forceRestage ds

onAddUnlocked :: Bool -> GetFileMatcher -> Handler
onAddUnlocked symlinkssupported matcher f fs = do
mk <- liftIO $ isPointerFile f
mk <- liftIO $ isPointerFile $ toRawFilePath f
case mk of
Nothing -> onAddUnlocked' contentchanged addassociatedfile addlink samefilestatus symlinkssupported matcher f fs
Just k -> addlink f k
where
addassociatedfile key file =
Database.Keys.addAssociatedFile key
=<< inRepo (toTopFilePath file)
=<< inRepo (toTopFilePath (toRawFilePath file))
samefilestatus key file status = do
cache <- Database.Keys.getInodeCaches key
curr <- withTSDelta $ \delta -> liftIO $ toInodeCache delta file status
@@ -223,12 +224,12 @@ onAddUnlocked symlinkssupported matcher f fs = do
_ -> return False
contentchanged oldkey file = do
Database.Keys.removeAssociatedFile oldkey
=<< inRepo (toTopFilePath file)
=<< inRepo (toTopFilePath (toRawFilePath file))
unlessM (inAnnex oldkey) $
logStatus oldkey InfoMissing
addlink file key = do
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
liftAnnex $ stagePointerFile file mode =<< hashPointerFile key
liftAnnex $ stagePointerFile (toRawFilePath file) mode =<< hashPointerFile key
madeChange file $ LinkChange (Just key)

onAddUnlocked'
@@ -240,7 +241,7 @@ onAddUnlocked'
-> GetFileMatcher
-> Handler
onAddUnlocked' contentchanged addassociatedfile addlink samefilestatus symlinkssupported matcher file fs = do
v <- liftAnnex $ catKeyFile file
v <- liftAnnex $ catKeyFile (toRawFilePath file)
case (v, fs) of
(Just key, Just filestatus) ->
ifM (liftAnnex $ samefilestatus key file filestatus)
@@ -270,7 +271,8 @@ onAddUnlocked' contentchanged addassociatedfile addlink samefilestatus symlinkss
guardSymlinkStandin mk a
| symlinkssupported = a
| otherwise = do
linktarget <- liftAnnex $ getAnnexLinkTarget file
linktarget <- liftAnnex $ getAnnexLinkTarget $
toRawFilePath file
case linktarget of
Nothing -> a
Just lt -> do
@@ -287,7 +289,7 @@ onAddUnlocked' contentchanged addassociatedfile addlink samefilestatus symlinkss
onAddSymlink :: Handler
onAddSymlink file filestatus = unlessIgnored file $ do
linktarget <- liftIO (catchMaybeIO $ readSymbolicLink file)
kv <- liftAnnex (lookupFile file)
kv <- liftAnnex (lookupFile (toRawFilePath file))
onAddSymlink' linktarget kv file filestatus

onAddSymlink' :: Maybe String -> Maybe Key -> Handler
@@ -299,7 +301,7 @@ onAddSymlink' linktarget mk file filestatus = go mk
then ensurestaged (Just link) =<< getDaemonStatus
else do
liftAnnex $ replaceFile file $
makeAnnexLink link
makeAnnexLink link . toRawFilePath
addLink file link (Just key)
-- other symlink, not git-annex
go Nothing = ensurestaged linktarget =<< getDaemonStatus
@@ -332,8 +334,8 @@ addLink file link mk = do
case v of
Just (currlink, sha, _type)
| s2w8 link == L.unpack currlink ->
stageSymlink file sha
_ -> stageSymlink file =<< hashSymlink link
stageSymlink (toRawFilePath file) sha
_ -> stageSymlink (toRawFilePath file) =<< hashSymlink link
madeChange file $ LinkChange mk

onDel :: Handler
@@ -344,12 +346,12 @@ onDel file _ = do

onDel' :: FilePath -> Annex ()
onDel' file = do
topfile <- inRepo (toTopFilePath file)
topfile <- inRepo (toTopFilePath (toRawFilePath file))
withkey $ flip Database.Keys.removeAssociatedFile topfile
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.unstageFile file)
where
withkey a = maybe noop a =<< catKeyFile file
withkey a = maybe noop a =<< catKeyFile (toRawFilePath file)

{- A directory has been deleted, or moved, so tell git to remove anything
- that was inside it from its cache. Since it could reappear at any time,
@@ -360,14 +362,15 @@ onDel' file = do
onDelDir :: Handler
onDelDir dir _ = do
debug ["directory deleted", dir]
(fs, clean) <- liftAnnex $ inRepo $ LsFiles.deleted [dir]
(fs, clean) <- liftAnnex $ inRepo $ LsFiles.deleted [toRawFilePath dir]
let fs' = map fromRawFilePath fs

liftAnnex $ mapM_ onDel' fs
liftAnnex $ mapM_ onDel' fs'

-- Get the events queued up as fast as possible, so the
-- committer sees them all in one block.
now <- liftIO getCurrentTime
recordChanges $ map (\f -> Change now f RmChange) fs
recordChanges $ map (\f -> Change now f RmChange) fs'

void $ liftIO clean
noChange

@@ -100,7 +100,7 @@ webAppThread assistantdata urlrenderer noannex cannotrun postfirstrun listenhost
getreldir
| noannex = return Nothing
| otherwise = Just <$>
(relHome =<< absPath
(relHome =<< absPath . fromRawFilePath
=<< getAnnex' (fromRepo repoPath))
go tlssettings addr webapp htmlshim urlfile = do
let url = myUrl tlssettings webapp addr

@@ -161,7 +161,7 @@ genTransfer t info = case transferRemote info of
AssociatedFile Nothing -> noop
AssociatedFile (Just af) -> void $
addAlert $ makeAlertFiller True $
transferFileAlert direction True af
transferFileAlert direction True (fromRawFilePath af)
unless isdownload $
handleDrops
("object uploaded to " ++ show remote)

@@ -64,7 +64,7 @@ describeUnused' whenbig = liftAnnex $ go =<< readUnusedLog ""

sumkeysize s k = s + fromMaybe 0 (fromKey keySize k)

forpath a = inRepo $ liftIO . a . Git.repoPath
forpath a = inRepo $ liftIO . a . fromRawFilePath . Git.repoPath

{- With a duration, expires all unused files that are older.
- With Nothing, expires *all* unused files. -}
|
||||
|
|
|
@ -87,7 +87,7 @@ startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO ol
|
|||
hook <- asIO1 $ distributionDownloadComplete d dest cleanup
|
||||
modifyDaemonStatus_ $ \s -> s
|
||||
{ transferHook = M.insert k hook (transferHook s) }
|
||||
maybe noop (queueTransfer "upgrade" Next (AssociatedFile (Just f)) t)
|
||||
maybe noop (queueTransfer "upgrade" Next (AssociatedFile (Just (toRawFilePath f))) t)
|
||||
=<< liftAnnex (remoteFromUUID webUUID)
|
||||
startTransfer t
|
||||
k = mkKey $ const $ distributionKey d
|
||||
|
@ -113,7 +113,7 @@ distributionDownloadComplete d dest cleanup t
|
|||
| transferDirection t == Download = do
|
||||
debug ["finished downloading git-annex distribution"]
|
||||
maybe (failedupgrade "bad download") go
|
||||
=<< liftAnnex (withObjectLoc k fsckit)
|
||||
=<< liftAnnex (withObjectLoc k (fsckit . fromRawFilePath))
|
||||
| otherwise = cleanup
|
||||
where
|
||||
k = mkKey $ const $ distributionKey d
|
||||
|
|
|
@ -78,7 +78,7 @@ deleteCurrentRepository = dangerPage $ do
|
|||
sanityVerifierAForm $ SanityVerifier magicphrase
|
||||
case result of
|
||||
FormSuccess _ -> liftH $ do
|
||||
dir <- liftAnnex $ fromRepo Git.repoPath
|
||||
dir <- liftAnnex $ fromRawFilePath <$> fromRepo Git.repoPath
|
||||
liftIO $ removeAutoStartFile dir
|
||||
|
||||
{- Disable syncing to this repository, and all
|
||||
|
|
|
@ -101,11 +101,12 @@ setRepoConfig uuid mremote oldc newc = do
|
|||
- there's not. Special remotes don't normally
|
||||
- have that, and don't use it. Temporarily add
|
||||
- it if it's missing. -}
|
||||
let remotefetch = "remote." ++ T.unpack (repoName oldc) ++ ".fetch"
|
||||
let remotefetch = Git.ConfigKey $ encodeBS' $
|
||||
"remote." ++ T.unpack (repoName oldc) ++ ".fetch"
|
||||
needfetch <- isNothing <$> fromRepo (Git.Config.getMaybe remotefetch)
|
||||
when needfetch $
|
||||
inRepo $ Git.Command.run
|
||||
[Param "config", Param remotefetch, Param ""]
|
||||
[Param "config", Param (Git.fromConfigKey remotefetch), Param ""]
|
||||
inRepo $ Git.Command.run
|
||||
[ Param "remote"
|
||||
, Param "rename"
|
||||
|
@ -237,7 +238,7 @@ checkAssociatedDirectory cfg (Just r) = do
|
|||
RepoGroupStandard gr -> case associatedDirectory repoconfig gr of
|
||||
Just d -> inRepo $ \g ->
|
||||
createDirectoryIfMissing True $
|
||||
Git.repoPath g </> d
|
||||
fromRawFilePath (Git.repoPath g) </> d
|
||||
Nothing -> noop
|
||||
_ -> noop
|
||||
|
||||
|
|
|
@ -336,7 +336,7 @@ getFinishAddDriveR drive = go
|
|||
isnew <- liftIO $ makeRepo dir True
|
||||
{- Removable drives are not reliable media, so enable fsync. -}
|
||||
liftIO $ inDir dir $
|
||||
setConfig (ConfigKey "core.fsyncobjectfiles")
|
||||
setConfig "core.fsyncobjectfiles"
|
||||
(Git.Config.boolConfig True)
|
||||
(u, r) <- a isnew
|
||||
when isnew $
|
||||
|
|
|
@ -173,7 +173,7 @@ getFinishLocalPairR = postFinishLocalPairR
|
|||
postFinishLocalPairR :: PairMsg -> Handler Html
|
||||
#ifdef WITH_PAIRING
|
||||
postFinishLocalPairR msg = promptSecret (Just msg) $ \_ secret -> do
|
||||
repodir <- liftH $ repoPath <$> liftAnnex gitRepo
|
||||
repodir <- liftH $ fromRawFilePath . repoPath <$> liftAnnex gitRepo
|
||||
liftIO $ setup repodir
|
||||
startLocalPairing PairAck (cleanup repodir) alert uuid "" secret
|
||||
where
|
||||
|
|
|
@ -94,7 +94,7 @@ storePrefs p = do
|
|||
unsetConfig (annexConfig "numcopies") -- deprecated
|
||||
setConfig (annexConfig "autoupgrade") (fromAutoUpgrade $ autoUpgrade p)
|
||||
unlessM ((==) <$> pure (autoStart p) <*> inAutoStartFile) $ do
|
||||
here <- fromRepo Git.repoPath
|
||||
here <- fromRawFilePath <$> fromRepo Git.repoPath
|
||||
liftIO $ if autoStart p
|
||||
then addAutoStartFile here
|
||||
else removeAutoStartFile here
|
||||
|
@ -118,5 +118,5 @@ postPreferencesR = page "Preferences" (Just Configuration) $ do
|
|||
|
||||
inAutoStartFile :: Annex Bool
|
||||
inAutoStartFile = do
|
||||
here <- liftIO . absPath =<< fromRepo Git.repoPath
|
||||
here <- liftIO . absPath . fromRawFilePath =<< fromRepo Git.repoPath
|
||||
any (`equalFilePath` here) <$> liftIO readAutoStartFile
|
||||
|
|
|
@ -20,7 +20,7 @@ import Types.StandardGroups
|
|||
import Utility.UserInfo
|
||||
import Utility.Gpg
|
||||
import Types.Remote (RemoteConfig)
|
||||
import Git.Types (RemoteName, fromRef)
|
||||
import Git.Types (RemoteName, fromRef, fromConfigKey)
|
||||
import qualified Remote.GCrypt as GCrypt
|
||||
import qualified Annex
|
||||
import qualified Git.Command
|
||||
|
@ -317,7 +317,8 @@ testServer sshinput@(SshInput { inputHostname = Just hn }) = do
|
|||
else T.pack $ "Failed to ssh to the server. Transcript: " ++ s
|
||||
finduuid (k, v)
|
||||
| k == "annex.uuid" = Just $ toUUID v
|
||||
| k == GCrypt.coreGCryptId = Just $ genUUIDInNameSpace gCryptNameSpace v
|
||||
| k == fromConfigKey GCrypt.coreGCryptId =
|
||||
Just $ genUUIDInNameSpace gCryptNameSpace v
|
||||
| otherwise = Nothing
|
||||
|
||||
checkcommand c = "if which " ++ c ++ "; then " ++ report c ++ "; fi"
|
||||
|
|
|
@ -45,7 +45,7 @@ transfersDisplay = do
|
|||
transferPaused info || isNothing (startedTime info)
|
||||
desc transfer info = case associatedFile info of
|
||||
AssociatedFile Nothing -> serializeKey $ transferKey transfer
|
||||
AssociatedFile (Just af) -> af
|
||||
AssociatedFile (Just af) -> fromRawFilePath af
|
||||
|
||||
{- Simplifies a list of transfers, avoiding display of redundant
|
||||
- equivilant transfers. -}
|
||||
|
@ -118,7 +118,8 @@ getFileBrowserR = whenM openFileBrowser redirectBack
|
|||
- blocking the response to the browser on it. -}
|
||||
openFileBrowser :: Handler Bool
|
||||
openFileBrowser = do
|
||||
path <- liftIO . absPath =<< liftAnnex (fromRepo Git.repoPath)
|
||||
path <- liftIO . absPath . fromRawFilePath
|
||||
=<< liftAnnex (fromRepo Git.repoPath)
|
||||
#ifdef darwin_HOST_OS
|
||||
let cmd = "open"
|
||||
let p = proc cmd [path]
|
||||
|
|
|
@ -188,7 +188,7 @@ trivialMigrate' oldkey newbackend afile maxextlen
|
|||
AssociatedFile Nothing -> Nothing
|
||||
AssociatedFile (Just file) -> Just $ alterKey oldkey $ \d -> d
|
||||
{ keyName = keyHash oldkey
|
||||
<> encodeBS (selectExtension maxextlen file)
|
||||
<> encodeBS' (selectExtension maxextlen (fromRawFilePath file))
|
||||
, keyVariety = newvariety
|
||||
}
|
||||
{- Upgrade to fix bad previous migration that created a
|
||||
|
|
|
@ -11,6 +11,7 @@ import Annex.Common
import Utility.Hash

import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L

{- Generates a keyName from an input string. Takes care of sanitizing it.
- If it's not too long, the full string is used as the keyName.

@ -21,11 +22,12 @@ genKeyName s
-- Avoid making keys longer than the length of a SHA256 checksum.
| bytelen > sha256len = encodeBS' $
truncateFilePath (sha256len - md5len - 1) s' ++ "-" ++
show (md5 (encodeBL s))
show (md5 bl)
| otherwise = encodeBS' s'
where
s' = preSanitizeKeyName s
bytelen = length (decodeW8 s')
bl = encodeBL s
bytelen = fromIntegral $ L.length bl

sha256len = 64
md5len = 32
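The comment in the hunk above states the key-name rule: the sanitized input is used whole unless it would be longer than a SHA256 checksum, in which case it is truncated and an MD5 of the full input is appended so distinct long names remain distinct. Below is a rough standalone sketch of that cap, assuming the cryptonite package for MD5; unlike the real truncateFilePath it counts characters rather than encoded bytes.

import Crypto.Hash (Digest, MD5, hash)
import qualified Data.ByteString.Char8 as B8

sha256len, md5len :: Int
sha256len = 64  -- hex length of a SHA256 checksum
md5len = 32     -- hex length of an MD5 checksum

-- Cap a name at sha256len characters; when truncating, append the MD5 of
-- the whole input to keep long names unique.
capKeyName :: String -> String
capKeyName s
    | length s > sha256len =
        take (sha256len - md5len - 1) s ++ "-" ++ show (md5of s)
    | otherwise = s
  where
    md5of :: String -> Digest MD5
    md5of = hash . B8.pack

main :: IO ()
main = putStrLn (capKeyName (replicate 200 'x'))
-- prints 31 'x' characters, a dash, and 32 hex digits: 64 characters in all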
@ -38,7 +38,8 @@ keyValue source _ = do
let f = contentLocation source
stat <- liftIO $ getFileStatus f
sz <- liftIO $ getFileSize' f stat
relf <- getTopFilePath <$> inRepo (toTopFilePath $ keyFilename source)
relf <- fromRawFilePath . getTopFilePath
<$> inRepo (toTopFilePath $ toRawFilePath $ keyFilename source)
return $ Just $ mkKey $ \k -> k
{ keyName = genKeyName relf
, keyVariety = WORMKey
CHANGELOG: 10 changed lines

@ -1,3 +1,13 @@
git-annex (7.20191219) UNRELEASED; urgency=medium

  * Optimised processing of many files, especially by commands like find
    and whereis that only report on the state of the repository. Commands
    like get also sped up in cases where they have to check a lot of
    files but only transfer a few files. Speedups range from 30-100%.
  * Added build dependency on the filepath-bytestring library.

 -- Joey Hess <id@joeyh.name>  Wed, 18 Dec 2019 15:12:40 -0400

git-annex (7.20191218) upstream; urgency=medium

  * git-lfs: The url provided to initremote/enableremote will now be
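The changelog entry above attributes the speedups to moving git-annex's internal path handling from String to ByteString-backed paths (hence the new filepath-bytestring build dependency), which is what the FilePath -> RawFilePath churn throughout this diff implements. The following is a minimal sketch of the idea, not the project's actual code: RawFilePath and the two conversion helpers are defined locally for illustration, and the real conversions go through the filesystem encoding rather than Char8 packing.

import qualified Data.ByteString.Char8 as B8

-- Illustrative stand-ins; git-annex takes RawFilePath from the unix and
-- filepath-bytestring packages and has its own encoding-aware conversions.
type RawFilePath = B8.ByteString

toRawFilePath :: FilePath -> RawFilePath
toRawFilePath = B8.pack

fromRawFilePath :: RawFilePath -> FilePath
fromRawFilePath = B8.unpack

-- Process many paths without a String round-trip per file; only the
-- boundaries that still need FilePath convert back.
printDeleted :: [RawFilePath] -> IO ()
printDeleted = mapM_ B8.putStrLn

main :: IO ()
main = printDeleted (map toRawFilePath ["foo", "dir/bar"])

Call sites that still need a String, like the assistant hunks earlier in this diff, bridge with fromRawFilePath at the edge, which is the recurring pattern in these changes.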
COPYRIGHT: 35 changed lines

@ -29,6 +29,11 @@ Copyright: 2018 Joey Hess <id@joeyh.name>
 2013 Michael Snoyman
License: Expat

Files: Utility/Attoparsec.hs
Copyright: 2019 Joey Hess <id@joeyh.name>
 2007-2015 Bryan O'Sullivan
License: BSD-3-clause

Files: Utility/GitLFS.hs
Copyright: © 2019 Joey Hess <id@joeyh.name>
License: AGPL-3+
@ -112,7 +117,35 @@ License: BSD-2-clause
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.

License: BSD-3-clause
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
.
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
.
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

License: Expat
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
@ -102,7 +102,8 @@ batchFilesMatching :: BatchFormat -> (FilePath -> CommandStart) -> Annex ()
batchFilesMatching fmt a = do
matcher <- getMatcher
batchStart fmt $ \f ->
ifM (matcher $ MatchingFile $ FileInfo f f)
let f' = toRawFilePath f
in ifM (matcher $ MatchingFile $ FileInfo f' f')
( a f
, return Nothing
)
@ -92,7 +92,7 @@ gitAnnexGlobalOptions = commonGlobalOptions ++
|
|||
where
|
||||
setnumcopies n = Annex.changeState $ \s -> s { Annex.forcenumcopies = Just $ NumCopies n }
|
||||
setuseragent v = Annex.changeState $ \s -> s { Annex.useragent = Just v }
|
||||
setgitconfig v = Annex.adjustGitRepo $ \r -> Git.Config.store v $
|
||||
setgitconfig v = Annex.adjustGitRepo $ \r -> Git.Config.store (encodeBS' v) $
|
||||
r { gitGlobalOpts = gitGlobalOpts r ++ [Param "-c", Param v] }
|
||||
setdesktopnotify v = Annex.changeState $ \s -> s { Annex.desktopnotify = Annex.desktopnotify s <> v }
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ remoteUUID = Field "remoteuuid" $
|
|||
associatedFile :: Field
|
||||
associatedFile = Field "associatedfile" $ \f ->
|
||||
-- is the file a safe relative filename?
|
||||
not (absoluteGitPath f) && not ("../" `isPrefixOf` f)
|
||||
not (absoluteGitPath (toRawFilePath f)) && not ("../" `isPrefixOf` f)
|
||||
|
||||
direct :: Field
|
||||
direct = Field "direct" $ \f -> f == "1"
|
||||
|
|
|
@ -33,12 +33,13 @@ import Annex.CurrentBranch
|
|||
import Annex.Content
|
||||
import Annex.InodeSentinal
|
||||
import qualified Database.Keys
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
withFilesInGit :: (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesInGit :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesInGit a l = seekActions $ prepFiltered a $
|
||||
seekHelper LsFiles.inRepo l
|
||||
|
||||
withFilesInGitNonRecursive :: String -> (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesInGitNonRecursive :: String -> (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesInGitNonRecursive needforce a l = ifM (Annex.getState Annex.force)
|
||||
( withFilesInGit a l
|
||||
, if null l
|
||||
|
@ -48,7 +49,7 @@ withFilesInGitNonRecursive needforce a l = ifM (Annex.getState Annex.force)
|
|||
where
|
||||
getfiles c [] = return (reverse c)
|
||||
getfiles c ((WorkTreeItem p):ps) = do
|
||||
(fs, cleanup) <- inRepo $ LsFiles.inRepo [p]
|
||||
(fs, cleanup) <- inRepo $ LsFiles.inRepo [toRawFilePath p]
|
||||
case fs of
|
||||
[f] -> do
|
||||
void $ liftIO $ cleanup
|
||||
|
@ -58,11 +59,11 @@ withFilesInGitNonRecursive needforce a l = ifM (Annex.getState Annex.force)
|
|||
getfiles c ps
|
||||
_ -> giveup needforce
|
||||
|
||||
withFilesNotInGit :: Bool -> (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesNotInGit :: Bool -> (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesNotInGit skipdotfiles a l
|
||||
| skipdotfiles = do
|
||||
{- dotfiles are not acted on unless explicitly listed -}
|
||||
files <- filter (not . dotfile) <$>
|
||||
files <- filter (not . dotfile . fromRawFilePath) <$>
|
||||
seekunless (null ps && not (null l)) ps
|
||||
dotfiles <- seekunless (null dotps) dotps
|
||||
go (files++dotfiles)
|
||||
|
@ -74,9 +75,9 @@ withFilesNotInGit skipdotfiles a l
|
|||
force <- Annex.getState Annex.force
|
||||
g <- gitRepo
|
||||
liftIO $ Git.Command.leaveZombie
|
||||
<$> LsFiles.notInRepo force (map (\(WorkTreeItem f) -> f) l') g
|
||||
<$> LsFiles.notInRepo force (map (\(WorkTreeItem f) -> toRawFilePath f) l') g
|
||||
go fs = seekActions $ prepFiltered a $
|
||||
return $ concat $ segmentPaths (map (\(WorkTreeItem f) -> f) l) fs
|
||||
return $ concat $ segmentPaths (map (\(WorkTreeItem f) -> toRawFilePath f) l) fs
|
||||
|
||||
withPathContents :: ((FilePath, FilePath) -> CommandSeek) -> CmdParams -> CommandSeek
|
||||
withPathContents a params = do
|
||||
|
@ -93,8 +94,8 @@ withPathContents a params = do
|
|||
, return [(p, takeFileName p)]
|
||||
)
|
||||
checkmatch matcher (f, relf) = matcher $ MatchingFile $ FileInfo
|
||||
{ currFile = f
|
||||
, matchFile = relf
|
||||
{ currFile = toRawFilePath f
|
||||
, matchFile = toRawFilePath relf
|
||||
}
|
||||
|
||||
withWords :: ([String] -> CommandSeek) -> CmdParams -> CommandSeek
|
||||
|
@ -110,30 +111,30 @@ withPairs a params = seekActions $ return $ map a $ pairs [] params
|
|||
pairs c (x:y:xs) = pairs ((x,y):c) xs
|
||||
pairs _ _ = giveup "expected pairs"
|
||||
|
||||
withFilesToBeCommitted :: (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesToBeCommitted :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesToBeCommitted a l = seekActions $ prepFiltered a $
|
||||
seekHelper LsFiles.stagedNotDeleted l
|
||||
|
||||
isOldUnlocked :: FilePath -> Annex Bool
|
||||
isOldUnlocked :: RawFilePath -> Annex Bool
|
||||
isOldUnlocked f = liftIO (notSymlink f) <&&>
|
||||
(isJust <$> catKeyFile f <||> isJust <$> catKeyFileHEAD f)
|
||||
|
||||
{- unlocked pointer files that are staged, and whose content has not been
|
||||
- modified-}
|
||||
withUnmodifiedUnlockedPointers :: (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withUnmodifiedUnlockedPointers :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withUnmodifiedUnlockedPointers a l = seekActions $
|
||||
prepFiltered a unlockedfiles
|
||||
where
|
||||
unlockedfiles = filterM isUnmodifiedUnlocked
|
||||
=<< seekHelper LsFiles.typeChangedStaged l
|
||||
|
||||
isUnmodifiedUnlocked :: FilePath -> Annex Bool
|
||||
isUnmodifiedUnlocked :: RawFilePath -> Annex Bool
|
||||
isUnmodifiedUnlocked f = catKeyFile f >>= \case
|
||||
Nothing -> return False
|
||||
Just k -> sameInodeCache f =<< Database.Keys.getInodeCaches k
|
||||
|
||||
{- Finds files that may be modified. -}
|
||||
withFilesMaybeModified :: (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesMaybeModified :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesMaybeModified a params = seekActions $
|
||||
prepFiltered a $ seekHelper LsFiles.modified params
|
||||
|
||||
|
@ -225,20 +226,21 @@ withKeyOptions' ko auto mkkeyaction fallbackaction params = do
|
|||
forM_ ts $ \(t, i) ->
|
||||
keyaction (transferKey t, mkActionItem (t, i))
|
||||
|
||||
prepFiltered :: (FilePath -> CommandSeek) -> Annex [FilePath] -> Annex [CommandSeek]
|
||||
prepFiltered :: (RawFilePath -> CommandSeek) -> Annex [RawFilePath] -> Annex [CommandSeek]
|
||||
prepFiltered a fs = do
|
||||
matcher <- Limit.getMatcher
|
||||
map (process matcher) <$> fs
|
||||
where
|
||||
process matcher f = whenM (matcher $ MatchingFile $ FileInfo f f) $ a f
|
||||
process matcher f =
|
||||
whenM (matcher $ MatchingFile $ FileInfo f f) $ a f
|
||||
|
||||
seekActions :: Annex [CommandSeek] -> Annex ()
|
||||
seekActions gen = sequence_ =<< gen
|
||||
|
||||
seekHelper :: ([FilePath] -> Git.Repo -> IO ([FilePath], IO Bool)) -> [WorkTreeItem] -> Annex [FilePath]
|
||||
seekHelper :: ([RawFilePath] -> Git.Repo -> IO ([RawFilePath], IO Bool)) -> [WorkTreeItem] -> Annex [RawFilePath]
|
||||
seekHelper a l = inRepo $ \g ->
|
||||
concat . concat <$> forM (segmentXargsOrdered l')
|
||||
(runSegmentPaths (\fs -> Git.Command.leaveZombie <$> a fs g))
|
||||
(runSegmentPaths (\fs -> Git.Command.leaveZombie <$> a fs g) . map toRawFilePath)
|
||||
where
|
||||
l' = map (\(WorkTreeItem f) -> f) l
|
||||
|
||||
|
@ -264,14 +266,14 @@ workTreeItems' (AllowHidden allowhidden) ps = do
|
|||
unlessM (exists p <||> hidden currbranch p) $ do
|
||||
toplevelWarning False (p ++ " not found")
|
||||
Annex.incError
|
||||
return (map WorkTreeItem ps)
|
||||
return (map (WorkTreeItem) ps)
|
||||
where
|
||||
exists p = isJust <$> liftIO (catchMaybeIO $ getSymbolicLinkStatus p)
|
||||
hidden currbranch p
|
||||
| allowhidden = do
|
||||
f <- liftIO $ relPathCwdToFile p
|
||||
isJust <$> catObjectMetaDataHidden f currbranch
|
||||
isJust <$> catObjectMetaDataHidden (toRawFilePath f) currbranch
|
||||
| otherwise = return False
|
||||
|
||||
notSymlink :: FilePath -> IO Bool
|
||||
notSymlink f = liftIO $ not . isSymbolicLink <$> getSymbolicLinkStatus f
|
||||
notSymlink :: RawFilePath -> IO Bool
|
||||
notSymlink f = liftIO $ not . isSymbolicLink <$> R.getSymbolicLinkStatus f
|
||||
|
|
|
@ -19,6 +19,7 @@ import Annex.Link
|
|||
import Annex.Tmp
|
||||
import Messages.Progress
|
||||
import Git.FilePath
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
cmd :: Command
|
||||
cmd = notBareRepo $
|
||||
|
@ -50,7 +51,7 @@ optParser desc = AddOptions
|
|||
seek :: AddOptions -> CommandSeek
|
||||
seek o = startConcurrency commandStages $ do
|
||||
matcher <- largeFilesMatcher
|
||||
let gofile file = ifM (checkFileMatcher matcher file <||> Annex.getState Annex.force)
|
||||
let gofile file = ifM (checkFileMatcher matcher (fromRawFilePath file) <||> Annex.getState Annex.force)
|
||||
( start file
|
||||
, ifM (annexAddSmallFiles <$> Annex.getGitConfig)
|
||||
( startSmall file
|
||||
|
@ -61,7 +62,7 @@ seek o = startConcurrency commandStages $ do
|
|||
Batch fmt
|
||||
| updateOnly o ->
|
||||
giveup "--update --batch is not supported"
|
||||
| otherwise -> batchFilesMatching fmt gofile
|
||||
| otherwise -> batchFilesMatching fmt (gofile . toRawFilePath)
|
||||
NoBatch -> do
|
||||
l <- workTreeItems (addThese o)
|
||||
let go a = a (commandAction . gofile) l
|
||||
|
@ -71,28 +72,28 @@ seek o = startConcurrency commandStages $ do
|
|||
go withUnmodifiedUnlockedPointers
|
||||
|
||||
{- Pass file off to git-add. -}
|
||||
startSmall :: FilePath -> CommandStart
|
||||
startSmall :: RawFilePath -> CommandStart
|
||||
startSmall file = starting "add" (ActionItemWorkTreeFile file) $
|
||||
next $ addSmall file
|
||||
|
||||
addSmall :: FilePath -> Annex Bool
|
||||
addSmall :: RawFilePath -> Annex Bool
|
||||
addSmall file = do
|
||||
showNote "non-large file; adding content to git repository"
|
||||
addFile file
|
||||
|
||||
addFile :: FilePath -> Annex Bool
|
||||
addFile :: RawFilePath -> Annex Bool
|
||||
addFile file = do
|
||||
ps <- forceParams
|
||||
Annex.Queue.addCommand "add" (ps++[Param "--"]) [file]
|
||||
Annex.Queue.addCommand "add" (ps++[Param "--"]) [fromRawFilePath file]
|
||||
return True
|
||||
|
||||
start :: FilePath -> CommandStart
|
||||
start :: RawFilePath -> CommandStart
|
||||
start file = do
|
||||
mk <- liftIO $ isPointerFile file
|
||||
maybe go fixuppointer mk
|
||||
where
|
||||
go = ifAnnexed file addpresent add
|
||||
add = liftIO (catchMaybeIO $ getSymbolicLinkStatus file) >>= \case
|
||||
add = liftIO (catchMaybeIO $ R.getSymbolicLinkStatus file) >>= \case
|
||||
Nothing -> stop
|
||||
Just s
|
||||
| not (isRegularFile s) && not (isSymbolicLink s) -> stop
|
||||
|
@ -102,13 +103,13 @@ start file = do
|
|||
then next $ addFile file
|
||||
else perform file
|
||||
addpresent key =
|
||||
liftIO (catchMaybeIO $ getSymbolicLinkStatus file) >>= \case
|
||||
liftIO (catchMaybeIO $ R.getSymbolicLinkStatus file) >>= \case
|
||||
Just s | isSymbolicLink s -> fixuplink key
|
||||
_ -> add
|
||||
fixuplink key = starting "add" (ActionItemWorkTreeFile file) $ do
|
||||
-- the annexed symlink is present but not yet added to git
|
||||
liftIO $ removeFile file
|
||||
addLink file key Nothing
|
||||
liftIO $ removeFile (fromRawFilePath file)
|
||||
addLink (fromRawFilePath file) key Nothing
|
||||
next $
|
||||
cleanup key =<< inAnnex key
|
||||
fixuppointer key = starting "add" (ActionItemWorkTreeFile file) $ do
|
||||
|
@ -116,14 +117,14 @@ start file = do
|
|||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath file)
|
||||
next $ addFile file
|
||||
|
||||
perform :: FilePath -> CommandPerform
|
||||
perform :: RawFilePath -> CommandPerform
|
||||
perform file = withOtherTmp $ \tmpdir -> do
|
||||
lockingfile <- not <$> addUnlocked
|
||||
let cfg = LockDownConfig
|
||||
{ lockingFile = lockingfile
|
||||
, hardlinkFileTmpDir = Just tmpdir
|
||||
}
|
||||
ld <- lockDown cfg file
|
||||
ld <- lockDown cfg (fromRawFilePath file)
|
||||
let sizer = keySource <$> ld
|
||||
v <- metered Nothing sizer $ \_meter meterupdate ->
|
||||
ingestAdd meterupdate ld
|
||||
|
|
|
@ -31,7 +31,7 @@ perform key = next $ do
|
|||
addLink file key Nothing
|
||||
return True
|
||||
where
|
||||
file = "unused." ++ keyFile key
|
||||
file = "unused." ++ fromRawFilePath (keyFile key)
|
||||
|
||||
{- The content is not in the annex, but in another directory, and
|
||||
- it seems better to error out, rather than moving bad/tmp content into
|
||||
|
|
|
@ -156,7 +156,7 @@ startRemote r o file uri sz = do
|
|||
performRemote r o uri file' sz
|
||||
|
||||
performRemote :: Remote -> AddUrlOptions -> URLString -> FilePath -> Maybe Integer -> CommandPerform
|
||||
performRemote r o uri file sz = ifAnnexed file adduri geturi
|
||||
performRemote r o uri file sz = ifAnnexed (toRawFilePath file) adduri geturi
|
||||
where
|
||||
loguri = setDownloader uri OtherDownloader
|
||||
adduri = addUrlChecked o loguri file (Remote.uuid r) checkexistssize
|
||||
|
@ -180,7 +180,7 @@ downloadRemoteFile r o uri file sz = checkCanAdd file $ do
|
|||
setTempUrl urlkey loguri
|
||||
let downloader = \dest p -> fst
|
||||
<$> Remote.retrieveKeyFile r urlkey
|
||||
(AssociatedFile (Just file)) dest p
|
||||
(AssociatedFile (Just (toRawFilePath file))) dest p
|
||||
ret <- downloadWith downloader urlkey (Remote.uuid r) loguri file
|
||||
removeTempUrl urlkey
|
||||
return ret
|
||||
|
@ -212,7 +212,7 @@ startWeb o urlstring = go $ fromMaybe bad $ parseURI urlstring
|
|||
performWeb o urlstring file urlinfo
|
||||
|
||||
performWeb :: AddUrlOptions -> URLString -> FilePath -> Url.UrlInfo -> CommandPerform
|
||||
performWeb o url file urlinfo = ifAnnexed file addurl geturl
|
||||
performWeb o url file urlinfo = ifAnnexed (toRawFilePath file) addurl geturl
|
||||
where
|
||||
geturl = next $ isJust <$> addUrlFile (downloadOptions o) url urlinfo file
|
||||
addurl = addUrlChecked o url file webUUID $ \k ->
|
||||
|
@ -258,7 +258,7 @@ addUrlFile o url urlinfo file =
|
|||
|
||||
downloadWeb :: DownloadOptions -> URLString -> Url.UrlInfo -> FilePath -> Annex (Maybe Key)
|
||||
downloadWeb o url urlinfo file =
|
||||
go =<< downloadWith' downloader urlkey webUUID url (AssociatedFile (Just file))
|
||||
go =<< downloadWith' downloader urlkey webUUID url (AssociatedFile (Just (toRawFilePath file)))
|
||||
where
|
||||
urlkey = addSizeUrlKey urlinfo $ Backend.URL.fromUrl url Nothing
|
||||
downloader f p = downloadUrl urlkey p [url] f
|
||||
|
@ -278,7 +278,7 @@ downloadWeb o url urlinfo file =
|
|||
-- first, and check if that is already an annexed file,
|
||||
-- to avoid unnecessary work in that case.
|
||||
| otherwise = youtubeDlFileNameHtmlOnly url >>= \case
|
||||
Right dest -> ifAnnexed dest
|
||||
Right dest -> ifAnnexed (toRawFilePath dest)
|
||||
(alreadyannexed dest)
|
||||
(dl dest)
|
||||
Left _ -> normalfinish tmp
|
||||
|
@ -345,7 +345,7 @@ downloadWith :: (FilePath -> MeterUpdate -> Annex Bool) -> Key -> UUID -> URLStr
|
|||
downloadWith downloader dummykey u url file =
|
||||
go =<< downloadWith' downloader dummykey u url afile
|
||||
where
|
||||
afile = AssociatedFile (Just file)
|
||||
afile = AssociatedFile (Just (toRawFilePath file))
|
||||
go Nothing = return Nothing
|
||||
go (Just tmp) = finishDownloadWith tmp u url file
|
||||
|
||||
|
@ -401,7 +401,7 @@ addWorkTree u url file key mtmp = case mtmp of
|
|||
-- than the work tree file.
|
||||
liftIO $ renameFile file tmp
|
||||
go
|
||||
else void $ Command.Add.addSmall file
|
||||
else void $ Command.Add.addSmall (toRawFilePath file)
|
||||
where
|
||||
go = do
|
||||
maybeShowJSON $ JSONChunk [("key", serializeKey key)]
|
||||
|
|
|
@ -10,6 +10,9 @@ module Command.Config where
|
|||
import Command
|
||||
import Logs.Config
|
||||
import Config
|
||||
import Git.Types (ConfigKey(..), fromConfigValue)
|
||||
|
||||
import qualified Data.ByteString.Char8 as S8
|
||||
|
||||
cmd :: Command
|
||||
cmd = noMessages $ command "config" SectionSetup
|
||||
|
@ -17,9 +20,9 @@ cmd = noMessages $ command "config" SectionSetup
|
|||
paramNothing (seek <$$> optParser)
|
||||
|
||||
data Action
|
||||
= SetConfig ConfigName ConfigValue
|
||||
| GetConfig ConfigName
|
||||
| UnsetConfig ConfigName
|
||||
= SetConfig ConfigKey ConfigValue
|
||||
| GetConfig ConfigKey
|
||||
| UnsetConfig ConfigKey
|
||||
|
||||
type Name = String
|
||||
type Value = String
|
||||
|
@ -48,19 +51,19 @@ optParser _ = setconfig <|> getconfig <|> unsetconfig
|
|||
)
|
||||
|
||||
seek :: Action -> CommandSeek
|
||||
seek (SetConfig name val) = commandAction $
|
||||
startingUsualMessages name (ActionItemOther (Just val)) $ do
|
||||
setGlobalConfig name val
|
||||
setConfig (ConfigKey name) val
|
||||
seek (SetConfig ck@(ConfigKey name) val) = commandAction $
|
||||
startingUsualMessages (decodeBS' name) (ActionItemOther (Just (fromConfigValue val))) $ do
|
||||
setGlobalConfig ck val
|
||||
setConfig ck (fromConfigValue val)
|
||||
next $ return True
|
||||
seek (UnsetConfig name) = commandAction $
|
||||
startingUsualMessages name (ActionItemOther (Just "unset")) $do
|
||||
unsetGlobalConfig name
|
||||
unsetConfig (ConfigKey name)
|
||||
seek (UnsetConfig ck@(ConfigKey name)) = commandAction $
|
||||
startingUsualMessages (decodeBS' name) (ActionItemOther (Just "unset")) $do
|
||||
unsetGlobalConfig ck
|
||||
unsetConfig ck
|
||||
next $ return True
|
||||
seek (GetConfig name) = commandAction $
|
||||
seek (GetConfig ck) = commandAction $
|
||||
startingCustomOutput (ActionItemOther Nothing) $ do
|
||||
getGlobalConfig name >>= \case
|
||||
getGlobalConfig ck >>= \case
|
||||
Nothing -> return ()
|
||||
Just v -> liftIO $ putStrLn v
|
||||
Just (ConfigValue v) -> liftIO $ S8.putStrLn v
|
||||
next $ return True
|
||||
|
|
|
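The Command.Config hunks above switch the Action type from plain string names to the ConfigKey and ConfigValue types shared with the git config layer, which this diff makes ByteString-backed. A minimal sketch of why that also lets other call sites in this diff, such as setConfig "core.fsyncobjectfiles", drop the explicit ConfigKey wrapper: an IsString instance plus OverloadedStrings. These newtypes are illustrative only, not the project's exact definitions in Git.Types.

{-# LANGUAGE OverloadedStrings #-}

import Data.String (IsString(..))
import qualified Data.ByteString.Char8 as B8

newtype ConfigKey = ConfigKey B8.ByteString
    deriving (Show, Eq, Ord)

newtype ConfigValue = ConfigValue B8.ByteString
    deriving (Show, Eq, Ord)

instance IsString ConfigKey where
    fromString = ConfigKey . B8.pack

fromConfigKey :: ConfigKey -> String
fromConfigKey (ConfigKey k) = B8.unpack k

-- With OverloadedStrings a bare literal is accepted wherever a ConfigKey
-- is expected, so call sites no longer spell out the constructor.
fsyncKey :: ConfigKey
fsyncKey = "core.fsyncobjectfiles"

main :: IO ()
main = putStrLn (fromConfigKey fsyncKey)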
@ -12,6 +12,7 @@ import Annex.UUID
|
|||
import Annex.Init
|
||||
import qualified Annex.Branch
|
||||
import qualified Git.Config
|
||||
import Git.Types
|
||||
import Remote.GCrypt (coreGCryptId)
|
||||
import qualified CmdLine.GitAnnexShell.Fields as Fields
|
||||
import CmdLine.GitAnnexShell.Checks
|
||||
|
@ -28,11 +29,12 @@ seek = withNothing (commandAction start)
|
|||
start :: CommandStart
|
||||
start = do
|
||||
u <- findOrGenUUID
|
||||
showConfig "annex.uuid" $ fromUUID u
|
||||
showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
|
||||
showConfig configkeyUUID $ fromUUID u
|
||||
showConfig coreGCryptId . fromConfigValue
|
||||
=<< fromRepo (Git.Config.get coreGCryptId mempty)
|
||||
stop
|
||||
where
|
||||
showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
|
||||
showConfig k v = liftIO $ putStrLn $ fromConfigKey k ++ "=" ++ v
|
||||
|
||||
{- The repository may not yet have a UUID; automatically initialize it
|
||||
- when there's a git-annex branch available or if the autoinit field was
|
||||
|
|
|
@ -9,6 +9,9 @@ module Command.ContentLocation where
|
|||
|
||||
import Command
|
||||
import Annex.Content
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
import qualified Data.ByteString.Char8 as B8
|
||||
|
||||
cmd :: Command
|
||||
cmd = noCommit $ noMessages $
|
||||
|
@ -20,10 +23,10 @@ cmd = noCommit $ noMessages $
|
|||
run :: () -> String -> Annex Bool
|
||||
run _ p = do
|
||||
let k = fromMaybe (giveup "bad key") $ deserializeKey p
|
||||
maybe (return False) (\f -> liftIO (putStrLn f) >> return True)
|
||||
maybe (return False) (\f -> liftIO (B8.putStrLn f) >> return True)
|
||||
=<< inAnnex' (pure True) Nothing check k
|
||||
where
|
||||
check f = ifM (liftIO (doesFileExist f))
|
||||
check f = ifM (liftIO (R.doesPathExist f))
|
||||
( return (Just f)
|
||||
, return Nothing
|
||||
)
|
||||
|
|
|
@ -47,7 +47,7 @@ seek :: CopyOptions -> CommandSeek
|
|||
seek o = startConcurrency commandStages $ do
|
||||
let go = whenAnnexed $ start o
|
||||
case batchOption o of
|
||||
Batch fmt -> batchFilesMatching fmt go
|
||||
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
|
||||
NoBatch -> withKeyOptions
|
||||
(keyOptions o) (autoMode o)
|
||||
(commandAction . Command.Move.startKey (fromToOptions o) Command.Move.RemoveNever)
|
||||
|
@ -57,12 +57,12 @@ seek o = startConcurrency commandStages $ do
|
|||
{- A copy is just a move that does not delete the source file.
|
||||
- However, auto mode avoids unnecessary copies, and avoids getting or
|
||||
- sending non-preferred content. -}
|
||||
start :: CopyOptions -> FilePath -> Key -> CommandStart
|
||||
start :: CopyOptions -> RawFilePath -> Key -> CommandStart
|
||||
start o file key = stopUnless shouldCopy $
|
||||
Command.Move.start (fromToOptions o) Command.Move.RemoveNever file key
|
||||
where
|
||||
shouldCopy
|
||||
| autoMode o = want <||> numCopiesCheck file key (<)
|
||||
| autoMode o = want <||> numCopiesCheck (fromRawFilePath file) key (<)
|
||||
| otherwise = return True
|
||||
want = case fromToOptions o of
|
||||
Right (ToRemote dest) ->
|
||||
|
|
|
@ -85,12 +85,13 @@ fixupReq req@(Req {}) =
|
|||
check rOldFile rOldMode (\r f -> r { rOldFile = f }) req
|
||||
>>= check rNewFile rNewMode (\r f -> r { rNewFile = f })
|
||||
where
|
||||
check getfile getmode setfile r = case readTreeItemType (getmode r) of
|
||||
check getfile getmode setfile r = case readTreeItemType (encodeBS' (getmode r)) of
|
||||
Just TreeSymlink -> do
|
||||
v <- getAnnexLinkTarget' (getfile r) False
|
||||
v <- getAnnexLinkTarget' (toRawFilePath (getfile r)) False
|
||||
case parseLinkTargetOrPointer =<< v of
|
||||
Nothing -> return r
|
||||
Just k -> withObjectLoc k (pure . setfile r)
|
||||
Just k -> withObjectLoc k $
|
||||
pure . setfile r . fromRawFilePath
|
||||
_ -> return r
|
||||
|
||||
externalDiffer :: String -> [String] -> Differ
|
||||
|
|
|
@ -54,7 +54,7 @@ parseDropFromOption = parseRemoteOption <$> strOption
|
|||
seek :: DropOptions -> CommandSeek
|
||||
seek o = startConcurrency transferStages $
|
||||
case batchOption o of
|
||||
Batch fmt -> batchFilesMatching fmt go
|
||||
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
|
||||
NoBatch -> withKeyOptions (keyOptions o) (autoMode o)
|
||||
(commandAction . startKeys o)
|
||||
(withFilesInGit (commandAction . go))
|
||||
|
@ -62,7 +62,7 @@ seek o = startConcurrency transferStages $
|
|||
where
|
||||
go = whenAnnexed $ start o
|
||||
|
||||
start :: DropOptions -> FilePath -> Key -> CommandStart
|
||||
start :: DropOptions -> RawFilePath -> Key -> CommandStart
|
||||
start o file key = start' o key afile ai
|
||||
where
|
||||
afile = AssociatedFile (Just file)
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
- Licensed under the GNU AGPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Command.EnableRemote where
|
||||
|
||||
import Command
|
||||
|
|
|
@ -22,5 +22,5 @@ cmd = noCommit $ noMessages $ dontCheck repoExists $
|
|||
run :: Maybe Utility.Format.Format -> String -> Annex Bool
|
||||
run format p = do
|
||||
let k = fromMaybe (giveup "bad key") $ deserializeKey p
|
||||
showFormatted format (serializeKey k) (keyVars k)
|
||||
showFormatted format (serializeKey' k) (keyVars k)
|
||||
return True
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
-}
|
||||
|
||||
{-# LANGUAGE TupleSections, BangPatterns #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Command.Export where
|
||||
|
||||
|
@ -70,7 +71,7 @@ optParser _ = ExportOptions
|
|||
-- To handle renames which swap files, the exported file is first renamed
|
||||
-- to a stable temporary name based on the key.
|
||||
exportTempName :: ExportKey -> ExportLocation
|
||||
exportTempName ek = mkExportLocation $
|
||||
exportTempName ek = mkExportLocation $ toRawFilePath $
|
||||
".git-annex-tmp-content-" ++ serializeKey (asKey (ek))
|
||||
|
||||
seek :: ExportOptions -> CommandSeek
|
||||
|
@ -250,7 +251,7 @@ startExport :: Remote -> ExportHandle -> MVar FileUploaded -> MVar AllFilled ->
|
|||
startExport r db cvar allfilledvar ti = do
|
||||
ek <- exportKey (Git.LsTree.sha ti)
|
||||
stopUnless (notrecordedpresent ek) $
|
||||
starting ("export " ++ name r) (ActionItemOther (Just f)) $
|
||||
starting ("export " ++ name r) (ActionItemOther (Just (fromRawFilePath f))) $
|
||||
ifM (either (const False) id <$> tryNonAsync (checkPresentExport (exportActions r) (asKey ek) loc))
|
||||
( next $ cleanupExport r db ek loc False
|
||||
, do
|
||||
|
@ -313,14 +314,14 @@ startUnexport r db f shas = do
|
|||
eks <- forM (filter (/= nullSha) shas) exportKey
|
||||
if null eks
|
||||
then stop
|
||||
else starting ("unexport " ++ name r) (ActionItemOther (Just f')) $
|
||||
else starting ("unexport " ++ name r) (ActionItemOther (Just (fromRawFilePath f'))) $
|
||||
performUnexport r db eks loc
|
||||
where
|
||||
loc = mkExportLocation f'
|
||||
f' = getTopFilePath f
|
||||
|
||||
startUnexport' :: Remote -> ExportHandle -> TopFilePath -> ExportKey -> CommandStart
|
||||
startUnexport' r db f ek = starting ("unexport " ++ name r) (ActionItemOther (Just f')) $
|
||||
startUnexport' r db f ek = starting ("unexport " ++ name r) (ActionItemOther (Just (fromRawFilePath f'))) $
|
||||
performUnexport r db [ek] loc
|
||||
where
|
||||
loc = mkExportLocation f'
|
||||
|
@ -363,16 +364,15 @@ startRecoverIncomplete r db sha oldf
|
|||
| otherwise = do
|
||||
ek <- exportKey sha
|
||||
let loc = exportTempName ek
|
||||
starting ("unexport " ++ name r) (ActionItemOther (Just (fromExportLocation loc))) $ do
|
||||
starting ("unexport " ++ name r) (ActionItemOther (Just (fromRawFilePath (fromExportLocation loc)))) $ do
|
||||
liftIO $ removeExportedLocation db (asKey ek) oldloc
|
||||
performUnexport r db [ek] loc
|
||||
where
|
||||
oldloc = mkExportLocation oldf'
|
||||
oldf' = getTopFilePath oldf
|
||||
oldloc = mkExportLocation $ getTopFilePath oldf
|
||||
|
||||
startMoveToTempName :: Remote -> ExportHandle -> TopFilePath -> ExportKey -> CommandStart
|
||||
startMoveToTempName r db f ek = starting ("rename " ++ name r)
|
||||
(ActionItemOther $ Just $ f' ++ " -> " ++ fromExportLocation tmploc)
|
||||
(ActionItemOther $ Just $ fromRawFilePath f' ++ " -> " ++ fromRawFilePath (fromExportLocation tmploc))
|
||||
(performRename r db ek loc tmploc)
|
||||
where
|
||||
loc = mkExportLocation f'
|
||||
|
@ -383,7 +383,7 @@ startMoveFromTempName :: Remote -> ExportHandle -> ExportKey -> TopFilePath -> C
|
|||
startMoveFromTempName r db ek f = do
|
||||
let tmploc = exportTempName ek
|
||||
stopUnless (liftIO $ elem tmploc <$> getExportedLocation db (asKey ek)) $
|
||||
starting ("rename " ++ name r) (ActionItemOther (Just (fromExportLocation tmploc ++ " -> " ++ f'))) $
|
||||
starting ("rename " ++ name r) (ActionItemOther (Just (fromRawFilePath (fromExportLocation tmploc) ++ " -> " ++ fromRawFilePath f'))) $
|
||||
performRename r db ek tmploc loc
|
||||
where
|
||||
loc = mkExportLocation f'
|
||||
|
|
|
@ -9,6 +9,8 @@ module Command.Find where
|
|||
|
||||
import Data.Default
|
||||
import qualified Data.Map as M
|
||||
import qualified Data.ByteString as S
|
||||
import qualified Data.ByteString.Char8 as S8
|
||||
|
||||
import Command
|
||||
import Annex.Content
|
||||
|
@ -57,17 +59,17 @@ seek o = case batchOption o of
|
|||
(commandAction . startKeys o)
|
||||
(withFilesInGit (commandAction . go))
|
||||
=<< workTreeItems (findThese o)
|
||||
Batch fmt -> batchFilesMatching fmt go
|
||||
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
|
||||
where
|
||||
go = whenAnnexed $ start o
|
||||
|
||||
-- only files inAnnex are shown, unless the user has requested
|
||||
-- others via a limit
|
||||
start :: FindOptions -> FilePath -> Key -> CommandStart
|
||||
start :: FindOptions -> RawFilePath -> Key -> CommandStart
|
||||
start o file key =
|
||||
stopUnless (limited <||> inAnnex key) $
|
||||
startingCustomOutput key $ do
|
||||
showFormatted (formatOption o) file $ ("file", file) : keyVars key
|
||||
showFormatted (formatOption o) file $ ("file", fromRawFilePath file) : keyVars key
|
||||
next $ return True
|
||||
|
||||
startKeys :: FindOptions -> (Key, ActionItem) -> CommandStart
|
||||
|
@ -75,11 +77,11 @@ startKeys o (key, ActionItemBranchFilePath (BranchFilePath _ topf) _) =
|
|||
start o (getTopFilePath topf) key
|
||||
startKeys _ _ = stop
|
||||
|
||||
showFormatted :: Maybe Utility.Format.Format -> String -> [(String, String)] -> Annex ()
|
||||
showFormatted :: Maybe Utility.Format.Format -> S.ByteString -> [(String, String)] -> Annex ()
|
||||
showFormatted format unformatted vars =
|
||||
unlessM (showFullJSON $ JSONChunk vars) $
|
||||
case format of
|
||||
Nothing -> liftIO $ putStrLn unformatted
|
||||
Nothing -> liftIO $ S8.putStrLn unformatted
|
||||
Just formatter -> liftIO $ putStr $
|
||||
Utility.Format.format formatter $
|
||||
M.fromList vars
|
||||
|
@ -91,8 +93,8 @@ keyVars key =
|
|||
, ("bytesize", size show)
|
||||
, ("humansize", size $ roughSize storageUnits True)
|
||||
, ("keyname", decodeBS $ fromKey keyName key)
|
||||
, ("hashdirlower", hashDirLower def key)
|
||||
, ("hashdirmixed", hashDirMixed def key)
|
||||
, ("hashdirlower", fromRawFilePath $ hashDirLower def key)
|
||||
, ("hashdirmixed", fromRawFilePath $ hashDirMixed def key)
|
||||
, ("mtime", whenavail show $ fromKey keyMtime key)
|
||||
]
|
||||
where
|
||||
|
|
|
@ -17,6 +17,7 @@ import Annex.Content
|
|||
import Annex.Perms
|
||||
import qualified Annex.Queue
|
||||
import qualified Database.Keys
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
#if ! defined(mingw32_HOST_OS)
|
||||
import Utility.Touch
|
||||
|
@ -37,13 +38,14 @@ seek ps = unlessM crippledFileSystem $ do
|
|||
|
||||
data FixWhat = FixSymlinks | FixAll
|
||||
|
||||
start :: FixWhat -> FilePath -> Key -> CommandStart
|
||||
start :: FixWhat -> RawFilePath -> Key -> CommandStart
|
||||
start fixwhat file key = do
|
||||
currlink <- liftIO $ catchMaybeIO $ readSymbolicLink file
|
||||
wantlink <- calcRepo $ gitAnnexLink file key
|
||||
currlink <- liftIO $ catchMaybeIO $ R.readSymbolicLink file
|
||||
wantlink <- calcRepo $ gitAnnexLink (fromRawFilePath file) key
|
||||
case currlink of
|
||||
Just l
|
||||
| l /= wantlink -> fixby $ fixSymlink file wantlink
|
||||
| l /= toRawFilePath wantlink -> fixby $
|
||||
fixSymlink (fromRawFilePath file) wantlink
|
||||
| otherwise -> stop
|
||||
Nothing -> case fixwhat of
|
||||
FixAll -> fixthin
|
||||
|
@ -51,11 +53,11 @@ start fixwhat file key = do
|
|||
where
|
||||
fixby = starting "fix" (mkActionItem (key, file))
|
||||
fixthin = do
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
obj <- calcRepo (gitAnnexLocation key)
|
||||
stopUnless (isUnmodified key file <&&> isUnmodified key obj) $ do
|
||||
thin <- annexThin <$> Annex.getGitConfig
|
||||
fs <- liftIO $ catchMaybeIO $ getFileStatus file
|
||||
os <- liftIO $ catchMaybeIO $ getFileStatus obj
|
||||
fs <- liftIO $ catchMaybeIO $ R.getFileStatus file
|
||||
os <- liftIO $ catchMaybeIO $ R.getFileStatus obj
|
||||
case (linkCount <$> fs, linkCount <$> os, thin) of
|
||||
(Just 1, Just 1, True) ->
|
||||
fixby $ makeHardLink file key
|
||||
|
@ -63,21 +65,22 @@ start fixwhat file key = do
|
|||
fixby $ breakHardLink file key obj
|
||||
_ -> stop
|
||||
|
||||
breakHardLink :: FilePath -> Key -> FilePath -> CommandPerform
|
||||
breakHardLink :: RawFilePath -> Key -> RawFilePath -> CommandPerform
|
||||
breakHardLink file key obj = do
|
||||
replaceFile file $ \tmp -> do
|
||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
|
||||
unlessM (checkedCopyFile key obj tmp mode) $
|
||||
replaceFile (fromRawFilePath file) $ \tmp -> do
|
||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
||||
let obj' = fromRawFilePath obj
|
||||
unlessM (checkedCopyFile key obj' tmp mode) $
|
||||
error "unable to break hard link"
|
||||
thawContent tmp
|
||||
modifyContent obj $ freezeContent obj
|
||||
modifyContent obj' $ freezeContent obj'
|
||||
Database.Keys.storeInodeCaches key [file]
|
||||
next $ return True
|
||||
|
||||
makeHardLink :: FilePath -> Key -> CommandPerform
|
||||
makeHardLink :: RawFilePath -> Key -> CommandPerform
|
||||
makeHardLink file key = do
|
||||
replaceFile file $ \tmp -> do
|
||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
|
||||
replaceFile (fromRawFilePath file) $ \tmp -> do
|
||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
||||
linkFromAnnex key tmp mode >>= \case
|
||||
LinkAnnexFailed -> error "unable to make hard link"
|
||||
_ -> noop
|
||||
|
|
|
@ -51,7 +51,7 @@ seekBatch fmt = batchInput fmt parse commandAction
|
|||
in if not (null keyname) && not (null file)
|
||||
then Right $ go file (keyOpt keyname)
|
||||
else Left "Expected pairs of key and filename"
|
||||
go file key = starting "fromkey" (mkActionItem (key, file)) $
|
||||
go file key = starting "fromkey" (mkActionItem (key, toRawFilePath file)) $
|
||||
perform key file
|
||||
|
||||
start :: Bool -> (String, FilePath) -> CommandStart
|
||||
|
@ -61,7 +61,7 @@ start force (keyname, file) = do
|
|||
inbackend <- inAnnex key
|
||||
unless inbackend $ giveup $
|
||||
"key ("++ keyname ++") is not present in backend (use --force to override this sanity check)"
|
||||
starting "fromkey" (mkActionItem (key, file)) $
|
||||
starting "fromkey" (mkActionItem (key, toRawFilePath file)) $
|
||||
perform key file
|
||||
|
||||
-- From user input to a Key.
|
||||
|
@ -80,7 +80,7 @@ keyOpt s = case parseURI s of
|
|||
Nothing -> giveup $ "bad key/url " ++ s
|
||||
|
||||
perform :: Key -> FilePath -> CommandPerform
|
||||
perform key file = lookupFileNotHidden file >>= \case
|
||||
perform key file = lookupFileNotHidden (toRawFilePath file) >>= \case
|
||||
Nothing -> ifM (liftIO $ doesFileExist file)
|
||||
( hasothercontent
|
||||
, do
|
||||
|
|
|
@ -35,6 +35,7 @@ import qualified Database.Fsck as FsckDb
|
|||
import Types.CleanupActions
|
||||
import Types.Key
|
||||
import Types.ActionItem
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
import Data.Time.Clock.POSIX
|
||||
import System.Posix.Types (EpochTime)
|
||||
|
@ -102,11 +103,11 @@ checkDeadRepo u =
|
|||
whenM ((==) DeadTrusted <$> lookupTrust u) $
|
||||
earlyWarning "Warning: Fscking a repository that is currently marked as dead."
|
||||
|
||||
start :: Maybe Remote -> Incremental -> FilePath -> Key -> CommandStart
|
||||
start from inc file key = Backend.getBackend file key >>= \case
|
||||
start :: Maybe Remote -> Incremental -> RawFilePath -> Key -> CommandStart
|
||||
start from inc file key = Backend.getBackend (fromRawFilePath file) key >>= \case
|
||||
Nothing -> stop
|
||||
Just backend -> do
|
||||
numcopies <- getFileNumCopies file
|
||||
numcopies <- getFileNumCopies (fromRawFilePath file)
|
||||
case from of
|
||||
Nothing -> go $ perform key file backend numcopies
|
||||
Just r -> go $ performRemote key afile backend numcopies r
|
||||
|
@ -114,9 +115,9 @@ start from inc file key = Backend.getBackend file key >>= \case
|
|||
go = runFsck inc (mkActionItem (key, afile)) key
|
||||
afile = AssociatedFile (Just file)
|
||||
|
||||
perform :: Key -> FilePath -> Backend -> NumCopies -> Annex Bool
|
||||
perform :: Key -> RawFilePath -> Backend -> NumCopies -> Annex Bool
|
||||
perform key file backend numcopies = do
|
||||
keystatus <- getKeyFileStatus key file
|
||||
keystatus <- getKeyFileStatus key (fromRawFilePath file)
|
||||
check
|
||||
-- order matters
|
||||
[ fixLink key file
|
||||
|
@ -163,7 +164,7 @@ performRemote key afile backend numcopies remote =
|
|||
pid <- liftIO getPID
|
||||
t <- fromRepo gitAnnexTmpObjectDir
|
||||
createAnnexDirectory t
|
||||
let tmp = t </> "fsck" ++ show pid ++ "." ++ keyFile key
|
||||
let tmp = t </> "fsck" ++ show pid ++ "." ++ fromRawFilePath (keyFile key)
|
||||
let cleanup = liftIO $ catchIO (removeFile tmp) (const noop)
|
||||
cleanup
|
||||
cleanup `after` a tmp
|
||||
|
@ -203,18 +204,18 @@ check :: [Annex Bool] -> Annex Bool
|
|||
check cs = and <$> sequence cs
|
||||
|
||||
{- Checks that symlinks points correctly to the annexed content. -}
|
||||
fixLink :: Key -> FilePath -> Annex Bool
|
||||
fixLink :: Key -> RawFilePath -> Annex Bool
|
||||
fixLink key file = do
|
||||
want <- calcRepo $ gitAnnexLink file key
|
||||
want <- calcRepo $ gitAnnexLink (fromRawFilePath file) key
|
||||
have <- getAnnexLinkTarget file
|
||||
maybe noop (go want) have
|
||||
return True
|
||||
where
|
||||
go want have
|
||||
| want /= fromInternalGitPath (fromRawFilePath have) = do
|
||||
| want /= fromRawFilePath (fromInternalGitPath have) = do
|
||||
showNote "fixing link"
|
||||
liftIO $ createDirectoryIfMissing True (parentDir file)
|
||||
liftIO $ removeFile file
|
||||
liftIO $ createDirectoryIfMissing True (parentDir (fromRawFilePath file))
|
||||
liftIO $ removeFile (fromRawFilePath file)
|
||||
addAnnexLink want file
|
||||
| otherwise = noop
|
||||
|
||||
|
@ -222,7 +223,7 @@ fixLink key file = do
|
|||
- in this repository only. -}
|
||||
verifyLocationLog :: Key -> KeyStatus -> ActionItem -> Annex Bool
|
||||
verifyLocationLog key keystatus ai = do
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
obj <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
present <- if isKeyUnlockedThin keystatus
|
||||
then liftIO (doesFileExist obj)
|
||||
else inAnnex key
|
||||
|
@ -267,7 +268,7 @@ verifyLocationLog' key ai present u updatestatus = do
|
|||
fix InfoMissing
|
||||
warning $
|
||||
"** Based on the location log, " ++
|
||||
actionItemDesc ai ++
|
||||
decodeBS' (actionItemDesc ai) ++
|
||||
"\n** was expected to be present, " ++
|
||||
"but its content is missing."
|
||||
return False
|
||||
|
@ -302,14 +303,14 @@ verifyRequiredContent key ai@(ActionItemAssociatedFile afile _) = do
|
|||
missingrequired <- Remote.prettyPrintUUIDs "missingrequired" missinglocs
|
||||
warning $
|
||||
"** Required content " ++
|
||||
actionItemDesc ai ++
|
||||
decodeBS' (actionItemDesc ai) ++
|
||||
" is missing from these repositories:\n" ++
|
||||
missingrequired
|
||||
return False
|
||||
verifyRequiredContent _ _ = return True
|
||||
|
||||
{- Verifies the associated file records. -}
|
||||
verifyAssociatedFiles :: Key -> KeyStatus -> FilePath -> Annex Bool
|
||||
verifyAssociatedFiles :: Key -> KeyStatus -> RawFilePath -> Annex Bool
|
||||
verifyAssociatedFiles key keystatus file = do
|
||||
when (isKeyUnlockedThin keystatus) $ do
|
||||
f <- inRepo $ toTopFilePath file
|
||||
|
@ -318,7 +319,7 @@ verifyAssociatedFiles key keystatus file = do
|
|||
Database.Keys.addAssociatedFile key f
|
||||
return True
|
||||
|
||||
verifyWorkTree :: Key -> FilePath -> Annex Bool
|
||||
verifyWorkTree :: Key -> RawFilePath -> Annex Bool
|
||||
verifyWorkTree key file = do
|
||||
{- Make sure that a pointer file is replaced with its content,
- when the content is available. -}

@ -326,12 +327,12 @@ verifyWorkTree key file = do
case mk of
Just k | k == key -> whenM (inAnnex key) $ do
showNote "fixing worktree content"
replaceFile file $ \tmp -> do
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
replaceFile (fromRawFilePath file) $ \tmp -> do
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
ifM (annexThin <$> Annex.getGitConfig)
( void $ linkFromAnnex key tmp mode
, do
obj <- calcRepo $ gitAnnexLocation key
obj <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
void $ checkedCopyFile key obj tmp mode
thawContent tmp
)

@ -348,8 +349,8 @@ checkKeySize :: Key -> KeyStatus -> ActionItem -> Annex Bool
checkKeySize _ KeyUnlockedThin _ = return True
checkKeySize key _ ai = do
file <- calcRepo $ gitAnnexLocation key
ifM (liftIO $ doesFileExist file)
( checkKeySizeOr badContent key file ai
ifM (liftIO $ R.doesPathExist file)
( checkKeySizeOr badContent key (fromRawFilePath file) ai
, return True
)

@ -375,7 +376,7 @@ checkKeySizeOr bad key file ai = case fromKey keySize key of
badsize a b = do
msg <- bad key
warning $ concat
[ actionItemDesc ai
[ decodeBS' (actionItemDesc ai)
, ": Bad file size ("
, compareSizes storageUnits True a b
, "); "

@ -393,11 +394,11 @@ checkKeyUpgrade backend key ai (AssociatedFile (Just file)) =
case Types.Backend.canUpgradeKey backend of
Just a | a key -> do
warning $ concat
[ actionItemDesc ai
[ decodeBS' (actionItemDesc ai)
, ": Can be upgraded to an improved key format. "
, "You can do so by running: git annex migrate --backend="
, decodeBS (formatKeyVariety (fromKey keyVariety key)) ++ " "
, file
, decodeBS' file
]
return True
_ -> return True

@ -416,10 +417,10 @@ checkKeyUpgrade _ _ _ (AssociatedFile Nothing) =
-}
checkBackend :: Backend -> Key -> KeyStatus -> AssociatedFile -> Annex Bool
checkBackend backend key keystatus afile = do
content <- calcRepo $ gitAnnexLocation key
content <- calcRepo (gitAnnexLocation key)
ifM (pure (isKeyUnlockedThin keystatus) <&&> (not <$> isUnmodified key content))
( nocheck
, checkBackendOr badContent backend key content ai
, checkBackendOr badContent backend key (fromRawFilePath content) ai
)
where
nocheck = return True

@ -448,7 +449,7 @@ checkBackendOr' bad backend key file ai postcheck =
unless ok $ do
msg <- bad key
warning $ concat
[ actionItemDesc ai
[ decodeBS' (actionItemDesc ai)
, ": Bad file content; "
, msg
]

@ -460,7 +461,7 @@ checkKeyNumCopies :: Key -> AssociatedFile -> NumCopies -> Annex Bool
checkKeyNumCopies key afile numcopies = do
let (desc, hasafile) = case afile of
AssociatedFile Nothing -> (serializeKey key, False)
AssociatedFile (Just af) -> (af, True)
AssociatedFile (Just af) -> (fromRawFilePath af, True)
locs <- loggedLocations key
(untrustedlocations, otherlocations) <- trustPartition UnTrusted locs
(deadlocations, safelocations) <- trustPartition DeadTrusted otherlocations

@ -515,7 +516,7 @@ badContent key = do
badContentRemote :: Remote -> FilePath -> Key -> Annex String
badContentRemote remote localcopy key = do
bad <- fromRepo gitAnnexBadDir
let destbad = bad </> keyFile key
let destbad = bad </> fromRawFilePath (keyFile key)
movedbad <- ifM (inAnnex key <||> liftIO (doesFileExist destbad))
( return False
, do

@ -669,8 +670,8 @@ isKeyUnlockedThin KeyMissing = False
getKeyStatus :: Key -> Annex KeyStatus
getKeyStatus key = catchDefaultIO KeyMissing $ do
afs <- not . null <$> Database.Keys.getAssociatedFiles key
obj <- calcRepo $ gitAnnexLocation key
multilink <- ((> 1) . linkCount <$> liftIO (getFileStatus obj))
obj <- calcRepo (gitAnnexLocation key)
multilink <- ((> 1) . linkCount <$> liftIO (R.getFileStatus obj))
return $ if multilink && afs
then KeyUnlockedThin
else KeyPresent

@ -680,7 +681,7 @@ getKeyFileStatus key file = do
s <- getKeyStatus key
case s of
KeyUnlockedThin -> catchDefaultIO KeyUnlockedThin $
ifM (isJust <$> isAnnexLink file)
ifM (isJust <$> isAnnexLink (toRawFilePath file))
( return KeyLockedThin
, return KeyUnlockedThin
)
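The hunks above follow the FilePath to RawFilePath migration that runs through this merge: path-handling code now works on ByteString-based RawFilePath values, converts at call-site boundaries with toRawFilePath and fromRawFilePath, and uses the R.-qualified wrappers (R.getFileStatus, R.doesPathExist) for raw-path system calls. A minimal sketch of what the conversion helpers amount to, under the simplifying assumption of plain Char8 packing (the real helpers go through the GHC filesystem encoding so that non-UTF-8 filenames round-trip):

import qualified Data.ByteString.Char8 as B8

-- Simplified, illustrative stand-ins for git-annex's conversion helpers.
type RawFilePath = B8.ByteString

toRawFilePath :: FilePath -> RawFilePath
toRawFilePath = B8.pack        -- the real version encodes via the filesystem encoding

fromRawFilePath :: RawFilePath -> FilePath
fromRawFilePath = B8.unpack    -- the real version decodes the same way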
@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Command.FuzzTest where
import Command

@ -13,6 +15,7 @@ import qualified Git.Config
import Config
import Utility.ThreadScheduler
import Utility.DiskFree
import Git.Types (fromConfigKey)
import Data.Time.Clock
import System.Random (getStdRandom, random, randomR)

@ -32,25 +35,23 @@ start :: CommandStart
start = do
guardTest
logf <- fromRepo gitAnnexFuzzTestLogFile
showStart "fuzztest" logf
showStart "fuzztest" (toRawFilePath logf)
logh <- liftIO $ openFile logf WriteMode
void $ forever $ fuzz logh
stop

guardTest :: Annex ()
guardTest = unlessM (fromMaybe False . Git.Config.isTrue <$> getConfig key "") $
guardTest = unlessM (fromMaybe False . Git.Config.isTrue' <$> getConfig key mempty) $
giveup $ unlines
[ "Running fuzz tests *writes* to and *deletes* files in"
, "this repository, and pushes those changes to other"
, "repositories! This is a developer tool, not something"
, "to play with."
, ""
, "Refusing to run fuzz tests, since " ++ keyname ++ " is not set!"
, "Refusing to run fuzz tests, since " ++ fromConfigKey key ++ " is not set!"
]
where
key = annexConfig "eat-my-repository"
(ConfigKey keyname) = key

fuzz :: Handle -> Annex ()
fuzz logh = do
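The Command.FuzzTest hunk shows the parallel change to git config handling: getConfig now takes a ByteString default (mempty instead of ""), Git.Config.isTrue' replaces isTrue, and ConfigKey no longer wraps a String, so the message calls fromConfigKey rather than pattern matching out keyname. An assumed, simplified shape of that type (the real definition lives in Git.Types and decodes through the filesystem encoding rather than Char8):

import qualified Data.ByteString.Char8 as B8

-- Assumed shape only, for illustration.
newtype ConfigKey = ConfigKey B8.ByteString
        deriving (Eq, Ord, Show)

fromConfigKey :: ConfigKey -> String
fromConfigKey (ConfigKey s) = B8.unpack s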
@ -42,19 +42,19 @@ seek o = startConcurrency transferStages $ do
from <- maybe (pure Nothing) (Just <$$> getParsed) (getFrom o)
let go = whenAnnexed $ start o from
case batchOption o of
Batch fmt -> batchFilesMatching fmt go
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
NoBatch -> withKeyOptions (keyOptions o) (autoMode o)
(commandAction . startKeys from)
(withFilesInGit (commandAction . go))
=<< workTreeItems (getFiles o)

start :: GetOptions -> Maybe Remote -> FilePath -> Key -> CommandStart
start :: GetOptions -> Maybe Remote -> RawFilePath -> Key -> CommandStart
start o from file key = start' expensivecheck from key afile ai
where
afile = AssociatedFile (Just file)
ai = mkActionItem (key, afile)
expensivecheck
| autoMode o = numCopiesCheck file key (<)
| autoMode o = numCopiesCheck (fromRawFilePath file) key (<)
<||> wantGet False (Just key) afile
| otherwise = return True
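In the seek hunk just above (the one using GetOptions), batch mode still hands back FilePath strings while the start action now wants a RawFilePath, so the conversion is composed into the callback as (go . toRawFilePath); the Move hunk further down does the same. A self-contained toy version of that adapter pattern, with placeholder names standing in for batchFilesMatching and the start action:

import qualified Data.ByteString.Char8 as B8

-- Placeholder for a start action that now takes a raw (ByteString) path.
processRaw :: B8.ByteString -> IO ()
processRaw = B8.putStrLn

-- Placeholder for a batch driver that still supplies String paths.
withBatchLines :: (String -> IO ()) -> IO ()
withBatchLines act = mapM_ act . lines =<< getContents

main :: IO ()
main = withBatchLines (processRaw . B8.pack)  -- cf. (go . toRawFilePath)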
@ -97,7 +97,7 @@ duplicateModeParser =
seek :: ImportOptions -> CommandSeek
seek o@(LocalImportOptions {}) = startConcurrency commandStages $ do
repopath <- liftIO . absPath =<< fromRepo Git.repoPath
repopath <- liftIO . absPath . fromRawFilePath =<< fromRepo Git.repoPath
inrepops <- liftIO $ filter (dirContains repopath) <$> mapM absPath (importFiles o)
unless (null inrepops) $ do
giveup $ "cannot import files from inside the working tree (use git annex add instead): " ++ unwords inrepops

@ -110,14 +110,14 @@ seek o@(RemoteImportOptions {}) = startConcurrency commandStages $ do
giveup "That remote does not support imports."
subdir <- maybe
(pure Nothing)
(Just <$$> inRepo . toTopFilePath)
(Just <$$> inRepo . toTopFilePath . toRawFilePath)
(importToSubDir o)
seekRemote r (importToBranch o) subdir

startLocal :: GetFileMatcher -> DuplicateMode -> (FilePath, FilePath) -> CommandStart
startLocal largematcher mode (srcfile, destfile) =
ifM (liftIO $ isRegularFile <$> getSymbolicLinkStatus srcfile)
( starting "import" (ActionItemWorkTreeFile destfile)
( starting "import" (ActionItemWorkTreeFile (toRawFilePath destfile))
pickaction
, stop
)

@ -181,7 +181,7 @@ startLocal largematcher mode (srcfile, destfile) =
-- weakly the same as the origianlly locked down file's
-- inode cache. (Since the file may have been copied,
-- its inodes may not be the same.)
newcache <- withTSDelta $ liftIO . genInodeCache destfile
newcache <- withTSDelta $ liftIO . genInodeCache (toRawFilePath destfile)
let unchanged = case (newcache, inodeCache (keySource ld)) of
(_, Nothing) -> True
(Just newc, Just c) | compareWeak c newc -> True

@ -202,7 +202,7 @@ startLocal largematcher mode (srcfile, destfile) =
>>= maybe
stop
(\addedk -> next $ Command.Add.cleanup addedk True)
, next $ Command.Add.addSmall destfile
, next $ Command.Add.addSmall $ toRawFilePath destfile
)
notoverwriting why = do
warning $ "not overwriting existing " ++ destfile ++ " " ++ why

@ -67,7 +67,7 @@ seek o = do
getFeed :: ImportFeedOptions -> Cache -> URLString -> CommandSeek
getFeed opts cache url = do
showStart "importfeed" url
showStart' "importfeed" (Just url)
downloadFeed url >>= \case
Nothing -> showEndResult =<< feedProblem url
"downloading the feed failed"

@ -222,7 +222,7 @@ performDownload opts cache todownload = case location todownload of
case dest of
Nothing -> return True
Just f -> do
showStart "addurl" url
showStart' "addurl" (Just url)
ks <- getter f
if null ks
then do

@ -244,7 +244,7 @@ performDownload opts cache todownload = case location todownload of
- to be re-downloaded. -}
makeunique url n file = ifM alreadyexists
( ifM forced
( ifAnnexed f checksameurl tryanother
( ifAnnexed (toRawFilePath f) checksameurl tryanother
, tryanother
)
, return $ Just f

@ -152,7 +152,7 @@ itemInfo o p = ifM (isdir p)
v' <- Remote.nameToUUID' p
case v' of
Right u -> uuidInfo o u
Left _ -> ifAnnexed p
Left _ -> ifAnnexed (toRawFilePath p)
(fileInfo o p)
(treeishInfo o p)
)

@ -161,7 +161,7 @@ itemInfo o p = ifM (isdir p)
noInfo :: String -> Annex ()
noInfo s = do
showStart "info" s
showStart "info" (encodeBS' s)
showNote $ "not a directory or an annexed file or a treeish or a remote or a uuid"
showEndFail

@ -311,8 +311,8 @@ showStat :: Stat -> StatState ()
showStat s = maybe noop calc =<< s
where
calc (desc, a) = do
(lift . showHeader) desc
lift . showRaw =<< a
(lift . showHeader . encodeBS') desc
lift . showRaw . encodeBS' =<< a

repo_list :: TrustLevel -> Stat
repo_list level = stat n $ nojson $ lift $ do

@ -435,7 +435,7 @@ transfer_list = stat desc $ nojson $ lift $ do
desc = "transfers in progress"
line uuidmap t i = unwords
[ formatDirection (transferDirection t) ++ "ing"
, actionItemDesc $ mkActionItem
, fromRawFilePath $ actionItemDesc $ mkActionItem
(transferKey t, associatedFile i)
, if transferDirection t == Upload then "to" else "from"
, maybe (fromUUID $ transferUUID t) Remote.name $

@ -444,7 +444,7 @@ transfer_list = stat desc $ nojson $ lift $ do
jsonify t i = object $ map (\(k, v) -> (packString k, v)) $
[ ("transfer", toJSON' (formatDirection (transferDirection t)))
, ("key", toJSON' (transferKey t))
, ("file", toJSON' afile)
, ("file", toJSON' (fromRawFilePath <$> afile))
, ("remote", toJSON' (fromUUID (transferUUID t) :: String))
]
where

@ -454,7 +454,7 @@ disk_size :: Stat
disk_size = simpleStat "available local disk space" $
calcfree
<$> (lift $ annexDiskReserve <$> Annex.getGitConfig)
<*> (lift $ inRepo $ getDiskFree . gitAnnexDir)
<*> (lift $ inRepo $ getDiskFree . fromRawFilePath . gitAnnexDir)
<*> mkSizer
where
calcfree reserve (Just have) sizer = unwords

@ -577,7 +577,7 @@ getDirStatInfo o dir = do
then return (numcopiesstats, repodata)
else do
locs <- Remote.keyLocations key
nc <- updateNumCopiesStats file numcopiesstats locs
nc <- updateNumCopiesStats (fromRawFilePath file) numcopiesstats locs
return (nc, updateRepoData key locs repodata)
return $! (presentdata', referenceddata', numcopiesstats', repodata')
, return vs

@ -674,7 +674,7 @@ staleSize label dirspec = go =<< lift (dirKeys dirspec)
keysizes keys = do
dir <- lift $ fromRepo dirspec
liftIO $ forM keys $ \k -> catchDefaultIO 0 $
getFileSize (dir </> keyFile k)
getFileSize (dir </> fromRawFilePath (keyFile k))

aside :: String -> String
aside s = " (" ++ s ++ ")"
@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Command.InitRemote where
import qualified Data.Map as M

@ -42,7 +42,7 @@ seek o = do
(commandAction . (whenAnnexed (start s)))
=<< workTreeItems (inprogressFiles o)

start :: S.Set Key -> FilePath -> Key -> CommandStart
start :: S.Set Key -> RawFilePath -> Key -> CommandStart
start s _file k
| S.member k s = start' k
| otherwise = stop

@ -72,7 +72,7 @@ getList o
printHeader :: [(UUID, RemoteName, TrustLevel)] -> Annex ()
printHeader l = liftIO $ putStrLn $ lheader $ map (\(_, n, t) -> (n, t)) l

start :: [(UUID, RemoteName, TrustLevel)] -> FilePath -> Key -> CommandStart
start :: [(UUID, RemoteName, TrustLevel)] -> RawFilePath -> Key -> CommandStart
start l file key = do
ls <- S.fromList <$> keyLocations key
liftIO $ putStrLn $ format (map (\(u, _, t) -> (t, S.member u ls)) l) file

@ -88,8 +88,8 @@ lheader remotes = unlines (zipWith formatheader [0..] remotes) ++ pipes (length
trust UnTrusted = " (untrusted)"
trust _ = ""

format :: [(TrustLevel, Present)] -> FilePath -> String
format remotes file = thereMap ++ " " ++ file
format :: [(TrustLevel, Present)] -> RawFilePath -> String
format remotes file = thereMap ++ " " ++ fromRawFilePath file
where
thereMap = concatMap there remotes
there (UnTrusted, True) = "x"

@ -20,6 +20,7 @@ import qualified Database.Keys
import Annex.Ingest
import Logs.Location
import Git.FilePath
import qualified Utility.RawFilePath as R

cmd :: Command
cmd = withGlobalOptions [jsonOptions, annexedMatchingOptions] $

@ -32,7 +33,7 @@ seek ps = do
l <- workTreeItems ps
withFilesInGit (commandAction . (whenAnnexed startNew)) l

startNew :: FilePath -> Key -> CommandStart
startNew :: RawFilePath -> Key -> CommandStart
startNew file key = ifM (isJust <$> isAnnexLink file)
( stop
, starting "lock" (mkActionItem (key, file)) $

@ -43,7 +44,7 @@ startNew file key = ifM (isJust <$> isAnnexLink file)
| key' == key = cont
| otherwise = errorModified
go Nothing =
ifM (isUnmodified key file)
ifM (isUnmodified key file)
( cont
, ifM (Annex.getState Annex.force)
( cont

@ -52,28 +53,29 @@ startNew file key = ifM (isJust <$> isAnnexLink file)
)
cont = performNew file key

performNew :: FilePath -> Key -> CommandPerform
performNew :: RawFilePath -> Key -> CommandPerform
performNew file key = do
lockdown =<< calcRepo (gitAnnexLocation key)
addLink file key
addLink (fromRawFilePath file) key
=<< withTSDelta (liftIO . genInodeCache file)
next $ cleanupNew file key
where
lockdown obj = do
ifM (isUnmodified key obj)
( breakhardlink obj
, repopulate obj
, repopulate (fromRawFilePath obj)
)
whenM (liftIO $ doesFileExist obj) $
freezeContent obj
whenM (liftIO $ R.doesPathExist obj) $
freezeContent $ fromRawFilePath obj

-- It's ok if the file is hard linked to obj, but if some other
-- associated file is, we need to break that link to lock down obj.
breakhardlink obj = whenM (catchBoolIO $ (> 1) . linkCount <$> liftIO (getFileStatus obj)) $ do
breakhardlink obj = whenM (catchBoolIO $ (> 1) . linkCount <$> liftIO (R.getFileStatus obj)) $ do
mfc <- withTSDelta (liftIO . genInodeCache file)
unlessM (sameInodeCache obj (maybeToList mfc)) $ do
modifyContent obj $ replaceFile obj $ \tmp -> do
unlessM (checkedCopyFile key obj tmp Nothing) $
let obj' = fromRawFilePath obj
modifyContent obj' $ replaceFile obj' $ \tmp -> do
unlessM (checkedCopyFile key obj' tmp Nothing) $
giveup "unable to lock file"
Database.Keys.storeInodeCaches key [obj]

@ -86,27 +88,27 @@ performNew file key = do
liftIO $ nukeFile obj
case mfile of
Just unmodified ->
unlessM (checkedCopyFile key unmodified obj Nothing)
unlessM (checkedCopyFile key (fromRawFilePath unmodified) obj Nothing)
lostcontent
Nothing -> lostcontent

lostcontent = logStatus key InfoMissing

cleanupNew :: FilePath -> Key -> CommandCleanup
cleanupNew :: RawFilePath -> Key -> CommandCleanup
cleanupNew file key = do
Database.Keys.removeAssociatedFile key =<< inRepo (toTopFilePath file)
return True

startOld :: FilePath -> CommandStart
startOld :: RawFilePath -> CommandStart
startOld file = do
unlessM (Annex.getState Annex.force)
errorModified
starting "lock" (ActionItemWorkTreeFile file) $
performOld file

performOld :: FilePath -> CommandPerform
performOld :: RawFilePath -> CommandPerform
performOld file = do
Annex.Queue.addCommand "checkout" [Param "--"] [file]
Annex.Queue.addCommand "checkout" [Param "--"] [fromRawFilePath file]
next $ return True

errorModified :: a

@ -92,10 +92,10 @@ seek o = do
([], True) -> commandAction (startAll o outputter)
(_, True) -> giveup "Cannot specify both files and --all"

start :: LogOptions -> (FilePath -> Outputter) -> FilePath -> Key -> CommandStart
start :: LogOptions -> (FilePath -> Outputter) -> RawFilePath -> Key -> CommandStart
start o outputter file key = do
(changes, cleanup) <- getKeyLog key (passthruOptions o)
showLogIncremental (outputter file) changes
showLogIncremental (outputter (fromRawFilePath file)) changes
void $ liftIO cleanup
stop

@ -199,9 +199,9 @@ compareChanges format changes = concatMap diff changes
getKeyLog :: Key -> [CommandParam] -> Annex ([RefChange], IO Bool)
getKeyLog key os = do
top <- fromRepo Git.repoPath
p <- liftIO $ relPathCwdToFile top
p <- liftIO $ relPathCwdToFile $ fromRawFilePath top
config <- Annex.getGitConfig
let logfile = p </> locationLogFile config key
let logfile = p </> fromRawFilePath (locationLogFile config key)
getGitLog [logfile] (Param "--remove-empty" : os)

{- Streams the git log for all git-annex branch changes. -}

@ -220,7 +220,7 @@ getGitLog fs os = do
[ Param $ Git.fromRef Annex.Branch.fullname
, Param "--"
] ++ map Param fs
return (parseGitRawLog ls, cleanup)
return (parseGitRawLog (map decodeBL' ls), cleanup)

-- Parses chunked git log --raw output, which looks something like:
--

@ -250,7 +250,7 @@ parseGitRawLog = parse epoch
(tss, cl') -> (parseTimeStamp tss, cl')
mrc = do
(old, new) <- parseRawChangeLine cl
key <- locationLogFileKey c2
key <- locationLogFileKey (toRawFilePath c2)
return $ RefChange
{ changetime = ts
, oldref = old
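The log hunks above apply the same boundary conversions on the reading side: Git.repoPath and locationLogFile now yield RawFilePath, and getGitLog hands back raw output lines that are decoded with decodeBL' before the existing String-based parseGitRawLog runs. A rough sketch, assuming decodeBL' is essentially a lazy ByteString to String decode (the real one goes through the filesystem encoding):

import qualified Data.ByteString.Lazy.Char8 as L8

-- Assumed stand-in for decodeBL'; illustrative only.
decodeBL' :: L8.ByteString -> String
decodeBL' = L8.unpack

-- cf. parseGitRawLog (map decodeBL' ls) in the hunk above
decodeLogLines :: [L8.ByteString] -> [String]
decodeLogLines = map decodeBL'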
@ -29,11 +29,12 @@ run _ file = seekSingleGitFile file >>= \case
-- To support absolute filenames, pass through git ls-files.
-- But, this plumbing command does not recurse through directories.
seekSingleGitFile :: FilePath -> Annex (Maybe FilePath)
seekSingleGitFile :: FilePath -> Annex (Maybe RawFilePath)
seekSingleGitFile file = do
(l, cleanup) <- inRepo (Git.LsFiles.inRepo [file])
(l, cleanup) <- inRepo (Git.LsFiles.inRepo [toRawFilePath file])
r <- case l of
(f:[]) | takeFileName f == takeFileName file -> return (Just f)
(f:[]) | takeFileName (fromRawFilePath f) == takeFileName file ->
return (Just f)
_ -> return Nothing
void $ liftIO cleanup
return r

@ -46,7 +46,9 @@ start = startingNoMessage (ActionItemOther Nothing) $ do
umap <- uuidDescMap
trustmap <- trustMapLoad
file <- (</>) <$> fromRepo gitAnnexDir <*> pure "map.dot"
file <- (</>)
<$> fromRepo (fromRawFilePath . gitAnnexDir)
<*> pure "map.dot"
liftIO $ writeFile file (drawMap rs trustmap umap)
next $

@ -176,7 +178,8 @@ absRepo reference r
| Git.repoIsUrl reference = return $ Git.Construct.localToUrl reference r
| Git.repoIsUrl r = return r
| otherwise = liftIO $ do
r' <- Git.Construct.fromAbsPath =<< absPath (Git.repoPath r)
r' <- Git.Construct.fromAbsPath
=<< absPath (fromRawFilePath (Git.repoPath r))
r'' <- safely $ flip Annex.eval Annex.gitRepo =<< Annex.new r'
return (fromMaybe r' r'')

@ -234,7 +237,7 @@ tryScan r
where
remotecmd = "sh -c " ++ shellEscape
(cddir ++ " && " ++ "git config --null --list")
dir = Git.repoPath r
dir = fromRawFilePath $ Git.repoPath r
cddir
| "/~" `isPrefixOf` dir =
let (userhome, reldir) = span (/= '/') (drop 1 dir)

@ -92,7 +92,7 @@ seek o = case batchOption o of
)
_ -> giveup "--batch is currently only supported in --json mode"

start :: VectorClock -> MetaDataOptions -> FilePath -> Key -> CommandStart
start :: VectorClock -> MetaDataOptions -> RawFilePath -> Key -> CommandStart
start c o file k = startKeys c o (k, mkActionItem (k, afile))
where
afile = AssociatedFile (Just file)

@ -147,7 +147,7 @@ instance FromJSON MetaDataFields where
fieldsField :: T.Text
fieldsField = T.pack "fields"

parseJSONInput :: String -> Either String (Either FilePath Key, MetaData)
parseJSONInput :: String -> Either String (Either RawFilePath Key, MetaData)
parseJSONInput i = do
v <- eitherDecode (BU.fromString i)
let m = case itemAdded v of

@ -155,16 +155,16 @@ parseJSONInput i = do
Just (MetaDataFields m') -> m'
case (itemKey v, itemFile v) of
(Just k, _) -> Right (Right k, m)
(Nothing, Just f) -> Right (Left f, m)
(Nothing, Just f) -> Right (Left (toRawFilePath f), m)
(Nothing, Nothing) -> Left "JSON input is missing either file or key"

startBatch :: (Either FilePath Key, MetaData) -> CommandStart
startBatch :: (Either RawFilePath Key, MetaData) -> CommandStart
startBatch (i, (MetaData m)) = case i of
Left f -> do
mk <- lookupFile f
case mk of
Just k -> go k (mkActionItem (k, AssociatedFile (Just f)))
Nothing -> giveup $ "not an annexed file: " ++ f
Nothing -> giveup $ "not an annexed file: " ++ fromRawFilePath f
Right k -> go k (mkActionItem k)
where
go k ai = starting "metadata" ai $ do

@ -28,16 +28,16 @@ cmd = withGlobalOptions [annexedMatchingOptions] $
seek :: CmdParams -> CommandSeek
seek = withFilesInGit (commandAction . (whenAnnexed start)) <=< workTreeItems

start :: FilePath -> Key -> CommandStart
start :: RawFilePath -> Key -> CommandStart
start file key = do
forced <- Annex.getState Annex.force
v <- Backend.getBackend file key
v <- Backend.getBackend (fromRawFilePath file) key
case v of
Nothing -> stop
Just oldbackend -> do
exists <- inAnnex key
newbackend <- maybe defaultBackend return
=<< chooseBackend file
=<< chooseBackend (fromRawFilePath file)
if (newbackend /= oldbackend || upgradableKey oldbackend key || forced) && exists
then starting "migrate" (mkActionItem (key, file)) $
perform file key oldbackend newbackend

@ -63,7 +63,7 @@ upgradableKey backend key = isNothing (fromKey keySize key) || backendupgradable
- data cannot get corrupted after the fsck but before the new key is
- generated.
-}
perform :: FilePath -> Key -> Backend -> Backend -> CommandPerform
perform :: RawFilePath -> Key -> Backend -> Backend -> CommandPerform
perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbackend)
where
go Nothing = stop

@ -85,8 +85,8 @@ perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbacken
genkey Nothing = do
content <- calcRepo $ gitAnnexLocation oldkey
let source = KeySource
{ keyFilename = file
, contentLocation = content
{ keyFilename = fromRawFilePath file
, contentLocation = fromRawFilePath content
, inodeCache = Nothing
}
v <- genKey source nullMeterUpdate (Just newbackend)

@ -47,7 +47,7 @@ seek o = startConcurrency transferStages $
(withFilesInGit (commandAction . (whenAnnexed $ start o)))
=<< workTreeItems (mirrorFiles o)

start :: MirrorOptions -> FilePath -> Key -> CommandStart
start :: MirrorOptions -> RawFilePath -> Key -> CommandStart
start o file k = startKey o afile (k, ai)
where
afile = AssociatedFile (Just file)

@ -75,4 +75,4 @@ startKey o afile (key, ai) = case fromToOptions o of
where
getnumcopies = case afile of
AssociatedFile Nothing -> getNumCopies
AssociatedFile (Just af) -> getFileNumCopies af
AssociatedFile (Just af) -> getFileNumCopies (fromRawFilePath af)

@ -57,13 +57,13 @@ seek :: MoveOptions -> CommandSeek
seek o = startConcurrency transferStages $ do
let go = whenAnnexed $ start (fromToOptions o) (removeWhen o)
case batchOption o of
Batch fmt -> batchFilesMatching fmt go
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
NoBatch -> withKeyOptions (keyOptions o) False
(commandAction . startKey (fromToOptions o) (removeWhen o))
(withFilesInGit (commandAction . go))
=<< workTreeItems (moveFiles o)

start :: FromToHereOptions -> RemoveWhen -> FilePath -> Key -> CommandStart
start :: FromToHereOptions -> RemoveWhen -> RawFilePath -> Key -> CommandStart
start fromto removewhen f k = start' fromto removewhen afile k ai
where
afile = AssociatedFile (Just f)

@ -137,7 +137,8 @@ send ups fs = do
mk <- lookupFile f
case mk of
Nothing -> noop
Just k -> withObjectLoc k (addlist f)
Just k -> withObjectLoc k $
addlist f . fromRawFilePath
liftIO $ hClose h

serverkey <- uftpKey

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Command.P2P where
import Command

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Command.PostReceive where
import Command
Some files were not shown because too many files have changed in this diff.