merging sqlite and bs branches

Since the sqlite branch uses blobs extensively, there are some
performance benefits: ByteStrings now get stored and retrieved without
conversion in some cases, such as in Database.Export.
Joey Hess 2019-12-06 15:17:54 -04:00
commit 2f9a80d803
No known key found for this signature in database
GPG key ID: DB12DB0FF05F8F38
266 changed files with 2860 additions and 1325 deletions
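
Most of the hunks below migrate FilePath (String) values to RawFilePath (a ByteString) at API boundaries, converting with toRawFilePath / fromRawFilePath where a String is still needed. A minimal standalone sketch of that idea, with simplified conversion helpers standing in for the real ones (which respect the filesystem encoding):

    {-# LANGUAGE OverloadedStrings #-}
    import qualified Data.ByteString.Char8 as S8

    -- RawFilePath is a ByteString; values read from git output or sqlite blobs
    -- can be used as-is, where FilePath (String) needs a decode/encode pass.
    type RawFilePath = S8.ByteString

    -- Simplified stand-ins for the real conversion helpers.
    toRawFilePath :: FilePath -> RawFilePath
    toRawFilePath = S8.pack

    fromRawFilePath :: RawFilePath -> FilePath
    fromRawFilePath = S8.unpack

    main :: IO ()
    main = do
        let f = "Database/Export.hs" :: RawFilePath
        S8.putStrLn f                 -- used directly, no conversion
        putStrLn (fromRawFilePath f)  -- convert only where String is required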

View file

@ -147,7 +147,7 @@ data AnnexState = AnnexState
, activeremotes :: MVar (M.Map (Types.Remote.RemoteA Annex) Integer)
, keysdbhandle :: Maybe Keys.DbHandle
, cachedcurrentbranch :: (Maybe (Maybe Git.Branch, Maybe Adjustment))
, cachedgitenv :: Maybe [(String, String)]
, cachedgitenv :: Maybe (FilePath, [(String, String)])
, urloptions :: Maybe UrlOptions
}

View file

@ -104,7 +104,7 @@ autoMergeFrom branch currbranch mergeconfig canresolvemerge commitmode = do
-}
resolveMerge :: Maybe Git.Ref -> Git.Ref -> Bool -> Annex Bool
resolveMerge us them inoverlay = do
top <- if inoverlay
top <- toRawFilePath <$> if inoverlay
then pure "."
else fromRepo Git.repoPath
(fs, cleanup) <- inRepo (LsFiles.unmerged [top])
@ -122,7 +122,7 @@ resolveMerge us them inoverlay = do
unless (null deleted) $
Annex.Queue.addCommand "rm"
[Param "--quiet", Param "-f", Param "--"]
deleted
(map fromRawFilePath deleted)
void $ liftIO cleanup2
when merged $ do
@ -169,7 +169,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
-- Neither side is annexed file; cannot resolve.
(Nothing, Nothing) -> return ([], Nothing)
where
file = LsFiles.unmergedFile u
file = fromRawFilePath $ LsFiles.unmergedFile u
getkey select =
case select (LsFiles.unmergedSha u) of
@ -202,20 +202,20 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
makesymlink key dest = do
l <- calcRepo $ gitAnnexLink dest key
unless inoverlay $ replacewithsymlink dest l
dest' <- stagefile dest
dest' <- toRawFilePath <$> stagefile dest
stageSymlink dest' =<< hashSymlink l
replacewithsymlink dest link = withworktree dest $ \f ->
replaceFile f $ makeGitLink link
replaceFile f $ makeGitLink link . toRawFilePath
makepointer key dest destmode = do
unless inoverlay $
unlessM (reuseOldFile unstagedmap key file dest) $
linkFromAnnex key dest destmode >>= \case
LinkAnnexFailed -> liftIO $
writePointerFile dest key destmode
writePointerFile (toRawFilePath dest) key destmode
_ -> noop
dest' <- stagefile dest
dest' <- toRawFilePath <$> stagefile dest
stagePointerFile dest' destmode =<< hashPointerFile key
unless inoverlay $
Database.Keys.addAssociatedFile key
@ -239,7 +239,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
Nothing -> noop
Just sha -> do
link <- catSymLinkTarget sha
replacewithsymlink item link
replacewithsymlink item (fromRawFilePath link)
-- And when grafting in anything else vs a symlink,
-- the work tree already contains what we want.
(_, Just TreeSymlink) -> noop
@ -290,8 +290,8 @@ cleanConflictCruft resolvedks resolvedfs unstagedmap = do
matchesresolved is i f
| S.member f fs || S.member (conflictCruftBase f) fs = anyM id
[ pure (S.member i is)
, inks <$> isAnnexLink f
, inks <$> liftIO (isPointerFile f)
, inks <$> isAnnexLink (toRawFilePath f)
, inks <$> liftIO (isPointerFile (toRawFilePath f))
]
| otherwise = return False
@ -328,13 +328,14 @@ commitResolvedMerge commitmode = inRepo $ Git.Branch.commitCommand commitmode
type InodeMap = M.Map InodeCacheKey FilePath
inodeMap :: Annex ([FilePath], IO Bool) -> Annex InodeMap
inodeMap :: Annex ([RawFilePath], IO Bool) -> Annex InodeMap
inodeMap getfiles = do
(fs, cleanup) <- getfiles
fsis <- forM fs $ \f -> do
mi <- withTSDelta (liftIO . genInodeCache f)
let f' = fromRawFilePath f
mi <- withTSDelta (liftIO . genInodeCache f')
return $ case mi of
Nothing -> Nothing
Just i -> Just (inodeCacheToKey Strongly i, f)
Just i -> Just (inodeCacheToKey Strongly i, f')
void $ liftIO cleanup
return $ M.fromList $ catMaybes fsis

View file

@ -215,7 +215,7 @@ updateTo' pairs = do
- content is returned.
-
- Returns an empty string if the file doesn't exist yet. -}
get :: FilePath -> Annex L.ByteString
get :: RawFilePath -> Annex L.ByteString
get file = do
update
getLocal file
@ -224,21 +224,21 @@ get file = do
- reflect changes in remotes.
- (Changing the value this returns, and then merging is always the
- same as using get, and then changing its value.) -}
getLocal :: FilePath -> Annex L.ByteString
getLocal :: RawFilePath -> Annex L.ByteString
getLocal file = go =<< getJournalFileStale file
where
go (Just journalcontent) = return journalcontent
go Nothing = getRef fullname file
{- Gets the content of a file as staged in the branch's index. -}
getStaged :: FilePath -> Annex L.ByteString
getStaged :: RawFilePath -> Annex L.ByteString
getStaged = getRef indexref
where
-- This makes git cat-file be run with ":file",
-- so it looks at the index.
indexref = Ref ""
getHistorical :: RefDate -> FilePath -> Annex L.ByteString
getHistorical :: RefDate -> RawFilePath -> Annex L.ByteString
getHistorical date file =
-- This check avoids some ugly error messages when the reflog
-- is empty.
@ -247,7 +247,7 @@ getHistorical date file =
, getRef (Git.Ref.dateRef fullname date) file
)
getRef :: Ref -> FilePath -> Annex L.ByteString
getRef :: Ref -> RawFilePath -> Annex L.ByteString
getRef ref file = withIndex $ catFile ref file
{- Applies a function to modify the content of a file.
@ -255,11 +255,11 @@ getRef ref file = withIndex $ catFile ref file
- Note that this does not cause the branch to be merged, it only
- modifies the current content of the file on the branch.
-}
change :: Journalable content => FilePath -> (L.ByteString -> content) -> Annex ()
change :: Journalable content => RawFilePath -> (L.ByteString -> content) -> Annex ()
change file f = lockJournal $ \jl -> f <$> getLocal file >>= set jl file
{- Applies a function which can modify the content of a file, or not. -}
maybeChange :: Journalable content => FilePath -> (L.ByteString -> Maybe content) -> Annex ()
maybeChange :: Journalable content => RawFilePath -> (L.ByteString -> Maybe content) -> Annex ()
maybeChange file f = lockJournal $ \jl -> do
v <- getLocal file
case f v of
@ -269,7 +269,7 @@ maybeChange file f = lockJournal $ \jl -> do
_ -> noop
{- Records new content of a file into the journal -}
set :: Journalable content => JournalLocked -> FilePath -> content -> Annex ()
set :: Journalable content => JournalLocked -> RawFilePath -> content -> Annex ()
set = setJournalFile
{- Commit message used when making a commit of whatever data has changed
@ -353,23 +353,23 @@ commitIndex' jl branchref message basemessage retrynum parents = do
{- Lists all files on the branch. including ones in the journal
- that have not been committed yet. There may be duplicates in the list. -}
files :: Annex [FilePath]
files :: Annex [RawFilePath]
files = do
update
-- ++ forces the content of the first list to be buffered in memory,
-- so use getJournalledFilesStale which should be much smaller most
-- of the time. branchFiles will stream as the list is consumed.
(++)
<$> getJournalledFilesStale
<$> (map toRawFilePath <$> getJournalledFilesStale)
<*> branchFiles
{- Files in the branch, not including any from journalled changes,
- and without updating the branch. -}
branchFiles :: Annex [FilePath]
branchFiles :: Annex [RawFilePath]
branchFiles = withIndex $ inRepo branchFiles'
branchFiles' :: Git.Repo -> IO [FilePath]
branchFiles' = Git.Command.pipeNullSplitZombie
branchFiles' :: Git.Repo -> IO [RawFilePath]
branchFiles' = Git.Command.pipeNullSplitZombie'
(lsTreeParams Git.LsTree.LsTreeRecursive fullname [Param "--name-only"])
{- Populates the branch's index file with the current branch contents.
@ -593,14 +593,14 @@ performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
if L.null content'
then do
Annex.Queue.addUpdateIndex
=<< inRepo (Git.UpdateIndex.unstageFile file)
=<< inRepo (Git.UpdateIndex.unstageFile (fromRawFilePath file))
-- File is deleted; can't run any other
-- transitions on it.
return ()
else do
sha <- hashBlob content'
Annex.Queue.addUpdateIndex $ Git.UpdateIndex.pureStreamer $
Git.UpdateIndex.updateIndexLine sha TreeFile (asTopFilePath file)
Git.UpdateIndex.updateIndexLine sha TreeFile (asTopFilePath (fromRawFilePath file))
apply rest file content'
checkBranchDifferences :: Git.Ref -> Annex ()

View file

@ -34,7 +34,7 @@ data FileTransition
= ChangeFile Builder
| PreserveFile
type TransitionCalculator = TrustMap -> M.Map UUID RemoteConfig -> FilePath -> L.ByteString -> FileTransition
type TransitionCalculator = TrustMap -> M.Map UUID RemoteConfig -> RawFilePath -> L.ByteString -> FileTransition
getTransitionCalculator :: Transition -> Maybe TransitionCalculator
getTransitionCalculator ForgetGitHistory = Nothing

View file

@ -39,12 +39,12 @@ import Annex.Link
import Annex.CurrentBranch
import Types.AdjustedBranch
catFile :: Git.Branch -> FilePath -> Annex L.ByteString
catFile :: Git.Branch -> RawFilePath -> Annex L.ByteString
catFile branch file = do
h <- catFileHandle
liftIO $ Git.CatFile.catFile h branch file
catFileDetails :: Git.Branch -> FilePath -> Annex (Maybe (L.ByteString, Sha, ObjectType))
catFileDetails :: Git.Branch -> RawFilePath -> Annex (Maybe (L.ByteString, Sha, ObjectType))
catFileDetails branch file = do
h <- catFileHandle
liftIO $ Git.CatFile.catFileDetails h branch file
@ -109,8 +109,8 @@ catKey ref = go =<< catObjectMetaData ref
go _ = return Nothing
{- Gets a symlink target. -}
catSymLinkTarget :: Sha -> Annex String
catSymLinkTarget sha = fromInternalGitPath . decodeBL <$> get
catSymLinkTarget :: Sha -> Annex RawFilePath
catSymLinkTarget sha = fromInternalGitPath . L.toStrict <$> get
where
-- Avoid buffering the whole file content, which might be large.
-- 8192 is enough if it really is a symlink.
@ -137,24 +137,24 @@ catSymLinkTarget sha = fromInternalGitPath . decodeBL <$> get
-
- So, this gets info from the index, unless running as a daemon.
-}
catKeyFile :: FilePath -> Annex (Maybe Key)
catKeyFile :: RawFilePath -> Annex (Maybe Key)
catKeyFile f = ifM (Annex.getState Annex.daemon)
( catKeyFileHEAD f
, catKey $ Git.Ref.fileRef f
)
catKeyFileHEAD :: FilePath -> Annex (Maybe Key)
catKeyFileHEAD :: RawFilePath -> Annex (Maybe Key)
catKeyFileHEAD f = catKey $ Git.Ref.fileFromRef Git.Ref.headRef f
{- Look in the original branch from whence an adjusted branch is based
- to find the file. But only when the adjustment hides some files. -}
catKeyFileHidden :: FilePath -> CurrBranch -> Annex (Maybe Key)
catKeyFileHidden :: RawFilePath -> CurrBranch -> Annex (Maybe Key)
catKeyFileHidden = hiddenCat catKey
catObjectMetaDataHidden :: FilePath -> CurrBranch -> Annex (Maybe (Integer, ObjectType))
catObjectMetaDataHidden :: RawFilePath -> CurrBranch -> Annex (Maybe (Integer, ObjectType))
catObjectMetaDataHidden = hiddenCat catObjectMetaData
hiddenCat :: (Ref -> Annex (Maybe a)) -> FilePath -> CurrBranch -> Annex (Maybe a)
hiddenCat :: (Ref -> Annex (Maybe a)) -> RawFilePath -> CurrBranch -> Annex (Maybe a)
hiddenCat a f (Just origbranch, Just adj)
| adjustmentHidesFiles adj = a (Git.Ref.fileFromRef origbranch f)
hiddenCat _ _ _ = return Nothing

View file

@ -329,7 +329,7 @@ getViaTmpFromDisk rsp v key action = checkallowed $ do
checkallowed a = case rsp of
RetrievalAllKeysSecure -> a
RetrievalVerifiableKeysSecure
| isVerifiable (keyVariety key) -> a
| isVerifiable (fromKey keyVariety key) -> a
| otherwise -> ifM (annexAllowUnverifiedDownloads <$> Annex.getGitConfig)
( a
, warnUnverifiableInsecure key >> return False
@ -353,7 +353,7 @@ verifyKeyContent :: RetrievalSecurityPolicy -> VerifyConfig -> Verification -> K
verifyKeyContent rsp v verification k f = case (rsp, verification) of
(_, Verified) -> return True
(RetrievalVerifiableKeysSecure, _)
| isVerifiable (keyVariety k) -> verify
| isVerifiable (fromKey keyVariety k) -> verify
| otherwise -> ifM (annexAllowUnverifiedDownloads <$> Annex.getGitConfig)
( verify
, warnUnverifiableInsecure k >> return False
@ -365,12 +365,12 @@ verifyKeyContent rsp v verification k f = case (rsp, verification) of
(_, MustVerify) -> verify
where
verify = enteringStage VerifyStage $ verifysize <&&> verifycontent
verifysize = case keySize k of
verifysize = case fromKey keySize k of
Nothing -> return True
Just size -> do
size' <- liftIO $ catchDefaultIO 0 $ getFileSize f
return (size' == size)
verifycontent = case Types.Backend.verifyKeyContent =<< Backend.maybeLookupBackendVariety (keyVariety k) of
verifycontent = case Types.Backend.verifyKeyContent =<< Backend.maybeLookupBackendVariety (fromKey keyVariety k) of
Nothing -> return True
Just verifier -> verifier k f
@ -382,7 +382,7 @@ warnUnverifiableInsecure k = warning $ unwords
, "this safety check.)"
]
where
kv = decodeBS (formatKeyVariety (keyVariety k))
kv = decodeBS (formatKeyVariety (fromKey keyVariety k))
data VerifyConfig = AlwaysVerify | NoVerify | RemoteVerify Remote | DefaultVerify
@ -483,17 +483,17 @@ moveAnnex key src = ifM (checkSecureHashes key)
fs <- map (`fromTopFilePath` g)
<$> Database.Keys.getAssociatedFiles key
unless (null fs) $ do
ics <- mapM (populatePointerFile (Restage True) key dest) fs
ics <- mapM (populatePointerFile (Restage True) key (toRawFilePath dest) . toRawFilePath) fs
Database.Keys.storeInodeCaches' key [dest] (catMaybes ics)
)
alreadyhave = liftIO $ removeFile src
checkSecureHashes :: Key -> Annex Bool
checkSecureHashes key
| cryptographicallySecure (keyVariety key) = return True
| cryptographicallySecure (fromKey keyVariety key) = return True
| otherwise = ifM (annexSecureHashesOnly <$> Annex.getGitConfig)
( do
warning $ "annex.securehashesonly blocked adding " ++ decodeBS (formatKeyVariety (keyVariety key)) ++ " key to annex objects"
warning $ "annex.securehashesonly blocked adding " ++ decodeBS (formatKeyVariety (fromKey keyVariety key)) ++ " key to annex objects"
return False
, return True
)
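
Several hunks in this file (and many below) replace direct record access on Key with fromKey, and record construction with mkKey. A simplified model of that accessor/constructor pattern, written as a guess at its shape rather than the branch's actual definitions (the real Key also caches its serialization):

    -- Simplified model of the fromKey / mkKey pattern used in these hunks.
    data KeyData = KeyData
        { keyName :: String
        , keySize :: Maybe Integer
        }

    newtype Key = Key { keyData :: KeyData }

    -- Build a Key by modifying a stub, as in: mkKey $ \k -> k { keyName = ... }
    mkKey :: (KeyData -> KeyData) -> Key
    mkKey f = Key (f (KeyData "" Nothing))

    -- Read a field, as in: fromKey keySize key
    fromKey :: (KeyData -> a) -> Key -> a
    fromKey f = f . keyData

    main :: IO ()
    main = print (fromKey keySize (mkKey (\k -> k { keySize = Just 42 })))

The key-building hunks further down (e.g. GitKey $ mkKey $ \k -> k { ... }) follow the same pattern.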
@ -650,7 +650,7 @@ removeAnnex (ContentRemovalLock key) = withObjectLoc key $ \file ->
-- Check associated pointer file for modifications, and reset if
-- it's unmodified.
resetpointer file = ifM (isUnmodified key file)
( depopulatePointerFile key file
( depopulatePointerFile key (toRawFilePath file)
-- Modified file, so leave it alone.
-- If it was a hard link to the annex object,
-- that object might have been frozen as part of the

View file

@ -100,7 +100,7 @@ preserveGitMode _ _ = return True
- when doing concurrent downloads.
-}
checkDiskSpace :: Maybe FilePath -> Key -> Integer -> Bool -> Annex Bool
checkDiskSpace destdir key = checkDiskSpace' (fromMaybe 1 (keySize key)) destdir key
checkDiskSpace destdir key = checkDiskSpace' (fromMaybe 1 (fromKey keySize key)) destdir key
{- Allows specifying the size of the key, if it's known, which is useful
- as not all keys know their size. -}

View file

@ -30,16 +30,17 @@ import Utility.Touch
-
- Returns an InodeCache if it populated the pointer file.
-}
populatePointerFile :: Restage -> Key -> FilePath -> FilePath -> Annex (Maybe InodeCache)
populatePointerFile :: Restage -> Key -> RawFilePath -> RawFilePath -> Annex (Maybe InodeCache)
populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
where
go (Just k') | k == k' = do
destmode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus f
liftIO $ nukeFile f
(ic, populated) <- replaceFile f $ \tmp -> do
ok <- linkOrCopy k obj tmp destmode >>= \case
let f' = fromRawFilePath f
destmode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus f'
liftIO $ nukeFile f'
(ic, populated) <- replaceFile f' $ \tmp -> do
ok <- linkOrCopy k (fromRawFilePath obj) tmp destmode >>= \case
Just _ -> thawContent tmp >> return True
Nothing -> liftIO (writePointerFile tmp k destmode) >> return False
Nothing -> liftIO (writePointerFile (toRawFilePath tmp) k destmode) >> return False
ic <- withTSDelta (liftIO . genInodeCache tmp)
return (ic, ok)
maybe noop (restagePointerFile restage f) ic
@ -51,14 +52,15 @@ populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
{- Removes the content from a pointer file, replacing it with a pointer.
-
- Does not check if the pointer file is modified. -}
depopulatePointerFile :: Key -> FilePath -> Annex ()
depopulatePointerFile :: Key -> RawFilePath -> Annex ()
depopulatePointerFile key file = do
st <- liftIO $ catchMaybeIO $ getFileStatus file
let file' = fromRawFilePath file
st <- liftIO $ catchMaybeIO $ getFileStatus file'
let mode = fmap fileMode st
secureErase file
liftIO $ nukeFile file
ic <- replaceFile file $ \tmp -> do
liftIO $ writePointerFile tmp key mode
secureErase file'
liftIO $ nukeFile file'
ic <- replaceFile file' $ \tmp -> do
liftIO $ writePointerFile (toRawFilePath tmp) key mode
#if ! defined(mingw32_HOST_OS)
-- Don't advance mtime; this avoids unnecessary re-smudging
-- by git in some cases.

View file

@ -54,5 +54,5 @@ setDifferences = do
else return ds
)
forM_ (listDifferences ds') $ \d ->
setConfig (ConfigKey $ differenceConfigKey d) (differenceConfigVal d)
setConfig (differenceConfigKey d) (differenceConfigVal d)
recordDifferences ds' u

View file

@ -65,14 +65,14 @@ hashDirs (HashLevels 1) sz s = addTrailingPathSeparator $ take sz s
hashDirs _ sz s = addTrailingPathSeparator $ take sz s </> drop sz s
hashDirLower :: HashLevels -> Hasher
hashDirLower n k = hashDirs n 3 $ take 6 $ show $ md5 $ serializeKey' $ nonChunkKey k
hashDirLower n k = hashDirs n 3 $ take 6 $ show $ md5s $ serializeKey' $ nonChunkKey k
{- This was originally using Data.Hash.MD5 from MissingH. This new version
- is faster, but ugly as it has to replicate the 4 Word32's that produced. -}
hashDirMixed :: HashLevels -> Hasher
hashDirMixed n k = hashDirs n 2 $ take 4 $ concatMap display_32bits_as_dir $
encodeWord32 $ map fromIntegral $ Data.ByteArray.unpack $
Utility.Hash.md5 $ serializeKey' $ nonChunkKey k
Utility.Hash.md5s $ serializeKey' $ nonChunkKey k
where
encodeWord32 (b1:b2:b3:b4:rest) =
(shiftL b4 24 .|. shiftL b3 16 .|. shiftL b2 8 .|. b1)
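
For context on the two functions above: the lower-case hash directory layout takes the first six hex characters of the MD5 of the serialized key and (in the default two-level layout) splits them into two three-character path components. A rough standalone sketch using cryptonite's MD5 directly, with the serialized key passed in as a plain ByteString (an assumption; the real code goes through serializeKey' and nonChunkKey):

    import Crypto.Hash (MD5 (MD5), hashWith)
    import qualified Data.ByteString.Char8 as S8
    import System.FilePath (addTrailingPathSeparator, (</>))

    -- Two-level analogue of hashDirLower: e.g. "abc/123/" when the MD5 hex
    -- of the serialized key starts with "abc123".
    hashDirLowerSketch :: S8.ByteString -> FilePath
    hashDirLowerSketch serializedkey =
        let s = take 6 (show (hashWith MD5 serializedkey))
        in addTrailingPathSeparator (take 3 s </> drop 3 s)

    main :: IO ()
    main = putStrLn (hashDirLowerSketch "SHA256E-s42--0123abcdef.txt")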

View file

@ -49,7 +49,8 @@ type Reason = String
handleDropsFrom :: [UUID] -> [Remote] -> Reason -> Bool -> Key -> AssociatedFile -> [VerifiedCopy] -> (CommandStart -> CommandCleanup) -> Annex ()
handleDropsFrom locs rs reason fromhere key afile preverified runner = do
g <- Annex.gitRepo
l <- map (`fromTopFilePath` g) <$> Database.Keys.getAssociatedFiles key
l <- map toRawFilePath . map (`fromTopFilePath` g)
<$> Database.Keys.getAssociatedFiles key
let fs = case afile of
AssociatedFile (Just f) -> nub (f : l)
AssociatedFile Nothing -> l
@ -62,7 +63,7 @@ handleDropsFrom locs rs reason fromhere key afile preverified runner = do
(untrusted, have) <- trustPartition UnTrusted locs
numcopies <- if null fs
then getNumCopies
else maximum <$> mapM getFileNumCopies fs
else maximum <$> mapM (getFileNumCopies . fromRawFilePath) fs
return (NumCopies (length have), numcopies, S.fromList untrusted)
{- Check that we have enough copies still to drop the content.
@ -107,7 +108,7 @@ handleDropsFrom locs rs reason fromhere key afile preverified runner = do
[ "dropped"
, case afile of
AssociatedFile Nothing -> serializeKey key
AssociatedFile (Just af) -> af
AssociatedFile (Just af) -> fromRawFilePath af
, "(from " ++ maybe "here" show u ++ ")"
, "(copies now " ++ show (fromNumCopies have - 1) ++ ")"
, ": " ++ reason

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Annex.Environment where
import Annex.Common
@ -45,6 +47,6 @@ ensureCommit a = either retry return =<< tryNonAsync a
where
retry _ = do
name <- liftIO $ either (const "unknown") id <$> myUserName
setConfig (ConfigKey "user.name") name
setConfig (ConfigKey "user.email") name
setConfig "user.name" name
setConfig "user.email" name
a

View file

@ -33,7 +33,7 @@ exportKey :: Git.Sha -> Annex ExportKey
exportKey sha = mk <$> catKey sha
where
mk (Just k) = AnnexKey k
mk Nothing = GitKey $ Key
mk Nothing = GitKey $ mkKey $ \k -> k
{ keyName = encodeBS $ Git.fromRef sha
, keyVariety = SHA1Key (HasExt False)
, keySize = Nothing

View file

@ -54,7 +54,7 @@ checkFileMatcher' getmatcher file notconfigured = do
matcher <- getmatcher file
checkMatcher matcher Nothing afile S.empty notconfigured d
where
afile = AssociatedFile (Just file)
afile = AssociatedFile (Just (toRawFilePath file))
-- checkMatcher will never use this, because afile is provided.
d = return True
@ -62,7 +62,7 @@ checkMatcher :: FileMatcher Annex -> Maybe Key -> AssociatedFile -> AssumeNotPre
checkMatcher matcher mkey afile notpresent notconfigured d
| isEmpty matcher = notconfigured
| otherwise = case (mkey, afile) of
(_, AssociatedFile (Just file)) -> go =<< fileMatchInfo file
(_, AssociatedFile (Just file)) -> go =<< fileMatchInfo (fromRawFilePath file)
(Just key, _) -> go (MatchingKey key afile)
_ -> d
where

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Annex.Fixup where
import Git.Types
@ -53,7 +55,7 @@ fixupDirect r@(Repo { location = l@(Local { gitdir = d, worktree = Nothing }) })
{ location = l { worktree = Just (parentDir d) }
, gitGlobalOpts = gitGlobalOpts r ++
[ Param "-c"
, Param $ coreBare ++ "=" ++ boolConfig False
, Param $ fromConfigKey coreBare ++ "=" ++ boolConfig False
]
}
fixupDirect r = r

View file

@ -14,7 +14,6 @@ import Git
import Git.Types
import Git.Index
import Git.Env
import Utility.Env
import qualified Annex
import qualified Annex.Queue
@ -23,28 +22,29 @@ withIndexFile :: FilePath -> Annex a -> Annex a
withIndexFile f a = do
f' <- liftIO $ indexEnvVal f
withAltRepo
(usecachedgitenv $ \g -> liftIO $ addGitEnv g indexEnv f')
(usecachedgitenv f' $ \g -> addGitEnv g indexEnv f')
(\g g' -> g' { gitEnv = gitEnv g })
a
where
-- This is an optimisation. Since withIndexFile is run repeatedly,
-- and addGitEnv uses the slow getEnvironment when gitEnv is Nothing,
-- we cache the environment the first time, and reuse it in
-- subsequent calls.
-- typically with the same file, and addGitEnv uses the slow
-- getEnvironment when gitEnv is Nothing, and has to do a
-- nontrivial amount of work, we cache the modified environment
-- the first time, and reuse it in subsequent calls for the same
-- index file.
--
-- (This could be done at another level; eg when creating the
-- Git object in the first place, but it's more efficient to let
-- the enviroment be inherited in all calls to git where it
-- the environment be inherited in all calls to git where it
-- does not need to be modified.)
usecachedgitenv m g = case gitEnv g of
Just _ -> m g
Nothing -> do
e <- Annex.withState $ \s -> case Annex.cachedgitenv s of
Nothing -> do
e <- getEnvironment
return (s { Annex.cachedgitenv = Just e }, e)
Just e -> return (s, e)
m (g { gitEnv = Just e })
usecachedgitenv f' m g = case gitEnv g of
Just _ -> liftIO $ m g
Nothing -> Annex.withState $ \s -> case Annex.cachedgitenv s of
Just (cachedf, cachede) | f' == cachedf ->
return (s, g { gitEnv = Just cachede })
_ -> do
g' <- m g
return (s { Annex.cachedgitenv = (,) <$> Just f' <*> gitEnv g' }, g')
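
The rewritten usecachedgitenv above keys the cache on the index file (matching the cachedgitenv field change in Annex.hs at the top of this commit), so a cached environment is only reused for the same index file. A minimal sketch of that shape outside the Annex monad, using an IORef; the GIT_INDEX_FILE variable name and index path below are illustrative:

    import Data.IORef
    import System.Environment (getEnvironment)

    -- Cache of (index file, environment built for it), mirroring the shape of
    -- the new cachedgitenv field.
    type CachedGitEnv = IORef (Maybe (FilePath, [(String, String)]))

    envForIndex :: CachedGitEnv -> FilePath -> IO [(String, String)]
    envForIndex cache indexfile = do
        cached <- readIORef cache
        case cached of
            -- Reuse the environment when it was built for the same index file.
            Just (f, e) | f == indexfile -> return e
            -- Otherwise pay for getEnvironment once and remember the result.
            _ -> do
                e <- (("GIT_INDEX_FILE", indexfile) :) <$> getEnvironment
                writeIORef cache (Just (indexfile, e))
                return e

    main :: IO ()
    main = do
        cache <- newIORef Nothing
        e1 <- envForIndex cache ".git/annex/index"
        e2 <- envForIndex cache ".git/annex/index"  -- served from the cache
        print (length e1 == length e2)

Inside git-annex the same idea is expressed through Annex.withState, as in the hunk above.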
{- Runs an action using a different git work tree.
-

View file

@ -264,7 +264,7 @@ buildImportTrees basetree msubdir importable = History
graftTree' importtree subdir basetree repo hdl
mktreeitem (loc, k) = do
let lf = fromImportLocation loc
let lf = fromRawFilePath (fromImportLocation loc)
let treepath = asTopFilePath lf
let topf = asTopFilePath $
maybe lf (\sd -> getTopFilePath sd </> lf) msubdir
@ -327,7 +327,7 @@ downloadImport remote importtreeconfig importablecontents = do
(k:_) -> return $ Left $ Just (loc, k)
[] -> do
job <- liftIO $ newEmptyTMVarIO
let ai = ActionItemOther (Just (fromImportLocation loc))
let ai = ActionItemOther (Just (fromRawFilePath (fromImportLocation loc)))
let downloadaction = starting ("import " ++ Remote.name remote) ai $ do
when oldversion $
showNote "old version"
@ -377,9 +377,9 @@ downloadImport remote importtreeconfig importablecontents = do
fmap fst <$> genKey ks nullMeterUpdate backend
locworktreefilename loc = asTopFilePath $ case importtreeconfig of
ImportTree -> fromImportLocation loc
ImportTree -> fromRawFilePath (fromImportLocation loc)
ImportSubTree subdir _ ->
getTopFilePath subdir </> fromImportLocation loc
getTopFilePath subdir </> fromRawFilePath (fromImportLocation loc)
getcidkey cidmap db cid = liftIO $
CIDDb.getContentIdentifierKeys db rs cid >>= \case
@ -398,7 +398,7 @@ downloadImport remote importtreeconfig importablecontents = do
{- Temporary key used for import of a ContentIdentifier while downloading
- content, before generating its real key. -}
importKey :: ContentIdentifier -> Integer -> Key
importKey (ContentIdentifier cid) size = stubKey
importKey (ContentIdentifier cid) size = mkKey $ \k -> k
{ keyName = cid
, keyVariety = OtherKey "CID"
, keySize = Just size
@ -450,7 +450,7 @@ wantImport :: FileMatcher Annex -> ImportLocation -> ByteSize -> Annex Bool
wantImport matcher loc sz = checkMatcher' matcher mi mempty
where
mi = MatchingInfo $ ProvidedInfo
{ providedFilePath = Right $ fromImportLocation loc
{ providedFilePath = Right $ fromRawFilePath $ fromImportLocation loc
, providedKey = unavail "key"
, providedFileSize = Right sz
, providedMimeType = unavail "mime"
@ -503,4 +503,4 @@ listImportableContents r = fmap removegitspecial
, importableHistory =
map removegitspecial (importableHistory ic)
}
gitspecial l = ".git" `elem` Posix.splitDirectories (fromImportLocation l)
gitspecial l = ".git" `elem` Posix.splitDirectories (fromRawFilePath (fromImportLocation l))

View file

@ -136,7 +136,7 @@ ingestAdd' meterupdate ld@(Just (LockedDown cfg source)) mk = do
then addLink f k mic
else do
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus (contentLocation source)
stagePointerFile f mode =<< hashPointerFile k
stagePointerFile (toRawFilePath f) mode =<< hashPointerFile k
return (Just k)
{- Ingests a locked down file into the annex. Does not update the working
@ -187,7 +187,7 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
gounlocked _ _ _ = failure "failed statting file"
success k mcache s = do
genMetaData k (keyFilename source) s
genMetaData k (toRawFilePath (keyFilename source)) s
return (Just k, mcache)
failure msg = do
@ -208,13 +208,13 @@ finishIngestUnlocked' key source restage = do
{- Copy to any other locations using the same key. -}
populateAssociatedFiles :: Key -> KeySource -> Restage -> Annex ()
populateAssociatedFiles key source restage = do
obj <- calcRepo (gitAnnexLocation key)
obj <- toRawFilePath <$> calcRepo (gitAnnexLocation key)
g <- Annex.gitRepo
ingestedf <- flip fromTopFilePath g
<$> inRepo (toTopFilePath (keyFilename source))
afs <- map (`fromTopFilePath` g) <$> Database.Keys.getAssociatedFiles key
forM_ (filter (/= ingestedf) afs) $
populatePointerFile restage key obj
populatePointerFile restage key obj . toRawFilePath
cleanCruft :: KeySource -> Annex ()
cleanCruft source = when (contentLocation source /= keyFilename source) $
@ -264,7 +264,7 @@ restoreFile file key e = do
makeLink :: FilePath -> Key -> Maybe InodeCache -> Annex String
makeLink file key mcache = flip catchNonAsync (restoreFile file key) $ do
l <- calcRepo $ gitAnnexLink file key
replaceFile file $ makeAnnexLink l
replaceFile file $ makeAnnexLink l . toRawFilePath
-- touch symlink to have same time as the original file,
-- as provided in the InodeCache
@ -291,7 +291,7 @@ addLink file key mcache = ifM (coreSymlinks <$> Annex.getGitConfig)
Annex.Queue.addCommand "add" (ps++[Param "--"]) [file]
, do
l <- makeLink file key mcache
addAnnexLink l file
addAnnexLink l (toRawFilePath file)
)
{- Parameters to pass to git add, forcing addition of ignored files. -}
@ -329,7 +329,7 @@ addAnnexedFile file key mtmp = ifM addUnlocked
(pure Nothing)
(\tmp -> liftIO $ catchMaybeIO $ fileMode <$> getFileStatus tmp)
mtmp
stagePointerFile file mode =<< hashPointerFile key
stagePointerFile (toRawFilePath file) mode =<< hashPointerFile key
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath file)
case mtmp of
Just tmp -> ifM (moveAnnex key tmp)
@ -349,6 +349,6 @@ addAnnexedFile file key mtmp = ifM addUnlocked
where
linkunlocked mode = linkFromAnnex key file mode >>= \case
LinkAnnexFailed -> liftIO $
writePointerFile file key mode
writePointerFile (toRawFilePath file) key mode
_ -> return ()
writepointer mode = liftIO $ writePointerFile file key mode
writepointer mode = liftIO $ writePointerFile (toRawFilePath file) key mode

View file

@ -6,6 +6,7 @@
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Annex.Init (
ensureInitialized,
@ -22,6 +23,7 @@ import qualified Annex
import qualified Git
import qualified Git.Config
import qualified Git.Objects
import Git.Types (fromConfigValue)
import qualified Annex.Branch
import Logs.UUID
import Logs.Trust.Basic
@ -204,7 +206,7 @@ checkCrippledFileSystem = whenM probeCrippledFileSystem $ do
- filesystem. -}
whenM (coreSymlinks <$> Annex.getGitConfig) $ do
warning "Disabling core.symlinks."
setConfig (ConfigKey "core.symlinks")
setConfig "core.symlinks"
(Git.Config.boolConfig False)
probeLockSupport :: Annex Bool
@ -274,5 +276,5 @@ initSharedClone True = do
- affect it. -}
propigateSecureHashesOnly :: Annex ()
propigateSecureHashesOnly =
maybe noop (setConfig (ConfigKey "annex.securehashesonly"))
maybe noop (setConfig "annex.securehashesonly" . fromConfigValue)
=<< getGlobalConfig "annex.securehashesonly"

View file

@ -44,18 +44,18 @@ instance Journalable Builder where
- getJournalFileStale to always return a consistent journal file
- content, although possibly not the most current one.
-}
setJournalFile :: Journalable content => JournalLocked -> FilePath -> content -> Annex ()
setJournalFile :: Journalable content => JournalLocked -> RawFilePath -> content -> Annex ()
setJournalFile _jl file content = withOtherTmp $ \tmp -> do
createAnnexDirectory =<< fromRepo gitAnnexJournalDir
-- journal file is written atomically
jfile <- fromRepo $ journalFile file
jfile <- fromRepo $ journalFile $ fromRawFilePath file
let tmpfile = tmp </> takeFileName jfile
liftIO $ do
withFile tmpfile WriteMode $ \h -> writeJournalHandle h content
moveFile tmpfile jfile
{- Gets any journalled content for a file in the branch. -}
getJournalFile :: JournalLocked -> FilePath -> Annex (Maybe L.ByteString)
getJournalFile :: JournalLocked -> RawFilePath -> Annex (Maybe L.ByteString)
getJournalFile _jl = getJournalFileStale
{- Without locking, this is not guaranteed to be the most recent
@ -69,9 +69,9 @@ getJournalFile _jl = getJournalFileStale
- concurrency or other issues with a lazy read, and the minor loss of
- laziness doesn't matter much, as the files are not very large.
-}
getJournalFileStale :: FilePath -> Annex (Maybe L.ByteString)
getJournalFileStale :: RawFilePath -> Annex (Maybe L.ByteString)
getJournalFileStale file = inRepo $ \g -> catchMaybeIO $
L.fromStrict <$> S.readFile (journalFile file g)
L.fromStrict <$> S.readFile (journalFile (fromRawFilePath file) g)
{- List of existing journal files, but without locking, may miss new ones
- just being added, or may have false positives if the journal is staged

View file

@ -43,7 +43,7 @@ import qualified Data.ByteString.Lazy as L
type LinkTarget = String
{- Checks if a file is a link to a key. -}
isAnnexLink :: FilePath -> Annex (Maybe Key)
isAnnexLink :: RawFilePath -> Annex (Maybe Key)
isAnnexLink file = maybe Nothing parseLinkTargetOrPointer <$> getAnnexLinkTarget file
{- Gets the link target of a symlink.
@ -54,13 +54,13 @@ isAnnexLink file = maybe Nothing parseLinkTargetOrPointer <$> getAnnexLinkTarget
- Returns Nothing if the file is not a symlink, or not a link to annex
- content.
-}
getAnnexLinkTarget :: FilePath -> Annex (Maybe S.ByteString)
getAnnexLinkTarget :: RawFilePath -> Annex (Maybe S.ByteString)
getAnnexLinkTarget f = getAnnexLinkTarget' f
=<< (coreSymlinks <$> Annex.getGitConfig)
{- Pass False to force looking inside file, for when git checks out
- symlinks as plain files. -}
getAnnexLinkTarget' :: FilePath -> Bool -> Annex (Maybe S.ByteString)
getAnnexLinkTarget' :: RawFilePath -> Bool -> Annex (Maybe S.ByteString)
getAnnexLinkTarget' file coresymlinks = if coresymlinks
then check probesymlink $
return Nothing
@ -75,9 +75,9 @@ getAnnexLinkTarget' file coresymlinks = if coresymlinks
| otherwise -> return Nothing
Nothing -> fallback
probesymlink = R.readSymbolicLink $ toRawFilePath file
probesymlink = R.readSymbolicLink file
probefilecontent = withFile file ReadMode $ \h -> do
probefilecontent = withFile (fromRawFilePath file) ReadMode $ \h -> do
s <- S.hGet h unpaddedMaxPointerSz
-- If we got the full amount, the file is too large
-- to be a symlink target.
@ -92,7 +92,7 @@ getAnnexLinkTarget' file coresymlinks = if coresymlinks
then mempty
else s
makeAnnexLink :: LinkTarget -> FilePath -> Annex ()
makeAnnexLink :: LinkTarget -> RawFilePath -> Annex ()
makeAnnexLink = makeGitLink
{- Creates a link on disk.
@ -102,48 +102,48 @@ makeAnnexLink = makeGitLink
- it's staged as such, so use addAnnexLink when adding a new file or
- modified link to git.
-}
makeGitLink :: LinkTarget -> FilePath -> Annex ()
makeGitLink :: LinkTarget -> RawFilePath -> Annex ()
makeGitLink linktarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
( liftIO $ do
void $ tryIO $ removeFile file
createSymbolicLink linktarget file
, liftIO $ writeFile file linktarget
void $ tryIO $ removeFile (fromRawFilePath file)
createSymbolicLink linktarget (fromRawFilePath file)
, liftIO $ writeFile (fromRawFilePath file) linktarget
)
{- Creates a link on disk, and additionally stages it in git. -}
addAnnexLink :: LinkTarget -> FilePath -> Annex ()
addAnnexLink :: LinkTarget -> RawFilePath -> Annex ()
addAnnexLink linktarget file = do
makeAnnexLink linktarget file
stageSymlink file =<< hashSymlink linktarget
{- Injects a symlink target into git, returning its Sha. -}
hashSymlink :: LinkTarget -> Annex Sha
hashSymlink linktarget = hashBlob $ toRawFilePath $ toInternalGitPath linktarget
hashSymlink = hashBlob . toInternalGitPath . toRawFilePath
{- Stages a symlink to an annexed object, using a Sha of its target. -}
stageSymlink :: FilePath -> Sha -> Annex ()
stageSymlink :: RawFilePath -> Sha -> Annex ()
stageSymlink file sha =
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.stageSymlink file sha)
inRepo (Git.UpdateIndex.stageSymlink (fromRawFilePath file) sha)
{- Injects a pointer file content into git, returning its Sha. -}
hashPointerFile :: Key -> Annex Sha
hashPointerFile key = hashBlob $ formatPointer key
{- Stages a pointer file, using a Sha of its content -}
stagePointerFile :: FilePath -> Maybe FileMode -> Sha -> Annex ()
stagePointerFile :: RawFilePath -> Maybe FileMode -> Sha -> Annex ()
stagePointerFile file mode sha =
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.stageFile sha treeitemtype file)
inRepo (Git.UpdateIndex.stageFile sha treeitemtype $ fromRawFilePath file)
where
treeitemtype
| maybe False isExecutable mode = TreeExecutable
| otherwise = TreeFile
writePointerFile :: FilePath -> Key -> Maybe FileMode -> IO ()
writePointerFile :: RawFilePath -> Key -> Maybe FileMode -> IO ()
writePointerFile file k mode = do
S.writeFile file (formatPointer k)
maybe noop (setFileMode file) mode
S.writeFile (fromRawFilePath file) (formatPointer k)
maybe noop (setFileMode $ fromRawFilePath file) mode
newtype Restage = Restage Bool
@ -172,17 +172,17 @@ newtype Restage = Restage Bool
- the worktree file is changed by something else before git update-index
- gets to look at it.
-}
restagePointerFile :: Restage -> FilePath -> InodeCache -> Annex ()
restagePointerFile :: Restage -> RawFilePath -> InodeCache -> Annex ()
restagePointerFile (Restage False) f _ =
toplevelWarning True $ unableToRestage (Just f)
toplevelWarning True $ unableToRestage $ Just $ fromRawFilePath f
restagePointerFile (Restage True) f orig = withTSDelta $ \tsd -> do
-- update-index is documented as picky about "./file" and it
-- fails on "../../repo/path/file" when cwd is not in the repo
-- being acted on. Avoid these problems with an absolute path.
absf <- liftIO $ absPath f
absf <- liftIO $ absPath $ fromRawFilePath f
Annex.Queue.addInternalAction runner [(absf, isunmodified tsd)]
where
isunmodified tsd = genInodeCache f tsd >>= return . \case
isunmodified tsd = genInodeCache' f tsd >>= return . \case
Nothing -> False
Just new -> compareStrong orig new
@ -264,7 +264,7 @@ parseLinkTarget l
formatPointer :: Key -> S.ByteString
formatPointer k = prefix <> keyFile' k <> nl
where
prefix = toRawFilePath $ toInternalGitPath (pathSeparator:objectDir)
prefix = toInternalGitPath $ toRawFilePath (pathSeparator:objectDir)
nl = S8.singleton '\n'
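
For reference, the pointer content formatPointer builds is a single line: the annex object directory prefix followed by the key's filename form. A tiny illustration with a made-up key; the "/annex/objects/" prefix is hard-coded here as an assumption, and the check is a much-simplified stand-in for parseLinkTargetOrPointer:

    {-# LANGUAGE OverloadedStrings #-}
    import qualified Data.ByteString.Char8 as S8

    -- Roughly what a pointer file contains, per formatPointer above.
    examplePointer :: S8.ByteString
    examplePointer = "/annex/objects/" <> "SHA256E-s42--0123abcdef.txt" <> "\n"

    -- Simplified check: does the first line point into the annex object dir?
    looksLikePointer :: S8.ByteString -> Bool
    looksLikePointer b =
        "/annex/objects/" `S8.isPrefixOf` S8.takeWhile (/= '\n') b

    main :: IO ()
    main = print (looksLikePointer examplePointer)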
{- Maximum size of a file that could be a pointer to a key.
@ -283,8 +283,8 @@ unpaddedMaxPointerSz = 8192
{- Checks if a worktree file is a pointer to a key.
-
- Unlocked files whose content is present are not detected by this. -}
isPointerFile :: FilePath -> IO (Maybe Key)
isPointerFile f = catchDefaultIO Nothing $ withFile f ReadMode $ \h ->
isPointerFile :: RawFilePath -> IO (Maybe Key)
isPointerFile f = catchDefaultIO Nothing $ withFile (fromRawFilePath f) ReadMode $ \h ->
parseLinkTargetOrPointer <$> S.hGet h unpaddedMaxPointerSz
{- Checks a symlink target or pointer file first line to see if it

View file

@ -95,7 +95,6 @@ module Annex.Locations (
import Data.Char
import Data.Default
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import Common
import Key
@ -195,7 +194,8 @@ gitAnnexLink file key r config = do
let absfile = absNormPathUnix currdir file
let gitdir = getgitdir currdir
loc <- gitAnnexLocation' key r config False False (\_ -> return True) gitdir
toInternalGitPath <$> relPathDirToFile (parentDir absfile) loc
fromRawFilePath . toInternalGitPath . toRawFilePath
<$> relPathDirToFile (parentDir absfile) loc
where
getgitdir currdir
{- This special case is for git submodules on filesystems not
@ -204,8 +204,10 @@ gitAnnexLink file key r config = do
| not (coreSymlinks config) && needsSubmoduleFixup r =
absNormPathUnix currdir $ Git.repoPath r </> ".git"
| otherwise = Git.localGitDir r
absNormPathUnix d p = toInternalGitPath $
absPathFrom (toInternalGitPath d) (toInternalGitPath p)
absNormPathUnix d p = fromRawFilePath $ toInternalGitPath $ toRawFilePath $
absPathFrom
(fromRawFilePath $ toInternalGitPath $ toRawFilePath d)
(fromRawFilePath $ toInternalGitPath $ toRawFilePath p)
{- Calculates a symlink target as would be used in a typical git
- repository, with .git in the top of the work tree. -}
@ -569,8 +571,8 @@ keyFile = fromRawFilePath . keyFile'
keyFile' :: Key -> RawFilePath
keyFile' k =
let b = L.toStrict (serializeKey' k)
in if any (`S8.elem` b) ['&', '%', ':', '/']
let b = serializeKey' k
in if S8.any (`elem` ['&', '%', ':', '/']) b
then S8.concatMap esc b
else b
where
@ -580,6 +582,7 @@ keyFile' k =
esc '/' = "%"
esc c = S8.singleton c
{- Reverses keyFile, converting a filename fragment (ie, the basename of
- the symlink target) into a key. -}
fileKey :: FilePath -> Maybe Key

View file

@ -37,7 +37,7 @@ import Data.Time.Clock.POSIX
-
- Also, can generate new metadata, if configured to do so.
-}
genMetaData :: Key -> FilePath -> FileStatus -> Annex ()
genMetaData :: Key -> RawFilePath -> FileStatus -> Annex ()
genMetaData key file status = do
catKeyFileHEAD file >>= \case
Nothing -> noop
@ -53,8 +53,8 @@ genMetaData key file status = do
where
mtime = posixSecondsToUTCTime $ realToFrac $ modificationTime status
warncopied = warning $
"Copied metadata from old version of " ++ file ++ " to new version. " ++
"If you don't want this copied metadata, run: git annex metadata --remove-all " ++ file
"Copied metadata from old version of " ++ fromRawFilePath file ++ " to new version. " ++
"If you don't want this copied metadata, run: git annex metadata --remove-all " ++ fromRawFilePath file
-- If the only fields copied were date metadata, and they'll
-- be overwritten with the current mtime, no need to warn about
-- copying.

View file

@ -60,7 +60,7 @@ notifyDrop (AssociatedFile (Just f)) ok = do
wanted <- Annex.getState Annex.desktopnotify
when (notifyFinish wanted) $ liftIO $ do
client <- DBus.Client.connectSession
void $ Notify.notify client (droppedNote ok f)
void $ Notify.notify client (droppedNote ok (fromRawFilePath f))
#else
notifyDrop (AssociatedFile (Just _)) _ = noop
#endif

View file

@ -72,7 +72,7 @@ getFileNumCopies f = fromSources
getAssociatedFileNumCopies :: AssociatedFile -> Annex NumCopies
getAssociatedFileNumCopies (AssociatedFile afile) =
maybe getNumCopies getFileNumCopies afile
maybe getNumCopies getFileNumCopies (fromRawFilePath <$> afile)
{- This is the globally visible numcopies value for a file. So it does
- not include local configuration in the git config or command line

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Annex.SpecialRemote (
module Annex.SpecialRemote,
module Annex.SpecialRemote.Config

View file

@ -40,15 +40,15 @@ import Data.Ord
upload :: Observable v => UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex v) -> NotifyWitness -> Annex v
upload u key f d a _witness = guardHaveUUID u $
runTransfer (Transfer Upload u key) f d a
runTransfer (Transfer Upload u (fromKey id key)) f d a
alwaysUpload :: Observable v => UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex v) -> NotifyWitness -> Annex v
alwaysUpload u key f d a _witness = guardHaveUUID u $
alwaysRunTransfer (Transfer Upload u key) f d a
alwaysRunTransfer (Transfer Upload u (fromKey id key)) f d a
download :: Observable v => UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex v) -> NotifyWitness -> Annex v
download u key f d a _witness = guardHaveUUID u $
runTransfer (Transfer Download u key) f d a
runTransfer (Transfer Download u (fromKey id key)) f d a
guardHaveUUID :: Observable v => UUID -> Annex v -> Annex v
guardHaveUUID u a
@ -185,7 +185,7 @@ checkSecureHashes t a
, a
)
where
variety = keyVariety (transferKey t)
variety = fromKey keyVariety (transferKey t)
type RetryDecider = Annex (TransferInfo -> TransferInfo -> Annex Bool)

View file

@ -11,7 +11,10 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Annex.UUID (
configkeyUUID,
getUUID,
getRepoUUID,
getUncachedUUID,
@ -32,6 +35,7 @@ import Annex.Common
import qualified Annex
import qualified Git
import qualified Git.Config
import Git.Types
import Config
import qualified Data.UUID as U
@ -39,8 +43,8 @@ import qualified Data.UUID.V4 as U4
import qualified Data.UUID.V5 as U5
import Data.String
configkey :: ConfigKey
configkey = annexConfig "uuid"
configkeyUUID :: ConfigKey
configkeyUUID = annexConfig "uuid"
{- Generates a random UUID, that does not include the MAC address. -}
genUUID :: IO UUID
@ -81,20 +85,16 @@ getRepoUUID r = do
removeRepoUUID :: Annex ()
removeRepoUUID = do
unsetConfig configkey
unsetConfig configkeyUUID
storeUUID NoUUID
getUncachedUUID :: Git.Repo -> UUID
getUncachedUUID = toUUID . Git.Config.get key ""
where
(ConfigKey key) = configkey
getUncachedUUID = toUUID . Git.Config.get configkeyUUID ""
-- Does the repo's config have a key for the UUID?
-- True even when the key has no value.
isUUIDConfigured :: Git.Repo -> Bool
isUUIDConfigured = isJust . Git.Config.getMaybe key
where
(ConfigKey key) = configkey
isUUIDConfigured = isJust . Git.Config.getMaybe configkeyUUID
{- Make sure that the repo has an annex.uuid setting. -}
prepUUID :: Annex ()
@ -104,7 +104,7 @@ prepUUID = whenM ((==) NoUUID <$> getUUID) $
storeUUID :: UUID -> Annex ()
storeUUID u = do
Annex.changeGitConfig $ \c -> c { annexUUID = u }
storeUUIDIn configkey u
storeUUIDIn configkeyUUID u
storeUUIDIn :: ConfigKey -> UUID -> Annex ()
storeUUIDIn configfield = setConfig configfield . fromUUID
@ -112,7 +112,7 @@ storeUUIDIn configfield = setConfig configfield . fromUUID
{- Only sets the configkey in the Repo; does not change .git/config -}
setUUID :: Git.Repo -> UUID -> IO Git.Repo
setUUID r u = do
let s = show configkey ++ "=" ++ fromUUID u
let s = encodeBS' $ show configkeyUUID ++ "=" ++ fromUUID u
Git.Config.store s r
-- Dummy uuid for the whole web. Do not alter.

View file

@ -10,7 +10,7 @@ module Annex.VariantFile where
import Annex.Common
import Utility.Hash
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as S
variantMarker :: String
variantMarker = ".variant-"
@ -41,5 +41,5 @@ variantFile file key
where
doubleconflict = variantMarker `isInfixOf` file
shortHash :: L.ByteString -> String
shortHash = take 4 . show . md5
shortHash :: S.ByteString -> String
shortHash = take 4 . show . md5s

View file

@ -6,11 +6,13 @@
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Annex.Version where
import Annex.Common
import Config
import Git.Types
import Types.RepoVersion
import qualified Annex

View file

@ -343,11 +343,11 @@ narrowView = applyView' viewedFileReuse getViewedFileMetaData
applyView' :: MkViewedFile -> (FilePath -> MetaData) -> View -> Annex Git.Branch
applyView' mkviewedfile getfilemetadata view = do
top <- fromRepo Git.repoPath
(l, clean) <- inRepo $ Git.LsFiles.stagedDetails [top]
(l, clean) <- inRepo $ Git.LsFiles.stagedDetails [toRawFilePath top]
liftIO . nukeFile =<< fromRepo gitAnnexViewIndex
uh <- withViewIndex $ inRepo Git.UpdateIndex.startUpdateIndex
forM_ l $ \(f, sha, mode) -> do
topf <- inRepo (toTopFilePath f)
topf <- inRepo (toTopFilePath $ fromRawFilePath f)
go uh topf sha (toTreeItemType =<< mode) =<< lookupFile f
liftIO $ do
void $ stopUpdateIndex uh

View file

@ -32,35 +32,35 @@ import Config
- When in an adjusted branch that may have hidden the file, looks for a
- pointer to a key in the original branch.
-}
lookupFile :: FilePath -> Annex (Maybe Key)
lookupFile :: RawFilePath -> Annex (Maybe Key)
lookupFile = lookupFile' catkeyfile
where
catkeyfile file =
ifM (liftIO $ doesFileExist file)
ifM (liftIO $ doesFileExist $ fromRawFilePath file)
( catKeyFile file
, catKeyFileHidden file =<< getCurrentBranch
)
lookupFileNotHidden :: FilePath -> Annex (Maybe Key)
lookupFileNotHidden :: RawFilePath -> Annex (Maybe Key)
lookupFileNotHidden = lookupFile' catkeyfile
where
catkeyfile file =
ifM (liftIO $ doesFileExist file)
ifM (liftIO $ doesFileExist $ fromRawFilePath file)
( catKeyFile file
, return Nothing
)
lookupFile' :: (FilePath -> Annex (Maybe Key)) -> FilePath -> Annex (Maybe Key)
lookupFile' :: (RawFilePath -> Annex (Maybe Key)) -> RawFilePath -> Annex (Maybe Key)
lookupFile' catkeyfile file = isAnnexLink file >>= \case
Just key -> return (Just key)
Nothing -> catkeyfile file
{- Modifies an action to only act on files that are already annexed,
- and passes the key on to it. -}
whenAnnexed :: (FilePath -> Key -> Annex (Maybe a)) -> FilePath -> Annex (Maybe a)
whenAnnexed :: (RawFilePath -> Key -> Annex (Maybe a)) -> RawFilePath -> Annex (Maybe a)
whenAnnexed a file = ifAnnexed file (a file) (return Nothing)
ifAnnexed :: FilePath -> (Key -> Annex a) -> Annex a -> Annex a
ifAnnexed :: RawFilePath -> (Key -> Annex a) -> Annex a -> Annex a
ifAnnexed file yes no = maybe no yes =<< lookupFile file
{- Find all unlocked files and update the keys database for them.
@ -95,7 +95,7 @@ scanUnlockedFiles = whenM (inRepo Git.Ref.headExists <&&> not <$> isBareRepo) $
liftIO . Database.Keys.SQL.addAssociatedFileFast k tf
whenM (inAnnex k) $ do
f <- fromRepo $ fromTopFilePath tf
liftIO (isPointerFile f) >>= \case
liftIO (isPointerFile (toRawFilePath f)) >>= \case
Just k' | k' == k -> do
destmode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus f
ic <- replaceFile f $ \tmp ->
@ -104,7 +104,7 @@ scanUnlockedFiles = whenM (inRepo Git.Ref.headExists <&&> not <$> isBareRepo) $
withTSDelta (liftIO . genInodeCache tmp)
LinkAnnexNoop -> return Nothing
LinkAnnexFailed -> liftIO $ do
writePointerFile tmp k destmode
writePointerFile (toRawFilePath tmp) k destmode
return Nothing
maybe noop (restagePointerFile (Restage True) f) ic
maybe noop (restagePointerFile (Restage True) (toRawFilePath f)) ic
_ -> noop

View file

@ -64,7 +64,7 @@ removableRemote urlrenderer uuid = do
where
queueremaining r k =
queueTransferWhenSmall "remaining object in unwanted remote"
(AssociatedFile Nothing) (Transfer Download uuid k) r
(AssociatedFile Nothing) (Transfer Download uuid (fromKey id k)) r
{- Scanning for keys can take a long time; do not tie up
- the Annex monad while doing it, so other threads continue to
- run. -}

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Assistant.MakeRemote where
import Assistant.Common

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Assistant.MakeRepo where
import Assistant.WebApp.Common

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Assistant.Sync where
import Assistant.Common

View file

@ -286,7 +286,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
ks = keySource ld
doadd = sanitycheck ks $ do
(mkey, _mcache) <- liftAnnex $ do
showStart "add" $ keyFilename ks
showStart "add" $ toRawFilePath $ keyFilename ks
ingest nullMeterUpdate (Just $ LockedDown lockdownconfig ks) Nothing
maybe (failedingest change) (done change $ keyFilename ks) mkey
add _ _ = return Nothing
@ -325,7 +325,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
removedKeysMap :: InodeComparisonType -> [Change] -> Annex (M.Map InodeCacheKey Key)
removedKeysMap ct l = do
mks <- forM (filter isRmChange l) $ \c ->
catKeyFile $ changeFile c
catKeyFile $ toRawFilePath $ changeFile c
M.fromList . concat <$> mapM mkpairs (catMaybes mks)
where
mkpairs k = map (\c -> (inodeCacheToKey ct c, k)) <$>
@ -339,7 +339,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
done change file key = liftAnnex $ do
logStatus key InfoPresent
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
stagePointerFile file mode =<< hashPointerFile key
stagePointerFile (toRawFilePath file) mode =<< hashPointerFile key
showEndOk
return $ Just $ finishedChange change key
@ -457,5 +457,5 @@ checkChangeContent change@(Change { changeInfo = i }) =
handleDrops "file renamed" present k af []
where
f = changeFile change
af = AssociatedFile (Just f)
af = AssociatedFile (Just (toRawFilePath f))
checkChangeContent _ = noop

View file

@ -44,7 +44,8 @@ configMonitorThread = namedThread "ConfigMonitor" $ loop =<< getConfigs
when (old /= new) $ do
let changedconfigs = new `S.difference` old
debug $ "reloading config" :
map fst (S.toList changedconfigs)
map (fromRawFilePath . fst)
(S.toList changedconfigs)
reloadConfigs new
{- Record a commit to get this config
- change pushed out to remotes. -}
@ -53,10 +54,10 @@ configMonitorThread = namedThread "ConfigMonitor" $ loop =<< getConfigs
loop new
{- Config files, and their checksums. -}
type Configs = S.Set (FilePath, Sha)
type Configs = S.Set (RawFilePath, Sha)
{- All git-annex's config files, and actions to run when they change. -}
configFilesActions :: [(FilePath, Assistant ())]
configFilesActions :: [(RawFilePath, Assistant ())]
configFilesActions =
[ (uuidLog, void $ liftAnnex uuidDescMapLoad)
, (remoteLog, void $ liftAnnex remoteListRefresh)
@ -89,5 +90,5 @@ getConfigs :: Assistant Configs
getConfigs = S.fromList . map extract
<$> liftAnnex (inRepo $ LsTree.lsTreeFiles Annex.Branch.fullname files)
where
files = map fst configFilesActions
extract treeitem = (getTopFilePath $ LsTree.file treeitem, LsTree.sha treeitem)
files = map (fromRawFilePath . fst) configFilesActions
extract treeitem = (toRawFilePath $ getTopFilePath $ LsTree.file treeitem, LsTree.sha treeitem)

View file

@ -155,10 +155,11 @@ dailyCheck urlrenderer = do
(unstaged, cleanup) <- liftIO $ Git.LsFiles.notInRepo False ["."] g
now <- liftIO getPOSIXTime
forM_ unstaged $ \file -> do
ms <- liftIO $ catchMaybeIO $ getSymbolicLinkStatus file
let file' = fromRawFilePath file
ms <- liftIO $ catchMaybeIO $ getSymbolicLinkStatus file'
case ms of
Just s | toonew (statusChangeTime s) now -> noop
| isSymbolicLink s -> addsymlink file ms
| isSymbolicLink s -> addsymlink file' ms
_ -> noop
liftIO $ void cleanup

View file

@ -186,7 +186,7 @@ genTransfer :: Direction -> Bool -> Key -> S.Set UUID -> Remote -> Maybe (Remote
genTransfer direction want key slocs r
| direction == Upload && Remote.readonly r = Nothing
| S.member (Remote.uuid r) slocs == want = Just
(r, Transfer direction (Remote.uuid r) key)
(r, Transfer direction (Remote.uuid r) (fromKey id key))
| otherwise = Nothing
remoteHas :: Remote -> Key -> Annex Bool

View file

@ -136,10 +136,12 @@ startupScan scanner = do
-- Notice any files that were deleted before
-- watching was started.
top <- liftAnnex $ fromRepo Git.repoPath
(fs, cleanup) <- liftAnnex $ inRepo $ LsFiles.deleted [top]
(fs, cleanup) <- liftAnnex $ inRepo $ LsFiles.deleted
[toRawFilePath top]
forM_ fs $ \f -> do
liftAnnex $ onDel' f
maybe noop recordChange =<< madeChange f RmChange
let f' = fromRawFilePath f
liftAnnex $ onDel' f'
maybe noop recordChange =<< madeChange f' RmChange
void $ liftIO cleanup
liftAnnex $ showAction "started"
@ -206,7 +208,7 @@ shouldRestage ds = scanComplete ds || forceRestage ds
onAddUnlocked :: Bool -> GetFileMatcher -> Handler
onAddUnlocked symlinkssupported matcher f fs = do
mk <- liftIO $ isPointerFile f
mk <- liftIO $ isPointerFile $ toRawFilePath f
case mk of
Nothing -> onAddUnlocked' contentchanged addassociatedfile addlink samefilestatus symlinkssupported matcher f fs
Just k -> addlink f k
@ -228,7 +230,7 @@ onAddUnlocked symlinkssupported matcher f fs = do
logStatus oldkey InfoMissing
addlink file key = do
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
liftAnnex $ stagePointerFile file mode =<< hashPointerFile key
liftAnnex $ stagePointerFile (toRawFilePath file) mode =<< hashPointerFile key
madeChange file $ LinkChange (Just key)
onAddUnlocked'
@ -240,7 +242,7 @@ onAddUnlocked'
-> GetFileMatcher
-> Handler
onAddUnlocked' contentchanged addassociatedfile addlink samefilestatus symlinkssupported matcher file fs = do
v <- liftAnnex $ catKeyFile file
v <- liftAnnex $ catKeyFile (toRawFilePath file)
case (v, fs) of
(Just key, Just filestatus) ->
ifM (liftAnnex $ samefilestatus key file filestatus)
@ -270,7 +272,8 @@ onAddUnlocked' contentchanged addassociatedfile addlink samefilestatus symlinkss
guardSymlinkStandin mk a
| symlinkssupported = a
| otherwise = do
linktarget <- liftAnnex $ getAnnexLinkTarget file
linktarget <- liftAnnex $ getAnnexLinkTarget $
toRawFilePath file
case linktarget of
Nothing -> a
Just lt -> do
@ -287,7 +290,7 @@ onAddUnlocked' contentchanged addassociatedfile addlink samefilestatus symlinkss
onAddSymlink :: Handler
onAddSymlink file filestatus = unlessIgnored file $ do
linktarget <- liftIO (catchMaybeIO $ readSymbolicLink file)
kv <- liftAnnex (lookupFile file)
kv <- liftAnnex (lookupFile (toRawFilePath file))
onAddSymlink' linktarget kv file filestatus
onAddSymlink' :: Maybe String -> Maybe Key -> Handler
@ -299,7 +302,7 @@ onAddSymlink' linktarget mk file filestatus = go mk
then ensurestaged (Just link) =<< getDaemonStatus
else do
liftAnnex $ replaceFile file $
makeAnnexLink link
makeAnnexLink link . toRawFilePath
addLink file link (Just key)
-- other symlink, not git-annex
go Nothing = ensurestaged linktarget =<< getDaemonStatus
@ -332,8 +335,8 @@ addLink file link mk = do
case v of
Just (currlink, sha, _type)
| s2w8 link == L.unpack currlink ->
stageSymlink file sha
_ -> stageSymlink file =<< hashSymlink link
stageSymlink (toRawFilePath file) sha
_ -> stageSymlink (toRawFilePath file) =<< hashSymlink link
madeChange file $ LinkChange mk
onDel :: Handler
@ -349,7 +352,7 @@ onDel' file = do
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.unstageFile file)
where
withkey a = maybe noop a =<< catKeyFile file
withkey a = maybe noop a =<< catKeyFile (toRawFilePath file)
{- A directory has been deleted, or moved, so tell git to remove anything
- that was inside it from its cache. Since it could reappear at any time,
@ -360,14 +363,15 @@ onDel' file = do
onDelDir :: Handler
onDelDir dir _ = do
debug ["directory deleted", dir]
(fs, clean) <- liftAnnex $ inRepo $ LsFiles.deleted [dir]
(fs, clean) <- liftAnnex $ inRepo $ LsFiles.deleted [toRawFilePath dir]
let fs' = map fromRawFilePath fs
liftAnnex $ mapM_ onDel' fs
liftAnnex $ mapM_ onDel' fs'
-- Get the events queued up as fast as possible, so the
-- committer sees them all in one block.
now <- liftIO getCurrentTime
recordChanges $ map (\f -> Change now f RmChange) fs
recordChanges $ map (\f -> Change now f RmChange) fs'
void $ liftIO clean
noChange

View file

@ -96,7 +96,7 @@ queueTransfersMatching matching reason schedule k f direction
inset s r = S.member (Remote.uuid r) s
gentransfer r = Transfer
{ transferDirection = direction
, transferKey = k
, transferKeyData = fromKey id k
, transferUUID = Remote.uuid r
}
defer
@ -129,7 +129,7 @@ queueDeferredDownloads reason schedule = do
where
gentransfer r = Transfer
{ transferDirection = Download
, transferKey = k
, transferKeyData = fromKey id k
, transferUUID = Remote.uuid r
}

View file

@ -161,7 +161,7 @@ genTransfer t info = case transferRemote info of
AssociatedFile Nothing -> noop
AssociatedFile (Just af) -> void $
addAlert $ makeAlertFiller True $
transferFileAlert direction True af
transferFileAlert direction True (fromRawFilePath af)
unless isdownload $
handleDrops
("object uploaded to " ++ show remote)

View file

@ -62,7 +62,7 @@ describeUnused' whenbig = liftAnnex $ go =<< readUnusedLog ""
tenthused Nothing _ = False
tenthused (Just disksize) used = used >= disksize `div` 10
sumkeysize s k = s + fromMaybe 0 (keySize k)
sumkeysize s k = s + fromMaybe 0 (fromKey keySize k)
forpath a = inRepo $ liftIO . a . Git.repoPath

View file

@ -25,7 +25,6 @@ import Annex.Content
import Annex.UUID
import qualified Backend
import qualified Types.Backend
import qualified Types.Key
import Assistant.TransferQueue
import Assistant.TransferSlots
import Remote (remoteFromUUID)
@ -88,16 +87,16 @@ startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO ol
hook <- asIO1 $ distributionDownloadComplete d dest cleanup
modifyDaemonStatus_ $ \s -> s
{ transferHook = M.insert k hook (transferHook s) }
maybe noop (queueTransfer "upgrade" Next (AssociatedFile (Just f)) t)
maybe noop (queueTransfer "upgrade" Next (AssociatedFile (Just (toRawFilePath f))) t)
=<< liftAnnex (remoteFromUUID webUUID)
startTransfer t
k = distributionKey d
k = mkKey $ const $ distributionKey d
u = distributionUrl d
f = takeFileName u ++ " (for upgrade)"
t = Transfer
{ transferDirection = Download
, transferUUID = webUUID
, transferKey = k
, transferKeyData = fromKey id k
}
cleanup = liftAnnex $ do
lockContentForRemoval k removeAnnex
@ -117,8 +116,8 @@ distributionDownloadComplete d dest cleanup t
=<< liftAnnex (withObjectLoc k fsckit)
| otherwise = cleanup
where
k = distributionKey d
fsckit f = case Backend.maybeLookupBackendVariety (Types.Key.keyVariety k) of
k = mkKey $ const $ distributionKey d
fsckit f = case Backend.maybeLookupBackendVariety (fromKey keyVariety k) of
Nothing -> return $ Just f
Just b -> case Types.Backend.verifyKeyContent b of
Nothing -> return $ Just f

View file

@ -101,11 +101,12 @@ setRepoConfig uuid mremote oldc newc = do
- there's not. Special remotes don't normally
- have that, and don't use it. Temporarily add
- it if it's missing. -}
let remotefetch = "remote." ++ T.unpack (repoName oldc) ++ ".fetch"
let remotefetch = Git.ConfigKey $ encodeBS' $
"remote." ++ T.unpack (repoName oldc) ++ ".fetch"
needfetch <- isNothing <$> fromRepo (Git.Config.getMaybe remotefetch)
when needfetch $
inRepo $ Git.Command.run
[Param "config", Param remotefetch, Param ""]
[Param "config", Param (Git.fromConfigKey remotefetch), Param ""]
inRepo $ Git.Command.run
[ Param "remote"
, Param "rename"

View file

@ -336,7 +336,7 @@ getFinishAddDriveR drive = go
isnew <- liftIO $ makeRepo dir True
{- Removable drives are not reliable media, so enable fsync. -}
liftIO $ inDir dir $
setConfig (ConfigKey "core.fsyncobjectfiles")
setConfig "core.fsyncobjectfiles"
(Git.Config.boolConfig True)
(u, r) <- a isnew
when isnew $

View file

@ -20,7 +20,7 @@ import Types.StandardGroups
import Utility.UserInfo
import Utility.Gpg
import Types.Remote (RemoteConfig)
import Git.Types (RemoteName, fromRef)
import Git.Types (RemoteName, fromRef, fromConfigKey)
import qualified Remote.GCrypt as GCrypt
import qualified Annex
import qualified Git.Command
@ -317,7 +317,8 @@ testServer sshinput@(SshInput { inputHostname = Just hn }) = do
else T.pack $ "Failed to ssh to the server. Transcript: " ++ s
finduuid (k, v)
| k == "annex.uuid" = Just $ toUUID v
| k == GCrypt.coreGCryptId = Just $ genUUIDInNameSpace gCryptNameSpace v
| k == fromConfigKey GCrypt.coreGCryptId =
Just $ genUUIDInNameSpace gCryptNameSpace v
| otherwise = Nothing
checkcommand c = "if which " ++ c ++ "; then " ++ report c ++ "; fi"

View file

@ -45,7 +45,7 @@ transfersDisplay = do
transferPaused info || isNothing (startedTime info)
desc transfer info = case associatedFile info of
AssociatedFile Nothing -> serializeKey $ transferKey transfer
AssociatedFile (Just af) -> af
AssociatedFile (Just af) -> fromRawFilePath af
{- Simplifies a list of transfers, avoiding display of redundant
- equivalent transfers. -}

View file

@ -59,16 +59,18 @@ genKey source meterupdate preferredbackend = do
Just k -> Just (makesane k, b)
where
-- keyNames should not contain newline characters.
makesane k = k { keyName = S8.map fixbadchar (keyName k) }
makesane k = alterKey k $ \d -> d
{ keyName = S8.map fixbadchar (fromKey keyName k)
}
fixbadchar c
| c == '\n' = '_'
| otherwise = c
getBackend :: FilePath -> Key -> Annex (Maybe Backend)
getBackend file k = case maybeLookupBackendVariety (keyVariety k) of
getBackend file k = case maybeLookupBackendVariety (fromKey keyVariety k) of
Just backend -> return $ Just backend
Nothing -> do
warning $ "skipping " ++ file ++ " (unknown backend " ++ decodeBS (formatKeyVariety (keyVariety k)) ++ ")"
warning $ "skipping " ++ file ++ " (unknown backend " ++ decodeBS (formatKeyVariety (fromKey keyVariety k)) ++ ")"
return Nothing
{- Looks up the backend that should be used for a file.
@ -95,4 +97,4 @@ varietyMap = M.fromList $ zip (map B.backendVariety list) list
isStableKey :: Key -> Bool
isStableKey k = maybe False (`B.isStableKey` k)
(maybeLookupBackendVariety (keyVariety k))
(maybeLookupBackendVariety (fromKey keyVariety k))

View file

@ -91,7 +91,7 @@ keyValue hash source meterupdate = do
let file = contentLocation source
filesize <- liftIO $ getFileSize file
s <- hashFile hash file meterupdate
return $ Just $ stubKey
return $ Just $ mkKey $ \k -> k
{ keyName = encodeBS s
, keyVariety = hashKeyVariety hash (HasExt False)
, keySize = Just filesize
@ -105,8 +105,8 @@ keyValueE hash source meterupdate =
addE k = do
maxlen <- annexMaxExtensionLength <$> Annex.getGitConfig
let ext = selectExtension maxlen (keyFilename source)
return $ Just $ k
{ keyName = keyName k <> encodeBS ext
return $ Just $ alterKey k $ \d -> d
{ keyName = keyName d <> encodeBS ext
, keyVariety = hashKeyVariety hash (HasExt True)
}
@ -169,7 +169,7 @@ needsUpgrade :: Key -> Bool
needsUpgrade key = or
[ "\\" `S8.isPrefixOf` keyHash key
, any (not . validInExtension) (decodeBS $ snd $ splitKeyNameExtension key)
, not (hasExt (keyVariety key)) && keyHash key /= keyName key
, not (hasExt (fromKey keyVariety key)) && keyHash key /= fromKey keyName key
]
trivialMigrate :: Key -> Backend -> AssociatedFile -> Annex (Maybe Key)
@ -179,30 +179,31 @@ trivialMigrate oldkey newbackend afile = trivialMigrate' oldkey newbackend afile
trivialMigrate' :: Key -> Backend -> AssociatedFile -> Maybe Int -> Maybe Key
trivialMigrate' oldkey newbackend afile maxextlen
{- Fast migration from hashE to hash backend. -}
| migratable && hasExt oldvariety = Just $ oldkey
| migratable && hasExt oldvariety = Just $ alterKey oldkey $ \d -> d
{ keyName = keyHash oldkey
, keyVariety = newvariety
}
{- Fast migration from hash to hashE backend. -}
| migratable && hasExt newvariety = case afile of
AssociatedFile Nothing -> Nothing
AssociatedFile (Just file) -> Just $ oldkey
AssociatedFile (Just file) -> Just $ alterKey oldkey $ \d -> d
{ keyName = keyHash oldkey
<> encodeBS (selectExtension maxextlen file)
<> encodeBS' (selectExtension maxextlen (fromRawFilePath file))
, keyVariety = newvariety
}
{- Upgrade to fix bad previous migration that created a
- non-extension preserving key, with an extension
- in its keyName. -}
| newvariety == oldvariety && not (hasExt oldvariety) &&
keyHash oldkey /= keyName oldkey = Just $ oldkey
{ keyName = keyHash oldkey
}
keyHash oldkey /= fromKey keyName oldkey =
Just $ alterKey oldkey $ \d -> d
{ keyName = keyHash oldkey
}
| otherwise = Nothing
where
migratable = oldvariety /= newvariety
&& sameExceptExt oldvariety newvariety
oldvariety = keyVariety oldkey
oldvariety = fromKey keyVariety oldkey
newvariety = backendVariety newbackend
hashFile :: Hash -> FilePath -> MeterUpdate -> Annex String
@ -294,5 +295,7 @@ testKeyBackend =
let b = genBackendE (SHA2Hash (HashSize 256))
in b { getKey = \ks p -> (fmap addE) <$> getKey b ks p }
where
addE k = k { keyName = keyName k <> longext }
addE k = alterKey k $ \d -> d
{ keyName = keyName d <> longext
}
longext = ".this-is-a-test-key"

View file

@ -32,7 +32,7 @@ backend = Backend
{- Every unique url has a corresponding key. -}
fromUrl :: String -> Maybe Integer -> Key
fromUrl url size = stubKey
fromUrl url size = mkKey $ \k -> k
{ keyName = genKeyName url
, keyVariety = URLKey
, keySize = size

View file

@ -39,7 +39,7 @@ keyValue source _ = do
stat <- liftIO $ getFileStatus f
sz <- liftIO $ getFileSize' f stat
relf <- getTopFilePath <$> inRepo (toTopFilePath $ keyFilename source)
return $ Just $ stubKey
return $ Just $ mkKey $ \k -> k
{ keyName = genKeyName relf
, keyVariety = WORMKey
, keySize = Just sz
@ -48,14 +48,14 @@ keyValue source _ = do
{- Old WORM keys could contain spaces, and can be upgraded to remove them. -}
needsUpgrade :: Key -> Bool
needsUpgrade key = ' ' `S8.elem` keyName key
needsUpgrade key = ' ' `S8.elem` fromKey keyName key
removeSpaces :: Key -> Backend -> AssociatedFile -> Annex (Maybe Key)
removeSpaces oldkey newbackend _
| migratable = return $ Just $ oldkey
{ keyName = encodeBS $ reSanitizeKeyName $ decodeBS $ keyName oldkey }
| migratable = return $ Just $ alterKey oldkey $ \d -> d
{ keyName = encodeBS $ reSanitizeKeyName $ decodeBS $ keyName d }
| otherwise = return Nothing
where
migratable = oldvariety == newvariety
oldvariety = keyVariety oldkey
oldvariety = fromKey keyVariety oldkey
newvariety = backendVariety newbackend

View file

@ -18,6 +18,14 @@ git-annex (8.20191107) UNRELEASED; urgency=medium
git-annex (7.20191115) UNRELEASED; urgency=medium
* Sped up many git-annex commands that operate on many files, by
using ByteStrings. Some commands like find got up to 60% faster.
* Sped up many git-annex commands that operate on many files, by
avoiding reserialization of keys.
find got 7% faster; whereis 3% faster; and git-annex get when
all files are already present got 5% faster.
* Sped up many git-annex commands that query the git-annex branch.
In particular whereis got 1.5% faster.
* Stop displaying rsync progress, and use git-annex's own progress display
for local-to-local repo transfers.
* git-lfs: The url provided to initremote/enableremote will now be

View file

@ -29,6 +29,11 @@ Copyright: 2018 Joey Hess <id@joeyh.name>
2013 Michael Snoyman
License: Expat
Files: Utility/Attoparsec.hs
Copyright: 2019 Joey Hess <id@joeyh.name>
2007-2015 Bryan O'Sullivan
License: BSD-3-clause
Files: Utility/GitLFS.hs
Copyright: © 2019 Joey Hess <id@joeyh.name>
License: AGPL-3+
@ -113,6 +118,34 @@ License: BSD-2-clause
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
License: BSD-3-clause
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
.
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
.
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
License: Expat
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the

View file

@ -92,7 +92,7 @@ gitAnnexGlobalOptions = commonGlobalOptions ++
where
setnumcopies n = Annex.changeState $ \s -> s { Annex.forcenumcopies = Just $ NumCopies n }
setuseragent v = Annex.changeState $ \s -> s { Annex.useragent = Just v }
setgitconfig v = Annex.adjustGitRepo $ \r -> Git.Config.store v $
setgitconfig v = Annex.adjustGitRepo $ \r -> Git.Config.store (encodeBS' v) $
r { gitGlobalOpts = gitGlobalOpts r ++ [Param "-c", Param v] }
setdesktopnotify v = Annex.changeState $ \s -> s { Annex.desktopnotify = Annex.desktopnotify s <> v }

View file

@ -30,7 +30,7 @@ remoteUUID = Field "remoteuuid" $
associatedFile :: Field
associatedFile = Field "associatedfile" $ \f ->
-- is the file a safe relative filename?
not (absoluteGitPath f) && not ("../" `isPrefixOf` f)
not (absoluteGitPath (toRawFilePath f)) && not ("../" `isPrefixOf` f)
direct :: Field
direct = Field "direct" $ \f -> f == "1"

View file

@ -34,11 +34,11 @@ import Annex.Content
import Annex.InodeSentinal
import qualified Database.Keys
withFilesInGit :: (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesInGit :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesInGit a l = seekActions $ prepFiltered a $
seekHelper LsFiles.inRepo l
withFilesInGitNonRecursive :: String -> (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesInGitNonRecursive :: String -> (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesInGitNonRecursive needforce a l = ifM (Annex.getState Annex.force)
( withFilesInGit a l
, if null l
@ -48,7 +48,7 @@ withFilesInGitNonRecursive needforce a l = ifM (Annex.getState Annex.force)
where
getfiles c [] = return (reverse c)
getfiles c ((WorkTreeItem p):ps) = do
(fs, cleanup) <- inRepo $ LsFiles.inRepo [p]
(fs, cleanup) <- inRepo $ LsFiles.inRepo [toRawFilePath p]
case fs of
[f] -> do
void $ liftIO $ cleanup
@ -58,11 +58,11 @@ withFilesInGitNonRecursive needforce a l = ifM (Annex.getState Annex.force)
getfiles c ps
_ -> giveup needforce
withFilesNotInGit :: Bool -> (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesNotInGit :: Bool -> (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesNotInGit skipdotfiles a l
| skipdotfiles = do
{- dotfiles are not acted on unless explicitly listed -}
files <- filter (not . dotfile) <$>
files <- filter (not . dotfile . fromRawFilePath) <$>
seekunless (null ps && not (null l)) ps
dotfiles <- seekunless (null dotps) dotps
go (files++dotfiles)
@ -74,9 +74,9 @@ withFilesNotInGit skipdotfiles a l
force <- Annex.getState Annex.force
g <- gitRepo
liftIO $ Git.Command.leaveZombie
<$> LsFiles.notInRepo force (map (\(WorkTreeItem f) -> f) l') g
<$> LsFiles.notInRepo force (map (\(WorkTreeItem f) -> toRawFilePath f) l') g
go fs = seekActions $ prepFiltered a $
return $ concat $ segmentPaths (map (\(WorkTreeItem f) -> f) l) fs
return $ concat $ segmentPaths (map (\(WorkTreeItem f) -> toRawFilePath f) l) fs
withPathContents :: ((FilePath, FilePath) -> CommandSeek) -> CmdParams -> CommandSeek
withPathContents a params = do
@ -110,30 +110,30 @@ withPairs a params = seekActions $ return $ map a $ pairs [] params
pairs c (x:y:xs) = pairs ((x,y):c) xs
pairs _ _ = giveup "expected pairs"
withFilesToBeCommitted :: (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesToBeCommitted :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesToBeCommitted a l = seekActions $ prepFiltered a $
seekHelper LsFiles.stagedNotDeleted l
isOldUnlocked :: FilePath -> Annex Bool
isOldUnlocked :: RawFilePath -> Annex Bool
isOldUnlocked f = liftIO (notSymlink f) <&&>
(isJust <$> catKeyFile f <||> isJust <$> catKeyFileHEAD f)
{- unlocked pointer files that are staged, and whose content has not been
- modified -}
withUnmodifiedUnlockedPointers :: (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withUnmodifiedUnlockedPointers :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withUnmodifiedUnlockedPointers a l = seekActions $
prepFiltered a unlockedfiles
where
unlockedfiles = filterM isUnmodifiedUnlocked
=<< seekHelper LsFiles.typeChangedStaged l
isUnmodifiedUnlocked :: FilePath -> Annex Bool
isUnmodifiedUnlocked :: RawFilePath -> Annex Bool
isUnmodifiedUnlocked f = catKeyFile f >>= \case
Nothing -> return False
Just k -> sameInodeCache f =<< Database.Keys.getInodeCaches k
Just k -> sameInodeCache (fromRawFilePath f) =<< Database.Keys.getInodeCaches k
{- Finds files that may be modified. -}
withFilesMaybeModified :: (FilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesMaybeModified :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
withFilesMaybeModified a params = seekActions $
prepFiltered a $ seekHelper LsFiles.modified params
@ -169,7 +169,7 @@ withKeyOptions ko auto keyaction = withKeyOptions' ko auto mkkeyaction
return $ \v@(k, ai) ->
let i = case ai of
ActionItemBranchFilePath (BranchFilePath _ topf) _ ->
MatchingKey k (AssociatedFile $ Just $ getTopFilePath topf)
MatchingKey k (AssociatedFile $ Just $ toRawFilePath $ getTopFilePath topf)
_ -> MatchingKey k (AssociatedFile Nothing)
in whenM (matcher i) $
keyaction v
@ -225,20 +225,22 @@ withKeyOptions' ko auto mkkeyaction fallbackaction params = do
forM_ ts $ \(t, i) ->
keyaction (transferKey t, mkActionItem (t, i))
prepFiltered :: (FilePath -> CommandSeek) -> Annex [FilePath] -> Annex [CommandSeek]
prepFiltered :: (RawFilePath -> CommandSeek) -> Annex [RawFilePath] -> Annex [CommandSeek]
prepFiltered a fs = do
matcher <- Limit.getMatcher
map (process matcher) <$> fs
where
process matcher f = whenM (matcher $ MatchingFile $ FileInfo f f) $ a f
process matcher f =
let f' = fromRawFilePath f
in whenM (matcher $ MatchingFile $ FileInfo f' f') $ a f
seekActions :: Annex [CommandSeek] -> Annex ()
seekActions gen = sequence_ =<< gen
seekHelper :: ([FilePath] -> Git.Repo -> IO ([FilePath], IO Bool)) -> [WorkTreeItem] -> Annex [FilePath]
seekHelper :: ([RawFilePath] -> Git.Repo -> IO ([RawFilePath], IO Bool)) -> [WorkTreeItem] -> Annex [RawFilePath]
seekHelper a l = inRepo $ \g ->
concat . concat <$> forM (segmentXargsOrdered l')
(runSegmentPaths (\fs -> Git.Command.leaveZombie <$> a fs g))
(runSegmentPaths (\fs -> Git.Command.leaveZombie <$> a fs g) . map toRawFilePath)
where
l' = map (\(WorkTreeItem f) -> f) l
@ -264,14 +266,14 @@ workTreeItems' (AllowHidden allowhidden) ps = do
unlessM (exists p <||> hidden currbranch p) $ do
toplevelWarning False (p ++ " not found")
Annex.incError
return (map WorkTreeItem ps)
return (map (WorkTreeItem) ps)
where
exists p = isJust <$> liftIO (catchMaybeIO $ getSymbolicLinkStatus p)
hidden currbranch p
| allowhidden = do
f <- liftIO $ relPathCwdToFile p
isJust <$> catObjectMetaDataHidden f currbranch
isJust <$> catObjectMetaDataHidden (toRawFilePath f) currbranch
| otherwise = return False
notSymlink :: FilePath -> IO Bool
notSymlink f = liftIO $ not . isSymbolicLink <$> getSymbolicLinkStatus f
notSymlink :: RawFilePath -> IO Bool
notSymlink f = liftIO $ not . isSymbolicLink <$> getSymbolicLinkStatus (fromRawFilePath f)
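
The CmdLine/Seek hunks above move the whole file-seeking pipeline from FilePath to RawFilePath, converting at the boundaries with toRawFilePath and fromRawFilePath. As a rough mental model only (the real helpers live in git-annex's encoding utilities and respect the filesystem encoding rather than assuming one byte per character), they behave like:

import qualified Data.ByteString.Char8 as S8

type RawFilePath = S8.ByteString

-- Simplified stand-ins; treat the names as the only part taken from the
-- diff, and the bodies as an approximation.
toRawFilePath :: FilePath -> RawFilePath
toRawFilePath = S8.pack

fromRawFilePath :: RawFilePath -> FilePath
fromRawFilePath = S8.unpack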

View file

@ -50,7 +50,7 @@ optParser desc = AddOptions
seek :: AddOptions -> CommandSeek
seek o = startConcurrency commandStages $ do
matcher <- largeFilesMatcher
let gofile file = ifM (checkFileMatcher matcher file <||> Annex.getState Annex.force)
let gofile file = ifM (checkFileMatcher matcher (fromRawFilePath file) <||> Annex.getState Annex.force)
( start file
, ifM (annexAddSmallFiles <$> Annex.getGitConfig)
( startSmall file
@ -61,7 +61,7 @@ seek o = startConcurrency commandStages $ do
Batch fmt
| updateOnly o ->
giveup "--update --batch is not supported"
| otherwise -> batchFilesMatching fmt gofile
| otherwise -> batchFilesMatching fmt (gofile . toRawFilePath)
NoBatch -> do
l <- workTreeItems (addThese o)
let go a = a (commandAction . gofile) l
@ -71,28 +71,28 @@ seek o = startConcurrency commandStages $ do
go withUnmodifiedUnlockedPointers
{- Pass file off to git-add. -}
startSmall :: FilePath -> CommandStart
startSmall :: RawFilePath -> CommandStart
startSmall file = starting "add" (ActionItemWorkTreeFile file) $
next $ addSmall file
addSmall :: FilePath -> Annex Bool
addSmall :: RawFilePath -> Annex Bool
addSmall file = do
showNote "non-large file; adding content to git repository"
addFile file
addFile :: FilePath -> Annex Bool
addFile :: RawFilePath -> Annex Bool
addFile file = do
ps <- forceParams
Annex.Queue.addCommand "add" (ps++[Param "--"]) [file]
Annex.Queue.addCommand "add" (ps++[Param "--"]) [fromRawFilePath file]
return True
start :: FilePath -> CommandStart
start :: RawFilePath -> CommandStart
start file = do
mk <- liftIO $ isPointerFile file
maybe go fixuppointer mk
where
go = ifAnnexed file addpresent add
add = liftIO (catchMaybeIO $ getSymbolicLinkStatus file) >>= \case
add = liftIO (catchMaybeIO $ getSymbolicLinkStatus (fromRawFilePath file)) >>= \case
Nothing -> stop
Just s
| not (isRegularFile s) && not (isSymbolicLink s) -> stop
@ -102,28 +102,28 @@ start file = do
then next $ addFile file
else perform file
addpresent key =
liftIO (catchMaybeIO $ getSymbolicLinkStatus file) >>= \case
liftIO (catchMaybeIO $ getSymbolicLinkStatus $ fromRawFilePath file) >>= \case
Just s | isSymbolicLink s -> fixuplink key
_ -> add
fixuplink key = starting "add" (ActionItemWorkTreeFile file) $ do
-- the annexed symlink is present but not yet added to git
liftIO $ removeFile file
addLink file key Nothing
liftIO $ removeFile (fromRawFilePath file)
addLink (fromRawFilePath file) key Nothing
next $
cleanup key =<< inAnnex key
fixuppointer key = starting "add" (ActionItemWorkTreeFile file) $ do
-- the pointer file is present, but not yet added to git
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath file)
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath (fromRawFilePath file))
next $ addFile file
perform :: FilePath -> CommandPerform
perform :: RawFilePath -> CommandPerform
perform file = withOtherTmp $ \tmpdir -> do
lockingfile <- not <$> addUnlocked
let cfg = LockDownConfig
{ lockingFile = lockingfile
, hardlinkFileTmpDir = Just tmpdir
}
ld <- lockDown cfg file
ld <- lockDown cfg (fromRawFilePath file)
let sizer = keySource <$> ld
v <- metered Nothing sizer $ \_meter meterupdate ->
ingestAdd meterupdate ld

View file

@ -156,13 +156,13 @@ startRemote r o file uri sz = do
performRemote r o uri file' sz
performRemote :: Remote -> AddUrlOptions -> URLString -> FilePath -> Maybe Integer -> CommandPerform
performRemote r o uri file sz = ifAnnexed file adduri geturi
performRemote r o uri file sz = ifAnnexed (toRawFilePath file) adduri geturi
where
loguri = setDownloader uri OtherDownloader
adduri = addUrlChecked o loguri file (Remote.uuid r) checkexistssize
checkexistssize key = return $ case sz of
Nothing -> (True, True, loguri)
Just n -> (True, n == fromMaybe n (keySize key), loguri)
Just n -> (True, n == fromMaybe n (fromKey keySize key), loguri)
geturi = next $ isJust <$> downloadRemoteFile r (downloadOptions o) uri file sz
downloadRemoteFile :: Remote -> DownloadOptions -> URLString -> FilePath -> Maybe Integer -> Annex (Maybe Key)
@ -180,7 +180,7 @@ downloadRemoteFile r o uri file sz = checkCanAdd file $ do
setTempUrl urlkey loguri
let downloader = \dest p -> fst
<$> Remote.retrieveKeyFile r urlkey
(AssociatedFile (Just file)) dest p
(AssociatedFile (Just (toRawFilePath file))) dest p
ret <- downloadWith downloader urlkey (Remote.uuid r) loguri file
removeTempUrl urlkey
return ret
@ -212,13 +212,13 @@ startWeb o urlstring = go $ fromMaybe bad $ parseURI urlstring
performWeb o urlstring file urlinfo
performWeb :: AddUrlOptions -> URLString -> FilePath -> Url.UrlInfo -> CommandPerform
performWeb o url file urlinfo = ifAnnexed file addurl geturl
performWeb o url file urlinfo = ifAnnexed (toRawFilePath file) addurl geturl
where
geturl = next $ isJust <$> addUrlFile (downloadOptions o) url urlinfo file
addurl = addUrlChecked o url file webUUID $ \k ->
ifM (pure (not (rawOption (downloadOptions o))) <&&> youtubeDlSupported url)
( return (True, True, setDownloader url YoutubeDownloader)
, return (Url.urlExists urlinfo, Url.urlSize urlinfo == keySize k, url)
, return (Url.urlExists urlinfo, Url.urlSize urlinfo == fromKey keySize k, url)
)
{- Check that the url exists, and has the same size as the key,
@ -258,7 +258,7 @@ addUrlFile o url urlinfo file =
downloadWeb :: DownloadOptions -> URLString -> Url.UrlInfo -> FilePath -> Annex (Maybe Key)
downloadWeb o url urlinfo file =
go =<< downloadWith' downloader urlkey webUUID url (AssociatedFile (Just file))
go =<< downloadWith' downloader urlkey webUUID url (AssociatedFile (Just (toRawFilePath file)))
where
urlkey = addSizeUrlKey urlinfo $ Backend.URL.fromUrl url Nothing
downloader f p = downloadUrl urlkey p [url] f
@ -278,7 +278,7 @@ downloadWeb o url urlinfo file =
-- first, and check if that is already an annexed file,
-- to avoid unnecessary work in that case.
| otherwise = youtubeDlFileNameHtmlOnly url >>= \case
Right dest -> ifAnnexed dest
Right dest -> ifAnnexed (toRawFilePath dest)
(alreadyannexed dest)
(dl dest)
Left _ -> normalfinish tmp
@ -345,7 +345,7 @@ downloadWith :: (FilePath -> MeterUpdate -> Annex Bool) -> Key -> UUID -> URLStr
downloadWith downloader dummykey u url file =
go =<< downloadWith' downloader dummykey u url afile
where
afile = AssociatedFile (Just file)
afile = AssociatedFile (Just (toRawFilePath file))
go Nothing = return Nothing
go (Just tmp) = finishDownloadWith tmp u url file
@ -379,7 +379,9 @@ finishDownloadWith tmp u url file = do
{- Adds the url size to the Key. -}
addSizeUrlKey :: Url.UrlInfo -> Key -> Key
addSizeUrlKey urlinfo key = key { keySize = Url.urlSize urlinfo }
addSizeUrlKey urlinfo key = alterKey key $ \d -> d
{ keySize = Url.urlSize urlinfo
}
{- Adds worktree file to the repository. -}
addWorkTree :: UUID -> URLString -> FilePath -> Key -> Maybe FilePath -> Annex ()
@ -399,7 +401,7 @@ addWorkTree u url file key mtmp = case mtmp of
-- than the work tree file.
liftIO $ renameFile file tmp
go
else void $ Command.Add.addSmall file
else void $ Command.Add.addSmall (toRawFilePath file)
where
go = do
maybeShowJSON $ JSONChunk [("key", serializeKey key)]

View file

@ -10,6 +10,9 @@ module Command.Config where
import Command
import Logs.Config
import Config
import Git.Types (ConfigKey(..), fromConfigValue)
import qualified Data.ByteString as S
cmd :: Command
cmd = noMessages $ command "config" SectionSetup
@ -17,9 +20,9 @@ cmd = noMessages $ command "config" SectionSetup
paramNothing (seek <$$> optParser)
data Action
= SetConfig ConfigName ConfigValue
| GetConfig ConfigName
| UnsetConfig ConfigName
= SetConfig ConfigKey ConfigValue
| GetConfig ConfigKey
| UnsetConfig ConfigKey
type Name = String
type Value = String
@ -48,19 +51,19 @@ optParser _ = setconfig <|> getconfig <|> unsetconfig
)
seek :: Action -> CommandSeek
seek (SetConfig name val) = commandAction $
startingUsualMessages name (ActionItemOther (Just val)) $ do
setGlobalConfig name val
setConfig (ConfigKey name) val
seek (SetConfig ck@(ConfigKey name) val) = commandAction $
startingUsualMessages (decodeBS' name) (ActionItemOther (Just (fromConfigValue val))) $ do
setGlobalConfig ck val
setConfig ck (fromConfigValue val)
next $ return True
seek (UnsetConfig name) = commandAction $
startingUsualMessages name (ActionItemOther (Just "unset")) $do
unsetGlobalConfig name
unsetConfig (ConfigKey name)
seek (UnsetConfig ck@(ConfigKey name)) = commandAction $
startingUsualMessages (decodeBS' name) (ActionItemOther (Just "unset")) $do
unsetGlobalConfig ck
unsetConfig ck
next $ return True
seek (GetConfig name) = commandAction $
seek (GetConfig ck) = commandAction $
startingCustomOutput (ActionItemOther Nothing) $ do
getGlobalConfig name >>= \case
getGlobalConfig ck >>= \case
Nothing -> return ()
Just v -> liftIO $ putStrLn v
Just (ConfigValue v) -> liftIO $ S.putStrLn v
next $ return True
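
The Command/Config hunks above apply the same ByteString treatment to git configuration: names and values travel as ConfigKey and ConfigValue rather than String. A sketch based only on how the constructors are used in this diff (not the actual Git.Types definitions):

import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8

newtype ConfigKey = ConfigKey S.ByteString
    deriving (Eq, Ord, Show)

newtype ConfigValue = ConfigValue S.ByteString
    deriving (Eq, Ord, Show)

-- Convert back to String only at the edges, e.g. for display.
fromConfigKey :: ConfigKey -> String
fromConfigKey (ConfigKey k) = S8.unpack k

fromConfigValue :: ConfigValue -> String
fromConfigValue (ConfigValue v) = S8.unpack v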

View file

@ -12,6 +12,7 @@ import Annex.UUID
import Annex.Init
import qualified Annex.Branch
import qualified Git.Config
import Git.Types
import Remote.GCrypt (coreGCryptId)
import qualified CmdLine.GitAnnexShell.Fields as Fields
import CmdLine.GitAnnexShell.Checks
@ -28,11 +29,12 @@ seek = withNothing (commandAction start)
start :: CommandStart
start = do
u <- findOrGenUUID
showConfig "annex.uuid" $ fromUUID u
showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
showConfig configkeyUUID $ fromUUID u
showConfig coreGCryptId . fromConfigValue
=<< fromRepo (Git.Config.get coreGCryptId mempty)
stop
where
showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
showConfig k v = liftIO $ putStrLn $ fromConfigKey k ++ "=" ++ v
{- The repository may not yet have a UUID; automatically initialize it
- when there's a git-annex branch available or if the autoinit field was

View file

@ -47,7 +47,7 @@ seek :: CopyOptions -> CommandSeek
seek o = startConcurrency commandStages $ do
let go = whenAnnexed $ start o
case batchOption o of
Batch fmt -> batchFilesMatching fmt go
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
NoBatch -> withKeyOptions
(keyOptions o) (autoMode o)
(commandAction . Command.Move.startKey (fromToOptions o) Command.Move.RemoveNever)
@ -57,12 +57,12 @@ seek o = startConcurrency commandStages $ do
{- A copy is just a move that does not delete the source file.
- However, auto mode avoids unnecessary copies, and avoids getting or
- sending non-preferred content. -}
start :: CopyOptions -> FilePath -> Key -> CommandStart
start :: CopyOptions -> RawFilePath -> Key -> CommandStart
start o file key = stopUnless shouldCopy $
Command.Move.start (fromToOptions o) Command.Move.RemoveNever file key
where
shouldCopy
| autoMode o = want <||> numCopiesCheck file key (<)
| autoMode o = want <||> numCopiesCheck (fromRawFilePath file) key (<)
| otherwise = return True
want = case fromToOptions o of
Right (ToRemote dest) ->

View file

@ -85,9 +85,9 @@ fixupReq req@(Req {}) =
check rOldFile rOldMode (\r f -> r { rOldFile = f }) req
>>= check rNewFile rNewMode (\r f -> r { rNewFile = f })
where
check getfile getmode setfile r = case readTreeItemType (getmode r) of
check getfile getmode setfile r = case readTreeItemType (encodeBS' (getmode r)) of
Just TreeSymlink -> do
v <- getAnnexLinkTarget' (getfile r) False
v <- getAnnexLinkTarget' (toRawFilePath (getfile r)) False
case parseLinkTargetOrPointer =<< v of
Nothing -> return r
Just k -> withObjectLoc k (pure . setfile r)

View file

@ -54,7 +54,7 @@ parseDropFromOption = parseRemoteOption <$> strOption
seek :: DropOptions -> CommandSeek
seek o = startConcurrency transferStages $
case batchOption o of
Batch fmt -> batchFilesMatching fmt go
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
NoBatch -> withKeyOptions (keyOptions o) (autoMode o)
(commandAction . startKeys o)
(withFilesInGit (commandAction . go))
@ -62,7 +62,7 @@ seek o = startConcurrency transferStages $
where
go = whenAnnexed $ start o
start :: DropOptions -> FilePath -> Key -> CommandStart
start :: DropOptions -> RawFilePath -> Key -> CommandStart
start o file key = start' o key afile ai
where
afile = AssociatedFile (Just file)

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Command.EnableRemote where
import Command

View file

@ -22,5 +22,5 @@ cmd = noCommit $ noMessages $ dontCheck repoExists $
run :: Maybe Utility.Format.Format -> String -> Annex Bool
run format p = do
let k = fromMaybe (giveup "bad key") $ deserializeKey p
showFormatted format (serializeKey k) (keyVars k)
showFormatted format (serializeKey' k) (keyVars k)
return True

View file

@ -6,6 +6,7 @@
-}
{-# LANGUAGE TupleSections, BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
module Command.Export where
@ -70,7 +71,7 @@ optParser _ = ExportOptions
-- To handle renames which swap files, the exported file is first renamed
-- to a stable temporary name based on the key.
exportTempName :: ExportKey -> ExportLocation
exportTempName ek = mkExportLocation $
exportTempName ek = mkExportLocation $ toRawFilePath $
".git-annex-tmp-content-" ++ serializeKey (asKey (ek))
seek :: ExportOptions -> CommandSeek
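
The comment above explains how renames that swap two exported files are handled: each affected file is first renamed to a temporary name derived only from its key, and only afterwards renamed to its final location, so neither file can clobber the other. A minimal sketch of that two-phase plan (planRenames and its types are hypothetical, for illustration; the real work happens in the startMoveToTempName/startMoveFromTempName stages below):

import qualified Data.Map as M

type Loc = FilePath

tempName :: String -> Loc
tempName key = ".git-annex-tmp-content-" ++ key

-- For each changed key, move the old location to the key's temporary name
-- first, then move the temporary name to the new location. Because the
-- intermediate names depend only on the key, files that swap locations
-- never overwrite each other.
planRenames :: M.Map String (Loc, Loc) -> [(Loc, Loc)]
planRenames m = toTemp ++ fromTemp
  where
    changed = [ (k, old, new) | (k, (old, new)) <- M.toList m, old /= new ]
    toTemp = [ (old, tempName k) | (k, old, _) <- changed ]
    fromTemp = [ (tempName k, new) | (k, _, new) <- changed ]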
@ -258,9 +259,9 @@ startExport r db cvar allfilledvar ti = do
performExport r db ek af (Git.LsTree.sha ti) loc allfilledvar
)
where
loc = mkExportLocation f
loc = mkExportLocation (toRawFilePath f)
f = getTopFilePath (Git.LsTree.file ti)
af = AssociatedFile (Just f)
af = AssociatedFile (Just (toRawFilePath f))
notrecordedpresent ek = (||)
<$> liftIO (notElem loc <$> getExportedLocation db (asKey ek))
-- If content was removed from the remote, the export db
@ -316,14 +317,14 @@ startUnexport r db f shas = do
else starting ("unexport " ++ name r) (ActionItemOther (Just f')) $
performUnexport r db eks loc
where
loc = mkExportLocation f'
loc = mkExportLocation (toRawFilePath f')
f' = getTopFilePath f
startUnexport' :: Remote -> ExportHandle -> TopFilePath -> ExportKey -> CommandStart
startUnexport' r db f ek = starting ("unexport " ++ name r) (ActionItemOther (Just f')) $
performUnexport r db [ek] loc
where
loc = mkExportLocation f'
loc = mkExportLocation (toRawFilePath f')
f' = getTopFilePath f
-- Unlike a usual drop from a repository, this does not check that
@ -363,19 +364,19 @@ startRecoverIncomplete r db sha oldf
| otherwise = do
ek <- exportKey sha
let loc = exportTempName ek
starting ("unexport " ++ name r) (ActionItemOther (Just (fromExportLocation loc))) $ do
starting ("unexport " ++ name r) (ActionItemOther (Just (fromRawFilePath (fromExportLocation loc)))) $ do
liftIO $ removeExportedLocation db (asKey ek) oldloc
performUnexport r db [ek] loc
where
oldloc = mkExportLocation oldf'
oldloc = mkExportLocation (toRawFilePath oldf')
oldf' = getTopFilePath oldf
startMoveToTempName :: Remote -> ExportHandle -> TopFilePath -> ExportKey -> CommandStart
startMoveToTempName r db f ek = starting ("rename " ++ name r)
(ActionItemOther $ Just $ f' ++ " -> " ++ fromExportLocation tmploc)
(ActionItemOther $ Just $ f' ++ " -> " ++ fromRawFilePath (fromExportLocation tmploc))
(performRename r db ek loc tmploc)
where
loc = mkExportLocation f'
loc = mkExportLocation (toRawFilePath f')
f' = getTopFilePath f
tmploc = exportTempName ek
@ -383,10 +384,10 @@ startMoveFromTempName :: Remote -> ExportHandle -> ExportKey -> TopFilePath -> C
startMoveFromTempName r db ek f = do
let tmploc = exportTempName ek
stopUnless (liftIO $ elem tmploc <$> getExportedLocation db (asKey ek)) $
starting ("rename " ++ name r) (ActionItemOther (Just (fromExportLocation tmploc ++ " -> " ++ f'))) $
starting ("rename " ++ name r) (ActionItemOther (Just (fromRawFilePath (fromExportLocation tmploc) ++ " -> " ++ f'))) $
performRename r db ek tmploc loc
where
loc = mkExportLocation f'
loc = mkExportLocation (toRawFilePath f')
f' = getTopFilePath f
performRename :: Remote -> ExportHandle -> ExportKey -> ExportLocation -> ExportLocation -> CommandPerform
@ -468,7 +469,7 @@ filterPreferredContent r tree = logExportExcluded (uuid r) $ \logwriter -> do
-- Match filename relative to the
-- top of the tree.
let af = AssociatedFile $ Just $
getTopFilePath topf
toRawFilePath $ getTopFilePath topf
let mi = MatchingKey k af
ifM (checkMatcher' matcher mi mempty)
( return (Just ti)

View file

@ -9,6 +9,8 @@ module Command.Find where
import Data.Default
import qualified Data.Map as M
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Command
import Annex.Content
@ -57,29 +59,29 @@ seek o = case batchOption o of
(commandAction . startKeys o)
(withFilesInGit (commandAction . go))
=<< workTreeItems (findThese o)
Batch fmt -> batchFilesMatching fmt go
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
where
go = whenAnnexed $ start o
-- only files inAnnex are shown, unless the user has requested
-- others via a limit
start :: FindOptions -> FilePath -> Key -> CommandStart
start :: FindOptions -> RawFilePath -> Key -> CommandStart
start o file key =
stopUnless (limited <||> inAnnex key) $
startingCustomOutput key $ do
showFormatted (formatOption o) file $ ("file", file) : keyVars key
showFormatted (formatOption o) file $ ("file", fromRawFilePath file) : keyVars key
next $ return True
startKeys :: FindOptions -> (Key, ActionItem) -> CommandStart
startKeys o (key, ActionItemBranchFilePath (BranchFilePath _ topf) _) =
start o (getTopFilePath topf) key
start o (toRawFilePath (getTopFilePath topf)) key
startKeys _ _ = stop
showFormatted :: Maybe Utility.Format.Format -> String -> [(String, String)] -> Annex ()
showFormatted :: Maybe Utility.Format.Format -> S.ByteString -> [(String, String)] -> Annex ()
showFormatted format unformatted vars =
unlessM (showFullJSON $ JSONChunk vars) $
case format of
Nothing -> liftIO $ putStrLn unformatted
Nothing -> liftIO $ S8.putStrLn unformatted
Just formatter -> liftIO $ putStr $
Utility.Format.format formatter $
M.fromList vars
@ -87,14 +89,14 @@ showFormatted format unformatted vars =
keyVars :: Key -> [(String, String)]
keyVars key =
[ ("key", serializeKey key)
, ("backend", decodeBS $ formatKeyVariety $ keyVariety key)
, ("backend", decodeBS $ formatKeyVariety $ fromKey keyVariety key)
, ("bytesize", size show)
, ("humansize", size $ roughSize storageUnits True)
, ("keyname", decodeBS $ keyName key)
, ("keyname", decodeBS $ fromKey keyName key)
, ("hashdirlower", hashDirLower def key)
, ("hashdirmixed", hashDirMixed def key)
, ("mtime", whenavail show $ keyMtime key)
, ("mtime", whenavail show $ fromKey keyMtime key)
]
where
size c = whenavail c $ keySize key
size c = whenavail c $ fromKey keySize key
whenavail = maybe "unknown"

View file

@ -17,6 +17,7 @@ import Annex.Content
import Annex.Perms
import qualified Annex.Queue
import qualified Database.Keys
import qualified Utility.RawFilePath as R
#if ! defined(mingw32_HOST_OS)
import Utility.Touch
@ -37,13 +38,14 @@ seek ps = unlessM crippledFileSystem $ do
data FixWhat = FixSymlinks | FixAll
start :: FixWhat -> FilePath -> Key -> CommandStart
start :: FixWhat -> RawFilePath -> Key -> CommandStart
start fixwhat file key = do
currlink <- liftIO $ catchMaybeIO $ readSymbolicLink file
wantlink <- calcRepo $ gitAnnexLink file key
currlink <- liftIO $ catchMaybeIO $ R.readSymbolicLink file
wantlink <- calcRepo $ gitAnnexLink (fromRawFilePath file) key
case currlink of
Just l
| l /= wantlink -> fixby $ fixSymlink file wantlink
| l /= toRawFilePath wantlink -> fixby $
fixSymlink (fromRawFilePath file) wantlink
| otherwise -> stop
Nothing -> case fixwhat of
FixAll -> fixthin
@ -52,9 +54,9 @@ start fixwhat file key = do
fixby = starting "fix" (mkActionItem (key, file))
fixthin = do
obj <- calcRepo $ gitAnnexLocation key
stopUnless (isUnmodified key file <&&> isUnmodified key obj) $ do
stopUnless (isUnmodified key (fromRawFilePath file) <&&> isUnmodified key obj) $ do
thin <- annexThin <$> Annex.getGitConfig
fs <- liftIO $ catchMaybeIO $ getFileStatus file
fs <- liftIO $ catchMaybeIO $ R.getFileStatus file
os <- liftIO $ catchMaybeIO $ getFileStatus obj
case (linkCount <$> fs, linkCount <$> os, thin) of
(Just 1, Just 1, True) ->
@ -63,21 +65,21 @@ start fixwhat file key = do
fixby $ breakHardLink file key obj
_ -> stop
breakHardLink :: FilePath -> Key -> FilePath -> CommandPerform
breakHardLink :: RawFilePath -> Key -> FilePath -> CommandPerform
breakHardLink file key obj = do
replaceFile file $ \tmp -> do
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
replaceFile (fromRawFilePath file) $ \tmp -> do
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
unlessM (checkedCopyFile key obj tmp mode) $
error "unable to break hard link"
thawContent tmp
modifyContent obj $ freezeContent obj
Database.Keys.storeInodeCaches key [file]
Database.Keys.storeInodeCaches key [fromRawFilePath file]
next $ return True
makeHardLink :: FilePath -> Key -> CommandPerform
makeHardLink :: RawFilePath -> Key -> CommandPerform
makeHardLink file key = do
replaceFile file $ \tmp -> do
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
replaceFile (fromRawFilePath file) $ \tmp -> do
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
linkFromAnnex key tmp mode >>= \case
LinkAnnexFailed -> error "unable to make hard link"
_ -> noop

View file

@ -49,19 +49,19 @@ seekBatch fmt = batchInput fmt parse commandAction
parse s =
let (keyname, file) = separate (== ' ') s
in if not (null keyname) && not (null file)
then Right $ go file (mkKey keyname)
then Right $ go file (keyOpt keyname)
else Left "Expected pairs of key and filename"
go file key = starting "fromkey" (mkActionItem (key, file)) $
go file key = starting "fromkey" (mkActionItem (key, toRawFilePath file)) $
perform key file
start :: Bool -> (String, FilePath) -> CommandStart
start force (keyname, file) = do
let key = mkKey keyname
let key = keyOpt keyname
unless force $ do
inbackend <- inAnnex key
unless inbackend $ giveup $
"key ("++ keyname ++") is not present in backend (use --force to override this sanity check)"
starting "fromkey" (mkActionItem (key, file)) $
starting "fromkey" (mkActionItem (key, toRawFilePath file)) $
perform key file
-- From user input to a Key.
@ -71,8 +71,8 @@ start force (keyname, file) = do
-- For example, "WORM--a:a" parses as a uri. To disambiguate, check
-- the uri scheme, to see if it looks like the prefix of a key. This relies
-- on key backend names never containing a ':'.
mkKey :: String -> Key
mkKey s = case parseURI s of
keyOpt :: String -> Key
keyOpt s = case parseURI s of
Just u | not (isKeyPrefix (uriScheme u)) ->
Backend.URL.fromUrl s Nothing
_ -> case deserializeKey s of
@ -80,7 +80,7 @@ mkKey s = case parseURI s of
Nothing -> giveup $ "bad key/url " ++ s
perform :: Key -> FilePath -> CommandPerform
perform key file = lookupFileNotHidden file >>= \case
perform key file = lookupFileNotHidden (toRawFilePath file) >>= \case
Nothing -> ifM (liftIO $ doesFileExist file)
( hasothercontent
, do

View file

@ -35,6 +35,7 @@ import qualified Database.Fsck as FsckDb
import Types.CleanupActions
import Types.Key
import Types.ActionItem
import qualified Utility.RawFilePath as R
import Data.Time.Clock.POSIX
import System.Posix.Types (EpochTime)
@ -102,11 +103,11 @@ checkDeadRepo u =
whenM ((==) DeadTrusted <$> lookupTrust u) $
earlyWarning "Warning: Fscking a repository that is currently marked as dead."
start :: Maybe Remote -> Incremental -> FilePath -> Key -> CommandStart
start from inc file key = Backend.getBackend file key >>= \case
start :: Maybe Remote -> Incremental -> RawFilePath -> Key -> CommandStart
start from inc file key = Backend.getBackend (fromRawFilePath file) key >>= \case
Nothing -> stop
Just backend -> do
numcopies <- getFileNumCopies file
numcopies <- getFileNumCopies (fromRawFilePath file)
case from of
Nothing -> go $ perform key file backend numcopies
Just r -> go $ performRemote key afile backend numcopies r
@ -114,9 +115,9 @@ start from inc file key = Backend.getBackend file key >>= \case
go = runFsck inc (mkActionItem (key, afile)) key
afile = AssociatedFile (Just file)
perform :: Key -> FilePath -> Backend -> NumCopies -> Annex Bool
perform :: Key -> RawFilePath -> Backend -> NumCopies -> Annex Bool
perform key file backend numcopies = do
keystatus <- getKeyFileStatus key file
keystatus <- getKeyFileStatus key (fromRawFilePath file)
check
-- order matters
[ fixLink key file
@ -182,7 +183,7 @@ performRemote key afile backend numcopies remote =
startKey :: Maybe Remote -> Incremental -> (Key, ActionItem) -> NumCopies -> CommandStart
startKey from inc (key, ai) numcopies =
case Backend.maybeLookupBackendVariety (keyVariety key) of
case Backend.maybeLookupBackendVariety (fromKey keyVariety key) of
Nothing -> stop
Just backend -> runFsck inc ai key $
case from of
@ -203,18 +204,18 @@ check :: [Annex Bool] -> Annex Bool
check cs = and <$> sequence cs
{- Checks that symlinks point correctly to the annexed content. -}
fixLink :: Key -> FilePath -> Annex Bool
fixLink :: Key -> RawFilePath -> Annex Bool
fixLink key file = do
want <- calcRepo $ gitAnnexLink file key
want <- calcRepo $ gitAnnexLink (fromRawFilePath file) key
have <- getAnnexLinkTarget file
maybe noop (go want) have
return True
where
go want have
| want /= fromInternalGitPath (fromRawFilePath have) = do
| want /= fromRawFilePath (fromInternalGitPath have) = do
showNote "fixing link"
liftIO $ createDirectoryIfMissing True (parentDir file)
liftIO $ removeFile file
liftIO $ createDirectoryIfMissing True (parentDir (fromRawFilePath file))
liftIO $ removeFile (fromRawFilePath file)
addAnnexLink want file
| otherwise = noop
@ -244,9 +245,9 @@ verifyLocationLog key keystatus ai = do
- insecure hash is present. This should only be able to happen
- if the repository already contained the content before the
- config was set. -}
when (present && not (cryptographicallySecure (keyVariety key))) $
when (present && not (cryptographicallySecure (fromKey keyVariety key))) $
whenM (annexSecureHashesOnly <$> Annex.getGitConfig) $
warning $ "** Despite annex.securehashesonly being set, " ++ obj ++ " has content present in the annex using an insecure " ++ decodeBS (formatKeyVariety (keyVariety key)) ++ " key"
warning $ "** Despite annex.securehashesonly being set, " ++ obj ++ " has content present in the annex using an insecure " ++ decodeBS (formatKeyVariety (fromKey keyVariety key)) ++ " key"
verifyLocationLog' key ai present u (logChange key u)
@ -267,7 +268,7 @@ verifyLocationLog' key ai present u updatestatus = do
fix InfoMissing
warning $
"** Based on the location log, " ++
actionItemDesc ai ++
decodeBS' (actionItemDesc ai) ++
"\n** was expected to be present, " ++
"but its content is missing."
return False
@ -302,23 +303,23 @@ verifyRequiredContent key ai@(ActionItemAssociatedFile afile _) = do
missingrequired <- Remote.prettyPrintUUIDs "missingrequired" missinglocs
warning $
"** Required content " ++
actionItemDesc ai ++
decodeBS' (actionItemDesc ai) ++
" is missing from these repositories:\n" ++
missingrequired
return False
verifyRequiredContent _ _ = return True
{- Verifies the associated file records. -}
verifyAssociatedFiles :: Key -> KeyStatus -> FilePath -> Annex Bool
verifyAssociatedFiles :: Key -> KeyStatus -> RawFilePath -> Annex Bool
verifyAssociatedFiles key keystatus file = do
when (isKeyUnlockedThin keystatus) $ do
f <- inRepo $ toTopFilePath file
f <- inRepo $ toTopFilePath $ fromRawFilePath file
afs <- Database.Keys.getAssociatedFiles key
unless (getTopFilePath f `elem` map getTopFilePath afs) $
Database.Keys.addAssociatedFile key f
return True
verifyWorkTree :: Key -> FilePath -> Annex Bool
verifyWorkTree :: Key -> RawFilePath -> Annex Bool
verifyWorkTree key file = do
{- Make sure that a pointer file is replaced with its content,
- when the content is available. -}
@ -326,8 +327,8 @@ verifyWorkTree key file = do
case mk of
Just k | k == key -> whenM (inAnnex key) $ do
showNote "fixing worktree content"
replaceFile file $ \tmp -> do
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
replaceFile (fromRawFilePath file) $ \tmp -> do
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
ifM (annexThin <$> Annex.getGitConfig)
( void $ linkFromAnnex key tmp mode
, do
@ -335,7 +336,7 @@ verifyWorkTree key file = do
void $ checkedCopyFile key obj tmp mode
thawContent tmp
)
Database.Keys.storeInodeCaches key [file]
Database.Keys.storeInodeCaches key [fromRawFilePath file]
_ -> return ()
return True
@ -362,7 +363,7 @@ checkKeySizeRemote key remote ai localcopy =
checkKeySizeOr (badContentRemote remote localcopy) key localcopy ai
checkKeySizeOr :: (Key -> Annex String) -> Key -> FilePath -> ActionItem -> Annex Bool
checkKeySizeOr bad key file ai = case keySize key of
checkKeySizeOr bad key file ai = case fromKey keySize key of
Nothing -> return True
Just size -> do
size' <- liftIO $ getFileSize file
@ -375,7 +376,7 @@ checkKeySizeOr bad key file ai = case keySize key of
badsize a b = do
msg <- bad key
warning $ concat
[ actionItemDesc ai
[ decodeBS' (actionItemDesc ai)
, ": Bad file size ("
, compareSizes storageUnits True a b
, "); "
@ -393,11 +394,11 @@ checkKeyUpgrade backend key ai (AssociatedFile (Just file)) =
case Types.Backend.canUpgradeKey backend of
Just a | a key -> do
warning $ concat
[ actionItemDesc ai
[ decodeBS' (actionItemDesc ai)
, ": Can be upgraded to an improved key format. "
, "You can do so by running: git annex migrate --backend="
, decodeBS (formatKeyVariety (keyVariety key)) ++ " "
, file
, decodeBS (formatKeyVariety (fromKey keyVariety key)) ++ " "
, decodeBS' file
]
return True
_ -> return True
@ -448,7 +449,7 @@ checkBackendOr' bad backend key file ai postcheck =
unless ok $ do
msg <- bad key
warning $ concat
[ actionItemDesc ai
[ decodeBS' (actionItemDesc ai)
, ": Bad file content; "
, msg
]
@ -460,7 +461,7 @@ checkKeyNumCopies :: Key -> AssociatedFile -> NumCopies -> Annex Bool
checkKeyNumCopies key afile numcopies = do
let (desc, hasafile) = case afile of
AssociatedFile Nothing -> (serializeKey key, False)
AssociatedFile (Just af) -> (af, True)
AssociatedFile (Just af) -> (fromRawFilePath af, True)
locs <- loggedLocations key
(untrustedlocations, otherlocations) <- trustPartition UnTrusted locs
(deadlocations, safelocations) <- trustPartition DeadTrusted otherlocations
@ -680,7 +681,7 @@ getKeyFileStatus key file = do
s <- getKeyStatus key
case s of
KeyUnlockedThin -> catchDefaultIO KeyUnlockedThin $
ifM (isJust <$> isAnnexLink file)
ifM (isJust <$> isAnnexLink (toRawFilePath file))
( return KeyLockedThin
, return KeyUnlockedThin
)

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Command.FuzzTest where
import Command
@ -13,6 +15,7 @@ import qualified Git.Config
import Config
import Utility.ThreadScheduler
import Utility.DiskFree
import Git.Types (fromConfigKey)
import Data.Time.Clock
import System.Random (getStdRandom, random, randomR)
@ -32,25 +35,23 @@ start :: CommandStart
start = do
guardTest
logf <- fromRepo gitAnnexFuzzTestLogFile
showStart "fuzztest" logf
showStart "fuzztest" (toRawFilePath logf)
logh <- liftIO $ openFile logf WriteMode
void $ forever $ fuzz logh
stop
guardTest :: Annex ()
guardTest = unlessM (fromMaybe False . Git.Config.isTrue <$> getConfig key "") $
guardTest = unlessM (fromMaybe False . Git.Config.isTrue' <$> getConfig key mempty) $
giveup $ unlines
[ "Running fuzz tests *writes* to and *deletes* files in"
, "this repository, and pushes those changes to other"
, "repositories! This is a developer tool, not something"
, "to play with."
, ""
, "Refusing to run fuzz tests, since " ++ keyname ++ " is not set!"
, "Refusing to run fuzz tests, since " ++ fromConfigKey key ++ " is not set!"
]
where
key = annexConfig "eat-my-repository"
(ConfigKey keyname) = key
fuzz :: Handle -> Annex ()
fuzz logh = do

View file

@ -42,19 +42,19 @@ seek o = startConcurrency transferStages $ do
from <- maybe (pure Nothing) (Just <$$> getParsed) (getFrom o)
let go = whenAnnexed $ start o from
case batchOption o of
Batch fmt -> batchFilesMatching fmt go
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
NoBatch -> withKeyOptions (keyOptions o) (autoMode o)
(commandAction . startKeys from)
(withFilesInGit (commandAction . go))
=<< workTreeItems (getFiles o)
start :: GetOptions -> Maybe Remote -> FilePath -> Key -> CommandStart
start :: GetOptions -> Maybe Remote -> RawFilePath -> Key -> CommandStart
start o from file key = start' expensivecheck from key afile ai
where
afile = AssociatedFile (Just file)
ai = mkActionItem (key, afile)
expensivecheck
| autoMode o = numCopiesCheck file key (<)
| autoMode o = numCopiesCheck (fromRawFilePath file) key (<)
<||> wantGet False (Just key) afile
| otherwise = return True

View file

@ -117,7 +117,7 @@ seek o@(RemoteImportOptions {}) = startConcurrency commandStages $ do
startLocal :: GetFileMatcher -> DuplicateMode -> (FilePath, FilePath) -> CommandStart
startLocal largematcher mode (srcfile, destfile) =
ifM (liftIO $ isRegularFile <$> getSymbolicLinkStatus srcfile)
( starting "import" (ActionItemWorkTreeFile destfile)
( starting "import" (ActionItemWorkTreeFile (toRawFilePath destfile))
pickaction
, stop
)
@ -202,7 +202,7 @@ startLocal largematcher mode (srcfile, destfile) =
>>= maybe
stop
(\addedk -> next $ Command.Add.cleanup addedk True)
, next $ Command.Add.addSmall destfile
, next $ Command.Add.addSmall $ toRawFilePath destfile
)
notoverwriting why = do
warning $ "not overwriting existing " ++ destfile ++ " " ++ why

View file

@ -67,7 +67,7 @@ seek o = do
getFeed :: ImportFeedOptions -> Cache -> URLString -> CommandSeek
getFeed opts cache url = do
showStart "importfeed" url
showStart' "importfeed" (Just url)
downloadFeed url >>= \case
Nothing -> showEndResult =<< feedProblem url
"downloading the feed failed"
@ -222,7 +222,7 @@ performDownload opts cache todownload = case location todownload of
case dest of
Nothing -> return True
Just f -> do
showStart "addurl" url
showStart' "addurl" (Just url)
ks <- getter f
if null ks
then do
@ -244,7 +244,7 @@ performDownload opts cache todownload = case location todownload of
- to be re-downloaded. -}
makeunique url n file = ifM alreadyexists
( ifM forced
( ifAnnexed f checksameurl tryanother
( ifAnnexed (toRawFilePath f) checksameurl tryanother
, tryanother
)
, return $ Just f

View file

@ -50,23 +50,23 @@ import qualified Command.Unused
type Stat = StatState (Maybe (String, StatState String))
-- data about a set of keys
data KeyData = KeyData
data KeyInfo = KeyInfo
{ countKeys :: Integer
, sizeKeys :: Integer
, unknownSizeKeys :: Integer
, backendsKeys :: M.Map KeyVariety Integer
}
instance Sem.Semigroup KeyData where
a <> b = KeyData
instance Sem.Semigroup KeyInfo where
a <> b = KeyInfo
{ countKeys = countKeys a + countKeys b
, sizeKeys = sizeKeys a + sizeKeys b
, unknownSizeKeys = unknownSizeKeys a + unknownSizeKeys b
, backendsKeys = backendsKeys a <> backendsKeys b
}
instance Monoid KeyData where
mempty = KeyData 0 0 0 M.empty
instance Monoid KeyInfo where
mempty = KeyInfo 0 0 0 M.empty
data NumCopiesStats = NumCopiesStats
{ numCopiesVarianceMap :: M.Map Variance Integer
@ -82,9 +82,9 @@ instance Show Variance where
-- cached info that multiple Stats use
data StatInfo = StatInfo
{ presentData :: Maybe KeyData
, referencedData :: Maybe KeyData
, repoData :: M.Map UUID KeyData
{ presentData :: Maybe KeyInfo
, referencedData :: Maybe KeyInfo
, repoData :: M.Map UUID KeyInfo
, numCopiesStats :: Maybe NumCopiesStats
, infoOptions :: InfoOptions
}
@ -152,7 +152,7 @@ itemInfo o p = ifM (isdir p)
v' <- Remote.nameToUUID' p
case v' of
Right u -> uuidInfo o u
Left _ -> ifAnnexed p
Left _ -> ifAnnexed (toRawFilePath p)
(fileInfo o p)
(treeishInfo o p)
)
@ -161,7 +161,7 @@ itemInfo o p = ifM (isdir p)
noInfo :: String -> Annex ()
noInfo s = do
showStart "info" s
showStart "info" (encodeBS' s)
showNote $ "not a directory or an annexed file or a treeish or a remote or a uuid"
showEndFail
@ -311,8 +311,8 @@ showStat :: Stat -> StatState ()
showStat s = maybe noop calc =<< s
where
calc (desc, a) = do
(lift . showHeader) desc
lift . showRaw =<< a
(lift . showHeader . encodeBS') desc
lift . showRaw . encodeBS' =<< a
repo_list :: TrustLevel -> Stat
repo_list level = stat n $ nojson $ lift $ do
@ -435,7 +435,7 @@ transfer_list = stat desc $ nojson $ lift $ do
desc = "transfers in progress"
line uuidmap t i = unwords
[ formatDirection (transferDirection t) ++ "ing"
, actionItemDesc $ mkActionItem
, fromRawFilePath $ actionItemDesc $ mkActionItem
(transferKey t, associatedFile i)
, if transferDirection t == Upload then "to" else "from"
, maybe (fromUUID $ transferUUID t) Remote.name $
@ -444,7 +444,7 @@ transfer_list = stat desc $ nojson $ lift $ do
jsonify t i = object $ map (\(k, v) -> (packString k, v)) $
[ ("transfer", toJSON' (formatDirection (transferDirection t)))
, ("key", toJSON' (transferKey t))
, ("file", toJSON' afile)
, ("file", toJSON' (fromRawFilePath <$> afile))
, ("remote", toJSON' (fromUUID (transferUUID t) :: String))
]
where
@ -512,7 +512,7 @@ reposizes_total :: Stat
reposizes_total = simpleStat "combined size of repositories containing these files" $
showSizeKeys . mconcat . M.elems =<< cachedRepoData
cachedPresentData :: StatState KeyData
cachedPresentData :: StatState KeyInfo
cachedPresentData = do
s <- get
case presentData s of
@ -522,7 +522,7 @@ cachedPresentData = do
put s { presentData = Just v }
return v
cachedRemoteData :: UUID -> StatState KeyData
cachedRemoteData :: UUID -> StatState KeyInfo
cachedRemoteData u = do
s <- get
case M.lookup u (repoData s) of
@ -531,19 +531,19 @@ cachedRemoteData u = do
let combinedata d uk = finishCheck uk >>= \case
Nothing -> return d
Just k -> return $ addKey k d
v <- lift $ foldM combinedata emptyKeyData
v <- lift $ foldM combinedata emptyKeyInfo
=<< loggedKeysFor' u
put s { repoData = M.insert u v (repoData s) }
return v
cachedReferencedData :: StatState KeyData
cachedReferencedData :: StatState KeyInfo
cachedReferencedData = do
s <- get
case referencedData s of
Just v -> return v
Nothing -> do
!v <- lift $ Command.Unused.withKeysReferenced
emptyKeyData addKey
emptyKeyInfo addKey
put s { referencedData = Just v }
return v
@ -552,7 +552,7 @@ cachedNumCopiesStats :: StatState (Maybe NumCopiesStats)
cachedNumCopiesStats = numCopiesStats <$> get
-- currently only available for directory info
cachedRepoData :: StatState (M.Map UUID KeyData)
cachedRepoData :: StatState (M.Map UUID KeyInfo)
cachedRepoData = repoData <$> get
getDirStatInfo :: InfoOptions -> FilePath -> Annex StatInfo
@ -564,9 +564,9 @@ getDirStatInfo o dir = do
(update matcher fast)
return $ StatInfo (Just presentdata) (Just referenceddata) repodata (Just numcopiesstats) o
where
initial = (emptyKeyData, emptyKeyData, emptyNumCopiesStats, M.empty)
initial = (emptyKeyInfo, emptyKeyInfo, emptyNumCopiesStats, M.empty)
update matcher fast key file vs@(presentdata, referenceddata, numcopiesstats, repodata) =
ifM (matcher $ MatchingFile $ FileInfo file file)
ifM (matcher $ MatchingFile $ FileInfo file' file')
( do
!presentdata' <- ifM (inAnnex key)
( return $ addKey key presentdata
@ -577,11 +577,13 @@ getDirStatInfo o dir = do
then return (numcopiesstats, repodata)
else do
locs <- Remote.keyLocations key
nc <- updateNumCopiesStats file numcopiesstats locs
nc <- updateNumCopiesStats file' numcopiesstats locs
return (nc, updateRepoData key locs repodata)
return $! (presentdata', referenceddata', numcopiesstats', repodata')
, return vs
)
where
file' = fromRawFilePath file
getTreeStatInfo :: InfoOptions -> Git.Ref -> Annex (Maybe StatInfo)
getTreeStatInfo o r = do
@ -594,7 +596,7 @@ getTreeStatInfo o r = do
, return Nothing
)
where
initial = (emptyKeyData, emptyKeyData, M.empty)
initial = (emptyKeyInfo, emptyKeyInfo, M.empty)
go _ [] vs = return vs
go fast (l:ls) vs@(presentdata, referenceddata, repodata) = do
mk <- catKey (LsTree.sha l)
@ -613,33 +615,33 @@ getTreeStatInfo o r = do
return (updateRepoData key locs repodata)
go fast ls $! (presentdata', referenceddata', repodata')
emptyKeyData :: KeyData
emptyKeyData = KeyData 0 0 0 M.empty
emptyKeyInfo :: KeyInfo
emptyKeyInfo = KeyInfo 0 0 0 M.empty
emptyNumCopiesStats :: NumCopiesStats
emptyNumCopiesStats = NumCopiesStats M.empty
foldKeys :: [Key] -> KeyData
foldKeys = foldl' (flip addKey) emptyKeyData
foldKeys :: [Key] -> KeyInfo
foldKeys = foldl' (flip addKey) emptyKeyInfo
addKey :: Key -> KeyData -> KeyData
addKey key (KeyData count size unknownsize backends) =
KeyData count' size' unknownsize' backends'
addKey :: Key -> KeyInfo -> KeyInfo
addKey key (KeyInfo count size unknownsize backends) =
KeyInfo count' size' unknownsize' backends'
where
{- All calculations strict to avoid thunks when repeatedly
- applied to many keys. -}
!count' = count + 1
!backends' = M.insertWith (+) (keyVariety key) 1 backends
!backends' = M.insertWith (+) (fromKey keyVariety key) 1 backends
!size' = maybe size (+ size) ks
!unknownsize' = maybe (unknownsize + 1) (const unknownsize) ks
ks = keySize key
ks = fromKey keySize key
updateRepoData :: Key -> [UUID] -> M.Map UUID KeyData -> M.Map UUID KeyData
updateRepoData :: Key -> [UUID] -> M.Map UUID KeyInfo -> M.Map UUID KeyInfo
updateRepoData key locs m = m'
where
!m' = M.unionWith (\_old new -> new) m $
M.fromList $ zip locs (map update locs)
update loc = addKey key (fromMaybe emptyKeyData $ M.lookup loc m)
update loc = addKey key (fromMaybe emptyKeyInfo $ M.lookup loc m)
updateNumCopiesStats :: FilePath -> NumCopiesStats -> [UUID] -> Annex NumCopiesStats
updateNumCopiesStats file (NumCopiesStats m) locs = do
@ -649,7 +651,7 @@ updateNumCopiesStats file (NumCopiesStats m) locs = do
let !ret = NumCopiesStats m'
return ret
showSizeKeys :: KeyData -> StatState String
showSizeKeys :: KeyInfo -> StatState String
showSizeKeys d = do
sizer <- mkSizer
return $ total sizer ++ missingnote
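The KeyData→KeyInfo rename above keeps the Semigroup/Monoid shape, which is what lets reposizes_total roll up per-repository data with mconcat . M.elems. A self-contained sketch of that aggregation, using String in place of git-annex's KeyVariety and summing per-backend counts explicitly (not the project's actual module):

    {-# LANGUAGE BangPatterns #-}
    import qualified Data.Map as M
    import Data.List (foldl')

    -- KeyInfo as in the hunks above, simplified for a standalone example.
    data KeyInfo = KeyInfo
        { countKeys :: Integer
        , sizeKeys :: Integer
        , unknownSizeKeys :: Integer
        , backendsKeys :: M.Map String Integer
        } deriving (Show)

    instance Semigroup KeyInfo where
        a <> b = KeyInfo
            { countKeys = countKeys a + countKeys b
            , sizeKeys = sizeKeys a + sizeKeys b
            , unknownSizeKeys = unknownSizeKeys a + unknownSizeKeys b
            , backendsKeys = M.unionWith (+) (backendsKeys a) (backendsKeys b)
            }

    instance Monoid KeyInfo where
        mempty = KeyInfo 0 0 0 M.empty

    -- Strict accumulation of one key, mirroring addKey's bang patterns so a
    -- fold over many keys does not build thunks.
    addKey :: (String, Maybe Integer) -> KeyInfo -> KeyInfo
    addKey (variety, msize) (KeyInfo count size unknown backends) =
        KeyInfo count' size' unknown' backends'
      where
        !count' = count + 1
        !backends' = M.insertWith (+) variety 1 backends
        !size' = maybe size (+ size) msize
        !unknown' = maybe (unknown + 1) (const unknown) msize

    main :: IO ()
    main = do
        let perRepo = M.fromList
                [ ("uuid-a", foldl' (flip addKey) mempty [("SHA256E", Just 10), ("URL", Nothing)])
                , ("uuid-b", foldl' (flip addKey) mempty [("SHA256E", Just 4)])
                ]
        -- reposizes_total-style rollup over all repositories.
        print (sizeKeys (mconcat (M.elems perRepo)))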

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Command.InitRemote where
import qualified Data.Map as M

View file

@ -42,7 +42,7 @@ seek o = do
(commandAction . (whenAnnexed (start s)))
=<< workTreeItems (inprogressFiles o)
start :: S.Set Key -> FilePath -> Key -> CommandStart
start :: S.Set Key -> RawFilePath -> Key -> CommandStart
start s _file k
| S.member k s = start' k
| otherwise = stop

View file

@ -72,7 +72,7 @@ getList o
printHeader :: [(UUID, RemoteName, TrustLevel)] -> Annex ()
printHeader l = liftIO $ putStrLn $ lheader $ map (\(_, n, t) -> (n, t)) l
start :: [(UUID, RemoteName, TrustLevel)] -> FilePath -> Key -> CommandStart
start :: [(UUID, RemoteName, TrustLevel)] -> RawFilePath -> Key -> CommandStart
start l file key = do
ls <- S.fromList <$> keyLocations key
liftIO $ putStrLn $ format (map (\(u, _, t) -> (t, S.member u ls)) l) file
@ -88,8 +88,8 @@ lheader remotes = unlines (zipWith formatheader [0..] remotes) ++ pipes (length
trust UnTrusted = " (untrusted)"
trust _ = ""
format :: [(TrustLevel, Present)] -> FilePath -> String
format remotes file = thereMap ++ " " ++ file
format :: [(TrustLevel, Present)] -> RawFilePath -> String
format remotes file = thereMap ++ " " ++ fromRawFilePath file
where
thereMap = concatMap there remotes
there (UnTrusted, True) = "x"

View file

@ -32,7 +32,7 @@ seek ps = do
l <- workTreeItems ps
withFilesInGit (commandAction . (whenAnnexed startNew)) l
startNew :: FilePath -> Key -> CommandStart
startNew :: RawFilePath -> Key -> CommandStart
startNew file key = ifM (isJust <$> isAnnexLink file)
( stop
, starting "lock" (mkActionItem (key, file)) $
@ -43,7 +43,7 @@ startNew file key = ifM (isJust <$> isAnnexLink file)
| key' == key = cont
| otherwise = errorModified
go Nothing =
ifM (isUnmodified key file)
ifM (isUnmodified key (fromRawFilePath file))
( cont
, ifM (Annex.getState Annex.force)
( cont
@ -52,11 +52,11 @@ startNew file key = ifM (isJust <$> isAnnexLink file)
)
cont = performNew file key
performNew :: FilePath -> Key -> CommandPerform
performNew :: RawFilePath -> Key -> CommandPerform
performNew file key = do
lockdown =<< calcRepo (gitAnnexLocation key)
addLink file key
=<< withTSDelta (liftIO . genInodeCache file)
addLink (fromRawFilePath file) key
=<< withTSDelta (liftIO . genInodeCache' file)
next $ cleanupNew file key
where
lockdown obj = do
@ -70,7 +70,7 @@ performNew file key = do
-- It's ok if the file is hard linked to obj, but if some other
-- associated file is, we need to break that link to lock down obj.
breakhardlink obj = whenM (catchBoolIO $ (> 1) . linkCount <$> liftIO (getFileStatus obj)) $ do
mfc <- withTSDelta (liftIO . genInodeCache file)
mfc <- withTSDelta (liftIO . genInodeCache' file)
unlessM (sameInodeCache obj (maybeToList mfc)) $ do
modifyContent obj $ replaceFile obj $ \tmp -> do
unlessM (checkedCopyFile key obj tmp Nothing) $
@ -92,21 +92,21 @@ performNew file key = do
lostcontent = logStatus key InfoMissing
cleanupNew :: FilePath -> Key -> CommandCleanup
cleanupNew :: RawFilePath -> Key -> CommandCleanup
cleanupNew file key = do
Database.Keys.removeAssociatedFile key =<< inRepo (toTopFilePath file)
Database.Keys.removeAssociatedFile key =<< inRepo (toTopFilePath (fromRawFilePath file))
return True
startOld :: FilePath -> CommandStart
startOld :: RawFilePath -> CommandStart
startOld file = do
unlessM (Annex.getState Annex.force)
errorModified
starting "lock" (ActionItemWorkTreeFile file) $
performOld file
performOld :: FilePath -> CommandPerform
performOld :: RawFilePath -> CommandPerform
performOld file = do
Annex.Queue.addCommand "checkout" [Param "--"] [file]
Annex.Queue.addCommand "checkout" [Param "--"] [fromRawFilePath file]
next $ return True
errorModified :: a
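The breakhardlink helper above only needs to act when the object file's inode has more than one name. A tiny sketch of that link-count test in isolation, using the unix package; the object path is made up:

    import System.Posix.Files (getFileStatus, linkCount)
    import Control.Monad (when)

    -- True when the file shares its inode with at least one other name,
    -- which is the condition breakhardlink checks before copying the object.
    sharesInode :: FilePath -> IO Bool
    sharesInode f = (> 1) . linkCount <$> getFileStatus f

    main :: IO ()
    main = do
        shared <- sharesInode ".git/annex/objects/xx/yy/KEY/KEY"
        when shared $ putStrLn "hard linked elsewhere; would copy to break the link"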

View file

@ -92,10 +92,10 @@ seek o = do
([], True) -> commandAction (startAll o outputter)
(_, True) -> giveup "Cannot specify both files and --all"
start :: LogOptions -> (FilePath -> Outputter) -> FilePath -> Key -> CommandStart
start :: LogOptions -> (FilePath -> Outputter) -> RawFilePath -> Key -> CommandStart
start o outputter file key = do
(changes, cleanup) <- getKeyLog key (passthruOptions o)
showLogIncremental (outputter file) changes
showLogIncremental (outputter (fromRawFilePath file)) changes
void $ liftIO cleanup
stop
@ -201,7 +201,7 @@ getKeyLog key os = do
top <- fromRepo Git.repoPath
p <- liftIO $ relPathCwdToFile top
config <- Annex.getGitConfig
let logfile = p </> locationLogFile config key
let logfile = p </> fromRawFilePath (locationLogFile config key)
getGitLog [logfile] (Param "--remove-empty" : os)
{- Streams the git log for all git-annex branch changes. -}
@ -220,7 +220,7 @@ getGitLog fs os = do
[ Param $ Git.fromRef Annex.Branch.fullname
, Param "--"
] ++ map Param fs
return (parseGitRawLog ls, cleanup)
return (parseGitRawLog (map decodeBL' ls), cleanup)
-- Parses chunked git log --raw output, which looks something like:
--
@ -250,7 +250,7 @@ parseGitRawLog = parse epoch
(tss, cl') -> (parseTimeStamp tss, cl')
mrc = do
(old, new) <- parseRawChangeLine cl
key <- locationLogFileKey c2
key <- locationLogFileKey (toRawFilePath c2)
return $ RefChange
{ changetime = ts
, oldref = old
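getKeyLog above boils down to running git log over one location log file on the git-annex branch. Roughly the equivalent invocation, sketched with readProcess; the branch name and log path here are illustrative, and git-annex actually drives git through its own Git command layer:

    import System.Process (readProcess)

    -- History of a single location log file on the git-annex branch.
    keyLogRaw :: FilePath -> IO String
    keyLogRaw logfile =
        readProcess "git" ["log", "--remove-empty", "git-annex", "--", logfile] ""

    main :: IO ()
    main = putStr =<< keyLogRaw "aaa/bbb/SHA256E-s100--0000.log"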

View file

@ -29,11 +29,12 @@ run _ file = seekSingleGitFile file >>= \case
-- To support absolute filenames, pass through git ls-files.
-- But, this plumbing command does not recurse through directories.
seekSingleGitFile :: FilePath -> Annex (Maybe FilePath)
seekSingleGitFile :: FilePath -> Annex (Maybe RawFilePath)
seekSingleGitFile file = do
(l, cleanup) <- inRepo (Git.LsFiles.inRepo [file])
(l, cleanup) <- inRepo (Git.LsFiles.inRepo [toRawFilePath file])
r <- case l of
(f:[]) | takeFileName f == takeFileName file -> return (Just f)
(f:[]) | takeFileName (fromRawFilePath f) == takeFileName file ->
return (Just f)
_ -> return Nothing
void $ liftIO cleanup
return r
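As the comment notes, seekSingleGitFile leans on git ls-files to resolve an absolute filename to a tracked path without recursing into directories. A rough standalone equivalent that shells out directly instead of going through git-annex's Git plumbing; the sample path is hypothetical:

    import System.Process (readProcess)
    import System.FilePath (takeFileName)

    -- Ask git ls-files about one path and accept the answer only when
    -- exactly one entry with the same file name comes back.
    seekSingle :: FilePath -> IO (Maybe FilePath)
    seekSingle file = do
        out <- readProcess "git" ["ls-files", "--", file] ""
        return $ case lines out of
            [f] | takeFileName f == takeFileName file -> Just f
            _ -> Nothing

    main :: IO ()
    main = print =<< seekSingle "./README.md"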

View file

@ -67,7 +67,7 @@ optParser desc = MatchExpressionOptions
missingdata datadesc = bail $ "cannot match this expression without " ++ datadesc ++ " data"
-- When a key is provided, make its size also be provided.
addkeysize p = case providedKey p of
Right k -> case keySize k of
Right k -> case fromKey keySize k of
Just sz -> p { providedFileSize = Right sz }
Nothing -> p
Left _ -> p

View file

@ -92,7 +92,7 @@ seek o = case batchOption o of
)
_ -> giveup "--batch is currently only supported in --json mode"
start :: VectorClock -> MetaDataOptions -> FilePath -> Key -> CommandStart
start :: VectorClock -> MetaDataOptions -> RawFilePath -> Key -> CommandStart
start c o file k = startKeys c o (k, mkActionItem (k, afile))
where
afile = AssociatedFile (Just file)
@ -147,7 +147,7 @@ instance FromJSON MetaDataFields where
fieldsField :: T.Text
fieldsField = T.pack "fields"
parseJSONInput :: String -> Either String (Either FilePath Key, MetaData)
parseJSONInput :: String -> Either String (Either RawFilePath Key, MetaData)
parseJSONInput i = do
v <- eitherDecode (BU.fromString i)
let m = case itemAdded v of
@ -155,16 +155,16 @@ parseJSONInput i = do
Just (MetaDataFields m') -> m'
case (itemKey v, itemFile v) of
(Just k, _) -> Right (Right k, m)
(Nothing, Just f) -> Right (Left f, m)
(Nothing, Just f) -> Right (Left (toRawFilePath f), m)
(Nothing, Nothing) -> Left "JSON input is missing either file or key"
startBatch :: (Either FilePath Key, MetaData) -> CommandStart
startBatch :: (Either RawFilePath Key, MetaData) -> CommandStart
startBatch (i, (MetaData m)) = case i of
Left f -> do
mk <- lookupFile f
case mk of
Just k -> go k (mkActionItem (k, AssociatedFile (Just f)))
Nothing -> giveup $ "not an annexed file: " ++ f
Nothing -> giveup $ "not an annexed file: " ++ fromRawFilePath f
Right k -> go k (mkActionItem k)
where
go k ai = starting "metadata" ai $ do

View file

@ -28,16 +28,16 @@ cmd = withGlobalOptions [annexedMatchingOptions] $
seek :: CmdParams -> CommandSeek
seek = withFilesInGit (commandAction . (whenAnnexed start)) <=< workTreeItems
start :: FilePath -> Key -> CommandStart
start :: RawFilePath -> Key -> CommandStart
start file key = do
forced <- Annex.getState Annex.force
v <- Backend.getBackend file key
v <- Backend.getBackend (fromRawFilePath file) key
case v of
Nothing -> stop
Just oldbackend -> do
exists <- inAnnex key
newbackend <- maybe defaultBackend return
=<< chooseBackend file
=<< chooseBackend (fromRawFilePath file)
if (newbackend /= oldbackend || upgradableKey oldbackend key || forced) && exists
then starting "migrate" (mkActionItem (key, file)) $
perform file key oldbackend newbackend
@ -50,7 +50,7 @@ start file key = do
- - Something has changed in the backend, such as a bug fix.
-}
upgradableKey :: Backend -> Key -> Bool
upgradableKey backend key = isNothing (keySize key) || backendupgradable
upgradableKey backend key = isNothing (fromKey keySize key) || backendupgradable
where
backendupgradable = maybe False (\a -> a key) (canUpgradeKey backend)
@ -63,7 +63,7 @@ upgradableKey backend key = isNothing (keySize key) || backendupgradable
- data cannot get corrupted after the fsck but before the new key is
- generated.
-}
perform :: FilePath -> Key -> Backend -> Backend -> CommandPerform
perform :: RawFilePath -> Key -> Backend -> Backend -> CommandPerform
perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbackend)
where
go Nothing = stop
@ -85,7 +85,7 @@ perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbacken
genkey Nothing = do
content <- calcRepo $ gitAnnexLocation oldkey
let source = KeySource
{ keyFilename = file
{ keyFilename = fromRawFilePath file
, contentLocation = content
, inodeCache = Nothing
}

View file

@ -47,7 +47,7 @@ seek o = startConcurrency transferStages $
(withFilesInGit (commandAction . (whenAnnexed $ start o)))
=<< workTreeItems (mirrorFiles o)
start :: MirrorOptions -> FilePath -> Key -> CommandStart
start :: MirrorOptions -> RawFilePath -> Key -> CommandStart
start o file k = startKey o afile (k, ai)
where
afile = AssociatedFile (Just file)
@ -75,4 +75,4 @@ startKey o afile (key, ai) = case fromToOptions o of
where
getnumcopies = case afile of
AssociatedFile Nothing -> getNumCopies
AssociatedFile (Just af) -> getFileNumCopies af
AssociatedFile (Just af) -> getFileNumCopies (fromRawFilePath af)

View file

@ -57,13 +57,13 @@ seek :: MoveOptions -> CommandSeek
seek o = startConcurrency transferStages $ do
let go = whenAnnexed $ start (fromToOptions o) (removeWhen o)
case batchOption o of
Batch fmt -> batchFilesMatching fmt go
Batch fmt -> batchFilesMatching fmt (go . toRawFilePath)
NoBatch -> withKeyOptions (keyOptions o) False
(commandAction . startKey (fromToOptions o) (removeWhen o))
(withFilesInGit (commandAction . go))
=<< workTreeItems (moveFiles o)
start :: FromToHereOptions -> RemoveWhen -> FilePath -> Key -> CommandStart
start :: FromToHereOptions -> RemoveWhen -> RawFilePath -> Key -> CommandStart
start fromto removewhen f k = start' fromto removewhen afile k ai
where
afile = AssociatedFile (Just f)

View file

@ -137,7 +137,7 @@ send ups fs = do
mk <- lookupFile f
case mk of
Nothing -> noop
Just k -> withObjectLoc k (addlist f)
Just k -> withObjectLoc k (addlist (fromRawFilePath f))
liftIO $ hClose h
serverkey <- uftpKey

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Command.P2P where
import Command

View file

@ -53,11 +53,11 @@ seek ps = lockPreCommitHook $ do
(removeViewMetaData v)
addViewMetaData :: View -> ViewedFile -> Key -> CommandStart
addViewMetaData v f k = starting "metadata" (mkActionItem (k, f)) $
addViewMetaData v f k = starting "metadata" (mkActionItem (k, toRawFilePath f)) $
next $ changeMetaData k $ fromView v f
removeViewMetaData :: View -> ViewedFile -> Key -> CommandStart
removeViewMetaData v f k = starting "metadata" (mkActionItem (k, f)) $
removeViewMetaData v f k = starting "metadata" (mkActionItem (k, toRawFilePath f)) $
next $ changeMetaData k $ unsetMetaData $ fromView v f
changeMetaData :: Key -> MetaData -> CommandCleanup

View file

@ -19,6 +19,7 @@ import Git.FilePath
import qualified Database.Keys
import Annex.InodeSentinal
import Utility.InodeCache
import qualified Utility.RawFilePath as R
cmd :: Command
cmd = command "rekey" SectionPlumbing
@ -38,13 +39,13 @@ optParser desc = ReKeyOptions
-- Split on the last space, since a FilePath can contain whitespace,
-- but a Key very rarely does.
batchParser :: String -> Either String (FilePath, Key)
batchParser :: String -> Either String (RawFilePath, Key)
batchParser s = case separate (== ' ') (reverse s) of
(rk, rf)
| null rk || null rf -> Left "Expected: \"file key\""
| otherwise -> case deserializeKey (reverse rk) of
Nothing -> Left "bad key"
Just k -> Right (reverse rf, k)
Just k -> Right (toRawFilePath (reverse rf), k)
seek :: ReKeyOptions -> CommandSeek
seek o = case batchOption o of
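The batch input line is "file key", and since only the file half may contain spaces, batchParser above splits at the last space by working on the reversed string. A standalone sketch of that trick (git-annex's separate behaves like break plus dropping the separator); the key string below is only an example, and the real parser also rejects lines where either half is empty:

    -- Split "file key" at the last space by reversing the line first.
    splitLastSpace :: String -> (String, String)
    splitLastSpace s =
        let (rk, rf) = break (== ' ') (reverse s)
        in (reverse (drop 1 rf), reverse rk)

    main :: IO ()
    main = print (splitLastSpace "dir with spaces/file.bin SHA256E-s100--0000")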
@ -52,9 +53,9 @@ seek o = case batchOption o of
NoBatch -> withPairs (commandAction . start . parsekey) (reKeyThese o)
where
parsekey (file, skey) =
(file, fromMaybe (giveup "bad key") (deserializeKey skey))
(toRawFilePath file, fromMaybe (giveup "bad key") (deserializeKey skey))
start :: (FilePath, Key) -> CommandStart
start :: (RawFilePath, Key) -> CommandStart
start (file, newkey) = ifAnnexed file go stop
where
go oldkey
@ -62,19 +63,19 @@ start (file, newkey) = ifAnnexed file go stop
| otherwise = starting "rekey" (ActionItemWorkTreeFile file) $
perform file oldkey newkey
perform :: FilePath -> Key -> Key -> CommandPerform
perform :: RawFilePath -> Key -> Key -> CommandPerform
perform file oldkey newkey = do
ifM (inAnnex oldkey)
( unlessM (linkKey file oldkey newkey) $
giveup "failed creating link from old to new key"
, unlessM (Annex.getState Annex.force) $
giveup $ file ++ " is not available (use --force to override)"
giveup $ fromRawFilePath file ++ " is not available (use --force to override)"
)
next $ cleanup file oldkey newkey
{- Make a hard link to the old key content (when supported),
- to avoid wasting disk space. -}
linkKey :: FilePath -> Key -> Key -> Annex Bool
linkKey :: RawFilePath -> Key -> Key -> Annex Bool
linkKey file oldkey newkey = ifM (isJust <$> isAnnexLink file)
{- If the object file is already hardlinked to elsewhere, a hard
- link won't be made by getViaTmpFromDisk, but a copy instead.
@ -89,40 +90,40 @@ linkKey file oldkey newkey = ifM (isJust <$> isAnnexLink file)
- it's hard linked to the old key, that link must be broken. -}
oldobj <- calcRepo (gitAnnexLocation oldkey)
v <- tryNonAsync $ do
st <- liftIO $ getFileStatus file
st <- liftIO $ R.getFileStatus file
when (linkCount st > 1) $ do
freezeContent oldobj
replaceFile file $ \tmp -> do
replaceFile (fromRawFilePath file) $ \tmp -> do
unlessM (checkedCopyFile oldkey oldobj tmp Nothing) $
error "can't lock old key"
thawContent tmp
ic <- withTSDelta (liftIO . genInodeCache file)
ic <- withTSDelta (liftIO . genInodeCache' file)
case v of
Left e -> do
warning (show e)
return False
Right () -> do
r <- linkToAnnex newkey file ic
r <- linkToAnnex newkey (fromRawFilePath file) ic
return $ case r of
LinkAnnexFailed -> False
LinkAnnexOk -> True
LinkAnnexNoop -> True
)
cleanup :: FilePath -> Key -> Key -> CommandCleanup
cleanup :: RawFilePath -> Key -> Key -> CommandCleanup
cleanup file oldkey newkey = do
ifM (isJust <$> isAnnexLink file)
( do
-- Update symlink to use the new key.
liftIO $ removeFile file
addLink file newkey Nothing
liftIO $ removeFile (fromRawFilePath file)
addLink (fromRawFilePath file) newkey Nothing
, do
mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus file
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
liftIO $ whenM (isJust <$> isPointerFile file) $
writePointerFile file newkey mode
stagePointerFile file mode =<< hashPointerFile newkey
Database.Keys.removeAssociatedFile oldkey
=<< inRepo (toTopFilePath file)
=<< inRepo (toTopFilePath (fromRawFilePath file))
)
whenM (inAnnex newkey) $
logStatus newkey InfoPresent

View file

@ -11,7 +11,7 @@ module Command.RegisterUrl where
import Command
import Logs.Web
import Command.FromKey (mkKey)
import Command.FromKey (keyOpt)
import qualified Remote
cmd :: Command
@ -41,7 +41,7 @@ seek o = case (batchOption o, keyUrlPairs o) of
start :: [String] -> CommandStart
start (keyname:url:[]) =
starting "registerurl" (ActionItemOther (Just url)) $ do
let key = mkKey keyname
let key = keyOpt keyname
perform key url
start _ = giveup "specify a key and an url"
@ -55,7 +55,7 @@ massAdd fmt = go True =<< map (separate (== ' ')) <$> batchLines fmt
where
go status [] = next $ return status
go status ((keyname,u):rest) | not (null keyname) && not (null u) = do
let key = mkKey keyname
let key = keyOpt keyname
ok <- perform' key u
let !status' = status && ok
go status' rest

View file

@ -42,7 +42,7 @@ seek os
startSrcDest :: [FilePath] -> CommandStart
startSrcDest (src:dest:[])
| src == dest = stop
| otherwise = notAnnexed src $ ifAnnexed dest go stop
| otherwise = notAnnexed src $ ifAnnexed (toRawFilePath dest) go stop
where
go key = starting "reinject" (ActionItemOther (Just src)) $
ifM (verifyKeyContent RetrievalAllKeysSecure DefaultVerify UnVerified key src)
@ -65,7 +65,7 @@ startKnown src = notAnnexed src $
)
notAnnexed :: FilePath -> CommandStart -> CommandStart
notAnnexed src = ifAnnexed src $
notAnnexed src = ifAnnexed (toRawFilePath src) $
giveup $ "cannot used annexed file as src: " ++ src
perform :: FilePath -> Key -> CommandPerform

View file

@ -42,9 +42,11 @@ batchParser s = case separate (== ' ') (reverse s) of
| otherwise -> Right (reverse rf, reverse ru)
start :: (FilePath, URLString) -> CommandStart
start (file, url) = flip whenAnnexed file $ \_ key ->
starting "rmurl" (mkActionItem (key, AssociatedFile (Just file))) $
start (file, url) = flip whenAnnexed file' $ \_ key ->
starting "rmurl" (mkActionItem (key, AssociatedFile (Just file'))) $
next $ cleanup url key
where
file' = toRawFilePath file
cleanup :: String -> Key -> CommandCleanup
cleanup url key = do

View file

@ -46,10 +46,11 @@ start key = do
fieldTransfer :: Direction -> Key -> (MeterUpdate -> Annex Bool) -> CommandStart
fieldTransfer direction key a = do
liftIO $ debugM "fieldTransfer" "transfer start"
afile <- AssociatedFile <$> Fields.getField Fields.associatedFile
afile <- AssociatedFile . (fmap toRawFilePath)
<$> Fields.getField Fields.associatedFile
ok <- maybe (a $ const noop)
-- Using noRetry here because we're the sender.
(\u -> runner (Transfer direction (toUUID u) key) afile noRetry a)
(\u -> runner (Transfer direction (toUUID u) (fromKey id key)) afile noRetry a)
=<< Fields.getField Fields.remoteUUID
liftIO $ debugM "fieldTransfer" "transfer done"
liftIO $ exitBool ok

View file

@ -21,11 +21,11 @@ seek = withWords (commandAction . start)
start :: [String] -> CommandStart
start (keyname:file:[]) = starting "setkey" (ActionItemOther (Just file)) $
perform file (mkKey keyname)
perform file (keyOpt keyname)
start _ = giveup "specify a key and a content file"
mkKey :: String -> Key
mkKey = fromMaybe (giveup "bad key") . deserializeKey
keyOpt :: String -> Key
keyOpt = fromMaybe (giveup "bad key") . deserializeKey
perform :: FilePath -> Key -> CommandPerform
perform file key = do

View file

@ -86,9 +86,9 @@ clean file = do
( liftIO $ L.hPut stdout b
, case parseLinkTargetOrPointerLazy b of
Just k -> do
getMoveRaceRecovery k file
getMoveRaceRecovery k (toRawFilePath file)
liftIO $ L.hPut stdout b
Nothing -> go b =<< catKeyFile file
Nothing -> go b =<< catKeyFile (toRawFilePath file)
)
stop
where
@ -119,7 +119,7 @@ clean file = do
-- Look up the backend that was used for this file
-- before, so that when git re-cleans a file its
-- backend does not change.
let oldbackend = maybe Nothing (maybeLookupBackendVariety . keyVariety) oldkey
let oldbackend = maybe Nothing (maybeLookupBackendVariety . fromKey keyVariety) oldkey
-- Can't restage associated files because git add
-- runs this and has the index locked.
let norestage = Restage False
@ -187,10 +187,10 @@ emitPointer = S.putStr . formatPointer
-- This also handles the case where a copy of a pointer file is made,
-- then git-annex gets the content, and later git add is run on
-- the pointer copy. It will then be populated with the content.
getMoveRaceRecovery :: Key -> FilePath -> Annex ()
getMoveRaceRecovery :: Key -> RawFilePath -> Annex ()
getMoveRaceRecovery k file = void $ tryNonAsync $
whenM (inAnnex k) $ do
obj <- calcRepo (gitAnnexLocation k)
obj <- toRawFilePath <$> calcRepo (gitAnnexLocation k)
-- Cannot restage because git add is running and has
-- the index locked.
populatePointerFile (Restage False) k obj file >>= \case
@ -204,11 +204,11 @@ update = do
updateSmudged :: Restage -> Annex ()
updateSmudged restage = streamSmudged $ \k topf -> do
f <- fromRepo $ fromTopFilePath topf
f <- toRawFilePath <$> fromRepo (fromTopFilePath topf)
whenM (inAnnex k) $ do
obj <- calcRepo (gitAnnexLocation k)
obj <- toRawFilePath <$> calcRepo (gitAnnexLocation k)
unlessM (isJust <$> populatePointerFile restage k obj f) $
liftIO (isPointerFile f) >>= \case
Just k' | k' == k -> toplevelWarning False $
"unable to populate worktree file " ++ f
"unable to populate worktree file " ++ fromRawFilePath f
_ -> noop

View file

@ -7,6 +7,7 @@
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Command.Sync (
cmd,

View file

@ -107,14 +107,14 @@ perform rs unavailrs exportr ks = do
next $ cleanup rs ks ok
where
desc r' k = intercalate "; " $ map unwords
[ [ "key size", show (keySize k) ]
[ [ "key size", show (fromKey keySize k) ]
, [ show (getChunkConfig (Remote.config r')) ]
, ["encryption", fromMaybe "none" (M.lookup "encryption" (Remote.config r'))]
]
descexport k1 k2 = intercalate "; " $ map unwords
[ [ "exporttree=yes" ]
, [ "key1 size", show (keySize k1) ]
, [ "key2 size", show (keySize k2) ]
, [ "key1 size", show (fromKey keySize k1) ]
, [ "key2 size", show (fromKey keySize k2) ]
]
adjustChunkSize :: Remote -> Int -> Annex (Maybe Remote)
@ -199,7 +199,7 @@ test st r k = catMaybes
Annex.eval st (Annex.setOutput QuietOutput >> a) @? "failed"
present b = check ("present " ++ show b) $
(== Right b) <$> Remote.hasKey r k
fsck = case maybeLookupBackendVariety (keyVariety k) of
fsck = case maybeLookupBackendVariety (fromKey keyVariety k) of
Nothing -> return True
Just b -> case Backend.verifyKeyContent b of
Nothing -> return True
@ -236,7 +236,7 @@ testExportTree st (Just _) ea k1 k2 =
]
where
testexportdirectory = "testremote-export"
testexportlocation = mkExportLocation (testexportdirectory </> "location")
testexportlocation = mkExportLocation (toRawFilePath (testexportdirectory </> "location"))
check desc a = testCase desc $
Annex.eval st (Annex.setOutput QuietOutput >> a) @? "failed"
storeexport k = do
@ -252,7 +252,7 @@ testExportTree st (Just _) ea k1 k2 =
removeexport k = Remote.removeExport ea k testexportlocation
removeexportdirectory = case Remote.removeExportDirectory ea of
Nothing -> return True
Just a -> a (mkExportDirectory testexportdirectory)
Just a -> a (mkExportDirectory (toRawFilePath testexportdirectory))
testUnavailable :: Annex.AnnexState -> Remote -> Key -> [TestTree]
testUnavailable st r k =
@ -326,7 +326,7 @@ randKey sz = withTmpFile "randkey" $ \f h -> do
return k
getReadonlyKey :: Remote -> FilePath -> Annex Key
getReadonlyKey r f = lookupFile f >>= \case
getReadonlyKey r f = lookupFile (toRawFilePath f) >>= \case
Nothing -> giveup $ f ++ " is not an annexed file"
Just k -> do
unlessM (inAnnex k) $

Some files were not shown because too many files have changed in this diff.