Merge branch 'bs' into sqlite-bs
commit d5628a16b8
137 changed files with 827 additions and 516 deletions
@@ -112,8 +112,8 @@ adjustToSymlink = adjustToSymlink' gitAnnexLink
 adjustToSymlink' :: (FilePath -> Key -> Git.Repo -> GitConfig -> IO FilePath) -> TreeItem -> Annex (Maybe TreeItem)
 adjustToSymlink' gitannexlink ti@(TreeItem f _m s) = catKey s >>= \case
 Just k -> do
-absf <- inRepo $ \r -> absPath $
-fromTopFilePath f r
+absf <- inRepo $ \r -> absPath $
+fromRawFilePath $ fromTopFilePath f r
 linktarget <- calcRepo $ gitannexlink absf k
 Just . TreeItem f (fromTreeItemType TreeSymlink)
 <$> hashSymlink linktarget
@@ -376,7 +376,7 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm
 -}
 changestomerge (Just updatedorig) = withOtherTmp $ \othertmpdir -> do
 tmpwt <- fromRepo gitAnnexMergeDir
-git_dir <- fromRepo Git.localGitDir
+git_dir <- fromRawFilePath <$> fromRepo Git.localGitDir
 withTmpDirIn othertmpdir "git" $ \tmpgit -> withWorkTreeRelated tmpgit $
 withemptydir tmpwt $ withWorkTree tmpwt $ do
 liftIO $ writeFile (tmpgit </> "HEAD") (fromRef updatedorig)
@@ -580,7 +580,7 @@ reverseAdjustedTree basis adj csha = do
 where
 m = M.fromList $ map (\i@(TreeItem f' _ _) -> (norm f', i)) $
 map diffTreeToTreeItem changes
-norm = normalise . getTopFilePath
+norm = normalise . fromRawFilePath . getTopFilePath

 diffTreeToTreeItem :: Git.DiffTree.DiffTreeItem -> TreeItem
 diffTreeToTreeItem dti = TreeItem

@@ -5,6 +5,8 @@
 - Licensed under the GNU AGPL version 3 or higher.
 -}

+{-# LANGUAGE OverloadedStrings #-}
+
 module Annex.AutoMerge
 ( autoMergeFrom
 , resolveMerge
@@ -104,7 +106,7 @@ autoMergeFrom branch currbranch mergeconfig canresolvemerge commitmode = do
 -}
 resolveMerge :: Maybe Git.Ref -> Git.Ref -> Bool -> Annex Bool
 resolveMerge us them inoverlay = do
-top <- toRawFilePath <$> if inoverlay
+top <- if inoverlay
 then pure "."
 else fromRepo Git.repoPath
 (fs, cleanup) <- inRepo (LsFiles.unmerged [top])
@@ -196,7 +198,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do

 stagefile :: FilePath -> Annex FilePath
 stagefile f
-| inoverlay = (</> f) <$> fromRepo Git.repoPath
+| inoverlay = (</> f) . fromRawFilePath <$> fromRepo Git.repoPath
 | otherwise = pure f

 makesymlink key dest = do
@@ -219,7 +221,7 @@ resolveMerge' unstagedmap (Just us) them inoverlay u = do
 stagePointerFile dest' destmode =<< hashPointerFile key
 unless inoverlay $
 Database.Keys.addAssociatedFile key
-=<< inRepo (toTopFilePath dest)
+=<< inRepo (toTopFilePath (toRawFilePath dest))

 withworktree f a = a f

@@ -332,10 +334,9 @@ inodeMap :: Annex ([RawFilePath], IO Bool) -> Annex InodeMap
 inodeMap getfiles = do
 (fs, cleanup) <- getfiles
 fsis <- forM fs $ \f -> do
-let f' = fromRawFilePath f
-mi <- withTSDelta (liftIO . genInodeCache f')
+mi <- withTSDelta (liftIO . genInodeCache f)
 return $ case mi of
 Nothing -> Nothing
-Just i -> Just (inodeCacheToKey Strongly i, f')
+Just i -> Just (inodeCacheToKey Strongly i, fromRawFilePath f)
 void $ liftIO cleanup
 return $ M.fromList $ catMaybes fsis

@@ -482,7 +482,7 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do
 sha <- Git.HashObject.hashFile h path
 hPutStrLn jlogh file
 streamer $ Git.UpdateIndex.updateIndexLine
-sha TreeFile (asTopFilePath $ fileJournal file)
+sha TreeFile (asTopFilePath $ fileJournal $ toRawFilePath file)
 genstream dir h jh jlogh streamer
 -- Clean up the staged files, as listed in the temp log file.
 -- The temp file is used to avoid needing to buffer all the
@@ -600,7 +600,7 @@ performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
 else do
 sha <- hashBlob content'
 Annex.Queue.addUpdateIndex $ Git.UpdateIndex.pureStreamer $
-Git.UpdateIndex.updateIndexLine sha TreeFile (asTopFilePath (fromRawFilePath file))
+Git.UpdateIndex.updateIndexLine sha TreeFile (asTopFilePath file)
 apply rest file content'

 checkBranchDifferences :: Git.Ref -> Annex ()

@@ -76,7 +76,7 @@ watchChangedRefs = do
 chan <- liftIO $ newTBMChanIO 100

 g <- gitRepo
-let refdir = Git.localGitDir g </> "refs"
+let refdir = fromRawFilePath (Git.localGitDir g) </> "refs"
 liftIO $ createDirectoryIfMissing True refdir

 let notifyhook = Just $ notifyHook chan

@@ -89,17 +89,18 @@ import Annex.Content.LowLevel
 import Annex.Content.PointerFile
 import Annex.Concurrent
 import Types.WorkerPool
+import qualified Utility.RawFilePath as R

 {- Checks if a given key's content is currently present. -}
 inAnnex :: Key -> Annex Bool
-inAnnex key = inAnnexCheck key $ liftIO . doesFileExist
+inAnnex key = inAnnexCheck key $ liftIO . R.doesPathExist

 {- Runs an arbitrary check on a key's content. -}
-inAnnexCheck :: Key -> (FilePath -> Annex Bool) -> Annex Bool
+inAnnexCheck :: Key -> (RawFilePath -> Annex Bool) -> Annex Bool
 inAnnexCheck key check = inAnnex' id False check key

 {- inAnnex that performs an arbitrary check of the key's content. -}
-inAnnex' :: (a -> Bool) -> a -> (FilePath -> Annex a) -> Key -> Annex a
+inAnnex' :: (a -> Bool) -> a -> (RawFilePath -> Annex a) -> Key -> Annex a
 inAnnex' isgood bad check key = withObjectLoc key $ \loc -> do
 r <- check loc
 if isgood r
@@ -120,12 +121,15 @@ inAnnex' isgood bad check key = withObjectLoc key $ \loc -> do
 {- Like inAnnex, checks if the object file for a key exists,
 - but there are no guarantees it has the right content. -}
 objectFileExists :: Key -> Annex Bool
-objectFileExists key = calcRepo (gitAnnexLocation key) >>= liftIO . doesFileExist
+objectFileExists key =
+calcRepo (gitAnnexLocation key)
+>>= liftIO . R.doesPathExist

 {- A safer check; the key's content must not only be present, but
 - is not in the process of being removed. -}
 inAnnexSafe :: Key -> Annex (Maybe Bool)
-inAnnexSafe key = inAnnex' (fromMaybe True) (Just False) go key
+inAnnexSafe key =
+inAnnex' (fromMaybe True) (Just False) (go . fromRawFilePath) key
 where
 is_locked = Nothing
 is_unlocked = Just True
@@ -246,7 +250,7 @@ winLocker _ _ Nothing = return Nothing

 lockContentUsing :: ContentLocker -> Key -> Annex a -> Annex a
 lockContentUsing locker key a = do
-contentfile <- calcRepo $ gitAnnexLocation key
+contentfile <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
 lockfile <- contentLockFile key
 bracket
 (lock contentfile lockfile)
@@ -474,18 +478,20 @@ moveAnnex key src = ifM (checkSecureHashes key)
 , return False
 )
 where
-storeobject dest = ifM (liftIO $ doesFileExist dest)
+storeobject dest = ifM (liftIO $ R.doesPathExist dest)
 ( alreadyhave
-, modifyContent dest $ do
+, modifyContent dest' $ do
 freezeContent src
-liftIO $ moveFile src dest
+liftIO $ moveFile src dest'
 g <- Annex.gitRepo
 fs <- map (`fromTopFilePath` g)
 <$> Database.Keys.getAssociatedFiles key
 unless (null fs) $ do
-ics <- mapM (populatePointerFile (Restage True) key (toRawFilePath dest) . toRawFilePath) fs
+ics <- mapM (populatePointerFile (Restage True) key dest) fs
 Database.Keys.storeInodeCaches' key [dest] (catMaybes ics)
 )
 where
+dest' = fromRawFilePath dest
 alreadyhave = liftIO $ removeFile src

 checkSecureHashes :: Key -> Annex Bool
@@ -505,7 +511,7 @@ data LinkAnnexResult = LinkAnnexOk | LinkAnnexFailed | LinkAnnexNoop
 linkToAnnex :: Key -> FilePath -> Maybe InodeCache -> Annex LinkAnnexResult
 linkToAnnex key src srcic = ifM (checkSecureHashes key)
 ( do
-dest <- calcRepo (gitAnnexLocation key)
+dest <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
 modifyContent dest $ linkAnnex To key src srcic dest Nothing
 , return LinkAnnexFailed
 )
@@ -515,7 +521,7 @@ linkFromAnnex :: Key -> FilePath -> Maybe FileMode -> Annex LinkAnnexResult
 linkFromAnnex key dest destmode = do
 src <- calcRepo (gitAnnexLocation key)
 srcic <- withTSDelta (liftIO . genInodeCache src)
-linkAnnex From key src srcic dest destmode
+linkAnnex From key (fromRawFilePath src) srcic dest destmode

 data FromTo = From | To

@@ -534,7 +540,7 @@ data FromTo = From | To
 linkAnnex :: FromTo -> Key -> FilePath -> Maybe InodeCache -> FilePath -> Maybe FileMode -> Annex LinkAnnexResult
 linkAnnex _ _ _ Nothing _ _ = return LinkAnnexFailed
 linkAnnex fromto key src (Just srcic) dest destmode =
-withTSDelta (liftIO . genInodeCache dest) >>= \case
+withTSDelta (liftIO . genInodeCache dest') >>= \case
 Just destic -> do
 cs <- Database.Keys.getInodeCaches key
 if null cs
@@ -551,12 +557,13 @@ linkAnnex fromto key src (Just srcic) dest destmode =
 Linked -> noop
 checksrcunchanged
 where
+dest' = toRawFilePath dest
 failed = do
 Database.Keys.addInodeCaches key [srcic]
 return LinkAnnexFailed
-checksrcunchanged = withTSDelta (liftIO . genInodeCache src) >>= \case
+checksrcunchanged = withTSDelta (liftIO . genInodeCache (toRawFilePath src)) >>= \case
 Just srcic' | compareStrong srcic srcic' -> do
-destic <- withTSDelta (liftIO . genInodeCache dest)
+destic <- withTSDelta (liftIO . genInodeCache dest')
 Database.Keys.addInodeCaches key $
 catMaybes [destic, Just srcic]
 return LinkAnnexOk
@@ -567,7 +574,7 @@ linkAnnex fromto key src (Just srcic) dest destmode =
 {- Removes the annex object file for a key. Lowlevel. -}
 unlinkAnnex :: Key -> Annex ()
 unlinkAnnex key = do
-obj <- calcRepo $ gitAnnexLocation key
+obj <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
 modifyContent obj $ do
 secureErase obj
 liftIO $ nukeFile obj
@@ -616,15 +623,15 @@ prepSendAnnex key = withObjectLoc key $ \f -> do
 else pure cache
 return $ if null cache'
 then Nothing
-else Just (f, sameInodeCache f cache')
+else Just (fromRawFilePath f, sameInodeCache f cache')

 {- Performs an action, passing it the location to use for a key's content. -}
-withObjectLoc :: Key -> (FilePath -> Annex a) -> Annex a
+withObjectLoc :: Key -> (RawFilePath -> Annex a) -> Annex a
 withObjectLoc key a = a =<< calcRepo (gitAnnexLocation key)

 cleanObjectLoc :: Key -> Annex () -> Annex ()
 cleanObjectLoc key cleaner = do
-file <- calcRepo $ gitAnnexLocation key
+file <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
 void $ tryIO $ thawContentDir file
 cleaner
 liftIO $ removeparents file (3 :: Int)
@@ -640,8 +647,9 @@ cleanObjectLoc key cleaner = do
 removeAnnex :: ContentRemovalLock -> Annex ()
 removeAnnex (ContentRemovalLock key) = withObjectLoc key $ \file ->
 cleanObjectLoc key $ do
-secureErase file
-liftIO $ nukeFile file
+let file' = fromRawFilePath file
+secureErase file'
+liftIO $ nukeFile file'
 g <- Annex.gitRepo
 mapM_ (\f -> void $ tryIO $ resetpointer $ fromTopFilePath f g)
 =<< Database.Keys.getAssociatedFiles key
@@ -650,12 +658,12 @@ removeAnnex (ContentRemovalLock key) = withObjectLoc key $ \file ->
 -- Check associated pointer file for modifications, and reset if
 -- it's unmodified.
 resetpointer file = ifM (isUnmodified key file)
-( depopulatePointerFile key (toRawFilePath file)
+( depopulatePointerFile key file
 -- Modified file, so leave it alone.
 -- If it was a hard link to the annex object,
 -- that object might have been frozen as part of the
 -- removal process, so thaw it.
-, void $ tryIO $ thawContent file
+, void $ tryIO $ thawContent $ fromRawFilePath file
 )

 {- Check if a file contains the unmodified content of the key.
@@ -663,12 +671,12 @@ removeAnnex (ContentRemovalLock key) = withObjectLoc key $ \file ->
 - The expensive way to tell is to do a verification of its content.
 - The cheaper way is to see if the InodeCache for the key matches the
 - file. -}
-isUnmodified :: Key -> FilePath -> Annex Bool
+isUnmodified :: Key -> RawFilePath -> Annex Bool
 isUnmodified key f = go =<< geti
 where
 go Nothing = return False
 go (Just fc) = isUnmodifiedCheap' key fc <||> expensivecheck fc
-expensivecheck fc = ifM (verifyKeyContent RetrievalAllKeysSecure AlwaysVerify UnVerified key f)
+expensivecheck fc = ifM (verifyKeyContent RetrievalAllKeysSecure AlwaysVerify UnVerified key (fromRawFilePath f))
 ( do
 -- The file could have been modified while it was
 -- being verified. Detect that.
@@ -691,7 +699,7 @@ isUnmodified key f = go =<< geti
 - this may report a false positive when repeated edits are made to a file
 - within a small time window (eg 1 second).
 -}
-isUnmodifiedCheap :: Key -> FilePath -> Annex Bool
+isUnmodifiedCheap :: Key -> RawFilePath -> Annex Bool
 isUnmodifiedCheap key f = maybe (return False) (isUnmodifiedCheap' key)
 =<< withTSDelta (liftIO . genInodeCache f)

@@ -703,7 +711,7 @@ isUnmodifiedCheap' key fc =
 - returns the file it was moved to. -}
 moveBad :: Key -> Annex FilePath
 moveBad key = do
-src <- calcRepo $ gitAnnexLocation key
+src <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
 bad <- fromRepo gitAnnexBadDir
 let dest = bad </> takeFileName src
 createAnnexDirectory (parentDir dest)
@@ -791,7 +799,7 @@ preseedTmp key file = go =<< inAnnex key
 copy = ifM (liftIO $ doesFileExist file)
 ( return True
 , do
-s <- calcRepo $ gitAnnexLocation key
+s <- fromRawFilePath <$> (calcRepo $ gitAnnexLocation key)
 liftIO $ ifM (doesFileExist s)
 ( copyFileExternal CopyTimeStamps s file
 , return False

@@ -38,10 +38,11 @@ populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
 destmode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus f'
 liftIO $ nukeFile f'
 (ic, populated) <- replaceFile f' $ \tmp -> do
+let tmp' = toRawFilePath tmp
 ok <- linkOrCopy k (fromRawFilePath obj) tmp destmode >>= \case
 Just _ -> thawContent tmp >> return True
-Nothing -> liftIO (writePointerFile (toRawFilePath tmp) k destmode) >> return False
-ic <- withTSDelta (liftIO . genInodeCache tmp)
+Nothing -> liftIO (writePointerFile tmp' k destmode) >> return False
+ic <- withTSDelta (liftIO . genInodeCache tmp')
 return (ic, ok)
 maybe noop (restagePointerFile restage f) ic
 if populated
@@ -68,5 +69,5 @@ depopulatePointerFile key file = do
 (\t -> touch tmp t False)
 (fmap modificationTimeHiRes st)
 #endif
-withTSDelta (liftIO . genInodeCache tmp)
+withTSDelta (liftIO . genInodeCache (toRawFilePath tmp))
 maybe noop (restagePointerFile (Restage True) file) ic

@@ -1,6 +1,6 @@
 {- git-annex file locations
 -
-- Copyright 2010-2017 Joey Hess <id@joeyh.name>
+- Copyright 2010-2019 Joey Hess <id@joeyh.name>
 -
 - Licensed under the GNU AGPL version 3 or higher.
 -}
@@ -19,7 +19,10 @@ module Annex.DirHashes (

 import Data.Default
 import Data.Bits
-import qualified Data.ByteArray
+import qualified Data.ByteArray as BA
+import qualified Data.ByteArray.Encoding as BA
+import qualified Data.ByteString as S
+import qualified System.FilePath.ByteString as P

 import Common
 import Key
@@ -28,7 +31,7 @@ import Types.Difference
 import Utility.Hash
 import Utility.MD5

-type Hasher = Key -> FilePath
+type Hasher = Key -> RawFilePath

 -- Number of hash levels to use. 2 is the default.
 newtype HashLevels = HashLevels Int
@@ -47,7 +50,7 @@ configHashLevels d config
 | hasDifference d (annexDifferences config) = HashLevels 1
 | otherwise = def

-branchHashDir :: GitConfig -> Key -> String
+branchHashDir :: GitConfig -> Key -> S.ByteString
 branchHashDir = hashDirLower . branchHashLevels

 {- Two different directory hashes may be used. The mixed case hash
@@ -60,19 +63,26 @@ branchHashDir = hashDirLower . branchHashLevels
 dirHashes :: [HashLevels -> Hasher]
 dirHashes = [hashDirLower, hashDirMixed]

-hashDirs :: HashLevels -> Int -> String -> FilePath
-hashDirs (HashLevels 1) sz s = addTrailingPathSeparator $ take sz s
-hashDirs _ sz s = addTrailingPathSeparator $ take sz s </> drop sz s
+hashDirs :: HashLevels -> Int -> S.ByteString -> RawFilePath
+hashDirs (HashLevels 1) sz s = P.addTrailingPathSeparator $ S.take sz s
+hashDirs _ sz s = P.addTrailingPathSeparator $ h P.</> t
+where
+(h, t) = S.splitAt sz s

 hashDirLower :: HashLevels -> Hasher
-hashDirLower n k = hashDirs n 3 $ take 6 $ show $ md5s $ serializeKey' $ nonChunkKey k
+hashDirLower n k = hashDirs n 3 $ S.pack $ take 6 $ conv $
+md5s $ serializeKey' $ nonChunkKey k
+where
+conv v = BA.unpack $
+(BA.convertToBase BA.Base16 v :: BA.Bytes)

 {- This was originally using Data.Hash.MD5 from MissingH. This new version
 - is faster, but ugly as it has to replicate the 4 Word32's that produced. -}
 hashDirMixed :: HashLevels -> Hasher
-hashDirMixed n k = hashDirs n 2 $ take 4 $ concatMap display_32bits_as_dir $
-encodeWord32 $ map fromIntegral $ Data.ByteArray.unpack $
-Utility.Hash.md5s $ serializeKey' $ nonChunkKey k
+hashDirMixed n k = hashDirs n 2 $ S.pack $ take 4 $
+concatMap display_32bits_as_dir $
+encodeWord32 $ map fromIntegral $ BA.unpack $
+Utility.Hash.md5s $ serializeKey' $ nonChunkKey k
 where
 encodeWord32 (b1:b2:b3:b4:rest) =
 (shiftL b4 24 .|. shiftL b3 16 .|. shiftL b2 8 .|. b1)

@@ -49,7 +49,7 @@ type Reason = String
 handleDropsFrom :: [UUID] -> [Remote] -> Reason -> Bool -> Key -> AssociatedFile -> [VerifiedCopy] -> (CommandStart -> CommandCleanup) -> Annex ()
 handleDropsFrom locs rs reason fromhere key afile preverified runner = do
 g <- Annex.gitRepo
-l <- map toRawFilePath . map (`fromTopFilePath` g)
+l <- map (`fromTopFilePath` g)
 <$> Database.Keys.getAssociatedFiles key
 let fs = case afile of
 AssociatedFile (Just f) -> nub (f : l)

@@ -62,7 +62,7 @@ checkMatcher :: FileMatcher Annex -> Maybe Key -> AssociatedFile -> AssumeNotPre
 checkMatcher matcher mkey afile notpresent notconfigured d
 | isEmpty matcher = notconfigured
 | otherwise = case (mkey, afile) of
-(_, AssociatedFile (Just file)) -> go =<< fileMatchInfo (fromRawFilePath file)
+(_, AssociatedFile (Just file)) -> go =<< fileMatchInfo file
 (Just key, _) -> go (MatchingKey key afile)
 _ -> d
 where
@@ -72,7 +72,7 @@ checkMatcher' :: FileMatcher Annex -> MatchInfo -> AssumeNotPresent -> Annex Boo
 checkMatcher' matcher mi notpresent =
 matchMrun matcher $ \a -> a notpresent mi

-fileMatchInfo :: FilePath -> Annex MatchInfo
+fileMatchInfo :: RawFilePath -> Annex MatchInfo
 fileMatchInfo file = do
 matchfile <- getTopFilePath <$> inRepo (toTopFilePath file)
 return $ MatchingFile FileInfo

@@ -19,6 +19,7 @@ import Utility.SafeCommand
 import Utility.Directory
 import Utility.Exception
 import Utility.Monad
+import Utility.FileSystemEncoding
 import Utility.PartialPrelude

 import System.IO
@@ -29,6 +30,8 @@ import Data.Maybe
 import Control.Monad
 import Control.Monad.IfElse
 import qualified Data.Map as M
+import qualified System.FilePath.ByteString as P
+import qualified Data.ByteString as S
 import Control.Applicative
 import Prelude

@@ -52,7 +55,7 @@ disableWildcardExpansion r = r
 fixupDirect :: Repo -> Repo
 fixupDirect r@(Repo { location = l@(Local { gitdir = d, worktree = Nothing }) }) = do
 r
-{ location = l { worktree = Just (parentDir d) }
+{ location = l { worktree = Just (toRawFilePath (parentDir (fromRawFilePath d))) }
 , gitGlobalOpts = gitGlobalOpts r ++
 [ Param "-c"
 , Param $ fromConfigKey coreBare ++ "=" ++ boolConfig False
@@ -110,12 +113,13 @@ fixupUnusualRepos r@(Repo { location = l@(Local { worktree = Just w, gitdir = d
 , return r
 )
 where
-dotgit = w </> ".git"
+dotgit = w P.</> ".git"
+dotgit' = fromRawFilePath dotgit

-replacedotgit = whenM (doesFileExist dotgit) $ do
-linktarget <- relPathDirToFile w d
-nukeFile dotgit
-createSymbolicLink linktarget dotgit
+replacedotgit = whenM (doesFileExist dotgit') $ do
+linktarget <- relPathDirToFile (fromRawFilePath w) (fromRawFilePath d)
+nukeFile dotgit'
+createSymbolicLink linktarget dotgit'

 unsetcoreworktree =
 maybe (error "unset core.worktree failed") (\_ -> return ())
@@ -125,13 +129,13 @@ fixupUnusualRepos r@(Repo { location = l@(Local { worktree = Just w, gitdir = d
 -- git-worktree sets up a "commondir" file that contains
 -- the path to the main git directory.
 -- Using --separate-git-dir does not.
-catchDefaultIO Nothing (headMaybe . lines <$> readFile (d </> "commondir")) >>= \case
+catchDefaultIO Nothing (headMaybe . lines <$> readFile (fromRawFilePath (d P.</> "commondir"))) >>= \case
 Just gd -> do
 -- Make the worktree's git directory
 -- contain an annex symlink to the main
 -- repository's annex directory.
 let linktarget = gd </> "annex"
-createSymbolicLink linktarget (dotgit </> "annex")
+createSymbolicLink linktarget (dotgit' </> "annex")
 Nothing -> return ()

 -- Repo adjusted, so that symlinks to objects that get checked
@@ -141,12 +145,12 @@ fixupUnusualRepos r@(Repo { location = l@(Local { worktree = Just w, gitdir = d
 | coreSymlinks c = r { location = l { gitdir = dotgit } }
 | otherwise = r

-notnoannex = isNothing <$> noAnnexFileContent (Git.repoWorkTree r)
+notnoannex = isNothing <$> noAnnexFileContent (fmap fromRawFilePath (Git.repoWorkTree r))
 fixupUnusualRepos r _ = return r

 needsSubmoduleFixup :: Repo -> Bool
 needsSubmoduleFixup (Repo { location = (Local { worktree = Just _, gitdir = d }) }) =
-(".git" </> "modules") `isInfixOf` d
+(".git" P.</> "modules") `S.isInfixOf` d
 needsSubmoduleFixup _ = False

 needsGitLinkFixup :: Repo -> IO Bool
@@ -154,6 +158,6 @@ needsGitLinkFixup (Repo { location = (Local { worktree = Just wt, gitdir = d })
 -- Optimization: Avoid statting .git in the common case; only
 -- when the gitdir is not in the usual place inside the worktree
 -- might .git be a file.
-| wt </> ".git" == d = return False
-| otherwise = doesFileExist (wt </> ".git")
+| wt P.</> ".git" == d = return False
+| otherwise = doesFileExist (fromRawFilePath (wt P.</> ".git"))
 needsGitLinkFixup _ = return False

@@ -54,7 +54,7 @@ withWorkTree d = withAltRepo
 (\g -> return $ g { location = modlocation (location g), gitGlobalOpts = gitGlobalOpts g ++ disableSmudgeConfig })
 (\g g' -> g' { location = location g, gitGlobalOpts = gitGlobalOpts g })
 where
-modlocation l@(Local {}) = l { worktree = Just d }
+modlocation l@(Local {}) = l { worktree = Just (toRawFilePath d) }
 modlocation _ = error "withWorkTree of non-local git repo"
 disableSmudgeConfig = map Param
 [ "-c", "filter.annex.smudge="
@@ -73,7 +73,8 @@ withWorkTreeRelated :: FilePath -> Annex a -> Annex a
 withWorkTreeRelated d = withAltRepo modrepo unmodrepo
 where
 modrepo g = liftIO $ do
-g' <- addGitEnv g "GIT_COMMON_DIR" =<< absPath (localGitDir g)
+g' <- addGitEnv g "GIT_COMMON_DIR"
+=<< absPath (fromRawFilePath (localGitDir g))
 g'' <- addGitEnv g' "GIT_DIR" d
 return (g'' { gitEnvOverridesGitDir = True })
 unmodrepo g g' = g'

@ -57,6 +57,7 @@ import Control.Concurrent.STM
|
|||
import qualified Data.Map.Strict as M
|
||||
import qualified Data.Set as S
|
||||
import qualified System.FilePath.Posix as Posix
|
||||
import qualified System.FilePath.ByteString as P
|
||||
|
||||
{- Configures how to build an import tree. -}
|
||||
data ImportTreeConfig
|
||||
|
@ -123,7 +124,7 @@ buildImportCommit remote importtreeconfig importcommitconfig importable =
|
|||
Nothing -> pure committedtree
|
||||
Just dir ->
|
||||
let subtreeref = Ref $
|
||||
fromRef committedtree ++ ":" ++ getTopFilePath dir
|
||||
fromRef committedtree ++ ":" ++ fromRawFilePath (getTopFilePath dir)
|
||||
in fromMaybe emptyTree
|
||||
<$> inRepo (Git.Ref.tree subtreeref)
|
||||
updateexportdb importedtree
|
||||
|
@ -264,12 +265,12 @@ buildImportTrees basetree msubdir importable = History
|
|||
graftTree' importtree subdir basetree repo hdl
|
||||
|
||||
mktreeitem (loc, k) = do
|
||||
let lf = fromRawFilePath (fromImportLocation loc)
|
||||
let lf = fromImportLocation loc
|
||||
let treepath = asTopFilePath lf
|
||||
let topf = asTopFilePath $
|
||||
maybe lf (\sd -> getTopFilePath sd </> lf) msubdir
|
||||
maybe lf (\sd -> getTopFilePath sd P.</> lf) msubdir
|
||||
relf <- fromRepo $ fromTopFilePath topf
|
||||
symlink <- calcRepo $ gitAnnexLink relf k
|
||||
symlink <- calcRepo $ gitAnnexLink (fromRawFilePath relf) k
|
||||
linksha <- hashSymlink symlink
|
||||
return $ TreeItem treepath (fromTreeItemType TreeSymlink) linksha
|
||||
|
||||
|
@ -368,18 +369,18 @@ downloadImport remote importtreeconfig importablecontents = do
|
|||
|
||||
mkkey loc tmpfile = do
|
||||
f <- fromRepo $ fromTopFilePath $ locworktreefilename loc
|
||||
backend <- chooseBackend f
|
||||
backend <- chooseBackend (fromRawFilePath f)
|
||||
let ks = KeySource
|
||||
{ keyFilename = f
|
||||
{ keyFilename = (fromRawFilePath f)
|
||||
, contentLocation = tmpfile
|
||||
, inodeCache = Nothing
|
||||
}
|
||||
fmap fst <$> genKey ks nullMeterUpdate backend
|
||||
|
||||
locworktreefilename loc = asTopFilePath $ case importtreeconfig of
|
||||
ImportTree -> fromRawFilePath (fromImportLocation loc)
|
||||
ImportTree -> fromImportLocation loc
|
||||
ImportSubTree subdir _ ->
|
||||
getTopFilePath subdir </> fromRawFilePath (fromImportLocation loc)
|
||||
getTopFilePath subdir P.</> fromImportLocation loc
|
||||
|
||||
getcidkey cidmap db cid = liftIO $
|
||||
CIDDb.getContentIdentifierKeys db rs cid >>= \case
|
||||
|
|
|
@ -92,7 +92,7 @@ lockDown' cfg file = tryIO $ ifM crippledFileSystem
|
|||
nohardlink = withTSDelta $ liftIO . nohardlink'
|
||||
|
||||
nohardlink' delta = do
|
||||
cache <- genInodeCache file delta
|
||||
cache <- genInodeCache (toRawFilePath file) delta
|
||||
return $ LockedDown cfg $ KeySource
|
||||
{ keyFilename = file
|
||||
, contentLocation = file
|
||||
|
@ -112,7 +112,7 @@ lockDown' cfg file = tryIO $ ifM crippledFileSystem
|
|||
|
||||
withhardlink' delta tmpfile = do
|
||||
createLink file tmpfile
|
||||
cache <- genInodeCache tmpfile delta
|
||||
cache <- genInodeCache (toRawFilePath tmpfile) delta
|
||||
return $ LockedDown cfg $ KeySource
|
||||
{ keyFilename = file
|
||||
, contentLocation = tmpfile
|
||||
|
@ -202,19 +202,20 @@ finishIngestUnlocked key source = do
|
|||
|
||||
finishIngestUnlocked' :: Key -> KeySource -> Restage -> Annex ()
|
||||
finishIngestUnlocked' key source restage = do
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath (keyFilename source))
|
||||
Database.Keys.addAssociatedFile key
|
||||
=<< inRepo (toTopFilePath (toRawFilePath (keyFilename source)))
|
||||
populateAssociatedFiles key source restage
|
||||
|
||||
{- Copy to any other locations using the same key. -}
|
||||
populateAssociatedFiles :: Key -> KeySource -> Restage -> Annex ()
|
||||
populateAssociatedFiles key source restage = do
|
||||
obj <- toRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
obj <- calcRepo (gitAnnexLocation key)
|
||||
g <- Annex.gitRepo
|
||||
ingestedf <- flip fromTopFilePath g
|
||||
<$> inRepo (toTopFilePath (keyFilename source))
|
||||
<$> inRepo (toTopFilePath (toRawFilePath (keyFilename source)))
|
||||
afs <- map (`fromTopFilePath` g) <$> Database.Keys.getAssociatedFiles key
|
||||
forM_ (filter (/= ingestedf) afs) $
|
||||
populatePointerFile restage key obj . toRawFilePath
|
||||
populatePointerFile restage key obj
|
||||
|
||||
cleanCruft :: KeySource -> Annex ()
|
||||
cleanCruft source = when (contentLocation source /= keyFilename source) $
|
||||
|
@ -226,8 +227,8 @@ cleanCruft source = when (contentLocation source /= keyFilename source) $
|
|||
cleanOldKeys :: FilePath -> Key -> Annex ()
|
||||
cleanOldKeys file newkey = do
|
||||
g <- Annex.gitRepo
|
||||
ingestedf <- flip fromTopFilePath g <$> inRepo (toTopFilePath file)
|
||||
topf <- inRepo (toTopFilePath file)
|
||||
topf <- inRepo (toTopFilePath (toRawFilePath file))
|
||||
ingestedf <- fromRepo $ fromTopFilePath topf
|
||||
oldkeys <- filter (/= newkey)
|
||||
<$> Database.Keys.getAssociatedKey topf
|
||||
forM_ oldkeys $ \key ->
|
||||
|
@ -243,7 +244,7 @@ cleanOldKeys file newkey = do
|
|||
-- so no need for any recovery.
|
||||
(f:_) -> do
|
||||
ic <- withTSDelta (liftIO . genInodeCache f)
|
||||
void $ linkToAnnex key f ic
|
||||
void $ linkToAnnex key (fromRawFilePath f) ic
|
||||
_ -> logStatus key InfoMissing
|
||||
|
||||
{- On error, put the file back so it doesn't seem to have vanished.
|
||||
|
@ -254,7 +255,7 @@ restoreFile file key e = do
|
|||
liftIO $ nukeFile file
|
||||
-- The key could be used by other files too, so leave the
|
||||
-- content in the annex, and make a copy back to the file.
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
obj <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
unlessM (liftIO $ copyFileExternal CopyTimeStamps obj file) $
|
||||
warning $ "Unable to restore content of " ++ file ++ "; it should be located in " ++ obj
|
||||
thawContent file
|
||||
|
@ -330,7 +331,7 @@ addAnnexedFile file key mtmp = ifM addUnlocked
|
|||
(\tmp -> liftIO $ catchMaybeIO $ fileMode <$> getFileStatus tmp)
|
||||
mtmp
|
||||
stagePointerFile (toRawFilePath file) mode =<< hashPointerFile key
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath file)
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath (toRawFilePath file))
|
||||
case mtmp of
|
||||
Just tmp -> ifM (moveAnnex key tmp)
|
||||
( linkunlocked mode >> return True
|
||||
|
|
|
@ -56,7 +56,7 @@ import Data.Either
|
|||
import qualified Data.Map as M
|
||||
|
||||
checkCanInitialize :: Annex a -> Annex a
|
||||
checkCanInitialize a = inRepo (noAnnexFileContent . Git.repoWorkTree) >>= \case
|
||||
checkCanInitialize a = inRepo (noAnnexFileContent . fmap fromRawFilePath . Git.repoWorkTree) >>= \case
|
||||
Nothing -> a
|
||||
Just noannexmsg -> do
|
||||
warning "Initialization prevented by .noannex file (remove the file to override)"
|
||||
|
@ -67,7 +67,9 @@ checkCanInitialize a = inRepo (noAnnexFileContent . Git.repoWorkTree) >>= \case
|
|||
genDescription :: Maybe String -> Annex UUIDDesc
|
||||
genDescription (Just d) = return $ UUIDDesc $ encodeBS d
|
||||
genDescription Nothing = do
|
||||
reldir <- liftIO . relHome =<< liftIO . absPath =<< fromRepo Git.repoPath
|
||||
reldir <- liftIO . relHome
|
||||
=<< liftIO . absPath . fromRawFilePath
|
||||
=<< fromRepo Git.repoPath
|
||||
hostname <- fromMaybe "" <$> liftIO getHostname
|
||||
let at = if null hostname then "" else "@"
|
||||
v <- liftIO myUserName
|
||||
|
|
|
@ -29,7 +29,7 @@ compareInodeCachesWith = ifM inodesChanged ( return Weakly, return Strongly )
|
|||
|
||||
{- Checks if one of the provided old InodeCache matches the current
|
||||
- version of a file. -}
|
||||
sameInodeCache :: FilePath -> [InodeCache] -> Annex Bool
|
||||
sameInodeCache :: RawFilePath -> [InodeCache] -> Annex Bool
|
||||
sameInodeCache _ [] = return False
|
||||
sameInodeCache file old = go =<< withTSDelta (liftIO . genInodeCache file)
|
||||
where
|
||||
|
@ -78,7 +78,7 @@ createInodeSentinalFile :: Bool -> Annex ()
|
|||
createInodeSentinalFile evenwithobjects =
|
||||
unlessM (alreadyexists <||> hasobjects) $ do
|
||||
s <- annexSentinalFile
|
||||
createAnnexDirectory (parentDir (sentinalFile s))
|
||||
createAnnexDirectory (parentDir (fromRawFilePath (sentinalFile s)))
|
||||
liftIO $ writeSentinalFile s
|
||||
where
|
||||
alreadyexists = liftIO. sentinalFileExists =<< annexSentinalFile
|
||||
|
|
|
@ -20,7 +20,9 @@ import Utility.Directory.Stream
|
|||
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
import qualified Data.ByteString as S
|
||||
import qualified System.FilePath.ByteString as P
|
||||
import Data.ByteString.Builder
|
||||
import Data.Char
|
||||
|
||||
class Journalable t where
|
||||
writeJournalHandle :: Handle -> t -> IO ()
|
||||
|
@ -48,7 +50,7 @@ setJournalFile :: Journalable content => JournalLocked -> RawFilePath -> content
|
|||
setJournalFile _jl file content = withOtherTmp $ \tmp -> do
|
||||
createAnnexDirectory =<< fromRepo gitAnnexJournalDir
|
||||
-- journal file is written atomically
|
||||
jfile <- fromRepo $ journalFile $ fromRawFilePath file
|
||||
jfile <- fromRawFilePath <$> fromRepo (journalFile file)
|
||||
let tmpfile = tmp </> takeFileName jfile
|
||||
liftIO $ do
|
||||
withFile tmpfile WriteMode $ \h -> writeJournalHandle h content
|
||||
|
@ -71,7 +73,7 @@ getJournalFile _jl = getJournalFileStale
|
|||
-}
|
||||
getJournalFileStale :: RawFilePath -> Annex (Maybe L.ByteString)
|
||||
getJournalFileStale file = inRepo $ \g -> catchMaybeIO $
|
||||
L.fromStrict <$> S.readFile (journalFile (fromRawFilePath file) g)
|
||||
L.fromStrict <$> S.readFile (fromRawFilePath $ journalFile file g)
|
||||
|
||||
{- List of existing journal files, but without locking, may miss new ones
|
||||
- just being added, or may have false positives if the journal is staged
|
||||
|
@ -81,7 +83,8 @@ getJournalledFilesStale = do
|
|||
g <- gitRepo
|
||||
fs <- liftIO $ catchDefaultIO [] $
|
||||
getDirectoryContents $ gitAnnexJournalDir g
|
||||
return $ filter (`notElem` [".", ".."]) $ map fileJournal fs
|
||||
return $ filter (`notElem` [".", ".."]) $
|
||||
map (fromRawFilePath . fileJournal . toRawFilePath) fs
|
||||
|
||||
withJournalHandle :: (DirectoryHandle -> IO a) -> Annex a
|
||||
withJournalHandle a = do
|
||||
|
@ -102,19 +105,33 @@ journalDirty = do
|
|||
- used in the branch is not necessary, and all the files are put directly
|
||||
- in the journal directory.
|
||||
-}
|
||||
journalFile :: FilePath -> Git.Repo -> FilePath
|
||||
journalFile file repo = gitAnnexJournalDir repo </> concatMap mangle file
|
||||
journalFile :: RawFilePath -> Git.Repo -> RawFilePath
|
||||
journalFile file repo = gitAnnexJournalDir' repo P.</> S.concatMap mangle file
|
||||
where
|
||||
mangle c
|
||||
| c == pathSeparator = "_"
|
||||
| c == '_' = "__"
|
||||
| otherwise = [c]
|
||||
| P.isPathSeparator c = S.singleton underscore
|
||||
| c == underscore = S.pack [underscore, underscore]
|
||||
| otherwise = S.singleton c
|
||||
underscore = fromIntegral (ord '_')
|
||||
|
||||
{- Converts a journal file (relative to the journal dir) back to the
|
||||
- filename on the branch. -}
|
||||
fileJournal :: FilePath -> FilePath
|
||||
fileJournal = replace [pathSeparator, pathSeparator] "_" .
|
||||
replace "_" [pathSeparator]
|
||||
fileJournal :: RawFilePath -> RawFilePath
|
||||
fileJournal = go
|
||||
where
|
||||
go b =
|
||||
let (h, t) = S.break (== underscore) b
|
||||
in h <> case S.uncons t of
|
||||
Nothing -> t
|
||||
Just (_u, t') -> case S.uncons t' of
|
||||
Nothing -> t'
|
||||
Just (w, t'')
|
||||
| w == underscore ->
|
||||
S.cons underscore (go t'')
|
||||
| otherwise ->
|
||||
S.cons P.pathSeparator (go t')
|
||||
|
||||
underscore = fromIntegral (ord '_')
|
||||
|
||||
{- Sentinal value, only produced by lockJournal; required
|
||||
- as a parameter by things that need to ensure the journal is
|
||||
|
|
|
@ -39,6 +39,7 @@ import qualified Utility.RawFilePath as R
|
|||
import qualified Data.ByteString as S
|
||||
import qualified Data.ByteString.Char8 as S8
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
import qualified System.FilePath.ByteString as P
|
||||
|
||||
type LinkTarget = String
|
||||
|
||||
|
@ -182,7 +183,7 @@ restagePointerFile (Restage True) f orig = withTSDelta $ \tsd -> do
|
|||
absf <- liftIO $ absPath $ fromRawFilePath f
|
||||
Annex.Queue.addInternalAction runner [(absf, isunmodified tsd)]
|
||||
where
|
||||
isunmodified tsd = genInodeCache' f tsd >>= return . \case
|
||||
isunmodified tsd = genInodeCache f tsd >>= return . \case
|
||||
Nothing -> False
|
||||
Just new -> compareStrong orig new
|
||||
|
||||
|
@ -200,7 +201,7 @@ restagePointerFile (Restage True) f orig = withTSDelta $ \tsd -> do
|
|||
unlockindex = liftIO . maybe noop Git.LockFile.closeLock
|
||||
showwarning = warning $ unableToRestage Nothing
|
||||
go Nothing = showwarning
|
||||
go (Just _) = withTmpDirIn (Git.localGitDir r) "annexindex" $ \tmpdir -> do
|
||||
go (Just _) = withTmpDirIn (fromRawFilePath $ Git.localGitDir r) "annexindex" $ \tmpdir -> do
|
||||
let tmpindex = tmpdir </> "index"
|
||||
let updatetmpindex = do
|
||||
r' <- Git.Env.addGitEnv r Git.Index.indexEnv
|
||||
|
@ -301,8 +302,7 @@ isLinkToAnnex s = p `S.isInfixOf` s
|
|||
|| p' `S.isInfixOf` s
|
||||
#endif
|
||||
where
|
||||
sp = (pathSeparator:objectDir)
|
||||
p = toRawFilePath sp
|
||||
p = P.pathSeparator `S.cons` objectDir'
|
||||
#ifdef mingw32_HOST_OS
|
||||
p' = toRawFilePath (toInternalGitPath sp)
|
||||
p' = toInternalGitPath p
|
||||
#endif
|
||||
|
|
|
@ -16,6 +16,7 @@ module Annex.Locations (
|
|||
keyPath,
|
||||
annexDir,
|
||||
objectDir,
|
||||
objectDir',
|
||||
gitAnnexLocation,
|
||||
gitAnnexLocationDepth,
|
||||
gitAnnexLink,
|
||||
|
@ -64,6 +65,7 @@ module Annex.Locations (
|
|||
gitAnnexFeedState,
|
||||
gitAnnexMergeDir,
|
||||
gitAnnexJournalDir,
|
||||
gitAnnexJournalDir',
|
||||
gitAnnexJournalLock,
|
||||
gitAnnexGitQueueLock,
|
||||
gitAnnexPreCommitLock,
|
||||
|
@ -95,6 +97,7 @@ module Annex.Locations (
|
|||
import Data.Char
|
||||
import Data.Default
|
||||
import qualified Data.ByteString.Char8 as S8
|
||||
import qualified System.FilePath.ByteString as P
|
||||
|
||||
import Common
|
||||
import Key
|
||||
|
@ -106,6 +109,7 @@ import qualified Git.Types as Git
|
|||
import Git.FilePath
|
||||
import Annex.DirHashes
|
||||
import Annex.Fixup
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
{- Conventions:
|
||||
-
|
||||
|
@ -125,21 +129,27 @@ import Annex.Fixup
|
|||
annexDir :: FilePath
|
||||
annexDir = addTrailingPathSeparator "annex"
|
||||
|
||||
annexDir' :: RawFilePath
|
||||
annexDir' = P.addTrailingPathSeparator "annex"
|
||||
|
||||
{- The directory git annex uses for locally available object content,
|
||||
- relative to the .git directory -}
|
||||
objectDir :: FilePath
|
||||
objectDir = addTrailingPathSeparator $ annexDir </> "objects"
|
||||
|
||||
objectDir' :: RawFilePath
|
||||
objectDir' = P.addTrailingPathSeparator $ annexDir' P.</> "objects"
|
||||
|
||||
{- Annexed file's possible locations relative to the .git directory.
|
||||
- There are two different possibilities, using different hashes.
|
||||
-
|
||||
- Also, some repositories have a Difference in hash directory depth.
|
||||
-}
|
||||
annexLocations :: GitConfig -> Key -> [FilePath]
|
||||
annexLocations :: GitConfig -> Key -> [RawFilePath]
|
||||
annexLocations config key = map (annexLocation config key) dirHashes
|
||||
|
||||
annexLocation :: GitConfig -> Key -> (HashLevels -> Hasher) -> FilePath
|
||||
annexLocation config key hasher = objectDir </> keyPath key (hasher $ objectHashLevels config)
|
||||
annexLocation :: GitConfig -> Key -> (HashLevels -> Hasher) -> RawFilePath
|
||||
annexLocation config key hasher = objectDir' P.</> keyPath key (hasher $ objectHashLevels config)
|
||||
|
||||
{- Number of subdirectories from the gitAnnexObjectDir
|
||||
- to the gitAnnexLocation. -}
|
||||
|
@ -159,9 +169,14 @@ gitAnnexLocationDepth config = hashlevels + 1
|
|||
- This does not take direct mode into account, so in direct mode it is not
|
||||
- the actual location of the file's content.
|
||||
-}
|
||||
gitAnnexLocation :: Key -> Git.Repo -> GitConfig -> IO FilePath
|
||||
gitAnnexLocation key r config = gitAnnexLocation' key r config (annexCrippledFileSystem config) (coreSymlinks config) doesFileExist (Git.localGitDir r)
|
||||
gitAnnexLocation' :: Key -> Git.Repo -> GitConfig -> Bool -> Bool -> (FilePath -> IO Bool) -> FilePath -> IO FilePath
|
||||
gitAnnexLocation :: Key -> Git.Repo -> GitConfig -> IO RawFilePath
|
||||
gitAnnexLocation key r config = gitAnnexLocation' key r config
|
||||
(annexCrippledFileSystem config)
|
||||
(coreSymlinks config)
|
||||
R.doesPathExist
|
||||
(Git.localGitDir r)
|
||||
|
||||
gitAnnexLocation' :: Key -> Git.Repo -> GitConfig -> Bool -> Bool -> (RawFilePath -> IO Bool) -> RawFilePath -> IO RawFilePath
|
||||
gitAnnexLocation' key r config crippled symlinkssupported checker gitdir
|
||||
{- Bare repositories default to hashDirLower for new
|
||||
- content, as it's more portable. But check all locations. -}
|
||||
|
@ -183,7 +198,7 @@ gitAnnexLocation' key r config crippled symlinkssupported checker gitdir
|
|||
only = return . inrepo . annexLocation config key
|
||||
checkall = check $ map inrepo $ annexLocations config key
|
||||
|
||||
inrepo d = gitdir </> d
|
||||
inrepo d = gitdir P.</> d
|
||||
check locs@(l:_) = fromMaybe l <$> firstM checker locs
|
||||
check [] = error "internal"
|
||||
|
||||
|
@ -195,14 +210,16 @@ gitAnnexLink file key r config = do
|
|||
let gitdir = getgitdir currdir
|
||||
loc <- gitAnnexLocation' key r config False False (\_ -> return True) gitdir
|
||||
fromRawFilePath . toInternalGitPath . toRawFilePath
|
||||
<$> relPathDirToFile (parentDir absfile) loc
|
||||
<$> relPathDirToFile (parentDir absfile) (fromRawFilePath loc)
|
||||
where
|
||||
getgitdir currdir
|
||||
{- This special case is for git submodules on filesystems not
|
||||
- supporting symlinks; generate link target that will
|
||||
- work portably. -}
|
||||
| not (coreSymlinks config) && needsSubmoduleFixup r =
|
||||
absNormPathUnix currdir $ Git.repoPath r </> ".git"
|
||||
toRawFilePath $
|
||||
absNormPathUnix currdir $ fromRawFilePath $
|
||||
Git.repoPath r P.</> ".git"
|
||||
| otherwise = Git.localGitDir r
|
||||
absNormPathUnix d p = fromRawFilePath $ toInternalGitPath $ toRawFilePath $
|
||||
absPathFrom
|
||||
|
@ -216,7 +233,7 @@ gitAnnexLinkCanonical file key r config = gitAnnexLink file key r' config'
|
|||
where
|
||||
r' = case r of
|
||||
Git.Repo { Git.location = l@Git.Local { Git.worktree = Just wt } } ->
|
||||
r { Git.location = l { Git.gitdir = wt </> ".git" } }
|
||||
r { Git.location = l { Git.gitdir = wt P.</> ".git" } }
|
||||
_ -> r
|
||||
config' = config
|
||||
{ annexCrippledFileSystem = False
|
||||
|
@ -227,36 +244,39 @@ gitAnnexLinkCanonical file key r config = gitAnnexLink file key r' config'
|
|||
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO FilePath
|
||||
gitAnnexContentLock key r config = do
|
||||
loc <- gitAnnexLocation key r config
|
||||
return $ loc ++ ".lck"
|
||||
return $ fromRawFilePath loc ++ ".lck"
|
||||
|
||||
{- File that maps from a key to the file(s) in the git repository.
|
||||
- Used in direct mode. -}
|
||||
gitAnnexMapping :: Key -> Git.Repo -> GitConfig -> IO FilePath
|
||||
gitAnnexMapping key r config = do
|
||||
loc <- gitAnnexLocation key r config
|
||||
return $ loc ++ ".map"
|
||||
return $ fromRawFilePath loc ++ ".map"
|
||||
|
||||
{- File that caches information about a key's content, used to determine
|
||||
- if a file has changed.
|
||||
- Used in direct mode. -}
|
||||
gitAnnexInodeCache :: Key -> Git.Repo -> GitConfig -> IO FilePath
|
||||
gitAnnexInodeCache key r config = do
|
||||
gitAnnexInodeCache key r config = do
|
||||
loc <- gitAnnexLocation key r config
|
||||
return $ loc ++ ".cache"
|
||||
return $ fromRawFilePath loc ++ ".cache"
|
||||
|
||||
gitAnnexInodeSentinal :: Git.Repo -> FilePath
|
||||
gitAnnexInodeSentinal r = gitAnnexDir r </> "sentinal"
|
||||
gitAnnexInodeSentinal :: Git.Repo -> RawFilePath
|
||||
gitAnnexInodeSentinal r = gitAnnexDir' r P.</> "sentinal"
|
||||
|
||||
gitAnnexInodeSentinalCache :: Git.Repo -> FilePath
|
||||
gitAnnexInodeSentinalCache r = gitAnnexInodeSentinal r ++ ".cache"
|
||||
gitAnnexInodeSentinalCache :: Git.Repo -> RawFilePath
|
||||
gitAnnexInodeSentinalCache r = gitAnnexInodeSentinal r <> ".cache"
|
||||
|
||||
{- The annex directory of a repository. -}
|
||||
gitAnnexDir :: Git.Repo -> FilePath
|
||||
gitAnnexDir r = addTrailingPathSeparator $ Git.localGitDir r </> annexDir
|
||||
gitAnnexDir r = addTrailingPathSeparator $ fromRawFilePath (Git.localGitDir r) </> annexDir
|
||||
|
||||
gitAnnexDir' :: Git.Repo -> RawFilePath
|
||||
gitAnnexDir' r = P.addTrailingPathSeparator $ Git.localGitDir r P.</> annexDir'
|
||||
|
||||
{- The part of the annex directory where file contents are stored. -}
|
||||
gitAnnexObjectDir :: Git.Repo -> FilePath
|
||||
gitAnnexObjectDir r = addTrailingPathSeparator $ Git.localGitDir r </> objectDir
|
||||
gitAnnexObjectDir r = addTrailingPathSeparator $ fromRawFilePath (Git.localGitDir r) </> objectDir
|
||||
|
||||
{- .git/annex/tmp/ is used for temp files for key's contents -}
|
||||
gitAnnexTmpObjectDir :: Git.Repo -> FilePath
|
||||
|
@ -427,6 +447,9 @@ gitAnnexTransferDir r = addTrailingPathSeparator $ gitAnnexDir r </> "transfer"
|
|||
gitAnnexJournalDir :: Git.Repo -> FilePath
|
||||
gitAnnexJournalDir r = addTrailingPathSeparator $ gitAnnexDir r </> "journal"
|
||||
|
||||
gitAnnexJournalDir' :: Git.Repo -> RawFilePath
|
||||
gitAnnexJournalDir' r = P.addTrailingPathSeparator $ gitAnnexDir' r P.</> "journal"
|
||||
|
||||
{- Lock file for the journal. -}
|
||||
gitAnnexJournalLock :: Git.Repo -> FilePath
|
||||
gitAnnexJournalLock r = gitAnnexDir r </> "journal.lck"
|
||||
|
@ -608,10 +631,10 @@ fileKey' = deserializeKey' . S8.intercalate "/" . map go . S8.split '%'
|
|||
- The file is put in a directory with the same name, this allows
|
||||
- write-protecting the directory to avoid accidental deletion of the file.
|
||||
-}
|
||||
keyPath :: Key -> Hasher -> FilePath
|
||||
keyPath key hasher = hasher key </> f </> f
|
||||
keyPath :: Key -> Hasher -> RawFilePath
|
||||
keyPath key hasher = hasher key P.</> f P.</> f
|
||||
where
|
||||
f = keyFile key
|
||||
f = keyFile' key
|
||||
|
||||
{- All possibile locations to store a key in a special remote
|
||||
- using different directory hashes.
|
||||
|
@ -619,5 +642,5 @@ keyPath key hasher = hasher key </> f </> f
|
|||
- This is compatible with the annexLocations, for interoperability between
|
||||
- special remotes and git-annex repos.
|
||||
-}
|
||||
keyPaths :: Key -> [FilePath]
|
||||
keyPaths :: Key -> [RawFilePath]
|
||||
keyPaths key = map (\h -> keyPath key (h def)) dirHashes
|
||||
|
|
|
@ -43,6 +43,7 @@ import Annex.LockPool
|
|||
#endif
|
||||
|
||||
import Control.Concurrent.STM
|
||||
import qualified Data.ByteString as S
|
||||
|
||||
{- Some ssh commands are fed stdin on a pipe and so should be allowed to
|
||||
- consume it. But ssh commands that are not piped stdin should generally
|
||||
|
@ -325,7 +326,7 @@ sizeof_sockaddr_un_sun_path = 100
|
|||
{- Note that this looks at the true length of the path in bytes, as it will
|
||||
- appear on disk. -}
|
||||
valid_unix_socket_path :: FilePath -> Bool
|
||||
valid_unix_socket_path f = length (decodeW8 f) < sizeof_sockaddr_un_sun_path
|
||||
valid_unix_socket_path f = S.length (encodeBS f) < sizeof_sockaddr_un_sun_path
|
||||
|
||||
{- Parses the SSH port, and returns the other OpenSSH options. If
|
||||
- several ports are found, the last one takes precedence. -}
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
- Licensed under the GNU AGPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Annex.View where
|
||||
|
||||
import Annex.Common
|
||||
|
@ -80,7 +82,7 @@ parseViewParam s = case separate (== '=') s of
|
|||
)
|
||||
where
|
||||
mkFilterValues v
|
||||
| any (`elem` v) "*?" = FilterGlob v
|
||||
| any (`elem` v) ['*', '?'] = FilterGlob v
|
||||
| otherwise = FilterValues $ S.singleton $ toMetaValue $ encodeBS v
|
||||
mkExcludeValues = ExcludeValues . S.singleton . toMetaValue . encodeBS
|
||||
|
||||
|
@ -343,11 +345,11 @@ narrowView = applyView' viewedFileReuse getViewedFileMetaData
|
|||
applyView' :: MkViewedFile -> (FilePath -> MetaData) -> View -> Annex Git.Branch
|
||||
applyView' mkviewedfile getfilemetadata view = do
|
||||
top <- fromRepo Git.repoPath
|
||||
(l, clean) <- inRepo $ Git.LsFiles.stagedDetails [toRawFilePath top]
|
||||
(l, clean) <- inRepo $ Git.LsFiles.stagedDetails [top]
|
||||
liftIO . nukeFile =<< fromRepo gitAnnexViewIndex
|
||||
uh <- withViewIndex $ inRepo Git.UpdateIndex.startUpdateIndex
|
||||
forM_ l $ \(f, sha, mode) -> do
|
||||
topf <- inRepo (toTopFilePath $ fromRawFilePath f)
|
||||
topf <- inRepo (toTopFilePath f)
|
||||
go uh topf sha (toTreeItemType =<< mode) =<< lookupFile f
|
||||
liftIO $ do
|
||||
void $ stopUpdateIndex uh
|
||||
|
@ -358,13 +360,14 @@ applyView' mkviewedfile getfilemetadata view = do
|
|||
|
||||
go uh topf _sha _mode (Just k) = do
|
||||
metadata <- getCurrentMetaData k
|
||||
let f = getTopFilePath topf
|
||||
let f = fromRawFilePath $ getTopFilePath topf
|
||||
let metadata' = getfilemetadata f `unionMetaData` metadata
|
||||
forM_ (genviewedfiles f metadata') $ \fv -> do
|
||||
f' <- fromRepo $ fromTopFilePath $ asTopFilePath fv
|
||||
f' <- fromRawFilePath <$>
|
||||
fromRepo (fromTopFilePath $ asTopFilePath $ toRawFilePath fv)
|
||||
stagesymlink uh f' =<< calcRepo (gitAnnexLink f' k)
|
||||
go uh topf (Just sha) (Just treeitemtype) Nothing
|
||||
| "." `isPrefixOf` getTopFilePath topf =
|
||||
| "." `B.isPrefixOf` getTopFilePath topf =
|
||||
liftIO $ Git.UpdateIndex.streamUpdateIndex' uh $
|
||||
pureStreamer $ updateIndexLine sha treeitemtype topf
|
||||
go _ _ _ _ _ = noop
|
||||
|
@ -403,7 +406,7 @@ withViewChanges addmeta removemeta = do
|
|||
=<< catKey (DiffTree.dstsha item)
|
||||
| otherwise = noop
|
||||
handlechange item a = maybe noop
|
||||
(void . commandAction . a (getTopFilePath $ DiffTree.file item))
|
||||
(void . commandAction . a (fromRawFilePath $ getTopFilePath $ DiffTree.file item))
|
||||
|
||||
{- Runs an action using the view index file.
|
||||
- Note that the file does not necessarily exist, or can contain
|
||||
|
|
|
@ -22,6 +22,7 @@ import qualified Git.Types
|
|||
import qualified Database.Keys
|
||||
import qualified Database.Keys.SQL
|
||||
import Config
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
{- Looks up the key corresponding to an annexed file in the work tree,
|
||||
- by examining what the file links to.
|
||||
|
@ -95,16 +96,18 @@ scanUnlockedFiles = whenM (inRepo Git.Ref.headExists <&&> not <$> isBareRepo) $
|
|||
liftIO . Database.Keys.SQL.addAssociatedFileFast k tf
|
||||
whenM (inAnnex k) $ do
|
||||
f <- fromRepo $ fromTopFilePath tf
|
||||
liftIO (isPointerFile (toRawFilePath f)) >>= \case
|
||||
liftIO (isPointerFile f) >>= \case
|
||||
Just k' | k' == k -> do
|
||||
destmode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus f
|
||||
ic <- replaceFile f $ \tmp ->
|
||||
destmode <- liftIO $ catchMaybeIO $
|
||||
fileMode <$> R.getFileStatus f
|
||||
ic <- replaceFile (fromRawFilePath f) $ \tmp -> do
|
||||
let tmp' = toRawFilePath tmp
|
||||
linkFromAnnex k tmp destmode >>= \case
|
||||
LinkAnnexOk ->
|
||||
withTSDelta (liftIO . genInodeCache tmp)
|
||||
withTSDelta (liftIO . genInodeCache tmp')
|
||||
LinkAnnexNoop -> return Nothing
|
||||
LinkAnnexFailed -> liftIO $ do
|
||||
writePointerFile (toRawFilePath tmp) k destmode
|
||||
writePointerFile tmp' k destmode
|
||||
return Nothing
|
||||
maybe noop (restagePointerFile (Restage True) (toRawFilePath f)) ic
|
||||
maybe noop (restagePointerFile (Restage True) f) ic
|
||||
_ -> noop
|
||||
|
|
|
@ -91,7 +91,7 @@ runRepair u mrmt destructiverepair = do
|
|||
remoterepair fsckresults = case Remote.repairRepo =<< mrmt of
|
||||
Nothing -> return False
|
||||
Just mkrepair -> do
|
||||
thisrepopath <- liftIO . absPath
|
||||
thisrepopath <- liftIO . absPath . fromRawFilePath
|
||||
=<< liftAnnex (fromRepo Git.repoPath)
|
||||
a <- liftAnnex $ mkrepair $
|
||||
repair fsckresults (Just thisrepopath)
|
||||
|
@ -130,7 +130,7 @@ repairStaleGitLocks r = do
|
|||
repairStaleLocks lockfiles
|
||||
return $ not $ null lockfiles
|
||||
where
|
||||
findgitfiles = dirContentsRecursiveSkipping (== dropTrailingPathSeparator annexDir) True . Git.localGitDir
|
||||
findgitfiles = dirContentsRecursiveSkipping (== dropTrailingPathSeparator annexDir) True . fromRawFilePath . Git.localGitDir
|
||||
islock f
|
||||
| "gc.pid" `isInfixOf` f = False
|
||||
| ".lock" `isSuffixOf` f = True
|
||||
|
|
|
@ -308,7 +308,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
|
|||
if M.null m
|
||||
then forM toadd (add cfg)
|
||||
else forM toadd $ \c -> do
|
||||
mcache <- liftIO $ genInodeCache (changeFile c) delta
|
||||
mcache <- liftIO $ genInodeCache (toRawFilePath (changeFile c)) delta
|
||||
case mcache of
|
||||
Nothing -> add cfg c
|
||||
Just cache ->
|
||||
|
|
|
@ -91,4 +91,4 @@ getConfigs = S.fromList . map extract
|
|||
<$> liftAnnex (inRepo $ LsTree.lsTreeFiles Annex.Branch.fullname files)
|
||||
where
|
||||
files = map (fromRawFilePath . fst) configFilesActions
|
||||
extract treeitem = (toRawFilePath $ getTopFilePath $ LsTree.file treeitem, LsTree.sha treeitem)
|
||||
extract treeitem = (getTopFilePath $ LsTree.file treeitem, LsTree.sha treeitem)
|
||||
|
|
|
@ -26,7 +26,7 @@ import qualified Command.Sync
|
|||
mergeThread :: NamedThread
|
||||
mergeThread = namedThread "Merger" $ do
|
||||
g <- liftAnnex gitRepo
|
||||
let dir = Git.localGitDir g </> "refs"
|
||||
let dir = fromRawFilePath (Git.localGitDir g) </> "refs"
|
||||
liftIO $ createDirectoryIfMissing True dir
|
||||
let hook a = Just <$> asIO2 (runHandler a)
|
||||
changehook <- hook onChange
|
||||
|
|
|
@ -159,7 +159,7 @@ handleMount urlrenderer dir = do
|
|||
-}
|
||||
remotesUnder :: FilePath -> Assistant [Remote]
|
||||
remotesUnder dir = do
|
||||
repotop <- liftAnnex $ fromRepo Git.repoPath
|
||||
repotop <- liftAnnex $ fromRawFilePath <$> fromRepo Git.repoPath
|
||||
rs <- liftAnnex remoteList
|
||||
pairs <- liftAnnex $ mapM (checkremote repotop) rs
|
||||
let (waschanged, rs') = unzip pairs
|
||||
|
|
|
@ -119,7 +119,7 @@ pairReqReceived False urlrenderer msg = do
|
|||
pairAckReceived :: Bool -> Maybe PairingInProgress -> PairMsg -> [PairingInProgress] -> Assistant [PairingInProgress]
|
||||
pairAckReceived True (Just pip) msg cache = do
|
||||
stopSending pip
|
||||
repodir <- repoPath <$> liftAnnex gitRepo
|
||||
repodir <- fromRawFilePath . repoPath <$> liftAnnex gitRepo
|
||||
liftIO $ setupAuthorizedKeys msg repodir
|
||||
finishedLocalPairing msg (inProgressSshKeyPair pip)
|
||||
startSending pip PairDone $ multicastPairMsg
|
||||
|
|
|
@ -269,5 +269,5 @@ checkOldUnused urlrenderer = go =<< annexExpireUnused <$> liftAnnex Annex.getGit
|
|||
checkRepoExists :: Assistant ()
|
||||
checkRepoExists = do
|
||||
g <- liftAnnex gitRepo
|
||||
liftIO $ unlessM (doesDirectoryExist $ Git.repoPath g) $
|
||||
liftIO $ unlessM (doesDirectoryExist $ fromRawFilePath $ Git.repoPath g) $
|
||||
terminateSelf
|
||||
|
|
|
@ -136,8 +136,7 @@ startupScan scanner = do
|
|||
-- Notice any files that were deleted before
|
||||
-- watching was started.
|
||||
top <- liftAnnex $ fromRepo Git.repoPath
|
||||
(fs, cleanup) <- liftAnnex $ inRepo $ LsFiles.deleted
|
||||
[toRawFilePath top]
|
||||
(fs, cleanup) <- liftAnnex $ inRepo $ LsFiles.deleted [top]
|
||||
forM_ fs $ \f -> do
|
||||
let f' = fromRawFilePath f
|
||||
liftAnnex $ onDel' f'
|
||||
|
@ -215,7 +214,7 @@ onAddUnlocked symlinkssupported matcher f fs = do
|
|||
where
|
||||
addassociatedfile key file =
|
||||
Database.Keys.addAssociatedFile key
|
||||
=<< inRepo (toTopFilePath file)
|
||||
=<< inRepo (toTopFilePath (toRawFilePath file))
|
||||
samefilestatus key file status = do
|
||||
cache <- Database.Keys.getInodeCaches key
|
||||
curr <- withTSDelta $ \delta -> liftIO $ toInodeCache delta file status
|
||||
|
@ -225,7 +224,7 @@ onAddUnlocked symlinkssupported matcher f fs = do
|
|||
_ -> return False
|
||||
contentchanged oldkey file = do
|
||||
Database.Keys.removeAssociatedFile oldkey
|
||||
=<< inRepo (toTopFilePath file)
|
||||
=<< inRepo (toTopFilePath (toRawFilePath file))
|
||||
unlessM (inAnnex oldkey) $
|
||||
logStatus oldkey InfoMissing
|
||||
addlink file key = do
|
||||
|
@ -347,7 +346,7 @@ onDel file _ = do
|
|||
|
||||
onDel' :: FilePath -> Annex ()
|
||||
onDel' file = do
|
||||
topfile <- inRepo (toTopFilePath file)
|
||||
topfile <- inRepo (toTopFilePath (toRawFilePath file))
|
||||
withkey $ flip Database.Keys.removeAssociatedFile topfile
|
||||
Annex.Queue.addUpdateIndex =<<
|
||||
inRepo (Git.UpdateIndex.unstageFile file)
|
||||
|
|
|
@ -100,7 +100,7 @@ webAppThread assistantdata urlrenderer noannex cannotrun postfirstrun listenhost
|
|||
getreldir
|
||||
| noannex = return Nothing
|
||||
| otherwise = Just <$>
|
||||
(relHome =<< absPath
|
||||
(relHome =<< absPath . fromRawFilePath
|
||||
=<< getAnnex' (fromRepo repoPath))
|
||||
go tlssettings addr webapp htmlshim urlfile = do
|
||||
let url = myUrl tlssettings webapp addr
|
||||
|
|
|
@ -64,7 +64,7 @@ describeUnused' whenbig = liftAnnex $ go =<< readUnusedLog ""
|
|||
|
||||
sumkeysize s k = s + fromMaybe 0 (fromKey keySize k)
|
||||
|
||||
forpath a = inRepo $ liftIO . a . Git.repoPath
|
||||
forpath a = inRepo $ liftIO . a . fromRawFilePath . Git.repoPath
|
||||
|
||||
{- With a duration, expires all unused files that are older.
|
||||
- With Nothing, expires *all* unused files. -}
|
||||
|
|
|
@ -113,7 +113,7 @@ distributionDownloadComplete d dest cleanup t
|
|||
| transferDirection t == Download = do
|
||||
debug ["finished downloading git-annex distribution"]
|
||||
maybe (failedupgrade "bad download") go
|
||||
=<< liftAnnex (withObjectLoc k fsckit)
|
||||
=<< liftAnnex (withObjectLoc k (fsckit . fromRawFilePath))
|
||||
| otherwise = cleanup
|
||||
where
|
||||
k = mkKey $ const $ distributionKey d
|
||||
|
|
|
@ -78,7 +78,7 @@ deleteCurrentRepository = dangerPage $ do
|
|||
sanityVerifierAForm $ SanityVerifier magicphrase
|
||||
case result of
|
||||
FormSuccess _ -> liftH $ do
|
||||
dir <- liftAnnex $ fromRepo Git.repoPath
|
||||
dir <- liftAnnex $ fromRawFilePath <$> fromRepo Git.repoPath
|
||||
liftIO $ removeAutoStartFile dir
|
||||
|
||||
{- Disable syncing to this repository, and all
|
||||
|
|
|
@ -238,7 +238,7 @@ checkAssociatedDirectory cfg (Just r) = do
|
|||
RepoGroupStandard gr -> case associatedDirectory repoconfig gr of
|
||||
Just d -> inRepo $ \g ->
|
||||
createDirectoryIfMissing True $
|
||||
Git.repoPath g </> d
|
||||
fromRawFilePath (Git.repoPath g) </> d
|
||||
Nothing -> noop
|
||||
_ -> noop
|
||||
|
||||
|
|
|
@ -173,7 +173,7 @@ getFinishLocalPairR = postFinishLocalPairR
|
|||
postFinishLocalPairR :: PairMsg -> Handler Html
|
||||
#ifdef WITH_PAIRING
|
||||
postFinishLocalPairR msg = promptSecret (Just msg) $ \_ secret -> do
|
||||
repodir <- liftH $ repoPath <$> liftAnnex gitRepo
|
||||
repodir <- liftH $ fromRawFilePath . repoPath <$> liftAnnex gitRepo
|
||||
liftIO $ setup repodir
|
||||
startLocalPairing PairAck (cleanup repodir) alert uuid "" secret
|
||||
where
|
||||
|
|
|
@ -94,7 +94,7 @@ storePrefs p = do
|
|||
unsetConfig (annexConfig "numcopies") -- deprecated
|
||||
setConfig (annexConfig "autoupgrade") (fromAutoUpgrade $ autoUpgrade p)
|
||||
unlessM ((==) <$> pure (autoStart p) <*> inAutoStartFile) $ do
|
||||
here <- fromRepo Git.repoPath
|
||||
here <- fromRawFilePath <$> fromRepo Git.repoPath
|
||||
liftIO $ if autoStart p
|
||||
then addAutoStartFile here
|
||||
else removeAutoStartFile here
|
||||
|
@ -118,5 +118,5 @@ postPreferencesR = page "Preferences" (Just Configuration) $ do
|
|||
|
||||
inAutoStartFile :: Annex Bool
|
||||
inAutoStartFile = do
|
||||
here <- liftIO . absPath =<< fromRepo Git.repoPath
|
||||
here <- liftIO . absPath . fromRawFilePath =<< fromRepo Git.repoPath
|
||||
any (`equalFilePath` here) <$> liftIO readAutoStartFile
|
||||
|
|
|
@ -118,7 +118,8 @@ getFileBrowserR = whenM openFileBrowser redirectBack
|
|||
- blocking the response to the browser on it. -}
|
||||
openFileBrowser :: Handler Bool
|
||||
openFileBrowser = do
|
||||
path <- liftIO . absPath =<< liftAnnex (fromRepo Git.repoPath)
|
||||
path <- liftIO . absPath . fromRawFilePath
|
||||
=<< liftAnnex (fromRepo Git.repoPath)
|
||||
#ifdef darwin_HOST_OS
|
||||
let cmd = "open"
|
||||
let p = proc cmd [path]
|
||||
|
|
|
@ -11,6 +11,7 @@ import Annex.Common
|
|||
import Utility.Hash
|
||||
|
||||
import qualified Data.ByteString as S
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
|
||||
{- Generates a keyName from an input string. Takes care of sanitizing it.
|
||||
- If it's not too long, the full string is used as the keyName.
|
||||
|
@ -21,11 +22,12 @@ genKeyName s
|
|||
-- Avoid making keys longer than the length of a SHA256 checksum.
|
||||
| bytelen > sha256len = encodeBS' $
|
||||
truncateFilePath (sha256len - md5len - 1) s' ++ "-" ++
|
||||
show (md5 (encodeBL s))
|
||||
show (md5 bl)
|
||||
| otherwise = encodeBS' s'
|
||||
where
|
||||
s' = preSanitizeKeyName s
|
||||
bytelen = length (decodeW8 s')
|
||||
bl = encodeBL s
|
||||
bytelen = fromIntegral $ L.length bl
|
||||
|
||||
sha256len = 64
|
||||
md5len = 32
|
||||
|
|
|
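The hunk above changes genKeyName to measure the candidate key name in bytes of the encoded string (via L.length) instead of decoding to a Word8 list. A minimal sketch of the truncate-and-digest rule it implements follows; the digest function is left abstract and the length is measured in Chars here, so keyNameFor is an illustrative assumption only, not git-annex's actual helper (which uses md5 from Utility.Hash on the sanitized, encoded name).

-- Sketch only: keep a short name as-is; for a long name, truncate it so
-- that name ++ "-" ++ digest still fits within the length of a SHA-256
-- hex checksum (64), where the digest is an MD5 hex checksum (32 chars).
keyNameFor :: (String -> String) -> String -> String
keyNameFor digest s
	| length s > sha256len = take (sha256len - md5len - 1) s ++ "-" ++ digest s
	| otherwise = s
  where
	sha256len = 64
	md5len = 32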
@ -38,7 +38,8 @@ keyValue source _ = do
|
|||
let f = contentLocation source
|
||||
stat <- liftIO $ getFileStatus f
|
||||
sz <- liftIO $ getFileSize' f stat
|
||||
relf <- getTopFilePath <$> inRepo (toTopFilePath $ keyFilename source)
|
||||
relf <- fromRawFilePath . getTopFilePath
|
||||
<$> inRepo (toTopFilePath $ toRawFilePath $ keyFilename source)
|
||||
return $ Just $ mkKey $ \k -> k
|
||||
{ keyName = genKeyName relf
|
||||
, keyVariety = WORMKey
|
||||
|
|
12
CHANGELOG
|
@ -18,14 +18,14 @@ git-annex (8.20191107) UNRELEASED; urgency=medium
|
|||
|
||||
git-annex (7.20191115) UNRELEASED; urgency=medium
|
||||
|
||||
* Sped up many git-annex commands that operate on many files, by
|
||||
using ByteStrings. Some commands like find got up to 60% faster.
|
||||
* Optimised processing of many files, especially by commands like find
|
||||
and whereis that only report on the state of the repository. Commands
|
||||
like get also sped up in cases where they have to check a lot of
|
||||
files but only transfer a few files. Speedups range from 30-100%.
|
||||
* Sped up many git-annex commands that operate on many files, by
|
||||
avoiding reserialization of keys.
|
||||
find got 7% faster; whereis 3% faster; and git-annex get when
|
||||
all files are already present got 5% faster
|
||||
* Sped up many git-annex commands that query the git-annex branch.
|
||||
In particular whereis got 1.5% faster.
|
||||
find is 7% faster; whereis is 3% faster; and git-annex get when
|
||||
all files are already present is 5% faster
|
||||
* Stop displaying rsync progress, and use git-annex's own progress display
|
||||
for local-to-local repo transfers.
|
||||
* git-lfs: The url provided to initremote/enableremote will now be
|
||||
|
|
|
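The changelog entries above credit the speedups to keeping file paths as ByteStrings (RawFilePath) instead of String, which is what the fromRawFilePath/toRawFilePath churn throughout this diff is about. Below is a minimal, self-contained sketch of that idea, not git-annex's actual helpers: the RawFilePath alias and the naive Char8 conversions are assumptions for illustration, whereas the real code goes through the filesystem encoding so non-UTF-8 file names survive the round trip.

{-# LANGUAGE OverloadedStrings #-}
module Main where

import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B8

-- A RawFilePath is just the undecoded bytes of a path.
type RawFilePath = ByteString

-- Naive conversions, adequate for ASCII paths in this sketch only.
toRawFilePath :: FilePath -> RawFilePath
toRawFilePath = B8.pack

fromRawFilePath :: RawFilePath -> FilePath
fromRawFilePath = B8.unpack

-- Joining paths as ByteStrings avoids allocating a fresh String for
-- every file a command such as `git annex find` walks over.
joinRaw :: RawFilePath -> RawFilePath -> RawFilePath
joinRaw d f = d <> "/" <> f

main :: IO ()
main = B8.putStrLn (joinRaw (toRawFilePath "/repo") "some/annexed/file")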
@ -102,7 +102,8 @@ batchFilesMatching :: BatchFormat -> (FilePath -> CommandStart) -> Annex ()
|
|||
batchFilesMatching fmt a = do
|
||||
matcher <- getMatcher
|
||||
batchStart fmt $ \f ->
|
||||
ifM (matcher $ MatchingFile $ FileInfo f f)
|
||||
let f' = toRawFilePath f
|
||||
in ifM (matcher $ MatchingFile $ FileInfo f' f')
|
||||
( a f
|
||||
, return Nothing
|
||||
)
|
||||
|
|
|
@ -33,6 +33,7 @@ import Annex.CurrentBranch
|
|||
import Annex.Content
|
||||
import Annex.InodeSentinal
|
||||
import qualified Database.Keys
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
withFilesInGit :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
withFilesInGit a l = seekActions $ prepFiltered a $
|
||||
|
@ -93,8 +94,8 @@ withPathContents a params = do
|
|||
, return [(p, takeFileName p)]
|
||||
)
|
||||
checkmatch matcher (f, relf) = matcher $ MatchingFile $ FileInfo
|
||||
{ currFile = f
|
||||
, matchFile = relf
|
||||
{ currFile = toRawFilePath f
|
||||
, matchFile = toRawFilePath relf
|
||||
}
|
||||
|
||||
withWords :: ([String] -> CommandSeek) -> CmdParams -> CommandSeek
|
||||
|
@ -130,7 +131,7 @@ withUnmodifiedUnlockedPointers a l = seekActions $
|
|||
isUnmodifiedUnlocked :: RawFilePath -> Annex Bool
|
||||
isUnmodifiedUnlocked f = catKeyFile f >>= \case
|
||||
Nothing -> return False
|
||||
Just k -> sameInodeCache (fromRawFilePath f) =<< Database.Keys.getInodeCaches k
|
||||
Just k -> sameInodeCache f =<< Database.Keys.getInodeCaches k
|
||||
|
||||
{- Finds files that may be modified. -}
|
||||
withFilesMaybeModified :: (RawFilePath -> CommandSeek) -> [WorkTreeItem] -> CommandSeek
|
||||
|
@ -169,7 +170,7 @@ withKeyOptions ko auto keyaction = withKeyOptions' ko auto mkkeyaction
|
|||
return $ \v@(k, ai) ->
|
||||
let i = case ai of
|
||||
ActionItemBranchFilePath (BranchFilePath _ topf) _ ->
|
||||
MatchingKey k (AssociatedFile $ Just $ toRawFilePath $ getTopFilePath topf)
|
||||
MatchingKey k (AssociatedFile $ Just $ getTopFilePath topf)
|
||||
_ -> MatchingKey k (AssociatedFile Nothing)
|
||||
in whenM (matcher i) $
|
||||
keyaction v
|
||||
|
@ -231,8 +232,7 @@ prepFiltered a fs = do
|
|||
map (process matcher) <$> fs
|
||||
where
|
||||
process matcher f =
|
||||
let f' = fromRawFilePath f
|
||||
in whenM (matcher $ MatchingFile $ FileInfo f' f') $ a f
|
||||
whenM (matcher $ MatchingFile $ FileInfo f f) $ a f
|
||||
|
||||
seekActions :: Annex [CommandSeek] -> Annex ()
|
||||
seekActions gen = sequence_ =<< gen
|
||||
|
@ -276,4 +276,4 @@ workTreeItems' (AllowHidden allowhidden) ps = do
|
|||
| otherwise = return False
|
||||
|
||||
notSymlink :: RawFilePath -> IO Bool
|
||||
notSymlink f = liftIO $ not . isSymbolicLink <$> getSymbolicLinkStatus (fromRawFilePath f)
|
||||
notSymlink f = liftIO $ not . isSymbolicLink <$> R.getSymbolicLinkStatus f
|
||||
|
|
|
@ -19,6 +19,7 @@ import Annex.Link
|
|||
import Annex.Tmp
|
||||
import Messages.Progress
|
||||
import Git.FilePath
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
cmd :: Command
|
||||
cmd = notBareRepo $
|
||||
|
@ -92,7 +93,7 @@ start file = do
|
|||
maybe go fixuppointer mk
|
||||
where
|
||||
go = ifAnnexed file addpresent add
|
||||
add = liftIO (catchMaybeIO $ getSymbolicLinkStatus (fromRawFilePath file)) >>= \case
|
||||
add = liftIO (catchMaybeIO $ R.getSymbolicLinkStatus file) >>= \case
|
||||
Nothing -> stop
|
||||
Just s
|
||||
| not (isRegularFile s) && not (isSymbolicLink s) -> stop
|
||||
|
@ -102,7 +103,7 @@ start file = do
|
|||
then next $ addFile file
|
||||
else perform file
|
||||
addpresent key =
|
||||
liftIO (catchMaybeIO $ getSymbolicLinkStatus $ fromRawFilePath file) >>= \case
|
||||
liftIO (catchMaybeIO $ R.getSymbolicLinkStatus file) >>= \case
|
||||
Just s | isSymbolicLink s -> fixuplink key
|
||||
_ -> add
|
||||
fixuplink key = starting "add" (ActionItemWorkTreeFile file) $ do
|
||||
|
@ -113,7 +114,7 @@ start file = do
|
|||
cleanup key =<< inAnnex key
|
||||
fixuppointer key = starting "add" (ActionItemWorkTreeFile file) $ do
|
||||
-- the pointer file is present, but not yet added to git
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath (fromRawFilePath file))
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath file)
|
||||
next $ addFile file
|
||||
|
||||
perform :: RawFilePath -> CommandPerform
|
||||
|
|
|
@ -12,7 +12,7 @@ import Logs.Config
|
|||
import Config
|
||||
import Git.Types (ConfigKey(..), fromConfigValue)
|
||||
|
||||
import qualified Data.ByteString as S
|
||||
import qualified Data.ByteString.Char8 as S8
|
||||
|
||||
cmd :: Command
|
||||
cmd = noMessages $ command "config" SectionSetup
|
||||
|
@ -65,5 +65,5 @@ seek (GetConfig ck) = commandAction $
|
|||
startingCustomOutput (ActionItemOther Nothing) $ do
|
||||
getGlobalConfig ck >>= \case
|
||||
Nothing -> return ()
|
||||
Just (ConfigValue v) -> liftIO $ S.putStrLn v
|
||||
Just (ConfigValue v) -> liftIO $ S8.putStrLn v
|
||||
next $ return True
|
||||
|
|
|
@ -9,6 +9,9 @@ module Command.ContentLocation where
|
|||
|
||||
import Command
|
||||
import Annex.Content
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
import qualified Data.ByteString.Char8 as B8
|
||||
|
||||
cmd :: Command
|
||||
cmd = noCommit $ noMessages $
|
||||
|
@ -20,10 +23,10 @@ cmd = noCommit $ noMessages $
|
|||
run :: () -> String -> Annex Bool
|
||||
run _ p = do
|
||||
let k = fromMaybe (giveup "bad key") $ deserializeKey p
|
||||
maybe (return False) (\f -> liftIO (putStrLn f) >> return True)
|
||||
maybe (return False) (\f -> liftIO (B8.putStrLn f) >> return True)
|
||||
=<< inAnnex' (pure True) Nothing check k
|
||||
where
|
||||
check f = ifM (liftIO (doesFileExist f))
|
||||
check f = ifM (liftIO (R.doesPathExist f))
|
||||
( return (Just f)
|
||||
, return Nothing
|
||||
)
|
||||
|
|
|
@ -90,7 +90,8 @@ fixupReq req@(Req {}) =
|
|||
v <- getAnnexLinkTarget' (toRawFilePath (getfile r)) False
|
||||
case parseLinkTargetOrPointer =<< v of
|
||||
Nothing -> return r
|
||||
Just k -> withObjectLoc k (pure . setfile r)
|
||||
Just k -> withObjectLoc k $
|
||||
pure . setfile r . fromRawFilePath
|
||||
_ -> return r
|
||||
|
||||
externalDiffer :: String -> [String] -> Differ
|
||||
|
|
|
@ -251,7 +251,7 @@ startExport :: Remote -> ExportHandle -> MVar FileUploaded -> MVar AllFilled ->
|
|||
startExport r db cvar allfilledvar ti = do
|
||||
ek <- exportKey (Git.LsTree.sha ti)
|
||||
stopUnless (notrecordedpresent ek) $
|
||||
starting ("export " ++ name r) (ActionItemOther (Just f)) $
|
||||
starting ("export " ++ name r) (ActionItemOther (Just (fromRawFilePath f))) $
|
||||
ifM (either (const False) id <$> tryNonAsync (checkPresentExport (exportActions r) (asKey ek) loc))
|
||||
( next $ cleanupExport r db ek loc False
|
||||
, do
|
||||
|
@ -259,9 +259,9 @@ startExport r db cvar allfilledvar ti = do
|
|||
performExport r db ek af (Git.LsTree.sha ti) loc allfilledvar
|
||||
)
|
||||
where
|
||||
loc = mkExportLocation (toRawFilePath f)
|
||||
loc = mkExportLocation f
|
||||
f = getTopFilePath (Git.LsTree.file ti)
|
||||
af = AssociatedFile (Just (toRawFilePath f))
|
||||
af = AssociatedFile (Just f)
|
||||
notrecordedpresent ek = (||)
|
||||
<$> liftIO (notElem loc <$> getExportedLocation db (asKey ek))
|
||||
-- If content was removed from the remote, the export db
|
||||
|
@ -314,17 +314,17 @@ startUnexport r db f shas = do
|
|||
eks <- forM (filter (/= nullSha) shas) exportKey
|
||||
if null eks
|
||||
then stop
|
||||
else starting ("unexport " ++ name r) (ActionItemOther (Just f')) $
|
||||
else starting ("unexport " ++ name r) (ActionItemOther (Just (fromRawFilePath f'))) $
|
||||
performUnexport r db eks loc
|
||||
where
|
||||
loc = mkExportLocation (toRawFilePath f')
|
||||
loc = mkExportLocation f'
|
||||
f' = getTopFilePath f
|
||||
|
||||
startUnexport' :: Remote -> ExportHandle -> TopFilePath -> ExportKey -> CommandStart
|
||||
startUnexport' r db f ek = starting ("unexport " ++ name r) (ActionItemOther (Just f')) $
|
||||
startUnexport' r db f ek = starting ("unexport " ++ name r) (ActionItemOther (Just (fromRawFilePath f'))) $
|
||||
performUnexport r db [ek] loc
|
||||
where
|
||||
loc = mkExportLocation (toRawFilePath f')
|
||||
loc = mkExportLocation f'
|
||||
f' = getTopFilePath f
|
||||
|
||||
-- Unlike a usual drop from a repository, this does not check that
|
||||
|
@ -368,15 +368,14 @@ startRecoverIncomplete r db sha oldf
|
|||
liftIO $ removeExportedLocation db (asKey ek) oldloc
|
||||
performUnexport r db [ek] loc
|
||||
where
|
||||
oldloc = mkExportLocation (toRawFilePath oldf')
|
||||
oldf' = getTopFilePath oldf
|
||||
oldloc = mkExportLocation $ getTopFilePath oldf
|
||||
|
||||
startMoveToTempName :: Remote -> ExportHandle -> TopFilePath -> ExportKey -> CommandStart
|
||||
startMoveToTempName r db f ek = starting ("rename " ++ name r)
|
||||
(ActionItemOther $ Just $ f' ++ " -> " ++ fromRawFilePath (fromExportLocation tmploc))
|
||||
(ActionItemOther $ Just $ fromRawFilePath f' ++ " -> " ++ fromRawFilePath (fromExportLocation tmploc))
|
||||
(performRename r db ek loc tmploc)
|
||||
where
|
||||
loc = mkExportLocation (toRawFilePath f')
|
||||
loc = mkExportLocation f'
|
||||
f' = getTopFilePath f
|
||||
tmploc = exportTempName ek
|
||||
|
||||
|
@ -384,10 +383,10 @@ startMoveFromTempName :: Remote -> ExportHandle -> ExportKey -> TopFilePath -> C
|
|||
startMoveFromTempName r db ek f = do
|
||||
let tmploc = exportTempName ek
|
||||
stopUnless (liftIO $ elem tmploc <$> getExportedLocation db (asKey ek)) $
|
||||
starting ("rename " ++ name r) (ActionItemOther (Just (fromRawFilePath (fromExportLocation tmploc) ++ " -> " ++ f'))) $
|
||||
starting ("rename " ++ name r) (ActionItemOther (Just (fromRawFilePath (fromExportLocation tmploc) ++ " -> " ++ fromRawFilePath f'))) $
|
||||
performRename r db ek tmploc loc
|
||||
where
|
||||
loc = mkExportLocation (toRawFilePath f')
|
||||
loc = mkExportLocation f'
|
||||
f' = getTopFilePath f
|
||||
|
||||
performRename :: Remote -> ExportHandle -> ExportKey -> ExportLocation -> ExportLocation -> CommandPerform
|
||||
|
@ -469,7 +468,7 @@ filterPreferredContent r tree = logExportExcluded (uuid r) $ \logwriter -> do
|
|||
-- Match filename relative to the
|
||||
-- top of the tree.
|
||||
let af = AssociatedFile $ Just $
|
||||
toRawFilePath $ getTopFilePath topf
|
||||
getTopFilePath topf
|
||||
let mi = MatchingKey k af
|
||||
ifM (checkMatcher' matcher mi mempty)
|
||||
( return (Just ti)
|
||||
|
|
|
@ -74,7 +74,7 @@ start o file key =
|
|||
|
||||
startKeys :: FindOptions -> (Key, ActionItem) -> CommandStart
|
||||
startKeys o (key, ActionItemBranchFilePath (BranchFilePath _ topf) _) =
|
||||
start o (toRawFilePath (getTopFilePath topf)) key
|
||||
start o (getTopFilePath topf) key
|
||||
startKeys _ _ = stop
|
||||
|
||||
showFormatted :: Maybe Utility.Format.Format -> S.ByteString -> [(String, String)] -> Annex ()
|
||||
|
@ -93,8 +93,8 @@ keyVars key =
|
|||
, ("bytesize", size show)
|
||||
, ("humansize", size $ roughSize storageUnits True)
|
||||
, ("keyname", decodeBS $ fromKey keyName key)
|
||||
, ("hashdirlower", hashDirLower def key)
|
||||
, ("hashdirmixed", hashDirMixed def key)
|
||||
, ("hashdirlower", fromRawFilePath $ hashDirLower def key)
|
||||
, ("hashdirmixed", fromRawFilePath $ hashDirMixed def key)
|
||||
, ("mtime", whenavail show $ fromKey keyMtime key)
|
||||
]
|
||||
where
|
||||
|
|
|
@ -53,11 +53,11 @@ start fixwhat file key = do
|
|||
where
|
||||
fixby = starting "fix" (mkActionItem (key, file))
|
||||
fixthin = do
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
stopUnless (isUnmodified key (fromRawFilePath file) <&&> isUnmodified key obj) $ do
|
||||
obj <- calcRepo (gitAnnexLocation key)
|
||||
stopUnless (isUnmodified key file <&&> isUnmodified key obj) $ do
|
||||
thin <- annexThin <$> Annex.getGitConfig
|
||||
fs <- liftIO $ catchMaybeIO $ R.getFileStatus file
|
||||
os <- liftIO $ catchMaybeIO $ getFileStatus obj
|
||||
os <- liftIO $ catchMaybeIO $ R.getFileStatus obj
|
||||
case (linkCount <$> fs, linkCount <$> os, thin) of
|
||||
(Just 1, Just 1, True) ->
|
||||
fixby $ makeHardLink file key
|
||||
|
@ -65,15 +65,16 @@ start fixwhat file key = do
|
|||
fixby $ breakHardLink file key obj
|
||||
_ -> stop
|
||||
|
||||
breakHardLink :: RawFilePath -> Key -> FilePath -> CommandPerform
|
||||
breakHardLink :: RawFilePath -> Key -> RawFilePath -> CommandPerform
|
||||
breakHardLink file key obj = do
|
||||
replaceFile (fromRawFilePath file) $ \tmp -> do
|
||||
mode <- liftIO $ catchMaybeIO $ fileMode <$> R.getFileStatus file
|
||||
unlessM (checkedCopyFile key obj tmp mode) $
|
||||
let obj' = fromRawFilePath obj
|
||||
unlessM (checkedCopyFile key obj' tmp mode) $
|
||||
error "unable to break hard link"
|
||||
thawContent tmp
|
||||
modifyContent obj $ freezeContent obj
|
||||
Database.Keys.storeInodeCaches key [fromRawFilePath file]
|
||||
modifyContent obj' $ freezeContent obj'
|
||||
Database.Keys.storeInodeCaches key [file]
|
||||
next $ return True
|
||||
|
||||
makeHardLink :: RawFilePath -> Key -> CommandPerform
|
||||
|
|
|
@ -223,7 +223,7 @@ fixLink key file = do
|
|||
- in this repository only. -}
|
||||
verifyLocationLog :: Key -> KeyStatus -> ActionItem -> Annex Bool
|
||||
verifyLocationLog key keystatus ai = do
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
obj <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
present <- if isKeyUnlockedThin keystatus
|
||||
then liftIO (doesFileExist obj)
|
||||
else inAnnex key
|
||||
|
@ -313,7 +313,7 @@ verifyRequiredContent _ _ = return True
|
|||
verifyAssociatedFiles :: Key -> KeyStatus -> RawFilePath -> Annex Bool
|
||||
verifyAssociatedFiles key keystatus file = do
|
||||
when (isKeyUnlockedThin keystatus) $ do
|
||||
f <- inRepo $ toTopFilePath $ fromRawFilePath file
|
||||
f <- inRepo $ toTopFilePath file
|
||||
afs <- Database.Keys.getAssociatedFiles key
|
||||
unless (getTopFilePath f `elem` map getTopFilePath afs) $
|
||||
Database.Keys.addAssociatedFile key f
|
||||
|
@ -332,11 +332,11 @@ verifyWorkTree key file = do
|
|||
ifM (annexThin <$> Annex.getGitConfig)
|
||||
( void $ linkFromAnnex key tmp mode
|
||||
, do
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
obj <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
void $ checkedCopyFile key obj tmp mode
|
||||
thawContent tmp
|
||||
)
|
||||
Database.Keys.storeInodeCaches key [fromRawFilePath file]
|
||||
Database.Keys.storeInodeCaches key [file]
|
||||
_ -> return ()
|
||||
return True
|
||||
|
||||
|
@ -349,8 +349,8 @@ checkKeySize :: Key -> KeyStatus -> ActionItem -> Annex Bool
|
|||
checkKeySize _ KeyUnlockedThin _ = return True
|
||||
checkKeySize key _ ai = do
|
||||
file <- calcRepo $ gitAnnexLocation key
|
||||
ifM (liftIO $ doesFileExist file)
|
||||
( checkKeySizeOr badContent key file ai
|
||||
ifM (liftIO $ R.doesPathExist file)
|
||||
( checkKeySizeOr badContent key (fromRawFilePath file) ai
|
||||
, return True
|
||||
)
|
||||
|
||||
|
@ -417,10 +417,10 @@ checkKeyUpgrade _ _ _ (AssociatedFile Nothing) =
|
|||
-}
|
||||
checkBackend :: Backend -> Key -> KeyStatus -> AssociatedFile -> Annex Bool
|
||||
checkBackend backend key keystatus afile = do
|
||||
content <- calcRepo $ gitAnnexLocation key
|
||||
content <- calcRepo (gitAnnexLocation key)
|
||||
ifM (pure (isKeyUnlockedThin keystatus) <&&> (not <$> isUnmodified key content))
|
||||
( nocheck
|
||||
, checkBackendOr badContent backend key content ai
|
||||
, checkBackendOr badContent backend key (fromRawFilePath content) ai
|
||||
)
|
||||
where
|
||||
nocheck = return True
|
||||
|
@ -670,8 +670,8 @@ isKeyUnlockedThin KeyMissing = False
|
|||
getKeyStatus :: Key -> Annex KeyStatus
|
||||
getKeyStatus key = catchDefaultIO KeyMissing $ do
|
||||
afs <- not . null <$> Database.Keys.getAssociatedFiles key
|
||||
obj <- calcRepo $ gitAnnexLocation key
|
||||
multilink <- ((> 1) . linkCount <$> liftIO (getFileStatus obj))
|
||||
obj <- calcRepo (gitAnnexLocation key)
|
||||
multilink <- ((> 1) . linkCount <$> liftIO (R.getFileStatus obj))
|
||||
return $ if multilink && afs
|
||||
then KeyUnlockedThin
|
||||
else KeyPresent
|
||||
|
|
|
@ -97,7 +97,7 @@ duplicateModeParser =
|
|||
|
||||
seek :: ImportOptions -> CommandSeek
|
||||
seek o@(LocalImportOptions {}) = startConcurrency commandStages $ do
|
||||
repopath <- liftIO . absPath =<< fromRepo Git.repoPath
|
||||
repopath <- liftIO . absPath . fromRawFilePath =<< fromRepo Git.repoPath
|
||||
inrepops <- liftIO $ filter (dirContains repopath) <$> mapM absPath (importFiles o)
|
||||
unless (null inrepops) $ do
|
||||
giveup $ "cannot import files from inside the working tree (use git annex add instead): " ++ unwords inrepops
|
||||
|
@ -110,7 +110,7 @@ seek o@(RemoteImportOptions {}) = startConcurrency commandStages $ do
|
|||
giveup "That remote does not support imports."
|
||||
subdir <- maybe
|
||||
(pure Nothing)
|
||||
(Just <$$> inRepo . toTopFilePath)
|
||||
(Just <$$> inRepo . toTopFilePath . toRawFilePath)
|
||||
(importToSubDir o)
|
||||
seekRemote r (importToBranch o) subdir
|
||||
|
||||
|
@ -181,7 +181,7 @@ startLocal largematcher mode (srcfile, destfile) =
|
|||
-- weakly the same as the originally locked down file's
|
||||
-- inode cache. (Since the file may have been copied,
|
||||
-- its inodes may not be the same.)
|
||||
newcache <- withTSDelta $ liftIO . genInodeCache destfile
|
||||
newcache <- withTSDelta $ liftIO . genInodeCache (toRawFilePath destfile)
|
||||
let unchanged = case (newcache, inodeCache (keySource ld)) of
|
||||
(_, Nothing) -> True
|
||||
(Just newc, Just c) | compareWeak c newc -> True
|
||||
|
|
|
@ -566,7 +566,7 @@ getDirStatInfo o dir = do
|
|||
where
|
||||
initial = (emptyKeyInfo, emptyKeyInfo, emptyNumCopiesStats, M.empty)
|
||||
update matcher fast key file vs@(presentdata, referenceddata, numcopiesstats, repodata) =
|
||||
ifM (matcher $ MatchingFile $ FileInfo file' file')
|
||||
ifM (matcher $ MatchingFile $ FileInfo file file)
|
||||
( do
|
||||
!presentdata' <- ifM (inAnnex key)
|
||||
( return $ addKey key presentdata
|
||||
|
@ -577,13 +577,11 @@ getDirStatInfo o dir = do
|
|||
then return (numcopiesstats, repodata)
|
||||
else do
|
||||
locs <- Remote.keyLocations key
|
||||
nc <- updateNumCopiesStats file' numcopiesstats locs
|
||||
nc <- updateNumCopiesStats (fromRawFilePath file) numcopiesstats locs
|
||||
return (nc, updateRepoData key locs repodata)
|
||||
return $! (presentdata', referenceddata', numcopiesstats', repodata')
|
||||
, return vs
|
||||
)
|
||||
where
|
||||
file' = fromRawFilePath file
|
||||
|
||||
getTreeStatInfo :: InfoOptions -> Git.Ref -> Annex (Maybe StatInfo)
|
||||
getTreeStatInfo o r = do
|
||||
|
|
|
@ -20,6 +20,7 @@ import qualified Database.Keys
|
|||
import Annex.Ingest
|
||||
import Logs.Location
|
||||
import Git.FilePath
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
cmd :: Command
|
||||
cmd = withGlobalOptions [jsonOptions, annexedMatchingOptions] $
|
||||
|
@ -43,7 +44,7 @@ startNew file key = ifM (isJust <$> isAnnexLink file)
|
|||
| key' == key = cont
|
||||
| otherwise = errorModified
|
||||
go Nothing =
|
||||
ifM (isUnmodified key (fromRawFilePath file))
|
||||
ifM (isUnmodified key file)
|
||||
( cont
|
||||
, ifM (Annex.getState Annex.force)
|
||||
( cont
|
||||
|
@ -56,24 +57,25 @@ performNew :: RawFilePath -> Key -> CommandPerform
|
|||
performNew file key = do
|
||||
lockdown =<< calcRepo (gitAnnexLocation key)
|
||||
addLink (fromRawFilePath file) key
|
||||
=<< withTSDelta (liftIO . genInodeCache' file)
|
||||
=<< withTSDelta (liftIO . genInodeCache file)
|
||||
next $ cleanupNew file key
|
||||
where
|
||||
lockdown obj = do
|
||||
ifM (isUnmodified key obj)
|
||||
( breakhardlink obj
|
||||
, repopulate obj
|
||||
, repopulate (fromRawFilePath obj)
|
||||
)
|
||||
whenM (liftIO $ doesFileExist obj) $
|
||||
freezeContent obj
|
||||
whenM (liftIO $ R.doesPathExist obj) $
|
||||
freezeContent $ fromRawFilePath obj
|
||||
|
||||
-- It's ok if the file is hard linked to obj, but if some other
|
||||
-- associated file is, we need to break that link to lock down obj.
|
||||
breakhardlink obj = whenM (catchBoolIO $ (> 1) . linkCount <$> liftIO (getFileStatus obj)) $ do
|
||||
mfc <- withTSDelta (liftIO . genInodeCache' file)
|
||||
breakhardlink obj = whenM (catchBoolIO $ (> 1) . linkCount <$> liftIO (R.getFileStatus obj)) $ do
|
||||
mfc <- withTSDelta (liftIO . genInodeCache file)
|
||||
unlessM (sameInodeCache obj (maybeToList mfc)) $ do
|
||||
modifyContent obj $ replaceFile obj $ \tmp -> do
|
||||
unlessM (checkedCopyFile key obj tmp Nothing) $
|
||||
let obj' = fromRawFilePath obj
|
||||
modifyContent obj' $ replaceFile obj' $ \tmp -> do
|
||||
unlessM (checkedCopyFile key obj' tmp Nothing) $
|
||||
giveup "unable to lock file"
|
||||
Database.Keys.storeInodeCaches key [obj]
|
||||
|
||||
|
@ -86,7 +88,7 @@ performNew file key = do
|
|||
liftIO $ nukeFile obj
|
||||
case mfile of
|
||||
Just unmodified ->
|
||||
unlessM (checkedCopyFile key unmodified obj Nothing)
|
||||
unlessM (checkedCopyFile key (fromRawFilePath unmodified) obj Nothing)
|
||||
lostcontent
|
||||
Nothing -> lostcontent
|
||||
|
||||
|
@ -94,7 +96,7 @@ performNew file key = do
|
|||
|
||||
cleanupNew :: RawFilePath -> Key -> CommandCleanup
|
||||
cleanupNew file key = do
|
||||
Database.Keys.removeAssociatedFile key =<< inRepo (toTopFilePath (fromRawFilePath file))
|
||||
Database.Keys.removeAssociatedFile key =<< inRepo (toTopFilePath file)
|
||||
return True
|
||||
|
||||
startOld :: RawFilePath -> CommandStart
|
||||
|
|
|
@ -199,7 +199,7 @@ compareChanges format changes = concatMap diff changes
|
|||
getKeyLog :: Key -> [CommandParam] -> Annex ([RefChange], IO Bool)
|
||||
getKeyLog key os = do
|
||||
top <- fromRepo Git.repoPath
|
||||
p <- liftIO $ relPathCwdToFile top
|
||||
p <- liftIO $ relPathCwdToFile $ fromRawFilePath top
|
||||
config <- Annex.getGitConfig
|
||||
let logfile = p </> fromRawFilePath (locationLogFile config key)
|
||||
getGitLog [logfile] (Param "--remove-empty" : os)
|
||||
|
|
|
@ -176,7 +176,8 @@ absRepo reference r
|
|||
| Git.repoIsUrl reference = return $ Git.Construct.localToUrl reference r
|
||||
| Git.repoIsUrl r = return r
|
||||
| otherwise = liftIO $ do
|
||||
r' <- Git.Construct.fromAbsPath =<< absPath (Git.repoPath r)
|
||||
r' <- Git.Construct.fromAbsPath
|
||||
=<< absPath (fromRawFilePath (Git.repoPath r))
|
||||
r'' <- safely $ flip Annex.eval Annex.gitRepo =<< Annex.new r'
|
||||
return (fromMaybe r' r'')
|
||||
|
||||
|
@ -234,7 +235,7 @@ tryScan r
|
|||
where
|
||||
remotecmd = "sh -c " ++ shellEscape
|
||||
(cddir ++ " && " ++ "git config --null --list")
|
||||
dir = Git.repoPath r
|
||||
dir = fromRawFilePath $ Git.repoPath r
|
||||
cddir
|
||||
| "/~" `isPrefixOf` dir =
|
||||
let (userhome, reldir) = span (/= '/') (drop 1 dir)
|
||||
|
|
|
@ -86,7 +86,7 @@ perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbacken
|
|||
content <- calcRepo $ gitAnnexLocation oldkey
|
||||
let source = KeySource
|
||||
{ keyFilename = fromRawFilePath file
|
||||
, contentLocation = content
|
||||
, contentLocation = fromRawFilePath content
|
||||
, inodeCache = Nothing
|
||||
}
|
||||
v <- genKey source nullMeterUpdate (Just newbackend)
|
||||
|
|
|
@ -137,7 +137,8 @@ send ups fs = do
|
|||
mk <- lookupFile f
|
||||
case mk of
|
||||
Nothing -> noop
|
||||
Just k -> withObjectLoc k (addlist (fromRawFilePath f))
|
||||
Just k -> withObjectLoc k $
|
||||
addlist f . fromRawFilePath
|
||||
liftIO $ hClose h
|
||||
|
||||
serverkey <- uftpKey
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
- Licensed under the GNU AGPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Command.PostReceive where
|
||||
|
||||
import Command
|
||||
|
|
|
@ -83,12 +83,12 @@ linkKey file oldkey newkey = ifM (isJust <$> isAnnexLink file)
|
|||
- unlocked file, which would leave the new key unlocked
|
||||
- and vulnerable to corruption. -}
|
||||
( getViaTmpFromDisk RetrievalAllKeysSecure DefaultVerify newkey $ \tmp -> unVerified $ do
|
||||
oldobj <- calcRepo (gitAnnexLocation oldkey)
|
||||
oldobj <- fromRawFilePath <$> calcRepo (gitAnnexLocation oldkey)
|
||||
isJust <$> linkOrCopy' (return True) newkey oldobj tmp Nothing
|
||||
, do
|
||||
{- The file being rekeyed is itself an unlocked file; if
|
||||
- it's hard linked to the old key, that link must be broken. -}
|
||||
oldobj <- calcRepo (gitAnnexLocation oldkey)
|
||||
oldobj <- fromRawFilePath <$> calcRepo (gitAnnexLocation oldkey)
|
||||
v <- tryNonAsync $ do
|
||||
st <- liftIO $ R.getFileStatus file
|
||||
when (linkCount st > 1) $ do
|
||||
|
@ -97,7 +97,7 @@ linkKey file oldkey newkey = ifM (isJust <$> isAnnexLink file)
|
|||
unlessM (checkedCopyFile oldkey oldobj tmp Nothing) $
|
||||
error "can't lock old key"
|
||||
thawContent tmp
|
||||
ic <- withTSDelta (liftIO . genInodeCache' file)
|
||||
ic <- withTSDelta (liftIO . genInodeCache file)
|
||||
case v of
|
||||
Left e -> do
|
||||
warning (show e)
|
||||
|
@ -123,7 +123,7 @@ cleanup file oldkey newkey = do
|
|||
writePointerFile file newkey mode
|
||||
stagePointerFile file mode =<< hashPointerFile newkey
|
||||
Database.Keys.removeAssociatedFile oldkey
|
||||
=<< inRepo (toTopFilePath (fromRawFilePath file))
|
||||
=<< inRepo (toTopFilePath file)
|
||||
)
|
||||
whenM (inAnnex newkey) $
|
||||
logStatus newkey InfoPresent
|
||||
|
|
|
@ -24,7 +24,7 @@ seek = withNothing (commandAction start)
|
|||
start :: CommandStart
|
||||
start = starting "resolvemerge" (ActionItemOther Nothing) $ do
|
||||
us <- fromMaybe nobranch <$> inRepo Git.Branch.current
|
||||
d <- fromRepo Git.localGitDir
|
||||
d <- fromRawFilePath <$> fromRepo Git.localGitDir
|
||||
let merge_head = d </> "MERGE_HEAD"
|
||||
them <- fromMaybe (error nomergehead) . extractSha
|
||||
<$> liftIO (readFile merge_head)
|
||||
|
|
|
@ -70,7 +70,7 @@ smudge file = do
|
|||
case parseLinkTargetOrPointerLazy b of
|
||||
Nothing -> noop
|
||||
Just k -> do
|
||||
topfile <- inRepo (toTopFilePath file)
|
||||
topfile <- inRepo (toTopFilePath (toRawFilePath file))
|
||||
Database.Keys.addAssociatedFile k topfile
|
||||
void $ smudgeLog k topfile
|
||||
liftIO $ L.putStr b
|
||||
|
@ -108,7 +108,7 @@ clean file = do
|
|||
-- annexed and is unmodified.
|
||||
case oldkey of
|
||||
Nothing -> doingest oldkey
|
||||
Just ko -> ifM (isUnmodifiedCheap ko file)
|
||||
Just ko -> ifM (isUnmodifiedCheap ko (toRawFilePath file))
|
||||
( liftIO $ emitPointer ko
|
||||
, doingest oldkey
|
||||
)
|
||||
|
@ -141,7 +141,8 @@ clean file = do
|
|||
-- git diff can run the clean filter on files outside the
|
||||
-- repository; can't annex those
|
||||
fileoutsiderepo = do
|
||||
repopath <- liftIO . absPath =<< fromRepo Git.repoPath
|
||||
repopath <- liftIO . absPath . fromRawFilePath
|
||||
=<< fromRepo Git.repoPath
|
||||
filepath <- liftIO $ absPath file
|
||||
return $ not $ dirContains repopath filepath
|
||||
|
||||
|
@ -173,7 +174,7 @@ shouldAnnex file moldkey = ifM (annexGitAddToAnnex <$> Annex.getGitConfig)
|
|||
Just _ -> return True
|
||||
Nothing -> checkknowninode
|
||||
|
||||
checkknowninode = withTSDelta (liftIO . genInodeCache file) >>= \case
|
||||
checkknowninode = withTSDelta (liftIO . genInodeCache (toRawFilePath file)) >>= \case
|
||||
Nothing -> pure False
|
||||
Just ic -> Database.Keys.isInodeKnown ic =<< sentinalStatus
|
||||
|
||||
|
@ -190,7 +191,7 @@ emitPointer = S.putStr . formatPointer
|
|||
getMoveRaceRecovery :: Key -> RawFilePath -> Annex ()
|
||||
getMoveRaceRecovery k file = void $ tryNonAsync $
|
||||
whenM (inAnnex k) $ do
|
||||
obj <- toRawFilePath <$> calcRepo (gitAnnexLocation k)
|
||||
obj <- calcRepo (gitAnnexLocation k)
|
||||
-- Cannot restage because git add is running and has
|
||||
-- the index locked.
|
||||
populatePointerFile (Restage False) k obj file >>= \case
|
||||
|
@ -204,9 +205,9 @@ update = do
|
|||
|
||||
updateSmudged :: Restage -> Annex ()
|
||||
updateSmudged restage = streamSmudged $ \k topf -> do
|
||||
f <- toRawFilePath <$> fromRepo (fromTopFilePath topf)
|
||||
f <- fromRepo (fromTopFilePath topf)
|
||||
whenM (inAnnex k) $ do
|
||||
obj <- toRawFilePath <$> calcRepo (gitAnnexLocation k)
|
||||
obj <- calcRepo (gitAnnexLocation k)
|
||||
unlessM (isJust <$> populatePointerFile restage k obj f) $
|
||||
liftIO (isPointerFile f) >>= \case
|
||||
Just k' | k' == k -> toplevelWarning False $
|
||||
|
|
|
@ -61,6 +61,6 @@ displayStatus (Renamed _ _) = noop
|
|||
displayStatus s = do
|
||||
let c = statusChar s
|
||||
absf <- fromRepo $ fromTopFilePath (statusFile s)
|
||||
f <- liftIO $ relPathCwdToFile absf
|
||||
f <- liftIO $ relPathCwdToFile $ fromRawFilePath absf
|
||||
unlessM (showFullJSON $ JSONChunk [("status", [c]), ("file", f)]) $
|
||||
liftIO $ putStrLn $ [c] ++ " " ++ f
|
||||
|
|
|
@ -226,7 +226,7 @@ seek' o = do
|
|||
- of the repo. This also means that sync always acts on all files in the
|
||||
- repository, not just on a subdirectory. -}
|
||||
prepMerge :: Annex ()
|
||||
prepMerge = Annex.changeDirectory =<< fromRepo Git.repoPath
|
||||
prepMerge = Annex.changeDirectory . fromRawFilePath =<< fromRepo Git.repoPath
|
||||
|
||||
mergeConfig :: [Git.Merge.MergeConfig]
|
||||
mergeConfig =
|
||||
|
@ -409,7 +409,7 @@ importRemote o mergeconfig remote currbranch
|
|||
let branch = Git.Ref b
|
||||
let subdir = if null s
|
||||
then Nothing
|
||||
else Just (asTopFilePath s)
|
||||
else Just (asTopFilePath (toRawFilePath s))
|
||||
Command.Import.seekRemote remote branch subdir
|
||||
void $ mergeRemote remote currbranch mergeconfig
|
||||
(resolveMergeOverride o)
|
||||
|
@ -468,7 +468,7 @@ pushRemote o remote (Just branch, _) = stopUnless (pure (pushOption o) <&&> need
|
|||
( liftIO $ do
|
||||
p <- readProgramFile
|
||||
boolSystem' p [Param "post-receive"]
|
||||
(\cp -> cp { cwd = Just wt })
|
||||
(\cp -> cp { cwd = Just (fromRawFilePath wt) })
|
||||
, return True
|
||||
)
|
||||
where
|
||||
|
|
|
@ -168,7 +168,7 @@ test st r k = catMaybes
|
|||
get
|
||||
, Just $ check "fsck downloaded object" fsck
|
||||
, Just $ check "retrieveKeyFile resume from 33%" $ do
|
||||
loc <- Annex.calcRepo (gitAnnexLocation k)
|
||||
loc <- fromRawFilePath <$> Annex.calcRepo (gitAnnexLocation k)
|
||||
tmp <- prepTmp k
|
||||
partial <- liftIO $ bracket (openBinaryFile loc ReadMode) hClose $ \h -> do
|
||||
sz <- hFileSize h
|
||||
|
@ -184,7 +184,7 @@ test st r k = catMaybes
|
|||
get
|
||||
, Just $ check "fsck downloaded object" fsck
|
||||
, Just $ check "retrieveKeyFile resume from end" $ do
|
||||
loc <- Annex.calcRepo (gitAnnexLocation k)
|
||||
loc <- fromRawFilePath <$> Annex.calcRepo (gitAnnexLocation k)
|
||||
tmp <- prepTmp k
|
||||
void $ liftIO $ copyFileExternal CopyAllMetaData loc tmp
|
||||
lockContentForRemoval k removeAnnex
|
||||
|
@ -240,7 +240,7 @@ testExportTree st (Just _) ea k1 k2 =
|
|||
check desc a = testCase desc $
|
||||
Annex.eval st (Annex.setOutput QuietOutput >> a) @? "failed"
|
||||
storeexport k = do
|
||||
loc <- Annex.calcRepo (gitAnnexLocation k)
|
||||
loc <- fromRawFilePath <$> Annex.calcRepo (gitAnnexLocation k)
|
||||
Remote.storeExport ea loc k testexportlocation nullMeterUpdate
|
||||
retrieveexport k = withTmpFile "exported" $ \tmp h -> do
|
||||
liftIO $ hClose h
|
||||
|
|
|
@ -28,25 +28,25 @@ seek ps = (withFilesInGit $ commandAction . whenAnnexed start) =<< workTreeItems
|
|||
start :: RawFilePath -> Key -> CommandStart
|
||||
start file key = stopUnless (inAnnex key) $
|
||||
starting "unannex" (mkActionItem (key, file)) $
|
||||
perform (fromRawFilePath file) key
|
||||
perform file key
|
||||
|
||||
perform :: FilePath -> Key -> CommandPerform
|
||||
perform :: RawFilePath -> Key -> CommandPerform
|
||||
perform file key = do
|
||||
liftIO $ removeFile file
|
||||
liftIO $ removeFile (fromRawFilePath file)
|
||||
inRepo $ Git.Command.run
|
||||
[ Param "rm"
|
||||
, Param "--cached"
|
||||
, Param "--force"
|
||||
, Param "--quiet"
|
||||
, Param "--"
|
||||
, File file
|
||||
, File (fromRawFilePath file)
|
||||
]
|
||||
next $ cleanup file key
|
||||
|
||||
cleanup :: FilePath -> Key -> CommandCleanup
|
||||
cleanup :: RawFilePath -> Key -> CommandCleanup
|
||||
cleanup file key = do
|
||||
Database.Keys.removeAssociatedFile key =<< inRepo (toTopFilePath file)
|
||||
src <- calcRepo $ gitAnnexLocation key
|
||||
src <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
|
||||
ifM (Annex.getState Annex.fast)
|
||||
( do
|
||||
-- Only make a hard link if the annexed file does not
|
||||
|
@ -61,11 +61,12 @@ cleanup file key = do
|
|||
, copyfrom src
|
||||
)
|
||||
where
|
||||
file' = fromRawFilePath file
|
||||
copyfrom src =
|
||||
thawContent file `after` liftIO (copyFileExternal CopyAllMetaData src file)
|
||||
thawContent file' `after` liftIO (copyFileExternal CopyAllMetaData src file')
|
||||
hardlinkfrom src =
|
||||
-- creating a hard link could fail; fall back to copying
|
||||
ifM (liftIO $ catchBoolIO $ createLink src file >> return True)
|
||||
ifM (liftIO $ catchBoolIO $ createLink src file' >> return True)
|
||||
( return True
|
||||
, copyfrom src
|
||||
)
|
||||
|
|
|
@ -51,7 +51,7 @@ perform p = do
|
|||
-- Get the reversed diff that needs to be applied to undo.
|
||||
(diff, cleanup) <- inRepo $
|
||||
diffLog [Param "-R", Param "--", Param p]
|
||||
top <- inRepo $ toTopFilePath p
|
||||
top <- inRepo $ toTopFilePath $ toRawFilePath p
|
||||
let diff' = filter (`isDiffOf` top) diff
|
||||
liftIO $ streamUpdateIndex g (map stageDiffTreeItem diff')
|
||||
|
||||
|
@ -59,7 +59,8 @@ perform p = do
|
|||
-- and then any adds. This order is necessary to handle eg, removing
|
||||
-- a directory and replacing it with a file.
|
||||
let (removals, adds) = partition (\di -> dstsha di == nullSha) diff'
|
||||
let mkrel di = liftIO $ relPathCwdToFile $ fromTopFilePath (file di) g
|
||||
let mkrel di = liftIO $ relPathCwdToFile $ fromRawFilePath $
|
||||
fromTopFilePath (file di) g
|
||||
|
||||
forM_ removals $ \di -> do
|
||||
f <- mkrel di
|
||||
|
|
|
@ -17,6 +17,7 @@ import qualified Database.Keys
|
|||
import Annex.Content
|
||||
import Annex.Init
|
||||
import Utility.FileMode
|
||||
import qualified Utility.RawFilePath as R
|
||||
|
||||
cmd :: Command
|
||||
cmd = addCheck check $
|
||||
|
@ -29,7 +30,7 @@ check = do
|
|||
b <- current_branch
|
||||
when (b == Annex.Branch.name) $ giveup $
|
||||
"cannot uninit when the " ++ Git.fromRef b ++ " branch is checked out"
|
||||
top <- fromRepo Git.repoPath
|
||||
top <- fromRawFilePath <$> fromRepo Git.repoPath
|
||||
currdir <- liftIO getCurrentDirectory
|
||||
whenM ((/=) <$> liftIO (absPath top) <*> liftIO (absPath currdir)) $
|
||||
giveup "can only run uninit from the top of the git repository"
|
||||
|
@ -117,5 +118,5 @@ removeUnannexed = go []
|
|||
, go (k:c) ks
|
||||
)
|
||||
enoughlinks f = catchBoolIO $ do
|
||||
s <- getFileStatus f
|
||||
s <- R.getFileStatus f
|
||||
return $ linkCount s > 1
|
||||
|
|
|
@ -57,5 +57,5 @@ perform dest key = do
|
|||
cleanup :: RawFilePath -> Key -> Maybe FileMode -> CommandCleanup
|
||||
cleanup dest key destmode = do
|
||||
stagePointerFile dest destmode =<< hashPointerFile key
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath (fromRawFilePath dest))
|
||||
Database.Keys.addAssociatedFile key =<< inRepo (toTopFilePath dest)
|
||||
return True
|
||||
|
|
|
@ -207,7 +207,7 @@ withKeysReferenced' mdir initial a = do
|
|||
( return ([], return True)
|
||||
, do
|
||||
top <- fromRepo Git.repoPath
|
||||
inRepo $ LsFiles.allFiles [toRawFilePath top]
|
||||
inRepo $ LsFiles.allFiles [top]
|
||||
)
|
||||
Just dir -> inRepo $ LsFiles.inRepo [toRawFilePath dir]
|
||||
go v [] = return v
|
||||
|
|
|
@ -99,7 +99,7 @@ checkoutViewBranch view mkbranch = do
|
|||
- and this pollutes the view, so remove them.
|
||||
- (However, empty directories used by submodules are not
|
||||
- removed.) -}
|
||||
top <- liftIO . absPath =<< fromRepo Git.repoPath
|
||||
top <- liftIO . absPath . fromRawFilePath =<< fromRepo Git.repoPath
|
||||
(l, cleanup) <- inRepo $
|
||||
LsFiles.notInRepoIncludingEmptyDirectories False
|
||||
[toRawFilePath top]
|
||||
|
@ -110,8 +110,8 @@ checkoutViewBranch view mkbranch = do
|
|||
return ok
|
||||
where
|
||||
removeemptydir top d = do
|
||||
p <- inRepo $ toTopFilePath $ fromRawFilePath d
|
||||
liftIO $ tryIO $ removeDirectory (top </> getTopFilePath p)
|
||||
p <- inRepo $ toTopFilePath d
|
||||
liftIO $ tryIO $ removeDirectory (top </> fromRawFilePath (getTopFilePath p))
|
||||
cwdmissing top = unlines
|
||||
[ "This view does not include the subdirectory you are currently in."
|
||||
, "Perhaps you should: cd " ++ top
|
||||
|
|
|
@ -147,7 +147,7 @@ updateFromLog db (oldtree, currtree) = do
|
|||
recordAnnexBranchTree db currtree
|
||||
flushDbQueue db
|
||||
where
|
||||
go ti = case extLogFileKey remoteContentIdentifierExt (toRawFilePath (getTopFilePath (DiffTree.file ti))) of
|
||||
go ti = case extLogFileKey remoteContentIdentifierExt (getTopFilePath (DiffTree.file ti)) of
|
||||
Nothing -> return ()
|
||||
Just k -> do
|
||||
l <- Log.getContentIdentifiers k
|
||||
|
|
|
@ -211,7 +211,7 @@ mkExportDiffUpdater removeold addnew h srcek dstek i = do
|
|||
Nothing -> return ()
|
||||
Just k -> liftIO $ addnew h (asKey k) loc
|
||||
where
|
||||
loc = mkExportLocation $ toRawFilePath $ getTopFilePath $ Git.DiffTree.file i
|
||||
loc = mkExportLocation $ getTopFilePath $ Git.DiffTree.file i
|
||||
|
||||
runExportDiffUpdater :: ExportDiffUpdater -> ExportHandle -> Sha -> Sha -> Annex ()
|
||||
runExportDiffUpdater updater h old new = do
|
||||
|
|
|
@ -169,13 +169,13 @@ removeAssociatedFile :: Key -> TopFilePath -> Annex ()
|
|||
removeAssociatedFile k = runWriterIO . SQL.removeAssociatedFile k
|
||||
|
||||
{- Stats the files, and stores their InodeCaches. -}
|
||||
storeInodeCaches :: Key -> [FilePath] -> Annex ()
|
||||
storeInodeCaches :: Key -> [RawFilePath] -> Annex ()
|
||||
storeInodeCaches k fs = storeInodeCaches' k fs []
|
||||
|
||||
storeInodeCaches' :: Key -> [FilePath] -> [InodeCache] -> Annex ()
|
||||
storeInodeCaches' :: Key -> [RawFilePath] -> [InodeCache] -> Annex ()
|
||||
storeInodeCaches' k fs ics = withTSDelta $ \d ->
|
||||
addInodeCaches k . (++ ics) . catMaybes
|
||||
=<< liftIO (mapM (`genInodeCache` d) fs)
|
||||
=<< liftIO (mapM (\f -> genInodeCache f d) fs)
|
||||
|
||||
addInodeCaches :: Key -> [InodeCache] -> Annex ()
|
||||
addInodeCaches k is = runWriterIO $ SQL.addInodeCaches k is
|
||||
|
@ -223,7 +223,7 @@ reconcileStaged :: H.DbQueue -> Annex ()
|
|||
reconcileStaged qh = do
|
||||
gitindex <- inRepo currentIndexFile
|
||||
indexcache <- fromRepo gitAnnexKeysDbIndexCache
|
||||
withTSDelta (liftIO . genInodeCache gitindex) >>= \case
|
||||
withTSDelta (liftIO . genInodeCache (toRawFilePath gitindex)) >>= \case
|
||||
Just cur ->
|
||||
liftIO (maybe Nothing readInodeCache <$> catchMaybeIO (readFile indexcache)) >>= \case
|
||||
Nothing -> go cur indexcache
|
||||
|
@ -279,7 +279,7 @@ reconcileStaged qh = do
|
|||
((':':_srcmode):dstmode:_srcsha:dstsha:_change:[])
|
||||
-- Only want files, not symlinks
|
||||
| dstmode /= decodeBS' (fmtTreeItemType TreeSymlink) -> do
|
||||
maybe noop (reconcile (asTopFilePath file))
|
||||
maybe noop (reconcile (asTopFilePath (toRawFilePath file)))
|
||||
=<< catKey (Ref dstsha)
|
||||
procdiff rest True
|
||||
| otherwise -> procdiff rest changed
|
||||
|
@ -293,11 +293,11 @@ reconcileStaged qh = do
|
|||
caches <- liftIO $ SQL.getInodeCaches key (SQL.ReadHandle qh)
|
||||
keyloc <- calcRepo (gitAnnexLocation key)
|
||||
keypopulated <- sameInodeCache keyloc caches
|
||||
p <- fromRepo $ toRawFilePath . fromTopFilePath file
|
||||
filepopulated <- sameInodeCache (fromRawFilePath p) caches
|
||||
p <- fromRepo $ fromTopFilePath file
|
||||
filepopulated <- sameInodeCache p caches
|
||||
case (keypopulated, filepopulated) of
|
||||
(True, False) ->
|
||||
populatePointerFile (Restage True) key (toRawFilePath keyloc) p >>= \case
|
||||
populatePointerFile (Restage True) key keyloc p >>= \case
|
||||
Nothing -> return ()
|
||||
Just ic -> liftIO $
|
||||
SQL.addInodeCaches key [ic] (SQL.WriteHandle qh)
|
||||
|
|
|
@ -17,6 +17,7 @@ import Database.Types
|
|||
import Database.Handle
|
||||
import qualified Database.Queue as H
|
||||
import Utility.InodeCache
|
||||
import Utility.FileSystemEncoding
|
||||
import Git.FilePath
|
||||
|
||||
import Database.Persist.Sql hiding (Key)
|
||||
|
@ -85,7 +86,7 @@ addAssociatedFile k f = queueDb $ do
|
|||
deleteWhere [AssociatedFile ==. af, AssociatedKey !=. k]
|
||||
void $ insertUnique $ Associated k af
|
||||
where
|
||||
af = toSFilePath (getTopFilePath f)
|
||||
af = toSFilePath (fromRawFilePath (getTopFilePath f))
|
||||
|
||||
-- Does not remove any old association for a file, but less expensive
|
||||
-- than addAssociatedFile. Calling dropAllAssociatedFiles first and then
|
||||
|
@ -93,7 +94,7 @@ addAssociatedFile k f = queueDb $ do
|
|||
addAssociatedFileFast :: Key -> TopFilePath -> WriteHandle -> IO ()
|
||||
addAssociatedFileFast k f = queueDb $ void $ insertUnique $ Associated k af
|
||||
where
|
||||
af = toSFilePath (getTopFilePath f)
|
||||
af = toSFilePath (fromRawFilePath (getTopFilePath f))
|
||||
|
||||
dropAllAssociatedFiles :: WriteHandle -> IO ()
|
||||
dropAllAssociatedFiles = queueDb $
|
||||
|
@ -104,7 +105,7 @@ dropAllAssociatedFiles = queueDb $
|
|||
getAssociatedFiles :: Key -> ReadHandle -> IO [TopFilePath]
|
||||
getAssociatedFiles k = readDb $ do
|
||||
l <- selectList [AssociatedKey ==. k] []
|
||||
return $ map (asTopFilePath . fromSFilePath . associatedFile . entityVal) l
|
||||
return $ map (asTopFilePath . toRawFilePath . associatedFile . entityVal) l
|
||||
|
||||
{- Gets any keys that are on record as having a particular associated file.
|
||||
- (Should be one or none but the database doesn't enforce that.) -}
|
||||
|
@ -113,13 +114,13 @@ getAssociatedKey f = readDb $ do
|
|||
l <- selectList [AssociatedFile ==. af] []
|
||||
return $ map (associatedKey . entityVal) l
|
||||
where
|
||||
af = toSFilePath (getTopFilePath f)
|
||||
af = toSFilePath (fromRawFilePath (getTopFilePath f))
|
||||
|
||||
removeAssociatedFile :: Key -> TopFilePath -> WriteHandle -> IO ()
|
||||
removeAssociatedFile k f = queueDb $
|
||||
deleteWhere [AssociatedKey ==. k, AssociatedFile ==. af]
|
||||
where
|
||||
af = toSFilePath (getTopFilePath f)
|
||||
af = toSFilePath (fromRawFilePath (getTopFilePath f))
|
||||
|
||||
addInodeCaches :: Key -> [InodeCache] -> WriteHandle -> IO ()
|
||||
addInodeCaches k is = queueDb $
|
||||
|
|
39
Git.hs
|
@ -51,35 +51,35 @@ import Utility.FileMode
|
|||
repoDescribe :: Repo -> String
|
||||
repoDescribe Repo { remoteName = Just name } = name
|
||||
repoDescribe Repo { location = Url url } = show url
|
||||
repoDescribe Repo { location = Local { worktree = Just dir } } = dir
|
||||
repoDescribe Repo { location = Local { gitdir = dir } } = dir
|
||||
repoDescribe Repo { location = LocalUnknown dir } = dir
|
||||
repoDescribe Repo { location = Local { worktree = Just dir } } = fromRawFilePath dir
|
||||
repoDescribe Repo { location = Local { gitdir = dir } } = fromRawFilePath dir
|
||||
repoDescribe Repo { location = LocalUnknown dir } = fromRawFilePath dir
|
||||
repoDescribe Repo { location = Unknown } = "UNKNOWN"
|
||||
|
||||
{- Location of the repo, either as a path or url. -}
|
||||
repoLocation :: Repo -> String
|
||||
repoLocation Repo { location = Url url } = show url
|
||||
repoLocation Repo { location = Local { worktree = Just dir } } = dir
|
||||
repoLocation Repo { location = Local { gitdir = dir } } = dir
|
||||
repoLocation Repo { location = LocalUnknown dir } = dir
|
||||
repoLocation Repo { location = Local { worktree = Just dir } } = fromRawFilePath dir
|
||||
repoLocation Repo { location = Local { gitdir = dir } } = fromRawFilePath dir
|
||||
repoLocation Repo { location = LocalUnknown dir } = fromRawFilePath dir
|
||||
repoLocation Repo { location = Unknown } = error "unknown repoLocation"
|
||||
|
||||
{- Path to a repository. For non-bare, this is the worktree, for bare,
|
||||
- it's the gitdir, and for URL repositories, is the path on the remote
|
||||
- host. -}
|
||||
repoPath :: Repo -> FilePath
|
||||
repoPath Repo { location = Url u } = unEscapeString $ uriPath u
|
||||
repoPath :: Repo -> RawFilePath
|
||||
repoPath Repo { location = Url u } = toRawFilePath $ unEscapeString $ uriPath u
|
||||
repoPath Repo { location = Local { worktree = Just d } } = d
|
||||
repoPath Repo { location = Local { gitdir = d } } = d
|
||||
repoPath Repo { location = LocalUnknown dir } = dir
|
||||
repoPath Repo { location = Unknown } = error "unknown repoPath"
|
||||
|
||||
repoWorkTree :: Repo -> Maybe FilePath
|
||||
repoWorkTree :: Repo -> Maybe RawFilePath
|
||||
repoWorkTree Repo { location = Local { worktree = Just d } } = Just d
|
||||
repoWorkTree _ = Nothing
|
||||
|
||||
{- Path to a local repository's .git directory. -}
|
||||
localGitDir :: Repo -> FilePath
|
||||
localGitDir :: Repo -> RawFilePath
|
||||
localGitDir Repo { location = Local { gitdir = d } } = d
|
||||
localGitDir _ = error "unknown localGitDir"
|
||||
|
||||
|
@ -132,16 +132,17 @@ assertLocal repo action
|
|||
attributes :: Repo -> FilePath
|
||||
attributes repo
|
||||
| repoIsLocalBare repo = attributesLocal repo
|
||||
| otherwise = repoPath repo </> ".gitattributes"
|
||||
| otherwise = fromRawFilePath (repoPath repo) </> ".gitattributes"
|
||||
|
||||
attributesLocal :: Repo -> FilePath
|
||||
attributesLocal repo = localGitDir repo </> "info" </> "attributes"
|
||||
attributesLocal repo = fromRawFilePath (localGitDir repo)
|
||||
</> "info" </> "attributes"
|
||||
|
||||
{- Path to a given hook script in a repository, only if the hook exists
|
||||
- and is executable. -}
|
||||
hookPath :: String -> Repo -> IO (Maybe FilePath)
|
||||
hookPath script repo = do
|
||||
let hook = localGitDir repo </> "hooks" </> script
|
||||
let hook = fromRawFilePath (localGitDir repo) </> "hooks" </> script
|
||||
ifM (catchBoolIO $ isexecutable hook)
|
||||
( return $ Just hook , return Nothing )
|
||||
where
|
||||
|
@ -157,22 +158,22 @@ relPath = adjustPath torel
|
|||
where
|
||||
torel p = do
|
||||
p' <- relPathCwdToFile p
|
||||
if null p'
|
||||
then return "."
|
||||
else return p'
|
||||
return $ if null p' then "." else p'
|
||||
|
||||
{- Adjusts the path to a local Repo using the provided function. -}
|
||||
adjustPath :: (FilePath -> IO FilePath) -> Repo -> IO Repo
|
||||
adjustPath f r@(Repo { location = l@(Local { gitdir = d, worktree = w }) }) = do
|
||||
d' <- f d
|
||||
w' <- maybe (pure Nothing) (Just <$$> f) w
|
||||
d' <- f' d
|
||||
w' <- maybe (pure Nothing) (Just <$$> f') w
|
||||
return $ r
|
||||
{ location = l
|
||||
{ gitdir = d'
|
||||
, worktree = w'
|
||||
}
|
||||
}
|
||||
where
|
||||
f' v = toRawFilePath <$> f (fromRawFilePath v)
|
||||
adjustPath f r@(Repo { location = LocalUnknown d }) = do
|
||||
d' <- f d
|
||||
d' <- toRawFilePath <$> f (fromRawFilePath d)
|
||||
return $ r { location = LocalUnknown d' }
|
||||
adjustPath _ r = pure r
|
||||
|
|
|
@ -24,10 +24,10 @@ gitCommandLine params r@(Repo { location = l@(Local { } ) }) =
where
setdir
| gitEnvOverridesGitDir r = []
| otherwise = [Param $ "--git-dir=" ++ gitdir l]
| otherwise = [Param $ "--git-dir=" ++ fromRawFilePath (gitdir l)]
settree = case worktree l of
Nothing -> []
Just t -> [Param $ "--work-tree=" ++ t]
Just t -> [Param $ "--work-tree=" ++ fromRawFilePath t]
gitCommandLine _ repo = assertLocal repo $ error "internal"
{- Runs git in the specified repo. -}

@ -13,6 +13,7 @@ import qualified Data.Map as M
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Data.Char
import qualified System.FilePath.ByteString as P
import Common
import Git

@ -61,7 +62,7 @@ read' repo = go repo
where
params = ["config", "--null", "--list"]
p = (proc "git" params)
{ cwd = Just d
{ cwd = Just (fromRawFilePath d)
, env = gitEnv repo
}

@ -114,13 +115,13 @@ store' k v repo = repo
-}
updateLocation :: Repo -> IO Repo
updateLocation r@(Repo { location = LocalUnknown d })
| isBare r = ifM (doesDirectoryExist dotgit)
| isBare r = ifM (doesDirectoryExist (fromRawFilePath dotgit))
( updateLocation' r $ Local dotgit Nothing
, updateLocation' r $ Local d Nothing
)
| otherwise = updateLocation' r $ Local dotgit (Just d)
where
dotgit = (d </> ".git")
dotgit = d P.</> ".git"
updateLocation r@(Repo { location = l@(Local {}) }) = updateLocation' r l
updateLocation r = return r

@ -130,9 +131,9 @@ updateLocation' r l = do
Nothing -> return l
Just (ConfigValue d) -> do
{- core.worktree is relative to the gitdir -}
top <- absPath $ gitdir l
top <- absPath $ fromRawFilePath (gitdir l)
let p = absPathFrom top (fromRawFilePath d)
return $ l { worktree = Just p }
return $ l { worktree = Just (toRawFilePath p) }
return $ r { location = l' }
{- Parses git config --list or git config --null --list output into a

@ -62,7 +62,7 @@ fromAbsPath dir
| otherwise =
error $ "internal error, " ++ dir ++ " is not absolute"
where
ret = pure . newFrom . LocalUnknown
ret = pure . newFrom . LocalUnknown . toRawFilePath
canondir = dropTrailingPathSeparator dir
{- When dir == "foo/.git", git looks for "foo/.git/.git",
- and failing that, uses "foo" as the repository. -}

@ -117,7 +117,7 @@ localToUrl reference r
[ Url.scheme reference
, "//"
, auth
, repoPath r
, fromRawFilePath (repoPath r)
]
in r { location = Url $ fromJust $ parseURI absurl }

@ -154,7 +154,7 @@ fromRemoteLocation s repo = gen $ parseRemoteLocation s repo
fromRemotePath :: FilePath -> Repo -> IO Repo
fromRemotePath dir repo = do
dir' <- expandTilde dir
fromPath $ repoPath repo </> dir'
fromPath $ fromRawFilePath (repoPath repo) </> dir'
{- Git remotes can have a directory that is specified relative
- to the user's home directory, or that contains tilde expansions.

@ -204,7 +204,7 @@ checkForRepo dir =
where
check test cont = maybe cont (return . Just) =<< test
checkdir c = ifM c
( return $ Just $ LocalUnknown dir
( return $ Just $ LocalUnknown $ toRawFilePath dir
, return Nothing
)
isRepo = checkdir $

@ -224,9 +224,9 @@ checkForRepo dir =
catchDefaultIO "" (readFile $ dir </> ".git")
return $ if gitdirprefix `isPrefixOf` c
then Just $ Local
{ gitdir = absPathFrom dir $
{ gitdir = toRawFilePath $ absPathFrom dir $
drop (length gitdirprefix) c
, worktree = Just dir
, worktree = Just (toRawFilePath dir)
}
else Nothing
where

@ -37,7 +37,7 @@ get = do
gd <- getpathenv "GIT_DIR"
r <- configure gd =<< fromCwd
prefix <- getpathenv "GIT_PREFIX"
wt <- maybe (worktree $ location r) Just
wt <- maybe (fromRawFilePath <$> worktree (location r)) Just
<$> getpathenvprefix "GIT_WORK_TREE" prefix
case wt of
Nothing -> return r

@ -68,13 +68,18 @@ get = do
absd <- absPath d
curr <- getCurrentDirectory
r <- Git.Config.read $ newFrom $
Local { gitdir = absd, worktree = Just curr }
Local
{ gitdir = toRawFilePath absd
, worktree = Just (toRawFilePath curr)
}
return $ if Git.Config.isBare r
then r { location = (location r) { worktree = Nothing } }
else r
configure Nothing Nothing = giveup "Not in a git repository."
addworktree w r = changelocation r $
Local { gitdir = gitdir (location r), worktree = w }
addworktree w r = changelocation r $ Local
{ gitdir = gitdir (location r)
, worktree = fmap toRawFilePath w
}
changelocation r l = r { location = l }

@ -31,9 +31,9 @@ import qualified Git.Ref
{- Checks if the DiffTreeItem modifies a file with a given name
- or under a directory by that name. -}
isDiffOf :: DiffTreeItem -> TopFilePath -> Bool
isDiffOf diff f = case getTopFilePath f of
isDiffOf diff f = case fromRawFilePath (getTopFilePath f) of
"" -> True -- top of repo contains all
d -> d `dirContains` getTopFilePath (file diff)
d -> d `dirContains` fromRawFilePath (getTopFilePath (file diff))
{- Diffs two tree Refs. -}
diffTree :: Ref -> Ref -> Repo -> IO ([DiffTreeItem], IO Bool)

@ -113,7 +113,7 @@ parseDiffRaw l = go l
, srcsha = fromMaybe (error "bad srcsha") $ extractSha ssha
, dstsha = fromMaybe (error "bad dstsha") $ extractSha dsha
, status = s
, file = asTopFilePath $ fromRawFilePath $ fromInternalGitPath $ Git.Filename.decode $ toRawFilePath f
, file = asTopFilePath $ fromInternalGitPath $ Git.Filename.decode $ toRawFilePath f
}
where
readmode = fst . Prelude.head . readOct

@ -30,8 +30,10 @@ addGitEnv g var val = adjustGitEnv g (addEntry var val)
- and a copy of the rest of the system environment. -}
propGitEnv :: Repo -> IO [(String, String)]
propGitEnv g = do
g' <- addGitEnv g "GIT_DIR" (localGitDir g)
g'' <- maybe (pure g') (addGitEnv g' "GIT_WORK_TREE") (repoWorkTree g)
g' <- addGitEnv g "GIT_DIR" (fromRawFilePath (localGitDir g))
g'' <- maybe (pure g')
(addGitEnv g' "GIT_WORK_TREE" . fromRawFilePath)
(repoWorkTree g)
return $ fromMaybe [] (gitEnv g'')
{- Use with any action that makes a commit to set metadata. -}
@ -5,7 +5,7 @@
- top of the repository even when run in a subdirectory. Adding some
- types helps keep that straight.
-
- Copyright 2012-2013 Joey Hess <id@joeyh.name>
- Copyright 2012-2019 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU AGPL version 3 or higher.
-}

@ -31,13 +31,14 @@ module Git.FilePath (
import Common
import Git
import qualified System.FilePath.Posix
import qualified System.FilePath.ByteString as P
import qualified System.FilePath.Posix.ByteString
import GHC.Generics
import Control.DeepSeq
import qualified Data.ByteString as S
{- A RawFilePath, relative to the top of the git repository. -}
newtype TopFilePath = TopFilePath { getTopFilePath :: FilePath }
newtype TopFilePath = TopFilePath { getTopFilePath :: RawFilePath }
deriving (Show, Eq, Ord, Generic)
instance NFData TopFilePath

@ -49,19 +50,20 @@ data BranchFilePath = BranchFilePath Ref TopFilePath
{- Git uses the branch:file form to refer to a BranchFilePath -}
descBranchFilePath :: BranchFilePath -> S.ByteString
descBranchFilePath (BranchFilePath b f) =
encodeBS' (fromRef b) <> ":" <> toRawFilePath (getTopFilePath f)
encodeBS' (fromRef b) <> ":" <> getTopFilePath f
{- Path to a TopFilePath, within the provided git repo. -}
fromTopFilePath :: TopFilePath -> Git.Repo -> FilePath
fromTopFilePath p repo = combine (repoPath repo) (getTopFilePath p)
fromTopFilePath :: TopFilePath -> Git.Repo -> RawFilePath
fromTopFilePath p repo = P.combine (repoPath repo) (getTopFilePath p)
{- The input FilePath can be absolute, or relative to the CWD. -}
toTopFilePath :: FilePath -> Git.Repo -> IO TopFilePath
toTopFilePath file repo = TopFilePath <$> relPathDirToFile (repoPath repo) file
toTopFilePath :: RawFilePath -> Git.Repo -> IO TopFilePath
toTopFilePath file repo = TopFilePath . toRawFilePath
<$> relPathDirToFile (fromRawFilePath (repoPath repo)) (fromRawFilePath file)
{- The input FilePath must already be relative to the top of the git
{- The input RawFilePath must already be relative to the top of the git
- repository -}
asTopFilePath :: FilePath -> TopFilePath
asTopFilePath :: RawFilePath -> TopFilePath
asTopFilePath file = TopFilePath file
{- Git may use a different representation of a path when storing

@ -91,5 +93,5 @@ fromInternalGitPath = encodeBS . replace "/" "\\" . decodeBS
- so try posix paths.
-}
absoluteGitPath :: RawFilePath -> Bool
absoluteGitPath p = isAbsolute (decodeBS p) ||
System.FilePath.Posix.isAbsolute (decodeBS (toInternalGitPath p))
absoluteGitPath p = P.isAbsolute p ||
System.FilePath.Posix.ByteString.isAbsolute (toInternalGitPath p)
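
A small sketch, not from this commit, of what the Git.FilePath change buys: with TopFilePath wrapping a RawFilePath, paths can be combined entirely in ByteString space using System.FilePath.ByteString (imported as P above), instead of round-tripping through String as the old combine-based fromTopFilePath did. The path values below are hypothetical.

{-# LANGUAGE OverloadedStrings #-}
import qualified System.FilePath.ByteString as P
import qualified Data.ByteString.Char8 as B8

-- roughly repoPath repo P.</> getTopFilePath p, with placeholder values
main :: IO ()
main = B8.putStrLn ("/srv/repo" P.</> "sub/dir/file")
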
@ -28,7 +28,7 @@ instance Eq Hook where
a == b = hookName a == hookName b
hookFile :: Hook -> Repo -> FilePath
hookFile h r = localGitDir r </> "hooks" </> hookName h
hookFile h r = fromRawFilePath (localGitDir r) </> "hooks" </> hookName h
{- Writes a hook. Returns False if the hook already exists with a different
- content. Upgrades old scripts.

@ -49,7 +49,7 @@ override index _r = do
{- The normal index file. Does not check GIT_INDEX_FILE. -}
indexFile :: Repo -> FilePath
indexFile r = localGitDir r </> "index"
indexFile r = fromRawFilePath (localGitDir r) </> "index"
{- The index file git will currently use, checking GIT_INDEX_FILE. -}
currentIndexFile :: Repo -> IO FilePath

@ -189,7 +189,7 @@ typeChanged' ps l repo = do
(fs, cleanup) <- pipeNullSplit (prefix ++ ps ++ suffix) repo
-- git diff returns filenames relative to the top of the git repo;
-- convert to filenames relative to the cwd, like git ls-files.
top <- absPath (repoPath repo)
top <- absPath (fromRawFilePath (repoPath repo))
currdir <- getCurrentDirectory
return (map (\f -> toRawFilePath (relPathDirToFileAbs currdir $ top </> decodeBL' f)) fs, cleanup)
where

@ -100,7 +100,7 @@ parserLsTree = TreeItem
<*> (Ref . decodeBS' <$> A.take shaSize)
<* A8.char '\t'
-- file
<*> (asTopFilePath . decodeBS' . Git.Filename.decode <$> A.takeByteString)
<*> (asTopFilePath . Git.Filename.decode <$> A.takeByteString)
{- Inverse of parseLsTree -}
formatLsTree :: TreeItem -> String

@ -108,5 +108,5 @@ formatLsTree ti = unwords
[ showOct (mode ti) ""
, decodeBS (typeobj ti)
, fromRef (sha ti)
, getTopFilePath (file ti)
, fromRawFilePath (getTopFilePath (file ti))
]

@ -12,7 +12,7 @@ import Git
import Git.Sha
objectsDir :: Repo -> FilePath
objectsDir r = localGitDir r </> "objects"
objectsDir r = fromRawFilePath (localGitDir r) </> "objects"
packDir :: Repo -> FilePath
packDir r = objectsDir r </> "pack"

@ -22,7 +22,7 @@ headRef :: Ref
headRef = Ref "HEAD"
headFile :: Repo -> FilePath
headFile r = localGitDir r </> "HEAD"
headFile r = fromRawFilePath (localGitDir r) </> "HEAD"
setHeadRef :: Ref -> Repo -> IO ()
setHeadRef ref r = writeFile (headFile r) ("ref: " ++ fromRef ref)

@ -85,7 +85,7 @@ exists ref = runBool
{- The file used to record a ref. (Git also stores some refs in a
- packed-refs file.) -}
file :: Ref -> Repo -> FilePath
file ref repo = localGitDir repo </> fromRef ref
file ref repo = fromRawFilePath (localGitDir repo) </> fromRef ref
{- Checks if HEAD exists. It generally will, except for in a repository
- that was just created. -}

@ -227,7 +227,7 @@ badBranches missing r = filterM isbad =<< getAllRefs r
- Relies on packed refs being exploded before it's called.
-}
getAllRefs :: Repo -> IO [Ref]
getAllRefs r = getAllRefs' (localGitDir r </> "refs")
getAllRefs r = getAllRefs' (fromRawFilePath (localGitDir r) </> "refs")
getAllRefs' :: FilePath -> IO [Ref]
getAllRefs' refdir = do

@ -245,13 +245,13 @@ explodePackedRefsFile r = do
nukeFile f
where
makeref (sha, ref) = do
let dest = localGitDir r </> fromRef ref
let dest = fromRawFilePath (localGitDir r) </> fromRef ref
createDirectoryIfMissing True (parentDir dest)
unlessM (doesFileExist dest) $
writeFile dest (fromRef sha)
packedRefsFile :: Repo -> FilePath
packedRefsFile r = localGitDir r </> "packed-refs"
packedRefsFile r = fromRawFilePath (localGitDir r) </> "packed-refs"
parsePacked :: String -> Maybe (Sha, Ref)
parsePacked l = case words l of

@ -263,7 +263,7 @@ parsePacked l = case words l of
{- git-branch -d cannot be used to remove a branch that is directly
- pointing to a corrupt commit. -}
nukeBranchRef :: Branch -> Repo -> IO ()
nukeBranchRef b r = nukeFile $ localGitDir r </> fromRef b
nukeBranchRef b r = nukeFile $ fromRawFilePath (localGitDir r) </> fromRef b
{- Finds the most recent commit to a branch that does not need any
- of the missing objects. If the input branch is good as-is, returns it.

@ -366,16 +366,16 @@ checkIndex r = do
- itself is not corrupt. -}
checkIndexFast :: Repo -> IO Bool
checkIndexFast r = do
(indexcontents, cleanup) <- LsFiles.stagedDetails [toRawFilePath (repoPath r)] r
(indexcontents, cleanup) <- LsFiles.stagedDetails [repoPath r] r
length indexcontents `seq` cleanup
missingIndex :: Repo -> IO Bool
missingIndex r = not <$> doesFileExist (localGitDir r </> "index")
missingIndex r = not <$> doesFileExist (fromRawFilePath (localGitDir r) </> "index")
{- Finds missing and ok files staged in the index. -}
partitionIndex :: Repo -> IO ([LsFiles.StagedDetails], [LsFiles.StagedDetails], IO Bool)
partitionIndex r = do
(indexcontents, cleanup) <- LsFiles.stagedDetails [toRawFilePath (repoPath r)] r
(indexcontents, cleanup) <- LsFiles.stagedDetails [repoPath r] r
l <- forM indexcontents $ \i -> case i of
(_file, Just sha, Just _mode) -> (,) <$> isMissing sha r <*> pure i
_ -> pure (False, i)

@ -446,7 +446,7 @@ preRepair g = do
let f = indexFile g
void $ tryIO $ allowWrite f
where
headfile = localGitDir g </> "HEAD"
headfile = fromRawFilePath (localGitDir g) </> "HEAD"
validhead s = "ref: refs/" `isPrefixOf` s || isJust (extractSha s)
{- Put it all together. -}

@ -57,13 +57,13 @@ parseStatusZ = go []
in go (v : c) xs'
_ -> go c xs
cparse 'M' f _ = (Just (Modified (asTopFilePath f)), Nothing)
cparse 'A' f _ = (Just (Added (asTopFilePath f)), Nothing)
cparse 'D' f _ = (Just (Deleted (asTopFilePath f)), Nothing)
cparse 'T' f _ = (Just (TypeChanged (asTopFilePath f)), Nothing)
cparse '?' f _ = (Just (Untracked (asTopFilePath f)), Nothing)
cparse 'M' f _ = (Just (Modified (asTopFilePath (toRawFilePath f))), Nothing)
cparse 'A' f _ = (Just (Added (asTopFilePath (toRawFilePath f))), Nothing)
cparse 'D' f _ = (Just (Deleted (asTopFilePath (toRawFilePath f))), Nothing)
cparse 'T' f _ = (Just (TypeChanged (asTopFilePath (toRawFilePath f))), Nothing)
cparse '?' f _ = (Just (Untracked (asTopFilePath (toRawFilePath f))), Nothing)
cparse 'R' f (oldf:xs) =
(Just (Renamed (asTopFilePath oldf) (asTopFilePath f)), Just xs)
(Just (Renamed (asTopFilePath (toRawFilePath oldf)) (asTopFilePath (toRawFilePath f))), Just xs)
cparse _ _ _ = (Nothing, Nothing)
getStatus :: [CommandParam] -> [FilePath] -> Repo -> IO ([StagedUnstaged Status], IO Bool)

Git/Tree.hs
@ -119,7 +119,7 @@ mkTreeOutput fm ot s f = concat
, " "
, fromRef s
, "\t"
, takeFileName (getTopFilePath f)
, takeFileName (fromRawFilePath (getTopFilePath f))
, "\NUL"
]

@ -156,7 +156,7 @@ treeItemsToTree = go M.empty
Just (NewSubTree d l) ->
go (addsubtree idir m (NewSubTree d (c:l))) is
_ ->
go (addsubtree idir m (NewSubTree (asTopFilePath idir) [c])) is
go (addsubtree idir m (NewSubTree (asTopFilePath (toRawFilePath idir)) [c])) is
where
p = gitPath i
idir = takeDirectory p

@ -169,7 +169,7 @@ treeItemsToTree = go M.empty
Just (NewSubTree d' l) ->
let l' = filter (\ti -> gitPath ti /= d) l
in addsubtree parent m' (NewSubTree d' (t:l'))
_ -> addsubtree parent m' (NewSubTree (asTopFilePath parent) [t])
_ -> addsubtree parent m' (NewSubTree (asTopFilePath (toRawFilePath parent)) [t])
| otherwise = M.insert d t m
where
parent = takeDirectory d

@ -328,7 +328,7 @@ graftTree' subtree graftloc basetree repo hdl = go basetree graftdirs
-- For a graftloc of "foo/bar/baz", this generates
-- ["foo", "foo/bar", "foo/bar/baz"]
graftdirs = map (asTopFilePath . decodeBS . toInternalGitPath . encodeBS) $
graftdirs = map (asTopFilePath . toInternalGitPath . encodeBS) $
mkpaths [] $ splitDirectories $ gitPath graftloc
mkpaths _ [] = []
mkpaths base (d:rest) = (joinPath base </> d) : mkpaths (base ++ [d]) rest

@ -366,7 +366,7 @@ instance GitPath FilePath where
gitPath = id
instance GitPath TopFilePath where
gitPath = getTopFilePath
gitPath = fromRawFilePath . getTopFilePath
instance GitPath TreeItem where
gitPath (TreeItem f _ _) = gitPath f

@ -30,8 +30,8 @@ import Utility.FileSystemEncoding
- else known about it.
-}
data RepoLocation
= Local { gitdir :: FilePath, worktree :: Maybe FilePath }
| LocalUnknown FilePath
= Local { gitdir :: RawFilePath, worktree :: Maybe RawFilePath }
| LocalUnknown RawFilePath
| Url URI
| Unknown
deriving (Show, Eq, Ord)

@ -91,7 +91,7 @@ mergeFile info file hashhandle h = case filter (/= nullSha) [Ref asha, Ref bsha]
where
[_colonmode, _bmode, asha, bsha, _status] = words info
use sha = return $ Just $
updateIndexLine sha TreeFile $ asTopFilePath file
updateIndexLine sha TreeFile $ asTopFilePath $ toRawFilePath file
-- Get file and split into lines to union merge.
-- The encoding of the file is assumed to be either ASCII or utf-8;
-- in either case it's safe to split on \n

@ -96,13 +96,13 @@ updateIndexLine sha treeitemtype file = L.fromStrict $
stageFile :: Sha -> TreeItemType -> FilePath -> Repo -> IO Streamer
stageFile sha treeitemtype file repo = do
p <- toTopFilePath file repo
p <- toTopFilePath (toRawFilePath file) repo
return $ pureStreamer $ updateIndexLine sha treeitemtype p
{- A streamer that removes a file from the index. -}
unstageFile :: FilePath -> Repo -> IO Streamer
unstageFile file repo = do
p <- toTopFilePath file repo
p <- toTopFilePath (toRawFilePath file) repo
return $ unstageFile' p
unstageFile' :: TopFilePath -> Streamer

@ -118,7 +118,7 @@ stageSymlink file sha repo = do
!line <- updateIndexLine
<$> pure sha
<*> pure TreeSymlink
<*> toTopFilePath file repo
<*> toTopFilePath (toRawFilePath file) repo
return $ pureStreamer line
{- A streamer that applies a DiffTreeItem to the index. -}

@ -128,7 +128,7 @@ stageDiffTreeItem d = case toTreeItemType (Diff.dstmode d) of
Just t -> pureStreamer $ updateIndexLine (Diff.dstsha d) t (Diff.file d)
indexPath :: TopFilePath -> InternalGitPath
indexPath = toInternalGitPath . toRawFilePath . getTopFilePath
indexPath = toInternalGitPath . getTopFilePath
{- Refreshes the index, by checking file stat information. -}
refreshIndex :: Repo -> ((FilePath -> IO ()) -> IO ()) -> IO Bool

Limit.hs
@ -33,6 +33,7 @@ import Git.Types (RefDate(..))
import Utility.Glob
import Utility.HumanTime
import Utility.DataUnits
import qualified Utility.RawFilePath as R
import Data.Time.Clock.POSIX
import qualified Data.Set as S

@ -94,7 +95,7 @@ matchGlobFile :: String -> MatchInfo -> Annex Bool
matchGlobFile glob = go
where
cglob = compileGlob glob CaseSensative -- memoized
go (MatchingFile fi) = pure $ matchGlob cglob (matchFile fi)
go (MatchingFile fi) = pure $ matchGlob cglob (fromRawFilePath (matchFile fi))
go (MatchingInfo p) = matchGlob cglob <$> getInfo (providedFilePath p)
go (MatchingKey _ (AssociatedFile Nothing)) = pure False
go (MatchingKey _ (AssociatedFile (Just af))) = pure $ matchGlob cglob (fromRawFilePath af)

@ -117,7 +118,8 @@ addMagicLimit limitname querymagic selectprovidedinfo glob = do
-- When the file is an annex symlink, get magic of the
-- object file.
Nothing -> isAnnexLink (toRawFilePath f) >>= \case
Just k -> withObjectLoc k $ querymagic magic
Just k -> withObjectLoc k $
querymagic magic . fromRawFilePath
Nothing -> querymagic magic f
matchMagic :: String -> (Magic -> FilePath -> Annex (Maybe String)) -> (ProvidedInfo -> OptInfo String) -> Maybe Magic -> MkLimit Annex

@ -127,7 +129,7 @@ matchMagic _limitname querymagic selectprovidedinfo (Just magic) glob = Right $
go (MatchingKey _ _) = pure False
go (MatchingFile fi) = catchBoolIO $
maybe False (matchGlob cglob)
<$> querymagic magic (currFile fi)
<$> querymagic magic (fromRawFilePath (currFile fi))
go (MatchingInfo p) =
matchGlob cglob <$> getInfo (selectprovidedinfo p)
matchMagic limitname _ _ Nothing _ =

@ -143,10 +145,10 @@ matchLockStatus :: Bool -> MatchInfo -> Annex Bool
matchLockStatus _ (MatchingKey _ _) = pure False
matchLockStatus _ (MatchingInfo _) = pure False
matchLockStatus wantlocked (MatchingFile fi) = liftIO $ do
islocked <- isPointerFile (toRawFilePath (currFile fi)) >>= \case
islocked <- isPointerFile (currFile fi) >>= \case
Just _key -> return False
Nothing -> isSymbolicLink
<$> getSymbolicLinkStatus (currFile fi)
<$> getSymbolicLinkStatus (fromRawFilePath (currFile fi))
return (islocked == wantlocked)
{- Adds a limit to skip files not believed to be present

@ -190,7 +192,7 @@ limitPresent u _ = checkKey $ \key -> do
limitInDir :: FilePath -> MatchFiles Annex
limitInDir dir = const go
where
go (MatchingFile fi) = checkf $ matchFile fi
go (MatchingFile fi) = checkf $ fromRawFilePath $ matchFile fi
go (MatchingKey _ (AssociatedFile Nothing)) = return False
go (MatchingKey _ (AssociatedFile (Just af))) = checkf (fromRawFilePath af)
go (MatchingInfo p) = checkf =<< getInfo (providedFilePath p)

@ -239,7 +241,8 @@ limitLackingCopies approx want = case readish want of
NumCopies numcopies <- if approx
then approxNumCopies
else case mi of
MatchingFile fi -> getGlobalFileNumCopies $ matchFile fi
MatchingFile fi -> getGlobalFileNumCopies $
fromRawFilePath $ matchFile fi
MatchingKey _ _ -> approxNumCopies
MatchingInfo {} -> approxNumCopies
us <- filter (`S.notMember` notpresent)

@ -321,7 +324,8 @@ limitSize lb vs s = case readSize dataUnits s of
Just key -> checkkey sz key
Nothing -> return False
LimitDiskFiles -> do
filesize <- liftIO $ catchMaybeIO $ getFileSize (currFile fi)
filesize <- liftIO $ catchMaybeIO $
getFileSize (fromRawFilePath (currFile fi))
return $ filesize `vs` Just sz
go sz _ (MatchingKey key _) = checkkey sz key
go sz _ (MatchingInfo p) =

@ -361,14 +365,14 @@ addAccessedWithin duration = do
where
check now k = inAnnexCheck k $ \f ->
liftIO $ catchDefaultIO False $ do
s <- getFileStatus f
s <- R.getFileStatus f
let accessed = realToFrac (accessTime s)
let delta = now - accessed
return $ delta <= secs
secs = fromIntegral (durationSeconds duration)
lookupFileKey :: FileInfo -> Annex (Maybe Key)
lookupFileKey = lookupFile . toRawFilePath . currFile
lookupFileKey = lookupFile . currFile
checkKey :: (Key -> Annex Bool) -> MatchInfo -> Annex Bool
checkKey a (MatchingFile fi) = lookupFileKey fi >>= maybe (return False) a

@ -21,6 +21,6 @@ addWantDrop = addLimit $ Right $ const $ checkWant $
wantDrop False Nothing Nothing
checkWant :: (AssociatedFile -> Annex Bool) -> MatchInfo -> Annex Bool
checkWant a (MatchingFile fi) = a (AssociatedFile (Just $ toRawFilePath $ matchFile fi))
checkWant a (MatchingFile fi) = a (AssociatedFile (Just $ matchFile fi))
checkWant a (MatchingKey _ af) = a af
checkWant _ (MatchingInfo {}) = return False
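
A hedged sketch, not from this commit, of the stat call that addAccessedWithin switches to above; it assumes Utility.RawFilePath (the new R import in Limit.hs) mirrors System.Posix.Files.ByteString, so the same access-time check can be written directly against that module. The helper name accessedWithin is hypothetical.

import System.Posix.Files.ByteString (getFileStatus, accessTime)
import qualified Data.ByteString.Char8 as B8
import Data.Time.Clock.POSIX (getPOSIXTime)

-- Was the file at this ByteString path accessed within the last n seconds?
accessedWithin :: B8.ByteString -> Double -> IO Bool
accessedWithin f secs = do
  now <- realToFrac <$> getPOSIXTime
  s <- getFileStatus f
  let accessed = realToFrac (accessTime s)
  return (now - accessed <= secs)
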
Logs.hs

@ -13,6 +13,7 @@ import Annex.Common
import Annex.DirHashes
import qualified Data.ByteString as S
import qualified System.FilePath.ByteString as P
{- There are several varieties of log file formats. -}
data LogVariety

@ -117,19 +118,19 @@ exportLog = "export.log"
{- The pathname of the location log file for a given key. -}
locationLogFile :: GitConfig -> Key -> RawFilePath
locationLogFile config key = toRawFilePath $
branchHashDir config key </> keyFile key ++ ".log"
locationLogFile config key =
branchHashDir config key P.</> keyFile' key <> ".log"
{- The filename of the url log for a given key. -}
urlLogFile :: GitConfig -> Key -> RawFilePath
urlLogFile config key = toRawFilePath $
branchHashDir config key </> keyFile key ++ decodeBS' urlLogExt
urlLogFile config key =
branchHashDir config key P.</> keyFile' key <> urlLogExt
{- Old versions stored the urls elsewhere. -}
oldurlLogs :: GitConfig -> Key -> [RawFilePath]
oldurlLogs config key = map toRawFilePath
[ "remote/web" </> hdir </> serializeKey key ++ ".log"
, "remote/web" </> hdir </> keyFile key ++ ".log"
oldurlLogs config key =
[ "remote/web" P.</> hdir P.</> serializeKey' key <> ".log"
, "remote/web" P.</> hdir P.</> keyFile' key <> ".log"
]
where
hdir = branchHashDir config key

@ -144,7 +145,7 @@ isUrlLog file = urlLogExt `S.isSuffixOf` file
{- The filename of the remote state log for a given key. -}
remoteStateLogFile :: GitConfig -> Key -> RawFilePath
remoteStateLogFile config key =
toRawFilePath (branchHashDir config key </> keyFile key)
(branchHashDir config key P.</> keyFile' key)
<> remoteStateLogExt
remoteStateLogExt :: S.ByteString

@ -156,7 +157,7 @@ isRemoteStateLog path = remoteStateLogExt `S.isSuffixOf` path
{- The filename of the chunk log for a given key. -}
chunkLogFile :: GitConfig -> Key -> RawFilePath
chunkLogFile config key =
toRawFilePath (branchHashDir config key </> keyFile key)
(branchHashDir config key P.</> keyFile' key)
<> chunkLogExt
chunkLogExt :: S.ByteString

@ -168,7 +169,7 @@ isChunkLog path = chunkLogExt `S.isSuffixOf` path
{- The filename of the metadata log for a given key. -}
metaDataLogFile :: GitConfig -> Key -> RawFilePath
metaDataLogFile config key =
toRawFilePath (branchHashDir config key </> keyFile key)
(branchHashDir config key P.</> keyFile' key)
<> metaDataLogExt
metaDataLogExt :: S.ByteString

@ -180,7 +181,7 @@ isMetaDataLog path = metaDataLogExt `S.isSuffixOf` path
{- The filename of the remote metadata log for a given key. -}
remoteMetaDataLogFile :: GitConfig -> Key -> RawFilePath
remoteMetaDataLogFile config key =
toRawFilePath (branchHashDir config key </> keyFile key)
(branchHashDir config key P.</> keyFile' key)
<> remoteMetaDataLogExt
remoteMetaDataLogExt :: S.ByteString

@ -192,7 +193,7 @@ isRemoteMetaDataLog path = remoteMetaDataLogExt `S.isSuffixOf` path
{- The filename of the remote content identifier log for a given key. -}
remoteContentIdentifierLogFile :: GitConfig -> Key -> RawFilePath
remoteContentIdentifierLogFile config key =
toRawFilePath (branchHashDir config key </> keyFile key)
(branchHashDir config key P.</> keyFile' key)
<> remoteContentIdentifierExt
remoteContentIdentifierExt :: S.ByteString
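
An illustration, not from this commit, of the ByteString path composition the new locationLogFile uses above: P.</> joins the hash directory and key file, and <> appends the extension, with no String conversion in between. The hashdir and keyfile values below are hypothetical stand-ins for branchHashDir config key and keyFile' key.

{-# LANGUAGE OverloadedStrings #-}
import qualified System.FilePath.ByteString as P
import qualified Data.ByteString as S

locationLogFileSketch :: S.ByteString
locationLogFileSketch = hashdir P.</> keyfile <> ".log"
  where
    -- hypothetical values; the real ones are derived from the key and config
    hashdir = "f87/4d5"
    keyfile = "SHA256E-s1048576--c0ffee.bin"
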
@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Logs.Export (
Exported,
mkExported,

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Logs.Smudge where
import Annex.Common

@ -15,8 +17,8 @@ import Logs.File
smudgeLog :: Key -> TopFilePath -> Annex ()
smudgeLog k f = do
logf <- fromRepo gitAnnexSmudgeLog
appendLogFile logf gitAnnexSmudgeLock $
serializeKey k ++ " " ++ getTopFilePath f
appendLogFile logf gitAnnexSmudgeLock $ fromRawFilePath $
serializeKey' k <> " " <> getTopFilePath f
-- | Streams all smudged files, and then empties the log at the end.
--

@ -37,4 +39,4 @@ streamSmudged a = do
let (ks, f) = separate (== ' ') l
in do
k <- deserializeKey ks
return (k, asTopFilePath f)
return (k, asTopFilePath (toRawFilePath f))

@ -93,7 +93,7 @@ knownUrls = do
Annex.Branch.update
Annex.Branch.commit =<< Annex.Branch.commitMessage
Annex.Branch.withIndex $ do
top <- toRawFilePath <$> fromRepo Git.repoPath
top <- fromRepo Git.repoPath
(l, cleanup) <- inRepo $ Git.LsFiles.stagedDetails [top]
r <- mapM getkeyurls l
void $ liftIO cleanup
Some files were not shown because too many files have changed in this diff.