more RawFilePath conversion

at 377/645

This commit was sponsored by Svenne Krap on Patreon.
Joey Hess 2020-10-29 14:20:57 -04:00
parent f45ad178cb
commit 681b44236a
GPG key ID: DB12DB0FF05F8F38
23 changed files with 215 additions and 188 deletions
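
The change repeated across all of these files follows one pattern: path-producing and path-consuming functions move from String-based FilePath to ByteString-based RawFilePath, path manipulation switches to the combinators in System.FilePath.ByteString (imported qualified as P), and toRawFilePath / fromRawFilePath bridge the remaining String boundaries. A minimal illustrative sketch of that pattern, assuming the filepath-bytestring package; the names here are invented for illustration and are not code from this commit:

{-# LANGUAGE OverloadedStrings #-}
import System.FilePath ((</>))
import System.FilePath.ByteString (RawFilePath)
import qualified System.FilePath.ByteString as P

-- Before: String-based FilePath and System.FilePath combinators.
indexStatusOld :: FilePath -> FilePath
indexStatusOld annexdir = annexdir </> "index.lck"

-- After: ByteString-based RawFilePath with the same combinators from
-- System.FilePath.ByteString (qualified as P, as in the imports seen in
-- the diffs below); OverloadedStrings lets the literal act as a ByteString.
indexStatusNew :: RawFilePath -> RawFilePath
indexStatusNew annexdir = annexdir P.</> "index.lck"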

View file

@ -41,6 +41,7 @@ import Data.Function
import Data.Char
import Data.ByteString.Builder
import Control.Concurrent (threadDelay)
import qualified System.FilePath.ByteString as P
import Annex.Common
import Types.BranchState
@ -455,7 +456,7 @@ withIndex' :: Bool -> Annex a -> Annex a
withIndex' bootstrapping a = withIndexFile AnnexIndexFile $ \f -> do
checkIndexOnce $ unlessM (liftIO $ doesFileExist f) $ do
unless bootstrapping create
createAnnexDirectory $ takeDirectory f
createAnnexDirectory $ toRawFilePath $ takeDirectory f
unless bootstrapping $ inRepo genIndex
a
@ -477,7 +478,7 @@ forceUpdateIndex jl branchref = do
{- Checks if the index needs to be updated. -}
needUpdateIndex :: Git.Ref -> Annex Bool
needUpdateIndex branchref = do
f <- fromRepo gitAnnexIndexStatus
f <- fromRawFilePath <$> fromRepo gitAnnexIndexStatus
committedref <- Git.Ref . firstLine' <$>
liftIO (catchDefaultIO mempty $ B.readFile f)
return (committedref /= branchref)
@ -506,19 +507,19 @@ stageJournal jl commitindex = withIndex $ withOtherTmp $ \tmpdir -> do
prepareModifyIndex jl
g <- gitRepo
let dir = gitAnnexJournalDir g
(jlogf, jlogh) <- openjlog tmpdir
(jlogf, jlogh) <- openjlog (fromRawFilePath tmpdir)
h <- hashObjectHandle
withJournalHandle $ \jh ->
Git.UpdateIndex.streamUpdateIndex g
[genstream dir h jh jlogh]
commitindex
liftIO $ cleanup dir jlogh jlogf
liftIO $ cleanup (fromRawFilePath dir) jlogh jlogf
where
genstream dir h jh jlogh streamer = readDirectory jh >>= \case
Nothing -> return ()
Just file -> do
unless (dirCruft file) $ do
let path = dir </> file
let path = dir P.</> toRawFilePath file
sha <- Git.HashObject.hashFile h path
hPutStrLn jlogh file
streamer $ Git.UpdateIndex.updateIndexLine
@ -666,7 +667,7 @@ getIgnoredRefs =
S.fromList . mapMaybe Git.Sha.extractSha . B8.lines <$> content
where
content = do
f <- fromRepo gitAnnexIgnoredRefs
f <- fromRawFilePath <$> fromRepo gitAnnexIgnoredRefs
liftIO $ catchDefaultIO mempty $ B.readFile f
addMergedRefs :: [(Git.Sha, Git.Branch)] -> Annex ()
@ -684,7 +685,7 @@ getMergedRefs = S.fromList . map fst <$> getMergedRefs'
getMergedRefs' :: Annex [(Git.Sha, Git.Branch)]
getMergedRefs' = do
f <- fromRepo gitAnnexMergedRefs
f <- fromRawFilePath <$> fromRepo gitAnnexMergedRefs
s <- liftIO $ catchDefaultIO mempty $ B.readFile f
return $ map parse $ B8.lines s
where

View file

@ -5,6 +5,8 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Annex.ChangedRefs
( ChangedRefs(..)
, ChangedRefsHandle
@ -17,6 +19,7 @@ module Annex.ChangedRefs
import Annex.Common
import Utility.DirWatcher
import Utility.DirWatcher.Types
import Utility.Directory.Create
import qualified Git
import Git.Sha
import qualified Utility.SimpleProtocol as Proto
@ -90,7 +93,9 @@ watchChangedRefs = do
if canWatch
then do
h <- liftIO $ watchDir refdir (const False) True hooks id
h <- liftIO $ watchDir
(fromRawFilePath refdir)
(const False) True hooks id
return $ Just $ ChangedRefsHandle h chan
else return Nothing

View file

@ -1,6 +1,6 @@
{- git-annex file content managing
-
- Copyright 2010-2019 Joey Hess <id@joeyh.name>
- Copyright 2010-2020 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU AGPL version 3 or higher.
-}
@ -131,8 +131,7 @@ objectFileExists key =
{- A safer check; the key's content must not only be present, but
- is not in the process of being removed. -}
inAnnexSafe :: Key -> Annex (Maybe Bool)
inAnnexSafe key =
inAnnex' (fromMaybe True) (Just False) (go . fromRawFilePath) key
inAnnexSafe key = inAnnex' (fromMaybe True) (Just False) go key
where
is_locked = Nothing
is_unlocked = Just True
@ -145,7 +144,7 @@ inAnnexSafe key =
{- The content file must exist, but the lock file generally
- won't exist unless a removal is in process. -}
checklock (Just lockfile) contentfile =
ifM (liftIO $ doesFileExist contentfile)
ifM (liftIO $ doesFileExist (fromRawFilePath contentfile))
( checkOr is_unlocked lockfile
, return is_missing
)
@ -154,7 +153,7 @@ inAnnexSafe key =
Just True -> is_locked
Just False -> is_unlocked
#else
checklock Nothing contentfile = liftIO $ ifM (doesFileExist contentfile)
checklock Nothing contentfile = liftIO $ ifM (doesFileExist (fromRawFilePath contentfile))
( lockShared contentfile >>= \case
Nothing -> return is_locked
Just lockhandle -> do
@ -165,7 +164,7 @@ inAnnexSafe key =
{- In Windows, see if we can take a shared lock. If so,
- remove the lock file to clean up after ourselves. -}
checklock (Just lockfile) contentfile =
ifM (liftIO $ doesFileExist contentfile)
ifM (liftIO $ doesFileExist (fromRawFilePath contentfile))
( modifyContent lockfile $ liftIO $
lockShared lockfile >>= \case
Nothing -> return is_locked
@ -180,7 +179,7 @@ inAnnexSafe key =
{- Windows has to use a separate lock file from the content, since
- locking the actual content file would interfere with the user's
- use of it. -}
contentLockFile :: Key -> Annex (Maybe FilePath)
contentLockFile :: Key -> Annex (Maybe RawFilePath)
#ifndef mingw32_HOST_OS
contentLockFile _ = pure Nothing
#else
@ -226,9 +225,11 @@ lockContentForRemoval key fallback a = lockContentUsing lock key fallback $
{- Since content files are stored with the write bit disabled, have
- to fiddle with permissions to open for an exclusive lock. -}
lock contentfile Nothing = bracket_
(thawContent contentfile)
(freezeContent contentfile)
(thawContent contentfile')
(freezeContent contentfile')
(tryLockExclusive Nothing contentfile)
where
contentfile' = fromRawFilePath contentfile
lock _ (Just lockfile) = posixLocker tryLockExclusive lockfile
#else
lock = winLocker lockExclusive
@ -236,7 +237,7 @@ lockContentForRemoval key fallback a = lockContentUsing lock key fallback $
{- Passed the object content file, and maybe a separate lock file to use,
- when the content file itself should not be locked. -}
type ContentLocker = FilePath -> Maybe LockFile -> Annex (Maybe LockHandle)
type ContentLocker = RawFilePath -> Maybe LockFile -> Annex (Maybe LockHandle)
#ifndef mingw32_HOST_OS
posixLocker :: (Maybe FileMode -> LockFile -> Annex (Maybe LockHandle)) -> LockFile -> Annex (Maybe LockHandle)
@ -262,7 +263,7 @@ winLocker _ _ Nothing = return Nothing
- the file that is locked eg on Windows a different file is locked. -}
lockContentUsing :: ContentLocker -> Key -> Annex a -> Annex a -> Annex a
lockContentUsing locker key fallback a = do
contentfile <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
contentfile <- calcRepo (gitAnnexLocation key)
lockfile <- contentLockFile key
bracket
(lock contentfile lockfile)
@ -295,22 +296,22 @@ lockContentUsing locker key fallback a = do
cleanuplockfile lockfile = modifyContent lockfile $
void $ liftIO $ tryIO $
removeWhenExistsWith removeLink lockfile
removeWhenExistsWith R.removeLink lockfile
{- Runs an action, passing it the temp file to get,
- and if the action succeeds, verifies the file matches
- the key and moves the file into the annex as a key's content. -}
getViaTmp :: RetrievalSecurityPolicy -> VerifyConfig -> Key -> (FilePath -> Annex (Bool, Verification)) -> Annex Bool
getViaTmp :: RetrievalSecurityPolicy -> VerifyConfig -> Key -> (RawFilePath -> Annex (Bool, Verification)) -> Annex Bool
getViaTmp rsp v key action = checkDiskSpaceToGet key False $
getViaTmpFromDisk rsp v key action
{- Like getViaTmp, but does not check that there is enough disk space
- for the incoming key. For use when the key content is already on disk
- and not being copied into place. -}
getViaTmpFromDisk :: RetrievalSecurityPolicy -> VerifyConfig -> Key -> (FilePath -> Annex (Bool, Verification)) -> Annex Bool
getViaTmpFromDisk :: RetrievalSecurityPolicy -> VerifyConfig -> Key -> (RawFilePath -> Annex (Bool, Verification)) -> Annex Bool
getViaTmpFromDisk rsp v key action = checkallowed $ do
tmpfile <- prepTmp key
resuming <- liftIO $ doesFileExist tmpfile
resuming <- liftIO $ R.doesPathExist tmpfile
(ok, verification) <- action tmpfile
-- When the temp file already had content, we don't know if
-- that content is good or not, so only trust it if the action
@ -322,7 +323,7 @@ getViaTmpFromDisk rsp v key action = checkallowed $ do
_ -> MustVerify
else verification
if ok
then ifM (verifyKeyContent rsp v verification' key tmpfile)
then ifM (verifyKeyContent rsp v verification' key (fromRawFilePath tmpfile))
( ifM (pruneTmpWorkDirBefore tmpfile (moveAnnex key))
( do
logStatus key InfoPresent
@ -338,7 +339,8 @@ getViaTmpFromDisk rsp v key action = checkallowed $ do
-- including perhaps the content of another
-- file than the one that was requested,
-- and so it's best not to keep it on disk.
pruneTmpWorkDirBefore tmpfile (liftIO . removeWhenExistsWith removeLink)
pruneTmpWorkDirBefore tmpfile
(liftIO . removeWhenExistsWith R.removeLink)
return False
)
-- On transfer failure, the tmp file is left behind, in case
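
As a hedged illustration (not part of this commit) of how a caller adapts to the new signature: the action now receives the temp file as a RawFilePath and converts only where a String path is still wanted. The helper downloadTo and the use of UnVerified here are assumptions made for the sketch.

getViaTmpExample :: RetrievalSecurityPolicy -> VerifyConfig -> Key -> Annex Bool
getViaTmpExample rsp v key = getViaTmp rsp v key go
  where
    -- the action's new type: it is handed a RawFilePath temp file
    go :: RawFilePath -> Annex (Bool, Verification)
    go tmp = do
        ok <- downloadTo (fromRawFilePath tmp)
        return (ok, UnVerified)
    -- hypothetical stand-in for a transfer action that still wants a String
    downloadTo :: FilePath -> Annex Bool
    downloadTo _ = pure True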
@ -432,7 +434,7 @@ shouldVerify (RemoteVerify r) =
-}
checkDiskSpaceToGet :: Key -> a -> Annex a -> Annex a
checkDiskSpaceToGet key unabletoget getkey = do
tmp <- fromRepo $ gitAnnexTmpObjectLocation key
tmp <- fromRawFilePath <$> fromRepo (gitAnnexTmpObjectLocation key)
e <- liftIO $ doesFileExist tmp
alreadythere <- liftIO $ if e
@ -446,7 +448,7 @@ checkDiskSpaceToGet key unabletoget getkey = do
, return unabletoget
)
prepTmp :: Key -> Annex FilePath
prepTmp :: Key -> Annex RawFilePath
prepTmp key = do
tmp <- fromRepo $ gitAnnexTmpObjectLocation key
createAnnexDirectory (parentDir tmp)
@ -456,11 +458,11 @@ prepTmp key = do
- the temp file. If the action throws an exception, the temp file is
- left behind, which allows for resuming.
-}
withTmp :: Key -> (FilePath -> Annex a) -> Annex a
withTmp :: Key -> (RawFilePath -> Annex a) -> Annex a
withTmp key action = do
tmp <- prepTmp key
res <- action tmp
pruneTmpWorkDirBefore tmp (liftIO . removeWhenExistsWith removeLink)
pruneTmpWorkDirBefore tmp (liftIO . removeWhenExistsWith R.removeLink)
return res
{- Moves a key's content into .git/annex/objects/
@ -491,7 +493,7 @@ withTmp key action = do
- accepted into the repository. Will display a warning message in this
- case. May also throw exceptions in some cases.
-}
moveAnnex :: Key -> FilePath -> Annex Bool
moveAnnex :: Key -> RawFilePath -> Annex Bool
moveAnnex key src = ifM (checkSecureHashes' key)
( do
withObjectLoc key storeobject
@ -501,9 +503,11 @@ moveAnnex key src = ifM (checkSecureHashes' key)
where
storeobject dest = ifM (liftIO $ R.doesPathExist dest)
( alreadyhave
, modifyContent dest' $ do
freezeContent src
liftIO $ moveFile src dest'
, modifyContent dest $ do
freezeContent (fromRawFilePath src)
liftIO $ moveFile
(fromRawFilePath src)
(fromRawFilePath dest)
g <- Annex.gitRepo
fs <- map (`fromTopFilePath` g)
<$> Database.Keys.getAssociatedFiles key
@ -511,9 +515,7 @@ moveAnnex key src = ifM (checkSecureHashes' key)
ics <- mapM (populatePointerFile (Restage True) key dest) fs
Database.Keys.storeInodeCaches' key [dest] (catMaybes ics)
)
where
dest' = fromRawFilePath dest
alreadyhave = liftIO $ removeFile src
alreadyhave = liftIO $ R.removeLink src
checkSecureHashes :: Key -> Annex (Maybe String)
checkSecureHashes key = ifM (Backend.isCryptographicallySecure key)
@ -535,20 +537,20 @@ data LinkAnnexResult = LinkAnnexOk | LinkAnnexFailed | LinkAnnexNoop
{- Populates the annex object file by hard linking or copying a source
- file to it. -}
linkToAnnex :: Key -> FilePath -> Maybe InodeCache -> Annex LinkAnnexResult
linkToAnnex :: Key -> RawFilePath -> Maybe InodeCache -> Annex LinkAnnexResult
linkToAnnex key src srcic = ifM (checkSecureHashes' key)
( do
dest <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
dest <- calcRepo (gitAnnexLocation key)
modifyContent dest $ linkAnnex To key src srcic dest Nothing
, return LinkAnnexFailed
)
{- Makes a destination file be a link or copy from the annex object. -}
linkFromAnnex :: Key -> FilePath -> Maybe FileMode -> Annex LinkAnnexResult
linkFromAnnex :: Key -> RawFilePath -> Maybe FileMode -> Annex LinkAnnexResult
linkFromAnnex key dest destmode = do
src <- calcRepo (gitAnnexLocation key)
srcic <- withTSDelta (liftIO . genInodeCache src)
linkAnnex From key (fromRawFilePath src) srcic dest destmode
linkAnnex From key src srcic dest destmode
data FromTo = From | To
@ -564,10 +566,10 @@ data FromTo = From | To
-
- Nothing is done if the destination file already exists.
-}
linkAnnex :: FromTo -> Key -> FilePath -> Maybe InodeCache -> FilePath -> Maybe FileMode -> Annex LinkAnnexResult
linkAnnex :: FromTo -> Key -> RawFilePath -> Maybe InodeCache -> RawFilePath -> Maybe FileMode -> Annex LinkAnnexResult
linkAnnex _ _ _ Nothing _ _ = return LinkAnnexFailed
linkAnnex fromto key src (Just srcic) dest destmode =
withTSDelta (liftIO . genInodeCache dest') >>= \case
withTSDelta (liftIO . genInodeCache dest) >>= \case
Just destic -> do
cs <- Database.Keys.getInodeCaches key
if null cs
@ -578,24 +580,25 @@ linkAnnex fromto key src (Just srcic) dest destmode =
Nothing -> failed
Just r -> do
case fromto of
From -> thawContent dest
From -> thawContent $
fromRawFilePath dest
To -> case r of
Copied -> freezeContent dest
Copied -> freezeContent $
fromRawFilePath dest
Linked -> noop
checksrcunchanged
where
dest' = toRawFilePath dest
failed = do
Database.Keys.addInodeCaches key [srcic]
return LinkAnnexFailed
checksrcunchanged = withTSDelta (liftIO . genInodeCache (toRawFilePath src)) >>= \case
checksrcunchanged = withTSDelta (liftIO . genInodeCache src) >>= \case
Just srcic' | compareStrong srcic srcic' -> do
destic <- withTSDelta (liftIO . genInodeCache dest')
destic <- withTSDelta (liftIO . genInodeCache dest)
Database.Keys.addInodeCaches key $
catMaybes [destic, Just srcic]
return LinkAnnexOk
_ -> do
liftIO $ removeWhenExistsWith removeLink dest
liftIO $ removeWhenExistsWith R.removeLink dest
failed
{- Removes the annex object file for a key. Lowlevel. -}
@ -656,7 +659,7 @@ withObjectLoc key a = a =<< calcRepo (gitAnnexLocation key)
cleanObjectLoc :: Key -> Annex () -> Annex ()
cleanObjectLoc key cleaner = do
file <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
file <- calcRepo (gitAnnexLocation key)
void $ tryIO $ thawContentDir file
cleaner
liftIO $ removeparents file (3 :: Int)
@ -665,16 +668,15 @@ cleanObjectLoc key cleaner = do
removeparents file n = do
let dir = parentDir file
maybe noop (const $ removeparents dir (n-1))
<=< catchMaybeIO $ removeDirectory dir
<=< catchMaybeIO $ removeDirectory (fromRawFilePath dir)
{- Removes a key's file from .git/annex/objects/
-}
removeAnnex :: ContentRemovalLock -> Annex ()
removeAnnex (ContentRemovalLock key) = withObjectLoc key $ \file ->
cleanObjectLoc key $ do
let file' = fromRawFilePath file
secureErase file'
liftIO $ removeWhenExistsWith removeLink file'
secureErase file
liftIO $ removeWhenExistsWith R.removeLink file
g <- Annex.gitRepo
mapM_ (\f -> void $ tryIO $ resetpointer $ fromTopFilePath f g)
=<< Database.Keys.getAssociatedFiles key
@ -736,14 +738,15 @@ isUnmodifiedCheap' key fc =
- returns the file it was moved to. -}
moveBad :: Key -> Annex FilePath
moveBad key = do
src <- fromRawFilePath <$> calcRepo (gitAnnexLocation key)
src <- calcRepo (gitAnnexLocation key)
bad <- fromRepo gitAnnexBadDir
let dest = bad </> takeFileName src
let dest = bad P.</> P.takeFileName src
let dest' = fromRawFilePath dest
createAnnexDirectory (parentDir dest)
cleanObjectLoc key $
liftIO $ moveFile src dest
liftIO $ moveFile (fromRawFilePath src) dest'
logStatus key InfoMissing
return dest
return dest'
data KeyLocation = InAnnex | InAnywhere
@ -839,9 +842,9 @@ preseedTmp key file = go =<< inAnnex key
{- Finds files directly inside a directory like gitAnnexBadDir
- (not in subdirectories) and returns the corresponding keys. -}
dirKeys :: (Git.Repo -> FilePath) -> Annex [Key]
dirKeys :: (Git.Repo -> RawFilePath) -> Annex [Key]
dirKeys dirspec = do
dir <- fromRepo dirspec
dir <- fromRawFilePath <$> fromRepo dirspec
ifM (liftIO $ doesDirectoryExist dir)
( do
contents <- liftIO $ getDirectoryContents dir
@ -857,7 +860,7 @@ dirKeys dirspec = do
- Also, stale keys that can be proven to have no value
- (ie, their content is already present) are deleted.
-}
staleKeysPrune :: (Git.Repo -> FilePath) -> Bool -> Annex [Key]
staleKeysPrune :: (Git.Repo -> RawFilePath) -> Bool -> Annex [Key]
staleKeysPrune dirspec nottransferred = do
contents <- dirKeys dirspec
@ -866,8 +869,8 @@ staleKeysPrune dirspec nottransferred = do
dir <- fromRepo dirspec
forM_ dups $ \k ->
pruneTmpWorkDirBefore (dir </> fromRawFilePath (keyFile k))
(liftIO . removeFile)
pruneTmpWorkDirBefore (dir P.</> keyFile k)
(liftIO . R.removeLink)
if nottransferred
then do
@ -882,9 +885,9 @@ staleKeysPrune dirspec nottransferred = do
- This preserves the invariant that the workdir never exists without
- the content file.
-}
pruneTmpWorkDirBefore :: FilePath -> (FilePath -> Annex a) -> Annex a
pruneTmpWorkDirBefore :: RawFilePath -> (RawFilePath -> Annex a) -> Annex a
pruneTmpWorkDirBefore f action = do
let workdir = gitAnnexTmpWorkDir f
let workdir = fromRawFilePath $ gitAnnexTmpWorkDir f
liftIO $ whenM (doesDirectoryExist workdir) $
removeDirectoryRecursive workdir
action f
@ -899,21 +902,22 @@ pruneTmpWorkDirBefore f action = do
- the temporary work directory is retained (unless
- empty), so anything in it can be used on resume.
-}
withTmpWorkDir :: Key -> (FilePath -> Annex (Maybe a)) -> Annex (Maybe a)
withTmpWorkDir :: Key -> (RawFilePath -> Annex (Maybe a)) -> Annex (Maybe a)
withTmpWorkDir key action = do
-- Create the object file if it does not exist. This way,
-- staleKeysPrune only has to look for object files, and can
-- clean up gitAnnexTmpWorkDir for those it finds.
obj <- prepTmp key
unlessM (liftIO $ doesFileExist obj) $ do
liftIO $ writeFile obj ""
setAnnexFilePerm obj
let obj' = fromRawFilePath obj
unlessM (liftIO $ doesFileExist obj') $ do
liftIO $ writeFile obj' ""
setAnnexFilePerm obj'
let tmpdir = gitAnnexTmpWorkDir obj
createAnnexDirectory tmpdir
res <- action tmpdir
case res of
Just _ -> liftIO $ removeDirectoryRecursive tmpdir
Nothing -> liftIO $ void $ tryIO $ removeDirectory tmpdir
Just _ -> liftIO $ removeDirectoryRecursive (fromRawFilePath tmpdir)
Nothing -> liftIO $ void $ tryIO $ removeDirectory (fromRawFilePath tmpdir)
return res
{- Finds items in the first, smaller list, that are not

View file

@ -18,16 +18,17 @@ import Utility.DiskFree
import Utility.FileMode
import Utility.DataUnits
import Utility.CopyFile
import qualified Utility.RawFilePath as R
{- Runs the secure erase command if set, otherwise does nothing.
- File may or may not be deleted at the end; caller is responsible for
- making sure it's deleted. -}
secureErase :: FilePath -> Annex ()
secureErase :: RawFilePath -> Annex ()
secureErase file = maybe noop go =<< annexSecureEraseCommand <$> Annex.getGitConfig
where
go basecmd = void $ liftIO $
boolSystem "sh" [Param "-c", Param $ gencmd basecmd]
gencmd = massReplace [ ("%file", shellEscape file) ]
gencmd = massReplace [ ("%file", shellEscape (fromRawFilePath file)) ]
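-- Editorial worked example, not part of this commit: if the secure erase
-- command (annexSecureEraseCommand) were configured as "wipe -f %file" and
-- the object file were "/repo/.git/annex/objects/aa/bb/KEY/KEY" (a made-up
-- path), gencmd substitutes the shell-escaped path for %file, yielding
-- roughly "wipe -f '/repo/.git/annex/objects/aa/bb/KEY/KEY'", which go then
-- runs through boolSystem "sh" [Param "-c", ...].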
data LinkedOrCopied = Linked | Copied
@ -44,10 +45,10 @@ data LinkedOrCopied = Linked | Copied
- execute bit will be set. The mode is not fully copied over because
- git doesn't support file modes beyond execute.
-}
linkOrCopy :: Key -> FilePath -> FilePath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
linkOrCopy :: Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
linkOrCopy = linkOrCopy' (annexThin <$> Annex.getGitConfig)
linkOrCopy' :: Annex Bool -> Key -> FilePath -> FilePath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
linkOrCopy' :: Annex Bool -> Key -> RawFilePath -> RawFilePath -> Maybe FileMode -> Annex (Maybe LinkedOrCopied)
linkOrCopy' canhardlink key src dest destmode = catchDefaultIO Nothing $
ifM canhardlink
( hardlink
@ -58,13 +59,15 @@ linkOrCopy' canhardlink key src dest destmode = catchDefaultIO Nothing $
s <- getstat
if linkCount s > 1
then copy s
else liftIO (createLink src dest >> preserveGitMode dest destmode >> return (Just Linked))
else liftIO (R.createLink src dest >> preserveGitMode dest' destmode >> return (Just Linked))
`catchIO` const (copy s)
copy s = ifM (checkedCopyFile' key src dest destmode s)
copy s = ifM (checkedCopyFile' key src' dest' destmode s)
( return (Just Copied)
, return Nothing
)
getstat = liftIO $ getFileStatus src
getstat = liftIO $ R.getFileStatus src
src' = fromRawFilePath src
dest' = fromRawFilePath dest
{- Checks disk space before copying. -}
checkedCopyFile :: Key -> FilePath -> FilePath -> Maybe FileMode -> Annex Bool

View file

@ -42,7 +42,7 @@ populatePointerFile restage k obj f = go =<< liftIO (isPointerFile f)
liftIO $ removeWhenExistsWith R.removeLink f
(ic, populated) <- replaceWorkTreeFile f' $ \tmp -> do
let tmp' = toRawFilePath tmp
ok <- linkOrCopy k (fromRawFilePath obj) tmp destmode >>= \case
ok <- linkOrCopy k obj tmp' destmode >>= \case
Just _ -> thawContent tmp >> return True
Nothing -> liftIO (writePointerFile tmp' k destmode) >> return False
ic <- withTSDelta (liftIO . genInodeCache tmp')
@ -61,7 +61,7 @@ depopulatePointerFile key file = do
let file' = fromRawFilePath file
st <- liftIO $ catchMaybeIO $ getFileStatus file'
let mode = fmap fileMode st
secureErase file'
secureErase file
liftIO $ removeWhenExistsWith R.removeLink file
ic <- replaceWorkTreeFile file' $ \tmp -> do
liftIO $ writePointerFile (toRawFilePath tmp) key mode

View file

@ -187,7 +187,7 @@ restagePointerFile (Restage True) f orig = withTSDelta $ \tsd ->
-- update-index is documented as picky about "./file" and it
-- fails on "../../repo/path/file" when cwd is not in the repo
-- being acted on. Avoid these problems with an absolute path.
absf <- liftIO $ absPath $ fromRawFilePath f
absf <- liftIO $ absPath f
Annex.Queue.addInternalAction runner [(absf, isunmodified tsd)]
where
isunmodified tsd = genInodeCache f tsd >>= return . \case

View file

@ -232,10 +232,10 @@ gitAnnexLinkCanonical file key r config = gitAnnexLink file key r' config'
}
{- File used to lock a key's content. -}
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO RawFilePath
gitAnnexContentLock key r config = do
loc <- gitAnnexLocation key r config
return $ fromRawFilePath loc ++ ".lck"
return $ loc <> ".lck"
{- File that maps from a key to the file(s) in the git repository.
- Used in direct mode. -}
@ -296,9 +296,8 @@ gitAnnexTmpWatcherDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "watchtmp"
{- The temp file to use for a given key's content. -}
gitAnnexTmpObjectLocation :: Key -> Git.Repo -> FilePath
gitAnnexTmpObjectLocation key r = fromRawFilePath $
gitAnnexTmpObjectDir' r P.</> keyFile key
gitAnnexTmpObjectLocation :: Key -> Git.Repo -> RawFilePath
gitAnnexTmpObjectLocation key r = gitAnnexTmpObjectDir' r P.</> keyFile key
{- Given a temp file such as gitAnnexTmpObjectLocation, makes a name for a
- subdirectory in the same location, that can be used as a work area
@ -307,37 +306,36 @@ gitAnnexTmpObjectLocation key r = fromRawFilePath $
- There are ordering requirements for creating these directories;
- use Annex.Content.withTmpWorkDir to set them up.
-}
gitAnnexTmpWorkDir :: FilePath -> FilePath
gitAnnexTmpWorkDir :: RawFilePath -> RawFilePath
gitAnnexTmpWorkDir p =
let (dir, f) = splitFileName p
let (dir, f) = P.splitFileName p
-- Using a prefix avoids name conflict with any other keys.
in dir </> "work." ++ f
in dir P.</> "work." <> f
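-- Editorial worked example, not part of this commit: for an object temp file
-- such as ".git/annex/tmp/KEYNAME" (a made-up path), P.splitFileName gives
-- (".git/annex/tmp/", "KEYNAME"), so the work directory becomes
-- ".git/annex/tmp/work.KEYNAME", right alongside the object file it serves.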
{- .git/annex/bad/ is used for bad files found during fsck -}
gitAnnexBadDir :: Git.Repo -> FilePath
gitAnnexBadDir r = fromRawFilePath $
P.addTrailingPathSeparator $ gitAnnexDir r P.</> "bad"
gitAnnexBadDir :: Git.Repo -> RawFilePath
gitAnnexBadDir r = P.addTrailingPathSeparator $ gitAnnexDir r P.</> "bad"
{- The bad file to use for a given key. -}
gitAnnexBadLocation :: Key -> Git.Repo -> FilePath
gitAnnexBadLocation key r = gitAnnexBadDir r </> fromRawFilePath (keyFile key)
gitAnnexBadLocation :: Key -> Git.Repo -> RawFilePath
gitAnnexBadLocation key r = gitAnnexBadDir r P.</> keyFile key
{- .git/annex/foounused is used to number possibly unused keys -}
gitAnnexUnusedLog :: RawFilePath -> Git.Repo -> RawFilePath
gitAnnexUnusedLog prefix r = gitAnnexDir r P.</> (prefix <> "unused")
{- .git/annex/keysdb/ contains a database of information about keys. -}
gitAnnexKeysDb :: Git.Repo -> FilePath
gitAnnexKeysDb r = fromRawFilePath $ gitAnnexDir r P.</> "keysdb"
gitAnnexKeysDb :: Git.Repo -> RawFilePath
gitAnnexKeysDb r = gitAnnexDir r P.</> "keysdb"
{- Lock file for the keys database. -}
gitAnnexKeysDbLock :: Git.Repo -> FilePath
gitAnnexKeysDbLock r = gitAnnexKeysDb r ++ ".lck"
gitAnnexKeysDbLock :: Git.Repo -> RawFilePath
gitAnnexKeysDbLock r = gitAnnexKeysDb r <> ".lck"
{- Contains the stat of the last index file that was
- reconciled with the keys database. -}
gitAnnexKeysDbIndexCache :: Git.Repo -> FilePath
gitAnnexKeysDbIndexCache r = gitAnnexKeysDb r ++ ".cache"
gitAnnexKeysDbIndexCache :: Git.Repo -> RawFilePath
gitAnnexKeysDbIndexCache r = gitAnnexKeysDb r <> ".cache"
{- .git/annex/fsck/uuid/ is used to store information about incremental
- fscks. -}
@ -383,43 +381,42 @@ gitAnnexMoveLock r = fromRawFilePath $ gitAnnexDir r P.</> "move.lck"
{- .git/annex/export/ is used to store information about
- exports to special remotes. -}
gitAnnexExportDir :: Git.Repo -> FilePath
gitAnnexExportDir r = fromRawFilePath $ gitAnnexDir r P.</> "export"
gitAnnexExportDir :: Git.Repo -> RawFilePath
gitAnnexExportDir r = gitAnnexDir r P.</> "export"
{- Directory containing database used to record export info. -}
gitAnnexExportDbDir :: UUID -> Git.Repo -> FilePath
gitAnnexExportDbDir u r = gitAnnexExportDir r </> fromUUID u </> "exportdb"
gitAnnexExportDbDir :: UUID -> Git.Repo -> RawFilePath
gitAnnexExportDbDir u r = gitAnnexExportDir r P.</> fromUUID u P.</> "exportdb"
{- Lock file for export state for a special remote. -}
gitAnnexExportLock :: UUID -> Git.Repo -> FilePath
gitAnnexExportLock u r = gitAnnexExportDbDir u r ++ ".lck"
gitAnnexExportLock :: UUID -> Git.Repo -> RawFilePath
gitAnnexExportLock u r = gitAnnexExportDbDir u r <> ".lck"
{- Lock file for updating the export state for a special remote. -}
gitAnnexExportUpdateLock :: UUID -> Git.Repo -> FilePath
gitAnnexExportUpdateLock u r = gitAnnexExportDbDir u r ++ ".upl"
gitAnnexExportUpdateLock :: UUID -> Git.Repo -> RawFilePath
gitAnnexExportUpdateLock u r = gitAnnexExportDbDir u r <> ".upl"
{- Log file used to keep track of files that were in the tree exported to a
- remote, but were excluded by its preferred content settings. -}
gitAnnexExportExcludeLog :: UUID -> Git.Repo -> FilePath
gitAnnexExportExcludeLog u r = fromRawFilePath $
gitAnnexDir r P.</> "export.ex" P.</> fromUUID u
gitAnnexExportExcludeLog :: UUID -> Git.Repo -> RawFilePath
gitAnnexExportExcludeLog u r = gitAnnexDir r P.</> "export.ex" P.</> fromUUID u
{- Directory containing database used to record remote content ids.
-
- (This used to be "cid", but a problem with the database caused it to
- need to be rebuilt with a new name.)
-}
gitAnnexContentIdentifierDbDir :: Git.Repo -> FilePath
gitAnnexContentIdentifierDbDir r = fromRawFilePath $ gitAnnexDir r P.</> "cidsdb"
gitAnnexContentIdentifierDbDir :: Git.Repo -> RawFilePath
gitAnnexContentIdentifierDbDir r = gitAnnexDir r P.</> "cidsdb"
{- Lock file for writing to the content id database. -}
gitAnnexContentIdentifierLock :: Git.Repo -> FilePath
gitAnnexContentIdentifierLock r = gitAnnexContentIdentifierDbDir r ++ ".lck"
gitAnnexContentIdentifierLock :: Git.Repo -> RawFilePath
gitAnnexContentIdentifierLock r = gitAnnexContentIdentifierDbDir r <> ".lck"
{- .git/annex/schedulestate is used to store information about when
- scheduled jobs were last run. -}
gitAnnexScheduleState :: Git.Repo -> FilePath
gitAnnexScheduleState r = fromRawFilePath $ gitAnnexDir r P.</> "schedulestate"
gitAnnexScheduleState :: Git.Repo -> RawFilePath
gitAnnexScheduleState r = gitAnnexDir r P.</> "schedulestate"
{- .git/annex/creds/ is used to store credentials to access some special
- remotes. -}
@ -484,8 +481,8 @@ gitAnnexIndex r = fromRawFilePath $ gitAnnexDir r P.</> "index"
-
- The .lck in the name is a historical accident; this is not used as a
- lock. -}
gitAnnexIndexStatus :: Git.Repo -> FilePath
gitAnnexIndexStatus r = fromRawFilePath $ gitAnnexDir r P.</> "index.lck"
gitAnnexIndexStatus :: Git.Repo -> RawFilePath
gitAnnexIndexStatus r = gitAnnexDir r P.</> "index.lck"
{- The index file used to generate a filtered branch view. -}
gitAnnexViewIndex :: Git.Repo -> FilePath
@ -496,12 +493,12 @@ gitAnnexViewLog :: Git.Repo -> RawFilePath
gitAnnexViewLog r = gitAnnexDir r P.</> "viewlog"
{- List of refs that have already been merged into the git-annex branch. -}
gitAnnexMergedRefs :: Git.Repo -> FilePath
gitAnnexMergedRefs r = fromRawFilePath $ gitAnnexDir r P.</> "mergedrefs"
gitAnnexMergedRefs :: Git.Repo -> RawFilePath
gitAnnexMergedRefs r = gitAnnexDir r P.</> "mergedrefs"
{- List of refs that should not be merged into the git-annex branch. -}
gitAnnexIgnoredRefs :: Git.Repo -> FilePath
gitAnnexIgnoredRefs r = fromRawFilePath $ gitAnnexDir r P.</> "ignoredrefs"
gitAnnexIgnoredRefs :: Git.Repo -> RawFilePath
gitAnnexIgnoredRefs r = gitAnnexDir r P.</> "ignoredrefs"
{- Pid file for daemon mode. -}
gitAnnexPidFile :: Git.Repo -> RawFilePath

View file

@ -31,7 +31,7 @@ addCommand command params files = do
store =<< flushWhenFull =<<
(Git.Queue.addCommand command params files q =<< gitRepo)
addInternalAction :: Git.Queue.InternalActionRunner Annex -> [(FilePath, IO Bool)] -> Annex ()
addInternalAction :: Git.Queue.InternalActionRunner Annex -> [(RawFilePath, IO Bool)] -> Annex ()
addInternalAction runner files = do
q <- get
store =<< flushWhenFull =<<

View file

@ -35,9 +35,11 @@ import Annex.Concurrent.Utility
import Types.WorkerPool
import Annex.WorkerPool
import Backend (isCryptographicallySecure)
import qualified Utility.RawFilePath as R
import Control.Concurrent
import qualified Data.Map.Strict as M
import qualified System.FilePath.ByteString as P
import Data.Ord
upload :: Observable v => UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex v) -> NotifyWitness -> Annex v
@ -96,11 +98,11 @@ runTransfer' ignorelock t afile retrydecider transferaction = enteringStage Tran
else recordFailedTransfer t info
return v
where
prep :: FilePath -> Annex () -> FileMode -> Annex (Maybe LockHandle, Bool)
prep :: RawFilePath -> Annex () -> FileMode -> Annex (Maybe LockHandle, Bool)
#ifndef mingw32_HOST_OS
prep tfile createtfile mode = catchPermissionDenied (const prepfailed) $ do
let lck = transferLockFile tfile
createAnnexDirectory $ takeDirectory lck
createAnnexDirectory $ P.takeDirectory lck
tryLockExclusive (Just mode) lck >>= \case
Nothing -> return (Nothing, True)
Just lockhandle -> ifM (checkSaneLock lck lockhandle)
@ -114,7 +116,7 @@ runTransfer' ignorelock t afile retrydecider transferaction = enteringStage Tran
#else
prep tfile createtfile _mode = catchPermissionDenied (const prepfailed) $ do
let lck = transferLockFile tfile
createAnnexDirectory $ takeDirectory lck
createAnnexDirectory $ P.takeDirectory lck
catchMaybeIO (liftIO $ lockExclusive lck) >>= \case
Nothing -> return (Nothing, False)
Just Nothing -> return (Nothing, True)
@ -127,9 +129,9 @@ runTransfer' ignorelock t afile retrydecider transferaction = enteringStage Tran
cleanup _ Nothing = noop
cleanup tfile (Just lockhandle) = do
let lck = transferLockFile tfile
void $ tryIO $ removeFile tfile
void $ tryIO $ R.removeLink tfile
#ifndef mingw32_HOST_OS
void $ tryIO $ removeFile lck
void $ tryIO $ R.removeLink lck
dropLock lockhandle
#else
{- Windows cannot delete the lockfile until the lock
@ -138,7 +140,7 @@ runTransfer' ignorelock t afile retrydecider transferaction = enteringStage Tran
- so ignore failure to remove.
-}
dropLock lockhandle
void $ tryIO $ removeFile lck
void $ tryIO $ R.removeLink lck
#endif
retry numretries oldinfo metervar run =
@ -164,7 +166,7 @@ runTransfer' ignorelock t afile retrydecider transferaction = enteringStage Tran
liftIO $ readMVar metervar
| otherwise = do
f <- fromRepo $ gitAnnexTmpObjectLocation (transferKey t)
liftIO $ catchDefaultIO 0 $ getFileSize f
liftIO $ catchDefaultIO 0 $ getFileSize (fromRawFilePath f)
{- Avoid download and upload of keys with insecure content when
- annex.securehashesonly is configured.

View file

@ -88,7 +88,7 @@ unknownBackendVarietyMessage v =
{- Looks up the backend that should be used for a file.
- That can be configured on a per-file basis in the gitattributes file,
- or forced with --backend. -}
chooseBackend :: FilePath -> Annex (Maybe Backend)
chooseBackend :: RawFilePath -> Annex (Maybe Backend)
chooseBackend f = Annex.getState Annex.forcebackend >>= go
where
go Nothing = maybeLookupBackendVariety . parseKeyVariety . encodeBS

View file

@ -74,7 +74,7 @@ AnnexBranch
-}
openDb :: Annex ContentIdentifierHandle
openDb = do
dbdir <- fromRepo gitAnnexContentIdentifierDbDir
dbdir <- fromRawFilePath <$> fromRepo gitAnnexContentIdentifierDbDir
let db = dbdir </> "db"
unlessM (liftIO $ doesFileExist db) $ do
initDb db $ void $

View file

@ -96,7 +96,7 @@ ExportTreeCurrent
-}
openDb :: UUID -> Annex ExportHandle
openDb u = do
dbdir <- fromRepo (gitAnnexExportDbDir u)
dbdir <- fromRawFilePath <$> fromRepo (gitAnnexExportDbDir u)
let db = dbdir </> "db"
unlessM (liftIO $ doesFileExist db) $ do
initDb db $ void $

View file

@ -114,7 +114,7 @@ openDb _ st@(DbOpen _) = return st
openDb False DbUnavailable = return DbUnavailable
openDb createdb _ = catchPermissionDenied permerr $ withExclusiveLock gitAnnexKeysDbLock $ do
dbdir <- fromRepo gitAnnexKeysDb
let db = dbdir </> "db"
let db = fromRawFilePath dbdir </> "db"
dbexists <- liftIO $ doesFileExist db
case (dbexists, createdb) of
(True, _) -> open db
@ -214,7 +214,7 @@ isInodeKnown i s = or <$> runReaderIO ((:[]) <$$> SQL.isInodeKnown i s)
reconcileStaged :: H.DbQueue -> Annex ()
reconcileStaged qh = do
gitindex <- inRepo currentIndexFile
indexcache <- fromRepo gitAnnexKeysDbIndexCache
indexcache <- fromRawFilePath <$> fromRepo gitAnnexKeysDbIndexCache
withTSDelta (liftIO . genInodeCache (toRawFilePath gitindex)) >>= \case
Just cur ->
liftIO (maybe Nothing readInodeCache <$> catchMaybeIO (readFile indexcache)) >>= \case

View file

@ -45,11 +45,11 @@ data Action m
- to as the queue grows. -}
| InternalAction
{ getRunner :: InternalActionRunner m
, getInternalFiles :: [(FilePath, IO Bool)]
, getInternalFiles :: [(RawFilePath, IO Bool)]
}
{- The String must be unique for each internal action. -}
data InternalActionRunner m = InternalActionRunner String (Repo -> [(FilePath, IO Bool)] -> m ())
data InternalActionRunner m = InternalActionRunner String (Repo -> [(RawFilePath, IO Bool)] -> m ())
instance Eq (InternalActionRunner m) where
InternalActionRunner s1 _ == InternalActionRunner s2 _ = s1 == s2
@ -108,7 +108,7 @@ addCommand subcommand params files q repo =
different _ = True
{- Adds an internal action to the queue. -}
addInternalAction :: MonadIO m => InternalActionRunner m -> [(FilePath, IO Bool)] -> Queue m -> Repo -> m (Queue m)
addInternalAction :: MonadIO m => InternalActionRunner m -> [(RawFilePath, IO Bool)] -> Queue m -> Repo -> m (Queue m)
addInternalAction runner files q repo =
updateQueue action different (length files) q repo
where

View file

@ -31,6 +31,7 @@ import Git.FilePath
import Git.Sha
import qualified Git.DiffTreeItem as Diff
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import Control.Monad.IO.Class
@ -135,7 +136,7 @@ indexPath :: TopFilePath -> InternalGitPath
indexPath = toInternalGitPath . getTopFilePath
{- Refreshes the index, by checking file stat information. -}
refreshIndex :: Repo -> ((FilePath -> IO ()) -> IO ()) -> IO Bool
refreshIndex :: Repo -> ((RawFilePath -> IO ()) -> IO ()) -> IO Bool
refreshIndex repo feeder = withCreateProcess p go
where
params =
@ -150,9 +151,8 @@ refreshIndex repo feeder = withCreateProcess p go
{ std_in = CreatePipe }
go (Just h) _ _ pid = do
feeder $ \f -> do
hPutStr h f
hPutStr h "\0"
feeder $ \f ->
S.hPut h (S.snoc f 0)
hFlush h
hClose h
checkSuccessProcess pid
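-- Editorial note, not part of this commit: S.snoc f 0 appends a NUL byte to
-- the ByteString path, so each file fed to the refresh process is written
-- NUL-terminated in a single hPut (e.g. S.snoc "dir/file" 0 is "dir/file\NUL"),
-- replacing the old feeder's two String-based hPutStr calls.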

View file

@ -180,7 +180,8 @@ logExportExcluded u a = do
getExportExcluded :: UUID -> Annex [Git.Tree.TreeItem]
getExportExcluded u = do
logf <- fromRepo $ gitAnnexExportExcludeLog u
liftIO $ catchDefaultIO [] $ parser <$> L.readFile logf
liftIO $ catchDefaultIO [] $ parser
<$> L.readFile (fromRawFilePath logf)
where
parser = map Git.Tree.lsTreeItemToTreeItem
. rights

View file

@ -63,7 +63,7 @@ scheduleChange u a = scheduleSet u . S.toList . a =<< scheduleGet u
getLastRunTimes :: Annex (M.Map ScheduledActivity LocalTime)
getLastRunTimes = do
f <- fromRepo gitAnnexScheduleState
f <- fromRawFilePath <$> fromRepo gitAnnexScheduleState
liftIO $ fromMaybe M.empty
<$> catchDefaultIO Nothing (readish <$> readFile f)

View file

@ -5,6 +5,7 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Remote.BitTorrent (remote) where
@ -29,8 +30,10 @@ import Annex.UUID
import qualified Annex.Url as Url
import Remote.Helper.ExportImport
import Annex.SpecialRemote.Config
import qualified Utility.RawFilePath as R
import Network.URI
import qualified System.FilePath.ByteString as P
#ifdef WITH_TORRENTPARSER
import Data.Torrent
@ -167,7 +170,7 @@ torrentUrlKey :: URLString -> Annex Key
torrentUrlKey u = return $ fromUrl (fst $ torrentUrlNum u) Nothing
{- Temporary filename to use to store the torrent file. -}
tmpTorrentFile :: URLString -> Annex FilePath
tmpTorrentFile :: URLString -> Annex RawFilePath
tmpTorrentFile u = fromRepo . gitAnnexTmpObjectLocation =<< torrentUrlKey u
{- A cleanup action is registered to delete the torrent file
@ -179,34 +182,37 @@ tmpTorrentFile u = fromRepo . gitAnnexTmpObjectLocation =<< torrentUrlKey u
-}
registerTorrentCleanup :: URLString -> Annex ()
registerTorrentCleanup u = Annex.addCleanup (TorrentCleanup u) $
liftIO . removeWhenExistsWith removeLink =<< tmpTorrentFile u
liftIO . removeWhenExistsWith R.removeLink =<< tmpTorrentFile u
{- Downloads the torrent file. (Not its contents.) -}
downloadTorrentFile :: URLString -> Annex Bool
downloadTorrentFile u = do
torrent <- tmpTorrentFile u
ifM (liftIO $ doesFileExist torrent)
ifM (liftIO $ doesFileExist (fromRawFilePath torrent))
( return True
, do
showAction "downloading torrent file"
createAnnexDirectory (parentDir torrent)
if isTorrentMagnetUrl u
then withOtherTmp $ \othertmp -> do
kf <- fromRawFilePath . keyFile <$> torrentUrlKey u
let metadir = othertmp </> "torrentmeta" </> kf
kf <- keyFile <$> torrentUrlKey u
let metadir = othertmp P.</> "torrentmeta" P.</> kf
createAnnexDirectory metadir
showOutput
ok <- downloadMagnetLink u metadir torrent
liftIO $ removeDirectoryRecursive metadir
ok <- downloadMagnetLink u
(fromRawFilePath metadir)
(fromRawFilePath torrent)
liftIO $ removeDirectoryRecursive
(fromRawFilePath metadir)
return ok
else withOtherTmp $ \othertmp -> do
withTmpFileIn othertmp "torrent" $ \f h -> do
withTmpFileIn (fromRawFilePath othertmp) "torrent" $ \f h -> do
liftIO $ hClose h
resetAnnexFilePerm f
ok <- Url.withUrlOptions $
Url.download nullMeterUpdate u f
when ok $
liftIO $ renameFile f torrent
liftIO $ renameFile f (fromRawFilePath torrent)
return ok
)
@ -237,14 +243,15 @@ downloadTorrentContent :: Key -> URLString -> FilePath -> Int -> MeterUpdate ->
downloadTorrentContent k u dest filenum p = do
torrent <- tmpTorrentFile u
withOtherTmp $ \othertmp -> do
kf <- fromRawFilePath . keyFile <$> torrentUrlKey u
let downloaddir = othertmp </> "torrent" </> kf
kf <- keyFile <$> torrentUrlKey u
let downloaddir = othertmp P.</> "torrent" P.</> kf
createAnnexDirectory downloaddir
f <- wantedfile torrent
let dlf = fromRawFilePath downloaddir </> f
showOutput
ifM (download torrent downloaddir <&&> liftIO (doesFileExist (downloaddir </> f)))
ifM (download torrent downloaddir <&&> liftIO (doesFileExist dlf))
( do
liftIO $ renameFile (downloaddir </> f) dest
liftIO $ renameFile dlf dest
-- The downloaddir is not removed here,
-- so if aria downloaded parts of other
-- files, and this is called again, it will
@ -258,9 +265,9 @@ downloadTorrentContent k u dest filenum p = do
where
download torrent tmpdir = ariaProgress (fromKey keySize k) p
[ Param $ "--select-file=" ++ show filenum
, File torrent
, File (fromRawFilePath torrent)
, Param "-d"
, File tmpdir
, File (fromRawFilePath tmpdir)
, Param "--seed-time=0"
, Param "--summary-interval=0"
, Param "--file-allocation=none"
@ -347,11 +354,11 @@ btshowmetainfo torrent field =
{- Examines the torrent file and gets the list of files in it,
- and their sizes.
-}
torrentFileSizes :: FilePath -> IO [(FilePath, Integer)]
torrentFileSizes :: RawFilePath -> IO [(FilePath, Integer)]
torrentFileSizes torrent = do
#ifdef WITH_TORRENTPARSER
let mkfile = joinPath . map (scrub . decodeBL)
b <- B.readFile torrent
b <- B.readFile (fromRawFilePath torrent)
return $ case readTorrent b of
Left e -> giveup $ "failed to parse torrent: " ++ e
Right t -> case tInfo t of

View file

@ -93,8 +93,9 @@ mkRetrievalVerifiableKeysSecure gc
fileStorer :: (Key -> FilePath -> MeterUpdate -> Annex ()) -> Storer
fileStorer a k (FileContent f) m = a k f m
fileStorer a k (ByteContent b) m = withTmp k $ \f -> do
liftIO $ L.writeFile f b
a k f m
let f' = fromRawFilePath f
liftIO $ L.writeFile f' b
a k f' m
-- A Storer that expects to be provided with a L.ByteString of
-- the content to store.
@ -106,8 +107,8 @@ byteStorer a k c m = withBytes c $ \b -> a k b m
fileRetriever :: (FilePath -> Key -> MeterUpdate -> Annex ()) -> Retriever
fileRetriever a k m callback = do
f <- prepTmp k
a f k m
pruneTmpWorkDirBefore f (callback . FileContent)
a (fromRawFilePath f) k m
pruneTmpWorkDirBefore f (callback . FileContent . fromRawFilePath)
-- A Retriever that generates a lazy ByteString containing the Key's
-- content, and passes it to a callback action which will fully consume it

View file

@ -18,7 +18,8 @@ upgrade = do
-- do the reorganisation of the key files
olddir <- fromRawFilePath <$> fromRepo gitAnnexDir
keys <- getKeysPresent0 olddir
forM_ keys $ \k -> moveAnnex k $ olddir </> keyFile0 k
forM_ keys $ \k ->
moveAnnex k $ toRawFilePath $ olddir </> keyFile0 k
-- update the symlinks to the key files
-- No longer needed here; V1.upgrade does the same thing

View file

@ -75,10 +75,10 @@ moveContent = do
where
move f = do
let k = fileKey1 (takeFileName f)
let d = parentDir f
let d = fromRawFilePath $ parentDir $ toRawFilePath f
liftIO $ allowWrite d
liftIO $ allowWrite f
_ <- moveAnnex k f
_ <- moveAnnex k (toRawFilePath f)
liftIO $ removeDirectory d
updateSymlinks :: Annex ()
@ -94,7 +94,8 @@ updateSymlinks = do
case r of
Nothing -> noop
Just (k, _) -> do
link <- calcRepo $ gitAnnexLink f k
link <- fromRawFilePath
<$> calcRepo (gitAnnexLink (toRawFilePath f) k)
liftIO $ removeFile f
liftIO $ createSymbolicLink link f
Annex.Queue.addCommand "add" [Param "--"] [f]
@ -113,10 +114,10 @@ moveLocationLogs = do
, return []
)
move (l, k) = do
dest <- fromRepo $ logFile2 k
dest <- fromRepo (logFile2 k)
dir <- fromRepo Upgrade.V2.gitStateDir
let f = dir </> l
createWorkTreeDirectory (parentDir dest)
createWorkTreeDirectory (parentDir (toRawFilePath dest))
-- could just git mv, but this way deals with
-- log files that are not checked into git,
-- as well as merging with already upgraded

View file

@ -132,7 +132,7 @@ attrLines =
gitAttributesUnWrite :: Git.Repo -> IO ()
gitAttributesUnWrite repo = do
let attributes = Git.attributes repo
let attributes = fromRawFilePath (Git.attributes repo)
whenM (doesFileExist attributes) $ do
c <- readFileStrict attributes
liftIO $ viaTmp writeFile attributes $ unlines $

View file

@ -5,6 +5,7 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Upgrade.V7 where
@ -18,6 +19,9 @@ import qualified Git.LsFiles as LsFiles
import qualified Git
import Git.FilePath
import Config
import qualified Utility.RawFilePath as R
import qualified System.FilePath.ByteString as P
upgrade :: Bool -> Annex Bool
upgrade automatic = do
@ -33,7 +37,7 @@ upgrade automatic = do
-- new database is not populated. It will be automatically
-- populated from the git-annex branch the next time it is used.
removeOldDb gitAnnexContentIdentifierDbDirOld
liftIO . removeWhenExistsWith removeLink
liftIO . removeWhenExistsWith R.removeLink
=<< fromRepo gitAnnexContentIdentifierLockOld
-- The export databases are deleted here. The new databases
@ -43,33 +47,33 @@ upgrade automatic = do
populateKeysDb
removeOldDb gitAnnexKeysDbOld
liftIO . removeWhenExistsWith removeLink
liftIO . removeWhenExistsWith R.removeLink
=<< fromRepo gitAnnexKeysDbIndexCacheOld
liftIO . removeWhenExistsWith removeLink
liftIO . removeWhenExistsWith R.removeLink
=<< fromRepo gitAnnexKeysDbLockOld
updateSmudgeFilter
return True
gitAnnexKeysDbOld :: Git.Repo -> FilePath
gitAnnexKeysDbOld r = fromRawFilePath (gitAnnexDir r) </> "keys"
gitAnnexKeysDbOld :: Git.Repo -> RawFilePath
gitAnnexKeysDbOld r = gitAnnexDir r P.</> "keys"
gitAnnexKeysDbLockOld :: Git.Repo -> FilePath
gitAnnexKeysDbLockOld r = gitAnnexKeysDbOld r ++ ".lck"
gitAnnexKeysDbLockOld :: Git.Repo -> RawFilePath
gitAnnexKeysDbLockOld r = gitAnnexKeysDbOld r <> ".lck"
gitAnnexKeysDbIndexCacheOld :: Git.Repo -> FilePath
gitAnnexKeysDbIndexCacheOld r = gitAnnexKeysDbOld r ++ ".cache"
gitAnnexKeysDbIndexCacheOld :: Git.Repo -> RawFilePath
gitAnnexKeysDbIndexCacheOld r = gitAnnexKeysDbOld r <> ".cache"
gitAnnexContentIdentifierDbDirOld :: Git.Repo -> FilePath
gitAnnexContentIdentifierDbDirOld r = fromRawFilePath (gitAnnexDir r) </> "cids"
gitAnnexContentIdentifierDbDirOld :: Git.Repo -> RawFilePath
gitAnnexContentIdentifierDbDirOld r = gitAnnexDir r P.</> "cids"
gitAnnexContentIdentifierLockOld :: Git.Repo -> FilePath
gitAnnexContentIdentifierLockOld r = gitAnnexContentIdentifierDbDirOld r ++ ".lck"
gitAnnexContentIdentifierLockOld :: Git.Repo -> RawFilePath
gitAnnexContentIdentifierLockOld r = gitAnnexContentIdentifierDbDirOld r <> ".lck"
removeOldDb :: (Git.Repo -> FilePath) -> Annex ()
removeOldDb :: (Git.Repo -> RawFilePath) -> Annex ()
removeOldDb getdb = do
db <- fromRepo getdb
db <- fromRawFilePath <$> fromRepo getdb
whenM (liftIO $ doesDirectoryExist db) $ do
v <- liftIO $ tryNonAsync $
#if MIN_VERSION_directory(1,2,7)
@ -124,7 +128,7 @@ populateKeysDb = unlessM isBareRepo $ do
-- checked into the repository.
updateSmudgeFilter :: Annex ()
updateSmudgeFilter = do
lf <- Annex.fromRepo Git.attributesLocal
lf <- fromRawFilePath <$> Annex.fromRepo Git.attributesLocal
ls <- liftIO $ lines <$> catchDefaultIO "" (readFileStrict lf)
let ls' = removedotfilter ls
when (ls /= ls') $