convert KeySource to RawFilePath

Joey Hess 2020-02-21 09:34:59 -04:00
parent e468fbc518
commit c31e1be781
No known key found for this signature in database
GPG key ID: DB12DB0FF05F8F38
14 changed files with 74 additions and 55 deletions
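A minimal sketch of the pattern this commit applies, before the per-file diffs: KeySource's two path fields become RawFilePath (a path held as a ByteString), and call sites convert at the boundary with toRawFilePath / fromRawFilePath. The Char8-based helper bodies and the mkKeySource/describeSource examples below are illustrative assumptions only; git-annex's real helpers live in Utility.FileSystemEncoding and go through the filesystem encoding rather than assuming ASCII.

import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B8

-- RawFilePath is a ByteString path, as re-exported by System.FilePath.ByteString.
type RawFilePath = ByteString

-- After this commit both path fields are RawFilePath. InodeCache is
-- stubbed with () here to keep the sketch self-contained.
data KeySource = KeySource
	{ keyFilename :: RawFilePath
	, contentLocation :: RawFilePath
	, inodeCache :: Maybe ()
	}
	deriving (Show)

-- Simplified stand-ins for the conversion helpers (ASCII-only; the
-- real functions respect the GHC filesystem encoding).
toRawFilePath :: FilePath -> RawFilePath
toRawFilePath = B8.pack

fromRawFilePath :: RawFilePath -> FilePath
fromRawFilePath = B8.unpack

-- Typical call-site change: wrap a FilePath when building the
-- KeySource, unwrap where a FilePath-based API is still in use.
mkKeySource :: FilePath -> FilePath -> KeySource
mkKeySource worktreefile tmpfile = KeySource
	{ keyFilename = toRawFilePath worktreefile
	, contentLocation = toRawFilePath tmpfile
	, inodeCache = Nothing
	}

-- Unwrapping where a String is still needed, e.g. for a warning message.
describeSource :: KeySource -> String
describeSource = fromRawFilePath . keyFilename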

View file

@@ -369,8 +369,8 @@ downloadImport remote importtreeconfig importablecontents = do
 	f <- fromRepo $ fromTopFilePath $ locworktreefilename loc
 	backend <- chooseBackend (fromRawFilePath f)
 	let ks = KeySource
-		{ keyFilename = (fromRawFilePath f)
-		, contentLocation = tmpfile
+		{ keyFilename = f
+		, contentLocation = toRawFilePath tmpfile
 		, inodeCache = Nothing
 		}
 	fmap fst <$> genKey ks nullMeterUpdate backend

View file

@@ -49,6 +49,7 @@ import Annex.AdjustedBranch
 import Annex.FileMatcher
 import Control.Exception (IOException)
+import qualified Utility.RawFilePath as R
 
 data LockedDown = LockedDown
 	{ lockDownConfig :: LockDownConfig
@@ -91,13 +92,15 @@ lockDown' cfg file = tryIO $ ifM crippledFileSystem
 		Just tmpdir -> withhardlink tmpdir
 	)
   where
+	file' = toRawFilePath file
 	nohardlink = withTSDelta $ liftIO . nohardlink'
 	nohardlink' delta = do
 		cache <- genInodeCache (toRawFilePath file) delta
 		return $ LockedDown cfg $ KeySource
-			{ keyFilename = file
-			, contentLocation = file
+			{ keyFilename = file'
+			, contentLocation = file'
 			, inodeCache = cache
 			}
@@ -116,8 +119,8 @@ lockDown' cfg file = tryIO $ ifM crippledFileSystem
 		createLink file tmpfile
 		cache <- genInodeCache (toRawFilePath tmpfile) delta
 		return $ LockedDown cfg $ KeySource
-			{ keyFilename = file
-			, contentLocation = tmpfile
+			{ keyFilename = file'
+			, contentLocation = toRawFilePath tmpfile
 			, inodeCache = cache
 			}
@@ -135,10 +138,11 @@ ingestAdd' meterupdate ld@(Just (LockedDown cfg source)) mk = do
 		Just k -> do
 			let f = keyFilename source
 			if lockingFile cfg
-				then addLink f k mic
+				then addLink (fromRawFilePath f) k mic
 				else do
-					mode <- liftIO $ catchMaybeIO $ fileMode <$> getFileStatus (contentLocation source)
-					stagePointerFile (toRawFilePath f) mode =<< hashPointerFile k
+					mode <- liftIO $ catchMaybeIO $
+						fileMode <$> R.getFileStatus (contentLocation source)
+					stagePointerFile f mode =<< hashPointerFile k
 			return (Just k)
 
 {- Ingests a locked down file into the annex. Does not update the working
@@ -151,12 +155,15 @@ ingest' _ _ Nothing _ _ = return (Nothing, Nothing)
 ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage = withTSDelta $ \delta -> do
 	k <- case mk of
 		Nothing -> do
-			backend <- maybe (chooseBackend $ keyFilename source) (return . Just) preferredbackend
+			backend <- maybe
+				(chooseBackend $ fromRawFilePath $ keyFilename source)
+				(return . Just)
+				preferredbackend
 			fmap fst <$> genKey source meterupdate backend
 		Just k -> return (Just k)
 	let src = contentLocation source
-	ms <- liftIO $ catchMaybeIO $ getFileStatus src
-	mcache <- maybe (pure Nothing) (liftIO . toInodeCache delta src) ms
+	ms <- liftIO $ catchMaybeIO $ R.getFileStatus src
+	mcache <- maybe (pure Nothing) (liftIO . toInodeCache delta (fromRawFilePath src)) ms
 	case (mcache, inodeCache source) of
 		(_, Nothing) -> go k mcache ms
 		(Just newc, Just c) | compareStrong c newc -> go k mcache ms
@@ -168,12 +175,12 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
 	go _ _ _ = failure "failed to generate a key"
 
 	golocked key mcache s =
-		tryNonAsync (moveAnnex key $ contentLocation source) >>= \case
+		tryNonAsync (moveAnnex key $ fromRawFilePath $ contentLocation source) >>= \case
 			Right True -> do
 				populateAssociatedFiles key source restage
 				success key mcache s
 			Right False -> giveup "failed to add content to annex"
-			Left e -> restoreFile (keyFilename source) key e
+			Left e -> restoreFile (fromRawFilePath $ keyFilename source) key e
 
 	gounlocked key (Just cache) s = do
 		-- Remove temp directory hard link first because
@@ -181,7 +188,7 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
 		-- already has a hard link.
 		cleanCruft source
 		cleanOldKeys (keyFilename source) key
-		linkToAnnex key (keyFilename source) (Just cache) >>= \case
+		linkToAnnex key (fromRawFilePath $ keyFilename source) (Just cache) >>= \case
 			LinkAnnexFailed -> failure "failed to link to annex"
 			_ -> do
 				finishIngestUnlocked' key source restage
@@ -189,11 +196,11 @@ ingest' preferredbackend meterupdate (Just (LockedDown cfg source)) mk restage =
 	gounlocked _ _ _ = failure "failed statting file"
 
 	success k mcache s = do
-		genMetaData k (toRawFilePath (keyFilename source)) s
+		genMetaData k (keyFilename source) s
 		return (Just k, mcache)
 
 	failure msg = do
-		warning $ keyFilename source ++ " " ++ msg
+		warning $ fromRawFilePath (keyFilename source) ++ " " ++ msg
 		cleanCruft source
 		return (Nothing, Nothing)
@@ -205,7 +212,7 @@ finishIngestUnlocked key source = do
 finishIngestUnlocked' :: Key -> KeySource -> Restage -> Annex ()
 finishIngestUnlocked' key source restage = do
 	Database.Keys.addAssociatedFile key
-		=<< inRepo (toTopFilePath (toRawFilePath (keyFilename source)))
+		=<< inRepo (toTopFilePath (keyFilename source))
 	populateAssociatedFiles key source restage
 
 {- Copy to any other locations using the same key. -}
@@ -214,22 +221,22 @@ populateAssociatedFiles key source restage = do
 	obj <- calcRepo (gitAnnexLocation key)
 	g <- Annex.gitRepo
 	ingestedf <- flip fromTopFilePath g
-		<$> inRepo (toTopFilePath (toRawFilePath (keyFilename source)))
+		<$> inRepo (toTopFilePath (keyFilename source))
 	afs <- map (`fromTopFilePath` g) <$> Database.Keys.getAssociatedFiles key
 	forM_ (filter (/= ingestedf) afs) $
 		populatePointerFile restage key obj
 
 cleanCruft :: KeySource -> Annex ()
 cleanCruft source = when (contentLocation source /= keyFilename source) $
-	liftIO $ nukeFile $ contentLocation source
+	liftIO $ nukeFile $ fromRawFilePath $ contentLocation source
 
 -- If a worktree file was was hard linked to an annex object before,
 -- modifying the file would have caused the object to have the wrong
 -- content. Clean up from that.
-cleanOldKeys :: FilePath -> Key -> Annex ()
+cleanOldKeys :: RawFilePath -> Key -> Annex ()
 cleanOldKeys file newkey = do
 	g <- Annex.gitRepo
-	topf <- inRepo (toTopFilePath (toRawFilePath file))
+	topf <- inRepo (toTopFilePath file)
 	ingestedf <- fromRepo $ fromTopFilePath topf
 	oldkeys <- filter (/= newkey)
 		<$> Database.Keys.getAssociatedKey topf

View file

@@ -286,9 +286,9 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
 		ks = keySource ld
 		doadd = sanitycheck ks $ do
 			(mkey, _mcache) <- liftAnnex $ do
-				showStart "add" $ toRawFilePath $ keyFilename ks
+				showStart "add" $ keyFilename ks
 				ingest nullMeterUpdate (Just $ LockedDown lockdownconfig ks) Nothing
-			maybe (failedingest change) (done change $ keyFilename ks) mkey
+			maybe (failedingest change) (done change $ fromRawFilePath $ keyFilename ks) mkey
 	add _ _ = return Nothing
 
 {- Avoid overhead of re-injesting a renamed unlocked file, by
@@ -320,7 +320,7 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
 	fastadd change key = do
 		let source = keySource $ lockedDown change
 		liftAnnex $ finishIngestUnlocked key source
-		done change (keyFilename source) key
+		done change (fromRawFilePath $ keyFilename source) key
 
 removedKeysMap :: InodeComparisonType -> [Change] -> Annex (M.Map InodeCacheKey Key)
 removedKeysMap ct l = do
@@ -347,14 +347,14 @@ handleAdds lockdowndir havelsof delayadd cs = returnWhen (null incomplete) $ do
 	 - and is still a hard link to its contentLocation,
 	 - before ingesting it. -}
 	sanitycheck keysource a = do
-		fs <- liftIO $ getSymbolicLinkStatus $ keyFilename keysource
-		ks <- liftIO $ getSymbolicLinkStatus $ contentLocation keysource
+		fs <- liftIO $ getSymbolicLinkStatus $ fromRawFilePath $ keyFilename keysource
+		ks <- liftIO $ getSymbolicLinkStatus $ fromRawFilePath $ contentLocation keysource
 		if deviceID ks == deviceID fs && fileID ks == fileID fs
 			then a
 			else do
 				-- remove the hard link
 				when (contentLocation keysource /= keyFilename keysource) $
-					void $ liftIO $ tryIO $ removeFile $ contentLocation keysource
+					void $ liftIO $ tryIO $ removeFile $ fromRawFilePath $ contentLocation keysource
 				return Nothing
 
 {- Shown an alert while performing an action to add a file or
@@ -400,7 +400,7 @@ safeToAdd lockdowndir lockdownconfig havelsof delayadd pending inprocess = do
 		else return checked
   where
 	check openfiles change@(InProcessAddChange { lockedDown = ld })
-		| S.member (contentLocation (keySource ld)) openfiles = Left change
+		| S.member (fromRawFilePath (contentLocation (keySource ld))) openfiles = Left change
 	check _ change = Right change
 
 	mkinprocess (c, Just ld) = Just InProcessAddChange
@@ -411,11 +411,11 @@ safeToAdd lockdowndir lockdownconfig havelsof delayadd pending inprocess = do
 	canceladd (InProcessAddChange { lockedDown = ld }) = do
 		let ks = keySource ld
-		warning $ keyFilename ks
+		warning $ fromRawFilePath (keyFilename ks)
 			++ " still has writers, not adding"
 		-- remove the hard link
 		when (contentLocation ks /= keyFilename ks) $
-			void $ liftIO $ tryIO $ removeFile $ contentLocation ks
+			void $ liftIO $ tryIO $ removeFile $ fromRawFilePath $ contentLocation ks
 	canceladd _ = noop
 
 	openwrite (_file, mode, _pid)
@@ -434,7 +434,8 @@ safeToAdd lockdowndir lockdownconfig havelsof delayadd pending inprocess = do
 	 -}
 	findopenfiles keysources = ifM crippledFileSystem
 		( liftIO $ do
-			let segments = segmentXargsUnordered $ map keyFilename keysources
+			let segments = segmentXargsUnordered $
+				map (fromRawFilePath . keyFilename) keysources
 			concat <$> forM segments (\fs -> Lsof.query $ "--" : fs)
 		, liftIO $ Lsof.queryDir lockdowndir
 		)

View file

@@ -12,6 +12,7 @@ module Assistant.Types.Changes where
 import Types.KeySource
 import Types.Key
 import Utility.TList
+import Utility.FileSystemEncoding
 import Annex.Ingest
 
 import Control.Concurrent.STM
@@ -57,7 +58,7 @@ changeInfoKey _ = Nothing
 changeFile :: Change -> FilePath
 changeFile (Change _ f _) = f
 changeFile (PendingAddChange _ f) = f
-changeFile (InProcessAddChange _ ld) = keyFilename $ keySource ld
+changeFile (InProcessAddChange _ ld) = fromRawFilePath $ keyFilename $ keySource ld
 
 isPendingAddChange :: Change -> Bool
 isPendingAddChange (PendingAddChange {}) = True

View file

@@ -90,7 +90,7 @@ hashKeyVariety (Blake2spHash size) he = Blake2spKey size he
 {- A key is a hash of its contents. -}
 keyValue :: Hash -> KeySource -> MeterUpdate -> Annex (Maybe Key)
 keyValue hash source meterupdate = do
-	let file = contentLocation source
+	let file = fromRawFilePath (contentLocation source)
 	filesize <- liftIO $ getFileSize file
 	s <- hashFile hash file meterupdate
 	return $ Just $ mkKey $ \k -> k
@@ -106,7 +106,7 @@ keyValueE hash source meterupdate =
   where
 	addE k = do
 		maxlen <- annexMaxExtensionLength <$> Annex.getGitConfig
-		let ext = selectExtension maxlen (toRawFilePath (keyFilename source))
+		let ext = selectExtension maxlen (keyFilename source)
 		return $ Just $ alterKey k $ \d -> d
 			{ keyName = keyName d <> ext
 			, keyVariety = hashKeyVariety hash (HasExt True)

View file

@@ -16,6 +16,7 @@ import Git.FilePath
 import Utility.Metered
 
 import qualified Data.ByteString.Char8 as S8
+import qualified Utility.RawFilePath as R
 
 backends :: [Backend]
 backends = [backend]
@@ -36,10 +37,10 @@ backend = Backend
 keyValue :: KeySource -> MeterUpdate -> Annex (Maybe Key)
 keyValue source _ = do
 	let f = contentLocation source
-	stat <- liftIO $ getFileStatus f
-	sz <- liftIO $ getFileSize' f stat
+	stat <- liftIO $ R.getFileStatus f
+	sz <- liftIO $ getFileSize' (fromRawFilePath f) stat
 	relf <- fromRawFilePath . getTopFilePath
-		<$> inRepo (toTopFilePath $ toRawFilePath $ keyFilename source)
+		<$> inRepo (toTopFilePath $ keyFilename source)
 	return $ Just $ mkKey $ \k -> k
 		{ keyName = genKeyName relf
 		, keyVariety = WORMKey

View file

@@ -367,8 +367,8 @@ finishDownloadWith :: AddUnlockedMatcher -> FilePath -> UUID -> URLString -> Fil
 finishDownloadWith addunlockedmatcher tmp u url file = do
 	backend <- chooseBackend file
 	let source = KeySource
-		{ keyFilename = file
-		, contentLocation = tmp
+		{ keyFilename = toRawFilePath file
+		, contentLocation = toRawFilePath tmp
 		, inodeCache = Nothing
 		}
 	genKey source nullMeterUpdate backend >>= \case

View file

@@ -20,8 +20,11 @@ cmd = noCommit $ noMessages $ dontCheck repoExists $
 	(batchable run (pure ()))
 
 run :: () -> String -> Annex Bool
-run _ file = genKey (KeySource file file Nothing) nullMeterUpdate Nothing >>= \case
+run _ file = genKey ks nullMeterUpdate Nothing >>= \case
 	Just (k, _) -> do
 		liftIO $ putStrLn $ serializeKey k
 		return True
 	Nothing -> return False
+  where
+	ks = KeySource file' file' Nothing
+	file' = toRawFilePath file

View file

@@ -118,11 +118,13 @@ seek o@(RemoteImportOptions {}) = startConcurrency commandStages $ do
 startLocal :: AddUnlockedMatcher -> GetFileMatcher -> DuplicateMode -> (FilePath, FilePath) -> CommandStart
 startLocal addunlockedmatcher largematcher mode (srcfile, destfile) =
 	ifM (liftIO $ isRegularFile <$> getSymbolicLinkStatus srcfile)
-		( starting "import" (ActionItemWorkTreeFile (toRawFilePath destfile))
+		( starting "import" (ActionItemWorkTreeFile destfile')
 			pickaction
 		, stop
 		)
   where
+	destfile' = toRawFilePath destfile
+
 	deletedup k = do
 		showNote $ "duplicate of " ++ serializeKey k
 		verifyExisting k destfile
@@ -182,7 +184,7 @@ startLocal addunlockedmatcher largematcher mode (srcfile, destfile) =
 			-- weakly the same as the origianlly locked down file's
 			-- inode cache. (Since the file may have been copied,
 			-- its inodes may not be the same.)
-			newcache <- withTSDelta $ liftIO . genInodeCache (toRawFilePath destfile)
+			newcache <- withTSDelta $ liftIO . genInodeCache destfile'
 			let unchanged = case (newcache, inodeCache (keySource ld)) of
 				(_, Nothing) -> True
 				(Just newc, Just c) | compareWeak c newc -> True
@@ -193,8 +195,8 @@ startLocal addunlockedmatcher largematcher mode (srcfile, destfile) =
 			-- is what will be ingested.
 			let ld' = ld
 				{ keySource = KeySource
-					{ keyFilename = destfile
-					, contentLocation = destfile
+					{ keyFilename = destfile'
+					, contentLocation = destfile'
 					, inodeCache = newcache
 					}
 				}
@@ -203,7 +205,7 @@ startLocal addunlockedmatcher largematcher mode (srcfile, destfile) =
 			>>= maybe
 				stop
 				(\addedk -> next $ Command.Add.cleanup addedk True)
-		, next $ Command.Add.addSmall $ toRawFilePath destfile
+		, next $ Command.Add.addSmall destfile'
 		)
 	notoverwriting why = do
 		warning $ "not overwriting existing " ++ destfile ++ " " ++ why

View file

@@ -85,8 +85,8 @@ perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbacken
 	genkey Nothing = do
 		content <- calcRepo $ gitAnnexLocation oldkey
 		let source = KeySource
-			{ keyFilename = fromRawFilePath file
-			, contentLocation = fromRawFilePath content
+			{ keyFilename = file
+			, contentLocation = content
 			, inodeCache = Nothing
 			}
 		v <- genKey source nullMeterUpdate (Just newbackend)

View file

@@ -55,7 +55,7 @@ startSrcDest _ = giveup "specify a src file and a dest file"
 startKnown :: FilePath -> CommandStart
 startKnown src = notAnnexed src $
 	starting "reinject" (ActionItemOther (Just src)) $ do
-		mkb <- genKey (KeySource src src Nothing) nullMeterUpdate Nothing
+		mkb <- genKey ks nullMeterUpdate Nothing
 		case mkb of
 			Nothing -> error "Failed to generate key"
 			Just (key, _) -> ifM (isKnownKey key)
@@ -64,6 +64,9 @@ startKnown src = notAnnexed src $
 				warning "Not known content; skipping"
 				next $ return True
 			)
+  where
+	ks = KeySource src' src' Nothing
+	src' = toRawFilePath src
 
 notAnnexed :: FilePath -> CommandStart -> CommandStart
 notAnnexed src a =

View file

@@ -322,8 +322,8 @@ randKey sz = withTmpFile "randkey" $ \f h -> do
 		Right (rand, _) -> liftIO $ B.hPut h rand
 	liftIO $ hClose h
 	let ks = KeySource
-		{ keyFilename = f
-		, contentLocation = f
+		{ keyFilename = toRawFilePath f
+		, contentLocation = toRawFilePath f
 		, inodeCache = Nothing
 		}
 	k <- fromMaybe (error "failed to generate random key")

View file

@@ -584,7 +584,7 @@ getKey b f = fromJust <$> annexeval go
   where
 	go = Types.Backend.getKey b ks Utility.Metered.nullMeterUpdate
 	ks = Types.KeySource.KeySource
-		{ Types.KeySource.keyFilename = f
-		, Types.KeySource.contentLocation = f
+		{ Types.KeySource.keyFilename = toRawFilePath f
+		, Types.KeySource.contentLocation = toRawFilePath f
 		, Types.KeySource.inodeCache = Nothing
 		}

View file

@@ -8,6 +8,7 @@
 module Types.KeySource where
 
 import Utility.InodeCache
+import System.FilePath.ByteString (RawFilePath)
 
 {- When content is in the process of being ingested into the annex,
 - and a Key generated from it, this data type is used.
@@ -22,8 +23,8 @@ import Utility.InodeCache
 - files that may be made while they're in the process of being ingested.
 -}
 data KeySource = KeySource
-	{ keyFilename :: FilePath
-	, contentLocation :: FilePath
+	{ keyFilename :: RawFilePath
+	, contentLocation :: RawFilePath
 	, inodeCache :: Maybe InodeCache
 	}
 	deriving (Show)