avoid closing pipe before all the shas are read from it
Could have just used hGetContentsStrict here, but that would require storing all the shas in memory. Since this is called at the end of a git-annex run, it may have created a *lot* of shas, so I avoid that memory use and stream them out like before.
This commit is contained in:
parent
0e45b762a0
commit
46588674b0
2 changed files with 10 additions and 7 deletions
|
@ -291,9 +291,10 @@ stageJournal = do
|
|||
withIndex $ liftIO $ do
|
||||
let dir = gitAnnexJournalDir g
|
||||
let paths = map (dir </>) fs
|
||||
shas <- Git.HashObject.hashFiles paths g
|
||||
(shas, cleanup) <- Git.HashObject.hashFiles paths g
|
||||
Git.UnionMerge.update_index g $
|
||||
index_lines shas (map fileJournal fs)
|
||||
cleanup
|
||||
mapM_ removeFile paths
|
||||
where
|
||||
index_lines shas = map genline . zip shas
|
||||
|
|
|
@ -10,16 +10,15 @@ module Git.HashObject where
|
|||
import Common
|
||||
import Git
|
||||
|
||||
{- Injects a set of files into git, returning the shas of the objects. -}
|
||||
hashFiles :: [FilePath] -> Repo -> IO [Sha]
|
||||
{- Injects a set of files into git, returning the shas of the objects
|
||||
- and an IO action to call once the shas have been used. -}
|
||||
hashFiles :: [FilePath] -> Repo -> IO ([Sha], IO ())
|
||||
hashFiles paths repo = do
|
||||
(pid, fromh, toh) <- hPipeBoth "git" $ toCommand $ git_hash_object repo
|
||||
_ <- forkProcess (feeder toh)
|
||||
hClose toh
|
||||
shas <- map Git.Ref . lines <$> hGetContents fromh
|
||||
hClose fromh
|
||||
forceSuccess pid
|
||||
return shas
|
||||
shas <- map Git.Ref . lines <$> hGetContentsStrict fromh
|
||||
return (shas, ender fromh pid)
|
||||
where
|
||||
git_hash_object = Git.gitCommandLine
|
||||
[Param "hash-object", Param "-w", Param "--stdin-paths"]
|
||||
|
@ -27,3 +26,6 @@ hashFiles paths repo = do
|
|||
hPutStr toh $ unlines paths
|
||||
hClose toh
|
||||
exitSuccess
|
||||
ender fromh pid = do
|
||||
hClose fromh
|
||||
forceSuccess pid
|
||||
|
|
Loading…
Reference in a new issue