tweaks
parent 10edaf6dc9
commit 025ded4a2d

3 changed files with 34 additions and 40 deletions

@@ -215,20 +215,16 @@ set file content = do
  -
  - Returns an empty string if the file doesn't exist yet. -}
 get :: FilePath -> Annex String
-get file = do
-    cached <- getCache file
-    case cached of
-        Just content -> return content
-        Nothing -> do
-            j <- getJournalFile file
-            case j of
-                Just content -> do
-                    setCache file content
-                    return content
-                Nothing -> withIndexUpdate $ do
-                    content <- catFile fullname file
-                    setCache file content
-                    return content
+get file = fromcache =<< getCache file
+    where
+        fromcache (Just content) = return content
+        fromcache Nothing = fromjournal =<< getJournalFile file
+        fromjournal (Just content) = cache content
+        fromjournal Nothing = withIndexUpdate $
+            cache =<< catFile fullname file
+        cache content = do
+            setCache file content
+            return content
 
 {- Lists all files on the branch. There may be duplicates in the list. -}
 files :: Annex [FilePath]
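
For context: the rewritten get chains three lookups through small helpers in its where clause (the in-memory cache, then the journal, then the branch via catFile) and caches whatever it ends up with. A minimal standalone sketch of that fallback pattern follows; it is not git-annex code: an IORef and association lists stand in for the real cache, journal, and branch, and all names are illustrative.

import Data.IORef

type Cache = IORef [(FilePath, String)]

-- Look in the cache, then the journal, then the "branch"; cache the result.
-- Returns an empty string if the file is found nowhere, mirroring get's
-- documented behaviour for files that don't exist yet.
get :: Cache -> [(FilePath, String)] -> [(FilePath, String)] -> FilePath -> IO String
get cacheref journal branch file = fromcache =<< (lookup file <$> readIORef cacheref)
    where
        fromcache (Just content) = return content
        fromcache Nothing = fromjournal (lookup file journal)
        fromjournal (Just content) = cache content
        fromjournal Nothing = cache $ maybe "" id (lookup file branch)
        cache content = do
            modifyIORef cacheref ((file, content) :)
            return content

main :: IO ()
main = do
    cacheref <- newIORef []
    let journal = [("b.log", "b from the journal")]
    let branch = [("a.log", "a from the branch")]
    mapM_ (\f -> print =<< get cacheref journal branch f)
        ["a.log", "b.log", "missing.log"]
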
@@ -287,8 +283,7 @@ stageJournalFiles = do
     let paths = map (dir </>) fs
     -- inject all the journal files directly into git
     -- in one quick command
-    (pid, fromh, toh) <- hPipeBoth "git" $ toCommand $
-        Git.gitCommandLine g [Param "hash-object", Param "-w", Param "--stdin-paths"]
+    (pid, fromh, toh) <- hPipeBoth "git" $ toCommand $ git_hash_object g
     _ <- forkProcess $ do
         hPutStr toh $ unlines paths
         hClose toh
@@ -304,6 +299,9 @@ stageJournalFiles = do
     where
        index_lines shas = map genline . zip shas
        genline (sha, file) = Git.UnionMerge.update_index_line sha file
+       git_hash_object g = Git.gitCommandLine g
+           [Param "hash-object", Param "-w", Param "--stdin-paths"]
+
 
 {- Checks if there are changes in the journal. -}
 journalDirty :: Annex Bool
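
The factored-out git_hash_object builds the one git command that all the journal files are piped through: `git hash-object -w --stdin-paths` reads paths on stdin and prints one object SHA per path. A standalone sketch of that interaction follows; it is not git-annex code, and it uses System.Process.readProcess rather than hPipeBoth/forkProcess.

import System.Process (readProcess)

-- Feed many paths to a single `git hash-object -w --stdin-paths` invocation;
-- git writes the blobs into the object database and prints one SHA per input
-- path, in order, so the result lines up with the argument list.
hashObjects :: [FilePath] -> IO [String]
hashObjects paths =
    lines <$> readProcess "git" ["hash-object", "-w", "--stdin-paths"] (unlines paths)

main :: IO ()
main = do
    -- assumes this runs inside a git repository and that these files exist
    shas <- hashObjects ["foo.log", "bar.log"]
    mapM_ putStrLn shas
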

Backend.hs (42 changed lines)

@@ -39,23 +39,20 @@ orderedList = do
     l <- Annex.getState Annex.backends -- list is cached here
     if not $ null l
         then return l
-        else do
-            s <- getstandard
-            d <- Annex.getState Annex.forcebackend
-            handle d s
+        else handle =<< Annex.getState Annex.forcebackend
     where
-        parseBackendList [] = list
-        parseBackendList s = map lookupBackendName $ words s
-        handle Nothing s = return s
-        handle (Just "") s = return s
-        handle (Just name) s = do
-            let l' = lookupBackendName name : s
-            Annex.changeState $ \state -> state { Annex.backends = l' }
+        handle Nothing = standard
+        handle (Just "") = standard
+        handle (Just name) = do
+            l' <- (lookupBackendName name :) <$> standard
+            Annex.changeState $ \s -> s { Annex.backends = l' }
             return l'
-        getstandard = do
+        standard = do
            g <- gitRepo
            return $ parseBackendList $
                Git.configGet g "annex.backends" ""
+        parseBackendList [] = list
+        parseBackendList s = map lookupBackendName $ words s
 
 {- Generates a key for a file, trying each backend in turn until one
  - accepts it. -}
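
The orderedList tweak removes the intermediate s and d bindings: handle is fed Annex.forcebackend directly and builds on standard itself instead of having the standard list threaded through as an argument. A standalone sketch of that control flow follows; it is not git-annex code and assumes the mtl package, with a plain State over a pair standing in for the Annex monad and illustrative backend names.

import Control.Monad.State

-- state: (forced backend, if any; cached backend name list)
type Demo = State (Maybe String, [String])

orderedList :: Demo [String]
orderedList = do
    (_, l) <- get
    if not $ null l
        then return l
        else handle =<< gets fst
    where
        handle Nothing = standard
        handle (Just "") = standard
        handle (Just name) = do
            l' <- (name :) <$> standard
            modify $ \(forced, _) -> (forced, l')
            return l'
        standard = return ["SHA256", "WORM"]

main :: IO ()
main = do
    print $ evalState orderedList (Just "SHA1", [])
    print $ evalState orderedList (Nothing, [])
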
@@ -83,17 +80,15 @@ lookupFile file = do
     where
         getsymlink = takeFileName <$> readSymbolicLink file
         makekey l = maybe (return Nothing) (makeret l) (fileKey l)
-        makeret l k =
+        makeret l k = let bname = keyBackendName k in
             case maybeLookupBackendName bname of
                 Just backend -> return $ Just (k, backend)
                 Nothing -> do
-                    when (isLinkToAnnex l) $
-                        warning skip
+                    when (isLinkToAnnex l) $ warning $
+                        "skipping " ++ file ++
+                        " (unknown backend " ++
+                        bname ++ ")"
                     return Nothing
-            where
-                bname = keyBackendName k
-                skip = "skipping " ++ file ++
-                    " (unknown backend " ++ bname ++ ")"
 
 type BackendFile = (Maybe (Backend Annex), FilePath)
 
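
In lookupFile, makeret now binds the backend name with let at the top and assembles the skip warning inline, rather than in a trailing where clause. A standalone sketch of that shape follows; it is not git-annex code: Backend, knownBackends, the "NAME--rest" key format, and the stand-in link check are all illustrative assumptions.

import Control.Monad (when)

newtype Backend = Backend { name :: String } deriving Show

knownBackends :: [Backend]
knownBackends = [Backend "SHA256", Backend "WORM"]

maybeLookupBackendName :: String -> Maybe Backend
maybeLookupBackendName s
    | length matches == 1 = Just $ head matches
    | otherwise = Nothing
    where
        matches = filter (\b -> s == name b) knownBackends

-- A key such as "SHA256--abc123" names its backend before the "--".
makeret :: FilePath -> String -> IO (Maybe (String, Backend))
makeret file key = let bname = takeWhile (/= '-') key in
    case maybeLookupBackendName bname of
        Just backend -> return $ Just (key, backend)
        Nothing -> do
            when (looksLikeAnnexLink key) $ putStrLn $
                "skipping " ++ file ++
                " (unknown backend " ++ bname ++ ")"
            return Nothing
    where
        looksLikeAnnexLink = not . null  -- stand-in for the real symlink check

main :: IO ()
main = do
    print =<< makeret "foo.iso" "SHA256--abc123"
    print =<< makeret "bar.iso" "MD5--def456"
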
@@ -121,4 +116,5 @@ maybeLookupBackendName :: String -> Maybe (Backend Annex)
 maybeLookupBackendName s
     | length matches == 1 = Just $ head matches
     | otherwise = Nothing
-    where matches = filter (\b -> s == B.name b) list
+    where
+        matches = filter (\b -> s == B.name b) list

@@ -1,6 +1,6 @@
 {- git-annex key/value backend data type
  -
- - Most things should not need this, using Types instead
+ - Most things should not need this, using Remotes instead
  -
  - Copyright 2010 Joey Hess <joey@kitenet.net>
  -