Merge tag '5.20140320' into debian-wheezy-backport

tagging package git-annex version 5.20140320
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1

iQIVAwUAUytJN8kQ2SIlEuPHAQgcEA//X5MJTU5DXbnTV9Yrqg9zf3WUF9dTeKnb
fxpUzHd20pjVhH8oqAmwTPpMHViNQax3CBhR2qd/T41eHZA0jljxefihWYd7e+2m
3KFxTc//9/5BI2cNjr+3cOikairVRK1jjCts0+rWF6YBjK0IRjxCN56ij9hg4B0a
11J/qu5GelB3VvyMUzyZqheapmP0MKAtIlcIXL0tr695dR2Ar4etvrkX/VFgtuSQ
2Nir6emXsoxrhl4Ph1WtwC9+mdJi85n2IbZ4l14l5n+sAc1Lmkp+r6RZJp8Efkef
LN6OsYHV2pqyKUZiZ9y3RgBzd838R7ocpOggsA14RtMslxAYxP5jZP9QwUvSWAqk
t2gibt6p3sJms1cvASHpOtegnz6d7+LK6pNqIl5XgjugofHlT+XHUEvrzyP/Lan4
RZK8wUIfJRX+VBju4tCZ+Q/5Br9VM/ZF80DIps0WIYOzLR5b1po8YTTWMskwkiii
g5UjNw4NVfi9X7N+SjArwzmtpijctAeQj2QkBlb+kaPL+dxx33EDcRegw7KZ+/4Q
ViyOhwl21Lb5Nzj8n6t/8g5GKRNJBmlOsa/K80vTOraRgukt4QejliFPswK7Z/6b
DjfItpny+qqwuufa0hWW/U9VgTQfbewEsCt8QdpQHIUpjO+KvGHjOpal/zjnGemy
qpFXO5EiwOo=
=AwJK
-----END PGP SIGNATURE-----

Conflicts:
	Assistant/Threads/WebApp.hs
	Utility/WebApp.hs
	git-annex.cabal

commit a20b9d78dc
306 changed files with 4068 additions and 1379 deletions
Annex.hs

@@ -60,6 +60,7 @@ import Types.FileMatcher
 import Types.NumCopies
 import Types.LockPool
 import Types.MetaData
+import Types.CleanupActions
 import qualified Utility.Matcher
 import qualified Data.Map as M
 import qualified Data.Set as S
@@ -88,6 +89,7 @@ data AnnexState = AnnexState
	, gitconfig :: GitConfig
	, backends :: [BackendA Annex]
	, remotes :: [Types.Remote.RemoteA Annex]
+	, remoteannexstate :: M.Map UUID AnnexState
	, output :: MessageState
	, force :: Bool
	, fast :: Bool
@@ -113,7 +115,7 @@ data AnnexState = AnnexState
	, flags :: M.Map String Bool
	, fields :: M.Map String String
	, modmeta :: [ModMeta]
-	, cleanup :: M.Map String (Annex ())
+	, cleanup :: M.Map CleanupAction (Annex ())
	, inodeschanged :: Maybe Bool
	, useragent :: Maybe String
	, errcounter :: Integer
@@ -128,6 +130,7 @@ newState c r = AnnexState
	, gitconfig = c
	, backends = []
	, remotes = []
+	, remoteannexstate = M.empty
	, output = defaultMessageState
	, force = False
	, fast = False
@@ -208,9 +211,9 @@ setField field value = changeState $ \s ->
	s { fields = M.insertWith' const field value $ fields s }

 {- Adds a cleanup action to perform. -}
-addCleanup :: String -> Annex () -> Annex ()
-addCleanup uid a = changeState $ \s ->
-	s { cleanup = M.insertWith' const uid a $ cleanup s }
+addCleanup :: CleanupAction -> Annex () -> Annex ()
+addCleanup k a = changeState $ \s ->
+	s { cleanup = M.insertWith' const k a $ cleanup s }

 {- Sets the type of output to emit. -}
 setOutput :: OutputType -> Annex ()

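Registering cleanups under a CleanupAction key instead of a free-form String means duplicate registrations coalesce in the map and callers cannot collide on magic strings. Types/CleanupActions.hs itself is not among the hunks shown here, so the sketch below infers its shape from the constructors this diff uses (SshCachingCleanup, FsckCleanup); the map semantics mirror the M.insertWith' const calls above, where the latest registration for a key wins.

    import qualified Data.Map.Strict as M

    -- Inferred from the uses in this diff; the real module may differ.
    data CleanupAction = SshCachingCleanup | FsckCleanup
      deriving (Eq, Ord, Show)

    -- Stand-in for the cleanup field of AnnexState.
    type Cleanups = M.Map CleanupAction (IO ())

    -- Like Annex.addCleanup: insertWith const keeps the newest action.
    addCleanup :: CleanupAction -> IO () -> Cleanups -> Cleanups
    addCleanup = M.insertWith const

    main :: IO ()
    main = do
      let m0 = M.empty
      let m1 = addCleanup SshCachingCleanup (putStrLn "replaced, never runs") m0
      let m2 = addCleanup SshCachingCleanup (putStrLn "stop ssh multiplexing") m1
      -- At shutdown, CmdLine.shutdown runs every registered action once.
      sequence_ (M.elems m2)
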
@@ -80,7 +80,7 @@ catKey = catKey' True
 catKey' :: Bool -> Ref -> FileMode -> Annex (Maybe Key)
 catKey' modeguaranteed ref mode
	| isSymLink mode = do
-		l <- fromInternalGitPath . encodeW8 . L.unpack <$> get
+		l <- fromInternalGitPath . decodeBS <$> get
		return $ if isLinkToAnnex l
			then fileKey $ takeFileName l
			else Nothing

@@ -24,6 +24,7 @@ module Annex.Content (
	removeAnnex,
	fromAnnex,
	moveBad,
+	KeyLocation(..),
	getKeysPresent,
	saveState,
	downloadUrl,
@@ -466,22 +467,33 @@ moveBad key = do
	logStatus key InfoMissing
	return dest

-{- List of keys whose content exists in the annex. -}
-getKeysPresent :: Annex [Key]
-getKeysPresent = do
+data KeyLocation = InAnnex | InRepository
+
+{- List of keys whose content exists in the specified location.
+ -
+ - InAnnex only lists keys under .git/annex/objects,
+ - while InRepository, in direct mode, also finds keys located in the
+ - work tree.
+ -
+ - Note that InRepository has to check whether direct mode files
+ - have goodContent.
+ -}
+getKeysPresent :: KeyLocation -> Annex [Key]
+getKeysPresent keyloc = do
	direct <- isDirect
	dir <- fromRepo gitAnnexObjectDir
-	liftIO $ traverse direct (2 :: Int) dir
+	s <- getstate direct
+	liftIO $ traverse s direct (2 :: Int) dir
  where
-	traverse direct depth dir = do
+	traverse s direct depth dir = do
		contents <- catchDefaultIO [] (dirContents dir)
		if depth == 0
			then do
-				contents' <- filterM (present direct) contents
+				contents' <- filterM (present s direct) contents
				let keys = mapMaybe (fileKey . takeFileName) contents'
				continue keys []
			else do
-				let deeper = traverse direct (depth - 1)
+				let deeper = traverse s direct (depth - 1)
				continue [] (map deeper contents)
	continue keys [] = return keys
	continue keys (a:as) = do
@@ -489,15 +501,31 @@ getKeysPresent = do
		morekeys <- unsafeInterleaveIO a
		continue (morekeys++keys) as

-	{- In indirect mode, look for the key. In direct mode,
-	 - the inode cache file is only present when a key's content
-	 - is present, so can be used as a surrogate if the content
-	 - is not located in the annex directory. -}
-	present False d = doesFileExist $ contentfile d
-	present True d = doesFileExist (contentfile d ++ ".cache")
-		<||> present False d
+	present _ False d = presentInAnnex d
+	present s True d = presentDirect s d <||> presentInAnnex d
+
+	presentInAnnex = doesFileExist . contentfile
	contentfile d = d </> takeFileName d
+
+	presentDirect s d = case keyloc of
+		InAnnex -> return False
+		InRepository -> case fileKey (takeFileName d) of
+			Nothing -> return False
+			Just k -> Annex.eval s $
+				anyM (goodContent k) =<< associatedFiles k
+
+	{- In order to run Annex monad actions within unsafeInterleaveIO,
+	 - the current state is taken and reused. No changes made to this
+	 - state will be preserved.
+	 -
+	 - As an optimsation, call inodesChanged to prime the state with
+	 - a cached value that will be used in the call to goodContent.
+	 -}
+	getstate direct = do
+		when direct $
+			void $ inodesChanged
+		Annex.getState id

 {- Things to do to record changes to content when shutting down.
 -
 - It's acceptable to avoid committing changes to the branch,

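The getstate comment above describes a pattern worth spelling out: because unsafeInterleaveIO defers each directory traversal until its results are demanded, the Annex state is snapshotted once and reused read-only inside every deferred action. A minimal self-contained sketch of the lazy-accumulation part, with plain string lists standing in for keys and no real Annex state:

    import System.IO.Unsafe (unsafeInterleaveIO)

    -- Mirrors `continue` above: each branch of the traversal is an IO
    -- action that only runs when its part of the result list is forced.
    continue :: [String] -> [IO [String]] -> IO [String]
    continue keys [] = return keys
    continue keys (a:as) = do
      morekeys <- unsafeInterleaveIO a
      continue (morekeys ++ keys) as

    main :: IO ()
    main = do
      keys <- continue [] [pure ["k1"], pure ["k2", "k3"]]
      -- Nothing has run yet; forcing the list triggers the deferred IO.
      print (take 1 keys)
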
@@ -56,23 +56,27 @@ parsedToMatcher parsed = case partitionEithers parsed of
	([], vs) -> Right $ generate vs
	(es, _) -> Left $ unwords $ map ("Parse failure: " ++) es

-exprParser :: GroupMap -> M.Map UUID RemoteConfig -> Maybe UUID -> String -> [Either String (Token MatchFiles)]
-exprParser groupmap configmap mu expr =
+exprParser :: FileMatcher -> FileMatcher -> GroupMap -> M.Map UUID RemoteConfig -> Maybe UUID -> String -> [Either String (Token MatchFiles)]
+exprParser matchstandard matchgroupwanted groupmap configmap mu expr =
	map parse $ tokenizeMatcher expr
  where
-	parse = parseToken
+	parse = parseToken
+		matchstandard
+		matchgroupwanted
		(limitPresent mu)
		(limitInDir preferreddir)
		groupmap
	preferreddir = fromMaybe "public" $
		M.lookup "preferreddir" =<< (`M.lookup` configmap) =<< mu

-parseToken :: MkLimit -> MkLimit -> GroupMap -> String -> Either String (Token MatchFiles)
-parseToken checkpresent checkpreferreddir groupmap t
+parseToken :: FileMatcher -> FileMatcher -> MkLimit -> MkLimit -> GroupMap -> String -> Either String (Token MatchFiles)
+parseToken matchstandard matchgroupwanted checkpresent checkpreferreddir groupmap t
	| t `elem` tokens = Right $ token t
+	| t == "standard" = call matchstandard
+	| t == "groupwanted" = call matchgroupwanted
	| t == "present" = use checkpresent
	| t == "inpreferreddir" = use checkpreferreddir
-	| t == "unused" = Right (Operation limitUnused)
+	| t == "unused" = Right $ Operation limitUnused
	| otherwise = maybe (Left $ "near " ++ show t) use $ M.lookup k $
		M.fromList
			[ ("include", limitInclude)
@@ -89,6 +93,8 @@ parseToken checkpresent checkpreferreddir groupmap t
  where
	(k, v) = separate (== '=') t
	use a = Operation <$> a v
+	call sub = Right $ Operation $ \notpresent mi ->
+		matchMrun sub $ \a -> a notpresent mi

 {- This is really dumb tokenization; there's no support for quoted values.
 - Open and close parens are always treated as standalone tokens;
@@ -109,5 +115,5 @@ largeFilesMatcher = go =<< annexLargeFiles <$> Annex.getGitConfig
	rc <- readRemoteLog
	u <- getUUID
	either badexpr return $
-		parsedToMatcher $ exprParser gm rc (Just u) expr
+		parsedToMatcher $ exprParser matchAll matchAll gm rc (Just u) expr
	badexpr e = error $ "bad annex.largefiles configuration: " ++ e

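parseToken splits "field=value" style tokens with separate, a small helper from git-annex's utility code. A sketch of its behavior, assumed equivalent to the real Utility.Misc.separate specialized to String:

    -- Split at the first matching element, dropping the separator.
    separate :: (Char -> Bool) -> String -> (String, String)
    separate p s = case break p s of
      (a, _:b) -> (a, b)
      (a, [])  -> (a, [])

    main :: IO ()
    main = do
      print (separate (== '=') "include=*.mp3") -- ("include","*.mp3")
      print (separate (== '=') "unused")        -- ("unused","")
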
@@ -198,7 +198,7 @@ enableDirectMode = unlessM isDirect $ do
 -}
 fixBadBare :: Annex ()
 fixBadBare = whenM checkBadBare $ do
-	ks <- getKeysPresent
+	ks <- getKeysPresent InAnnex
	liftIO $ debugM "Init" $ unwords
		[ "Detected bad bare repository with"
		, show (length ks)

@@ -5,11 +5,15 @@
 - Licensed under the GNU GPL version 3 or higher.
 -}

-module Annex.MetaData where
+module Annex.MetaData (
+	genMetaData,
+	module X
+) where

 import Common.Annex
 import qualified Annex
-import Types.MetaData
+import Types.MetaData as X
+import Annex.MetaData.StandardFields as X
 import Logs.MetaData
 import Annex.CatFile

@@ -19,15 +23,6 @@ import Data.Time.Calendar
 import Data.Time.Clock
 import Data.Time.Clock.POSIX

-tagMetaField :: MetaField
-tagMetaField = mkMetaFieldUnchecked "tag"
-
-yearMetaField :: MetaField
-yearMetaField = mkMetaFieldUnchecked "year"
-
-monthMetaField :: MetaField
-monthMetaField = mkMetaFieldUnchecked "month"
-
 {- Adds metadata for a file that has just been ingested into the
 - annex, but has not yet been committed to git.
 -

Annex/MetaData/StandardFields.hs (new file)

@@ -0,0 +1,47 @@
+{- git-annex metadata, standard fields
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Annex.MetaData.StandardFields (
+	tagMetaField,
+	yearMetaField,
+	monthMetaField,
+	lastChangedField,
+	mkLastChangedField,
+	isLastChangedField
+) where
+
+import Types.MetaData
+
+import Data.List
+
+tagMetaField :: MetaField
+tagMetaField = mkMetaFieldUnchecked "tag"
+
+yearMetaField :: MetaField
+yearMetaField = mkMetaFieldUnchecked "year"
+
+monthMetaField :: MetaField
+monthMetaField = mkMetaFieldUnchecked "month"
+
+lastChangedField :: MetaField
+lastChangedField = mkMetaFieldUnchecked lastchanged
+
+mkLastChangedField :: MetaField -> MetaField
+mkLastChangedField f = mkMetaFieldUnchecked (fromMetaField f ++ lastchangedSuffix)
+
+isLastChangedField :: MetaField -> Bool
+isLastChangedField f
+	| f == lastChangedField = True
+	| otherwise = lastchanged `isSuffixOf` s && s /= lastchangedSuffix
+  where
+	s = fromMetaField f
+
+lastchanged :: String
+lastchanged = "lastchanged"
+
+lastchangedSuffix :: String
+lastchangedSuffix = "-lastchanged"

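The derived "-lastchanged" fields work per ordinary field: each field foo gets a companion foo-lastchanged, plus a global lastchanged. A self-contained sketch of that naming scheme, with a toy MetaField newtype standing in for the real Types.MetaData:

    import Data.List (isSuffixOf)

    newtype MetaField = MetaField String deriving (Eq, Show)

    mkLastChangedField :: MetaField -> MetaField
    mkLastChangedField (MetaField f) = MetaField (f ++ "-lastchanged")

    isLastChangedField :: MetaField -> Bool
    isLastChangedField (MetaField s) =
      s == "lastchanged" || ("lastchanged" `isSuffixOf` s && s /= "-lastchanged")

    main :: IO ()
    main = do
      print (mkLastChangedField (MetaField "tag")) -- MetaField "tag-lastchanged"
      print (map isLastChangedField
        [MetaField "tag", MetaField "tag-lastchanged", MetaField "lastchanged"])
      -- [False,True,True]
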
@@ -9,7 +9,6 @@
 module Annex.Ssh (
	sshCachingOptions,
-	sshCleanup,
	sshCacheDir,
	sshReadPort,
 ) where
@@ -24,6 +23,7 @@ import qualified Build.SysConfig as SysConfig
 import qualified Annex
 import Config
 import Utility.Env
+import Types.CleanupActions
 #ifndef mingw32_HOST_OS
 import Annex.Perms
 #endif
@@ -31,7 +31,9 @@ import Annex.Perms
 {- Generates parameters to ssh to a given host (or user@host) on a given
 - port, with connection caching. -}
 sshCachingOptions :: (String, Maybe Integer) -> [CommandParam] -> Annex [CommandParam]
-sshCachingOptions (host, port) opts = go =<< sshInfo (host, port)
+sshCachingOptions (host, port) opts = do
+	Annex.addCleanup SshCachingCleanup sshCleanup
+	go =<< sshInfo (host, port)
  where
	go (Nothing, params) = ret params
	go (Just socketfile, params) = do
@@ -144,8 +146,9 @@ sshCleanup = go =<< sshCacheDir
		withQuietOutput createProcessSuccess $
			(proc "ssh" $ toCommand $
				[ Params "-O stop"
-				] ++ params ++ [Param "any"])
+				] ++ params ++ [Param "localhost"])
				{ cwd = Just dir }
		liftIO $ nukeFile socketfile
		-- Cannot remove the lock file; other processes may
		-- be waiting on our exclusive lock to use it.

@@ -45,7 +45,6 @@ import Assistant.Threads.XMPPClient
 import Assistant.Threads.XMPPPusher
 #endif
 #else
-#warning Building without the webapp. You probably need to install Yesod..
 import Assistant.Types.UrlRenderer
 #endif
 import qualified Utility.Daemon

@@ -14,6 +14,7 @@ import Utility.Tense
 import qualified Data.Text as T
 import Data.Text (Text)
 import qualified Data.Map as M
+import Data.Monoid

 {- This is as many alerts as it makes sense to display at a time.
 - A display might be smaller, or larger, the point is to not overwhelm the
@@ -43,8 +44,8 @@ compareAlertPairs
	(aid, Alert { alertClass = aclass, alertPriority = aprio })
	(bid, Alert { alertClass = bclass, alertPriority = bprio })
	 = compare aprio bprio
-		`thenOrd` compare aid bid
-		`thenOrd` compare aclass bclass
+		`mappend` compare aid bid
+		`mappend` compare aclass bclass

 sortAlertPairs :: [AlertPair] -> [AlertPair]
 sortAlertPairs = sortBy compareAlertPairs

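Replacing the custom thenOrd with mappend works because Ordering's Monoid instance keeps the first non-EQ result, so chained compares fall through to later tie-breakers exactly as before. For example:

    -- Lexicographic comparison via Ordering's Monoid instance.
    cmp :: (Int, Int) -> (Int, Int) -> Ordering
    cmp (a1, b1) (a2, b2) = compare a1 a2 `mappend` compare b1 b2

    main :: IO ()
    main = do
      print (cmp (1, 2) (1, 3)) -- LT: first compare is EQ, second decides
      print (cmp (2, 0) (1, 9)) -- GT: first compare short-circuits the rest
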
@@ -21,7 +21,7 @@ installMenu command menufile iconsrcdir icondir = do
	writeDesktopMenuFile (fdoDesktopMenu command) menufile
	installIcon (iconsrcdir </> "logo.svg") $
		iconFilePath (iconBaseName ++ ".svg") "scalable" icondir
-	installIcon (iconsrcdir </> "favicon.png") $
+	installIcon (iconsrcdir </> "logo_16x16.png") $
		iconFilePath (iconBaseName ++ ".png") "16x16" icondir
 #endif

@@ -73,7 +73,7 @@ webAppThread assistantdata urlrenderer noannex cannotrun postfirstrun listenhost
 #endif
	webapp <- WebApp
		<$> pure assistantdata
-		<*> (pack <$> genRandomToken)
+		<*> genAuthToken
		<*> getreldir
		<*> pure staticRoutes
		<*> pure postfirstrun
@@ -125,7 +125,7 @@ myUrl tlssettings webapp addr = unpack $ yesodRender webapp urlbase DashboardR []

 getTlsSettings :: Annex (Maybe TLS.TLSSettings)
 getTlsSettings = do
-#ifdef WITH_WEBAPP_HTTPS
+#ifdef WITH_WEBAPP_SECURE
	cert <- fromRepo gitAnnexWebCertificate
	privkey <- fromRepo gitAnnexWebPrivKey
	ifM (liftIO $ allM doesFileExist [cert, privkey])

@@ -14,6 +14,7 @@ import Assistant.WebApp.Types
 import Assistant.Common
 import Utility.NotificationBroadcaster
 import Utility.Yesod
+import Utility.WebApp

 import Data.Text (Text)
 import Control.Concurrent
@@ -36,7 +37,7 @@ newNotifier getbroadcaster = liftAssistant $ do
 webAppFormAuthToken :: Widget
 webAppFormAuthToken = do
	webapp <- liftH getYesod
-	[whamlet|<input type="hidden" name="auth" value="#{secretToken webapp}">|]
+	[whamlet|<input type="hidden" name="auth" value="#{fromAuthToken (authToken webapp)}">|]

 {- A button with an icon, and maybe label or tooltip, that can be
 - clicked to perform some action.

@@ -22,6 +22,7 @@ import Assistant.DaemonStatus
 import Assistant.Types.Buddies
 import Utility.NotificationBroadcaster
 import Utility.Yesod
+import Utility.WebApp

 import Data.Text (Text)
 import qualified Data.Text as T
@@ -64,7 +65,7 @@ notifierUrl route broadcaster = do
		[ "/"
		, T.intercalate "/" urlbits
		, "?auth="
-		, secretToken webapp
+		, fromAuthToken (authToken webapp)
		]

 getNotifierTransfersR :: Handler RepPlain

@@ -31,6 +31,7 @@ import qualified Data.Map as M
 import qualified Data.Set as S
 import qualified Data.Text as T
+import Data.Function
 import Control.Concurrent

 type RepoList = [(RepoDesc, RepoId, Actions)]

@@ -238,3 +239,15 @@ reorderCosts remote rs = zip rs'' (insertCostAfter costs i)
	costs = map Remote.cost rs'
	rs'' = (\(x, y) -> x ++ [remote] ++ y) $ splitAt (i + 1) rs'
+
+getSyncNowRepositoryR :: UUID -> Handler ()
+getSyncNowRepositoryR uuid = do
+	u <- liftAnnex getUUID
+	if u == uuid
+		then do
+			thread <- liftAssistant $ asIO $
+				reconnectRemotes True
+					=<< (syncRemotes <$> getDaemonStatus)
+			void $ liftIO $ forkIO thread
+		else maybe noop (liftAssistant . syncRemote)
+			=<< liftAnnex (Remote.remoteFromUUID uuid)
+	redirectBack

@@ -41,7 +41,7 @@ mkYesodData "WebApp" $(parseRoutesFile "Assistant/WebApp/routes")

 data WebApp = WebApp
	{ assistantData :: AssistantData
-	, secretToken :: Text
+	, authToken :: AuthToken
	, relDir :: Maybe FilePath
	, getStatic :: Static
	, postFirstRun :: Maybe (IO String)
@@ -52,11 +52,11 @@ data WebApp = WebApp

 instance Yesod WebApp where
	{- Require an auth token be set when accessing any (non-static) route -}
-	isAuthorized _ _ = checkAuthToken secretToken
+	isAuthorized _ _ = checkAuthToken authToken

	{- Add the auth token to every url generated, except static subsite
	 - urls (which can show up in Permission Denied pages). -}
-	joinPath = insertAuthToken secretToken excludeStatic
+	joinPath = insertAuthToken authToken excludeStatic
	  where
		excludeStatic [] = True
		excludeStatic (p:_) = p /= "static"

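Swapping the raw Text secretToken for an AuthToken value makes the check-and-embed invariant explicit: every generated URL and form carries ?auth=<token>, and every non-static request must present a matching one. checkAuthToken and insertAuthToken live in Utility.WebApp and are not shown in this diff; the sketch below only illustrates the invariant, with assumed, simplified types:

    {-# LANGUAGE OverloadedStrings #-}

    import Data.Text (Text)

    newtype AuthToken = AuthToken Text deriving Eq

    fromAuthToken :: AuthToken -> Text
    fromAuthToken (AuthToken t) = t

    -- Reject any request whose auth parameter does not match the app's.
    checkAuthToken :: AuthToken -> [(Text, Text)] -> Bool
    checkAuthToken expected params =
      lookup "auth" params == Just (fromAuthToken expected)

    main :: IO ()
    main = do
      let tok = AuthToken "s3cret"
      print (checkAuthToken tok [("auth", "s3cret")]) -- True
      print (checkAuthToken tok [("auth", "wrong")])  -- False
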
@@ -82,6 +82,7 @@

 /config/repository/reorder RepositoriesReorderR GET

+/config/repository/syncnow/#UUID SyncNowRepositoryR GET
 /config/repository/disable/#UUID DisableRepositoryR GET

 /config/repository/delete/confirm/#UUID DeleteRepositoryR GET

@@ -3,20 +3,14 @@
 module Build.Configure where

 import System.Directory
 import Data.List
 import System.Process
 import Control.Applicative
 import System.FilePath
 import System.Environment (getArgs)
 import Data.Maybe
 import Control.Monad.IfElse
 import Control.Monad
 import Data.Char

 import Build.TestConfig
 import Build.Version
 import Utility.SafeCommand
 import Utility.Monad
 import Utility.ExternalSHA
 import Utility.Env
 import qualified Git.Version

@@ -24,9 +24,7 @@ import System.Directory
 import System.Environment
 #ifndef mingw32_HOST_OS
 import System.Posix.User
 import System.Posix.Files
 #endif
 import System.FilePath
 import Data.Maybe

 systemwideInstall :: IO Bool

@@ -67,7 +67,7 @@ uninstaller :: FilePath
 uninstaller = "git-annex-uninstall.exe"

 gitInstallDir :: Exp FilePath
-gitInstallDir = fromString "$PROGRAMFILES\\Git\\cmd"
+gitInstallDir = fromString "$PROGRAMFILES\\Git\\bin"

 startMenuItem :: Exp FilePath
 startMenuItem = "$SMPROGRAMS/git-annex.lnk"

@@ -7,8 +7,6 @@ import Utility.Monad
 import Utility.SafeCommand

 import System.IO
 import System.Cmd
 import System.Exit
 import System.FilePath
 import System.Directory

@@ -14,24 +14,36 @@ buildFlags = filter (not . null)
	[ ""
 #ifdef WITH_ASSISTANT
	, "Assistant"
+#else
+#warning Building without the assistant.
 #endif
 #ifdef WITH_WEBAPP
	, "Webapp"
+#else
+#warning Building without the webapp. You probably need to install Yesod..
 #endif
-#ifdef WITH_WEBAPP_HTTPS
-	, "Webapp-https"
+#ifdef WITH_WEBAPP_SECURE
+	, "Webapp-secure"
 #endif
 #ifdef WITH_PAIRING
	, "Pairing"
+#else
+#warning Building without local pairing.
 #endif
 #ifdef WITH_TESTSUITE
	, "Testsuite"
+#else
+#warning Building without the testsuite.
 #endif
 #ifdef WITH_S3
	, "S3"
+#else
+#warning Building without S3.
 #endif
 #ifdef WITH_WEBDAV
	, "WebDAV"
+#else
+#warning Building without WebDAV.
 #endif
 #ifdef WITH_INOTIFY
	, "Inotify"
@@ -47,21 +59,29 @@ buildFlags = filter (not . null)
 #endif
 #ifdef WITH_XMPP
	, "XMPP"
+#else
+#warning Building without XMPP.
 #endif
 #ifdef WITH_DNS
	, "DNS"
 #endif
 #ifdef WITH_FEED
	, "Feeds"
+#else
+#warning Building without Feeds.
 #endif
 #ifdef WITH_QUVI
	, "Quvi"
+#else
+#warning Building without quvi.
 #endif
 #ifdef WITH_TDFA
	, "TDFA"
 #endif
 #ifdef WITH_CRYPTOHASH
	, "CryptoHash"
+#else
+#warning Building without CryptoHash.
 #endif
 #ifdef WITH_EKG
	, "EKG"

@@ -26,7 +26,6 @@ import qualified Annex
 import qualified Git
 import qualified Git.AutoCorrect
 import Annex.Content
-import Annex.Ssh
 import Annex.Environment
 import Command
 import Types.Messages
@@ -107,4 +106,3 @@ shutdown nocommit = do
	saveState nocommit
	sequence_ =<< M.elems <$> Annex.getState Annex.cleanup
	liftIO reapZombies -- zombies from long-running git processes
-	sshCleanup -- ssh connection caching

@@ -73,6 +73,8 @@ paramNumRange :: String
 paramNumRange = "NUM|RANGE"
 paramRemote :: String
 paramRemote = "REMOTE"
+paramField :: String
+paramField = "FIELD"
 paramGlob :: String
 paramGlob = "GLOB"
 paramName :: String

@@ -93,12 +93,15 @@ start file = ifAnnexed file addpresent add
 - Lockdown can fail if a file gets deleted, and Nothing will be returned.
 -}
 lockDown :: FilePath -> Annex (Maybe KeySource)
-lockDown file = ifM crippledFileSystem
-	( liftIO $ catchMaybeIO nohardlink
-	, do
+lockDown = either (\e -> showErr e >> return Nothing) (return . Just) <=< lockDown'
+
+lockDown' :: FilePath -> Annex (Either IOException KeySource)
+lockDown' file = ifM crippledFileSystem
+	( liftIO $ tryIO nohardlink
+	, tryAnnexIO $ do
		tmp <- fromRepo gitAnnexTmpMiscDir
		createAnnexDirectory tmp
-		eitherToMaybe <$> tryAnnexIO (go tmp)
+		go tmp
	)
  where
	{- In indirect mode, the write bit is removed from the file as part

@@ -29,6 +29,7 @@ import Utility.DataUnits
 import Utility.FileMode
 import Config
 import Types.Key
+import Types.CleanupActions
 import Utility.HumanTime
 import Git.FilePath
 import Utility.PID
@@ -93,7 +94,7 @@ getIncremental = do

	checkschedule Nothing = error "bad --incremental-schedule value"
	checkschedule (Just delta) = do
-		Annex.addCleanup "" $ do
+		Annex.addCleanup FsckCleanup $ do
			v <- getStartTime
			case v of
				Nothing -> noop

@@ -281,7 +281,7 @@ cachedPresentData = do
	case presentData s of
		Just v -> return v
		Nothing -> do
-			v <- foldKeys <$> lift getKeysPresent
+			v <- foldKeys <$> lift (getKeysPresent InRepository)
			put s { presentData = Just v }
			return v

@@ -158,7 +158,8 @@ absRepo reference r
	| Git.repoIsUrl r = return r
	| otherwise = liftIO $ do
		r' <- Git.Construct.fromAbsPath =<< absPath (Git.repoPath r)
-		flip Annex.eval Annex.gitRepo =<< Annex.new r'
+		r'' <- safely $ flip Annex.eval Annex.gitRepo =<< Annex.new r'
+		return (fromMaybe r' r'')

 {- Checks if two repos are the same. -}
 same :: Git.Repo -> Git.Repo -> Bool
@@ -192,14 +193,9 @@ tryScan :: Git.Repo -> Annex (Maybe Git.Repo)
 tryScan r
	| Git.repoIsSsh r = sshscan
	| Git.repoIsUrl r = return Nothing
-	| otherwise = safely $ Git.Config.read r
+	| otherwise = liftIO $ safely $ Git.Config.read r
  where
-	safely a = do
-		result <- liftIO (try a :: IO (Either SomeException Git.Repo))
-		case result of
-			Left _ -> return Nothing
-			Right r' -> return $ Just r'
-	pipedconfig cmd params = safely $
+	pipedconfig cmd params = liftIO $ safely $
		withHandle StdoutHandle createProcessSuccess p $
			Git.Config.hRead r
	  where
@@ -247,3 +243,10 @@ combineSame = map snd . nubBy sameuuid . map pair
  where
	sameuuid (u1, _) (u2, _) = u1 == u2 && u1 /= NoUUID
	pair r = (getUncachedUUID r, r)
+
+safely :: IO Git.Repo -> IO (Maybe Git.Repo)
+safely a = do
+	result <- try a :: IO (Either SomeException Git.Repo)
+	case result of
+		Left _ -> return Nothing
+		Right r' -> return $ Just r'

@@ -12,16 +12,24 @@ import qualified Annex
 import Command
 import Annex.MetaData
 import Logs.MetaData
 import Types.MetaData

 import qualified Data.Set as S
 import Data.Time.Clock.POSIX

 def :: [Command]
-def = [withOptions [setOption, tagOption, untagOption, jsonOption] $
+def = [withOptions metaDataOptions $
	command "metadata" paramPaths seek
	SectionMetaData "sets metadata of a file"]

+metaDataOptions :: [Option]
+metaDataOptions =
+	[ setOption
+	, tagOption
+	, untagOption
+	, getOption
+	, jsonOption
+	] ++ keyOptions
+
 storeModMeta :: ModMeta -> Annex ()
 storeModMeta modmeta = Annex.changeState $
	\s -> s { Annex.modmeta = modmeta:Annex.modmeta s }
@@ -31,6 +39,9 @@ setOption = Option ['s'] ["set"] (ReqArg mkmod "FIELD[+-]=VALUE") "set metadata"
  where
	mkmod = either error storeModMeta . parseModMeta

+getOption :: Option
+getOption = fieldOption ['g'] "get" paramField "get single metadata field"
+
 tagOption :: Option
 tagOption = Option ['t'] ["tag"] (ReqArg mkmod "TAG") "set a tag"
  where
@@ -44,19 +55,35 @@ untagOption = Option ['u'] ["untag"] (ReqArg mkmod "TAG") "remove a tag"
 seek :: CommandSeek
 seek ps = do
	modmeta <- Annex.getState Annex.modmeta
+	getfield <- getOptionField getOption $ \ms ->
+		return $ either error id . mkMetaField <$> ms
	now <- liftIO getPOSIXTime
-	withFilesInGit (whenAnnexed $ start now modmeta) ps
+	withKeyOptions
+		(startKeys now getfield modmeta)
+		(withFilesInGit (whenAnnexed $ start now getfield modmeta))
+		ps

-start :: POSIXTime -> [ModMeta] -> FilePath -> (Key, Backend) -> CommandStart
-start now ms file (k, _) = do
-	showStart "metadata" file
+start :: POSIXTime -> Maybe MetaField -> [ModMeta] -> FilePath -> (Key, Backend) -> CommandStart
+start now f ms file (k, _) = start' (Just file) now f ms k
+
+startKeys :: POSIXTime -> Maybe MetaField -> [ModMeta] -> Key -> CommandStart
+startKeys = start' Nothing
+
+start' :: AssociatedFile -> POSIXTime -> Maybe MetaField -> [ModMeta] -> Key -> CommandStart
+start' afile now Nothing ms k = do
+	showStart' "metadata" k afile
	next $ perform now ms k
+start' _ _ (Just f) _ k = do
+	l <- S.toList . currentMetaDataValues f <$> getCurrentMetaData k
+	liftIO $ forM_ l $
+		putStrLn . fromMetaValue
+	stop

 perform :: POSIXTime -> [ModMeta] -> Key -> CommandPerform
 perform _ [] k = next $ cleanup k
 perform now ms k = do
	oldm <- getCurrentMetaData k
-	let m = foldl' unionMetaData emptyMetaData $ map (modMeta oldm) ms
+	let m = combineMetaData $ map (modMeta oldm) ms
	addMetaData' k m now
	next $ cleanup k

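The new --get path prints every current value of one field for a key. With plain Maps and Sets standing in for the real MetaData type (an assumption for illustration only), the lookup amounts to:

    import qualified Data.Map as M
    import qualified Data.Set as S

    type MetaData = M.Map String (S.Set String)

    currentMetaDataValues :: String -> MetaData -> S.Set String
    currentMetaDataValues = M.findWithDefault S.empty

    main :: IO ()
    main = do
      let md = M.fromList [("tag", S.fromList ["flac", "music"])]
      mapM_ putStrLn (S.toList (currentMetaDataValues "tag" md))
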
@@ -69,20 +69,29 @@ toStart dest move afile key = do
	ishere <- inAnnex key
	if not ishere || u == Remote.uuid dest
		then stop -- not here, so nothing to do
-		else do
-			showMoveAction move key afile
-			next $ toPerform dest move key afile
-
-toPerform :: Remote -> Bool -> Key -> AssociatedFile -> CommandPerform
-toPerform dest move key afile = moveLock move key $ do
-	-- Checking the remote is expensive, so not done in the start step.
-	-- In fast mode, location tracking is assumed to be correct,
-	-- and an explicit check is not done, when copying. When moving,
-	-- it has to be done, to avoid inaverdent data loss.
+		else toStart' dest move afile key
+
+toStart' :: Remote -> Bool -> AssociatedFile -> Key -> CommandStart
+toStart' dest move afile key = do
	fast <- Annex.getState Annex.fast
-	let fastcheck = fast && not move && not (Remote.hasKeyCheap dest)
-	isthere <- if fastcheck
-		then Right <$> expectedpresent
-		else Remote.hasKey dest key
+	if fast && not move && not (Remote.hasKeyCheap dest)
+		then ifM (expectedPresent dest key)
+			( stop
+			, go True (pure $ Right False)
+			)
+		else go False (Remote.hasKey dest key)
+  where
+	go fastcheck isthere = do
+		showMoveAction move key afile
+		next $ toPerform dest move key afile fastcheck =<< isthere
+
+expectedPresent :: Remote -> Key -> Annex Bool
+expectedPresent dest key = do
+	remotes <- Remote.keyPossibilities key
+	return $ dest `elem` remotes
+
+toPerform :: Remote -> Bool -> Key -> AssociatedFile -> Bool -> Either String Bool -> CommandPerform
+toPerform dest move key afile fastcheck isthere = moveLock move key $
+	case isthere of
		Left err -> do
			showNote err
@@ -100,7 +109,7 @@ toPerform dest move key afile = moveLock move key $ do
			warning "This could have failed because --fast is enabled."
			stop
		Right True -> do
-			unlessM expectedpresent $
+			unlessM (expectedPresent dest key) $
				Remote.logStatus dest key InfoPresent
			finish
  where
@@ -109,9 +118,6 @@ toPerform dest move key afile = moveLock move key $ do
		removeAnnex key
		next $ Command.Drop.cleanupLocal key
	| otherwise = next $ return True
-	expectedpresent = do
-		remotes <- Remote.keyPossibilities key
-		return $ dest `elem` remotes

 {- Moves (or copies) the content of an annexed file from a remote
 - to the current repository.

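The Either String Bool threaded into toPerform separates a failed remote check from a definite answer about presence. A sketch of the three-way dispatch it enables:

    handle :: Either String Bool -> String
    handle (Left err)    = "remote check failed: " ++ err
    handle (Right True)  = "content already present; just update the location log"
    handle (Right False) = "transfer the content, then update the location log"

    main :: IO ()
    main = mapM_ (putStrLn . handle) [Left "timed out", Right True, Right False]
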
@@ -376,5 +376,5 @@ syncFile rs f (k, _) = do
	put dest = do
		ok <- commandAction $ do
			showStart "copy" f
-			next $ Command.Move.toPerform dest False k (Just f)
+			Command.Move.toStart' dest False (Just f) k
		return (ok, if ok then Just (Remote.uuid dest) else Nothing)

@@ -53,7 +53,7 @@ finish :: Annex ()
 finish = do
	annexdir <- fromRepo gitAnnexDir
	annexobjectdir <- fromRepo gitAnnexObjectDir
-	leftovers <- removeUnannexed =<< getKeysPresent
+	leftovers <- removeUnannexed =<< getKeysPresent InAnnex
	if null leftovers
		then liftIO $ removeDirectoryRecursive annexdir
		else error $ unlines

@@ -10,7 +10,6 @@
 module Command.Unused where

 import qualified Data.Set as S
-import qualified Data.ByteString.Lazy as L
 import Data.BloomFilter
 import Data.BloomFilter.Easy
 import Data.BloomFilter.Hash
@@ -71,7 +70,9 @@ checkUnused = chain 0
		return []
	findunused False = do
		showAction "checking for unused data"
-		excludeReferenced =<< getKeysPresent
+		-- InAnnex, not InRepository because if a direct mode
+		-- file exists, it is obviously not unused.
+		excludeReferenced =<< getKeysPresent InAnnex
	chain _ [] = next $ return True
	chain v (a:as) = do
		v' <- a v
@@ -294,7 +295,7 @@ withKeysReferencedInGitRef a ref = do
	liftIO $ void clean
  where
	tKey True = fmap fst <$$> Backend.lookupFile . getTopFilePath . DiffTree.file
-	tKey False = fileKey . takeFileName . encodeW8 . L.unpack <$$>
+	tKey False = fileKey . takeFileName . decodeBS <$$>
		catFile ref . getTopFilePath . DiffTree.file

 {- Looks in the specified directory for bad/tmp keys, and returns a list

Command/Vicfg.hs

@@ -1,6 +1,6 @@
 {- git-annex command
 -
- - Copyright 2012 Joey Hess <joey@kitenet.net>
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
@@ -60,7 +60,8 @@ vicfg curcfg f = do
 data Cfg = Cfg
	{ cfgTrustMap :: TrustMap
	, cfgGroupMap :: M.Map UUID (S.Set Group)
-	, cfgPreferredContentMap :: M.Map UUID String
+	, cfgPreferredContentMap :: M.Map UUID PreferredContentExpression
+	, cfgGroupPreferredContentMap :: M.Map Group PreferredContentExpression
	, cfgScheduleMap :: M.Map UUID [ScheduledActivity]
	}
@@ -69,25 +70,40 @@ getCfg = Cfg
	<$> trustMapRaw -- without local trust overrides
	<*> (groupsByUUID <$> groupMap)
	<*> preferredContentMapRaw
+	<*> groupPreferredContentMapRaw
	<*> scheduleMap

 setCfg :: Cfg -> Cfg -> Annex ()
 setCfg curcfg newcfg = do
-	let (trustchanges, groupchanges, preferredcontentchanges, schedulechanges) = diffCfg curcfg newcfg
-	mapM_ (uncurry trustSet) $ M.toList trustchanges
-	mapM_ (uncurry groupSet) $ M.toList groupchanges
-	mapM_ (uncurry preferredContentSet) $ M.toList preferredcontentchanges
-	mapM_ (uncurry scheduleSet) $ M.toList schedulechanges
+	let diff = diffCfg curcfg newcfg
+	mapM_ (uncurry trustSet) $ M.toList $ cfgTrustMap diff
+	mapM_ (uncurry groupSet) $ M.toList $ cfgGroupMap diff
+	mapM_ (uncurry preferredContentSet) $ M.toList $ cfgPreferredContentMap diff
+	mapM_ (uncurry groupPreferredContentSet) $ M.toList $ cfgGroupPreferredContentMap diff
+	mapM_ (uncurry scheduleSet) $ M.toList $ cfgScheduleMap diff

-diffCfg :: Cfg -> Cfg -> (TrustMap, M.Map UUID (S.Set Group), M.Map UUID String, M.Map UUID [ScheduledActivity])
-diffCfg curcfg newcfg = (diff cfgTrustMap, diff cfgGroupMap, diff cfgPreferredContentMap, diff cfgScheduleMap)
+diffCfg :: Cfg -> Cfg -> Cfg
+diffCfg curcfg newcfg = Cfg
+	{ cfgTrustMap = diff cfgTrustMap
+	, cfgGroupMap = diff cfgGroupMap
+	, cfgPreferredContentMap = diff cfgPreferredContentMap
+	, cfgGroupPreferredContentMap = diff cfgGroupPreferredContentMap
+	, cfgScheduleMap = diff cfgScheduleMap
+	}
  where
	diff f = M.differenceWith (\x y -> if x == y then Nothing else Just x)
		(f newcfg) (f curcfg)

 genCfg :: Cfg -> M.Map UUID String -> String
-genCfg cfg descs = unlines $ concat
-	[intro, trust, groups, preferredcontent, schedule]
+genCfg cfg descs = unlines $ intercalate [""]
+	[ intro
+	, trust
+	, groups
+	, preferredcontent
+	, grouppreferredcontent
+	, standardgroups
+	, schedule
+	]
  where
	intro =
		[ com "git-annex configuration"
@@ -95,22 +111,20 @@ genCfg cfg descs = unlines $ concat
		, com "Changes saved to this file will be recorded in the git-annex branch."
		, com ""
		, com "Lines in this file have the format:"
-		, com "  setting uuid = value"
+		, com "  setting field = value"
		]

-	trust = settings cfgTrustMap
-		[ ""
-		, com "Repository trust configuration"
+	trust = settings cfg descs cfgTrustMap
+		[ com "Repository trust configuration"
		, com "(Valid trust levels: " ++ trustlevels ++ ")"
		]
		(\(t, u) -> line "trust" u $ showTrustLevel t)
		(\u -> lcom $ line "trust" u $ showTrustLevel SemiTrusted)
	  where
		trustlevels = unwords $ map showTrustLevel [Trusted .. DeadTrusted]

-	groups = settings cfgGroupMap
-		[ ""
-		, com "Repository groups"
+	groups = settings cfg descs cfgGroupMap
+		[ com "Repository groups"
		, com $ "(Standard groups: " ++ grouplist ++ ")"
		, com "(Separate group names with spaces)"
		]
@@ -119,33 +133,60 @@ genCfg cfg descs = unlines $ concat
	  where
		grouplist = unwords $ map fromStandardGroup [minBound..]

-	preferredcontent = settings cfgPreferredContentMap
-		[ ""
-		, com "Repository preferred contents"
-		]
-		(\(s, u) -> line "content" u s)
-		(\u -> line "content" u "")
+	preferredcontent = settings cfg descs cfgPreferredContentMap
+		[ com "Repository preferred contents" ]
+		(\(s, u) -> line "wanted" u s)
+		(\u -> line "wanted" u "standard")

-	schedule = settings cfgScheduleMap
-		[ ""
-		, com "Scheduled activities"
+	grouppreferredcontent = settings' cfg allgroups cfgGroupPreferredContentMap
+		[ com "Group preferred contents"
+		, com "(Used by repositories with \"groupwanted\" in their preferred contents)"
+		]
+		(\(s, g) -> gline g s)
+		(\g -> gline g "standard")
+	  where
+		gline g value = [ unwords ["groupwanted", g, "=", value] ]
+		allgroups = S.unions $ stdgroups : M.elems (cfgGroupMap cfg)
+		stdgroups = S.fromList $ map fromStandardGroup [minBound..maxBound]
+
+	standardgroups =
+		[ com "Standard preferred contents"
+		, com "(Used by wanted or groupwanted expressions containing \"standard\")"
+		, com "(For reference only; built-in and cannot be changed!)"
+		]
+		++ map gline [minBound..maxBound]
+	  where
+		gline g = com $ unwords
+			[ "standard"
+			, fromStandardGroup g, "=", standardPreferredContent g
+			]
+
+	schedule = settings cfg descs cfgScheduleMap
+		[ com "Scheduled activities"
		, com "(Separate multiple activities with \"; \")"
		]
		(\(l, u) -> line "schedule" u $ fromScheduledActivities l)
		(\u -> line "schedule" u "")

-	settings field desc showvals showdefaults = concat
-		[ desc
-		, concatMap showvals $ sort $ map swap $ M.toList $ field cfg
-		, concatMap (lcom . showdefaults) $ missing field
-		]
-
	line setting u value =
		[ com $ "(for " ++ fromMaybe "" (M.lookup u descs) ++ ")"
		, unwords [setting, fromUUID u, "=", value]
		]
-	lcom = map (\l -> if "#" `isPrefixOf` l then l else '#' : l)
-	missing field = S.toList $ M.keysSet descs `S.difference` M.keysSet (field cfg)
+
+settings :: Ord v => Cfg -> M.Map UUID String -> (Cfg -> M.Map UUID v) -> [String] -> ((v, UUID) -> [String]) -> (UUID -> [String]) -> [String]
+settings cfg descs = settings' cfg (M.keysSet descs)
+
+settings' :: (Ord v, Ord f) => Cfg -> S.Set f -> (Cfg -> M.Map f v) -> [String] -> ((v, f) -> [String]) -> (f -> [String]) -> [String]
+settings' cfg s field desc showvals showdefaults = concat
+	[ desc
+	, concatMap showvals $ sort $ map swap $ M.toList $ field cfg
+	, concatMap (lcom . showdefaults) missing
+	]
+  where
+	missing = S.toList $ s `S.difference` M.keysSet (field cfg)
+
+lcom :: [String] -> [String]
+lcom = map (\l -> if "#" `isPrefixOf` l then l else '#' : l)

 {- If there's a parse error, returns a new version of the file,
 - with the problem lines noted. -}
@@ -163,16 +204,16 @@ parseCfg curcfg = go [] curcfg . lines
	parse l cfg
		| null l = Right cfg
		| "#" `isPrefixOf` l = Right cfg
-		| null setting || null u = Left "missing repository uuid"
-		| otherwise = handle cfg (toUUID u) setting value'
+		| null setting || null f = Left "missing field"
+		| otherwise = handle cfg f setting value'
	  where
		(setting, rest) = separate isSpace l
		(r, value) = separate (== '=') rest
		value' = trimspace value
-		u = reverse $ trimspace $ reverse $ trimspace r
+		f = reverse $ trimspace $ reverse $ trimspace r
		trimspace = dropWhile isSpace

-	handle cfg u setting value
+	handle cfg f setting value
		| setting == "trust" = case readTrustLevel value of
			Nothing -> badval "trust value" value
			Just t ->
@@ -181,18 +222,26 @@ parseCfg curcfg = go [] curcfg . lines
		| setting == "group" =
			let m = M.insert u (S.fromList $ words value) (cfgGroupMap cfg)
			in Right $ cfg { cfgGroupMap = m }
-		| setting == "content" =
+		| setting == "wanted" =
			case checkPreferredContentExpression value of
				Just e -> Left e
				Nothing ->
					let m = M.insert u value (cfgPreferredContentMap cfg)
					in Right $ cfg { cfgPreferredContentMap = m }
+		| setting == "groupwanted" =
+			case checkPreferredContentExpression value of
+				Just e -> Left e
+				Nothing ->
+					let m = M.insert f value (cfgGroupPreferredContentMap cfg)
+					in Right $ cfg { cfgGroupPreferredContentMap = m }
		| setting == "schedule" = case parseScheduledActivities value of
			Left e -> Left e
			Right l ->
				let m = M.insert u l (cfgScheduleMap cfg)
				in Right $ cfg { cfgScheduleMap = m }
		| otherwise = badval "setting" setting
	  where
+		u = toUUID f

	showerr (Just msg, l) = [parseerr ++ msg, l]
	showerr (Nothing, l)
@@ -203,11 +252,12 @@ parseCfg curcfg = go [] curcfg . lines

	badval desc val = Left $ "unknown " ++ desc ++ " \"" ++ val ++ "\""
	badheader =
-		[ com "There was a problem parsing your input."
-		, com "Search for \"Parse error\" to find the bad lines."
-		, com "Either fix the bad lines, or delete them (to discard your changes)."
+		[ com "** There was a problem parsing your input!"
+		, com "** Search for \"Parse error\" to find the bad lines."
+		, com "** Either fix the bad lines, or delete them (to discard your changes)."
		, ""
		]
-	parseerr = com "Parse error in next line: "
+	parseerr = com "** Parse error in next line: "

 com :: String -> String
 com s = "# " ++ s

@@ -108,6 +108,6 @@ catTree h treeref = go <$> catObjectDetails h treeref
	dropsha = L.drop 21

	parsemodefile b =
-		let (modestr, file) = separate (== ' ') (encodeW8 $ L.unpack b)
+		let (modestr, file) = separate (== ' ') (decodeBS b)
		in (file, readmode modestr)
	readmode = fst . fromMaybe (0, undefined) . headMaybe . readOct

Git/Fsck.hs

@@ -23,10 +23,17 @@ import Utility.Batch
 import qualified Git.Version

 import qualified Data.Set as S
+import System.Process (std_out, std_err)
+import Control.Concurrent.Async

 type MissingObjects = S.Set Sha

-data FsckResults = FsckFoundMissing MissingObjects | FsckFailed
+data FsckResults
+	= FsckFoundMissing
+		{ missingObjects :: MissingObjects
+		, missingObjectsTruncated :: Bool
+		}
+	| FsckFailed
+	deriving (Show)

 {- Runs fsck to find some of the broken objects in the repository.
@@ -46,20 +53,32 @@ findBroken batchmode r = do
	(command', params') <- if batchmode
		then toBatchCommand (command, params)
		else return (command, params)
-	(output, fsckok) <- processTranscript command' (toCommand params') Nothing
-	let objs = findShas supportsNoDangling output
-	badobjs <- findMissing objs r
+
+	p@(_, _, _, pid) <- createProcess $
+		(proc command' (toCommand params'))
+			{ std_out = CreatePipe
+			, std_err = CreatePipe
+			}
+	(bad1, bad2) <- concurrently
+		(readMissingObjs maxobjs r supportsNoDangling (stdoutHandle p))
+		(readMissingObjs maxobjs r supportsNoDangling (stderrHandle p))
+	fsckok <- checkSuccessProcess pid
+	let truncated = S.size bad1 == maxobjs || S.size bad2 == maxobjs
+	let badobjs = S.union bad1 bad2

	if S.null badobjs && not fsckok
		then return FsckFailed
-		else return $ FsckFoundMissing badobjs
+		else return $ FsckFoundMissing badobjs truncated
+  where
+	maxobjs = 10000

 foundBroken :: FsckResults -> Bool
 foundBroken FsckFailed = True
-foundBroken (FsckFoundMissing s) = not (S.null s)
+foundBroken (FsckFoundMissing s _) = not (S.null s)

 knownMissing :: FsckResults -> MissingObjects
 knownMissing FsckFailed = S.empty
-knownMissing (FsckFoundMissing s) = s
+knownMissing (FsckFoundMissing s _) = s

 {- Finds objects that are missing from the git repsitory, or are corrupt.
 -
@@ -69,6 +88,11 @@ knownMissing (FsckFoundMissing s _) = s
 findMissing :: [Sha] -> Repo -> IO MissingObjects
 findMissing objs r = S.fromList <$> filterM (`isMissing` r) objs

+readMissingObjs :: Int -> Repo -> Bool -> Handle -> IO MissingObjects
+readMissingObjs maxobjs r supportsNoDangling h = do
+	objs <- take maxobjs . findShas supportsNoDangling <$> hGetContents h
+	findMissing objs r
+
 isMissing :: Sha -> Repo -> IO Bool
 isMissing s r = either (const True) (const False) <$> tryIO dump
  where

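findBroken now streams fsck's stdout and stderr through two concurrent readers instead of buffering a full transcript; draining both pipes at once is what prevents a deadlock when either one fills its OS buffer. A self-contained sketch of the same pattern, using sh as a stand-in child process and raw handles where the diff uses git-annex's stdoutHandle/stderrHandle helpers:

    import Control.Concurrent.Async (concurrently)
    import System.IO
    import System.Process

    main :: IO ()
    main = do
      (_, Just hout, Just herr, pid) <- createProcess
        (proc "sh" ["-c", "echo out; echo err >&2"])
          { std_out = CreatePipe, std_err = CreatePipe }
      -- Read both pipes at once so neither writer can block the other.
      (o, e) <- concurrently (strictContents hout) (strictContents herr)
      ok <- waitForProcess pid
      print (o, e, ok)
      where
        strictContents h = do
          s <- hGetContents h
          length s `seq` return s
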
Git/Repair.hs

@@ -1,7 +1,6 @@
 {- git repository recovery
 -
- - Copyright 2013 Joey Hess <joey@kitenet.net>
+ - Copyright 2013-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
 import qualified Data.Set as S
@@ -45,35 +44,18 @@ import qualified Data.ByteString.Lazy as L
 import Data.Tuple.Utils

 {- Given a set of bad objects found by git fsck, which may not
- - be complete, finds and removes all corrupt objects,
- - and returns missing objects.
- -}
-cleanCorruptObjects :: FsckResults -> Repo -> IO FsckResults
+ - be complete, finds and removes all corrupt objects. -}
+cleanCorruptObjects :: FsckResults -> Repo -> IO ()
 cleanCorruptObjects fsckresults r = do
	void $ explodePacks r
-	objs <- listLooseObjectShas r
-	mapM_ (tryIO . allowRead . looseObjectFile r) objs
-	bad <- findMissing objs r
-	void $ removeLoose r $ S.union bad (knownMissing fsckresults)
-	-- Rather than returning the loose objects that were removed, re-run
-	-- fsck. Other missing objects may have been in the packs,
-	-- and this way fsck will find them.
-	findBroken False r
-
-removeLoose :: Repo -> MissingObjects -> IO Bool
-removeLoose r s = do
-	fs <- filterM doesFileExist (map (looseObjectFile r) (S.toList s))
-	let count = length fs
-	if count > 0
-		then do
-			putStrLn $ unwords
-				[ "Removing"
-				, show count
-				, "corrupt loose objects."
-				]
-			mapM_ nukeFile fs
-			return True
-		else return False
+	mapM_ removeLoose (S.toList $ knownMissing fsckresults)
+	mapM_ removeBad =<< listLooseObjectShas r
+  where
+	removeLoose s = nukeFile (looseObjectFile r s)
+	removeBad s = do
+		void $ tryIO $ allowRead $ looseObjectFile r s
+		whenM (isMissing s r) $
+			removeLoose s

 {- Explodes all pack files, and deletes them.
 -
@@ -132,7 +114,9 @@ retrieveMissingObjects missing referencerepo r
		void $ copyObjects tmpr r
		case stillmissing of
			FsckFailed -> return $ FsckFailed
-			FsckFoundMissing s -> FsckFoundMissing <$> findMissing (S.toList s) r
+			FsckFoundMissing s t -> FsckFoundMissing
+				<$> findMissing (S.toList s) r
+				<*> pure t
	, return stillmissing
	)
	pullremotes tmpr (rmt:rmts) fetchrefs ms
@@ -145,9 +129,9 @@ retrieveMissingObjects missing referencerepo r
			void $ copyObjects tmpr r
			case ms of
				FsckFailed -> pullremotes tmpr rmts fetchrefs ms
-				FsckFoundMissing s -> do
+				FsckFoundMissing s t -> do
					stillmissing <- findMissing (S.toList s) r
-					pullremotes tmpr rmts fetchrefs (FsckFoundMissing stillmissing)
+					pullremotes tmpr rmts fetchrefs (FsckFoundMissing stillmissing t)
		, pullremotes tmpr rmts fetchrefs ms
		)
	fetchfrom fetchurl ps = runBool $
@@ -295,7 +279,7 @@ findUncorruptedCommit missing goodcommits branch r = do
		then return (Just c, gcs')
		else findfirst gcs' cs

-{- Verifies tha none of the missing objects in the set are used by
+{- Verifies that none of the missing objects in the set are used by
 - the commit. Also adds to a set of commit shas that have been verified to
 - be good, which can be passed into subsequent calls to avoid
 - redundant work when eg, chasing down branches to find the first
@@ -465,10 +449,11 @@ runRepairOf fsckresult removablebranch forced referencerepo g = do

 runRepair' :: (Ref -> Bool) -> FsckResults -> Bool -> Maybe FilePath -> Repo -> IO (Bool, [Branch])
 runRepair' removablebranch fsckresult forced referencerepo g = do
-	missing <- cleanCorruptObjects fsckresult g
+	cleanCorruptObjects fsckresult g
+	missing <- findBroken False g
	stillmissing <- retrieveMissingObjects missing referencerepo g
	case stillmissing of
-		FsckFoundMissing s
+		FsckFoundMissing s t
			| S.null s -> if repoIsLocalBare g
				then successfulfinish []
				else ifM (checkIndex g)
@@ -481,7 +466,7 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
				)
			| otherwise -> if forced
				then ifM (checkIndex g)
-					( continuerepairs s
+					( forcerepair s t
					, corruptedindex
					)
				else do
@@ -493,17 +478,17 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
		FsckFailed
			| forced -> ifM (pure (repoIsLocalBare g) <||> checkIndex g)
				( do
-					missing' <- cleanCorruptObjects FsckFailed g
-					case missing' of
+					cleanCorruptObjects FsckFailed g
+					stillmissing' <- findBroken False g
+					case stillmissing' of
						FsckFailed -> return (False, [])
-						FsckFoundMissing stillmissing' ->
-							continuerepairs stillmissing'
+						FsckFoundMissing s t -> forcerepair s t
				, corruptedindex
				)
			| otherwise -> unsuccessfulfinish
  where
-	continuerepairs stillmissing = do
-		(removedbranches, goodcommits) <- removeBadBranches removablebranch stillmissing emptyGoodCommits g
+	repairbranches missing = do
+		(removedbranches, goodcommits) <- removeBadBranches removablebranch missing emptyGoodCommits g
		let remotebranches = filter isTrackingBranch removedbranches
		unless (null remotebranches) $
			putStrLn $ unwords
@@ -511,32 +496,43 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
				, show (length remotebranches)
				, "remote tracking branches that referred to missing objects."
				]
-		(resetbranches, deletedbranches, _) <- resetLocalBranches stillmissing goodcommits g
+		(resetbranches, deletedbranches, _) <- resetLocalBranches missing goodcommits g
		displayList (map fromRef resetbranches)
			"Reset these local branches to old versions before the missing objects were committed:"
		displayList (map fromRef deletedbranches)
			"Deleted these local branches, which could not be recovered due to missing objects:"
+		return (resetbranches ++ deletedbranches)
+
+	forcerepair missing fscktruncated = do
+		modifiedbranches <- repairbranches missing
		deindexedfiles <- rewriteIndex g
		displayList deindexedfiles
			"Removed these missing files from the index. You should look at what files are present in your working tree and git add them back to the index when appropriate."
-		let modifiedbranches = resetbranches ++ deletedbranches
-		if null resetbranches && null deletedbranches
-			then successfulfinish modifiedbranches
-			else do
-				unless (repoIsLocalBare g) $ do
-					mcurr <- Branch.currentUnsafe g
-					case mcurr of
-						Nothing -> return ()
-						Just curr -> when (any (== curr) modifiedbranches) $ do
-							putStrLn $ unwords
-								[ "You currently have"
-								, fromRef curr
-								, "checked out. You may have staged changes in the index that can be committed to recover the lost state of this branch!"
-								]
-				putStrLn "Successfully recovered repository!"
-				putStrLn "Please carefully check that the changes mentioned above are ok.."
-				return (True, modifiedbranches)
+
+		-- When the fsck results were truncated, try
+		-- fscking again, and as long as different
+		-- missing objects are found, continue
+		-- the repair process.
+		if fscktruncated
+			then do
+				fsckresult' <- findBroken False g
+				case fsckresult' of
+					FsckFailed -> do
+						putStrLn "git fsck is failing"
+						return (False, modifiedbranches)
+					FsckFoundMissing s _
+						| S.null s -> successfulfinish modifiedbranches
+						| S.null (s `S.difference` missing) -> do
+							putStrLn $ unwords
+								[ show (S.size s)
+								, "missing objects could not be recovered!"
+								]
+							return (False, modifiedbranches)
+						| otherwise -> do
+							(ok, modifiedbranches') <- runRepairOf fsckresult' removablebranch forced referencerepo g
+							return (ok, modifiedbranches++modifiedbranches')
+			else successfulfinish modifiedbranches

	corruptedindex = do
		nukeFile (indexFile g)
		-- The corrupted index can prevent fsck from finding other
@@ -546,12 +542,28 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
		putStrLn "Removed the corrupted index file. You should look at what files are present in your working tree and git add them back to the index when appropriate."
		return result

-	successfulfinish modifiedbranches = do
-		mapM_ putStrLn
-			[ "Successfully recovered repository!"
-			, "You should run \"git fsck\" to make sure, but it looks like everything was recovered ok."
-			]
-		return (True, modifiedbranches)
+	successfulfinish modifiedbranches
+		| null modifiedbranches = do
+			mapM_ putStrLn
+				[ "Successfully recovered repository!"
+				, "You should run \"git fsck\" to make sure, but it looks like everything was recovered ok."
+				]
+			return (True, modifiedbranches)
+		| otherwise = do
+			unless (repoIsLocalBare g) $ do
+				mcurr <- Branch.currentUnsafe g
+				case mcurr of
+					Nothing -> return ()
+					Just curr -> when (any (== curr) modifiedbranches) $ do
+						putStrLn $ unwords
+							[ "You currently have"
+							, fromRef curr
+							, "checked out. You may have staged changes in the index that can be committed to recover the lost state of this branch!"
+							]
+			putStrLn "Successfully recovered repository!"
+			putStrLn "Please carefully check that the changes mentioned above are ok.."
+			return (True, modifiedbranches)

	unsuccessfulfinish = do
		if repoIsLocalBare g
			then do

23 Limit.hs

@@ -94,18 +94,16 @@ matchGlobFile glob = go
{- Adds a limit to skip files not believed to be present
- in a specfied repository. Optionally on a prior date. -}
addIn :: String -> Annex ()
addIn = addLimit . limitIn

limitIn :: MkLimit
limitIn s = Right $ \notpresent -> checkKey $ \key ->
if name == "."
then if null date
then inhere notpresent key
else inuuid notpresent key =<< getUUID
else inuuid notpresent key =<< Remote.nameToUUID name
addIn s = addLimit =<< mk
where
(name, date) = separate (== '@') s
inuuid notpresent key u
mk
| name == "." = if null date
then use inhere
else use . inuuid =<< getUUID
| otherwise = use . inuuid =<< Remote.nameToUUID name
use a = return $ Right $ \notpresent -> checkKey (a notpresent)
inuuid u notpresent key
| null date = do
us <- Remote.keyLocations key
return $ u `elem` us && u `S.notMember` notpresent

@@ -122,7 +120,10 @@ limitIn s = Right $ \notpresent -> checkKey $ \key ->

{- Limit to content that is currently present on a uuid. -}
limitPresent :: Maybe UUID -> MkLimit
limitPresent u _ = Right $ const $ checkKey $ \key -> do
limitPresent u _ = Right $ matchPresent u

matchPresent :: Maybe UUID -> MatchFiles
matchPresent u _ = checkKey $ \key -> do
hereu <- getUUID
if u == Just hereu || isNothing u
then inAnnex key
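One consequence of the addIn rewrite above: the repository name in an --in=name@date limit is now resolved to a UUID once, when the limit is built, rather than on every key check. A standalone sketch of the name@date split that mk relies on (splitIn is a hypothetical stand-in for the separate helper used in the source):

-- Sketch only; the real code uses separate (== '@') from Utility.Misc.
splitIn :: String -> (String, String)
splitIn s = let (name, rest) = break (== '@') s in (name, drop 1 rest)

-- splitIn "usbdrive@2014-03-01" == ("usbdrive", "2014-03-01")
-- splitIn "usbdrive"            == ("usbdrive", "")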
12 Logs.hs

@@ -24,7 +24,7 @@ getLogVariety :: FilePath -> Maybe LogVariety
getLogVariety f
| f `elem` topLevelUUIDBasedLogs = Just UUIDBasedLog
| isRemoteStateLog f = Just NewUUIDBasedLog
| isMetaDataLog f || f == numcopiesLog = Just OtherLog
| isMetaDataLog f || f `elem` otherLogs = Just OtherLog
| otherwise = PresenceLog <$> firstJust (presenceLogs f)

{- All the uuid-based logs stored in the top of the git-annex branch. -}

@@ -45,6 +45,13 @@ presenceLogs f =
, locationLogFileKey f
]

{- Logs that are neither UUID based nor presence logs. -}
otherLogs :: [FilePath]
otherLogs =
[ numcopiesLog
, groupPreferredContentLog
]

uuidLog :: FilePath
uuidLog = "uuid.log"

@@ -63,6 +70,9 @@ groupLog = "group.log"
preferredContentLog :: FilePath
preferredContentLog = "preferred-content.log"

groupPreferredContentLog :: FilePath
groupPreferredContentLog = "group-preferred-content.log"

scheduleLog :: FilePath
scheduleLog = "schedule.log"

Logs/FsckResults.hs

@@ -23,25 +23,31 @@ writeFsckResults u fsckresults = do
logfile <- fromRepo $ gitAnnexFsckResultsLog u
liftIO $
case fsckresults of
FsckFailed -> store S.empty logfile
FsckFoundMissing s
FsckFailed -> store S.empty False logfile
FsckFoundMissing s t
| S.null s -> nukeFile logfile
| otherwise -> store s logfile
| otherwise -> store s t logfile
where
store s logfile = do
store s t logfile = do
createDirectoryIfMissing True (parentDir logfile)
liftIO $ viaTmp writeFile logfile $ serialize s
serialize = unlines . map fromRef . S.toList
liftIO $ viaTmp writeFile logfile $ serialize s t
serialize s t =
let ls = map fromRef (S.toList s)
in if t
then unlines ("truncated":ls)
else unlines ls

readFsckResults :: UUID -> Annex FsckResults
readFsckResults u = do
logfile <- fromRepo $ gitAnnexFsckResultsLog u
liftIO $ catchDefaultIO (FsckFoundMissing S.empty) $
deserialize <$> readFile logfile
liftIO $ catchDefaultIO (FsckFoundMissing S.empty False) $
deserialize . lines <$> readFile logfile
where
deserialize l =
let s = S.fromList $ map Ref $ lines l
in if S.null s then FsckFailed else FsckFoundMissing s
deserialize ("truncated":ls) = deserialize' ls True
deserialize ls = deserialize' ls False
deserialize' ls t =
let s = S.fromList $ map Ref ls
in if S.null s then FsckFailed else FsckFoundMissing s t

clearFsckResults :: UUID -> Annex ()
clearFsckResults = liftIO . nukeFile <=< fromRepo . gitAnnexFsckResultsLog
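The new serialize/deserialize pair above records truncation as a leading "truncated" marker line, followed by one ref per line. A standalone round-trip sketch of that on-disk format (hypothetical names, String refs instead of git Refs):

import qualified Data.Set as S

-- Write: optional "truncated" marker, then one ref per line.
serializeFsck :: Bool -> S.Set String -> String
serializeFsck truncated refs =
        let ls = S.toList refs
        in unlines (if truncated then "truncated" : ls else ls)

-- Read: peel off the marker if present, recover the set.
deserializeFsck :: String -> (Bool, S.Set String)
deserializeFsck s = case lines s of
        ("truncated":ls) -> (True, S.fromList ls)
        ls -> (False, S.fromList ls)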
81 Logs/MapLog.hs (new file)

@@ -0,0 +1,81 @@
{- git-annex Map log
-
- This is used to store a Map, in a way that can be union merged.
-
- A line of the log will look like: "timestamp field value"
-
- Copyright 2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

module Logs.MapLog where

import qualified Data.Map as M
import Data.Time.Clock.POSIX
import Data.Time
import System.Locale

import Common

data TimeStamp = Unknown | Date POSIXTime
deriving (Eq, Ord, Show)

data LogEntry v = LogEntry
{ changed :: TimeStamp
, value :: v
} deriving (Eq, Show)

type MapLog f v = M.Map f (LogEntry v)

showMapLog :: (f -> String) -> (v -> String) -> MapLog f v -> String
showMapLog fieldshower valueshower = unlines . map showpair . M.toList
where
showpair (f, LogEntry (Date p) v) =
unwords [show p, fieldshower f, valueshower v]
showpair (f, LogEntry Unknown v) =
unwords ["0", fieldshower f, valueshower v]

parseMapLog :: Ord f => (String -> Maybe f) -> (String -> Maybe v) -> String -> MapLog f v
parseMapLog fieldparser valueparser = M.fromListWith best . mapMaybe parse . lines
where
parse line = do
let (ts, rest) = splitword line
(sf, sv) = splitword rest
date <- Date . utcTimeToPOSIXSeconds <$> parseTime defaultTimeLocale "%s%Qs" ts
f <- fieldparser sf
v <- valueparser sv
Just (f, LogEntry date v)
splitword = separate (== ' ')

changeMapLog :: Ord f => POSIXTime -> f -> v -> MapLog f v -> MapLog f v
changeMapLog t f v = M.insert f $ LogEntry (Date t) v

{- Only add an LogEntry if it's newer (or at least as new as) than any
- existing LogEntry for a field. -}
addMapLog :: Ord f => f -> LogEntry v -> MapLog f v -> MapLog f v
addMapLog = M.insertWith' best

{- Converts a MapLog into a simple Map without the timestamp information.
- This is a one-way trip, but useful for code that never needs to change
- the log. -}
simpleMap :: MapLog f v -> M.Map f v
simpleMap = M.map value

best :: LogEntry v -> LogEntry v -> LogEntry v
best new old
| changed old > changed new = old
| otherwise = new

-- Unknown is oldest.
prop_TimeStamp_sane :: Bool
prop_TimeStamp_sane = Unknown < Date 1

prop_addMapLog_sane :: Bool
prop_addMapLog_sane = newWins && newestWins
where
newWins = addMapLog ("foo") (LogEntry (Date 1) "new") l == l2
newestWins = addMapLog ("foo") (LogEntry (Date 1) "newest") l2 /= l2

l = M.fromList [("foo", LogEntry (Date 0) "old")]
l2 = M.fromList [("foo", LogEntry (Date 1) "new")]
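Since Logs.MapLog is new in this commit, a round-trip sketch may help; it assumes only the module above is in scope and shows the "timestamp field value" line format the header comment describes:

import Logs.MapLog
import qualified Data.Map as M

main :: IO ()
main = do
        let m = changeMapLog 200 "size" "big" (changeMapLog 100 "color" "red" M.empty)
            s = showMapLog id id m
        putStr s  -- "100s color red\n200s size big\n"
        print (simpleMap (parseMapLog Just Just s) == simpleMap m)  -- True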
Logs/MetaData.hs

@@ -36,26 +36,54 @@ module Logs.MetaData (

import Common.Annex
import Types.MetaData
import Annex.MetaData.StandardFields
import qualified Annex.Branch
import Logs
import Logs.SingleValue

import qualified Data.Set as S
import qualified Data.Map as M
import Data.Time.Clock.POSIX
import Data.Time.Format
import System.Locale

instance SingleValueSerializable MetaData where
serialize = Types.MetaData.serialize
deserialize = Types.MetaData.deserialize

getMetaData :: Key -> Annex (Log MetaData)
getMetaData = readLog . metaDataLogFile
getMetaDataLog :: Key -> Annex (Log MetaData)
getMetaDataLog = readLog . metaDataLogFile

{- Go through the log from oldest to newest, and combine it all
- into a single MetaData representing the current state. -}
- into a single MetaData representing the current state.
-
- Automatically generates a lastchanged metadata for each field that's
- currently set, based on timestamps in the log.
-}
getCurrentMetaData :: Key -> Annex MetaData
getCurrentMetaData = currentMetaData . collect <$$> getMetaData
getCurrentMetaData k = do
ls <- S.toAscList <$> getMetaDataLog k
let loggedmeta = currentMetaData $ combineMetaData $ map value ls
return $ currentMetaData $ unionMetaData loggedmeta
(lastchanged ls loggedmeta)
where
collect = foldl' unionMetaData emptyMetaData . map value . S.toAscList
lastchanged [] _ = emptyMetaData
lastchanged ls (MetaData currentlyset) =
let m = foldl' (flip M.union) M.empty (map genlastchanged ls)
in MetaData $
-- Add a overall lastchanged using the oldest log
-- item (log is in ascending order).
M.insert lastChangedField (lastchangedval $ Prelude.last ls) $
M.mapKeys mkLastChangedField $
-- Only include fields that are currently set.
m `M.intersection` currentlyset
-- Makes each field have the timestamp as its value.
genlastchanged l =
let MetaData m = value l
ts = lastchangedval l
in M.map (const ts) m
lastchangedval l = S.singleton $ toMetaValue $ showts $ changed l
showts = formatTime defaultTimeLocale "%F@%H-%M-%S" . posixSecondsToUTCTime

{- Adds in some metadata, which can override existing values, or unset
- them, but otherwise leaves any existing metadata as-is. -}

@@ -67,10 +95,12 @@ addMetaData k metadata = addMetaData' k metadata =<< liftIO getPOSIXTime
- will tend to be generated across the different log files, and so
- git will be able to pack the data more efficiently. -}
addMetaData' :: Key -> MetaData -> POSIXTime -> Annex ()
addMetaData' k metadata now = Annex.Branch.change (metaDataLogFile k) $
addMetaData' k (MetaData m) now = Annex.Branch.change (metaDataLogFile k) $
showLog . simplifyLog
. S.insert (LogEntry now metadata)
. S.insert (LogEntry now metadata)
. parseLog
where
metadata = MetaData $ M.filterWithKey (\f _ -> not (isLastChangedField f)) m

{- Simplify a log, removing historical values that are no longer
- needed.

@@ -148,7 +178,7 @@ copyMetaData :: Key -> Key -> Annex ()
copyMetaData oldkey newkey
| oldkey == newkey = noop
| otherwise = do
l <- getMetaData oldkey
l <- getMetaDataLog oldkey
unless (S.null l) $
Annex.Branch.change (metaDataLogFile newkey) $
const $ showLog l
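The automatically generated lastchanged values use the %F@%H-%M-%S format from showts above. A small standalone sketch, assuming the pre-time-1.5 API this codebase uses (defaultTimeLocale from the old-locale package):

import Data.Time.Clock.POSIX (POSIXTime, posixSecondsToUTCTime)
import Data.Time.Format (formatTime)
import System.Locale (defaultTimeLocale)

-- Same format string as showts in the diff above.
showts :: POSIXTime -> String
showts = formatTime defaultTimeLocale "%F@%H-%M-%S" . posixSecondsToUTCTime

main :: IO ()
main = putStrLn (showts 1395336072)  -- "2014-03-20@17-21-12" (UTC)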
Logs/PreferredContent.hs

@@ -1,6 +1,6 @@
{- git-annex preferred content matcher configuration
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
- Copyright 2012-2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

@@ -8,10 +8,12 @@
module Logs.PreferredContent (
preferredContentLog,
preferredContentSet,
groupPreferredContentSet,
isPreferredContent,
preferredContentMap,
preferredContentMapLoad,
preferredContentMapRaw,
groupPreferredContentMapRaw,
checkPreferredContentExpression,
setStandardGroup,
) where

@@ -35,6 +37,7 @@ import Types.Remote (RemoteConfig)
import Logs.Group
import Logs.Remote
import Types.StandardGroups
import Limit

{- Checks if a file is preferred content for the specified repository
- (or the current repository if none is specified). -}

@@ -56,40 +59,61 @@ preferredContentMapLoad :: Annex Annex.PreferredContentMap
preferredContentMapLoad = do
groupmap <- groupMap
configmap <- readRemoteLog
groupwantedmap <- groupPreferredContentMapRaw
m <- simpleMap
. parseLogWithUUID ((Just .) . makeMatcher groupmap configmap)
. parseLogWithUUID ((Just .) . makeMatcher groupmap configmap groupwantedmap)
<$> Annex.Branch.get preferredContentLog
Annex.changeState $ \s -> s { Annex.preferredcontentmap = Just m }
return m

{- This intentionally never fails, even on unparsable expressions,
- because the configuration is shared among repositories and newer
- versions of git-annex may add new features. Instead, parse errors
- result in a Matcher that will always succeed. -}
makeMatcher :: GroupMap -> M.Map UUID RemoteConfig -> UUID -> PreferredContentExpression -> FileMatcher
makeMatcher groupmap configmap u expr
| expr == "standard" = standardMatcher groupmap configmap u
| null (lefts tokens) = Utility.Matcher.generate $ rights tokens
| otherwise = matchAll
- versions of git-annex may add new features. -}
makeMatcher
:: GroupMap
-> M.Map UUID RemoteConfig
-> M.Map Group PreferredContentExpression
-> UUID
-> PreferredContentExpression
-> FileMatcher
makeMatcher groupmap configmap groupwantedmap u = go True True
where
tokens = exprParser groupmap configmap (Just u) expr
go expandstandard expandgroupwanted expr
| null (lefts tokens) = Utility.Matcher.generate $ rights tokens
| otherwise = unknownMatcher u
where
tokens = exprParser matchstandard matchgroupwanted groupmap configmap (Just u) expr
matchstandard
| expandstandard = maybe (unknownMatcher u) (go False False)
(standardPreferredContent <$> getStandardGroup mygroups)
| otherwise = unknownMatcher u
matchgroupwanted
| expandgroupwanted = maybe (unknownMatcher u) (go True False)
(groupwanted mygroups)
| otherwise = unknownMatcher u
mygroups = fromMaybe S.empty (u `M.lookup` groupsByUUID groupmap)
groupwanted s = case M.elems $ M.filterWithKey (\k _ -> S.member k s) groupwantedmap of
[pc] -> Just pc
_ -> Nothing

{- Standard matchers are pre-defined for some groups. If none is defined,
- or a repository is in multiple groups with standard matchers, match all. -}
standardMatcher :: GroupMap -> M.Map UUID RemoteConfig -> UUID -> FileMatcher
standardMatcher groupmap configmap u =
maybe matchAll (makeMatcher groupmap configmap u . preferredContent) $
getStandardGroup =<< u `M.lookup` groupsByUUID groupmap
{- When a preferred content expression cannot be parsed, but is already
- in the log (eg, put there by a newer version of git-annex),
- the fallback behavior is to match only files that are currently present.
-
- This avoid unwanted/expensive changes to the content, until the problem
- is resolved. -}
unknownMatcher :: UUID -> FileMatcher
unknownMatcher u = Utility.Matcher.generate [present]
where
present = Utility.Matcher.Operation $ matchPresent (Just u)

{- Checks if an expression can be parsed, if not returns Just error -}
checkPreferredContentExpression :: PreferredContentExpression -> Maybe String
checkPreferredContentExpression expr
| expr == "standard" = Nothing
| otherwise = case parsedToMatcher tokens of
Left e -> Just e
Right _ -> Nothing
checkPreferredContentExpression expr = case parsedToMatcher tokens of
Left e -> Just e
Right _ -> Nothing
where
tokens = exprParser emptyGroupMap M.empty Nothing expr
tokens = exprParser matchAll matchAll emptyGroupMap M.empty Nothing expr

{- Puts a UUID in a standard group, and sets its preferred content to use
- the standard expression for that group, unless something is already set. -}
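Two effects of the refactoring above are worth calling out: "standard" becomes an ordinary token that composes with other operators, and an unparsable expression now degrades to unknownMatcher (match only files already present) instead of matching everything. Example expressions the new parser is meant to accept; the first two are taken from the changelog and source, the third composition is illustrative only:

exprs :: [String]
exprs =
        [ "standard"
        , "standard or (include=otherdir/*)"
        , "groupwanted or (not copies=archive:1)"
        ]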
Logs/PreferredContent/Raw.hs

@@ -1,6 +1,6 @@
{- unparsed preferred content expressions
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
- Copyright 2012-2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

@@ -15,17 +15,35 @@ import qualified Annex.Branch
import qualified Annex
import Logs
import Logs.UUIDBased
import Logs.MapLog
import Types.StandardGroups
import Types.Group

{- Changes the preferred content configuration of a remote. -}
preferredContentSet :: UUID -> PreferredContentExpression -> Annex ()
preferredContentSet uuid@(UUID _) val = do
ts <- liftIO getPOSIXTime
Annex.Branch.change preferredContentLog $
showLog id . changeLog ts uuid val . parseLog Just
showLog id
. changeLog ts uuid val
. parseLog Just
Annex.changeState $ \s -> s { Annex.preferredcontentmap = Nothing }
preferredContentSet NoUUID _ = error "unknown UUID; cannot modify"

{- Changes the preferred content configuration of a group. -}
groupPreferredContentSet :: Group -> PreferredContentExpression -> Annex ()
groupPreferredContentSet g val = do
ts <- liftIO getPOSIXTime
Annex.Branch.change groupPreferredContentLog $
showMapLog id id
. changeMapLog ts g val
. parseMapLog Just Just
Annex.changeState $ \s -> s { Annex.preferredcontentmap = Nothing }

preferredContentMapRaw :: Annex (M.Map UUID PreferredContentExpression)
preferredContentMapRaw = simpleMap . parseLog Just
<$> Annex.Branch.get preferredContentLog

groupPreferredContentMapRaw :: Annex (M.Map Group PreferredContentExpression)
groupPreferredContentMapRaw = simpleMap . parseMapLog Just Just
<$> Annex.Branch.get groupPreferredContentLog

Logs/UUIDBased.hs

@@ -26,9 +26,6 @@ module Logs.UUIDBased (
changeLog,
addLog,
simpleMap,

prop_TimeStamp_sane,
prop_addLog_sane,
) where

import qualified Data.Map as M

@@ -38,21 +35,11 @@ import System.Locale

import Common
import Types.UUID
import Logs.MapLog

data TimeStamp = Unknown | Date POSIXTime
deriving (Eq, Ord, Show)
type Log v = MapLog UUID v

data LogEntry a = LogEntry
{ changed :: TimeStamp
, value :: a
} deriving (Eq, Show)

type Log a = M.Map UUID (LogEntry a)

tskey :: String
tskey = "timestamp="

showLog :: (a -> String) -> Log a -> String
showLog :: (v -> String) -> Log v -> String
showLog shower = unlines . map showpair . M.toList
where
showpair (k, LogEntry (Date p) v) =

@@ -60,14 +47,6 @@ showLog shower = unlines . map showpair . M.toList
showpair (k, LogEntry Unknown v) =
unwords [fromUUID k, shower v]

showLogNew :: (a -> String) -> Log a -> String
showLogNew shower = unlines . map showpair . M.toList
where
showpair (k, LogEntry (Date p) v) =
unwords [show p, fromUUID k, shower v]
showpair (k, LogEntry Unknown v) =
unwords ["0", fromUUID k, shower v]

parseLog :: (String -> Maybe a) -> String -> Log a
parseLog = parseLogWithUUID . const

@@ -98,45 +77,17 @@ parseLogWithUUID parser = M.fromListWith best . mapMaybe parse . lines
Nothing -> Unknown
Just d -> Date $ utcTimeToPOSIXSeconds d

parseLogNew :: (String -> Maybe a) -> String -> Log a
parseLogNew parser = M.fromListWith best . mapMaybe parse . lines
where
parse line = do
let (ts, rest) = splitword line
(u, v) = splitword rest
date <- Date . utcTimeToPOSIXSeconds <$> parseTime defaultTimeLocale "%s%Qs" ts
val <- parser v
Just (toUUID u, LogEntry date val)
splitword = separate (== ' ')
showLogNew :: (v -> String) -> Log v -> String
showLogNew = showMapLog fromUUID

changeLog :: POSIXTime -> UUID -> a -> Log a -> Log a
changeLog t u v = M.insert u $ LogEntry (Date t) v
parseLogNew :: (String -> Maybe v) -> String -> Log v
parseLogNew = parseMapLog (Just . toUUID)

{- Only add an LogEntry if it's newer (or at least as new as) than any
- existing LogEntry for a UUID. -}
addLog :: UUID -> LogEntry a -> Log a -> Log a
addLog = M.insertWith' best
changeLog :: POSIXTime -> UUID -> v -> Log v -> Log v
changeLog = changeMapLog

{- Converts a Log into a simple Map without the timestamp information.
- This is a one-way trip, but useful for code that never needs to change
- the log. -}
simpleMap :: Log a -> M.Map UUID a
simpleMap = M.map value
addLog :: UUID -> LogEntry v -> Log v -> Log v
addLog = addMapLog

best :: LogEntry a -> LogEntry a -> LogEntry a
best new old
| changed old > changed new = old
| otherwise = new

-- Unknown is oldest.
prop_TimeStamp_sane :: Bool
prop_TimeStamp_sane = Unknown < Date 1

prop_addLog_sane :: Bool
prop_addLog_sane = newWins && newestWins
where
newWins = addLog (UUID "foo") (LogEntry (Date 1) "new") l == l2
newestWins = addLog (UUID "foo") (LogEntry (Date 1) "newest") l2 /= l2

l = M.fromList [(UUID "foo", LogEntry (Date 0) "old")]
l2 = M.fromList [(UUID "foo", LogEntry (Date 1) "new")]
tskey :: String
tskey = "timestamp="

Logs/Unused.hs

@@ -67,7 +67,7 @@ updateUnusedLog prefix m = do
writeUnusedLog :: FilePath -> UnusedLog -> Annex ()
writeUnusedLog prefix l = do
logfile <- fromRepo $ gitAnnexUnusedLog prefix
liftIO $ viaTmp writeFile logfile $ unlines $ map format $ M.toList l
liftIO $ viaTmp writeFileAnyEncoding logfile $ unlines $ map format $ M.toList l
where
format (k, (i, Just t)) = show i ++ " " ++ key2file k ++ " " ++ show t
format (k, (i, Nothing)) = show i ++ " " ++ key2file k

@@ -77,7 +77,7 @@ readUnusedLog prefix = do
f <- fromRepo $ gitAnnexUnusedLog prefix
ifM (liftIO $ doesFileExist f)
( M.fromList . mapMaybe parse . lines
<$> liftIO (readFile f)
<$> liftIO (readFileStrictAnyEncoding f)
, return M.empty
)
where

@@ -99,7 +99,6 @@ dateUnusedLog prefix = do
f <- fromRepo $ gitAnnexUnusedLog prefix
liftIO $ catchMaybeIO $ getModificationTime f
#else
#warning foo
-- old ghc's getModificationTime returned a ClockTime
dateUnusedLog _prefix = return Nothing
#endif

2 Makefile

@@ -119,7 +119,7 @@ linuxstandalone-nobuild: Build/Standalone Build/LinuxMkLibs
strip "$(LINUXSTANDALONE_DEST)/bin/git-annex"
ln -sf git-annex "$(LINUXSTANDALONE_DEST)/bin/git-annex-shell"
zcat standalone/licences.gz > $(LINUXSTANDALONE_DEST)/LICENSE
cp doc/favicon.png doc/logo.svg $(LINUXSTANDALONE_DEST)
cp doc/logo_16x16.png doc/logo.svg $(LINUXSTANDALONE_DEST)

./Build/Standalone "$(LINUXSTANDALONE_DEST)"

34 Remote.hs

@@ -37,6 +37,7 @@ module Remote (
keyPossibilities,
keyPossibilitiesTrusted,
nameToUUID,
nameToUUID',
showTriedRemotes,
showLocations,
forceTrust,

@@ -48,7 +49,6 @@ module Remote (
import qualified Data.Map as M
import Text.JSON
import Text.JSON.Generic
import Data.Tuple
import Data.Ord

import Common.Annex

@@ -121,23 +121,25 @@ noRemoteUUIDMsg r = "cannot determine uuid for " ++ name r
- and returns its UUID. Finds even repositories that are not
- configured in .git/config. -}
nameToUUID :: RemoteName -> Annex UUID
nameToUUID "." = getUUID -- special case for current repo
nameToUUID "here" = getUUID
nameToUUID "" = error "no remote specified"
nameToUUID n = byName' n >>= go
nameToUUID = either error return <=< nameToUUID'

nameToUUID' :: RemoteName -> Annex (Either String UUID)
nameToUUID' "." = Right <$> getUUID -- special case for current repo
nameToUUID' "here" = Right <$> getUUID
nameToUUID' n = byName' n >>= go
where
go (Right r) = case uuid r of
NoUUID -> error $ noRemoteUUIDMsg r
u -> return u
go (Left e) = fromMaybe (error e) <$> bydescription
bydescription = do
go (Right r) = return $ case uuid r of
NoUUID -> Left $ noRemoteUUIDMsg r
u -> Right u
go (Left e) = do
m <- uuidMap
case M.lookup n $ transform swap m of
Just u -> return $ Just u
Nothing -> return $ byuuid m
byuuid m = M.lookup (toUUID n) $ transform double m
transform a = M.fromList . map a . M.toList
double (a, _) = (a, a)
return $ case M.keys (M.filter (== n) m) of
[u] -> Right u
[] -> let u = toUUID n
in case M.keys (M.filterWithKey (\k _ -> k == u) m) of
[] -> Left e
_ -> Right u
_us -> Left "Found multiple repositories with that description"

{- Pretty-prints a list of UUIDs of remotes, for human display.
-
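The rewritten lookup above fails instead of guessing when several repositories share a description (the changelog entry about no longer "picking a repository at random"). The disambiguation rule, as a standalone sketch over a uuid-to-description map (hypothetical names):

import qualified Data.Map as M

-- m maps uuid -> description; a description matching exactly one
-- repository resolves, multiple matches are an error.
lookupByDesc :: String -> M.Map String String -> Either String String
lookupByDesc n m = case M.keys (M.filter (== n) m) of
        [u] -> Right u
        [] -> Left ("no repository found matching " ++ n)
        _ -> Left "Found multiple repositories with that description"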
Remote/External.hs

@@ -11,6 +11,7 @@ import Remote.External.Types
import qualified Annex
import Common.Annex
import Types.Remote
import Types.CleanupActions
import qualified Git
import Config
import Remote.Helper.Special

@@ -43,7 +44,7 @@ remote = RemoteType {
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
external <- newExternal externaltype u c
Annex.addCleanup (fromUUID u) $ stopExternal external
Annex.addCleanup (RemoteCleanup u) $ stopExternal external
cst <- getCost external r gc
avail <- getAvailability external r gc
return $ Just $ encryptableRemote c

Remote/Git.hs

@@ -36,6 +36,7 @@ import Config
import Config.Cost
import Annex.Init
import Types.Key
import Types.CleanupActions
import qualified CmdLine.GitAnnexShell.Fields as Fields
import Logs.Location
import Utility.Metered

@@ -144,7 +145,7 @@ repoAvail r
else return True
| Git.repoIsUrl r = return True
| Git.repoIsLocalUnknown r = return False
| otherwise = liftIO $ catchBoolIO $ onLocal r $ return True
| otherwise = liftIO $ isJust <$> catchMaybeIO (Git.Config.read r)

{- Tries to read the config for a specified remote, updates state, and
- returns the updated repo. -}

@@ -161,9 +162,12 @@ tryGitConfigRead r
| Git.repoIsHttp r = store geturlconfig
| Git.GCrypt.isEncrypted r = handlegcrypt =<< getConfigMaybe (remoteConfig r "uuid")
| Git.repoIsUrl r = return r
| otherwise = store $ safely $ onLocal r $ do
ensureInitialized
Annex.getState Annex.repo
| otherwise = store $ safely $ do
s <- Annex.new r
Annex.eval s $ do
Annex.BranchState.disableUpdate
ensureInitialized
Annex.getState Annex.repo
where
haveconfig = not . M.null . Git.config

@@ -267,8 +271,8 @@ inAnnex rmt key
checkremote = Ssh.inAnnex r key
checklocal = guardUsable r (cantCheck r) $ dispatch <$> check
where
check = liftIO $ catchMsgIO $ onLocal r $
Annex.Content.inAnnexSafe key
check = either (Left . show) Right
<$> tryAnnex (onLocal rmt $ Annex.Content.inAnnexSafe key)
dispatch (Left e) = Left e
dispatch (Right (Just b)) = Right b
dispatch (Right Nothing) = cantCheck r

@@ -291,7 +295,7 @@ keyUrls r key = map tourl locs'
dropKey :: Remote -> Key -> Annex Bool
dropKey r key
| not $ Git.repoIsUrl (repo r) =
guardUsable (repo r) False $ commitOnCleanup r $ liftIO $ onLocal (repo r) $ do
guardUsable (repo r) False $ commitOnCleanup r $ onLocal r $ do
ensureInitialized
whenM (Annex.Content.inAnnex key) $ do
Annex.Content.lockContent key $

@@ -311,7 +315,7 @@ copyFromRemote' r key file dest
let params = Ssh.rsyncParams r Download
u <- getUUID
-- run copy from perspective of remote
liftIO $ onLocal (repo r) $ do
onLocal r $ do
ensureInitialized
v <- Annex.Content.prepSendAnnex key
case v of

@@ -410,7 +414,7 @@ copyToRemote r key file p
let params = Ssh.rsyncParams r Upload
u <- getUUID
-- run copy from perspective of remote
liftIO $ onLocal (repo r) $ ifM (Annex.Content.inAnnex key)
onLocal r $ ifM (Annex.Content.inAnnex key)
( return True
, do
ensureInitialized

@@ -439,19 +443,40 @@ fsckOnRemote r params

{- The passed repair action is run in the Annex monad of the remote. -}
repairRemote :: Git.Repo -> Annex Bool -> Annex (IO Bool)
repairRemote r a = return $ Remote.Git.onLocal r a

{- Runs an action on a local repository inexpensively, by making an annex
- monad using that repository. -}
onLocal :: Git.Repo -> Annex a -> IO a
onLocal r a = do
repairRemote r a = return $ do
s <- Annex.new r
Annex.eval s $ do
-- No need to update the branch; its data is not used
-- for anything onLocal is used to do.
Annex.BranchState.disableUpdate
ensureInitialized
a

{- Runs an action from the perspective of a local remote.
-
- The AnnexState is cached for speed and to avoid resource leaks.
-
- The repository's git-annex branch is not updated, as an optimisation.
- No caller of onLocal can query data from the branch and be ensured
- it gets a current value. Caller of onLocal can make changes to
- the branch, however.
-}
onLocal :: Remote -> Annex a -> Annex a
onLocal r a = do
m <- Annex.getState Annex.remoteannexstate
case M.lookup (uuid r) m of
Nothing -> do
st <- liftIO $ Annex.new (repo r)
go st $ do
Annex.BranchState.disableUpdate
a
Just st -> go st a
where
cache st = Annex.changeState $ \s -> s
{ Annex.remoteannexstate = M.insert (uuid r) st (Annex.remoteannexstate s) }
go st a' = do
(ret, st') <- liftIO $ Annex.run st a'
cache st'
return ret

{- Copys a file with rsync unless both locations are on the same
- filesystem. Then cp could be faster. -}
rsyncOrCopyFile :: [CommandParam] -> FilePath -> FilePath -> MeterUpdate -> Annex Bool

@@ -486,9 +511,9 @@ rsyncOrCopyFile rsyncparams src dest p =
commitOnCleanup :: Remote -> Annex a -> Annex a
commitOnCleanup r a = go `after` a
where
go = Annex.addCleanup (Git.repoLocation $ repo r) cleanup
go = Annex.addCleanup (RemoteCleanup $ uuid r) cleanup
cleanup
| not $ Git.repoIsUrl (repo r) = liftIO $ onLocal (repo r) $
| not $ Git.repoIsUrl (repo r) = onLocal r $
doQuietSideAction $
Annex.Branch.commit "update"
| otherwise = void $ do
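The caching in onLocal is what the changelog's zombie-leak fix boils down to: one AnnexState per local remote, created on first use, then reused and re-stored around every later action instead of being rebuilt each time. The same pattern reduced to plain IO, with all names hypothetical:

import qualified Data.Map as M
import Data.IORef

-- Build a per-key state once; thread it through each action and
-- store the updated state back in the cache afterwards.
cachedRun :: Ord k => IORef (M.Map k s) -> k -> IO s -> (s -> IO (a, s)) -> IO a
cachedRun cache k mkstate run = do
        m <- readIORef cache
        st <- maybe mkstate return (M.lookup k m)
        (r, st') <- run st
        modifyIORef cache (M.insert k st')
        return r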
Remote/Helper/Hooks.hs

@@ -13,6 +13,7 @@ import qualified Data.Map as M

import Common.Annex
import Types.Remote
import Types.CleanupActions
import qualified Annex
import Annex.LockPool
#ifndef mingw32_HOST_OS

@@ -74,7 +75,7 @@ runHooks r starthook stophook a = do
-- So, requiring idempotency is the right approach.
run starthook

Annex.addCleanup (remoteid ++ "-stop-command") $ runstop lck
Annex.addCleanup (StopHook $ uuid r) $ runstop lck
runstop lck = do
-- Drop any shared lock we have, and take an
-- exclusive lock, without blocking. If the lock

Remote/Rsync.hs

@@ -28,6 +28,7 @@ import Annex.UUID
import Annex.Ssh
import Remote.Helper.Special
import Remote.Helper.Encryptable
import Remote.Rsync.RsyncUrl
import Crypto
import Utility.Rsync
import Utility.CopyFile

@@ -40,16 +41,6 @@ import Types.Creds
import qualified Data.ByteString.Lazy as L
import qualified Data.Map as M

type RsyncUrl = String

data RsyncOpts = RsyncOpts
{ rsyncUrl :: RsyncUrl
, rsyncOptions :: [CommandParam]
, rsyncUploadOptions :: [CommandParam]
, rsyncDownloadOptions :: [CommandParam]
, rsyncShellEscape :: Bool
}

remote :: RemoteType
remote = RemoteType {
typename = "rsync",

@@ -148,17 +139,6 @@ rsyncSetup mu _ c = do
gitConfigSpecialRemote u c' "rsyncurl" url
return (c', u)

rsyncEscape :: RsyncOpts -> String -> String
rsyncEscape o s
| rsyncShellEscape o && rsyncUrlIsShell (rsyncUrl o) = shellEscape s
| otherwise = s

rsyncUrls :: RsyncOpts -> Key -> [String]
rsyncUrls o k = map use annexHashes
where
use h = rsyncUrl o </> h k </> rsyncEscape o (f </> f)
f = keyFile k

store :: RsyncOpts -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store o k _f p = sendAnnex k (void $ remove o k) $ rsyncSend o p k False

46 Remote/Rsync/RsyncUrl.hs (new file)

@@ -0,0 +1,46 @@
{- Rsync urls.
-
- Copyright 2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

{-# LANGUAGE CPP #-}

module Remote.Rsync.RsyncUrl where

import Types
import Locations
import Utility.Rsync
import Utility.SafeCommand

import System.FilePath.Posix
#ifdef mingw32_HOST_OS
import Data.String.Utils
#endif

type RsyncUrl = String

data RsyncOpts = RsyncOpts
{ rsyncUrl :: RsyncUrl
, rsyncOptions :: [CommandParam]
, rsyncUploadOptions :: [CommandParam]
, rsyncDownloadOptions :: [CommandParam]
, rsyncShellEscape :: Bool
}

rsyncEscape :: RsyncOpts -> RsyncUrl -> RsyncUrl
rsyncEscape o u
| rsyncShellEscape o && rsyncUrlIsShell (rsyncUrl o) = shellEscape u
| otherwise = u

rsyncUrls :: RsyncOpts -> Key -> [RsyncUrl]
rsyncUrls o k = map use annexHashes
where
use h = rsyncUrl o </> hash h </> rsyncEscape o (f </> f)
f = keyFile k
#ifndef mingw32_HOST_OS
hash h = h k
#else
hash h = replace "\\" "/" (h k)
#endif
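This is the changelog's "rsync special remote: Fix slashes when used on Windows": hash directories computed with backslashes are normalized before being spliced into an rsync url, which must use forward slashes. A tiny demo of the replace call (from MissingH's Data.String.Utils, as imported above); the sample path is made up:

import Data.String.Utils (replace)

main :: IO ()
main = putStrLn (replace "\\" "/" "f87\\4ad\\SHA256-s1--xxxx")
-- prints f87/4ad/SHA256-s1--xxxx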
11 Setup.hs

@@ -16,15 +16,14 @@ import System.Directory
import qualified Build.DesktopFile as DesktopFile
import qualified Build.Configure as Configure

main :: IO ()
main = defaultMainWithHooks simpleUserHooks
{ preConf = configure
{ preConf = \_ _ -> do
Configure.run Configure.tests
return (Nothing, [])
, postInst = myPostInst
}

configure _ _ = do
Configure.run Configure.tests
return (Nothing, [])

myPostInst :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO ()
myPostInst _ (InstallFlags { installVerbosity }) pkg lbi = do
installGitAnnexShell dest verbosity pkg lbi

@@ -57,7 +56,7 @@ installManpages copyDest verbosity pkg lbi =
manpages = ["git-annex.1", "git-annex-shell.1"]

installDesktopFile :: CopyDest -> Verbosity -> PackageDescription -> LocalBuildInfo -> IO ()
installDesktopFile copyDest verbosity pkg lbi =
installDesktopFile copyDest _verbosity pkg lbi =
DesktopFile.install $ dstBinDir </> "git-annex"
where
dstBinDir = bindir $ absoluteInstallDirs pkg lbi copyDest

27 Test.hs

@@ -17,12 +17,14 @@ import Test.Tasty.Ingredients.Rerun
import Data.Monoid

import Options.Applicative hiding (command)
#if MIN_VERSION_optparse_applicative(0,8,0)
import qualified Options.Applicative.Types as Opt
#endif
import Control.Exception.Extensible
import qualified Data.Map as M
import System.IO.HVFS (SystemFS(..))
import qualified Text.JSON
import System.Path
import qualified Data.ByteString.Lazy as L

import Common

@@ -43,7 +45,7 @@ import qualified Types.Backend
import qualified Types.TrustLevel
import qualified Types
import qualified Logs
import qualified Logs.UUIDBased
import qualified Logs.MapLog
import qualified Logs.Trust
import qualified Logs.Remote
import qualified Logs.Unused

@@ -104,8 +106,7 @@ main ps = do
-- parameters is "test".
let pinfo = info (helper <*> suiteOptionParser ingredients tests)
( fullDesc <> header "Builtin test suite" )
opts <- either (\f -> error =<< errMessage f "git-annex test") return $
execParserPure (prefs idm) pinfo ps
opts <- parseOpts (prefs idm) pinfo ps
case tryIngredients ingredients opts tests of
Nothing -> error "No tests found!?"
Just act -> ifM act

@@ -115,6 +116,18 @@ main ps = do
putStrLn " with utilities, such as git, installed on this system.)"
exitFailure
)
where
progdesc = "git-annex test"
parseOpts pprefs pinfo args =
#if MIN_VERSION_optparse_applicative(0,8,0)
pure $ case execParserPure pprefs pinfo args of
Opt.Success v -> v
Opt.Failure f -> error $ fst $ Opt.execFailure f progdesc
Opt.CompletionInvoked _ -> error "completion not supported"
#else
either (error <=< flip errMessage progdesc) return $
execParserPure pprefs pinfo args
#endif

ingredients :: [Ingredient]
ingredients =

@@ -140,8 +153,8 @@ properties = localOption (QuickCheckTests 1000) $ testGroup "QuickCheck"
, testProperty "prop_cost_sane" Config.Cost.prop_cost_sane
, testProperty "prop_matcher_sane" Utility.Matcher.prop_matcher_sane
, testProperty "prop_HmacSha1WithCipher_sane" Crypto.prop_HmacSha1WithCipher_sane
, testProperty "prop_TimeStamp_sane" Logs.UUIDBased.prop_TimeStamp_sane
, testProperty "prop_addLog_sane" Logs.UUIDBased.prop_addLog_sane
, testProperty "prop_TimeStamp_sane" Logs.MapLog.prop_TimeStamp_sane
, testProperty "prop_addMapLog_sane" Logs.MapLog.prop_addMapLog_sane
, testProperty "prop_verifiable_sane" Utility.Verifiable.prop_verifiable_sane
, testProperty "prop_segment_regressionTest" Utility.Misc.prop_segment_regressionTest
, testProperty "prop_read_write_transferinfo" Logs.Transfer.prop_read_write_transferinfo

@@ -1272,7 +1285,7 @@ test_add_subdirs env = intmpclonerepo env $ do
{- Regression test for Windows bug where symlinks were not
- calculated correctly for files in subdirs. -}
git_annex env "sync" [] @? "sync failed"
l <- annexeval $ encodeW8 . L.unpack <$> Annex.CatFile.catObject (Git.Types.Ref "HEAD:dir/foo")
l <- annexeval $ decodeBS <$> Annex.CatFile.catObject (Git.Types.Ref "HEAD:dir/foo")
"../.git/annex/" `isPrefixOf` l @? ("symlink from subdir to .git/annex is wrong: " ++ l)

createDirectory "dir2"

17 Types/CleanupActions.hs (new file)

@@ -0,0 +1,17 @@
{- Enumeration of cleanup actions
-
- Copyright 2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

module Types.CleanupActions where

import Types.UUID

data CleanupAction
= RemoteCleanup UUID
| StopHook UUID
| FsckCleanup
| SshCachingCleanup
deriving (Eq, Ord)
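A design note on this new module: replacing the ad-hoc String keys used by addCleanup with a small sum type means cleanup registrations cannot collide by accident, and the derived Eq and Ord instances are exactly what a Data.Map key needs. A usage sketch assuming the module above:

import qualified Data.Map as M
import Types.CleanupActions

-- Typed keys: no string-formatting conventions to get wrong.
actions :: M.Map CleanupAction (IO ())
actions = M.fromList
        [ (FsckCleanup, putStrLn "fsck cleanup")
        , (SshCachingCleanup, putStrLn "stop ssh connection caching")
        ]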
Types/MetaData.hs

@@ -28,6 +28,7 @@ module Types.MetaData (
emptyMetaData,
updateMetaData,
unionMetaData,
combineMetaData,
differenceMetaData,
isSet,
currentMetaData,

@@ -140,7 +141,7 @@ toMetaField f
- that would break views.
-
- So, require they have an alphanumeric first letter, with the remainder
- being either alphanumeric or a small set of shitelisted common punctuation.
- being either alphanumeric or a small set of whitelisted common punctuation.
-}
legalField :: String -> Bool
legalField [] = False

@@ -188,6 +189,9 @@ unionMetaData :: MetaData -> MetaData -> MetaData
unionMetaData (MetaData old) (MetaData new) = MetaData $
M.unionWith S.union new old

combineMetaData :: [MetaData] -> MetaData
combineMetaData = foldl' unionMetaData emptyMetaData

differenceMetaData :: MetaData -> MetaData -> MetaData
differenceMetaData (MetaData m) (MetaData excludem) = MetaData $
M.differenceWith diff m excludem

Types/StandardGroups.hs

@@ -8,6 +8,7 @@
module Types.StandardGroups where

import Types.Remote (RemoteConfig)
import Types.Group

import qualified Data.Map as M
import Data.Maybe

@@ -27,7 +28,7 @@ data StandardGroup
| UnwantedGroup
deriving (Eq, Ord, Enum, Bounded, Show)

fromStandardGroup :: StandardGroup -> String
fromStandardGroup :: StandardGroup -> Group
fromStandardGroup ClientGroup = "client"
fromStandardGroup TransferGroup = "transfer"
fromStandardGroup BackupGroup = "backup"

@@ -39,7 +40,7 @@ fromStandardGroup ManualGroup = "manual"
fromStandardGroup PublicGroup = "public"
fromStandardGroup UnwantedGroup = "unwanted"

toStandardGroup :: String -> Maybe StandardGroup
toStandardGroup :: Group -> Maybe StandardGroup
toStandardGroup "client" = Just ClientGroup
toStandardGroup "transfer" = Just TransferGroup
toStandardGroup "backup" = Just BackupGroup

@@ -77,21 +78,21 @@ specialRemoteOnly PublicGroup = True
specialRemoteOnly _ = False

{- See doc/preferred_content.mdwn for explanations of these expressions. -}
preferredContent :: StandardGroup -> PreferredContentExpression
preferredContent ClientGroup = lastResort $
standardPreferredContent :: StandardGroup -> PreferredContentExpression
standardPreferredContent ClientGroup = lastResort $
"((exclude=*/archive/* and exclude=archive/*) or (" ++ notArchived ++ ")) and not unused"
preferredContent TransferGroup = lastResort $
"not (inallgroup=client and copies=client:2) and (" ++ preferredContent ClientGroup ++ ")"
preferredContent BackupGroup = "include=* or unused"
preferredContent IncrementalBackupGroup = lastResort
standardPreferredContent TransferGroup = lastResort $
"not (inallgroup=client and copies=client:2) and (" ++ standardPreferredContent ClientGroup ++ ")"
standardPreferredContent BackupGroup = "include=* or unused"
standardPreferredContent IncrementalBackupGroup = lastResort
"(include=* or unused) and (not copies=incrementalbackup:1)"
preferredContent SmallArchiveGroup = lastResort $
"(include=*/archive/* or include=archive/*) and (" ++ preferredContent FullArchiveGroup ++ ")"
preferredContent FullArchiveGroup = lastResort notArchived
preferredContent SourceGroup = "not (copies=1)"
preferredContent ManualGroup = "present and (" ++ preferredContent ClientGroup ++ ")"
preferredContent PublicGroup = "inpreferreddir"
preferredContent UnwantedGroup = "exclude=*"
standardPreferredContent SmallArchiveGroup = lastResort $
"(include=*/archive/* or include=archive/*) and (" ++ standardPreferredContent FullArchiveGroup ++ ")"
standardPreferredContent FullArchiveGroup = lastResort notArchived
standardPreferredContent SourceGroup = "not (copies=1)"
standardPreferredContent ManualGroup = "present and (" ++ standardPreferredContent ClientGroup ++ ")"
standardPreferredContent PublicGroup = "inpreferreddir"
standardPreferredContent UnwantedGroup = "exclude=*"

notArchived :: String
notArchived = "not (copies=archive:1 or copies=smallarchive:1)"

Utility/FileMode.hs

@@ -99,13 +99,20 @@ noUmask :: FileMode -> IO a -> IO a
#ifndef mingw32_HOST_OS
noUmask mode a
| mode == stdFileMode = a
| otherwise = bracket setup cleanup go
| otherwise = withUmask nullFileMode a
#else
noUmask _ a = a
#endif

withUmask :: FileMode -> IO a -> IO a
#ifndef mingw32_HOST_OS
withUmask umask a = bracket setup cleanup go
where
setup = setFileCreationMask nullFileMode
setup = setFileCreationMask umask
cleanup = setFileCreationMask
go _ = a
#else
noUmask _ a = a
withUmask _ a = a
#endif

combineModes :: [FileMode] -> FileMode

@@ -127,14 +134,20 @@ setSticky f = modifyFileMode f $ addModes [stickyMode]
#endif

{- Writes a file, ensuring that its modes do not allow it to be read
- by anyone other than the current user, before any content is written.
- or written by anyone other than the current user,
- before any content is written.
-
- When possible, this is done using the umask.
-
- On a filesystem that does not support file permissions, this is the same
- as writeFile.
-}
writeFileProtected :: FilePath -> String -> IO ()
writeFileProtected file content = withFile file WriteMode $ \h -> do
void $ tryIO $
modifyFileMode file $
removeModes [groupReadMode, otherReadMode]
hPutStr h content
writeFileProtected file content = withUmask 0o0077 $
withFile file WriteMode $ \h -> do
void $ tryIO $ modifyFileMode file $
removeModes
[ groupReadMode, otherReadMode
, groupWriteMode, otherWriteMode
]
hPutStr h content
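writeFileProtected now creates the file under a restrictive umask instead of chmodding it after creation, closing the window where the file briefly existed with looser modes (the changelog's "Better workaround for problem umasks"). The bracket at the core of withUmask, as a standalone POSIX-only sketch:

import Control.Exception (bracket)
import System.Posix.Files (setFileCreationMask)
import System.Posix.Types (FileMode)

-- Set the umask, run the action, restore the old umask afterwards.
withUmask' :: FileMode -> IO a -> IO a
withUmask' um a = bracket (setFileCreationMask um) setFileCreationMask (const a)

-- e.g. withUmask' 0o077 (writeFile "secret" "...") creates "secret" mode 600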
Utility/FileSystemEncoding.hs

@@ -1,14 +1,17 @@
{- GHC File system encoding handling.
-
- Copyright 2012-2013 Joey Hess <joey@kitenet.net>
- Copyright 2012-2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

{-# LANGUAGE CPP #-}

module Utility.FileSystemEncoding (
fileEncoding,
withFilePath,
md5FilePath,
decodeBS,
decodeW8,
encodeW8,
truncateFilePath,

@@ -22,13 +25,24 @@ import System.IO.Unsafe
import qualified Data.Hash.MD5 as MD5
import Data.Word
import Data.Bits.Utils
import qualified Data.ByteString.Lazy as L
#ifdef mingw32_HOST_OS
import qualified Data.ByteString.Lazy.UTF8 as L8
#endif

{- Sets a Handle to use the filesystem encoding. This causes data
- written or read from it to be encoded/decoded the same
- as ghc 7.4 does to filenames etc. This special encoding
- allows "arbitrary undecodable bytes to be round-tripped through it". -}
- allows "arbitrary undecodable bytes to be round-tripped through it".
-}
fileEncoding :: Handle -> IO ()
#ifndef mingw32_HOST_OS
fileEncoding h = hSetEncoding h =<< Encoding.getFileSystemEncoding
#else
{- The file system encoding does not work well on Windows,
- and Windows only has utf FilePaths anyway. -}
fileEncoding h = hSetEncoding h Encoding.utf8
#endif

{- Marshal a Haskell FilePath into a NUL terminated C string using temporary
- storage. The FilePath is encoded using the filesystem encoding,

@@ -60,6 +74,16 @@ _encodeFilePath fp = unsafePerformIO $ do
md5FilePath :: FilePath -> MD5.Str
md5FilePath = MD5.Str . _encodeFilePath

{- Decodes a ByteString into a FilePath, applying the filesystem encoding. -}
decodeBS :: L.ByteString -> FilePath
#ifndef mingw32_HOST_OS
decodeBS = encodeW8 . L.unpack
#else
{- On Windows, we assume that the ByteString is utf-8, since Windows
- only uses unicode for filenames. -}
decodeBS = L8.toString
#endif

{- Converts a [Word8] to a FilePath, encoding using the filesystem encoding.
-
- w82c produces a String, which may contain Chars that are invalid

@@ -84,6 +108,7 @@ decodeW8 = s2w8 . _encodeFilePath
- cost of efficiency when running on a large FilePath.
-}
truncateFilePath :: Int -> FilePath -> FilePath
#ifndef mingw32_HOST_OS
truncateFilePath n = go . reverse
where
go f =

@@ -91,3 +116,17 @@ truncateFilePath n = go . reverse
in if length bytes <= n
then reverse f
else go (drop 1 f)
#else
{- On Windows, count the number of bytes used by each utf8 character. -}
truncateFilePath n = reverse . go [] n . L8.fromString
where
go coll cnt bs
| cnt <= 0 = coll
| otherwise = case L8.decode bs of
Just (c, x) | c /= L8.replacement_char ->
let x' = fromIntegral x
in if cnt - x' < 0
then coll
else go (c:coll) (cnt - x') (L8.drop 1 bs)
_ -> coll
#endif

Utility/Misc.hs

@@ -109,18 +109,6 @@ massReplace vs = go [] vs
go (replacement:acc) vs (drop (length val) s)
| otherwise = go acc rest s

{- Given two orderings, returns the second if the first is EQ and returns
- the first otherwise.
-
- Example use:
-
- compare lname1 lname2 `thenOrd` compare fname1 fname2
-}
thenOrd :: Ordering -> Ordering -> Ordering
thenOrd EQ x = x
thenOrd x _ = x
{-# INLINE thenOrd #-}

{- Wrapper around hGetBufSome that returns a String.
-
- The null string is returned on eof, otherwise returns whatever

Utility/QuickCheck.hs

@@ -28,10 +28,10 @@ instance (Arbitrary v, Eq v, Ord v) => Arbitrary (S.Set v) where

{- Times before the epoch are excluded. -}
instance Arbitrary POSIXTime where
arbitrary = nonNegative arbitrarySizedIntegral
arbitrary = fromInteger <$> nonNegative arbitrarySizedIntegral

instance Arbitrary EpochTime where
arbitrary = nonNegative arbitrarySizedIntegral
arbitrary = fromInteger <$> nonNegative arbitrarySizedIntegral

{- Pids are never negative, or 0. -}
instance Arbitrary ProcessID where

Utility/WebApp.hs

@@ -1,6 +1,6 @@
{- Yesod webapp
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
- Copyright 2012-2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

@@ -36,6 +36,10 @@ import Blaze.ByteString.Builder (Builder)
import Data.Monoid
import Control.Arrow ((***))
import Control.Concurrent
#ifdef WITH_WEBAPP_SECURE
import Data.SecureMem
import Data.Byteable
#endif
#ifdef __ANDROID__
import Data.Endian
#endif

@@ -74,14 +78,14 @@ browserProc url = proc "xdg-open" [url]
runWebApp :: Maybe TLSSettings -> Maybe HostName -> Wai.Application -> (SockAddr -> IO ()) -> IO ()
runWebApp tlssettings h app observer = withSocketsDo $ do
sock <- getSocket h
void $ forkIO $ run webAppSettings sock app
void $ forkIO $ go webAppSettings sock app
sockaddr <- fixSockAddr <$> getSocketName sock
observer sockaddr
where
#ifdef WITH_WEBAPP_HTTPS
run = (maybe runSettingsSocket (\ts -> runTLSSocket ts) tlssettings)
#ifdef WITH_WEBAPP_SECURE
go = (maybe runSettingsSocket (\ts -> runTLSSocket ts) tlssettings)
#else
run = runSettingsSocket
go = runSettingsSocket
#endif

fixSockAddr :: SockAddr -> SockAddr

@@ -208,15 +212,35 @@ webAppSessionBackend _ = do
#endif
#endif

{- Generates a random sha512 string, suitable to be used for an
- authentication secret. -}
genRandomToken :: IO String
genRandomToken = do
#ifdef WITH_WEBAPP_SECURE
type AuthToken = SecureMem
#else
type AuthToken = T.Text
#endif

toAuthToken :: T.Text -> AuthToken
#ifdef WITH_WEBAPP_SECURE
toAuthToken = secureMemFromByteString . TE.encodeUtf8
#else
toAuthToken = id
#endif

fromAuthToken :: AuthToken -> T.Text
#ifdef WITH_WEBAPP_SECURE
fromAuthToken = TE.decodeLatin1 . toBytes
#else
fromAuthToken = id
#endif

{- Generates a random sha512 string, encapsulated in a SecureMem,
- suitable to be used for an authentication secret. -}
genAuthToken :: IO AuthToken
genAuthToken = do
g <- newGenIO :: IO SystemRandom
return $
case genBytes 512 g of
Left e -> error $ "failed to generate secret token: " ++ show e
Right (s, _) -> show $ sha512 $ L.fromChunks [s]
Left e -> error $ "failed to generate auth token: " ++ show e
Right (s, _) -> toAuthToken $ T.pack $ show $ sha512 $ L.fromChunks [s]

{- A Yesod isAuthorized method, which checks the auth cgi parameter
- against a token extracted from the Yesod application.

@@ -225,15 +249,15 @@ genRandomToken = do
- possibly leaking the auth token in urls on that page!
-}
#if MIN_VERSION_yesod(1,2,0)
checkAuthToken :: (Monad m, Yesod.MonadHandler m) => (Yesod.HandlerSite m -> T.Text) -> m Yesod.AuthResult
checkAuthToken :: (Monad m, Yesod.MonadHandler m) => (Yesod.HandlerSite m -> AuthToken) -> m Yesod.AuthResult
#else
checkAuthToken :: forall t sub. (t -> T.Text) -> Yesod.GHandler sub t Yesod.AuthResult
checkAuthToken :: forall t sub. (t -> AuthToken) -> Yesod.GHandler sub t Yesod.AuthResult
#endif
checkAuthToken extractToken = do
checkAuthToken extractAuthToken = do
webapp <- Yesod.getYesod
req <- Yesod.getRequest
let params = Yesod.reqGetParams req
if lookup "auth" params == Just (extractToken webapp)
if (toAuthToken <$> lookup "auth" params) == Just (extractAuthToken webapp)
then return Yesod.Authorized
else Yesod.sendResponseStatus unauthorized401 ()

@@ -243,21 +267,21 @@ checkAuthToken extractToken = do
-
- A typical predicate would exclude files under /static.
-}
insertAuthToken :: forall y. (y -> T.Text)
insertAuthToken :: forall y. (y -> AuthToken)
-> ([T.Text] -> Bool)
-> y
-> T.Text
-> [T.Text]
-> [(T.Text, T.Text)]
-> Builder
insertAuthToken extractToken predicate webapp root pathbits params =
insertAuthToken extractAuthToken predicate webapp root pathbits params =
fromText root `mappend` encodePath pathbits' encodedparams
where
pathbits' = if null pathbits then [T.empty] else pathbits
encodedparams = map (TE.encodeUtf8 *** go) params'
go "" = Nothing
go x = Just $ TE.encodeUtf8 x
authparam = (T.pack "auth", extractToken webapp)
authparam = (T.pack "auth", fromAuthToken (extractAuthToken webapp))
params'
| predicate pathbits = authparam:params
| otherwise = params
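The reason for SecureMem here is that its Eq instance compares in constant time, so matching the submitted token against the real one leaks no timing information an attacker could use to recover the token byte by byte. A minimal sketch against the securemem package (the values are made up):

import Data.SecureMem (SecureMem, secureMemFromByteString)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE

-- Same conversion as toAuthToken in the diff above.
toTok :: T.Text -> SecureMem
toTok = secureMemFromByteString . TE.encodeUtf8

main :: IO ()
main = print (toTok (T.pack "secret") == toTok (T.pack "guess"))  -- False, in constant time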
40
debian/changelog
vendored
40
debian/changelog
vendored
|
@@ -1,3 +1,43 @@
git-annex (5.20140320) unstable; urgency=medium

  * Fix zombie leak and general inefficiency when copying files to a
    local git repo.
  * Fix ssh connection caching stop method to work with openssh 6.5p1,
    which broke the old method.
  * webapp: Added a "Sync now" item to each repository's menu.
  * webapp: Use securemem for constant time auth token comparisons.
  * copy --fast --to remote: Avoid printing anything for files that
    are already believed to be present on the remote.
  * Commands that allow specifying which repository to act on using
    the repository's description will now fail when multiple repositories
    match, rather than picking a repository at random.
    (So will --in=)
  * Better workaround for problem umasks when eg, setting up ssh keys.
  * "standard" can now be used as a first-class keyword in preferred content
    expressions. For example "standard or (include=otherdir/*)"
  * groupwanted can be used in preferred content expressions.
  * vicfg: Allows editing preferred content expressions for groups.
  * Improve behavior when unable to parse a preferred content expression
    (thanks, ion).
  * metadata: Add --get
  * metadata: Support --key option (and some other ones like --all)
  * For each metadata field, there's now an automatically maintained
    "$field-lastchanged" that gives the date of the last change to that
    field. Also the "lastchanged" field for the date of the last change
    to any of a file's metadata.
  * unused: In direct mode, files that are deleted from the work tree
    and so have no content present are no longer incorrectly detected as
    unused.
  * Avoid encoding errors when using the unused log file.
  * map: Fix crash when one of the remotes of a repo is a local directory
    that does not exist, or is not a git repo.
  * repair: Improve memory usage when git fsck finds a great many broken
    objects.
  * Windows: Fix some filename encoding bugs.
  * rsync special remote: Fix slashes when used on Windows.

 -- Joey Hess <joeyh@debian.org>  Thu, 20 Mar 2014 13:21:12 -0400

git-annex (5.20140306~bpo70+1) wheezy-backports; urgency=high

  * Updating backport to newest release.
3 debian/control (vendored)
@@ -38,6 +38,9 @@ Build-Depends:
 libghc-warp-tls-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
 libghc-wai-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
 libghc-wai-logger-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
+libghc-securemem-dev,
+libghc-byteable-dev,
+libghc-dns-dev,
 libghc-case-insensitive-dev,
 libghc-http-types-dev,
 libghc-blaze-builder-dev,
@@ -1,4 +1,4 @@
-[[!comment format=txt
+[[!comment format=mdwn
 username="http://yarikoptic.myopenid.com/"
 nickname="site-myopenid"
 subject="Does it require the device to be rooted?"
@@ -0,0 +1,14 @@
[[!comment format=mdwn
 username="severo"
 ip="88.182.182.135"
 subject="git-assistant and transfer repository"
 date="2014-03-16T17:05:43Z"
 content="""
In your comment http://git-annex.branchable.com/assistant/remote_sharing_walkthrough/#comment-f97efe1d05c0101232684b4e4edc4866, you describe a way to synchronize two devices using an intermediate USB drive configured as a \"transfer repository\".

I understand that in that case, the USB drive can only be used as a \"transmitter\", in git repository form, not as a copy of the file structure. This means the files contained on the USB drive cannot be accessed without git/git-annex.

Is there a way to use the USB drive as a \"client repository\", allowing synchronization as described earlier, but also serving as a plain copy of the files, so that they can be accessed from any device (opened with Windows in a cyber café, for example)?

Thanks
"""]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.154"
 subject="comment 7"
 date="2014-03-17T19:50:48Z"
 content="""
@severo the web app does not support setting up that use case. However, you can make a non-bare clone of your repository onto a removable drive, and if you do, the assistant will use it just the same as if you'd set up a removable drive using the webapp. Note that you will need to run `git annex sync` inside that repository in order to update the tree it displays.
"""]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="severo"
 ip="95.152.107.168"
 subject="comment 8"
 date="2014-03-18T10:06:50Z"
 content="""
Thanks @joeyh.name for your answer. Do you think this feature could be integrated into the git-annex assistant?
"""]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="severo"
 ip="95.152.107.168"
 subject="comment 9"
 date="2014-03-18T11:16:19Z"
 content="""
Some explanations in French on how to do it: http://seenthis.net/messages/237648#message238202
"""]]
@@ -1,4 +1,4 @@
-[[!comment format=txt
+[[!comment format=mdwn
 username="https://www.google.com/accounts/o8/id?id=AItOawnJTqmRu1YCKS2Hsm4vtOflLhP4fU-k98w"
 nickname="Ahmed"
 subject="Customise conflict resolution behaviour"
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="https://www.google.com/accounts/o8/id?id=AItOawn3p4i4lk_zMilvjnJ9sS6g2nerpgz0Fjc"
 nickname="Matthias"
 subject="Use automatic merge without syncing"
 date="2014-03-20T10:03:41Z"
 content="""
Is there a possibility to use the automatic merge logic without using \"git annex sync\"? I don't want to have the \"synced\" branches, but the auto-conflict-resolution is very nice.
"""]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.102"
 subject="comment 3"
 date="2014-03-20T16:10:10Z"
 content="""
@Matthias `git annex merge` will do what you want, as long as you have git-annex 4.20130709 or newer.
"""]]
@@ -11,3 +11,6 @@ In order to handle the fact that the directory where pictures are saved is not c
In the log, there are many "too many open files" errors like these:

	git:createProcess: runInteractiveProcess: pipe: resource exhausted (Too many open files)

[[!tag moreinfo]]
[[!meta title="too many open files on android"]]
@@ -0,0 +1,13 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="108.236.230.124"
 subject="comment 10"
 date="2014-03-10T17:34:56Z"
 content="""
I've found the 1 second delay on failure to accept in the warp source code.

It's using Network.Socket.accept, which uses accept4 with NONBLOCK by default, but can be built without `HAVE_ACCEPT4`, in which case it falls back to a blocking `accept`.

I've put in a patch to build network without accept4 support, and am rebuilding the arm autobuilder. This will take a while..
"""]]
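As a side note on the failure mode described in that comment: an accept loop that catches the exception and retries after a fixed pause is exactly what produces a once-per-second stream of log messages when accept(2) keeps failing. A minimal sketch of that retry pattern (hypothetical names; this is not warp's actual source):

[[!format haskell """
import Control.Concurrent (threadDelay)
import Control.Exception (IOException, try)
import Network.Socket (SockAddr, Socket, accept)

-- Accept connections forever; if accept fails, log the error and
-- retry after one second, yielding one log line per second for as
-- long as the underlying problem persists.
acceptLoop :: Socket -> ((Socket, SockAddr) -> IO ()) -> IO ()
acceptLoop lsock serve = do
    r <- try (accept lsock) :: IO (Either IOException (Socket, SockAddr))
    case r of
        Left e -> do
            putStrLn ("accept: " ++ show e)
            threadDelay 1000000 -- one second, in microseconds
        Right conn -> serve conn
    acceptLoop lsock serve
"""]]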
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.154"
 subject="comment 11"
 date="2014-03-11T03:02:21Z"
 content="""
Autobuild is now updated with the accept fix.
"""]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.102"
 subject="ping?"
 date="2014-03-19T20:29:12Z"
 content="""
Could either greg or Schnouki please test with the current arm autobuild and see if you can connect to the webapp?
"""]]
@@ -0,0 +1,10 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.146"
 subject="comment 5"
 date="2014-03-06T18:12:57Z"
 content="""
Again, the accept message does not seem to be related to dbus. A dbus client has no reason to do that; a web server does. The use of `O_NONBLOCK` with accept4 seems likely to be the culprit to me.

How frequently is dbus mentioned in the log?
"""]]
@@ -0,0 +1,10 @@
[[!comment format=mdwn
 username="http://schnouki.net/"
 nickname="Schnouki"
 subject="comment 6"
 date="2014-03-07T08:52:12Z"
 content="""
Agreed, the dbus and accept messages are probably unrelated. I just commented here because it's the same bug I'm encountering.

The dbus message only appears once in the log (shortly after startup). The accept message appears every second.
"""]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.146"
 subject="comment 7"
 date="2014-03-07T17:03:30Z"
 content="""
Are you sure that the accept message happens every second? I don't see why the webapp would continue to try to bind a socket if it failed, with a 1 second delay. (It does try 100 times if it fails, per [[!commit fe3009d83b08563875856152034e7c59a0c6ecca]], before ending with \"unable to bind to local socket\".)
"""]]
@@ -0,0 +1,10 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.146"
 subject="comment 8"
 date="2014-03-07T17:16:41Z"
 content="""
greg has confirmed that he can connect to the webapp, but it never replies to http requests. So, this could be the port being bound, but the accept failing.

I don't know why it would retry the accept once per second, but this could be something in warp or the network library.
"""]]
@@ -0,0 +1,12 @@
[[!comment format=mdwn
 username="http://schnouki.net/"
 nickname="Schnouki"
 subject="comment 9"
 date="2014-03-09T16:29:26Z"
 content="""
I did some more testing today.

I have this message when using either git-annex assistant or git-annex webapp. When running the webapp, I can connect to its port, but there's no response from git-annex (either from a browser or when using telnet to send a simple \"GET / HTTP/1.0\").

The accept message comes every second, the dbus one every minute (didn't test long enough last time, sorry about that).
"""]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.146"
 subject="comment 5"
 date="2014-03-06T18:14:37Z"
 content="""
auto-repair is only done if git fsck detects a problem. You can run git fsck yourself to see.
"""]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="https://www.google.com/accounts/o8/id?id=AItOawnX1msQxnLoSeu7q-i-c9BWghonsN7Qmns"
 nickname="Jan Ulrich"
 subject="comment 6"
 date="2014-03-10T14:14:06Z"
 content="""
I manually ran git fsck without problems but git-annex still wants to repair something.
"""]]
@@ -35,3 +35,6 @@ What this tells me is that any changes that occur whilst I am not networked are
git-annex version: 5.20131130-gc25be33


> This was fixed in 5.20140127; the assistant now does a daily sweep of
> unused files to move them to backup repositories when possible. [[done]]
> --[[Joey]]
20 doc/bugs/Bug_Report_doesn__39__t_work.mdwn (new file)
@@ -0,0 +1,20 @@
### Please describe the problem.
Bug Report doesn't work

### What steps will reproduce the problem?


### What version of git-annex are you using? On what operating system?


### Please provide any additional information below.

[[!format sh """
# If you can, paste a complete transcript of the problem occurring here.
# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log


# End of transcript or log.
"""]]

[[fixed|done]] --[[Joey]]
@@ -19,3 +19,4 @@ I'm using 9e57edff287ac53fc4b1cefef7271e9ed17f2285 (Fri Feb 22 15:19:25 2013 +00
Ubuntu 12.10 x86_64

[[!tag /design/assistant]]
[[!meta title="assistant should set up non-bare repos on removable drives, and update them when syncing with them"]]
@@ -31,3 +31,6 @@ I noticed the problem yesterday afternoon (Thu 24 Oct).

# End of transcript or log.
"""]]

> [[moreinfo]]; either I don't have enough information to work on this,
> or it might have just been user error. --[[Joey]]
@@ -22,3 +22,5 @@ fatal: Could not read from remote repository.
Please make sure you have the correct access rights
and the repository exists.
"""]]

[[!meta title="xmpp syncing sometimes fails"]]
@@ -4,3 +4,5 @@ I did a "git annex add" of a bunch of files on a storage server with low IOPS, a
failed

How is that even possible, when the server is doing nothing else?

[[!tag moreinfo]]
@@ -0,0 +1,28 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.102"
 subject="moreinfo"
 date="2014-03-19T20:49:47Z"
 content="""
What I don't understand about this is, how does `open` fail due to a file being locked? This is Linux, it doesn't have mandatory locking that I know of, and git-annex certainly doesn't use such a thing.

I really need a way to reproduce this and/or a strace. As it is, I've never seen this reported by anyone else and don't understand the failure mode at all.

The relevant part of the code seems to be here:

[[!format haskell \"\"\"
setJournalFile :: JournalLocked -> FilePath -> String -> Annex ()
setJournalFile _jl file content = do
	tmp <- fromRepo gitAnnexTmpMiscDir
	createAnnexDirectory =<< fromRepo gitAnnexJournalDir
	createAnnexDirectory tmp
	-- journal file is written atomically
	jfile <- fromRepo $ journalFile file
	let tmpfile = tmp </> takeFileName jfile
	liftIO $ do
		writeBinaryFile tmpfile content
		moveFile tmpfile jfile
\"\"\"]]

While there is some fcntl locking going on, it locks a special sentinel file, not the file it's writing to.
"""]]
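For readers unfamiliar with the locking scheme mentioned at the end of that comment: the advisory fcntl lock is taken on a dedicated sentinel file, so the journal files themselves are only ever opened as ordinary files. A minimal sketch of the pattern, assuming POSIX and the `unix` package (`withJournalLock` and its argument are illustrative names, not git-annex's actual API):

[[!format haskell """
import Control.Exception (bracket)
import System.Posix.IO

-- Hold an exclusive advisory fcntl lock on a sentinel file while
-- running an action; closing the file descriptor releases the lock.
-- Only the sentinel is locked, so open() on the files written under
-- the lock can never fail because of it.
withJournalLock :: FilePath -> IO a -> IO a
withJournalLock sentinel action = bracket acquire closeFd (const action)
  where
    acquire = do
        fd <- openFd sentinel ReadWrite (Just 0o600) defaultFileFlags
        waitToSetLock fd (WriteLock, AbsoluteSeek, 0, 0)
        return fd
"""]]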
@@ -19,3 +19,5 @@ This is with git-annex installed on the remote server; without it the process ge

### What version of git-annex are you using? On what operating system?
Latest nightly build on ubuntu 13.10

[[!tag moreinfo]]
@@ -68,3 +68,5 @@ Mac OS X Mountain Lion. git-annex files are from within the downloadable git-ann


Thanks for your help :)

> This is a duplicate of [[Git_annexed_files_symlink_are_wrong_when_submodule_is_not_in_the_same_path]] [[done]] --[[Joey]]
12 doc/bugs/Mac_OS_X_Build_doesn__39__t_include_webapp.mdwn (new file)
@@ -0,0 +1,12 @@
Latest build for Mac OS X (both autobuild and release versions) does not contain the webapp.

git-annex version info on OS X:

	git-annex version: 5.20140306-g309a73c
	build flags: Assistant Pairing Testsuite S3 WebDAV FsEvents XMPP DNS Feeds Quvi TDFA CryptoHash
	key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
	remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier hook external

whereas on my Linux box the build flags include Webapp. On OS X, when I run git annex webapp it does nothing, just prints the help info.

> [[fixed|done]] --[[Joey]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.146"
 subject="comment 1"
 date="2014-03-07T15:17:16Z"
 content="""
I've fixed the missing warp-tls dep on the autobuilder and updated the builds.
"""]]
@@ -62,3 +62,6 @@ My .gitconfig is as follows:
> to a more recent version of git. done --[[Joey]]
>> Reopened, because the Linux autobuilds have been downgraded to Debian
>> stable and have this problem again. --[[Joey]]

>>> Closing again! Autobuilders all run unstable and will have a current
>>> git. [[done]] --[[Joey]]
@@ -228,3 +228,13 @@ Everything up-to-date
"""]]

Well, I see that thing about "failed to lock". I can imagine that my 'killall git-annex' to kill a leftover process that was hanging around after I'd done git-annex assistant --stop might have left stale lock files somewhere... but of course I only got as far as doing that because I was already encountering problems, just trying to return to the webapp.

> The original bug report seems to be a case of user confusion,
> and not a bug. (Although perhaps the UI is confusing?)
>
> The "resource exhausted" that came up later is quite likely the problem
> fixed in [[!commit 4d06037fdd44ba38fcd4c118d1e6330f06e22366]],
> which affected local git remotes.
>
> [[closing|done]]; I don't see any value in keeping this open, I'm afraid.
> --[[Joey]]
@@ -366,3 +366,5 @@ Here is the crash report osx creates

# End of transcript or log.
"""]]

> Apparently this is [[fixed|done]] in the latest release. --[[Joey]]
@@ -0,0 +1,8 @@
[[!comment format=mdwn
 username="https://www.google.com/accounts/o8/id?id=AItOawkLdR1fuu5aEz3s9VKTBKVMize_SmeNRJM"
 nickname="David"
 subject="Seems to be working now"
 date="2014-03-12T02:36:59Z"
 content="""
Just tried again with the most recent OS X release build and it appears to be working without crashing. Not sure what else you did, but thanks!
"""]]
Some files were not shown because too many files have changed in this diff.