Merge branch 'master' of git://git-annex.branchable.com
commit e0078c3882
356 changed files with 6147 additions and 963 deletions
Annex.hs (25 lines changed)

@@ -10,7 +10,6 @@
 module Annex (
     Annex,
     AnnexState(..),
-    PreferredContentMap,
     new,
     run,
     eval,
@@ -60,7 +59,8 @@ import Types.FileMatcher
 import Types.NumCopies
 import Types.LockPool
 import Types.MetaData
-import qualified Utility.Matcher
+import Types.DesktopNotify
+import Types.CleanupActions
 import qualified Data.Map as M
 import qualified Data.Set as S
 import Utility.Quvi (QuviVersion)
@@ -79,9 +79,6 @@ newtype Annex a = Annex { runAnnex :: ReaderT (MVar AnnexState) IO a }
         Applicative
     )

-type Matcher a = Either [Utility.Matcher.Token a] (Utility.Matcher.Matcher a)
-type PreferredContentMap = M.Map UUID (Utility.Matcher.Matcher (S.Set UUID -> MatchInfo -> Annex Bool))
-
 -- internal state storage
 data AnnexState = AnnexState
     { repo :: Git.Repo
@@ -102,9 +99,10 @@ data AnnexState = AnnexState
     , forcebackend :: Maybe String
     , globalnumcopies :: Maybe NumCopies
     , forcenumcopies :: Maybe NumCopies
-    , limit :: Matcher (MatchInfo -> Annex Bool)
+    , limit :: ExpandableMatcher Annex
     , uuidmap :: Maybe UUIDMap
-    , preferredcontentmap :: Maybe PreferredContentMap
+    , preferredcontentmap :: Maybe (FileMatcherMap Annex)
+    , requiredcontentmap :: Maybe (FileMatcherMap Annex)
     , shared :: Maybe SharedRepository
     , forcetrust :: TrustMap
     , trustmap :: Maybe TrustMap
@@ -114,13 +112,14 @@ data AnnexState = AnnexState
     , flags :: M.Map String Bool
     , fields :: M.Map String String
     , modmeta :: [ModMeta]
-    , cleanup :: M.Map String (Annex ())
+    , cleanup :: M.Map CleanupAction (Annex ())
     , inodeschanged :: Maybe Bool
    , useragent :: Maybe String
     , errcounter :: Integer
     , unusedkeys :: Maybe (S.Set Key)
     , quviversion :: Maybe QuviVersion
     , existinghooks :: M.Map Git.Hook.Hook Bool
+    , desktopnotify :: DesktopNotify
     }

 newState :: GitConfig -> Git.Repo -> AnnexState
@@ -143,9 +142,10 @@ newState c r = AnnexState
     , forcebackend = Nothing
     , globalnumcopies = Nothing
     , forcenumcopies = Nothing
-    , limit = Left []
+    , limit = BuildingMatcher []
     , uuidmap = Nothing
     , preferredcontentmap = Nothing
+    , requiredcontentmap = Nothing
     , shared = Nothing
     , forcetrust = M.empty
     , trustmap = Nothing
@@ -162,6 +162,7 @@ newState c r = AnnexState
     , unusedkeys = Nothing
     , quviversion = Nothing
     , existinghooks = M.empty
+    , desktopnotify = mempty
     }

 {- Makes an Annex state object for the specified git repo.
@@ -210,9 +211,9 @@ setField field value = changeState $ \s ->
     s { fields = M.insertWith' const field value $ fields s }

 {- Adds a cleanup action to perform. -}
-addCleanup :: String -> Annex () -> Annex ()
-addCleanup uid a = changeState $ \s ->
-    s { cleanup = M.insertWith' const uid a $ cleanup s }
+addCleanup :: CleanupAction -> Annex () -> Annex ()
+addCleanup k a = changeState $ \s ->
+    s { cleanup = M.insertWith' const k a $ cleanup s }

 {- Sets the type of output to emit. -}
 setOutput :: OutputType -> Annex ()
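The new limit type above, ExpandableMatcher, is defined elsewhere in this merge and not shown on this page. Judging only from BuildingMatcher [] taking over from Left [], a plausible sketch of its shape (the second constructor's name is a guess, not the actual definition):

    data ExpandableMatcher m
        = BuildingMatcher [Utility.Matcher.Token (MatchInfo -> m Bool)]
        | CompleteMatcher (Utility.Matcher.Matcher (MatchInfo -> m Bool))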
@@ -32,8 +32,12 @@ getTransitionCalculator ForgetDeadRemotes = Just dropDead

 dropDead :: FilePath -> String -> TrustMap -> FileTransition
 dropDead f content trustmap = case getLogVariety f of
-    Just UUIDBasedLog -> ChangeFile $
-        UUIDBased.showLog id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLog Just content
+    Just UUIDBasedLog
+        -- Don't remove the dead repo from the trust log,
+        -- because git remotes may still exist, and they need
+        -- to still know it's dead.
+        | f == trustLog -> PreserveFile
+        | otherwise -> ChangeFile $ UUIDBased.showLog id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLog Just content
     Just NewUUIDBasedLog -> ChangeFile $
         UUIDBased.showLogNew id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLogNew Just content
     Just (PresenceLog _) ->
@@ -80,7 +80,7 @@ catKey = catKey' True
 catKey' :: Bool -> Ref -> FileMode -> Annex (Maybe Key)
 catKey' modeguaranteed ref mode
     | isSymLink mode = do
-        l <- fromInternalGitPath . encodeW8 . L.unpack <$> get
+        l <- fromInternalGitPath . decodeBS <$> get
         return $ if isLinkToAnnex l
             then fileKey $ takeFileName l
             else Nothing
@@ -13,7 +13,6 @@ import Common.Annex
 import Limit
 import Utility.Matcher
 import Types.Group
-import Types.Limit
 import Logs.Group
 import Logs.Remote
 import Annex.UUID
@@ -25,12 +24,10 @@ import Types.Remote (RemoteConfig)
 import Data.Either
 import qualified Data.Set as S

-type FileMatcher = Matcher MatchFiles
-
-checkFileMatcher :: FileMatcher -> FilePath -> Annex Bool
+checkFileMatcher :: (FileMatcher Annex) -> FilePath -> Annex Bool
 checkFileMatcher matcher file = checkMatcher matcher Nothing (Just file) S.empty True

-checkMatcher :: FileMatcher -> Maybe Key -> AssociatedFile -> AssumeNotPresent -> Bool -> Annex Bool
+checkMatcher :: (FileMatcher Annex) -> Maybe Key -> AssociatedFile -> AssumeNotPresent -> Bool -> Annex Bool
 checkMatcher matcher mkey afile notpresent def
     | isEmpty matcher = return def
     | otherwise = case (mkey, afile) of
@@ -48,31 +45,35 @@ fileMatchInfo file = do
     , relFile = file
     }

-matchAll :: FileMatcher
+matchAll :: FileMatcher Annex
 matchAll = generate []

-parsedToMatcher :: [Either String (Token MatchFiles)] -> Either String FileMatcher
+parsedToMatcher :: [Either String (Token (MatchFiles Annex))] -> Either String (FileMatcher Annex)
 parsedToMatcher parsed = case partitionEithers parsed of
     ([], vs) -> Right $ generate vs
     (es, _) -> Left $ unwords $ map ("Parse failure: " ++) es

-exprParser :: GroupMap -> M.Map UUID RemoteConfig -> Maybe UUID -> String -> [Either String (Token MatchFiles)]
-exprParser groupmap configmap mu expr =
+exprParser :: FileMatcher Annex -> FileMatcher Annex -> GroupMap -> M.Map UUID RemoteConfig -> Maybe UUID -> String -> [Either String (Token (MatchFiles Annex))]
+exprParser matchstandard matchgroupwanted groupmap configmap mu expr =
     map parse $ tokenizeMatcher expr
   where
     parse = parseToken
+        matchstandard
+        matchgroupwanted
         (limitPresent mu)
         (limitInDir preferreddir)
         groupmap
     preferreddir = fromMaybe "public" $
         M.lookup "preferreddir" =<< (`M.lookup` configmap) =<< mu

-parseToken :: MkLimit -> MkLimit -> GroupMap -> String -> Either String (Token MatchFiles)
-parseToken checkpresent checkpreferreddir groupmap t
+parseToken :: FileMatcher Annex -> FileMatcher Annex -> MkLimit Annex -> MkLimit Annex -> GroupMap -> String -> Either String (Token (MatchFiles Annex))
+parseToken matchstandard matchgroupwanted checkpresent checkpreferreddir groupmap t
     | t `elem` tokens = Right $ token t
+    | t == "standard" = call matchstandard
+    | t == "groupwanted" = call matchgroupwanted
     | t == "present" = use checkpresent
     | t == "inpreferreddir" = use checkpreferreddir
-    | t == "unused" = Right (Operation limitUnused)
+    | t == "unused" = Right $ Operation limitUnused
     | otherwise = maybe (Left $ "near " ++ show t) use $ M.lookup k $
         M.fromList
             [ ("include", limitInclude)
@@ -89,6 +90,8 @@ parseToken checkpresent checkpreferreddir groupmap t
   where
     (k, v) = separate (== '=') t
     use a = Operation <$> a v
+    call sub = Right $ Operation $ \notpresent mi ->
+        matchMrun sub $ \a -> a notpresent mi

 {- This is really dumb tokenization; there's no support for quoted values.
 - Open and close parens are always treated as standalone tokens;
@@ -100,7 +103,7 @@ tokenizeMatcher = filter (not . null ) . concatMap splitparens . words

 {- Generates a matcher for files large enough (or meeting other criteria)
 - to be added to the annex, rather than directly to git. -}
-largeFilesMatcher :: Annex FileMatcher
+largeFilesMatcher :: Annex (FileMatcher Annex)
 largeFilesMatcher = go =<< annexLargeFiles <$> Annex.getGitConfig
   where
     go Nothing = return matchAll
@@ -109,5 +112,5 @@ largeFilesMatcher = go =<< annexLargeFiles <$> Annex.getGitConfig
         rc <- readRemoteLog
         u <- getUUID
         either badexpr return $
-            parsedToMatcher $ exprParser gm rc (Just u) expr
+            parsedToMatcher $ exprParser matchAll matchAll gm rc (Just u) expr
     badexpr e = error $ "bad annex.largefiles configuration: " ++ e
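The new standard and groupwanted tokens let one matcher expression expand into a prebuilt matcher (call runs the sub-matcher via matchMrun). An illustrative preferred-content expression using the new keywords alongside existing ones from this file (the glob itself is made up):

    standard or (include=*.iso and not inpreferreddir)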
@@ -5,11 +5,15 @@
 - Licensed under the GNU GPL version 3 or higher.
 -}

-module Annex.MetaData where
+module Annex.MetaData (
+    genMetaData,
+    module X
+) where

 import Common.Annex
 import qualified Annex
-import Types.MetaData
+import Types.MetaData as X
+import Annex.MetaData.StandardFields as X
 import Logs.MetaData
 import Annex.CatFile

@@ -19,15 +23,6 @@ import Data.Time.Calendar
 import Data.Time.Clock
 import Data.Time.Clock.POSIX

-tagMetaField :: MetaField
-tagMetaField = mkMetaFieldUnchecked "tag"
-
-yearMetaField :: MetaField
-yearMetaField = mkMetaFieldUnchecked "year"
-
-monthMetaField :: MetaField
-monthMetaField = mkMetaFieldUnchecked "month"
-
 {- Adds metadata for a file that has just been ingested into the
 - annex, but has not yet been committed to git.
 -
Annex/MetaData/StandardFields.hs (new file, 47 lines)

@@ -0,0 +1,47 @@
+{- git-annex metadata, standard fields
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Annex.MetaData.StandardFields (
+    tagMetaField,
+    yearMetaField,
+    monthMetaField,
+    lastChangedField,
+    mkLastChangedField,
+    isLastChangedField
+) where
+
+import Types.MetaData
+
+import Data.List
+
+tagMetaField :: MetaField
+tagMetaField = mkMetaFieldUnchecked "tag"
+
+yearMetaField :: MetaField
+yearMetaField = mkMetaFieldUnchecked "year"
+
+monthMetaField :: MetaField
+monthMetaField = mkMetaFieldUnchecked "month"
+
+lastChangedField :: MetaField
+lastChangedField = mkMetaFieldUnchecked lastchanged
+
+mkLastChangedField :: MetaField -> MetaField
+mkLastChangedField f = mkMetaFieldUnchecked (fromMetaField f ++ lastchangedSuffix)
+
+isLastChangedField :: MetaField -> Bool
+isLastChangedField f
+    | f == lastChangedField = True
+    | otherwise = lastchanged `isSuffixOf` s && s /= lastchangedSuffix
+  where
+    s = fromMetaField f
+
+lastchanged :: String
+lastchanged = "lastchanged"
+
+lastchangedSuffix :: String
+lastchangedSuffix = "-lastchanged"
Annex/Notification.hs (new file, 81 lines)

@@ -0,0 +1,81 @@
+{- git-annex desktop notifications
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE CPP #-}
+
+module Annex.Notification where
+
+import Common.Annex
+import Logs.Transfer
+#ifdef WITH_DBUS_NOTIFICATIONS
+import qualified Annex
+import Types.DesktopNotify
+import qualified DBus.Notify as Notify
+import qualified DBus.Client
+#endif
+
+-- Witness that notification has happened.
+data NotifyWitness = NotifyWitness
+
+{- Wrap around an action that performs a transfer, which may run multiple
+ - attempts. Displays notification when supported and when the user asked
+ - for it. -}
+notifyTransfer :: Direction -> Maybe FilePath -> (NotifyWitness -> Annex Bool) -> Annex Bool
+notifyTransfer _ Nothing a = a NotifyWitness
+#ifdef WITH_DBUS_NOTIFICATIONS
+notifyTransfer direction (Just f) a = do
+    wanted <- Annex.getState Annex.desktopnotify
+    let action = if direction == Upload then "uploading" else "downloading"
+    let basedesc = action ++ " " ++ f
+    let startdesc = "started " ++ basedesc
+    let enddesc ok = if ok
+        then "finished " ++ basedesc
+        else basedesc ++ " failed"
+    if (notifyStart wanted || notifyFinish wanted)
+        then do
+            client <- liftIO DBus.Client.connectSession
+            startnotification <- liftIO $ if notifyStart wanted
+                then Just <$> Notify.notify client (mkNote startdesc)
+                else pure Nothing
+            ok <- a NotifyWitness
+            when (notifyFinish wanted) $ liftIO $ void $ maybe
+                (Notify.notify client $ mkNote $ enddesc ok)
+                (\n -> Notify.replace client n $ mkNote $ enddesc ok)
+                startnotification
+            return ok
+        else a NotifyWitness
+#else
+notifyTransfer _ (Just _) a = do a NotifyWitness
+#endif
+
+notifyDrop :: Maybe FilePath -> Bool -> Annex ()
+notifyDrop Nothing _ = noop
+#ifdef WITH_DBUS_NOTIFICATIONS
+notifyDrop (Just f) ok = do
+    wanted <- Annex.getState Annex.desktopnotify
+    when (notifyFinish wanted) $ liftIO $ do
+        client <- DBus.Client.connectSession
+        let msg = if ok
+            then "dropped " ++ f
+            else "failed to drop" ++ f
+        void $ Notify.notify client (mkNote msg)
+#else
+notifyDrop (Just _) _ = noop
+#endif
+
+#ifdef WITH_DBUS_NOTIFICATIONS
+mkNote :: String -> Notify.Note
+mkNote desc = Notify.blankNote
+    { Notify.appName = "git-annex"
+    , Notify.body = Just $ Notify.Text desc
+    , Notify.hints =
+        [ Notify.Category Notify.Transfer
+        , Notify.Urgency Notify.Low
+        , Notify.SuppressSound True
+        ]
+    }
+#endif
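NotifyWitness is a witness value: the transfer runners added in Annex/Transfer.hs below demand one, so a transfer can only be started from inside notifyTransfer. A hypothetical caller (the trivial action stands in for a real transfer):

    sendFile :: UUID -> Key -> FilePath -> Annex Bool
    sendFile u key f = notifyTransfer Upload (Just f) $
        upload u key (Just f) noRetry (\_meter -> return True)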
@@ -9,7 +9,6 @@

 module Annex.Ssh (
     sshCachingOptions,
-    sshCleanup,
     sshCacheDir,
     sshReadPort,
 ) where
@@ -24,6 +23,7 @@ import qualified Build.SysConfig as SysConfig
 import qualified Annex
 import Config
 import Utility.Env
+import Types.CleanupActions
 #ifndef mingw32_HOST_OS
 import Annex.Perms
 #endif
@@ -31,7 +31,9 @@ import Annex.Perms
 {- Generates parameters to ssh to a given host (or user@host) on a given
 - port, with connection caching. -}
 sshCachingOptions :: (String, Maybe Integer) -> [CommandParam] -> Annex [CommandParam]
-sshCachingOptions (host, port) opts = go =<< sshInfo (host, port)
+sshCachingOptions (host, port) opts = do
+    Annex.addCleanup SshCachingCleanup sshCleanup
+    go =<< sshInfo (host, port)
   where
     go (Nothing, params) = ret params
     go (Just socketfile, params) = do
@@ -144,8 +146,9 @@ sshCleanup = go =<< sshCacheDir
         withQuietOutput createProcessSuccess $
             (proc "ssh" $ toCommand $
                 [ Params "-O stop"
-                ] ++ params ++ [Param "any"])
+                ] ++ params ++ [Param "localhost"])
                 { cwd = Just dir }
+        liftIO $ nukeFile socketfile
         -- Cannot remove the lock file; other processes may
         -- be waiting on our exclusive lock to use it.
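Types.CleanupActions is not shown on this page; from its uses (SshCachingCleanup above, the M.Map CleanupAction (Annex ()) field in Annex.hs), a minimal sketch would be:

    -- sketch only; the real type may have more constructors
    data CleanupAction = SshCachingCleanup
        deriving (Eq, Ord)

Since addCleanup inserts with M.insertWith' const under this key, registering the ssh cleanup on every sshCachingOptions call is idempotent: one map entry, run once at shutdown.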
Annex/Transfer.hs (new file, 131 lines)

@@ -0,0 +1,131 @@
+{- git-annex transfers
+ -
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE CPP #-}
+
+module Annex.Transfer (
+    module X,
+    upload,
+    download,
+    runTransfer,
+    noRetry,
+    forwardRetry,
+) where
+
+import Common.Annex
+import Logs.Transfer as X
+import Annex.Notification as X
+import Annex.Perms
+import Annex.Exception
+import Utility.Metered
+#ifdef mingw32_HOST_OS
+import Utility.WinLock
+#endif
+
+import Control.Concurrent
+
+upload :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> NotifyWitness -> Annex Bool
+upload u key f d a _witness = runTransfer (Transfer Upload u key) f d a
+
+download :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> NotifyWitness -> Annex Bool
+download u key f d a _witness = runTransfer (Transfer Download u key) f d a
+
+{- Runs a transfer action. Creates and locks the lock file while the
+ - action is running, and stores info in the transfer information
+ - file.
+ -
+ - If the transfer action returns False, the transfer info is
+ - left in the failedTransferDir.
+ -
+ - If the transfer is already in progress, returns False.
+ -
+ - An upload can be run from a read-only filesystem, and in this case
+ - no transfer information or lock file is used.
+ -}
+runTransfer :: Transfer -> Maybe FilePath -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
+runTransfer t file shouldretry a = do
+    info <- liftIO $ startTransferInfo file
+    (meter, tfile, metervar) <- mkProgressUpdater t info
+    mode <- annexFileMode
+    (fd, inprogress) <- liftIO $ prep tfile mode info
+    if inprogress
+        then do
+            showNote "transfer already in progress"
+            return False
+        else do
+            ok <- retry info metervar $
+                bracketIO (return fd) (cleanup tfile) (const $ a meter)
+            unless ok $ recordFailedTransfer t info
+            return ok
+  where
+#ifndef mingw32_HOST_OS
+    prep tfile mode info = do
+        mfd <- catchMaybeIO $
+            openFd (transferLockFile tfile) ReadWrite (Just mode)
+                defaultFileFlags { trunc = True }
+        case mfd of
+            Nothing -> return (Nothing, False)
+            Just fd -> do
+                locked <- catchMaybeIO $
+                    setLock fd (WriteLock, AbsoluteSeek, 0, 0)
+                if isNothing locked
+                    then return (Nothing, True)
+                    else do
+                        void $ tryIO $ writeTransferInfoFile info tfile
+                        return (mfd, False)
+#else
+    prep tfile _mode info = do
+        v <- catchMaybeIO $ lockExclusive (transferLockFile tfile)
+        case v of
+            Nothing -> return (Nothing, False)
+            Just Nothing -> return (Nothing, True)
+            Just (Just lockhandle) -> do
+                void $ tryIO $ writeTransferInfoFile info tfile
+                return (Just lockhandle, False)
+#endif
+    cleanup _ Nothing = noop
+    cleanup tfile (Just lockhandle) = do
+        void $ tryIO $ removeFile tfile
+#ifndef mingw32_HOST_OS
+        void $ tryIO $ removeFile $ transferLockFile tfile
+        closeFd lockhandle
+#else
+        {- Windows cannot delete the lockfile until the lock
+         - is closed. So it's possible to race with another
+         - process that takes the lock before it's removed,
+         - so ignore failure to remove.
+         -}
+        dropLock lockhandle
+        void $ tryIO $ removeFile $ transferLockFile tfile
+#endif
+    retry oldinfo metervar run = do
+        v <- tryAnnex run
+        case v of
+            Right b -> return b
+            Left _ -> do
+                b <- getbytescomplete metervar
+                let newinfo = oldinfo { bytesComplete = Just b }
+                if shouldretry oldinfo newinfo
+                    then retry newinfo metervar run
+                    else return False
+    getbytescomplete metervar
+        | transferDirection t == Upload =
+            liftIO $ readMVar metervar
+        | otherwise = do
+            f <- fromRepo $ gitAnnexTmpObjectLocation (transferKey t)
+            liftIO $ catchDefaultIO 0 $
+                fromIntegral . fileSize <$> getFileStatus f
+
+type RetryDecider = TransferInfo -> TransferInfo -> Bool
+
+noRetry :: RetryDecider
+noRetry _ _ = False
+
+{- Retries a transfer when it fails, as long as the failed transfer managed
+ - to send some data. -}
+forwardRetry :: RetryDecider
+forwardRetry old new = bytesComplete old < bytesComplete new
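RetryDecider reduces retry policy to a pure function of the before/after TransferInfo. An illustrative custom decider in the same style as forwardRetry (not part of the commit), retrying only while an attempt still moves at least a mebibyte:

    megabyteRetry :: RetryDecider
    megabyteRetry old new = case (bytesComplete old, bytesComplete new) of
        (Just o, Just n) -> n >= o + 1048576
        _ -> False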
@@ -14,7 +14,6 @@ import Utility.Tense
 import qualified Data.Text as T
 import Data.Text (Text)
 import qualified Data.Map as M
-import Data.Monoid

 {- This is as many alerts as it makes sense to display at a time.
 - A display might be smaller, or larger, the point is to not overwhelm the
@@ -35,11 +35,14 @@ standaloneAppBase = getEnv "GIT_ANNEX_APP_BASE"
 -
 - Note that this is done every time it's started, so if the user moves
 - it around, the paths this sets up won't break.
+ -
+ - Nautilus hook script installation is done even for packaged apps,
+ - since it has to go into the user's home directory.
 -}
 ensureInstalled :: IO ()
 ensureInstalled = go =<< standaloneAppBase
   where
-    go Nothing = noop
+    go Nothing = installNautilus "git-annex"
     go (Just base) = do
         let program = base </> "git-annex"
         programfile <- programFile
@@ -78,6 +81,32 @@ ensureInstalled = go =<< standaloneAppBase
     viaTmp writeFile shim content
     modifyFileMode shim $ addModes [ownerExecuteMode]

+    installNautilus program
+
+installNautilus :: FilePath -> IO ()
+#ifdef linux_HOST_OS
+installNautilus program = do
+    scriptdir <- (\d -> d </> "nautilus" </> "scripts") <$> userDataDir
+    genscript scriptdir "get"
+    genscript scriptdir "drop"
+  where
+    genscript scriptdir action =
+        installscript (scriptdir </> scriptname action) $ unlines
+            [ shebang_local
+            , autoaddedcomment
+            , "exec " ++ program ++ " " ++ action ++ " --notify-start --notify-finish -- \"$@\""
+            ]
+    scriptname action = "git-annex " ++ action
+    installscript f c = whenM (safetoinstallscript f) $ do
+        writeFile f c
+        modifyFileMode f $ addModes [ownerExecuteMode]
+    safetoinstallscript f = catchDefaultIO True $
+        elem autoaddedcomment . lines <$> readFileStrict f
+    autoaddedcomment = "# Automatically added by git-annex, do not edit. (To disable, chmod 600 this file.)"
+#else
+installNautilus _ = noop
+#endif
+
 {- Returns a cleaned up environment that lacks settings used to make the
 - standalone builds use their bundled libraries and programs.
 - Useful when calling programs not included in the standalone builds.
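For reference, a generated Nautilus script would look roughly like this, assuming shebang_local expands to the usual /bin/sh shebang (its value is defined elsewhere; the install path is illustrative):

    #!/bin/sh
    # Automatically added by git-annex, do not edit. (To disable, chmod 600 this file.)
    exec /path/to/git-annex get --notify-start --notify-finish -- "$@"

safetoinstallscript only overwrites a script that still carries the marker comment, so user-edited scripts are left alone.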
@@ -21,7 +21,7 @@ installMenu command menufile iconsrcdir icondir = do
     writeDesktopMenuFile (fdoDesktopMenu command) menufile
     installIcon (iconsrcdir </> "logo.svg") $
         iconFilePath (iconBaseName ++ ".svg") "scalable" icondir
-    installIcon (iconsrcdir </> "favicon.png") $
+    installIcon (iconsrcdir </> "logo_16x16.png") $
         iconFilePath (iconBaseName ++ ".png") "16x16" icondir
 #endif
@@ -197,7 +197,7 @@ authorizedKeysLine gitannexshellonly dir pubkey
 - long perl script. -}
     | otherwise = pubkey
   where
-    limitcommand = "command=\"GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty "
+    limitcommand = "command=\"env GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty "

 {- Generates a ssh key pair. -}
 genSshKeyPair :: IO SshKeyPair
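The added env matters because sshd runs the forced command through the account's login shell, and the VAR=value command prefix is POSIX-shell syntax that shells such as csh or fish reject; env sets the variable portably. The resulting authorized_keys line looks like (directory and key illustrative):

    command="env GIT_ANNEX_SHELL_DIRECTORY=annex ~/.ssh/git-annex-shell",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty ssh-rsa AAAA...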
@@ -62,15 +62,17 @@ configFilesActions =
     , (groupLog, void $ liftAnnex groupMapLoad)
     , (numcopiesLog, void $ liftAnnex globalNumCopiesLoad)
     , (scheduleLog, void updateScheduleLog)
-    -- Preferred content settings depend on most of the other configs,
-    -- so will be reloaded whenever any configs change.
+    -- Preferred and required content settings depend on most of the
+    -- other configs, so will be reloaded whenever any configs change.
     , (preferredContentLog, noop)
+    , (requiredContentLog, noop)
+    , (groupPreferredContentLog, noop)
     ]

 reloadConfigs :: Configs -> Assistant ()
 reloadConfigs changedconfigs = do
     sequence_ as
-    void $ liftAnnex preferredContentMapLoad
+    void $ liftAnnex preferredRequiredMapsLoad
     {- Changes to the remote log, or the trust log, can affect the
     - syncRemotes list. Changes to the uuid log may affect its
     - display so are also included. -}
@@ -35,6 +35,7 @@ import Annex.CatFile
 import Annex.CheckIgnore
 import Annex.Link
 import Annex.FileMatcher
+import Types.FileMatcher
 import Annex.ReplaceFile
 import Git.Types
 import Config
@@ -196,7 +197,7 @@ runHandler handler file filestatus = void $ do
     | otherwise = f

 {- Small files are added to git as-is, while large ones go into the annex. -}
-add :: FileMatcher -> FilePath -> Assistant (Maybe Change)
+add :: FileMatcher Annex -> FilePath -> Assistant (Maybe Change)
 add bigfilematcher file = ifM (liftAnnex $ checkFileMatcher bigfilematcher file)
     ( pendingAddChange file
     , do
@@ -205,7 +206,7 @@ add bigfilematcher file = ifM (liftAnnex $ checkFileMatcher bigfilematcher file)
         madeChange file AddFileChange
     )

-onAdd :: FileMatcher -> Handler
+onAdd :: FileMatcher Annex -> Handler
 onAdd matcher file filestatus
     | maybe False isRegularFile filestatus =
         unlessIgnored file $
@@ -218,7 +219,7 @@ shouldRestage ds = scanComplete ds || forceRestage ds
 {- In direct mode, add events are received for both new files, and
 - modified existing files.
 -}
-onAddDirect :: Bool -> FileMatcher -> Handler
+onAddDirect :: Bool -> FileMatcher Annex -> Handler
 onAddDirect symlinkssupported matcher file fs = do
     v <- liftAnnex $ catKeyFile file
     case (v, fs) of
@@ -73,7 +73,7 @@ webAppThread assistantdata urlrenderer noannex cannotrun postfirstrun listenhost
 #endif
     webapp <- WebApp
         <$> pure assistantdata
-        <*> (pack <$> genRandomToken)
+        <*> genAuthToken
        <*> getreldir
         <*> pure staticRoutes
         <*> pure postfirstrun
@@ -125,9 +125,13 @@ myUrl tlssettings webapp addr = unpack $ yesodRender webapp urlbase DashboardR []

 getTlsSettings :: Annex (Maybe TLS.TLSSettings)
 getTlsSettings = do
+#ifdef WITH_WEBAPP_SECURE
     cert <- fromRepo gitAnnexWebCertificate
     privkey <- fromRepo gitAnnexWebPrivKey
     ifM (liftIO $ allM doesFileExist [cert, privkey])
         ( return $ Just $ TLS.tlsSettings cert privkey
         , return Nothing
         )
+#else
+    return Nothing
+#endif
@@ -14,6 +14,7 @@ import Assistant.WebApp.Types
 import Assistant.Common
 import Utility.NotificationBroadcaster
 import Utility.Yesod
+import Utility.WebApp

 import Data.Text (Text)
 import Control.Concurrent
@@ -36,7 +37,7 @@ newNotifier getbroadcaster = liftAssistant $ do
 webAppFormAuthToken :: Widget
 webAppFormAuthToken = do
     webapp <- liftH getYesod
-    [whamlet|<input type="hidden" name="auth" value="#{secretToken webapp}">|]
+    [whamlet|<input type="hidden" name="auth" value="#{fromAuthToken (authToken webapp)}">|]

 {- A button with an icon, and maybe label or tooltip, that can be
 - clicked to perform some action.
@@ -22,6 +22,7 @@ import Assistant.DaemonStatus
 import Assistant.Types.Buddies
 import Utility.NotificationBroadcaster
 import Utility.Yesod
+import Utility.WebApp

 import Data.Text (Text)
 import qualified Data.Text as T
@@ -64,7 +65,7 @@ notifierUrl route broadcaster = do
         [ "/"
         , T.intercalate "/" urlbits
         , "?auth="
-        , secretToken webapp
+        , fromAuthToken (authToken webapp)
         ]

 getNotifierTransfersR :: Handler RepPlain
@@ -41,7 +41,7 @@ mkYesodData "WebApp" $(parseRoutesFile "Assistant/WebApp/routes")

 data WebApp = WebApp
     { assistantData :: AssistantData
-    , secretToken :: Text
+    , authToken :: AuthToken
     , relDir :: Maybe FilePath
     , getStatic :: Static
     , postFirstRun :: Maybe (IO String)
@@ -52,11 +52,11 @@ data WebApp = WebApp

 instance Yesod WebApp where
     {- Require an auth token be set when accessing any (non-static) route -}
-    isAuthorized _ _ = checkAuthToken secretToken
+    isAuthorized _ _ = checkAuthToken authToken

     {- Add the auth token to every url generated, except static subsite
     - urls (which can show up in Permission Denied pages). -}
-    joinPath = insertAuthToken secretToken excludeStatic
+    joinPath = insertAuthToken authToken excludeStatic
       where
        excludeStatic [] = True
         excludeStatic (p:_) = p /= "static"
@@ -67,7 +67,7 @@ uninstaller :: FilePath
 uninstaller = "git-annex-uninstall.exe"

 gitInstallDir :: Exp FilePath
-gitInstallDir = fromString "$PROGRAMFILES\\Git\\cmd"
+gitInstallDir = fromString "$PROGRAMFILES\\Git\\bin"

 startMenuItem :: Exp FilePath
 startMenuItem = "$SMPROGRAMS/git-annex.lnk"
@@ -22,8 +22,8 @@ buildFlags = filter (not . null)
 #else
 #warning Building without the webapp. You probably need to install Yesod..
 #endif
-#ifdef WITH_WEBAPP_HTTPS
-    , "Webapp-https"
+#ifdef WITH_WEBAPP_SECURE
+    , "Webapp-secure"
 #endif
 #ifdef WITH_PAIRING
     , "Pairing"
@@ -57,6 +57,9 @@ buildFlags = filter (not . null)
 #ifdef WITH_DBUS
     , "DBus"
 #endif
+#ifdef WITH_DESKTOP_NOTIFY
+    , "DesktopNotify"
+#endif
 #ifdef WITH_XMPP
     , "XMPP"
 #else
@@ -26,7 +26,6 @@ import qualified Annex
 import qualified Git
 import qualified Git.AutoCorrect
 import Annex.Content
-import Annex.Ssh
 import Annex.Environment
 import Command
 import Types.Messages
@@ -107,4 +106,3 @@ shutdown nocommit = do
     saveState nocommit
     sequence_ =<< M.elems <$> Annex.getState Annex.cleanup
     liftIO reapZombies -- zombies from long-running git processes
-    sshCleanup -- ssh connection caching
@@ -20,6 +20,7 @@ import System.Console.GetOpt
 import Common.Annex
 import qualified Annex
 import Types.Messages
+import Types.DesktopNotify
 import Limit
 import CmdLine.Usage

@@ -41,6 +42,10 @@ commonOptions =
         "don't show debug messages"
     , Option ['b'] ["backend"] (ReqArg setforcebackend paramName)
         "specify key-value backend to use"
+    , Option [] ["notify-finish"] (NoArg (setdesktopnotify mkNotifyFinish))
+        "show desktop notification after transfer finishes"
+    , Option [] ["notify-start"] (NoArg (setdesktopnotify mkNotifyStart))
+        "show desktop notification after transfer completes"
     ]
   where
     setforce v = Annex.changeState $ \s -> s { Annex.force = v }
@@ -49,6 +54,7 @@ commonOptions =
     setforcebackend v = Annex.changeState $ \s -> s { Annex.forcebackend = Just v }
     setdebug = Annex.changeGitConfig $ \c -> c { annexDebug = True }
     unsetdebug = Annex.changeGitConfig $ \c -> c { annexDebug = False }
+    setdesktopnotify v = Annex.changeState $ \s -> s { Annex.desktopnotify = Annex.desktopnotify s <> v }

 matcherOptions :: [Option]
 matcherOptions =
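Because these ride along in commonOptions, any transfer command can request notifications, e.g.:

    git annex get --notify-start --notify-finish somefile.iso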
@@ -30,14 +30,15 @@ withFilesInGit :: (FilePath -> CommandStart) -> CommandSeek
 withFilesInGit a params = seekActions $ prepFiltered a $
     seekHelper LsFiles.inRepo params

-withFilesNotInGit :: (FilePath -> CommandStart) -> CommandSeek
-withFilesNotInGit a params = do
-    {- dotfiles are not acted on unless explicitly listed -}
-    files <- filter (not . dotfile) <$>
-        seekunless (null ps && not (null params)) ps
-    dotfiles <- seekunless (null dotps) dotps
-    seekActions $ prepFiltered a $
-        return $ concat $ segmentPaths params (files++dotfiles)
+withFilesNotInGit :: Bool -> (FilePath -> CommandStart) -> CommandSeek
+withFilesNotInGit skipdotfiles a params
+    | skipdotfiles = do
+        {- dotfiles are not acted on unless explicitly listed -}
+        files <- filter (not . dotfile) <$>
+            seekunless (null ps && not (null params)) ps
+        dotfiles <- seekunless (null dotps) dotps
+        go (files++dotfiles)
+    | otherwise = go =<< seekunless False params
   where
     (dotps, ps) = partition dotfile params
     seekunless True _ = return []
@@ -45,6 +46,8 @@ withFilesNotInGit a params = do
     force <- Annex.getState Annex.force
     g <- gitRepo
     liftIO $ Git.Command.leaveZombie <$> LsFiles.notInRepo force l g
+    go l = seekActions $ prepFiltered a $
+        return $ concat $ segmentPaths params l

 withPathContents :: ((FilePath, FilePath) -> CommandStart) -> CommandSeek
 withPathContents a params = seekActions $
@@ -73,6 +73,8 @@ paramNumRange :: String
 paramNumRange = "NUM|RANGE"
 paramRemote :: String
 paramRemote = "REMOTE"
+paramField :: String
+paramField = "FIELD"
 paramGlob :: String
 paramGlob = "GLOB"
 paramName :: String
@@ -34,8 +34,12 @@ import Annex.ReplaceFile
 import Utility.Tmp

 def :: [Command]
-def = [notBareRepo $ command "add" paramPaths seek SectionCommon
-    "add files to annex"]
+def = [notBareRepo $ withOptions [includeDotFilesOption] $
+    command "add" paramPaths seek SectionCommon
+        "add files to annex"]
+
+includeDotFilesOption :: Option
+includeDotFilesOption = flagOption [] "include-dotfiles" "don't skip dotfiles"

 {- Add acts on both files not checked into git yet, and unlocked files.
 -
@@ -47,7 +51,8 @@ seek ps = do
         ( start file
         , stop
         )
-    go withFilesNotInGit
+    skipdotfiles <- not <$> Annex.getFlag (optionName includeDotFilesOption)
+    go $ withFilesNotInGit skipdotfiles
     ifM isDirect
         ( go withFilesMaybeModified
         , go withFilesUnlocked
@@ -93,12 +98,15 @@ start file = ifAnnexed file addpresent add
 - Lockdown can fail if a file gets deleted, and Nothing will be returned.
 -}
 lockDown :: FilePath -> Annex (Maybe KeySource)
-lockDown file = ifM crippledFileSystem
-    ( liftIO $ catchMaybeIO nohardlink
-    , do
+lockDown = either (\e -> showErr e >> return Nothing) (return . Just) <=< lockDown'
+
+lockDown' :: FilePath -> Annex (Either IOException KeySource)
+lockDown' file = ifM crippledFileSystem
+    ( liftIO $ tryIO nohardlink
+    , tryAnnexIO $ do
         tmp <- fromRepo gitAnnexTmpMiscDir
         createAnnexDirectory tmp
-        eitherToMaybe <$> tryAnnexIO (go tmp)
+        go tmp
     )
   where
     {- In indirect mode, the write bit is removed from the file as part
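Usage of the new option (dotfiles are otherwise skipped unless listed explicitly):

    git annex add --include-dotfiles .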
@@ -26,7 +26,7 @@ import Types.KeySource
 import Config
 import Annex.Content.Direct
 import Logs.Location
-import qualified Logs.Transfer as Transfer
+import qualified Annex.Transfer as Transfer
 #ifdef WITH_QUVI
 import Annex.Quvi
 import qualified Utility.Quvi as Quvi
@@ -116,9 +116,10 @@ addUrlFileQuvi relaxed quviurl videourl file = do
     prepGetViaTmpChecked sizedkey $ do
         tmp <- fromRepo $ gitAnnexTmpObjectLocation key
         showOutput
-        ok <- Transfer.download webUUID key (Just file) Transfer.forwardRetry $ const $ do
-            liftIO $ createDirectoryIfMissing True (parentDir tmp)
-            downloadUrl [videourl] tmp
+        ok <- Transfer.notifyTransfer Transfer.Download (Just file) $
+            Transfer.download webUUID key (Just file) Transfer.forwardRetry $ const $ do
+                liftIO $ createDirectoryIfMissing True (parentDir tmp)
+                downloadUrl [videourl] tmp
         if ok
             then cleanup quviurl file key (Just tmp)
             else return False
@@ -133,17 +134,20 @@ perform relaxed url file = ifAnnexed file addurl geturl
     | relaxed = do
         setUrlPresent key url
         next $ return True
-    | otherwise = do
-        (exists, samesize) <- Url.withUrlOptions $ Url.check url (keySize key)
-        if exists && samesize
-            then do
-                setUrlPresent key url
-                next $ return True
-            else do
-                warning $ if exists
-                    then "url does not have expected file size (use --relaxed to bypass this check) " ++ url
-                    else "failed to verify url exists: " ++ url
-                stop
+    | otherwise = ifM (elem url <$> getUrls key)
+        ( stop
+        , do
+            (exists, samesize) <- Url.withUrlOptions $ Url.check url (keySize key)
+            if exists && samesize
+                then do
+                    setUrlPresent key url
+                    next $ return True
+                else do
+                    warning $ "while adding a new url to an already annexed file, " ++ if exists
+                        then "url does not have expected file size (use --relaxed to bypass this check) " ++ url
+                        else "failed to verify url exists: " ++ url
+                    stop
+        )

 addUrlFile :: Bool -> URLString -> FilePath -> Annex Bool
 addUrlFile relaxed url file = do
@@ -179,7 +183,7 @@ download url file = do
     , return False
     )
   where
-    runtransfer dummykey tmp =
+    runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
         Transfer.download webUUID dummykey (Just file) Transfer.forwardRetry $ const $ do
             liftIO $ createDirectoryIfMissing True (parentDir tmp)
             downloadUrl [url] tmp
@@ -1,6 +1,6 @@
 {- git-annex command
 -
- - Copyright 2010 Joey Hess <joey@kitenet.net>
+ - Copyright 2010-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
@@ -10,6 +10,8 @@ module Command.ConfigList where
 import Common.Annex
 import Command
 import Annex.UUID
+import Annex.Init
+import qualified Annex.Branch
 import qualified Git.Config
 import Remote.GCrypt (coreGCryptId)

@@ -22,9 +24,23 @@ seek = withNothing start

 start :: CommandStart
 start = do
-    u <- getUUID
+    u <- findOrGenUUID
     showConfig "annex.uuid" $ fromUUID u
     showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
     stop
   where
     showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
+
+{- The repository may not yet have a UUID; automatically initialize it
+ - when there's a git-annex branch available. -}
+findOrGenUUID :: Annex UUID
+findOrGenUUID = do
+    u <- getUUID
+    if u /= NoUUID
+        then return u
+        else ifM Annex.Branch.hasSibling
+            ( do
+                initialize Nothing
+                getUUID
+            , return NoUUID
+            )
@@ -14,9 +14,13 @@ import qualified Annex
 import Annex.UUID
 import Logs.Location
 import Logs.Trust
+import Logs.PreferredContent
 import Config.NumCopies
 import Annex.Content
 import Annex.Wanted
+import Annex.Notification
+
+import qualified Data.Set as S
 
 def :: [Command]
 def = [withOptions [dropFromOption] $ command "drop" paramPaths seek
@@ -44,27 +48,34 @@ start from file (key, _) = checkDropAuto from file key $ \numcopies ->
 startLocal :: AssociatedFile -> NumCopies -> Key -> Maybe Remote -> CommandStart
 startLocal afile numcopies key knownpresentremote = stopUnless (inAnnex key) $ do
 	showStart' "drop" key afile
-	next $ performLocal key numcopies knownpresentremote
+	next $ performLocal key afile numcopies knownpresentremote
 
 startRemote :: AssociatedFile -> NumCopies -> Key -> Remote -> CommandStart
 startRemote afile numcopies key remote = do
 	showStart' ("drop " ++ Remote.name remote) key afile
-	next $ performRemote key numcopies remote
+	next $ performRemote key afile numcopies remote
 
-performLocal :: Key -> NumCopies -> Maybe Remote -> CommandPerform
-performLocal key numcopies knownpresentremote = lockContent key $ do
+performLocal :: Key -> AssociatedFile -> NumCopies -> Maybe Remote -> CommandPerform
+performLocal key afile numcopies knownpresentremote = lockContent key $ do
 	(remotes, trusteduuids) <- Remote.keyPossibilitiesTrusted key
 	let trusteduuids' = case knownpresentremote of
 		Nothing -> trusteduuids
 		Just r -> nub (Remote.uuid r:trusteduuids)
 	untrusteduuids <- trustGet UnTrusted
 	let tocheck = Remote.remotesWithoutUUID remotes (trusteduuids'++untrusteduuids)
-	stopUnless (canDropKey key numcopies trusteduuids' tocheck []) $ do
-		removeAnnex key
-		next $ cleanupLocal key
+	u <- getUUID
+	ifM (canDrop u key afile numcopies trusteduuids' tocheck [])
+		( do
+			removeAnnex key
+			notifyDrop afile True
+			next $ cleanupLocal key
+		, do
+			notifyDrop afile False
+			stop
+		)
 
-performRemote :: Key -> NumCopies -> Remote -> CommandPerform
-performRemote key numcopies remote = lockContent key $ do
+performRemote :: Key -> AssociatedFile -> NumCopies -> Remote -> CommandPerform
+performRemote key afile numcopies remote = lockContent key $ do
 	-- Filter the remote it's being dropped from out of the lists of
 	-- places assumed to have the key, and places to check.
 	-- When the local repo has the key, that's one additional copy.
@@ -76,7 +87,7 @@ performRemote key numcopies remote = lockContent key $ do
 	untrusteduuids <- trustGet UnTrusted
 	let tocheck = filter (/= remote) $
 		Remote.remotesWithoutUUID remotes (have++untrusteduuids)
-	stopUnless (canDropKey key numcopies have tocheck [uuid]) $ do
+	stopUnless (canDrop uuid key afile numcopies have tocheck [uuid]) $ do
 		ok <- Remote.removeKey remote key
 		next $ cleanupRemote key remote ok
   where
@@ -95,13 +106,19 @@ cleanupRemote key remote ok = do
 
 {- Checks specified remotes to verify that enough copies of a key exist to
  - allow it to be safely removed (with no data loss). Can be provided with
- - some locations where the key is known/assumed to be present. -}
-canDropKey :: Key -> NumCopies -> [UUID] -> [Remote] -> [UUID] -> Annex Bool
-canDropKey key numcopies have check skip = do
-	force <- Annex.getState Annex.force
-	if force || numcopies == NumCopies 0
-		then return True
-		else findCopies key numcopies skip have check
+ - some locations where the key is known/assumed to be present.
+ -
+ - Also checks if it's required content, and refuses to drop if so.
+ -
+ - --force overrides and always allows dropping.
+ -}
+canDrop :: UUID -> Key -> AssociatedFile -> NumCopies -> [UUID] -> [Remote] -> [UUID] -> Annex Bool
+canDrop dropfrom key afile numcopies have check skip = ifM (Annex.getState Annex.force)
+	( return True
+	, checkRequiredContent dropfrom key afile
+		<&&>
+	  findCopies key numcopies skip have check
+	)
 
 findCopies :: Key -> NumCopies -> [UUID] -> [UUID] -> [Remote] -> Annex Bool
 findCopies key need skip = helper [] []
@@ -137,6 +154,19 @@ notEnoughCopies key need have skip bad = do
 	unsafe = showNote "unsafe"
 	hint = showLongNote "(Use --force to override this check, or adjust numcopies.)"
 
+checkRequiredContent :: UUID -> Key -> AssociatedFile -> Annex Bool
+checkRequiredContent u k afile =
+	ifM (isRequiredContent (Just u) S.empty (Just k) afile False)
+		( requiredContent
+		, return True
+		)
+
+requiredContent :: Annex Bool
+requiredContent = do
+	showLongNote "That file is required content, it cannot be dropped!"
+	showLongNote "(Use --force to override this check, or adjust required content configuration.)"
+	return False
+
 {- In auto mode, only runs the action if there are enough
  - copies on other semitrusted repositories. -}
 checkDropAuto :: Maybe Remote -> FilePath -> Key -> (NumCopies -> CommandStart) -> CommandStart
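canDrop chains its two checks with `<&&>`, a short-circuiting monadic AND: when checkRequiredContent answers False, findCopies never runs. The same family includes `<||>`, which the get command further below uses to fall through a list of remotes. A sketch of plausible definitions (git-annex ships its own versions; these are assumptions for illustration):

-- Short-circuiting monadic AND and OR (assumed definitions).
(<&&>) :: Monad m => m Bool -> m Bool -> m Bool
a <&&> b = a >>= \ok -> if ok then b else return False

(<||>) :: Monad m => m Bool -> m Bool -> m Bool
a <||> b = a >>= \ok -> if ok then return True else b

main :: IO ()
main = do
	r <- (putStrLn "required-content check" >> return False)
		<&&> (putStrLn "never reached" >> return True)
	print r -- False; only the first action ran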
@@ -34,8 +34,8 @@ perform numcopies key = maybe droplocal dropremote =<< Remote.byNameWithUUID =<<
   where
 	dropremote r = do
 		showAction $ "from " ++ Remote.name r
-		Command.Drop.performRemote key numcopies r
-	droplocal = Command.Drop.performLocal key numcopies Nothing
+		Command.Drop.performRemote key Nothing numcopies r
+	droplocal = Command.Drop.performLocal key Nothing numcopies Nothing
 	from = Annex.getField $ optionName Command.Drop.dropFromOption
 
 performOther :: (Key -> Git.Repo -> FilePath) -> Key -> CommandPerform
@@ -29,6 +29,7 @@ import Utility.DataUnits
 import Utility.FileMode
 import Config
 import Types.Key
+import Types.CleanupActions
 import Utility.HumanTime
 import Git.FilePath
 import Utility.PID
@@ -93,7 +94,7 @@ getIncremental = do
 
 	checkschedule Nothing = error "bad --incremental-schedule value"
 	checkschedule (Just delta) = do
-		Annex.addCleanup "" $ do
+		Annex.addCleanup FsckCleanup $ do
 			v <- getStartTime
 			case v of
 				Nothing -> noop
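Replacing the stringly-typed `Annex.addCleanup ""` key with a CleanupAction constructor gives each registered cleanup a distinct, comparable identity. A sketch of the idea using a standalone map of cleanup actions (the FsckCleanup name comes from the diff; the surrounding types are simplified assumptions):

import qualified Data.Map as M

-- A closed set of cleanup slots instead of ad-hoc strings.
data CleanupAction = FsckCleanup | OtherCleanup
	deriving (Eq, Ord, Show)

-- Registering under the same key twice overwrites rather than silently
-- accumulating duplicates, which free-form string keys made easy to get wrong.
addCleanup :: CleanupAction -> IO () -> M.Map CleanupAction (IO ()) -> M.Map CleanupAction (IO ())
addCleanup = M.insert

runCleanup :: M.Map CleanupAction (IO ()) -> IO ()
runCleanup = sequence_ . M.elems

main :: IO ()
main = runCleanup $ addCleanup FsckCleanup (putStrLn "record fsck start time") M.empty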
@@ -11,7 +11,7 @@ import Common.Annex
 import Command
 import qualified Remote
 import Annex.Content
-import Logs.Transfer
+import Annex.Transfer
 import Config.NumCopies
 import Annex.Wanted
 import qualified Command.Move
@@ -69,15 +69,15 @@ getKeyFile' key afile dest = dispatch
 		showNote "not available"
 		showlocs
 		return False
-	dispatch remotes = trycopy remotes remotes
-	trycopy full [] = do
+	dispatch remotes = notifyTransfer Download afile $ trycopy remotes remotes
+	trycopy full [] _ = do
 		Remote.showTriedRemotes full
 		showlocs
 		return False
-	trycopy full (r:rs) =
+	trycopy full (r:rs) witness =
 		ifM (probablyPresent r)
-			( docopy r (trycopy full rs)
-			, trycopy full rs
+			( docopy r witness <||> trycopy full rs witness
+			, trycopy full rs witness
 			)
 	showlocs = Remote.showLocations key []
 		"No other repository is known to contain the file."
@@ -87,8 +87,6 @@ getKeyFile' key afile dest = dispatch
 		| Remote.hasKeyCheap r =
 			either (const False) id <$> Remote.hasKey r key
 		| otherwise = return True
-	docopy r continue = do
-		ok <- download (Remote.uuid r) key afile noRetry $ \p -> do
-			showAction $ "from " ++ Remote.name r
-			Remote.retrieveKeyFile r key afile dest p
-		if ok then return ok else continue
+	docopy r = download (Remote.uuid r) key afile noRetry $ \p -> do
+		showAction $ "from " ++ Remote.name r
+		Remote.retrieveKeyFile r key afile dest p
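After the rewrite, docopy no longer threads an explicit continuation: the whole attempt, including the progress callback `p` handed down to retrieveKeyFile, is one download action that the `<||>` chain above can fall through. A reduced sketch of that shape, with hypothetical stand-ins for the transfer machinery (MeterUpdate here is a simplification, not git-annex's real type):

-- Hypothetical stand-ins: a progress callback and a transfer runner.
type MeterUpdate = Integer -> IO ()

-- Runs a transfer action, supplying it a progress callback.
download :: (MeterUpdate -> IO Bool) -> IO Bool
download transfer = transfer (\n -> putStrLn ("bytes so far: " ++ show n))

-- One attempt against one remote; returning False lets the caller
-- fall through to the next remote in the chain.
docopy :: String -> IO Bool
docopy remotename = download $ \p -> do
	putStrLn ("from " ++ remotename)
	p 1024
	return True

main :: IO ()
main = docopy "origin" >>= print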
@@ -38,7 +38,7 @@ seek ps = do
 
 getList :: Annex [(UUID, RemoteName, TrustLevel)]
 getList = ifM (Annex.getFlag $ optionName allrepos)
-	( nubBy ((==) `on` fst3) <$> ((++) <$> getRemotes <*> getAll)
+	( nubBy ((==) `on` fst3) <$> ((++) <$> getRemotes <*> getAllUUIDs)
 	, getRemotes
 	)
   where
@@ -48,7 +48,7 @@ getList = ifM (Annex.getFlag $ optionName allrepos)
 		hereu <- getUUID
 		heretrust <- lookupTrust hereu
 		return $ (hereu, "here", heretrust) : zip3 (map uuid rs) (map name rs) ts
-	getAll = do
+	getAllUUIDs = do
 		rs <- M.toList <$> uuidMap
 		rs3 <- forM rs $ \(u, n) -> (,,)
 			<$> pure u
@@ -158,7 +158,8 @@ absRepo reference r
 	| Git.repoIsUrl r = return r
 	| otherwise = liftIO $ do
 		r' <- Git.Construct.fromAbsPath =<< absPath (Git.repoPath r)
-		flip Annex.eval Annex.gitRepo =<< Annex.new r'
+		r'' <- safely $ flip Annex.eval Annex.gitRepo =<< Annex.new r'
+		return (fromMaybe r' r'')
 
 {- Checks if two repos are the same. -}
 same :: Git.Repo -> Git.Repo -> Bool
@@ -192,14 +193,9 @@ tryScan :: Git.Repo -> Annex (Maybe Git.Repo)
 tryScan r
 	| Git.repoIsSsh r = sshscan
 	| Git.repoIsUrl r = return Nothing
-	| otherwise = safely $ Git.Config.read r
+	| otherwise = liftIO $ safely $ Git.Config.read r
   where
-	safely a = do
-		result <- liftIO (try a :: IO (Either SomeException Git.Repo))
-		case result of
-			Left _ -> return Nothing
-			Right r' -> return $ Just r'
-	pipedconfig cmd params = safely $
+	pipedconfig cmd params = liftIO $ safely $
 		withHandle StdoutHandle createProcessSuccess p $
 			Git.Config.hRead r
 	  where
@@ -247,3 +243,10 @@ combineSame = map snd . nubBy sameuuid . map pair
   where
 	sameuuid (u1, _) (u2, _) = u1 == u2 && u1 /= NoUUID
 	pair r = (getUncachedUUID r, r)
+
+safely :: IO Git.Repo -> IO (Maybe Git.Repo)
+safely a = do
+	result <- try a :: IO (Either SomeException Git.Repo)
+	case result of
+		Left _ -> return Nothing
+		Right r' -> return $ Just r'
@@ -12,16 +12,24 @@ import qualified Annex
 import Command
 import Annex.MetaData
 import Logs.MetaData
-import Types.MetaData
 
 import qualified Data.Set as S
 import Data.Time.Clock.POSIX
 
 def :: [Command]
-def = [withOptions [setOption, tagOption, untagOption, jsonOption] $
+def = [withOptions metaDataOptions $
 	command "metadata" paramPaths seek
 	SectionMetaData "sets metadata of a file"]
 
+metaDataOptions :: [Option]
+metaDataOptions =
+	[ setOption
+	, tagOption
+	, untagOption
+	, getOption
+	, jsonOption
+	] ++ keyOptions
+
 storeModMeta :: ModMeta -> Annex ()
 storeModMeta modmeta = Annex.changeState $
 	\s -> s { Annex.modmeta = modmeta:Annex.modmeta s }
@@ -31,6 +39,9 @@ setOption = Option ['s'] ["set"] (ReqArg mkmod "FIELD[+-]=VALUE") "set metadata"
   where
 	mkmod = either error storeModMeta . parseModMeta
 
+getOption :: Option
+getOption = fieldOption ['g'] "get" paramField "get single metadata field"
+
 tagOption :: Option
 tagOption = Option ['t'] ["tag"] (ReqArg mkmod "TAG") "set a tag"
   where
@@ -44,19 +55,35 @@ untagOption = Option ['u'] ["untag"] (ReqArg mkmod "TAG") "remove a tag"
 seek :: CommandSeek
 seek ps = do
 	modmeta <- Annex.getState Annex.modmeta
+	getfield <- getOptionField getOption $ \ms ->
+		return $ either error id . mkMetaField <$> ms
 	now <- liftIO getPOSIXTime
-	withFilesInGit (whenAnnexed $ start now modmeta) ps
+	withKeyOptions
+		(startKeys now getfield modmeta)
+		(withFilesInGit (whenAnnexed $ start now getfield modmeta))
+		ps
 
-start :: POSIXTime -> [ModMeta] -> FilePath -> (Key, Backend) -> CommandStart
-start now ms file (k, _) = do
-	showStart "metadata" file
+start :: POSIXTime -> Maybe MetaField -> [ModMeta] -> FilePath -> (Key, Backend) -> CommandStart
+start now f ms file (k, _) = start' (Just file) now f ms k
+
+startKeys :: POSIXTime -> Maybe MetaField -> [ModMeta] -> Key -> CommandStart
+startKeys = start' Nothing
+
+start' :: AssociatedFile -> POSIXTime -> Maybe MetaField -> [ModMeta] -> Key -> CommandStart
+start' afile now Nothing ms k = do
+	showStart' "metadata" k afile
 	next $ perform now ms k
+start' _ _ (Just f) _ k = do
+	l <- S.toList . currentMetaDataValues f <$> getCurrentMetaData k
+	liftIO $ forM_ l $
+		putStrLn . fromMetaValue
+	stop
 
 perform :: POSIXTime -> [ModMeta] -> Key -> CommandPerform
 perform _ [] k = next $ cleanup k
 perform now ms k = do
 	oldm <- getCurrentMetaData k
-	let m = foldl' unionMetaData emptyMetaData $ map (modMeta oldm) ms
+	let m = combineMetaData $ map (modMeta oldm) ms
 	addMetaData' k m now
 	next $ cleanup k
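With getOption wired into seek, the command can now print a single field's current values instead of setting metadata. Going by the option definitions in this hunk, usage would look something like:

# set a field, then read just that field back
git annex metadata --set author=Alice file
git annex metadata --get author file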
@@ -14,8 +14,8 @@ import qualified Annex
 import Annex.Content
 import qualified Remote
 import Annex.UUID
+import Annex.Transfer
 import Logs.Presence
-import Logs.Transfer
 
 def :: [Command]
 def = [withOptions moveOptions $ command "move" paramPaths seek
@@ -69,28 +69,38 @@ toStart dest move afile key = do
 	ishere <- inAnnex key
 	if not ishere || u == Remote.uuid dest
 		then stop -- not here, so nothing to do
-		else do
-			showMoveAction move key afile
-			next $ toPerform dest move key afile
+		else toStart' dest move afile key
 
-toPerform :: Remote -> Bool -> Key -> AssociatedFile -> CommandPerform
-toPerform dest move key afile = moveLock move key $ do
-	-- Checking the remote is expensive, so not done in the start step.
-	-- In fast mode, location tracking is assumed to be correct,
-	-- and an explicit check is not done, when copying. When moving,
-	-- it has to be done, to avoid inaverdent data loss.
+toStart' :: Remote -> Bool -> AssociatedFile -> Key -> CommandStart
+toStart' dest move afile key = do
 	fast <- Annex.getState Annex.fast
-	let fastcheck = fast && not move && not (Remote.hasKeyCheap dest)
-	isthere <- if fastcheck
-		then Right <$> expectedpresent
-		else Remote.hasKey dest key
+	if fast && not move && not (Remote.hasKeyCheap dest)
+		then ifM (expectedPresent dest key)
+			( stop
+			, go True (pure $ Right False)
+			)
+		else go False (Remote.hasKey dest key)
+  where
+	go fastcheck isthere = do
+		showMoveAction move key afile
+		next $ toPerform dest move key afile fastcheck =<< isthere
+
+expectedPresent :: Remote -> Key -> Annex Bool
+expectedPresent dest key = do
+	remotes <- Remote.keyPossibilities key
+	return $ dest `elem` remotes
+
+toPerform :: Remote -> Bool -> Key -> AssociatedFile -> Bool -> Either String Bool -> CommandPerform
+toPerform dest move key afile fastcheck isthere = moveLock move key $
 	case isthere of
 		Left err -> do
 			showNote err
 			stop
 		Right False -> do
 			showAction $ "to " ++ Remote.name dest
-			ok <- upload (Remote.uuid dest) key afile noRetry $
-				Remote.storeKey dest key afile
+			ok <- notifyTransfer Upload afile $
+				upload (Remote.uuid dest) key afile noRetry $
+					Remote.storeKey dest key afile
 			if ok
 				then do
 					Remote.logStatus dest key InfoPresent
@@ -100,7 +110,7 @@ toPerform dest move key afile = moveLock move key $ do
 				warning "This could have failed because --fast is enabled."
 				stop
 		Right True -> do
-			unlessM expectedpresent $
+			unlessM (expectedPresent dest key) $
 				Remote.logStatus dest key InfoPresent
 			finish
   where
@@ -109,9 +119,6 @@ toPerform dest move key afile = moveLock move key $ do
 		removeAnnex key
 		next $ Command.Drop.cleanupLocal key
 	| otherwise = next $ return True
-	expectedpresent = do
-		remotes <- Remote.keyPossibilities key
-		return $ dest `elem` remotes
 
 {- Moves (or copies) the content of an annexed file from a remote
 - to the current repository.
@@ -149,9 +156,10 @@ fromPerform src move key afile = moveLock move key $
 	, handle move =<< go
 	)
   where
-	go = download (Remote.uuid src) key afile noRetry $ \p -> do
-		showAction $ "from " ++ Remote.name src
-		getViaTmp key $ \t -> Remote.retrieveKeyFile src key afile t p
+	go = notifyTransfer Download afile $
+		download (Remote.uuid src) key afile noRetry $ \p -> do
+			showAction $ "from " ++ Remote.name src
+			getViaTmp key $ \t -> Remote.retrieveKeyFile src key afile t p
 	handle _ False = stop -- failed
 	handle False True = next $ return True -- copy complete
 	handle True True = do -- finish moving
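The NotifyWitness visible in the new type signatures (explicit in the TransferKey hunk below) is a token-passing pattern: notifyTransfer is the only place a witness is created, so any transfer whose type demands one is statically guaranteed to run inside the notification wrapper. A minimal sketch of the pattern; constructor and wrapper are simplified, and only the idea is taken from the diff:

-- The constructor is not exported in the real code, so callers cannot
-- conjure a witness; they must go through notifyTransfer.
data NotifyWitness = NotifyWitness

notifyTransfer :: String -> (NotifyWitness -> IO a) -> IO a
notifyTransfer desc a = do
	putStrLn ("starting: " ++ desc)
	r <- a NotifyWitness
	putStrLn ("finished: " ++ desc)
	return r

-- A transfer that cannot be run without a witness in scope.
upload :: NotifyWitness -> IO Bool
upload _witness = return True

main :: IO ()
main = notifyTransfer "upload of key" upload >>= print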
@@ -5,6 +5,8 @@
 - Licensed under the GNU GPL version 3 or higher.
 -}
 
+{-# LANGUAGE CPP #-}
+
 module Command.PreCommit where
 
 import Common.Annex
@@ -16,11 +18,17 @@ import Annex.Direct
 import Annex.Hook
 import Annex.View
 import Annex.View.ViewedFile
+import Annex.Perms
+import Annex.Exception
 import Logs.View
 import Logs.MetaData
 import Types.View
 import Types.MetaData
 
+#ifdef mingw32_HOST_OS
+import Utility.WinLock
+#endif
+
 import qualified Data.Set as S
 
 def :: [Command]
@@ -28,7 +36,7 @@ def = [command "pre-commit" paramPaths seek SectionPlumbing
 	"run by git pre-commit hook"]
 
 seek :: CommandSeek
-seek ps = ifM isDirect
+seek ps = lockPreCommitHook $ ifM isDirect
 	( do
 		-- update direct mode mappings for committed files
 		withWords startDirect ps
@@ -82,3 +90,22 @@ showMetaDataChange = showLongNote . unlines . concatMap showmeta . fromMetaData
 	showset v
 		| isSet v = "+"
 		| otherwise = "-"
+
+{- Takes exclusive lock; blocks until available. -}
+lockPreCommitHook :: Annex a -> Annex a
+lockPreCommitHook a = do
+	lockfile <- fromRepo gitAnnexPreCommitLock
+	createAnnexDirectory $ takeDirectory lockfile
+	mode <- annexFileMode
+	bracketIO (lock lockfile mode) unlock (const a)
+  where
+#ifndef mingw32_HOST_OS
+	lock lockfile mode = do
+		l <- liftIO $ noUmask mode $ createFile lockfile mode
+		liftIO $ waitToSetLock l (WriteLock, AbsoluteSeek, 0, 0)
+		return l
+	unlock = closeFd
+#else
+	lock lockfile _mode = liftIO $ waitToLock $ lockExclusive lockfile
+	unlock = dropLock
+#endif
@@ -12,7 +12,7 @@ import Command
 import Annex.Content
 import Annex
 import Utility.Rsync
-import Logs.Transfer
+import Annex.Transfer
 import qualified CmdLine.GitAnnexShell.Fields as Fields
 import Utility.Metered
@@ -376,5 +376,5 @@ syncFile rs f (k, _) = do
 	put dest = do
 		ok <- commandAction $ do
 			showStart "copy" f
-			next $ Command.Move.toPerform dest False k (Just f)
+			Command.Move.toStart' dest False (Just f) k
 		return (ok, if ok then Just (Remote.uuid dest) else Nothing)
@@ -11,7 +11,7 @@ import Common.Annex
 import Command
 import Annex.Content
 import Logs.Location
-import Logs.Transfer
+import Annex.Transfer
 import qualified Remote
 import Types.Remote
@@ -41,7 +41,7 @@ start to from file key =
 	_ -> error "specify either --from or --to"
 
 toPerform :: Remote -> Key -> AssociatedFile -> CommandPerform
-toPerform remote key file = go $
+toPerform remote key file = go Upload file $
 	upload (uuid remote) key file forwardRetry $ \p -> do
 		ok <- Remote.storeKey remote key file p
 		when ok $
@@ -49,9 +49,9 @@ toPerform remote key file = go $
 	return ok
 
 fromPerform :: Remote -> Key -> AssociatedFile -> CommandPerform
-fromPerform remote key file = go $
+fromPerform remote key file = go Upload file $
 	download (uuid remote) key file forwardRetry $ \p ->
 		getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p
 
-go :: Annex Bool -> CommandPerform
-go a = a >>= liftIO . exitBool
+go :: Direction -> AssociatedFile -> (NotifyWitness -> Annex Bool) -> CommandPerform
+go direction file a = notifyTransfer direction file a >>= liftIO . exitBool
@@ -13,7 +13,7 @@ import Common.Annex
 import Command
 import Annex.Content
 import Logs.Location
-import Logs.Transfer
+import Annex.Transfer
 import qualified Remote
 import Types.Key
@@ -34,14 +34,15 @@ start = withHandles $ \(readh, writeh) -> do
 	stop
   where
 	runner (TransferRequest direction remote key file)
-		| direction == Upload =
+		| direction == Upload = notifyTransfer direction file $
 			upload (Remote.uuid remote) key file forwardRetry $ \p -> do
 				ok <- Remote.storeKey remote key file p
 				when ok $
 					Remote.logStatus remote key InfoPresent
 				return ok
-		| otherwise = download (Remote.uuid remote) key file forwardRetry $ \p ->
-			getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p
+		| otherwise = notifyTransfer direction file $
+			download (Remote.uuid remote) key file forwardRetry $ \p ->
+				getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p
 
 {- stdin and stdout are connected with the caller, to be used for
 - communication with it. But doing a transfer might involve something
@@ -16,15 +16,47 @@ import qualified Annex
 import Annex.Content
 import Annex.Content.Direct
 import qualified Git.Command
-import qualified Git.LsFiles as LsFiles
+import qualified Git.Ref
+import qualified Git.DiffTree as DiffTree
 import Utility.CopyFile
+import Command.PreCommit (lockPreCommitHook)
 
 def :: [Command]
 def = [command "unannex" paramPaths seek SectionUtility
 	"undo accidential add command"]
 
 seek :: CommandSeek
-seek = withFilesInGit $ whenAnnexed start
+seek = wrapUnannex . (withFilesInGit $ whenAnnexed start)
+
+wrapUnannex :: Annex a -> Annex a
+wrapUnannex a = ifM isDirect
+	( a
+	{- Run with the pre-commit hook disabled, to avoid confusing
+	 - behavior if an unannexed file is added back to git as
+	 - a normal, non-annexed file and then committed.
+	 - Otherwise, the pre-commit hook would think that the file
+	 - has been unlocked and needs to be re-annexed.
+	 -
+	 - At the end, make a commit removing the unannexed files.
+	 -}
+	, ifM cleanindex
+		( lockPreCommitHook $ commit `after` a
+		, error "Cannot proceed with uncommitted changes staged in the index. Recommend you: git commit"
+		)
+	)
+  where
+	commit = inRepo $ Git.Command.run
+		[ Param "commit"
+		, Param "-q"
+		, Param "--allow-empty"
+		, Param "--no-verify"
+		, Param "-m", Param "content removed from git annex"
+		]
+	cleanindex = do
+		(diff, cleanup) <- inRepo $ DiffTree.diffIndex Git.Ref.headRef
+		if null diff
+			then void (liftIO cleanup) >> return True
+			else void (liftIO cleanup) >> return False
 
 start :: FilePath -> (Key, Backend) -> CommandStart
 start file (key, _) = stopUnless (inAnnex key) $ do
@@ -36,26 +68,7 @@ start file (key, _) = stopUnless (inAnnex key) $ do
 performIndirect :: FilePath -> Key -> CommandPerform
 performIndirect file key = do
 	liftIO $ removeFile file
 
-	-- git rm deletes empty directory without --cached
 	inRepo $ Git.Command.run [Params "rm --cached --force --quiet --", File file]
-
-	-- If the file was already committed, it is now staged for removal.
-	-- Commit that removal now, to avoid later confusing the
-	-- pre-commit hook, if this file is later added back to
-	-- git as a normal non-annexed file, to thinking that the
-	-- file has been unlocked and needs to be re-annexed.
-	(s, reap) <- inRepo $ LsFiles.staged [file]
-	unless (null s) $
-		inRepo $ Git.Command.run
-			[ Param "commit"
-			, Param "-q"
-			, Param "--no-verify"
-			, Param "-m", Param "content removed from git annex"
-			, Param "--", File file
-			]
-	void $ liftIO reap
-
 	next $ cleanupIndirect file key
 
 cleanupIndirect :: FilePath -> Key -> CommandCleanup
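In wrapUnannex above, `commit `after` a` sequences the cleanup commit to run once the wrapped unannex action has produced its result. The `after` combinator is defined in git-annex's utility modules and is not shown in this diff, so the following sketch assumes the usual run-this-afterwards semantics suggested by the call site:

-- Run the first action after the second completes, keeping the second
-- action's result. (Assumed semantics, matching `commit `after` a`.)
after :: Monad m => m b -> m a -> m a
after post act = do
	r <- act
	_ <- post
	return r

main :: IO ()
main = do
	n <- putStrLn "commit" `after` (putStrLn "unannex" >> return (42 :: Int))
	print n -- prints unannex, then commit, then 42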
@@ -36,7 +36,7 @@ check = do
 
 seek :: CommandSeek
 seek ps = do
-	withFilesNotInGit (whenAnnexed startCheckIncomplete) ps
+	withFilesNotInGit False (whenAnnexed startCheckIncomplete) ps
 	withFilesInGit (whenAnnexed Command.Unannex.start) ps
 	finish
@@ -10,7 +10,6 @@
 module Command.Unused where
 
 import qualified Data.Set as S
-import qualified Data.ByteString.Lazy as L
 import Data.BloomFilter
 import Data.BloomFilter.Easy
 import Data.BloomFilter.Hash
@@ -296,7 +295,7 @@ withKeysReferencedInGitRef a ref = do
 	liftIO $ void clean
   where
 	tKey True = fmap fst <$$> Backend.lookupFile . getTopFilePath . DiffTree.file
-	tKey False = fileKey . takeFileName . encodeW8 . L.unpack <$$>
+	tKey False = fileKey . takeFileName . decodeBS <$$>
 		catFile ref . getTopFilePath . DiffTree.file
 
 {- Looks in the specified directory for bad/tmp keys, and returns a list
155 Command/Vicfg.hs
@@ -1,6 +1,6 @@
 {- git-annex command
 -
- - Copyright 2012 Joey Hess <joey@kitenet.net>
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
@@ -60,7 +60,9 @@ vicfg curcfg f = do
 data Cfg = Cfg
 	{ cfgTrustMap :: TrustMap
 	, cfgGroupMap :: M.Map UUID (S.Set Group)
-	, cfgPreferredContentMap :: M.Map UUID String
+	, cfgPreferredContentMap :: M.Map UUID PreferredContentExpression
+	, cfgRequiredContentMap :: M.Map UUID PreferredContentExpression
+	, cfgGroupPreferredContentMap :: M.Map Group PreferredContentExpression
 	, cfgScheduleMap :: M.Map UUID [ScheduledActivity]
 	}
@@ -69,25 +71,44 @@ getCfg = Cfg
 	<$> trustMapRaw -- without local trust overrides
 	<*> (groupsByUUID <$> groupMap)
 	<*> preferredContentMapRaw
+	<*> requiredContentMapRaw
+	<*> groupPreferredContentMapRaw
 	<*> scheduleMap
 
 setCfg :: Cfg -> Cfg -> Annex ()
 setCfg curcfg newcfg = do
-	let (trustchanges, groupchanges, preferredcontentchanges, schedulechanges) = diffCfg curcfg newcfg
-	mapM_ (uncurry trustSet) $ M.toList trustchanges
-	mapM_ (uncurry groupSet) $ M.toList groupchanges
-	mapM_ (uncurry preferredContentSet) $ M.toList preferredcontentchanges
-	mapM_ (uncurry scheduleSet) $ M.toList schedulechanges
+	let diff = diffCfg curcfg newcfg
+	mapM_ (uncurry trustSet) $ M.toList $ cfgTrustMap diff
+	mapM_ (uncurry groupSet) $ M.toList $ cfgGroupMap diff
+	mapM_ (uncurry preferredContentSet) $ M.toList $ cfgPreferredContentMap diff
+	mapM_ (uncurry requiredContentSet) $ M.toList $ cfgRequiredContentMap diff
+	mapM_ (uncurry groupPreferredContentSet) $ M.toList $ cfgGroupPreferredContentMap diff
+	mapM_ (uncurry scheduleSet) $ M.toList $ cfgScheduleMap diff
 
-diffCfg :: Cfg -> Cfg -> (TrustMap, M.Map UUID (S.Set Group), M.Map UUID String, M.Map UUID [ScheduledActivity])
-diffCfg curcfg newcfg = (diff cfgTrustMap, diff cfgGroupMap, diff cfgPreferredContentMap, diff cfgScheduleMap)
+diffCfg :: Cfg -> Cfg -> Cfg
+diffCfg curcfg newcfg = Cfg
+	{ cfgTrustMap = diff cfgTrustMap
+	, cfgGroupMap = diff cfgGroupMap
+	, cfgPreferredContentMap = diff cfgPreferredContentMap
+	, cfgRequiredContentMap = diff cfgRequiredContentMap
+	, cfgGroupPreferredContentMap = diff cfgGroupPreferredContentMap
+	, cfgScheduleMap = diff cfgScheduleMap
+	}
   where
 	diff f = M.differenceWith (\x y -> if x == y then Nothing else Just x)
 		(f newcfg) (f curcfg)
 
 genCfg :: Cfg -> M.Map UUID String -> String
-genCfg cfg descs = unlines $ concat
-	[intro, trust, groups, preferredcontent, schedule]
+genCfg cfg descs = unlines $ intercalate [""]
+	[ intro
+	, trust
+	, groups
+	, preferredcontent
+	, grouppreferredcontent
+	, standardgroups
+	, requiredcontent
+	, schedule
+	]
   where
 	intro =
 		[ com "git-annex configuration"
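diffCfg keeps only the fields the user actually edited: M.differenceWith pairs up keys present in both maps, and the lambda drops entries whose values are unchanged, while keys present only in the new map are kept as-is. A small standalone example of that behavior (the Data.Map API is real; the maps are made up):

import qualified Data.Map as M

main :: IO ()
main = do
	let new = M.fromList [("u1", "trusted"), ("u2", "semitrusted")]
	let cur = M.fromList [("u1", "trusted"), ("u2", "untrusted")]
	-- Keep a new value only when it differs from the current one.
	print $ M.differenceWith
		(\x y -> if x == y then Nothing else Just x)
		new cur
	-- fromList [("u2","semitrusted")] -- only the changed setting survives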
@@ -95,22 +116,20 @@ genCfg cfg descs = unlines $ concat
 		, com "Changes saved to this file will be recorded in the git-annex branch."
 		, com ""
 		, com "Lines in this file have the format:"
-		, com "  setting uuid = value"
+		, com "  setting field = value"
 		]
 
-	trust = settings cfgTrustMap
-		[ ""
-		, com "Repository trust configuration"
+	trust = settings cfg descs cfgTrustMap
+		[ com "Repository trust configuration"
 		, com "(Valid trust levels: " ++ trustlevels ++ ")"
 		]
 		(\(t, u) -> line "trust" u $ showTrustLevel t)
 		(\u -> lcom $ line "trust" u $ showTrustLevel SemiTrusted)
 	  where
 		trustlevels = unwords $ map showTrustLevel [Trusted .. DeadTrusted]
 
-	groups = settings cfgGroupMap
-		[ ""
-		, com "Repository groups"
+	groups = settings cfg descs cfgGroupMap
+		[ com "Repository groups"
 		, com $ "(Standard groups: " ++ grouplist ++ ")"
 		, com "(Separate group names with spaces)"
 		]
@@ -119,33 +138,65 @@ genCfg cfg descs = unlines $ concat
 	  where
 		grouplist = unwords $ map fromStandardGroup [minBound..]
 
-	preferredcontent = settings cfgPreferredContentMap
-		[ ""
-		, com "Repository preferred contents"
-		]
-		(\(s, u) -> line "content" u s)
-		(\u -> line "content" u "")
+	preferredcontent = settings cfg descs cfgPreferredContentMap
+		[ com "Repository preferred contents" ]
+		(\(s, u) -> line "wanted" u s)
+		(\u -> line "wanted" u "standard")
 
-	schedule = settings cfgScheduleMap
-		[ ""
-		, com "Scheduled activities"
+	requiredcontent = settings cfg descs cfgRequiredContentMap
+		[ com "Repository required contents" ]
+		(\(s, u) -> line "required" u s)
+		(\u -> line "required" u "")
+
+	grouppreferredcontent = settings' cfg allgroups cfgGroupPreferredContentMap
+		[ com "Group preferred contents"
+		, com "(Used by repositories with \"groupwanted\" in their preferred contents)"
+		]
+		(\(s, g) -> gline g s)
+		(\g -> gline g "standard")
+	  where
+		gline g value = [ unwords ["groupwanted", g, "=", value] ]
+		allgroups = S.unions $ stdgroups : M.elems (cfgGroupMap cfg)
+		stdgroups = S.fromList $ map fromStandardGroup [minBound..maxBound]
+
+	standardgroups =
+		[ com "Standard preferred contents"
+		, com "(Used by wanted or groupwanted expressions containing \"standard\")"
+		, com "(For reference only; built-in and cannot be changed!)"
+		]
+		++ map gline [minBound..maxBound]
+	  where
+		gline g = com $ unwords
+			[ "standard"
+			, fromStandardGroup g, "=", standardPreferredContent g
+			]
+
+	schedule = settings cfg descs cfgScheduleMap
+		[ com "Scheduled activities"
 		, com "(Separate multiple activities with \"; \")"
 		]
 		(\(l, u) -> line "schedule" u $ fromScheduledActivities l)
 		(\u -> line "schedule" u "")
 
-	settings field desc showvals showdefaults = concat
-		[ desc
-		, concatMap showvals $ sort $ map swap $ M.toList $ field cfg
-		, concatMap (lcom . showdefaults) $ missing field
-		]
 
 	line setting u value =
 		[ com $ "(for " ++ fromMaybe "" (M.lookup u descs) ++ ")"
 		, unwords [setting, fromUUID u, "=", value]
 		]
-	lcom = map (\l -> if "#" `isPrefixOf` l then l else '#' : l)
-	missing field = S.toList $ M.keysSet descs `S.difference` M.keysSet (field cfg)
+
+settings :: Ord v => Cfg -> M.Map UUID String -> (Cfg -> M.Map UUID v) -> [String] -> ((v, UUID) -> [String]) -> (UUID -> [String]) -> [String]
+settings cfg descs = settings' cfg (M.keysSet descs)
+
+settings' :: (Ord v, Ord f) => Cfg -> S.Set f -> (Cfg -> M.Map f v) -> [String] -> ((v, f) -> [String]) -> (f -> [String]) -> [String]
+settings' cfg s field desc showvals showdefaults = concat
+	[ desc
+	, concatMap showvals $ sort $ map swap $ M.toList $ field cfg
+	, concatMap (lcom . showdefaults) missing
+	]
+  where
+	missing = S.toList $ s `S.difference` M.keysSet (field cfg)
+
+lcom :: [String] -> [String]
+lcom = map (\l -> if "#" `isPrefixOf` l then l else '#' : l)
 
 {- If there's a parse error, returns a new version of the file,
 - with the problem lines noted. -}
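Putting the new sections together, the file that vicfg generates and parses would look roughly like this; the UUIDs and repository descriptions below are invented for illustration, and default entries come out commented via lcom:

# git-annex configuration
# Repository preferred contents
# (for laptop)
wanted 5a7f8c32-... = standard
# Repository required contents
# (for archive drive)
required 91b2dd1e-... = standard
# Group preferred contents
# (Used by repositories with "groupwanted" in their preferred contents)
groupwanted archive = standard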
@@ -163,16 +214,16 @@ parseCfg curcfg = go [] curcfg . lines
 	parse l cfg
 		| null l = Right cfg
 		| "#" `isPrefixOf` l = Right cfg
-		| null setting || null u = Left "missing repository uuid"
-		| otherwise = handle cfg (toUUID u) setting value'
+		| null setting || null f = Left "missing field"
+		| otherwise = handle cfg f setting value'
 	  where
 		(setting, rest) = separate isSpace l
 		(r, value) = separate (== '=') rest
 		value' = trimspace value
-		u = reverse $ trimspace $ reverse $ trimspace r
+		f = reverse $ trimspace $ reverse $ trimspace r
 		trimspace = dropWhile isSpace
 
-	handle cfg u setting value
+	handle cfg f setting value
 		| setting == "trust" = case readTrustLevel value of
 			Nothing -> badval "trust value" value
 			Just t ->
@@ -181,18 +232,32 @@ parseCfg curcfg = go [] curcfg . lines
 		| setting == "group" =
 			let m = M.insert u (S.fromList $ words value) (cfgGroupMap cfg)
 			in Right $ cfg { cfgGroupMap = m }
-		| setting == "content" =
+		| setting == "wanted" =
 			case checkPreferredContentExpression value of
 				Just e -> Left e
 				Nothing ->
 					let m = M.insert u value (cfgPreferredContentMap cfg)
 					in Right $ cfg { cfgPreferredContentMap = m }
+		| setting == "required" =
+			case checkPreferredContentExpression value of
+				Just e -> Left e
+				Nothing ->
+					let m = M.insert u value (cfgRequiredContentMap cfg)
+					in Right $ cfg { cfgRequiredContentMap = m }
+		| setting == "groupwanted" =
+			case checkPreferredContentExpression value of
+				Just e -> Left e
+				Nothing ->
+					let m = M.insert f value (cfgGroupPreferredContentMap cfg)
+					in Right $ cfg { cfgGroupPreferredContentMap = m }
 		| setting == "schedule" = case parseScheduledActivities value of
 			Left e -> Left e
 			Right l ->
 				let m = M.insert u l (cfgScheduleMap cfg)
 				in Right $ cfg { cfgScheduleMap = m }
 		| otherwise = badval "setting" setting
+	  where
+		u = toUUID f
 
 	showerr (Just msg, l) = [parseerr ++ msg, l]
 	showerr (Nothing, l)
@@ -203,11 +268,11 @@ parseCfg curcfg = go [] curcfg . lines
 
 	badval desc val = Left $ "unknown " ++ desc ++ " \"" ++ val ++ "\""
 	badheader =
-		[ com "There was a problem parsing your input."
-		, com "Search for \"Parse error\" to find the bad lines."
-		, com "Either fix the bad lines, or delete them (to discard your changes)."
+		[ com "** There was a problem parsing your input!"
+		, com "** Search for \"Parse error\" to find the bad lines."
+		, com "** Either fix the bad lines, or delete them (to discard your changes)."
 		]
-	parseerr = com "Parse error in next line: "
+	parseerr = com "** Parse error in next line: "
 
 com :: String -> String
 com s = "# " ++ s
@@ -11,6 +11,7 @@ import Control.Exception.Extensible as X (IOException)
 import Data.Maybe as X
 import Data.List as X hiding (head, tail, init, last)
 import Data.String.Utils as X hiding (join)
+import Data.Monoid as X
 
 import System.FilePath as X
 import System.Directory as X
@@ -108,6 +108,6 @@ catTree h treeref = go <$> catObjectDetails h treeref
 	dropsha = L.drop 21
 
 	parsemodefile b =
-		let (modestr, file) = separate (== ' ') (encodeW8 $ L.unpack b)
+		let (modestr, file) = separate (== ' ') (decodeBS b)
 		in (file, readmode modestr)
 	readmode = fst . fromMaybe (0, undefined) . headMaybe . readOct
28 Git/Fsck.hs
@@ -24,10 +24,16 @@ import qualified Git.Version
 
 import qualified Data.Set as S
 import System.Process (std_out, std_err)
+import Control.Concurrent.Async
 
 type MissingObjects = S.Set Sha
 
-data FsckResults = FsckFoundMissing MissingObjects | FsckFailed
+data FsckResults
+	= FsckFoundMissing
+		{ missingObjects :: MissingObjects
+		, missingObjectsTruncated :: Bool
+		}
+	| FsckFailed
 	deriving (Show)
 
 {- Runs fsck to find some of the broken objects in the repository.
@@ -53,22 +59,26 @@ findBroken batchmode r = do
 		{ std_out = CreatePipe
 		, std_err = CreatePipe
 		}
-	bad1 <- readMissingObjs r supportsNoDangling (stdoutHandle p)
-	bad2 <- readMissingObjs r supportsNoDangling (stderrHandle p)
+	(bad1, bad2) <- concurrently
+		(readMissingObjs maxobjs r supportsNoDangling (stdoutHandle p))
+		(readMissingObjs maxobjs r supportsNoDangling (stderrHandle p))
 	fsckok <- checkSuccessProcess pid
+	let truncated = S.size bad1 == maxobjs || S.size bad1 == maxobjs
 	let badobjs = S.union bad1 bad2
 
 	if S.null badobjs && not fsckok
 		then return FsckFailed
-		else return $ FsckFoundMissing badobjs
+		else return $ FsckFoundMissing badobjs truncated
+  where
+	maxobjs = 10000
 
 foundBroken :: FsckResults -> Bool
 foundBroken FsckFailed = True
-foundBroken (FsckFoundMissing s) = not (S.null s)
+foundBroken (FsckFoundMissing s _) = not (S.null s)
 
 knownMissing :: FsckResults -> MissingObjects
 knownMissing FsckFailed = S.empty
-knownMissing (FsckFoundMissing s) = s
+knownMissing (FsckFoundMissing s _) = s
 
 {- Finds objects that are missing from the git repsitory, or are corrupt.
 -
@@ -78,9 +88,9 @@ knownMissing (FsckFoundMissing s _) = s
 findMissing :: [Sha] -> Repo -> IO MissingObjects
 findMissing objs r = S.fromList <$> filterM (`isMissing` r) objs
 
-readMissingObjs :: Repo -> Bool -> Handle -> IO MissingObjects
-readMissingObjs r supportsNoDangling h = do
-	objs <- findShas supportsNoDangling <$> hGetContents h
+readMissingObjs :: Int -> Repo -> Bool -> Handle -> IO MissingObjects
+readMissingObjs maxobjs r supportsNoDangling h = do
+	objs <- take maxobjs . findShas supportsNoDangling <$> hGetContents h
 	findMissing objs r
 
 isMissing :: Sha -> Repo -> IO Bool
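Reading stdout and stderr with concurrently matters for more than speed: draining both pipes at once avoids the classic deadlock where the child blocks writing to one pipe while the parent is still reading the other. A standalone sketch using the same async primitive (the subprocess here is an invented stand-in):

import Control.Concurrent.Async (concurrently)
import System.Process
import System.IO

-- Force a handle's full contents before moving on.
drain :: Handle -> IO String
drain h = do
	s <- hGetContents h
	length s `seq` return s

main :: IO ()
main = do
	-- Spawn something that writes to both stdout and stderr.
	(_, Just hout, Just herr, pid) <- createProcess
		(shell "echo out; echo err 1>&2")
			{ std_out = CreatePipe
			, std_err = CreatePipe
			}
	-- Drain both pipes at once so the child can never block
	-- writing to one while we read the other.
	(o, e) <- concurrently (drain hout) (drain herr)
	_ <- waitForProcess pid
	putStr ("stdout: " ++ o ++ "stderr: " ++ e)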
@@ -1,7 +1,6 @@
 {- git repository recovery
-import qualified Data.Set as S
 -
- - Copyright 2013 Joey Hess <joey@kitenet.net>
+ - Copyright 2013-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
@@ -115,7 +114,9 @@ retrieveMissingObjects missing referencerepo r
 		void $ copyObjects tmpr r
 		case stillmissing of
 			FsckFailed -> return $ FsckFailed
-			FsckFoundMissing s -> FsckFoundMissing <$> findMissing (S.toList s) r
+			FsckFoundMissing s t -> FsckFoundMissing
+				<$> findMissing (S.toList s) r
+				<*> pure t
 	, return stillmissing
 	)
 	pullremotes tmpr (rmt:rmts) fetchrefs ms
@@ -128,9 +129,9 @@ retrieveMissingObjects missing referencerepo r
 				void $ copyObjects tmpr r
 				case ms of
 					FsckFailed -> pullremotes tmpr rmts fetchrefs ms
-					FsckFoundMissing s -> do
+					FsckFoundMissing s t -> do
 						stillmissing <- findMissing (S.toList s) r
-						pullremotes tmpr rmts fetchrefs (FsckFoundMissing stillmissing)
+						pullremotes tmpr rmts fetchrefs (FsckFoundMissing stillmissing t)
 			, pullremotes tmpr rmts fetchrefs ms
 			)
 	fetchfrom fetchurl ps = runBool $
@@ -278,7 +279,7 @@ findUncorruptedCommit missing goodcommits branch r = do
 			then return (Just c, gcs')
 			else findfirst gcs' cs
 
-{- Verifies tha none of the missing objects in the set are used by
+{- Verifies that none of the missing objects in the set are used by
 - the commit. Also adds to a set of commit shas that have been verified to
 - be good, which can be passed into subsequent calls to avoid
 - redundant work when eg, chasing down branches to find the first
@@ -452,7 +453,7 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
 	missing <- findBroken False g
 	stillmissing <- retrieveMissingObjects missing referencerepo g
 	case stillmissing of
-		FsckFoundMissing s
+		FsckFoundMissing s t
 			| S.null s -> if repoIsLocalBare g
 				then successfulfinish []
 				else ifM (checkIndex g)
@@ -465,7 +466,7 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
 					)
 			| otherwise -> if forced
 				then ifM (checkIndex g)
-					( continuerepairs s
+					( forcerepair s t
 					, corruptedindex
 					)
 				else do
@@ -478,17 +479,16 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
 			| forced -> ifM (pure (repoIsLocalBare g) <||> checkIndex g)
 				( do
 					cleanCorruptObjects FsckFailed g
-					missing' <- findBroken False g
-					case missing' of
+					stillmissing' <- findBroken False g
+					case stillmissing' of
 						FsckFailed -> return (False, [])
-						FsckFoundMissing stillmissing' ->
-							continuerepairs stillmissing'
+						FsckFoundMissing s t -> forcerepair s t
 				, corruptedindex
 				)
 			| otherwise -> unsuccessfulfinish
   where
-	continuerepairs stillmissing = do
-		(removedbranches, goodcommits) <- removeBadBranches removablebranch stillmissing emptyGoodCommits g
+	repairbranches missing = do
+		(removedbranches, goodcommits) <- removeBadBranches removablebranch missing emptyGoodCommits g
 		let remotebranches = filter isTrackingBranch removedbranches
 		unless (null remotebranches) $
 			putStrLn $ unwords
@@ -496,31 +496,42 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
 				, show (length remotebranches)
 				, "remote tracking branches that referred to missing objects."
 				]
-		(resetbranches, deletedbranches, _) <- resetLocalBranches stillmissing goodcommits g
+		(resetbranches, deletedbranches, _) <- resetLocalBranches missing goodcommits g
 		displayList (map fromRef resetbranches)
 			"Reset these local branches to old versions before the missing objects were committed:"
 		displayList (map fromRef deletedbranches)
 			"Deleted these local branches, which could not be recovered due to missing objects:"
+		return (resetbranches ++ deletedbranches)
+
+	forcerepair missing fscktruncated = do
+		modifiedbranches <- repairbranches missing
 		deindexedfiles <- rewriteIndex g
 		displayList deindexedfiles
 			"Removed these missing files from the index. You should look at what files are present in your working tree and git add them back to the index when appropriate."
-		let modifiedbranches = resetbranches ++ deletedbranches
-		if null resetbranches && null deletedbranches
-			then successfulfinish modifiedbranches
-			else do
-				unless (repoIsLocalBare g) $ do
-					mcurr <- Branch.currentUnsafe g
-					case mcurr of
-						Nothing -> return ()
-						Just curr -> when (any (== curr) modifiedbranches) $ do
-							putStrLn $ unwords
-								[ "You currently have"
-								, fromRef curr
-								, "checked out. You may have staged changes in the index that can be committed to recover the lost state of this branch!"
-								]
-				putStrLn "Successfully recovered repository!"
-				putStrLn "Please carefully check that the changes mentioned above are ok.."
-				return (True, modifiedbranches)
+
+		-- When the fsck results were truncated, try
+		-- fscking again, and as long as different
+		-- missing objects are found, continue
+		-- the repair process.
+		if fscktruncated
+			then do
+				fsckresult' <- findBroken False g
+				case fsckresult' of
+					FsckFailed -> do
+						putStrLn "git fsck is failing"
+						return (False, modifiedbranches)
+					FsckFoundMissing s _
+						| S.null s -> successfulfinish modifiedbranches
+						| S.null (s `S.difference` missing) -> do
+							putStrLn $ unwords
+								[ show (S.size s)
+								, "missing objects could not be recovered!"
+								]
+							return (False, modifiedbranches)
+						| otherwise -> do
+							(ok, modifiedbranches') <- runRepairOf fsckresult' removablebranch forced referencerepo g
+							return (ok, modifiedbranches++modifiedbranches')
+			else successfulfinish modifiedbranches
 
 	corruptedindex = do
 		nukeFile (indexFile g)
@@ -531,12 +542,28 @@ runRepair' removablebranch fsckresult forced referencerepo g = do
 		putStrLn "Removed the corrupted index file. You should look at what files are present in your working tree and git add them back to the index when appropriate."
|
putStrLn "Removed the corrupted index file. You should look at what files are present in your working tree and git add them back to the index when appropriate."
|
||||||
return result
|
return result
|
||||||
|
|
||||||
successfulfinish modifiedbranches = do
|
successfulfinish modifiedbranches
|
||||||
mapM_ putStrLn
|
| null modifiedbranches = do
|
||||||
[ "Successfully recovered repository!"
|
mapM_ putStrLn
|
||||||
, "You should run \"git fsck\" to make sure, but it looks like everything was recovered ok."
|
[ "Successfully recovered repository!"
|
||||||
]
|
, "You should run \"git fsck\" to make sure, but it looks like everything was recovered ok."
|
||||||
return (True, modifiedbranches)
|
]
|
||||||
|
return (True, modifiedbranches)
|
||||||
|
| otherwise = do
|
||||||
|
unless (repoIsLocalBare g) $ do
|
||||||
|
mcurr <- Branch.currentUnsafe g
|
||||||
|
case mcurr of
|
||||||
|
Nothing -> return ()
|
||||||
|
Just curr -> when (any (== curr) modifiedbranches) $ do
|
||||||
|
putStrLn $ unwords
|
||||||
|
[ "You currently have"
|
||||||
|
, fromRef curr
|
||||||
|
, "checked out. You may have staged changes in the index that can be committed to recover the lost state of this branch!"
|
||||||
|
]
|
||||||
|
putStrLn "Successfully recovered repository!"
|
||||||
|
putStrLn "Please carefully check that the changes mentioned above are ok.."
|
||||||
|
return (True, modifiedbranches)
|
||||||
|
|
||||||
unsuccessfulfinish = do
|
unsuccessfulfinish = do
|
||||||
if repoIsLocalBare g
|
if repoIsLocalBare g
|
||||||
then do
|
then do
|
||||||
|
|
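The new forcerepair path loops on truncated fsck results: it refscks, and keeps repairing only while each pass uncovers missing objects that were not already known. A minimal standalone sketch of that termination test, using plain Data.Set rather than git-annex code:

    import qualified Data.Set as S

    -- Mirrors the guards in forcerepair: no missing objects means success,
    -- a subset of the already-known missing objects means no progress is
    -- possible, and anything new means another repair pass is worthwhile.
    data Progress = Recovered | Stuck | RepairAgain
            deriving (Eq, Show)

    nextStep :: Ord a => S.Set a -> S.Set a -> Progress
    nextStep known found
            | S.null found = Recovered
            | S.null (found `S.difference` known) = Stuck
            | otherwise = RepairAgain

    main :: IO ()
    main = do
            print $ nextStep (S.fromList "ab") S.empty          -- Recovered
            print $ nextStep (S.fromList "ab") (S.fromList "b") -- Stuck
            print $ nextStep (S.fromList "ab") (S.fromList "c") -- RepairAgain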
67 Limit.hs

@@ -20,7 +20,6 @@ import Types.TrustLevel
 import Types.Key
 import Types.Group
 import Types.FileMatcher
-import Types.Limit
 import Types.MetaData
 import Logs.MetaData
 import Logs.Group
@@ -45,21 +44,20 @@ getMatcher :: Annex (MatchInfo -> Annex Bool)
 getMatcher = Utility.Matcher.matchM <$> getMatcher'
 
 getMatcher' :: Annex (Utility.Matcher.Matcher (MatchInfo -> Annex Bool))
-getMatcher' = do
-	m <- Annex.getState Annex.limit
-	case m of
-		Right r -> return r
-		Left l -> do
-			let matcher = Utility.Matcher.generate (reverse l)
-			Annex.changeState $ \s ->
-				s { Annex.limit = Right matcher }
-			return matcher
+getMatcher' = go =<< Annex.getState Annex.limit
+  where
+	go (CompleteMatcher matcher) = return matcher
+	go (BuildingMatcher l) = do
+		let matcher = Utility.Matcher.generate (reverse l)
+		Annex.changeState $ \s ->
+			s { Annex.limit = CompleteMatcher matcher }
+		return matcher
 
 {- Adds something to the limit list, which is built up reversed. -}
 add :: Utility.Matcher.Token (MatchInfo -> Annex Bool) -> Annex ()
 add l = Annex.changeState $ \s -> s { Annex.limit = prepend $ Annex.limit s }
   where
-	prepend (Left ls) = Left $ l:ls
+	prepend (BuildingMatcher ls) = BuildingMatcher $ l:ls
 	prepend _ = error "internal"
 
 {- Adds a new token. -}
@@ -67,21 +65,21 @@ addToken :: String -> Annex ()
 addToken = add . Utility.Matcher.token
 
 {- Adds a new limit. -}
-addLimit :: Either String MatchFiles -> Annex ()
+addLimit :: Either String (MatchFiles Annex) -> Annex ()
 addLimit = either error (\l -> add $ Utility.Matcher.Operation $ l S.empty)
 
 {- Add a limit to skip files that do not match the glob. -}
 addInclude :: String -> Annex ()
 addInclude = addLimit . limitInclude
 
-limitInclude :: MkLimit
+limitInclude :: MkLimit Annex
 limitInclude glob = Right $ const $ return . matchGlobFile glob
 
 {- Add a limit to skip files that match the glob. -}
 addExclude :: String -> Annex ()
 addExclude = addLimit . limitExclude
 
-limitExclude :: MkLimit
+limitExclude :: MkLimit Annex
 limitExclude glob = Right $ const $ return . not . matchGlobFile glob
 
 matchGlobFile :: String -> (MatchInfo -> Bool)
@@ -94,18 +92,16 @@ matchGlobFile glob = go
 {- Adds a limit to skip files not believed to be present
 - in a specfied repository. Optionally on a prior date. -}
 addIn :: String -> Annex ()
-addIn = addLimit . limitIn
-
-limitIn :: MkLimit
-limitIn s = Right $ \notpresent -> checkKey $ \key ->
-	if name == "."
-		then if null date
-			then inhere notpresent key
-			else inuuid notpresent key =<< getUUID
-		else inuuid notpresent key =<< Remote.nameToUUID name
+addIn s = addLimit =<< mk
   where
 	(name, date) = separate (== '@') s
-	inuuid notpresent key u
+	mk
+		| name == "." = if null date
+			then use inhere
+			else use . inuuid =<< getUUID
+		| otherwise = use . inuuid =<< Remote.nameToUUID name
+	use a = return $ Right $ \notpresent -> checkKey (a notpresent)
+	inuuid u notpresent key
 		| null date = do
 			us <- Remote.keyLocations key
 			return $ u `elem` us && u `S.notMember` notpresent
@@ -121,8 +117,11 @@ limitIn s = Right $ \notpresent -> checkKey $ \key ->
 		else inAnnex key
 
 {- Limit to content that is currently present on a uuid. -}
-limitPresent :: Maybe UUID -> MkLimit
-limitPresent u _ = Right $ const $ checkKey $ \key -> do
+limitPresent :: Maybe UUID -> MkLimit Annex
+limitPresent u _ = Right $ matchPresent u
+
+matchPresent :: Maybe UUID -> MatchFiles Annex
+matchPresent u _ = checkKey $ \key -> do
 	hereu <- getUUID
 	if u == Just hereu || isNothing u
 		then inAnnex key
@@ -131,7 +130,7 @@ limitPresent u _ = Right $ const $ checkKey $ \key -> do
 			return $ maybe False (`elem` us) u
 
 {- Limit to content that is in a directory, anywhere in the repository tree -}
-limitInDir :: FilePath -> MkLimit
+limitInDir :: FilePath -> MkLimit Annex
 limitInDir dir = const $ Right $ const go
   where
 	go (MatchingFile fi) = return $ elem dir $ splitPath $ takeDirectory $ matchFile fi
@@ -142,7 +141,7 @@ limitInDir dir = const $ Right $ const go
 addCopies :: String -> Annex ()
 addCopies = addLimit . limitCopies
 
-limitCopies :: MkLimit
+limitCopies :: MkLimit Annex
 limitCopies want = case split ":" want of
 	[v, n] -> case parsetrustspec v of
 		Just checker -> go n $ checktrust checker
@@ -168,7 +167,7 @@ limitCopies want = case split ":" want of
 addLackingCopies :: Bool -> String -> Annex ()
 addLackingCopies approx = addLimit . limitLackingCopies approx
 
-limitLackingCopies :: Bool -> MkLimit
+limitLackingCopies :: Bool -> MkLimit Annex
 limitLackingCopies approx want = case readish want of
 	Just needed -> Right $ \notpresent mi -> flip checkKey mi $
 		handle mi needed notpresent
@@ -190,7 +189,7 @@ limitLackingCopies approx want = case readish want of
 - This has a nice optimisation: When a file exists,
 - its key is obviously not unused.
 -}
-limitUnused :: MatchFiles
+limitUnused :: MatchFiles Annex
 limitUnused _ (MatchingFile _) = return False
 limitUnused _ (MatchingKey k) = S.member k <$> unusedKeys
 
@@ -201,7 +200,7 @@ addInAllGroup groupname = do
 	m <- groupMap
 	addLimit $ limitInAllGroup m groupname
 
-limitInAllGroup :: GroupMap -> MkLimit
+limitInAllGroup :: GroupMap -> MkLimit Annex
 limitInAllGroup m groupname
 	| S.null want = Right $ const $ const $ return True
 	| otherwise = Right $ \notpresent -> checkKey $ check notpresent
@@ -218,7 +217,7 @@ limitInAllGroup m groupname
 addInBackend :: String -> Annex ()
 addInBackend = addLimit . limitInBackend
 
-limitInBackend :: MkLimit
+limitInBackend :: MkLimit Annex
 limitInBackend name = Right $ const $ checkKey check
   where
 	check key = pure $ keyBackendName key == name
@@ -230,7 +229,7 @@ addLargerThan = addLimit . limitSize (>)
 addSmallerThan :: String -> Annex ()
 addSmallerThan = addLimit . limitSize (<)
 
-limitSize :: (Maybe Integer -> Maybe Integer -> Bool) -> MkLimit
+limitSize :: (Maybe Integer -> Maybe Integer -> Bool) -> MkLimit Annex
 limitSize vs s = case readSize dataUnits s of
 	Nothing -> Left "bad size"
 	Just sz -> Right $ go sz
@@ -248,7 +247,7 @@ limitSize vs s = case readSize dataUnits s of
 addMetaData :: String -> Annex ()
 addMetaData = addLimit . limitMetaData
 
-limitMetaData :: MkLimit
+limitMetaData :: MkLimit Annex
 limitMetaData s = case parseMetaData s of
 	Left e -> Left e
 	Right (f, v) ->
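getMatcher' now freezes a partially built matcher on first use and caches the generated form. A self-contained sketch of the same build-then-freeze pattern, using the constructor names from the diff but a plain State monad and a token list in place of the Annex types (an illustration, not git-annex's implementation):

    import Control.Monad.Trans.State.Strict

    -- Either still being assembled from prepended tokens, or generated.
    data ExpandableMatcher = BuildingMatcher [String] | CompleteMatcher [String]

    -- First use generates the matcher from the reversed token list and
    -- caches the complete form; later uses return it unchanged.
    getMatcher :: State ExpandableMatcher [String]
    getMatcher = go =<< get
      where
            go (CompleteMatcher m) = return m
            go (BuildingMatcher l) = do
                    let m = reverse l
                    put (CompleteMatcher m)
                    return m

    main :: IO ()
    main = print $ evalState getMatcher (BuildingMatcher ["exclude=*", "include=*.mp3"])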
Locations.hs

@@ -41,6 +41,7 @@ module Locations (
 	gitAnnexMergeDir,
 	gitAnnexJournalDir,
 	gitAnnexJournalLock,
+	gitAnnexPreCommitLock,
 	gitAnnexIndex,
 	gitAnnexIndexStatus,
 	gitAnnexViewIndex,
@@ -257,6 +258,10 @@ gitAnnexJournalDir r = addTrailingPathSeparator $ gitAnnexDir r </> "journal"
 gitAnnexJournalLock :: Git.Repo -> FilePath
 gitAnnexJournalLock r = gitAnnexDir r </> "journal.lck"
 
+{- Lock file for the pre-commit hook. -}
+gitAnnexPreCommitLock :: Git.Repo -> FilePath
+gitAnnexPreCommitLock r = gitAnnexDir r </> "precommit.lck"
+
 {- .git/annex/index is used to stage changes to the git-annex branch -}
 gitAnnexIndex :: Git.Repo -> FilePath
 gitAnnexIndex r = gitAnnexDir r </> "index"
16 Logs.hs

@@ -24,7 +24,7 @@ getLogVariety :: FilePath -> Maybe LogVariety
 getLogVariety f
 	| f `elem` topLevelUUIDBasedLogs = Just UUIDBasedLog
 	| isRemoteStateLog f = Just NewUUIDBasedLog
-	| isMetaDataLog f || f == numcopiesLog = Just OtherLog
+	| isMetaDataLog f || f `elem` otherLogs = Just OtherLog
 	| otherwise = PresenceLog <$> firstJust (presenceLogs f)
 
 {- All the uuid-based logs stored in the top of the git-annex branch. -}
@@ -35,6 +35,7 @@ topLevelUUIDBasedLogs =
 	, trustLog
 	, groupLog
 	, preferredContentLog
+	, requiredContentLog
 	, scheduleLog
 	]
 
@@ -45,6 +46,13 @@ presenceLogs f =
 	, locationLogFileKey f
 	]
 
+{- Logs that are neither UUID based nor presence logs. -}
+otherLogs :: [FilePath]
+otherLogs =
+	[ numcopiesLog
+	, groupPreferredContentLog
+	]
+
 uuidLog :: FilePath
 uuidLog = "uuid.log"
 
@@ -63,6 +71,12 @@ groupLog = "group.log"
 preferredContentLog :: FilePath
 preferredContentLog = "preferred-content.log"
 
+requiredContentLog :: FilePath
+requiredContentLog = "required-content.log"
+
+groupPreferredContentLog :: FilePath
+groupPreferredContentLog = "group-preferred-content.log"
+
 scheduleLog :: FilePath
 scheduleLog = "schedule.log"
@@ -23,25 +23,31 @@ writeFsckResults u fsckresults = do
 	logfile <- fromRepo $ gitAnnexFsckResultsLog u
 	liftIO $
 		case fsckresults of
-			FsckFailed -> store S.empty logfile
-			FsckFoundMissing s
+			FsckFailed -> store S.empty False logfile
+			FsckFoundMissing s t
 				| S.null s -> nukeFile logfile
-				| otherwise -> store s logfile
+				| otherwise -> store s t logfile
   where
-	store s logfile = do
+	store s t logfile = do
 		createDirectoryIfMissing True (parentDir logfile)
-		liftIO $ viaTmp writeFile logfile $ serialize s
-	serialize = unlines . map fromRef . S.toList
+		liftIO $ viaTmp writeFile logfile $ serialize s t
+	serialize s t =
+		let ls = map fromRef (S.toList s)
+		in if t
+			then unlines ("truncated":ls)
+			else unlines ls
 
 readFsckResults :: UUID -> Annex FsckResults
 readFsckResults u = do
 	logfile <- fromRepo $ gitAnnexFsckResultsLog u
-	liftIO $ catchDefaultIO (FsckFoundMissing S.empty) $
-		deserialize <$> readFile logfile
+	liftIO $ catchDefaultIO (FsckFoundMissing S.empty False) $
+		deserialize . lines <$> readFile logfile
   where
-	deserialize l =
-		let s = S.fromList $ map Ref $ lines l
-		in if S.null s then FsckFailed else FsckFoundMissing s
+	deserialize ("truncated":ls) = deserialize' ls True
+	deserialize ls = deserialize' ls False
+	deserialize' ls t =
+		let s = S.fromList $ map Ref ls
+		in if S.null s then FsckFailed else FsckFoundMissing s t
 
 clearFsckResults :: UUID -> Annex ()
 clearFsckResults = liftIO . nukeFile <=< fromRepo . gitAnnexFsckResultsLog
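The format change to the fsck results file is small: a leading "truncated" line flags incomplete fsck output, followed by one ref per line. A pure round-trip sketch of that framing, with plain strings standing in for Refs:

    serialize :: Bool -> [String] -> String
    serialize truncated refs
            | truncated = unlines ("truncated" : refs)
            | otherwise = unlines refs

    deserialize :: String -> (Bool, [String])
    deserialize s = case lines s of
            ("truncated":ls) -> (True, ls)
            ls -> (False, ls)

    main :: IO ()
    main = do
            let out = serialize True ["1234abcd", "5678ef90"]
            putStr out                 -- truncated, then one ref per line
            print (deserialize out)    -- (True,["1234abcd","5678ef90"])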
81 Logs/MapLog.hs (new file)

@@ -0,0 +1,81 @@
+{- git-annex Map log
+ -
+ - This is used to store a Map, in a way that can be union merged.
+ -
+ - A line of the log will look like: "timestamp field value"
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Logs.MapLog where
+
+import qualified Data.Map as M
+import Data.Time.Clock.POSIX
+import Data.Time
+import System.Locale
+
+import Common
+
+data TimeStamp = Unknown | Date POSIXTime
+	deriving (Eq, Ord, Show)
+
+data LogEntry v = LogEntry
+	{ changed :: TimeStamp
+	, value :: v
+	} deriving (Eq, Show)
+
+type MapLog f v = M.Map f (LogEntry v)
+
+showMapLog :: (f -> String) -> (v -> String) -> MapLog f v -> String
+showMapLog fieldshower valueshower = unlines . map showpair . M.toList
+  where
+	showpair (f, LogEntry (Date p) v) =
+		unwords [show p, fieldshower f, valueshower v]
+	showpair (f, LogEntry Unknown v) =
+		unwords ["0", fieldshower f, valueshower v]
+
+parseMapLog :: Ord f => (String -> Maybe f) -> (String -> Maybe v) -> String -> MapLog f v
+parseMapLog fieldparser valueparser = M.fromListWith best . mapMaybe parse . lines
+  where
+	parse line = do
+		let (ts, rest) = splitword line
+		    (sf, sv) = splitword rest
+		date <- Date . utcTimeToPOSIXSeconds <$> parseTime defaultTimeLocale "%s%Qs" ts
+		f <- fieldparser sf
+		v <- valueparser sv
+		Just (f, LogEntry date v)
+	splitword = separate (== ' ')
+
+changeMapLog :: Ord f => POSIXTime -> f -> v -> MapLog f v -> MapLog f v
+changeMapLog t f v = M.insert f $ LogEntry (Date t) v
+
+{- Only add an LogEntry if it's newer (or at least as new as) than any
+ - existing LogEntry for a field. -}
+addMapLog :: Ord f => f -> LogEntry v -> MapLog f v -> MapLog f v
+addMapLog = M.insertWith' best
+
+{- Converts a MapLog into a simple Map without the timestamp information.
+ - This is a one-way trip, but useful for code that never needs to change
+ - the log. -}
+simpleMap :: MapLog f v -> M.Map f v
+simpleMap = M.map value
+
+best :: LogEntry v -> LogEntry v -> LogEntry v
+best new old
+	| changed old > changed new = old
+	| otherwise = new
+
+-- Unknown is oldest.
+prop_TimeStamp_sane :: Bool
+prop_TimeStamp_sane = Unknown < Date 1
+
+prop_addMapLog_sane :: Bool
+prop_addMapLog_sane = newWins && newestWins
+  where
+	newWins = addMapLog ("foo") (LogEntry (Date 1) "new") l == l2
+	newestWins = addMapLog ("foo") (LogEntry (Date 1) "newest") l2 /= l2
+
+	l = M.fromList [("foo", LogEntry (Date 0) "old")]
+	l2 = M.fromList [("foo", LogEntry (Date 1) "new")]
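Since this whole module is new, a short usage sketch may help: each field carries one timestamped entry, and union merging two logs keeps the newest entry per field (assuming Logs.MapLog compiles as above; the POSIXTime literals work via its Num instance):

    import qualified Data.Map as M
    import Logs.MapLog

    main :: IO ()
    main = do
            -- Two diverged copies of the same log.
            let ours = changeMapLog 10 "color" "red" M.empty
                theirs = changeMapLog 20 "color" "blue" M.empty
            -- Union merge: addMapLog keeps the newest entry per field.
            let merged = M.foldrWithKey addMapLog ours theirs
            print (simpleMap merged)   -- fromList [("color","blue")]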
@@ -36,26 +36,54 @@ module Logs.MetaData (
 
 import Common.Annex
 import Types.MetaData
+import Annex.MetaData.StandardFields
 import qualified Annex.Branch
 import Logs
 import Logs.SingleValue
 
 import qualified Data.Set as S
+import qualified Data.Map as M
 import Data.Time.Clock.POSIX
+import Data.Time.Format
+import System.Locale
 
 instance SingleValueSerializable MetaData where
 	serialize = Types.MetaData.serialize
 	deserialize = Types.MetaData.deserialize
 
-getMetaData :: Key -> Annex (Log MetaData)
-getMetaData = readLog . metaDataLogFile
+getMetaDataLog :: Key -> Annex (Log MetaData)
+getMetaDataLog = readLog . metaDataLogFile
 
 {- Go through the log from oldest to newest, and combine it all
- - into a single MetaData representing the current state. -}
+ - into a single MetaData representing the current state.
+ -
+ - Automatically generates a lastchanged metadata for each field that's
+ - currently set, based on timestamps in the log.
+ -}
 getCurrentMetaData :: Key -> Annex MetaData
-getCurrentMetaData = currentMetaData . collect <$$> getMetaData
+getCurrentMetaData k = do
+	ls <- S.toAscList <$> getMetaDataLog k
+	let loggedmeta = currentMetaData $ combineMetaData $ map value ls
+	return $ currentMetaData $ unionMetaData loggedmeta
+		(lastchanged ls loggedmeta)
   where
-	collect = foldl' unionMetaData emptyMetaData . map value . S.toAscList
+	lastchanged [] _ = emptyMetaData
+	lastchanged ls (MetaData currentlyset) =
+		let m = foldl' (flip M.union) M.empty (map genlastchanged ls)
+		in MetaData $
+			-- Add a overall lastchanged using the oldest log
+			-- item (log is in ascending order).
+			M.insert lastChangedField (lastchangedval $ Prelude.last ls) $
+			M.mapKeys mkLastChangedField $
+			-- Only include fields that are currently set.
+			m `M.intersection` currentlyset
+	-- Makes each field have the timestamp as its value.
+	genlastchanged l =
+		let MetaData m = value l
+		    ts = lastchangedval l
+		in M.map (const ts) m
+	lastchangedval l = S.singleton $ toMetaValue $ showts $ changed l
+	showts = formatTime defaultTimeLocale "%F@%H-%M-%S" . posixSecondsToUTCTime
 
 {- Adds in some metadata, which can override existing values, or unset
 - them, but otherwise leaves any existing metadata as-is. -}
@@ -67,10 +95,12 @@ addMetaData k metadata = addMetaData' k metadata =<< liftIO getPOSIXTime
 - will tend to be generated across the different log files, and so
 - git will be able to pack the data more efficiently. -}
 addMetaData' :: Key -> MetaData -> POSIXTime -> Annex ()
-addMetaData' k metadata now = Annex.Branch.change (metaDataLogFile k) $
+addMetaData' k (MetaData m) now = Annex.Branch.change (metaDataLogFile k) $
 	showLog . simplifyLog
 		. S.insert (LogEntry now metadata)
 		. parseLog
+  where
+	metadata = MetaData $ M.filterWithKey (\f _ -> not (isLastChangedField f)) m
 
 {- Simplify a log, removing historical values that are no longer
 - needed.
@@ -148,7 +178,7 @@ copyMetaData :: Key -> Key -> Annex ()
 copyMetaData oldkey newkey
 	| oldkey == newkey = noop
 	| otherwise = do
-		l <- getMetaData oldkey
+		l <- getMetaDataLog oldkey
 		unless (S.null l) $
 			Annex.Branch.change (metaDataLogFile newkey) $
 				const $ showLog l
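The generated lastchanged values use the timestamp format shown in showts above; a one-line demonstration with an arbitrary POSIX time (System.Locale is from the old-locale package, matching the import in the diff):

    import Data.Time.Format (formatTime)
    import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
    import System.Locale (defaultTimeLocale)

    -- Renders a POSIX time the way getCurrentMetaData does: no spaces
    -- or colons, so the value stays filename-safe.
    main :: IO ()
    main = putStrLn $ formatTime defaultTimeLocale "%F@%H-%M-%S"
            (posixSecondsToUTCTime 1400000000)   -- "2014-05-13@16-53-20"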
@@ -1,19 +1,24 @@
 {- git-annex preferred content matcher configuration
 -
- - Copyright 2012 Joey Hess <joey@kitenet.net>
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
 
 module Logs.PreferredContent (
-	preferredContentLog,
 	preferredContentSet,
+	requiredContentSet,
+	groupPreferredContentSet,
 	isPreferredContent,
+	isRequiredContent,
 	preferredContentMap,
-	preferredContentMapLoad,
 	preferredContentMapRaw,
+	requiredContentMap,
+	requiredContentMapRaw,
+	groupPreferredContentMapRaw,
 	checkPreferredContentExpression,
 	setStandardGroup,
+	preferredRequiredMapsLoad,
 ) where
 
 import qualified Data.Map as M
@@ -26,70 +31,106 @@ import qualified Annex.Branch
 import qualified Annex
 import Logs
 import Logs.UUIDBased
-import qualified Utility.Matcher
+import Utility.Matcher hiding (tokens)
 import Annex.FileMatcher
 import Annex.UUID
-import Types.Limit
 import Types.Group
 import Types.Remote (RemoteConfig)
 import Logs.Group
 import Logs.Remote
+import Types.FileMatcher
 import Types.StandardGroups
+import Limit
 
 {- Checks if a file is preferred content for the specified repository
 - (or the current repository if none is specified). -}
 isPreferredContent :: Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
-isPreferredContent mu notpresent mkey afile def = do
+isPreferredContent = checkMap preferredContentMap
+
+isRequiredContent :: Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
+isRequiredContent = checkMap requiredContentMap
+
+checkMap :: Annex (FileMatcherMap Annex) -> Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
+checkMap getmap mu notpresent mkey afile def = do
 	u <- maybe getUUID return mu
-	m <- preferredContentMap
+	m <- getmap
 	case M.lookup u m of
 		Nothing -> return def
 		Just matcher -> checkMatcher matcher mkey afile notpresent def
 
-{- The map is cached for speed. -}
-preferredContentMap :: Annex Annex.PreferredContentMap
-preferredContentMap = maybe preferredContentMapLoad return
+preferredContentMap :: Annex (FileMatcherMap Annex)
+preferredContentMap = maybe (fst <$> preferredRequiredMapsLoad) return
 	=<< Annex.getState Annex.preferredcontentmap
 
-{- Loads the map, updating the cache. -}
-preferredContentMapLoad :: Annex Annex.PreferredContentMap
-preferredContentMapLoad = do
+requiredContentMap :: Annex (FileMatcherMap Annex)
+requiredContentMap = maybe (snd <$> preferredRequiredMapsLoad) return
+	=<< Annex.getState Annex.requiredcontentmap
+
+preferredRequiredMapsLoad :: Annex (FileMatcherMap Annex, FileMatcherMap Annex)
+preferredRequiredMapsLoad = do
 	groupmap <- groupMap
 	configmap <- readRemoteLog
-	m <- simpleMap
-		. parseLogWithUUID ((Just .) . makeMatcher groupmap configmap)
-		<$> Annex.Branch.get preferredContentLog
-	Annex.changeState $ \s -> s { Annex.preferredcontentmap = Just m }
-	return m
+	let genmap l gm = simpleMap
+		. parseLogWithUUID ((Just .) . makeMatcher groupmap configmap gm)
+		<$> Annex.Branch.get l
+	pc <- genmap preferredContentLog =<< groupPreferredContentMapRaw
+	rc <- genmap requiredContentLog M.empty
+	-- Required content is implicitly also preferred content, so OR
+	let m = M.unionWith MOr pc rc
+	Annex.changeState $ \s -> s
+		{ Annex.preferredcontentmap = Just m
+		, Annex.requiredcontentmap = Just rc
+		}
+	return (m, rc)
 
 {- This intentionally never fails, even on unparsable expressions,
 - because the configuration is shared among repositories and newer
- - versions of git-annex may add new features. Instead, parse errors
- - result in a Matcher that will always succeed. -}
-makeMatcher :: GroupMap -> M.Map UUID RemoteConfig -> UUID -> PreferredContentExpression -> FileMatcher
-makeMatcher groupmap configmap u expr
-	| expr == "standard" = standardMatcher groupmap configmap u
-	| null (lefts tokens) = Utility.Matcher.generate $ rights tokens
-	| otherwise = matchAll
+ - versions of git-annex may add new features. -}
+makeMatcher
+	:: GroupMap
+	-> M.Map UUID RemoteConfig
+	-> M.Map Group PreferredContentExpression
+	-> UUID
+	-> PreferredContentExpression
+	-> FileMatcher Annex
+makeMatcher groupmap configmap groupwantedmap u = go True True
   where
-	tokens = exprParser groupmap configmap (Just u) expr
+	go expandstandard expandgroupwanted expr
+		| null (lefts tokens) = generate $ rights tokens
+		| otherwise = unknownMatcher u
+	  where
+		tokens = exprParser matchstandard matchgroupwanted groupmap configmap (Just u) expr
+		matchstandard
+			| expandstandard = maybe (unknownMatcher u) (go False False)
+				(standardPreferredContent <$> getStandardGroup mygroups)
+			| otherwise = unknownMatcher u
+		matchgroupwanted
+			| expandgroupwanted = maybe (unknownMatcher u) (go True False)
+				(groupwanted mygroups)
+			| otherwise = unknownMatcher u
+		mygroups = fromMaybe S.empty (u `M.lookup` groupsByUUID groupmap)
+		groupwanted s = case M.elems $ M.filterWithKey (\k _ -> S.member k s) groupwantedmap of
+			[pc] -> Just pc
+			_ -> Nothing
 
-{- Standard matchers are pre-defined for some groups. If none is defined,
- - or a repository is in multiple groups with standard matchers, match all. -}
-standardMatcher :: GroupMap -> M.Map UUID RemoteConfig -> UUID -> FileMatcher
-standardMatcher groupmap configmap u =
-	maybe matchAll (makeMatcher groupmap configmap u . preferredContent) $
-		getStandardGroup =<< u `M.lookup` groupsByUUID groupmap
+{- When a preferred content expression cannot be parsed, but is already
+ - in the log (eg, put there by a newer version of git-annex),
+ - the fallback behavior is to match only files that are currently present.
+ -
+ - This avoid unwanted/expensive changes to the content, until the problem
+ - is resolved. -}
+unknownMatcher :: UUID -> FileMatcher Annex
+unknownMatcher u = generate [present]
+  where
+	present = Operation $ matchPresent (Just u)
 
 {- Checks if an expression can be parsed, if not returns Just error -}
 checkPreferredContentExpression :: PreferredContentExpression -> Maybe String
-checkPreferredContentExpression expr
-	| expr == "standard" = Nothing
-	| otherwise = case parsedToMatcher tokens of
-		Left e -> Just e
-		Right _ -> Nothing
+checkPreferredContentExpression expr = case parsedToMatcher tokens of
+	Left e -> Just e
+	Right _ -> Nothing
   where
-	tokens = exprParser emptyGroupMap M.empty Nothing expr
+	tokens = exprParser matchAll matchAll emptyGroupMap M.empty Nothing expr
 
 {- Puts a UUID in a standard group, and sets its preferred content to use
 - the standard expression for that group, unless something is already set. -}
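Two behaviors above are worth calling out: required content is folded into the preferred content map with M.unionWith MOr, so anything required is also treated as preferred; and unparsable expressions degrade to unknownMatcher rather than matching everything. A toy sketch of the OR-union (the Matcher type here is a stand-in loosely modeled on Utility.Matcher; only the MOr constructor name comes from the diff):

    import qualified Data.Map as M

    data Matcher = MatchExpr String | MOr Matcher Matcher
            deriving Show

    main :: IO ()
    main = do
            let preferred = M.fromList [("repoA", MatchExpr "include=*.mp3")]
                required  = M.fromList [("repoA", MatchExpr "include=*.iso")]
            -- Required content is implicitly also preferred content, so OR:
            print (M.unionWith MOr preferred required)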
@@ -1,6 +1,6 @@
 {- unparsed preferred content expressions
 -
- - Copyright 2012 Joey Hess <joey@kitenet.net>
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
@@ -15,17 +15,48 @@ import qualified Annex.Branch
 import qualified Annex
 import Logs
 import Logs.UUIDBased
+import Logs.MapLog
 import Types.StandardGroups
+import Types.Group
 
 {- Changes the preferred content configuration of a remote. -}
 preferredContentSet :: UUID -> PreferredContentExpression -> Annex ()
-preferredContentSet uuid@(UUID _) val = do
+preferredContentSet = setLog preferredContentLog
+
+requiredContentSet :: UUID -> PreferredContentExpression -> Annex ()
+requiredContentSet = setLog requiredContentLog
+
+setLog :: FilePath -> UUID -> PreferredContentExpression -> Annex ()
+setLog logfile uuid@(UUID _) val = do
 	ts <- liftIO getPOSIXTime
-	Annex.Branch.change preferredContentLog $
-		showLog id . changeLog ts uuid val . parseLog Just
+	Annex.Branch.change logfile $
+		showLog id
+			. changeLog ts uuid val
+			. parseLog Just
+	Annex.changeState $ \s -> s
+		{ Annex.preferredcontentmap = Nothing
+		, Annex.requiredcontentmap = Nothing
+		}
+setLog _ NoUUID _ = error "unknown UUID; cannot modify"
+
+{- Changes the preferred content configuration of a group. -}
+groupPreferredContentSet :: Group -> PreferredContentExpression -> Annex ()
+groupPreferredContentSet g val = do
+	ts <- liftIO getPOSIXTime
+	Annex.Branch.change groupPreferredContentLog $
+		showMapLog id id
+			. changeMapLog ts g val
+			. parseMapLog Just Just
 	Annex.changeState $ \s -> s { Annex.preferredcontentmap = Nothing }
-preferredContentSet NoUUID _ = error "unknown UUID; cannot modify"
 
 preferredContentMapRaw :: Annex (M.Map UUID PreferredContentExpression)
 preferredContentMapRaw = simpleMap . parseLog Just
 	<$> Annex.Branch.get preferredContentLog
+
+requiredContentMapRaw :: Annex (M.Map UUID PreferredContentExpression)
+requiredContentMapRaw = simpleMap . parseLog Just
+	<$> Annex.Branch.get requiredContentLog
+
+groupPreferredContentMapRaw :: Annex (M.Map Group PreferredContentExpression)
+groupPreferredContentMapRaw = simpleMap . parseMapLog Just Just
+	<$> Annex.Branch.get groupPreferredContentLog
102 Logs/Transfer.hs

@@ -88,108 +88,6 @@ percentComplete :: Transfer -> TransferInfo -> Maybe Percentage
 percentComplete (Transfer { transferKey = key }) info =
 	percentage <$> keySize key <*> Just (fromMaybe 0 $ bytesComplete info)
 
-type RetryDecider = TransferInfo -> TransferInfo -> Bool
-
-noRetry :: RetryDecider
-noRetry _ _ = False
-
-{- Retries a transfer when it fails, as long as the failed transfer managed
- - to send some data. -}
-forwardRetry :: RetryDecider
-forwardRetry old new = bytesComplete old < bytesComplete new
-
-upload :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
-upload u key = runTransfer (Transfer Upload u key)
-
-download :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
-download u key = runTransfer (Transfer Download u key)
-
-{- Runs a transfer action. Creates and locks the lock file while the
- - action is running, and stores info in the transfer information
- - file.
- -
- - If the transfer action returns False, the transfer info is
- - left in the failedTransferDir.
- -
- - If the transfer is already in progress, returns False.
- -
- - An upload can be run from a read-only filesystem, and in this case
- - no transfer information or lock file is used.
- -}
-runTransfer :: Transfer -> Maybe FilePath -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
-runTransfer t file shouldretry a = do
-	info <- liftIO $ startTransferInfo file
-	(meter, tfile, metervar) <- mkProgressUpdater t info
-	mode <- annexFileMode
-	(fd, inprogress) <- liftIO $ prep tfile mode info
-	if inprogress
-		then do
-			showNote "transfer already in progress"
-			return False
-		else do
-			ok <- retry info metervar $
-				bracketIO (return fd) (cleanup tfile) (const $ a meter)
-			unless ok $ recordFailedTransfer t info
-			return ok
-  where
-#ifndef mingw32_HOST_OS
-	prep tfile mode info = do
-		mfd <- catchMaybeIO $
-			openFd (transferLockFile tfile) ReadWrite (Just mode)
-				defaultFileFlags { trunc = True }
-		case mfd of
-			Nothing -> return (Nothing, False)
-			Just fd -> do
-				locked <- catchMaybeIO $
-					setLock fd (WriteLock, AbsoluteSeek, 0, 0)
-				if isNothing locked
-					then return (Nothing, True)
-					else do
-						void $ tryIO $ writeTransferInfoFile info tfile
-						return (mfd, False)
-#else
-	prep tfile _mode info = do
-		v <- catchMaybeIO $ lockExclusive (transferLockFile tfile)
-		case v of
-			Nothing -> return (Nothing, False)
-			Just Nothing -> return (Nothing, True)
-			Just (Just lockhandle) -> do
-				void $ tryIO $ writeTransferInfoFile info tfile
-				return (Just lockhandle, False)
-#endif
-	cleanup _ Nothing = noop
-	cleanup tfile (Just lockhandle) = do
-		void $ tryIO $ removeFile tfile
-#ifndef mingw32_HOST_OS
-		void $ tryIO $ removeFile $ transferLockFile tfile
-		closeFd lockhandle
-#else
-		{- Windows cannot delete the lockfile until the lock
-		 - is closed. So it's possible to race with another
-		 - process that takes the lock before it's removed,
-		 - so ignore failure to remove.
-		 -}
-		dropLock lockhandle
-		void $ tryIO $ removeFile $ transferLockFile tfile
-#endif
-	retry oldinfo metervar run = do
-		v <- tryAnnex run
-		case v of
-			Right b -> return b
-			Left _ -> do
-				b <- getbytescomplete metervar
-				let newinfo = oldinfo { bytesComplete = Just b }
-				if shouldretry oldinfo newinfo
-					then retry newinfo metervar run
-					else return False
-	getbytescomplete metervar
-		| transferDirection t == Upload =
-			liftIO $ readMVar metervar
-		| otherwise = do
-			f <- fromRepo $ gitAnnexTmpObjectLocation (transferKey t)
-			liftIO $ catchDefaultIO 0 $
-				fromIntegral . fileSize <$> getFileStatus f
-
 {- Generates a callback that can be called as transfer progresses to update
 - the transfer info file. Also returns the file it'll be updating, and a
 - MVar that can be used to read the number of bytesComplete. -}
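This block is dropped from Logs/Transfer.hs; judging by the new Annex.Transfer import and the runTransfer calls in Remote/Git.hs below, it appears to move there rather than go away. Its retry policy is worth noting: forwardRetry retries only while the failed attempt moved more bytes than the one before it. A standalone sketch of that test, with Maybe Integer standing in for TransferInfo's bytesComplete:

    -- Retry only when some forward progress was made, as in the removed
    -- forwardRetry above: Nothing < Just n, and Just a < Just b iff a < b.
    forwardRetry :: Maybe Integer -> Maybe Integer -> Bool
    forwardRetry old new = old < new

    main :: IO ()
    main = do
            print $ forwardRetry (Just 0) (Just 65536)     -- True: retry
            print $ forwardRetry (Just 65536) (Just 65536) -- False: stalled
            print $ forwardRetry Nothing Nothing           -- False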
@@ -26,9 +26,6 @@ module Logs.UUIDBased (
 	changeLog,
 	addLog,
 	simpleMap,
-
-	prop_TimeStamp_sane,
-	prop_addLog_sane,
 ) where
 
 import qualified Data.Map as M
@@ -38,21 +35,11 @@ import System.Locale
 
 import Common
 import Types.UUID
+import Logs.MapLog
 
-data TimeStamp = Unknown | Date POSIXTime
-	deriving (Eq, Ord, Show)
-
-data LogEntry a = LogEntry
-	{ changed :: TimeStamp
-	, value :: a
-	} deriving (Eq, Show)
-
-type Log a = M.Map UUID (LogEntry a)
-
-tskey :: String
-tskey = "timestamp="
+type Log v = MapLog UUID v
 
-showLog :: (a -> String) -> Log a -> String
+showLog :: (v -> String) -> Log v -> String
 showLog shower = unlines . map showpair . M.toList
   where
 	showpair (k, LogEntry (Date p) v) =
@@ -60,14 +47,6 @@ showLog shower = unlines . map showpair . M.toList
 	showpair (k, LogEntry Unknown v) =
 		unwords [fromUUID k, shower v]
 
-showLogNew :: (a -> String) -> Log a -> String
-showLogNew shower = unlines . map showpair . M.toList
-  where
-	showpair (k, LogEntry (Date p) v) =
-		unwords [show p, fromUUID k, shower v]
-	showpair (k, LogEntry Unknown v) =
-		unwords ["0", fromUUID k, shower v]
-
 parseLog :: (String -> Maybe a) -> String -> Log a
 parseLog = parseLogWithUUID . const
 
@@ -98,45 +77,17 @@ parseLogWithUUID parser = M.fromListWith best . mapMaybe parse . lines
 		Nothing -> Unknown
 		Just d -> Date $ utcTimeToPOSIXSeconds d
 
-parseLogNew :: (String -> Maybe a) -> String -> Log a
-parseLogNew parser = M.fromListWith best . mapMaybe parse . lines
-  where
-	parse line = do
-		let (ts, rest) = splitword line
-		    (u, v) = splitword rest
-		date <- Date . utcTimeToPOSIXSeconds <$> parseTime defaultTimeLocale "%s%Qs" ts
-		val <- parser v
-		Just (toUUID u, LogEntry date val)
-	splitword = separate (== ' ')
-
-changeLog :: POSIXTime -> UUID -> a -> Log a -> Log a
-changeLog t u v = M.insert u $ LogEntry (Date t) v
-
-{- Only add an LogEntry if it's newer (or at least as new as) than any
- - existing LogEntry for a UUID. -}
-addLog :: UUID -> LogEntry a -> Log a -> Log a
-addLog = M.insertWith' best
-
-{- Converts a Log into a simple Map without the timestamp information.
- - This is a one-way trip, but useful for code that never needs to change
- - the log. -}
-simpleMap :: Log a -> M.Map UUID a
-simpleMap = M.map value
-
-best :: LogEntry a -> LogEntry a -> LogEntry a
-best new old
-	| changed old > changed new = old
-	| otherwise = new
-
--- Unknown is oldest.
-prop_TimeStamp_sane :: Bool
-prop_TimeStamp_sane = Unknown < Date 1
-
-prop_addLog_sane :: Bool
-prop_addLog_sane = newWins && newestWins
-  where
-	newWins = addLog (UUID "foo") (LogEntry (Date 1) "new") l == l2
-	newestWins = addLog (UUID "foo") (LogEntry (Date 1) "newest") l2 /= l2
-
-	l = M.fromList [(UUID "foo", LogEntry (Date 0) "old")]
-	l2 = M.fromList [(UUID "foo", LogEntry (Date 1) "new")]
+showLogNew :: (v -> String) -> Log v -> String
+showLogNew = showMapLog fromUUID
+
+parseLogNew :: (String -> Maybe v) -> String -> Log v
+parseLogNew = parseMapLog (Just . toUUID)
+
+changeLog :: POSIXTime -> UUID -> v -> Log v -> Log v
+changeLog = changeMapLog
+
+addLog :: UUID -> LogEntry v -> Log v -> Log v
+addLog = addMapLog
+
+tskey :: String
+tskey = "timestamp="
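After this refactor the UUID-keyed Log is a thin specialization of MapLog, and the new-format helpers are one-liners that plug UUID (de)serializers into the generic functions. A minimal sketch of the delegation pattern, with a simplified MapLog (no timestamps) so it stays self-contained:

    import qualified Data.Map as M

    newtype UUID = UUID String deriving (Eq, Ord, Show)

    type MapLog f v = M.Map f v   -- simplified stand-in, no timestamps
    type Log v = MapLog UUID v    -- the specialization made in the diff

    showMapLog :: (f -> String) -> (v -> String) -> MapLog f v -> String
    showMapLog fshow vshow =
            unlines . map (\(f, v) -> unwords [fshow f, vshow v]) . M.toList

    -- The UUID-based wrapper just fixes the field serializer:
    showLogNew :: (v -> String) -> Log v -> String
    showLogNew = showMapLog (\(UUID u) -> u)

    main :: IO ()
    main = putStr $ showLogNew id (M.fromList [(UUID "e0078c38", "semitrusted")])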
@@ -67,7 +67,7 @@ updateUnusedLog prefix m = do
 writeUnusedLog :: FilePath -> UnusedLog -> Annex ()
 writeUnusedLog prefix l = do
 	logfile <- fromRepo $ gitAnnexUnusedLog prefix
-	liftIO $ viaTmp writeFile logfile $ unlines $ map format $ M.toList l
+	liftIO $ viaTmp writeFileAnyEncoding logfile $ unlines $ map format $ M.toList l
   where
 	format (k, (i, Just t)) = show i ++ " " ++ key2file k ++ " " ++ show t
 	format (k, (i, Nothing)) = show i ++ " " ++ key2file k
@@ -77,7 +77,7 @@ readUnusedLog prefix = do
 	f <- fromRepo $ gitAnnexUnusedLog prefix
 	ifM (liftIO $ doesFileExist f)
 		( M.fromList . mapMaybe parse . lines
-			<$> liftIO (readFile f)
+			<$> liftIO (readFileStrictAnyEncoding f)
 		, return M.empty
 		)
   where
@@ -99,7 +99,6 @@ dateUnusedLog prefix = do
 	f <- fromRepo $ gitAnnexUnusedLog prefix
 	liftIO $ catchMaybeIO $ getModificationTime f
 #else
-#warning foo
 -- old ghc's getModificationTime returned a ClockTime
 dateUnusedLog _prefix = return Nothing
 #endif
2 Makefile

@@ -119,7 +119,7 @@ linuxstandalone-nobuild: Build/Standalone Build/LinuxMkLibs
 	strip "$(LINUXSTANDALONE_DEST)/bin/git-annex"
 	ln -sf git-annex "$(LINUXSTANDALONE_DEST)/bin/git-annex-shell"
 	zcat standalone/licences.gz > $(LINUXSTANDALONE_DEST)/LICENSE
-	cp doc/favicon.png doc/logo.svg $(LINUXSTANDALONE_DEST)
+	cp doc/logo_16x16.png doc/logo.svg $(LINUXSTANDALONE_DEST)
 
 	./Build/Standalone "$(LINUXSTANDALONE_DEST)"
 
Remote.hs (34 changed lines)
@@ -37,6 +37,7 @@ module Remote (
 	keyPossibilities,
 	keyPossibilitiesTrusted,
 	nameToUUID,
+	nameToUUID',
 	showTriedRemotes,
 	showLocations,
 	forceTrust,
@@ -48,7 +49,6 @@ module Remote (
 import qualified Data.Map as M
 import Text.JSON
 import Text.JSON.Generic
-import Data.Tuple
 import Data.Ord
 
 import Common.Annex
@@ -121,23 +121,25 @@ noRemoteUUIDMsg r = "cannot determine uuid for " ++ name r
 - and returns its UUID. Finds even repositories that are not
 - configured in .git/config. -}
 nameToUUID :: RemoteName -> Annex UUID
-nameToUUID "." = getUUID -- special case for current repo
-nameToUUID "here" = getUUID
-nameToUUID "" = error "no remote specified"
-nameToUUID n = byName' n >>= go
+nameToUUID = either error return <=< nameToUUID'
+
+nameToUUID' :: RemoteName -> Annex (Either String UUID)
+nameToUUID' "." = Right <$> getUUID -- special case for current repo
+nameToUUID' "here" = Right <$> getUUID
+nameToUUID' n = byName' n >>= go
   where
-	go (Right r) = case uuid r of
-		NoUUID -> error $ noRemoteUUIDMsg r
-		u -> return u
-	go (Left e) = fromMaybe (error e) <$> bydescription
-	bydescription = do
+	go (Right r) = return $ case uuid r of
+		NoUUID -> Left $ noRemoteUUIDMsg r
+		u -> Right u
+	go (Left e) = do
 		m <- uuidMap
-		case M.lookup n $ transform swap m of
-			Just u -> return $ Just u
-			Nothing -> return $ byuuid m
-	byuuid m = M.lookup (toUUID n) $ transform double m
-	transform a = M.fromList . map a . M.toList
-	double (a, _) = (a, a)
+		return $ case M.keys (M.filter (== n) m) of
+			[u] -> Right u
+			[] -> let u = toUUID n
+				in case M.keys (M.filterWithKey (\k _ -> k == u) m) of
+					[] -> Left e
+					_ -> Right u
+			_us -> Left "Found multiple repositories with that description"
 
 {- Pretty-prints a list of UUIDs of remotes, for human display.
 -
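The nameToUUID/nameToUUID' split above moves the failure case into an Either, so callers can recover instead of erroring out. A minimal sketch of a caller, assuming it runs in the Annex monad (the helper name is hypothetical, not from the patch):

[[!format haskell """
-- Treat an ambiguous or unknown remote name as Nothing rather than
-- an error, using the new nameToUUID'.
resolveRemoteName :: RemoteName -> Annex (Maybe UUID)
resolveRemoteName n = either (const Nothing) Just <$> nameToUUID' n
"""]]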
@@ -11,6 +11,7 @@ import Remote.External.Types
 import qualified Annex
 import Common.Annex
 import Types.Remote
+import Types.CleanupActions
 import qualified Git
 import Config
 import Remote.Helper.Special
@@ -43,7 +44,7 @@ remote = RemoteType {
 gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
 gen r u c gc = do
 	external <- newExternal externaltype u c
-	Annex.addCleanup (fromUUID u) $ stopExternal external
+	Annex.addCleanup (RemoteCleanup u) $ stopExternal external
 	cst <- getCost external r gc
 	avail <- getAvailability external r gc
 	return $ Just $ encryptableRemote c
@@ -24,7 +24,7 @@ import qualified Git.Command
 import qualified Git.GCrypt
 import qualified Annex
 import Logs.Presence
-import Logs.Transfer
+import Annex.Transfer
 import Annex.UUID
 import Annex.Exception
 import qualified Annex.Content
@@ -36,6 +36,7 @@ import Config
 import Config.Cost
 import Annex.Init
 import Types.Key
+import Types.CleanupActions
 import qualified CmdLine.GitAnnexShell.Fields as Fields
 import Logs.Location
 import Utility.Metered
@@ -320,7 +321,7 @@ copyFromRemote' r key file dest
 		case v of
 			Nothing -> return False
 			Just (object, checksuccess) ->
-				upload u key file noRetry
+				runTransfer (Transfer Download u key) file noRetry
 					(rsyncOrCopyFile params object dest)
 					<&&> checksuccess
 	| Git.repoIsSsh (repo r) = feedprogressback $ \feeder -> do
@@ -417,7 +418,7 @@ copyToRemote r key file p
 	( return True
 	, do
 		ensureInitialized
-		download u key file noRetry $ const $
+		runTransfer (Transfer Download u key) file noRetry $ const $
 			Annex.Content.saveState True `after`
 				Annex.Content.getViaTmpChecked (liftIO checksuccessio) key
 					(\d -> rsyncOrCopyFile params object d p)
@@ -510,7 +511,7 @@ rsyncOrCopyFile rsyncparams src dest p =
 commitOnCleanup :: Remote -> Annex a -> Annex a
 commitOnCleanup r a = go `after` a
   where
-	go = Annex.addCleanup (Git.repoLocation $ repo r) cleanup
+	go = Annex.addCleanup (RemoteCleanup $ uuid r) cleanup
 	cleanup
 		| not $ Git.repoIsUrl (repo r) = onLocal r $
 			doQuietSideAction $
@@ -82,7 +82,7 @@ glacierSetup' enabling u c = do
 	unless enabling $
 		genVault fullconfig u
 	gitConfigSpecialRemote u fullconfig "glacier" "true"
-	return (c', u)
+	return (fullconfig, u)
   where
 	remotename = fromJust (M.lookup "name" c)
 	defvault = remotename ++ "-" ++ fromUUID u
@@ -225,7 +225,8 @@ glacierParams :: RemoteConfig -> [CommandParam] -> [CommandParam]
 glacierParams c params = datacenter:params
   where
 	datacenter = Param $ "--region=" ++
-		fromJust (M.lookup "datacenter" c)
+		fromMaybe (error "Missing datacenter configuration")
+			(M.lookup "datacenter" c)
 
 glacierEnv :: RemoteConfig -> UUID -> Annex (Maybe [(String, String)])
 glacierEnv c u = go =<< getRemoteCredPairFor "glacier" c creds
@@ -239,7 +240,8 @@ glacierEnv c u = go =<< getRemoteCredPairFor "glacier" c creds
 	(uk, pk) = credPairEnvironment creds
 
 getVault :: RemoteConfig -> Vault
-getVault = fromJust . M.lookup "vault"
+getVault = fromMaybe (error "Missing vault configuration")
+	. M.lookup "vault"
 
 archive :: Remote -> Key -> Archive
 archive r k = fileprefix ++ key2file k
@@ -13,6 +13,7 @@ import qualified Data.Map as M
 
 import Common.Annex
 import Types.Remote
+import Types.CleanupActions
 import qualified Annex
 import Annex.LockPool
 #ifndef mingw32_HOST_OS
@@ -74,7 +75,7 @@ runHooks r starthook stophook a = do
 	-- So, requiring idempotency is the right approach.
 	run starthook
 
-	Annex.addCleanup (remoteid ++ "-stop-command") $ runstop lck
+	Annex.addCleanup (StopHook $ uuid r) $ runstop lck
 	runstop lck = do
 	-- Drop any shared lock we have, and take an
 	-- exclusive lock, without blocking. If the lock
@@ -28,6 +28,7 @@ import Annex.UUID
 import Annex.Ssh
 import Remote.Helper.Special
 import Remote.Helper.Encryptable
+import Remote.Rsync.RsyncUrl
 import Crypto
 import Utility.Rsync
 import Utility.CopyFile
@@ -40,16 +41,6 @@ import Types.Creds
 import qualified Data.ByteString.Lazy as L
 import qualified Data.Map as M
 
-type RsyncUrl = String
-
-data RsyncOpts = RsyncOpts
-	{ rsyncUrl :: RsyncUrl
-	, rsyncOptions :: [CommandParam]
-	, rsyncUploadOptions :: [CommandParam]
-	, rsyncDownloadOptions :: [CommandParam]
-	, rsyncShellEscape :: Bool
-	}
-
 remote :: RemoteType
 remote = RemoteType {
 	typename = "rsync",
@@ -148,17 +139,6 @@ rsyncSetup mu _ c = do
 	gitConfigSpecialRemote u c' "rsyncurl" url
 	return (c', u)
 
-rsyncEscape :: RsyncOpts -> String -> String
-rsyncEscape o s
-	| rsyncShellEscape o && rsyncUrlIsShell (rsyncUrl o) = shellEscape s
-	| otherwise = s
-
-rsyncUrls :: RsyncOpts -> Key -> [String]
-rsyncUrls o k = map use annexHashes
-  where
-	use h = rsyncUrl o </> h k </> rsyncEscape o (f </> f)
-	f = keyFile k
-
 store :: RsyncOpts -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
 store o k _f p = sendAnnex k (void $ remove o k) $ rsyncSend o p k False
Remote/Rsync/RsyncUrl.hs (new file, 46 lines)
@@ -0,0 +1,46 @@
+{- Rsync urls.
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE CPP #-}
+
+module Remote.Rsync.RsyncUrl where
+
+import Types
+import Locations
+import Utility.Rsync
+import Utility.SafeCommand
+
+import System.FilePath.Posix
+#ifdef mingw32_HOST_OS
+import Data.String.Utils
+#endif
+
+type RsyncUrl = String
+
+data RsyncOpts = RsyncOpts
+	{ rsyncUrl :: RsyncUrl
+	, rsyncOptions :: [CommandParam]
+	, rsyncUploadOptions :: [CommandParam]
+	, rsyncDownloadOptions :: [CommandParam]
+	, rsyncShellEscape :: Bool
+	}
+
+rsyncEscape :: RsyncOpts -> RsyncUrl -> RsyncUrl
+rsyncEscape o u
+	| rsyncShellEscape o && rsyncUrlIsShell (rsyncUrl o) = shellEscape u
+	| otherwise = u
+
+rsyncUrls :: RsyncOpts -> Key -> [RsyncUrl]
+rsyncUrls o k = map use annexHashes
+  where
+	use h = rsyncUrl o </> hash h </> rsyncEscape o (f </> f)
+	f = keyFile k
+#ifndef mingw32_HOST_OS
+	hash h = h k
+#else
+	hash h = replace "\\" "/" (h k)
+#endif
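Since rsyncUrls yields one candidate url per annexHashes directory layout, a consumer typically probes them in order until one hits. A minimal sketch under that assumption (the probe action is a hypothetical stand-in, e.g. for an rsync --list-only existence check):

[[!format haskell """
-- Try each candidate url for a key until one succeeds.
firstWorkingUrl :: (RsyncUrl -> IO Bool) -> RsyncOpts -> Key -> IO (Maybe RsyncUrl)
firstWorkingUrl probe o k = go (rsyncUrls o k)
  where
	go [] = return Nothing
	go (u:us) = do
		ok <- probe u
		if ok then return (Just u) else go us
"""]]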
@@ -216,7 +216,7 @@ readTahoe hdl command params = withTahoeConfigDir hdl $ \configdir ->
 
 tahoeParams :: TahoeConfigDir -> String -> [CommandParam] -> [CommandParam]
 tahoeParams configdir command params =
-	Param command : Param "-d" : File configdir : params
+	Param "-d" : File configdir : Param command : params
 
 storeCapability :: UUID -> Key -> Capability -> Annex ()
 storeCapability u k cap = setRemoteState u k cap
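The reordering matters because tahoe-lafs 1.10 only accepts the -d global option before the subcommand (per the changelog entry below). A small sketch of the resulting parameter list (the config path is hypothetical):

[[!format haskell """
exampleParams :: [CommandParam]
exampleParams = tahoeParams "/home/me/.tahoe" "put" []
-- == [Param "-d", File "/home/me/.tahoe", Param "put"]
-- (previously: [Param "put", Param "-d", File "/home/me/.tahoe"])
"""]]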
Test.hs (27 changed lines)
@@ -17,12 +17,14 @@ import Test.Tasty.Ingredients.Rerun
 import Data.Monoid
 
 import Options.Applicative hiding (command)
+#if MIN_VERSION_optparse_applicative(0,8,0)
+import qualified Options.Applicative.Types as Opt
+#endif
 import Control.Exception.Extensible
 import qualified Data.Map as M
 import System.IO.HVFS (SystemFS(..))
 import qualified Text.JSON
 import System.Path
-import qualified Data.ByteString.Lazy as L
 
 import Common
 
@@ -43,7 +45,7 @@ import qualified Types.Backend
 import qualified Types.TrustLevel
 import qualified Types
 import qualified Logs
-import qualified Logs.UUIDBased
+import qualified Logs.MapLog
 import qualified Logs.Trust
 import qualified Logs.Remote
 import qualified Logs.Unused
@@ -104,8 +106,7 @@ main ps = do
 	-- parameters is "test".
 	let pinfo = info (helper <*> suiteOptionParser ingredients tests)
 		( fullDesc <> header "Builtin test suite" )
-	opts <- either (\f -> error =<< errMessage f "git-annex test") return $
-		execParserPure (prefs idm) pinfo ps
+	opts <- parseOpts (prefs idm) pinfo ps
 	case tryIngredients ingredients opts tests of
 		Nothing -> error "No tests found!?"
 		Just act -> ifM act
@@ -115,6 +116,18 @@ main ps = do
 		putStrLn " with utilities, such as git, installed on this system.)"
 		exitFailure
 		)
+  where
+	progdesc = "git-annex test"
+	parseOpts pprefs pinfo args =
+#if MIN_VERSION_optparse_applicative(0,8,0)
+		pure $ case execParserPure pprefs pinfo args of
+			Opt.Success v -> v
+			Opt.Failure f -> error $ fst $ Opt.execFailure f progdesc
+			Opt.CompletionInvoked _ -> error "completion not supported"
+#else
+		either (error <=< flip errMessage progdesc) return $
+			execParserPure pprefs pinfo args
+#endif
 
 ingredients :: [Ingredient]
 ingredients =
@@ -140,8 +153,8 @@ properties = localOption (QuickCheckTests 1000) $ testGroup "QuickCheck"
 	, testProperty "prop_cost_sane" Config.Cost.prop_cost_sane
 	, testProperty "prop_matcher_sane" Utility.Matcher.prop_matcher_sane
 	, testProperty "prop_HmacSha1WithCipher_sane" Crypto.prop_HmacSha1WithCipher_sane
-	, testProperty "prop_TimeStamp_sane" Logs.UUIDBased.prop_TimeStamp_sane
-	, testProperty "prop_addLog_sane" Logs.UUIDBased.prop_addLog_sane
+	, testProperty "prop_TimeStamp_sane" Logs.MapLog.prop_TimeStamp_sane
+	, testProperty "prop_addMapLog_sane" Logs.MapLog.prop_addMapLog_sane
 	, testProperty "prop_verifiable_sane" Utility.Verifiable.prop_verifiable_sane
 	, testProperty "prop_segment_regressionTest" Utility.Misc.prop_segment_regressionTest
 	, testProperty "prop_read_write_transferinfo" Logs.Transfer.prop_read_write_transferinfo
@@ -1272,7 +1285,7 @@ test_add_subdirs env = intmpclonerepo env $ do
 	{- Regression test for Windows bug where symlinks were not
 	 - calculated correctly for files in subdirs. -}
 	git_annex env "sync" [] @? "sync failed"
-	l <- annexeval $ encodeW8 . L.unpack <$> Annex.CatFile.catObject (Git.Types.Ref "HEAD:dir/foo")
+	l <- annexeval $ decodeBS <$> Annex.CatFile.catObject (Git.Types.Ref "HEAD:dir/foo")
 	"../.git/annex/" `isPrefixOf` l @? ("symlink from subdir to .git/annex is wrong: " ++ l)
 
 	createDirectory "dir2"
Types/CleanupActions.hs (new file, 17 lines)
@@ -0,0 +1,17 @@
+{- Enumeration of cleanup actions
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Types.CleanupActions where
+
+import Types.UUID
+
+data CleanupAction
+	= RemoteCleanup UUID
+	| StopHook UUID
+	| FsckCleanup
+	| SshCachingCleanup
+	deriving (Eq, Ord)
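Using a dedicated key type instead of ad-hoc strings means the cleanup map in AnnexState cannot collide on string formatting, and the map holds at most one action per key. A minimal sketch mirroring the Remote.External change above (the wrapper name is hypothetical):

[[!format haskell """
-- Register a per-remote cleanup; a second call with the same UUID
-- maps to the same CleanupAction key rather than a near-duplicate string.
registerRemoteCleanup :: UUID -> Annex () -> Annex ()
registerRemoteCleanup u = Annex.addCleanup (RemoteCleanup u)
"""]]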
Types/DesktopNotify.hs (new file, 27 lines)
@@ -0,0 +1,27 @@
+{- git-annex DesktopNotify type
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Types.DesktopNotify where
+
+import Data.Monoid
+
+data DesktopNotify = DesktopNotify
+	{ notifyStart :: Bool
+	, notifyFinish :: Bool
+	}
+	deriving (Show)
+
+instance Monoid DesktopNotify where
+	mempty = DesktopNotify False False
+	mappend (DesktopNotify s1 f1) (DesktopNotify s2 f2) =
+		DesktopNotify (s1 || s2) (f1 || f2)
+
+mkNotifyStart :: DesktopNotify
+mkNotifyStart = DesktopNotify True False
+
+mkNotifyFinish :: DesktopNotify
+mkNotifyFinish = DesktopNotify False True
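The Monoid instance exists so the two command line switches (--notify-start and --notify-finish, per the changelog below) can simply be combined, each OR-ing in the field it cares about. A small sketch:

[[!format haskell """
-- Combining the two flag values yields both notifications enabled:
both :: DesktopNotify
both = mkNotifyStart `mappend` mkNotifyFinish
-- both == DesktopNotify True True
"""]]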
@@ -7,7 +7,12 @@
 
 module Types.FileMatcher where
 
+import Types.UUID (UUID)
 import Types.Key (Key)
+import Utility.Matcher (Matcher, Token)
+
+import qualified Data.Map as M
+import qualified Data.Set as S
 
 data MatchInfo
 	= MatchingFile FileInfo
@@ -17,3 +22,19 @@ data FileInfo = FileInfo
 	{ relFile :: FilePath -- may be relative to cwd
 	, matchFile :: FilePath -- filepath to match on; may be relative to top
 	}
+
+type FileMatcherMap a = M.Map UUID (Utility.Matcher.Matcher (S.Set UUID -> MatchInfo -> a Bool))
+
+type MkLimit a = String -> Either String (MatchFiles a)
+
+type AssumeNotPresent = S.Set UUID
+
+type MatchFiles a = AssumeNotPresent -> MatchInfo -> a Bool
+
+type FileMatcher a = Matcher (MatchFiles a)
+
+-- This is a matcher that can have tokens added to it while it's being
+-- built, and once complete is compiled to an unchangable matcher.
+data ExpandableMatcher a
+	= BuildingMatcher [Token (MatchInfo -> a Bool)]
+	| CompleteMatcher (Matcher (MatchInfo -> a Bool))
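A minimal sketch of the ExpandableMatcher life cycle described in the comment above, assuming Utility.Matcher's generate is in scope (the function name here is hypothetical):

[[!format haskell """
-- Freeze a matcher once all limit tokens have been added; a matcher
-- that is already complete passes through unchanged.
finishMatcher :: ExpandableMatcher a -> ExpandableMatcher a
finishMatcher (BuildingMatcher ts) = CompleteMatcher (generate ts)
finishMatcher m = m
"""]]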
@@ -1,20 +0,0 @@
-{- types for limits
- -
- - Copyright 2013 Joey Hess <joey@kitenet.net>
- -
- - Licensed under the GNU GPL version 3 or higher.
- -}
-
-{-# LANGUAGE CPP #-}
-
-module Types.Limit where
-
-import Common.Annex
-import Types.FileMatcher
-
-import qualified Data.Set as S
-
-type MkLimit = String -> Either String MatchFiles
-
-type AssumeNotPresent = S.Set UUID
-type MatchFiles = AssumeNotPresent -> MatchInfo -> Annex Bool
@@ -28,6 +28,7 @@ module Types.MetaData (
 	emptyMetaData,
 	updateMetaData,
 	unionMetaData,
+	combineMetaData,
 	differenceMetaData,
 	isSet,
 	currentMetaData,
@@ -140,7 +141,7 @@ toMetaField f
 - that would break views.
 -
 - So, require they have an alphanumeric first letter, with the remainder
- - being either alphanumeric or a small set of shitelisted common punctuation.
+ - being either alphanumeric or a small set of whitelisted common punctuation.
 -}
 legalField :: String -> Bool
 legalField [] = False
@@ -188,6 +189,9 @@ unionMetaData :: MetaData -> MetaData -> MetaData
 unionMetaData (MetaData old) (MetaData new) = MetaData $
 	M.unionWith S.union new old
 
+combineMetaData :: [MetaData] -> MetaData
+combineMetaData = foldl' unionMetaData emptyMetaData
+
 differenceMetaData :: MetaData -> MetaData -> MetaData
 differenceMetaData (MetaData m) (MetaData excludem) = MetaData $
 	M.differenceWith diff m excludem
@@ -260,7 +264,9 @@ parseMetaData p = (,)
 instance Arbitrary MetaData where
 	arbitrary = do
 		size <- arbitrarySizedBoundedIntegral `suchThat` (< 500)
-		MetaData . M.fromList <$> vector size
+		MetaData . M.filterWithKey legal . M.fromList <$> vector size
+	  where
+		legal k _v = legalField $ fromMetaField k
 
 instance Arbitrary MetaValue where
 	arbitrary = MetaValue <$> arbitrary <*> arbitrary
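Because unionMetaData takes the per-field union of value sets, combineMetaData is insensitive to the order of its inputs. A property-style sketch of that, assuming MetaData's Eq instance (not shown in this hunk):

[[!format haskell """
-- Folding with unionMetaData accumulates every field's values,
-- so combining in either order gives the same metadata.
prop_combine_commutes :: MetaData -> MetaData -> Bool
prop_combine_commutes a b =
	combineMetaData [a, b] == combineMetaData [b, a]
"""]]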
@@ -8,6 +8,7 @@
 module Types.StandardGroups where
 
 import Types.Remote (RemoteConfig)
+import Types.Group
 
 import qualified Data.Map as M
 import Data.Maybe
@@ -27,7 +28,7 @@ data StandardGroup
 	| UnwantedGroup
 	deriving (Eq, Ord, Enum, Bounded, Show)
 
-fromStandardGroup :: StandardGroup -> String
+fromStandardGroup :: StandardGroup -> Group
 fromStandardGroup ClientGroup = "client"
 fromStandardGroup TransferGroup = "transfer"
 fromStandardGroup BackupGroup = "backup"
@@ -39,7 +40,7 @@ fromStandardGroup ManualGroup = "manual"
 fromStandardGroup PublicGroup = "public"
 fromStandardGroup UnwantedGroup = "unwanted"
 
-toStandardGroup :: String -> Maybe StandardGroup
+toStandardGroup :: Group -> Maybe StandardGroup
 toStandardGroup "client" = Just ClientGroup
 toStandardGroup "transfer" = Just TransferGroup
 toStandardGroup "backup" = Just BackupGroup
@@ -77,21 +78,21 @@ specialRemoteOnly PublicGroup = True
 specialRemoteOnly _ = False
 
 {- See doc/preferred_content.mdwn for explanations of these expressions. -}
-preferredContent :: StandardGroup -> PreferredContentExpression
-preferredContent ClientGroup = lastResort $
+standardPreferredContent :: StandardGroup -> PreferredContentExpression
+standardPreferredContent ClientGroup = lastResort $
 	"((exclude=*/archive/* and exclude=archive/*) or (" ++ notArchived ++ ")) and not unused"
-preferredContent TransferGroup = lastResort $
-	"not (inallgroup=client and copies=client:2) and (" ++ preferredContent ClientGroup ++ ")"
-preferredContent BackupGroup = "include=* or unused"
-preferredContent IncrementalBackupGroup = lastResort
+standardPreferredContent TransferGroup = lastResort $
+	"not (inallgroup=client and copies=client:2) and (" ++ standardPreferredContent ClientGroup ++ ")"
+standardPreferredContent BackupGroup = "include=* or unused"
+standardPreferredContent IncrementalBackupGroup = lastResort
 	"(include=* or unused) and (not copies=incrementalbackup:1)"
-preferredContent SmallArchiveGroup = lastResort $
-	"(include=*/archive/* or include=archive/*) and (" ++ preferredContent FullArchiveGroup ++ ")"
-preferredContent FullArchiveGroup = lastResort notArchived
-preferredContent SourceGroup = "not (copies=1)"
-preferredContent ManualGroup = "present and (" ++ preferredContent ClientGroup ++ ")"
-preferredContent PublicGroup = "inpreferreddir"
-preferredContent UnwantedGroup = "exclude=*"
+standardPreferredContent SmallArchiveGroup = lastResort $
+	"(include=*/archive/* or include=archive/*) and (" ++ standardPreferredContent FullArchiveGroup ++ ")"
+standardPreferredContent FullArchiveGroup = lastResort notArchived
+standardPreferredContent SourceGroup = "not (copies=1)"
+standardPreferredContent ManualGroup = "present and (" ++ standardPreferredContent ClientGroup ++ ")"
+standardPreferredContent PublicGroup = "inpreferreddir"
+standardPreferredContent UnwantedGroup = "exclude=*"
 
 notArchived :: String
 notArchived = "not (copies=archive:1 or copies=smallarchive:1)"
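With Group in the signatures, mapping a user-supplied group name to its standard expression is a two-step lookup. A minimal sketch using the functions above (the helper name is hypothetical):

[[!format haskell """
-- Nothing for a group that is not one of the standard ones.
lookupStandardExpression :: Group -> Maybe PreferredContentExpression
lookupStandardExpression g = standardPreferredContent <$> toStandardGroup g
"""]]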
@@ -9,15 +9,18 @@
 
 module Utility.FileMode where
 
-import Common
+import System.IO
+import Control.Monad
 import Control.Exception (bracket)
 import System.PosixCompat.Types
+import Utility.PosixFiles
 #ifndef mingw32_HOST_OS
 import System.Posix.Files
 #endif
 import Foreign (complement)
 
+import Utility.Exception
+
 {- Applies a conversion function to a file's mode. -}
 modifyFileMode :: FilePath -> (FileMode -> FileMode) -> IO ()
 modifyFileMode f convert = void $ modifyFileMode' f convert
@@ -56,6 +59,12 @@ readModes = [ownerReadMode, groupReadMode, otherReadMode]
 executeModes :: [FileMode]
 executeModes = [ownerExecuteMode, groupExecuteMode, otherExecuteMode]
 
+otherGroupModes :: [FileMode]
+otherGroupModes =
+	[ groupReadMode, otherReadMode
+	, groupWriteMode, otherWriteMode
+	]
+
 {- Removes the write bits from a file. -}
 preventWrite :: FilePath -> IO ()
 preventWrite f = modifyFileMode f $ removeModes writeModes
@@ -99,13 +108,20 @@ noUmask :: FileMode -> IO a -> IO a
 #ifndef mingw32_HOST_OS
 noUmask mode a
 	| mode == stdFileMode = a
-	| otherwise = bracket setup cleanup go
+	| otherwise = withUmask nullFileMode a
+#else
+noUmask _ a = a
+#endif
+
+withUmask :: FileMode -> IO a -> IO a
+#ifndef mingw32_HOST_OS
+withUmask umask a = bracket setup cleanup go
   where
-	setup = setFileCreationMask nullFileMode
+	setup = setFileCreationMask umask
 	cleanup = setFileCreationMask
 	go _ = a
 #else
-noUmask _ a = a
+withUmask _ a = a
 #endif
 
 combineModes :: [FileMode] -> FileMode
@@ -127,14 +143,16 @@ setSticky f = modifyFileMode f $ addModes [stickyMode]
 #endif
 
 {- Writes a file, ensuring that its modes do not allow it to be read
- - by anyone other than the current user, before any content is written.
+ - or written by anyone other than the current user,
+ - before any content is written.
+ -
+ - When possible, this is done using the umask.
 -
 - On a filesystem that does not support file permissions, this is the same
 - as writeFile.
 -}
 writeFileProtected :: FilePath -> String -> IO ()
-writeFileProtected file content = withFile file WriteMode $ \h -> do
-	void $ tryIO $
-		modifyFileMode file $
-			removeModes [groupReadMode, otherReadMode]
-	hPutStr h content
+writeFileProtected file content = withUmask 0o0077 $
+	withFile file WriteMode $ \h -> do
+		void $ tryIO $ modifyFileMode file $ removeModes otherGroupModes
+		hPutStr h content
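The new withUmask generalizes noUmask: any bits can be masked for the duration of an IO action, which is what writeFileProtected now leans on so the file never exists with loose permissions, even briefly. A small sketch of the same idea applied elsewhere (the path and helper are hypothetical):

[[!format haskell """
import System.Directory (createDirectory)

-- Create a directory with group/other bits masked off from the start,
-- instead of chmodding it after it already exists.
mkPrivateDir :: FilePath -> IO ()
mkPrivateDir d = withUmask 0o077 (createDirectory d)
"""]]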
@@ -1,14 +1,17 @@
 {- GHC File system encoding handling.
 -
- - Copyright 2012-2013 Joey Hess <joey@kitenet.net>
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
 
+{-# LANGUAGE CPP #-}
+
 module Utility.FileSystemEncoding (
 	fileEncoding,
 	withFilePath,
 	md5FilePath,
+	decodeBS,
 	decodeW8,
 	encodeW8,
 	truncateFilePath,
@@ -22,13 +25,24 @@ import System.IO.Unsafe
 import qualified Data.Hash.MD5 as MD5
 import Data.Word
 import Data.Bits.Utils
+import qualified Data.ByteString.Lazy as L
+#ifdef mingw32_HOST_OS
+import qualified Data.ByteString.Lazy.UTF8 as L8
+#endif
 
 {- Sets a Handle to use the filesystem encoding. This causes data
 - written or read from it to be encoded/decoded the same
 - as ghc 7.4 does to filenames etc. This special encoding
- - allows "arbitrary undecodable bytes to be round-tripped through it". -}
+ - allows "arbitrary undecodable bytes to be round-tripped through it".
+ -}
 fileEncoding :: Handle -> IO ()
+#ifndef mingw32_HOST_OS
 fileEncoding h = hSetEncoding h =<< Encoding.getFileSystemEncoding
+#else
+{- The file system encoding does not work well on Windows,
+ - and Windows only has utf FilePaths anyway. -}
+fileEncoding h = hSetEncoding h Encoding.utf8
+#endif
 
 {- Marshal a Haskell FilePath into a NUL terminated C string using temporary
 - storage. The FilePath is encoded using the filesystem encoding,
@@ -60,6 +74,16 @@ _encodeFilePath fp = unsafePerformIO $ do
 md5FilePath :: FilePath -> MD5.Str
 md5FilePath = MD5.Str . _encodeFilePath
 
+{- Decodes a ByteString into a FilePath, applying the filesystem encoding. -}
+decodeBS :: L.ByteString -> FilePath
+#ifndef mingw32_HOST_OS
+decodeBS = encodeW8 . L.unpack
+#else
+{- On Windows, we assume that the ByteString is utf-8, since Windows
+ - only uses unicode for filenames. -}
+decodeBS = L8.toString
+#endif
+
 {- Converts a [Word8] to a FilePath, encoding using the filesystem encoding.
 -
 - w82c produces a String, which may contain Chars that are invalid
@@ -84,6 +108,7 @@ decodeW8 = s2w8 . _encodeFilePath
 - cost of efficiency when running on a large FilePath.
 -}
 truncateFilePath :: Int -> FilePath -> FilePath
+#ifndef mingw32_HOST_OS
 truncateFilePath n = go . reverse
   where
 	go f =
@@ -91,3 +116,17 @@ truncateFilePath n = go . reverse
 		in if length bytes <= n
 			then reverse f
 			else go (drop 1 f)
+#else
+{- On Windows, count the number of bytes used by each utf8 character. -}
+truncateFilePath n = reverse . go [] n . L8.fromString
+  where
+	go coll cnt bs
+		| cnt <= 0 = coll
+		| otherwise = case L8.decode bs of
+			Just (c, x) | c /= L8.replacement_char ->
+				let x' = fromIntegral x
+				in if cnt - x' < 0
+					then coll
+					else go (c:coll) (cnt - x') (L8.drop 1 bs)
+			_ -> coll
+#endif
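On POSIX, decodeBS goes through the same encodeW8 path that the module already documents as round-tripping arbitrary undecodable bytes. A property-style sketch of that assumption (POSIX build only; on Windows decodeBS assumes utf-8 instead):

[[!format haskell """
import qualified Data.ByteString.Lazy as L
import Data.Word (Word8)

-- Any byte sequence should survive the trip into a FilePath and back.
prop_decodeBS_roundtrips :: [Word8] -> Bool
prop_decodeBS_roundtrips ws = decodeW8 (decodeBS (L.pack ws)) == ws
"""]]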
@@ -19,7 +19,7 @@
 
 module Utility.Matcher (
 	Token(..),
-	Matcher,
+	Matcher(..),
 	token,
 	tokens,
 	generate,
@@ -28,10 +28,10 @@ instance (Arbitrary v, Eq v, Ord v) => Arbitrary (S.Set v) where
 
 {- Times before the epoch are excluded. -}
 instance Arbitrary POSIXTime where
-	arbitrary = nonNegative arbitrarySizedIntegral
+	arbitrary = fromInteger <$> nonNegative arbitrarySizedIntegral
 
 instance Arbitrary EpochTime where
-	arbitrary = nonNegative arbitrarySizedIntegral
+	arbitrary = fromInteger <$> nonNegative arbitrarySizedIntegral
 
 {- Pids are never negative, or 0. -}
 instance Arbitrary ProcessID where
@@ -10,10 +10,13 @@
 
 module Utility.ThreadScheduler where
 
-import Common
+import Control.Monad
 
 import Control.Concurrent
 #ifndef mingw32_HOST_OS
+import Control.Monad.IfElse
+import System.Posix.IO
+#endif
+#ifndef mingw32_HOST_OS
 import System.Posix.Signals
 #ifndef __ANDROID__
 import System.Posix.Terminal
@@ -77,7 +77,8 @@ exists url uo = case parseURIRelaxed url of
 			Nothing -> dne
 	| otherwise -> if Build.SysConfig.curl
 		then do
-			output <- readProcess "curl" $ toCommand curlparams
+			output <- catchDefaultIO "" $
+				readProcess "curl" $ toCommand curlparams
 			case lastMaybe (lines output) of
 				Just ('2':_:_) -> return (True, extractsize output)
 				_ -> dne
@@ -1,6 +1,6 @@
 {- Yesod webapp
 -
- - Copyright 2012 Joey Hess <joey@kitenet.net>
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
@@ -33,9 +33,12 @@ import qualified Data.Text as T
 import qualified Data.Text.Encoding as TE
 import Blaze.ByteString.Builder.Char.Utf8 (fromText)
 import Blaze.ByteString.Builder (Builder)
-import Data.Monoid
 import Control.Arrow ((***))
 import Control.Concurrent
+#ifdef WITH_WEBAPP_SECURE
+import Data.SecureMem
+import Data.Byteable
+#endif
 #ifdef __ANDROID__
 import Data.Endian
 #endif
@@ -74,14 +77,14 @@ browserProc url = proc "xdg-open" [url]
 runWebApp :: Maybe TLSSettings -> Maybe HostName -> Wai.Application -> (SockAddr -> IO ()) -> IO ()
 runWebApp tlssettings h app observer = withSocketsDo $ do
 	sock <- getSocket h
-	void $ forkIO $ run webAppSettings sock app
+	void $ forkIO $ go webAppSettings sock app
 	sockaddr <- fixSockAddr <$> getSocketName sock
 	observer sockaddr
   where
-#ifdef WITH_WEBAPP_HTTPS
-	run = (maybe runSettingsSocket (\ts -> runTLSSocket ts) tlssettings)
+#ifdef WITH_WEBAPP_SECURE
+	go = (maybe runSettingsSocket (\ts -> runTLSSocket ts) tlssettings)
 #else
-	run = runSettingsSocket
+	go = runSettingsSocket
 #endif
 
 fixSockAddr :: SockAddr -> SockAddr
@@ -208,15 +211,35 @@ webAppSessionBackend _ = do
 #endif
 #endif
 
-{- Generates a random sha512 string, suitable to be used for an
- - authentication secret. -}
-genRandomToken :: IO String
-genRandomToken = do
+#ifdef WITH_WEBAPP_SECURE
+type AuthToken = SecureMem
+#else
+type AuthToken = T.Text
+#endif
+
+toAuthToken :: T.Text -> AuthToken
+#ifdef WITH_WEBAPP_SECURE
+toAuthToken = secureMemFromByteString . TE.encodeUtf8
+#else
+toAuthToken = id
+#endif
+
+fromAuthToken :: AuthToken -> T.Text
+#ifdef WITH_WEBAPP_SECURE
+fromAuthToken = TE.decodeLatin1 . toBytes
+#else
+fromAuthToken = id
+#endif
+
+{- Generates a random sha512 string, encapsulated in a SecureMem,
+ - suitable to be used for an authentication secret. -}
+genAuthToken :: IO AuthToken
+genAuthToken = do
 	g <- newGenIO :: IO SystemRandom
 	return $
 		case genBytes 512 g of
-			Left e -> error $ "failed to generate secret token: " ++ show e
-			Right (s, _) -> show $ sha512 $ L.fromChunks [s]
+			Left e -> error $ "failed to generate auth token: " ++ show e
+			Right (s, _) -> toAuthToken $ T.pack $ show $ sha512 $ L.fromChunks [s]
 
 {- A Yesod isAuthorized method, which checks the auth cgi parameter
 - against a token extracted from the Yesod application.
@@ -225,15 +248,15 @@ genRandomToken = do
 - possibly leaking the auth token in urls on that page!
 -}
 #if MIN_VERSION_yesod(1,2,0)
-checkAuthToken :: (Monad m, Yesod.MonadHandler m) => (Yesod.HandlerSite m -> T.Text) -> m Yesod.AuthResult
+checkAuthToken :: (Monad m, Yesod.MonadHandler m) => (Yesod.HandlerSite m -> AuthToken) -> m Yesod.AuthResult
 #else
-checkAuthToken :: forall t sub. (t -> T.Text) -> Yesod.GHandler sub t Yesod.AuthResult
+checkAuthToken :: forall t sub. (t -> AuthToken) -> Yesod.GHandler sub t Yesod.AuthResult
 #endif
-checkAuthToken extractToken = do
+checkAuthToken extractAuthToken = do
 	webapp <- Yesod.getYesod
 	req <- Yesod.getRequest
 	let params = Yesod.reqGetParams req
-	if lookup "auth" params == Just (extractToken webapp)
+	if (toAuthToken <$> lookup "auth" params) == Just (extractAuthToken webapp)
 		then return Yesod.Authorized
 		else Yesod.sendResponseStatus unauthorized401 ()
 
@@ -243,21 +266,21 @@ checkAuthToken extractToken = do
 -
 - A typical predicate would exclude files under /static.
 -}
-insertAuthToken :: forall y. (y -> T.Text)
+insertAuthToken :: forall y. (y -> AuthToken)
 	-> ([T.Text] -> Bool)
 	-> y
 	-> T.Text
 	-> [T.Text]
 	-> [(T.Text, T.Text)]
 	-> Builder
-insertAuthToken extractToken predicate webapp root pathbits params =
+insertAuthToken extractAuthToken predicate webapp root pathbits params =
 	fromText root `mappend` encodePath pathbits' encodedparams
   where
 	pathbits' = if null pathbits then [T.empty] else pathbits
 	encodedparams = map (TE.encodeUtf8 *** go) params'
 	go "" = Nothing
 	go x = Just $ TE.encodeUtf8 x
-	authparam = (T.pack "auth", extractToken webapp)
+	authparam = (T.pack "auth", fromAuthToken (extractAuthToken webapp))
 	params'
 		| predicate pathbits = authparam:params
 		| otherwise = params
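Putting both representations behind the single AuthToken alias keeps callers identical in either build; with WITH_WEBAPP_SECURE, the equality test in checkAuthToken becomes a constant-time SecureMem comparison. A minimal sketch of the intended wiring (the record and names are hypothetical, not the webapp's actual foundation type):

[[!format haskell """
import Control.Applicative ((<$>))

-- Generate the token once at startup and stash it in the Yesod app;
-- checkAuthToken later compares it against the "auth" query parameter.
data ExampleApp = ExampleApp { appAuthToken :: AuthToken }

newExampleApp :: IO ExampleApp
newExampleApp = ExampleApp <$> genAuthToken
"""]]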
debian/changelog (vendored, 71 changed lines)
@@ -1,14 +1,79 @@
-git-annex (5.20140307) UNRELEASED; urgency=medium
+git-annex (5.20140402) unstable; urgency=medium
+
+  * unannex, uninit: Avoid committing after every file is unannexed,
+    for massive speedup.
+  * --notify-finish switch will cause desktop notifications after each
+    file upload/download/drop completes
+    (using the dbus Desktop Notifications Specification)
+  * --notify-start switch will show desktop notifications when each
+    file upload/download starts.
+  * webapp: Automatically install Nautilus integration scripts
+    to get and drop files.
+  * tahoe: Pass -d parameter before subcommand; putting it after
+    the subcommand no longer works with tahoe-lafs version 1.10.
+    (Thanks, Alberto Berti)
+  * forget --drop-dead: Avoid removing the dead remote from the trust.log,
+    so that if git remotes for it still exist anywhere, git annex info
+    will still know it's dead and not show it.
+  * git-annex-shell: Make configlist automatically initialize
+    a remote git repository, as long as a git-annex branch has
+    been pushed to it, to simplify setup of remote git repositories,
+    including via gitolite.
+  * add --include-dotfiles: New option, perhaps useful for backups.
+  * Version 5.20140227 broke creation of glacier repositories,
+    not including the datacenter and vault in their configuration.
+    This bug is fixed, but glacier repositories set up with the broken
+    version of git-annex need to have the datacenter and vault set
+    in order to be usable. This can be done using git annex enableremote
+    to add the missing settings. For details, see
+    http://git-annex.branchable.com/bugs/problems_with_glacier/
+  * Added required content configuration.
+  * assistant: Improve ssh authorized keys line generated in local pairing
+    or for a remote ssh server to set environment variables in an
+    alternative way that works with the non-POSIX fish shell, as well
+    as POSIX shells.
+
+ -- Joey Hess <joeyh@debian.org>  Wed, 02 Apr 2014 16:42:53 -0400
+
+git-annex (5.20140320) unstable; urgency=medium
 
   * Fix zombie leak and general inneficiency when copying files to a
     local git repo.
+  * Fix ssh connection caching stop method to work with openssh 6.5p1,
+    which broke the old method.
   * webapp: Added a "Sync now" item to each repository's menu.
+  * webapp: Use securemem for constant time auth token comparisons.
+  * copy --fast --to remote: Avoid printing anything for files that
+    are already believed to be present on the remote.
+  * Commands that allow specifying which repository to act on using
+    the repository's description will now fail when multiple repositories
+    match, rather than picking a repository at random.
+    (So will --in=)
+  * Better workaround for problem umasks when eg, setting up ssh keys.
+  * "standard" can now be used as a first-class keyword in preferred content
+    expressions. For example "standard or (include=otherdir/*)"
+  * groupwanted can be used in preferred content expressions.
+  * vicfg: Allows editing preferred content expressions for groups.
+  * Improve behavior when unable to parse a preferred content expression
+    (thanks, ion).
+  * metadata: Add --get
+  * metadata: Support --key option (and some other ones like --all)
+  * For each metadata field, there's now an automatically maintained
+    "$field-lastchanged" that gives the date of the last change to that
+    field. Also the "lastchanged" field for the date of the last change
+    to any of a file's metadata.
   * unused: In direct mode, files that are deleted from the work tree
-    are no longer incorrectly detected as unused.
+    and so have no content present are no longer incorrectly detected as
+    unused.
+  * Avoid encoding errors when using the unused log file.
+  * map: Fix crash when one of the remotes of a repo is a local directory
+    that does not exist, or is not a git repo.
   * repair: Improve memory usage when git fsck finds a great many broken
     objects.
+  * Windows: Fix some filename encoding bugs.
+  * rsync special remote: Fix slashes when used on Windows.
 
- -- Joey Hess <joeyh@debian.org>  Thu, 06 Mar 2014 16:17:01 -0400
+ -- Joey Hess <joeyh@debian.org>  Thu, 20 Mar 2014 13:21:12 -0400
 
 git-annex (5.20140306) unstable; urgency=high
 
debian/control (vendored, 3 changed lines)
@@ -30,6 +30,7 @@ Build-Depends:
 	libghc-hinotify-dev [linux-any],
 	libghc-stm-dev (>= 2.3),
 	libghc-dbus-dev (>= 0.10.3) [linux-any],
+	libghc-fdo-notify-dev (>= 0.3) [linux-any],
 	libghc-yesod-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
 	libghc-yesod-static-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
 	libghc-yesod-default-dev [i386 amd64 kfreebsd-amd64 powerpc sparc],
@@ -39,6 +40,8 @@ Build-Depends:
 	libghc-warp-tls-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
 	libghc-wai-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
 	libghc-wai-logger-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
+	libghc-securemem-dev,
+	libghc-byteable-dev,
 	libghc-dns-dev,
 	libghc-case-insensitive-dev,
 	libghc-http-types-dev,
@@ -1,4 +1,4 @@
-[[!comment format=txt
+[[!comment format=mdwn
 username="http://yarikoptic.myopenid.com/"
 nickname="site-myopenid"
 subject="Does it require the device to be rooted?"
doc/assistant/downloadnotification.png (new binary file, 4.4 KiB; not shown)
doc/assistant/nautilusmenu.png (new binary file, 58 KiB; not shown)
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="severo"
+ ip="88.182.182.135"
+ subject="git-assistant and transfer repository"
+ date="2014-03-16T17:05:43Z"
+ content="""
+In your comment http://git-annex.branchable.com/assistant/remote_sharing_walkthrough/#comment-f97efe1d05c0101232684b4e4edc4866, you describe a way to synchronize two devices using an intermediate USB drive configured as a \"transfer repository\".
+
+I understand that in that case, the USB drive can only be used as a \"transmitter\", in a git repository form, not as a copy of the files structure. This means the files contained by the USB drive cannot be accessed without git/git-annex.
+
+Is there a way to use the USB drive as a \"client repository\" in order to allow synchronization, as described earlier, but also as a simple copy of the files, in order to access them from any device (opening them with windows in a cyber coffee for example).
+
+Thanks
+"""]]
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.154"
+ subject="comment 7"
+ date="2014-03-17T19:50:48Z"
+ content="""
+@severo the web app does not support setting up that use case. However, you can make a non-bare clone of your repository onto a removable drive, and if you do the assistant will use it just the same as if you'd set up a removable drive using the webapp. Note that you will need to run `git annex sync` inside that repository in order to update the tree it displays.
+"""]]
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="severo"
+ ip="95.152.107.168"
+ subject="comment 8"
+ date="2014-03-18T10:06:50Z"
+ content="""
+Thanks @joeyh.name for your answer. Do you think this feature could be integrated into the git-annex assistant?
+"""]]
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="severo"
+ ip="95.152.107.168"
+ subject="comment 9"
+ date="2014-03-18T11:16:19Z"
+ content="""
+Some explanations in French on how to do this: http://seenthis.net/messages/237648#message238202
+"""]]
@@ -1,4 +1,4 @@
-[[!comment format=txt
+[[!comment format=mdwn
 username="https://www.google.com/accounts/o8/id?id=AItOawnJTqmRu1YCKS2Hsm4vtOflLhP4fU-k98w"
 nickname="Ahmed"
 subject="Customise conflict resolution behaviour"
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawn3p4i4lk_zMilvjnJ9sS6g2nerpgz0Fjc"
+ nickname="Matthias"
+ subject="Use automatic merge without syncing"
+ date="2014-03-20T10:03:41Z"
+ content="""
+Is there a possibility to use the automatic merge logic without using \"git annex sync\"? I don't want to have the \"synced\"-branches, but the auto-conflict-resolution is very nice.
+"""]]
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.102"
+ subject="comment 3"
+ date="2014-03-20T16:10:10Z"
+ content="""
+@Matthias `git annex merge` will do what you want, as long as you have git-annex 4.20130709 or newer.
+"""]]
@ -0,0 +1,17 @@
|
||||||
|
[[!comment format=mdwn
|
||||||
|
username="https://www.google.com/accounts/o8/id?id=AItOawn3p4i4lk_zMilvjnJ9sS6g2nerpgz0Fjc"
|
||||||
|
nickname="Matthias"
|
||||||
|
subject="merge for master branch?"
|
||||||
|
date="2014-03-23T23:02:23Z"
|
||||||
|
content="""
|
||||||
|
As far as I observed, \"git annex merge\" only merges the \"git-annex\" branch. My wish is to have the conflict resolution from \"git annex sync\" in the \"master\" branch, but no automatic commit, such that the user can verify and possibly correct the merge. The proposed merge could go to the index. Consider the following scenario:
|
||||||
|
|
||||||
|
1. We have repo A, B, and CENTRAL
|
||||||
|
2. All three start with a root commit in \"master\" branch
|
||||||
|
3. Then A commits a file \"test.txt\" with content \"a\" and syncs with CENTRAL
|
||||||
|
4. Meanwhile, B commits \"test.txt\" with content \"b\"
|
||||||
|
5. When B tries to sync with CENTRAL, the proposed conflict resolution having two files \"test.txt-variantXXXX\" and \"test.txt-variantYYYY\" should be staged in the index, but not committed yet.
|
||||||
|
6. B can now commit a custom merge, e.g. with file content \"ab\".
|
||||||
|
|
||||||
|
The point is that I really like the conflict resolution, but still want to force the user to check the result.
|
||||||
|
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.41"
subject="comment 5"
date="2014-03-26T18:56:30Z"
content="""
@Matthias you need to install git-annex 4.20130709 or newer. Then `git-annex merge` will do what you want, as I said before.

As for committing the merge, you can always adjust the result after the fact and use `git commit --amend`.
"""]]

@ -12,5 +12,5 @@ In the log, there are many "too many open files" errors like these:
 
 git:createProcess: runInteractiveProcess: pipe: resource exhausted (Too many open files)
 
-[[!moreinfo]]
+[[!tag moreinfo]]
 [[!meta title="too many open files on android"]]

@ -0,0 +1,520 @@
I have a git annex assistant process using 1.2 gigabytes of RAM and a git cat-file --batch child consuming CPU time constantly. I am running 5.20140320 on Ubuntu 12.04.

[[!format sh """
  PID USER      PR  NI  VIRT  RES  SHR S %CPU %MEM    TIME+  COMMAND
11775 ion       20   0 1350m 1.2g  12m S   48 62.4 425:56.85 git-annex
11787 ion       20   0  9856 1484 1232 R   54  0.1 366:16.14 git
"""]]

The assistant UI looks perfectly normal and does not indicate it is doing anything. daemon.log is empty and the assistant process seems to be logging into a rotated and deleted log file.

[[!format sh """
COMMAND     PID USER FD TYPE DEVICE SIZE/OFF     NODE NAME
git-annex 11775 ion  1w  REG  9,127    80841 55181369 /storage/ion/media/video/.git/annex/daemon.log.10 (deleted)
git-annex 11775 ion  2w  REG  9,127    80841 55181369 /storage/ion/media/video/.git/annex/daemon.log.10 (deleted)
"""]]

strace -s10000 -e trace=read,write -p 11787 indicates that the assistant is having the cat-file process cat the same objects over and over again.

[[!format sh """
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
[... the same read/write cycle over these five keys repeats verbatim for the remainder of the 520-line log ...]
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 1"
date="2014-04-02T18:48:51Z"
content="""
All I can tell from the strace is that it's looking at location logs, and it's looking at the same few keys, but not a single one in a tight loop.

It would probably help a lot to run the assistant with --debug and get a debug log while this is going on. We need to pinpoint the part of the assistant that is affected, and there may be other activity too.
"""]]