tagging package git-annex version 5.20140412
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1

iQIVAwUAU0hs2MkQ2SIlEuPHAQIQmA//boUwVbPk1fDaoj8wgnC0oXOTtNCKt3ja
1fEq28ZoyRgrdWIlpBUtxsswz6HEJxqoUzDvvFw1WrDjqi8e6xCiMPSP7TRCc9Ez
j2q7LtHzZbqsR6kldvedLEZp+5VsHpNMC45od3K9BtLkOB7eerLzacFb4v4Z+Emj
QOp1PkTIYFvjmO0tn/h67u7J/1EaWg1+6lOqdK81iq1/tEu3wjEYxanjbnbqQvWQ
KXPZrB8P4znXy24Ow4IHd79EtI9B/Jhdx5u6/7f6gYjhSkj2vQ2J9OKkEhdFaaTL
BEdb4DulBJDATURODORJEzpkWOaojE9gph4+RP767dWCatApfo6f+P2Qpm0lmTwR
H21B7RfzUKgHnd3Igbc08cNvW+ymereJcbrCS0WqV26THujLCmSR7pzldLeqUn4K
BzzXUIHwx7bPlRSDug/E20ejfbnm3/JcLFcaUy4w/UmsHtHDh7Hg1iWhMVr7sbSX
CgLST3l3yYPKXGNou5+P0NSKRIOpkhU+oePgArms+HY9M1qjF8dzOh4rYbQmo81K
CkFHY49pZ6SqjOccdzMAhEleUgqLVBoNfpspD+By2DAr5q8ADGP0CfXi+khwdXer
l3OHQL0XLQ2PT9Owie9qmnSrddDahGwyDdis7PdnBUVkqU3aWRhlkNYtkgDdEhLm
VBswoF1Pej4=
=V7mr
-----END PGP SIGNATURE-----

Merge tag '5.20140412' into debian-wheezy-backport

tagging package git-annex version 5.20140412

# gpg: Signature made Fri Apr 11 18:29:44 2014 JEST using RSA key ID 2512E3C7
# gpg: Good signature from "Joey Hess <joeyh@debian.org>"
# gpg:                 aka "Joey Hess <joey@kitenet.net>"
# gpg:                 aka "Joey Hess <id@joeyh.name>"
# gpg: WARNING: This key is not certified with a trusted signature!
# gpg:          There is no indication that the signature belongs to the owner.
# Primary key fingerprint: E85A 5F63 B31D 24C1 EBF0 D81C C910 D922 2512 E3C7
commit 1a425b9c90
276 changed files with 5821 additions and 682 deletions
.gitignore (vendored) | 3
@@ -23,6 +23,9 @@ html
 dist
 # Sandboxed builds
 cabal-dev
 .cabal-sandbox
 cabal.sandbox.config
+cabal.config
+# Project-local emacs configuration
+.dir-locals.el
 # OSX related
Annex.hs | 24
@@ -5,12 +5,11 @@
 - Licensed under the GNU GPL version 3 or higher.
 -}

-{-# LANGUAGE GeneralizedNewtypeDeriving, PackageImports #-}
+{-# LANGUAGE CPP, GeneralizedNewtypeDeriving, PackageImports #-}

 module Annex (
 	Annex,
 	AnnexState(..),
-	PreferredContentMap,
 	new,
 	run,
 	eval,

@@ -60,11 +59,13 @@ import Types.FileMatcher
 import Types.NumCopies
 import Types.LockPool
 import Types.MetaData
+import Types.DesktopNotify
 import Types.CleanupActions
-import qualified Utility.Matcher
 import qualified Data.Map as M
 import qualified Data.Set as S
+#ifdef WITH_QUVI
 import Utility.Quvi (QuviVersion)
+#endif

 {- git-annex's monad is a ReaderT around an AnnexState stored in a MVar.
 - This allows modifying the state in an exception-safe fashion.

@@ -80,9 +81,6 @@ newtype Annex a = Annex { runAnnex :: ReaderT (MVar AnnexState) IO a }
 		Applicative
 	)

-type Matcher a = Either [Utility.Matcher.Token a] (Utility.Matcher.Matcher a)
-type PreferredContentMap = M.Map UUID (Utility.Matcher.Matcher (S.Set UUID -> MatchInfo -> Annex Bool))
-
 -- internal state storage
 data AnnexState = AnnexState
 	{ repo :: Git.Repo

@@ -103,9 +101,10 @@ data AnnexState = AnnexState
 	, forcebackend :: Maybe String
 	, globalnumcopies :: Maybe NumCopies
 	, forcenumcopies :: Maybe NumCopies
-	, limit :: Matcher (MatchInfo -> Annex Bool)
+	, limit :: ExpandableMatcher Annex
 	, uuidmap :: Maybe UUIDMap
-	, preferredcontentmap :: Maybe PreferredContentMap
+	, preferredcontentmap :: Maybe (FileMatcherMap Annex)
+	, requiredcontentmap :: Maybe (FileMatcherMap Annex)
 	, shared :: Maybe SharedRepository
 	, forcetrust :: TrustMap
 	, trustmap :: Maybe TrustMap

@@ -120,8 +119,11 @@ data AnnexState = AnnexState
 	, useragent :: Maybe String
 	, errcounter :: Integer
 	, unusedkeys :: Maybe (S.Set Key)
+#ifdef WITH_QUVI
 	, quviversion :: Maybe QuviVersion
+#endif
 	, existinghooks :: M.Map Git.Hook.Hook Bool
+	, desktopnotify :: DesktopNotify
 	}

 newState :: GitConfig -> Git.Repo -> AnnexState

@@ -144,9 +146,10 @@ newState c r = AnnexState
 	, forcebackend = Nothing
 	, globalnumcopies = Nothing
 	, forcenumcopies = Nothing
-	, limit = Left []
+	, limit = BuildingMatcher []
 	, uuidmap = Nothing
 	, preferredcontentmap = Nothing
+	, requiredcontentmap = Nothing
 	, shared = Nothing
 	, forcetrust = M.empty
 	, trustmap = Nothing

@@ -161,8 +164,11 @@ newState c r = AnnexState
 	, useragent = Nothing
 	, errcounter = 0
 	, unusedkeys = Nothing
+#ifdef WITH_QUVI
 	, quviversion = Nothing
+#endif
 	, existinghooks = M.empty
+	, desktopnotify = mempty
 	}

 {- Makes an Annex state object for the specified git repo.
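A note on the new limit field: ExpandableMatcher lives in Types.FileMatcher, which this diff does not show. Judging only from the Left [] → BuildingMatcher [] change above (the Either previously distinguished a matcher still being assembled from a finished one), its shape is plausibly something like the following sketch; the constructor names beyond BuildingMatcher are guesses, not the module's actual text:

    data ExpandableMatcher a
        = BuildingMatcher [Utility.Matcher.Token (MatchInfo -> a Bool)]
        | CompleteMatcher (Utility.Matcher.Matcher (MatchInfo -> a Bool))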
@@ -32,8 +32,12 @@ getTransitionCalculator ForgetDeadRemotes = Just dropDead

 dropDead :: FilePath -> String -> TrustMap -> FileTransition
 dropDead f content trustmap = case getLogVariety f of
-	Just UUIDBasedLog -> ChangeFile $
-		UUIDBased.showLog id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLog Just content
+	Just UUIDBasedLog
+		-- Don't remove the dead repo from the trust log,
+		-- because git remotes may still exist, and they need
+		-- to still know it's dead.
+		| f == trustLog -> PreserveFile
+		| otherwise -> ChangeFile $ UUIDBased.showLog id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLog Just content
 	Just NewUUIDBasedLog -> ChangeFile $
 		UUIDBased.showLogNew id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLogNew Just content
 	Just (PresenceLog _) ->
@@ -13,7 +13,6 @@ import Common.Annex
 import Limit
 import Utility.Matcher
 import Types.Group
-import Types.Limit
 import Logs.Group
 import Logs.Remote
 import Annex.UUID

@@ -25,12 +24,10 @@ import Types.Remote (RemoteConfig)
 import Data.Either
 import qualified Data.Set as S

-type FileMatcher = Matcher MatchFiles
-
-checkFileMatcher :: FileMatcher -> FilePath -> Annex Bool
+checkFileMatcher :: (FileMatcher Annex) -> FilePath -> Annex Bool
 checkFileMatcher matcher file = checkMatcher matcher Nothing (Just file) S.empty True

-checkMatcher :: FileMatcher -> Maybe Key -> AssociatedFile -> AssumeNotPresent -> Bool -> Annex Bool
+checkMatcher :: (FileMatcher Annex) -> Maybe Key -> AssociatedFile -> AssumeNotPresent -> Bool -> Annex Bool
 checkMatcher matcher mkey afile notpresent def
 	| isEmpty matcher = return def
 	| otherwise = case (mkey, afile) of

@@ -48,15 +45,15 @@ fileMatchInfo file = do
 	, relFile = file
 	}

-matchAll :: FileMatcher
+matchAll :: FileMatcher Annex
 matchAll = generate []

-parsedToMatcher :: [Either String (Token MatchFiles)] -> Either String FileMatcher
+parsedToMatcher :: [Either String (Token (MatchFiles Annex))] -> Either String (FileMatcher Annex)
 parsedToMatcher parsed = case partitionEithers parsed of
 	([], vs) -> Right $ generate vs
 	(es, _) -> Left $ unwords $ map ("Parse failure: " ++) es

-exprParser :: FileMatcher -> FileMatcher -> GroupMap -> M.Map UUID RemoteConfig -> Maybe UUID -> String -> [Either String (Token MatchFiles)]
+exprParser :: FileMatcher Annex -> FileMatcher Annex -> GroupMap -> M.Map UUID RemoteConfig -> Maybe UUID -> String -> [Either String (Token (MatchFiles Annex))]
 exprParser matchstandard matchgroupwanted groupmap configmap mu expr =
 	map parse $ tokenizeMatcher expr
   where

@@ -69,7 +66,7 @@ exprParser matchstandard matchgroupwanted groupmap configmap mu expr =
 	preferreddir = fromMaybe "public" $
 		M.lookup "preferreddir" =<< (`M.lookup` configmap) =<< mu

-parseToken :: FileMatcher -> FileMatcher -> MkLimit -> MkLimit -> GroupMap -> String -> Either String (Token MatchFiles)
+parseToken :: FileMatcher Annex -> FileMatcher Annex -> MkLimit Annex -> MkLimit Annex -> GroupMap -> String -> Either String (Token (MatchFiles Annex))
 parseToken matchstandard matchgroupwanted checkpresent checkpreferreddir groupmap t
 	| t `elem` tokens = Right $ token t
 	| t == "standard" = call matchstandard

@@ -106,7 +103,7 @@ tokenizeMatcher = filter (not . null ) . concatMap splitparens . words

 {- Generates a matcher for files large enough (or meeting other criteria)
 - to be added to the annex, rather than directly to git. -}
-largeFilesMatcher :: Annex FileMatcher
+largeFilesMatcher :: Annex (FileMatcher Annex)
 largeFilesMatcher = go =<< annexLargeFiles <$> Annex.getGitConfig
   where
	go Nothing = return matchAll
Annex/Notification.hs (new file) | 101
@@ -0,0 +1,101 @@
+{- git-annex desktop notifications
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE CPP #-}
+
+module Annex.Notification (NotifyWitness, notifyTransfer, notifyDrop) where
+
+import Common.Annex
+import Logs.Transfer
+#ifdef WITH_DBUS_NOTIFICATIONS
+import qualified Annex
+import Types.DesktopNotify
+import qualified DBus.Notify as Notify
+import qualified DBus.Client
+#endif
+
+-- Witness that notification has happened.
+data NotifyWitness = NotifyWitness
+
+{- Wrap around an action that performs a transfer, which may run multiple
+ - attempts. Displays notification when supported and when the user asked
+ - for it. -}
+notifyTransfer :: Direction -> Maybe FilePath -> (NotifyWitness -> Annex Bool) -> Annex Bool
+notifyTransfer _ Nothing a = a NotifyWitness
+#ifdef WITH_DBUS_NOTIFICATIONS
+notifyTransfer direction (Just f) a = do
+	wanted <- Annex.getState Annex.desktopnotify
+	if (notifyStart wanted || notifyFinish wanted)
+		then do
+			client <- liftIO DBus.Client.connectSession
+			startnotification <- liftIO $ if notifyStart wanted
+				then Just <$> Notify.notify client (startedTransferNote direction f)
+				else pure Nothing
+			ok <- a NotifyWitness
+			when (notifyFinish wanted) $ liftIO $ void $ maybe
+				(Notify.notify client $ finishedTransferNote ok direction f)
+				(\n -> Notify.replace client n $ finishedTransferNote ok direction f)
+				startnotification
+			return ok
+		else a NotifyWitness
+#else
+notifyTransfer _ (Just _) a = do a NotifyWitness
+#endif
+
+notifyDrop :: Maybe FilePath -> Bool -> Annex ()
+notifyDrop Nothing _ = noop
+#ifdef WITH_DBUS_NOTIFICATIONS
+notifyDrop (Just f) ok = do
+	wanted <- Annex.getState Annex.desktopnotify
+	when (notifyFinish wanted) $ liftIO $ do
+		client <- DBus.Client.connectSession
+		void $ Notify.notify client (droppedNote ok f)
+#else
+notifyDrop (Just _) _ = noop
+#endif
+
+#ifdef WITH_DBUS_NOTIFICATIONS
+startedTransferNote :: Direction -> FilePath -> Notify.Note
+startedTransferNote Upload = mkNote Notify.Transfer Notify.Low iconUpload
+	"Uploading"
+startedTransferNote Download = mkNote Notify.Transfer Notify.Low iconDownload
+	"Downloading"
+
+finishedTransferNote :: Bool -> Direction -> FilePath -> Notify.Note
+finishedTransferNote False Upload = mkNote Notify.TransferError Notify.Normal iconFailure
+	"Failed to upload"
+finishedTransferNote False Download = mkNote Notify.TransferError Notify.Normal iconFailure
+	"Failed to download"
+finishedTransferNote True Upload = mkNote Notify.TransferComplete Notify.Low iconSuccess
+	"Finished uploading"
+finishedTransferNote True Download = mkNote Notify.TransferComplete Notify.Low iconSuccess
+	"Finished downloading"
+
+droppedNote :: Bool -> FilePath -> Notify.Note
+droppedNote False = mkNote Notify.TransferError Notify.Normal iconFailure
+	"Failed to drop"
+droppedNote True = mkNote Notify.TransferComplete Notify.Low iconSuccess
+	"Dropped"
+
+iconUpload, iconDownload, iconFailure, iconSuccess :: String
+iconUpload = "network-transmit"
+iconDownload = "network-receive"
+iconFailure = "dialog-error"
+iconSuccess = "git-annex" -- Is there a standard icon for success/completion?
+
+mkNote :: Notify.Category -> Notify.UrgencyLevel -> String -> String -> FilePath -> Notify.Note
+mkNote category urgency icon desc path = Notify.blankNote
+	{ Notify.appName = "git-annex"
+	, Notify.appImage = Just (Notify.Icon icon)
+	, Notify.summary = desc ++ " " ++ path
+	, Notify.hints =
+		[ Notify.Category category
+		, Notify.Urgency urgency
+		, Notify.SuppressSound True
+		]
+	}
+#endif
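The NotifyWitness argument is how callers prove a transfer was wrapped in notifyTransfer before running. A minimal sketch of a caller, using upload from Annex/Transfer.hs below (the names sendWithNotice, u, key, f, and storer are placeholders, not part of this commit):

    -- Sketch: send a key to a remote with desktop notifications wired in.
    sendWithNotice :: UUID -> Key -> FilePath -> (MeterUpdate -> Annex Bool) -> Annex Bool
    sendWithNotice u key f storer =
        notifyTransfer Upload (Just f) $ \witness ->
            upload u key (Just f) noRetry storer witness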
Annex/Transfer.hs (new file) | 131
@@ -0,0 +1,131 @@
+{- git-annex transfers
+ -
+ - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE CPP #-}
+
+module Annex.Transfer (
+	module X,
+	upload,
+	download,
+	runTransfer,
+	noRetry,
+	forwardRetry,
+) where
+
+import Common.Annex
+import Logs.Transfer as X
+import Annex.Notification as X
+import Annex.Perms
+import Annex.Exception
+import Utility.Metered
+#ifdef mingw32_HOST_OS
+import Utility.WinLock
+#endif
+
+import Control.Concurrent
+
+upload :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> NotifyWitness -> Annex Bool
+upload u key f d a _witness = runTransfer (Transfer Upload u key) f d a
+
+download :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> NotifyWitness -> Annex Bool
+download u key f d a _witness = runTransfer (Transfer Download u key) f d a
+
+{- Runs a transfer action. Creates and locks the lock file while the
+ - action is running, and stores info in the transfer information
+ - file.
+ -
+ - If the transfer action returns False, the transfer info is
+ - left in the failedTransferDir.
+ -
+ - If the transfer is already in progress, returns False.
+ -
+ - An upload can be run from a read-only filesystem, and in this case
+ - no transfer information or lock file is used.
+ -}
+runTransfer :: Transfer -> Maybe FilePath -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
+runTransfer t file shouldretry a = do
+	info <- liftIO $ startTransferInfo file
+	(meter, tfile, metervar) <- mkProgressUpdater t info
+	mode <- annexFileMode
+	(fd, inprogress) <- liftIO $ prep tfile mode info
+	if inprogress
+		then do
+			showNote "transfer already in progress"
+			return False
+		else do
+			ok <- retry info metervar $
+				bracketIO (return fd) (cleanup tfile) (const $ a meter)
+			unless ok $ recordFailedTransfer t info
+			return ok
+  where
+#ifndef mingw32_HOST_OS
+	prep tfile mode info = do
+		mfd <- catchMaybeIO $
+			openFd (transferLockFile tfile) ReadWrite (Just mode)
+				defaultFileFlags { trunc = True }
+		case mfd of
+			Nothing -> return (Nothing, False)
+			Just fd -> do
+				locked <- catchMaybeIO $
+					setLock fd (WriteLock, AbsoluteSeek, 0, 0)
+				if isNothing locked
+					then return (Nothing, True)
+					else do
+						void $ tryIO $ writeTransferInfoFile info tfile
+						return (mfd, False)
+#else
+	prep tfile _mode info = do
+		v <- catchMaybeIO $ lockExclusive (transferLockFile tfile)
+		case v of
+			Nothing -> return (Nothing, False)
+			Just Nothing -> return (Nothing, True)
+			Just (Just lockhandle) -> do
+				void $ tryIO $ writeTransferInfoFile info tfile
+				return (Just lockhandle, False)
+#endif
+	cleanup _ Nothing = noop
+	cleanup tfile (Just lockhandle) = do
+		void $ tryIO $ removeFile tfile
+#ifndef mingw32_HOST_OS
+		void $ tryIO $ removeFile $ transferLockFile tfile
+		closeFd lockhandle
+#else
+		{- Windows cannot delete the lockfile until the lock
+		 - is closed. So it's possible to race with another
+		 - process that takes the lock before it's removed,
+		 - so ignore failure to remove.
+		 -}
+		dropLock lockhandle
+		void $ tryIO $ removeFile $ transferLockFile tfile
+#endif
+	retry oldinfo metervar run = do
+		v <- tryAnnex run
+		case v of
+			Right b -> return b
+			Left _ -> do
+				b <- getbytescomplete metervar
+				let newinfo = oldinfo { bytesComplete = Just b }
+				if shouldretry oldinfo newinfo
+					then retry newinfo metervar run
+					else return False
+	getbytescomplete metervar
+		| transferDirection t == Upload =
+			liftIO $ readMVar metervar
+		| otherwise = do
+			f <- fromRepo $ gitAnnexTmpObjectLocation (transferKey t)
+			liftIO $ catchDefaultIO 0 $
+				fromIntegral . fileSize <$> getFileStatus f
+
+type RetryDecider = TransferInfo -> TransferInfo -> Bool
+
+noRetry :: RetryDecider
+noRetry _ _ = False
+
+{- Retries a transfer when it fails, as long as the failed transfer managed
+ - to send some data. -}
+forwardRetry :: RetryDecider
+forwardRetry old new = bytesComplete old < bytesComplete new
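Since RetryDecider is a plain function type, callers can supply their own policies. An illustrative sketch (not part of this commit) that only retries when the failed attempt moved at least a mebibyte of new data, built on the same TransferInfo field forwardRetry uses:

    -- Hypothetical policy; bytesComplete is Maybe, so missing counts
    -- on either side mean no retry.
    bigForwardRetry :: RetryDecider
    bigForwardRetry old new = maybe False (>= 1048576) $
        (-) <$> bytesComplete new <*> bytesComplete old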
@@ -14,7 +14,6 @@ import Utility.Tense
 import qualified Data.Text as T
 import Data.Text (Text)
 import qualified Data.Map as M
-import Data.Monoid

 {- This is as many alerts as it makes sense to display at a time.
 - A display might be smaller, or larger, the point is to not overwhelm the
@@ -35,11 +35,14 @@ standaloneAppBase = getEnv "GIT_ANNEX_APP_BASE"
 -
 - Note that this is done every time it's started, so if the user moves
 - it around, the paths this sets up won't break.
+-
+- Nautilus hook script installation is done even for packaged apps,
+- since it has to go into the user's home directory.
 -}
 ensureInstalled :: IO ()
 ensureInstalled = go =<< standaloneAppBase
   where
-	go Nothing = noop
+	go Nothing = installNautilus "git-annex"
 	go (Just base) = do
 		let program = base </> "git-annex"
 		programfile <- programFile

@@ -78,6 +81,33 @@ ensureInstalled = go =<< standaloneAppBase
 		viaTmp writeFile shim content
 		modifyFileMode shim $ addModes [ownerExecuteMode]

+		installNautilus program
+
+installNautilus :: FilePath -> IO ()
+#ifdef linux_HOST_OS
+installNautilus program = do
+	scriptdir <- (\d -> d </> "nautilus" </> "scripts") <$> userDataDir
+	whenM (doesDirectoryExist scriptdir) $ do
+		genscript scriptdir "get"
+		genscript scriptdir "drop"
+  where
+	genscript scriptdir action =
+		installscript (scriptdir </> scriptname action) $ unlines
+			[ shebang_local
+			, autoaddedcomment
+			, "exec " ++ program ++ " " ++ action ++ " --notify-start --notify-finish -- \"$@\""
+			]
+	scriptname action = "git-annex " ++ action
+	installscript f c = whenM (safetoinstallscript f) $ do
+		writeFile f c
+		modifyFileMode f $ addModes [ownerExecuteMode]
+	safetoinstallscript f = catchDefaultIO True $
+		elem autoaddedcomment . lines <$> readFileStrict f
+	autoaddedcomment = "# Automatically added by git-annex, do not edit. (To disable, chmod 600 this file.)"
+#else
+installNautilus _ = noop
+#endif
+
 {- Returns a cleaned up environment that lacks settings used to make the
 - standalone builds use their bundled libraries and programs.
 - Useful when calling programs not included in the standalone builds.
@@ -197,7 +197,7 @@ authorizedKeysLine gitannexshellonly dir pubkey
 - long perl script. -}
 	| otherwise = pubkey
   where
-	limitcommand = "command=\"GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty "
+	limitcommand = "command=\"env GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty "

 {- Generates a ssh key pair. -}
 genSshKeyPair :: IO SshKeyPair
@@ -62,15 +62,17 @@ configFilesActions =
 	, (groupLog, void $ liftAnnex groupMapLoad)
 	, (numcopiesLog, void $ liftAnnex globalNumCopiesLoad)
 	, (scheduleLog, void updateScheduleLog)
-	-- Preferred content settings depend on most of the other configs,
-	-- so will be reloaded whenever any configs change.
+	-- Preferred and required content settings depend on most of the
+	-- other configs, so will be reloaded whenever any configs change.
 	, (preferredContentLog, noop)
+	, (requiredContentLog, noop)
+	, (groupPreferredContentLog, noop)
 	]

 reloadConfigs :: Configs -> Assistant ()
 reloadConfigs changedconfigs = do
 	sequence_ as
-	void $ liftAnnex preferredContentMapLoad
+	void $ liftAnnex preferredRequiredMapsLoad
 	{- Changes to the remote log, or the trust log, can affect the
 	 - syncRemotes list. Changes to the uuid log may affect its
 	 - display so are also included. -}
@@ -35,6 +35,7 @@ import Annex.CatFile
 import Annex.CheckIgnore
 import Annex.Link
 import Annex.FileMatcher
+import Types.FileMatcher
 import Annex.ReplaceFile
 import Git.Types
 import Config

@@ -196,7 +197,7 @@ runHandler handler file filestatus = void $ do
 	| otherwise = f

 {- Small files are added to git as-is, while large ones go into the annex. -}
-add :: FileMatcher -> FilePath -> Assistant (Maybe Change)
+add :: FileMatcher Annex -> FilePath -> Assistant (Maybe Change)
 add bigfilematcher file = ifM (liftAnnex $ checkFileMatcher bigfilematcher file)
 	( pendingAddChange file
 	, do

@@ -205,7 +206,7 @@ add bigfilematcher file = ifM (liftAnnex $ checkFileMatcher bigfilematcher file)
 		madeChange file AddFileChange
 	)

-onAdd :: FileMatcher -> Handler
+onAdd :: FileMatcher Annex -> Handler
 onAdd matcher file filestatus
 	| maybe False isRegularFile filestatus =
 		unlessIgnored file $

@@ -218,7 +219,7 @@ shouldRestage ds = scanComplete ds || forceRestage ds
 {- In direct mode, add events are received for both new files, and
 - modified existing files.
 -}
-onAddDirect :: Bool -> FileMatcher -> Handler
+onAddDirect :: Bool -> FileMatcher Annex -> Handler
 onAddDirect symlinkssupported matcher file fs = do
 	v <- liftAnnex $ catKeyFile file
 	case (v, fs) of
@@ -45,7 +45,12 @@ bundledPrograms = catMaybes
 #endif
 	, SysConfig.gpg
 	, ifset SysConfig.curl "curl"
+#ifndef darwin_HOST_OS
+	-- wget on OSX has been problimatic, looking for certs in the wrong
+	-- places. Don't ship it, use curl or the OSX's own wget if it has
+	-- one.
 	, ifset SysConfig.wget "wget"
+#endif
 	, ifset SysConfig.bup "bup"
 	, SysConfig.lsof
 	, SysConfig.gcrypt
@@ -14,12 +14,10 @@ import System.FilePath
 import System.Directory
 import Control.Monad
 import Data.List
-import Data.List.Utils
-import System.Posix.Files
-import Data.Char
 import Control.Monad.IfElse

 import Utility.PartialPrelude
+import Utility.LinuxMkLibs
 import Utility.Directory
 import Utility.Process
 import Utility.Monad

@@ -41,7 +39,7 @@ mklibs top = do
 	libs <- parseLdd <$> readProcess "ldd" exes
 	glibclibs <- glibcLibs
 	let libs' = nub $ libs ++ glibclibs
-	libdirs <- nub . catMaybes <$> mapM (installLib top) libs'
+	libdirs <- nub . catMaybes <$> mapM (installLib installFile top) libs'

 	-- Various files used by runshell to set up env vars used by the
 	-- linker shims.

@@ -53,26 +51,6 @@ mklibs top = do

 	mapM_ (installLinkerShim top) exes

-{- Installs a library. If the library is a symlink to another file,
- - install the file it links to, and update the symlink to be relative. -}
-installLib :: FilePath -> FilePath -> IO (Maybe FilePath)
-installLib top lib = ifM (doesFileExist lib)
-	( do
-		installFile top lib
-		checksymlink lib
-		return $ Just $ parentDir lib
-	, return Nothing
-	)
-  where
-	checksymlink f = whenM (isSymbolicLink <$> getSymbolicLinkStatus (inTop top f)) $ do
-		l <- readSymbolicLink (inTop top f)
-		let absl = absPathFrom (parentDir f) l
-		let target = relPathDirToFile (parentDir f) absl
-		installFile top absl
-		nukeFile (top ++ f)
-		createSymbolicLink target (inTop top f)
-		checksymlink absl
-
 {- Installs a linker shim script around a binary.
 -
 - Note that each binary is put into its own separate directory,

@@ -108,10 +86,6 @@ installFile top f = do
   where
	destdir = inTop top $ parentDir f

--- Note that f is not relative, so cannot use </>
-inTop :: FilePath -> FilePath -> FilePath
-inTop top f = top ++ f
-
 checkExe :: FilePath -> IO Bool
 checkExe f
 	| ".so" `isSuffixOf` f = return False

@@ -127,18 +101,3 @@ checkFileExe s = and
 	[ "ELF" `isInfixOf` s
 	, "executable" `isInfixOf` s || "shared object" `isInfixOf` s
 	]
-
-{- Parse ldd output, getting all the libraries that the input files
- - link to. Note that some of the libraries may not exist
- - (eg, linux-vdso.so) -}
-parseLdd :: String -> [FilePath]
-parseLdd = catMaybes . map (getlib . dropWhile isSpace) . lines
-  where
-	getlib l = headMaybe . words =<< lastMaybe (split " => " l)
-
-{- Get all glibc libs and other support files, including gconv files
- -
- - XXX Debian specific. -}
-glibcLibs :: IO [FilePath]
-glibcLibs = lines <$> readProcess "sh"
-	["-c", "dpkg -L libc6:$(dpkg --print-architecture) libgcc1:$(dpkg --print-architecture) | egrep '\\.so|gconv'"]
@@ -57,6 +57,9 @@ buildFlags = filter (not . null)
 #ifdef WITH_DBUS
 	, "DBus"
 #endif
+#ifdef WITH_DESKTOP_NOTIFY
+	, "DesktopNotify"
+#endif
 #ifdef WITH_XMPP
 	, "XMPP"
 #else
@@ -89,6 +89,7 @@ import qualified Command.WebApp
 #ifdef WITH_XMPP
 import qualified Command.XMPPGit
 #endif
+import qualified Command.RemoteDaemon
 #endif
 import qualified Command.Test
 #ifdef WITH_TESTSUITE

@@ -176,6 +177,7 @@ cmds = concat
 #ifdef WITH_XMPP
 	, Command.XMPPGit.def
 #endif
+	, Command.RemoteDaemon.def
 #endif
 	, Command.Test.def
 #ifdef WITH_TESTSUITE
@@ -29,6 +29,7 @@ import qualified Command.RecvKey
 import qualified Command.SendKey
 import qualified Command.TransferInfo
 import qualified Command.Commit
+import qualified Command.NotifyChanges
 import qualified Command.GCryptSetup

 cmds_readonly :: [Command]

@@ -37,6 +38,7 @@ cmds_readonly = concat
 	, gitAnnexShellCheck Command.InAnnex.def
 	, gitAnnexShellCheck Command.SendKey.def
 	, gitAnnexShellCheck Command.TransferInfo.def
+	, gitAnnexShellCheck Command.NotifyChanges.def
 	]

 cmds_notreadonly :: [Command]
@@ -20,6 +20,7 @@ import System.Console.GetOpt
 import Common.Annex
 import qualified Annex
 import Types.Messages
+import Types.DesktopNotify
 import Limit
 import CmdLine.Usage

@@ -41,6 +42,10 @@ commonOptions =
 		"don't show debug messages"
 	, Option ['b'] ["backend"] (ReqArg setforcebackend paramName)
 		"specify key-value backend to use"
+	, Option [] ["notify-finish"] (NoArg (setdesktopnotify mkNotifyFinish))
+		"show desktop notification after transfer finishes"
+	, Option [] ["notify-start"] (NoArg (setdesktopnotify mkNotifyStart))
+		"show desktop notification after transfer completes"
 	]
   where
	setforce v = Annex.changeState $ \s -> s { Annex.force = v }

@@ -49,6 +54,7 @@ commonOptions =
 	setforcebackend v = Annex.changeState $ \s -> s { Annex.forcebackend = Just v }
 	setdebug = Annex.changeGitConfig $ \c -> c { annexDebug = True }
 	unsetdebug = Annex.changeGitConfig $ \c -> c { annexDebug = False }
+	setdesktopnotify v = Annex.changeState $ \s -> s { Annex.desktopnotify = Annex.desktopnotify s <> v }

 matcherOptions :: [Option]
 matcherOptions =
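The two flags accumulate into Annex.desktopnotify with <>, so passing both requests both notifications. Types.DesktopNotify is not shown in this diff; a plausible minimal shape, inferred only from mkNotifyStart/mkNotifyFinish and the mappend above (a sketch, not the module's actual text):

    data DesktopNotify = DesktopNotify
        { notifyStart :: Bool
        , notifyFinish :: Bool
        }

    instance Monoid DesktopNotify where
        mempty = DesktopNotify False False
        mappend (DesktopNotify s1 f1) (DesktopNotify s2 f2) =
            DesktopNotify (s1 || s2) (f1 || f2)

    mkNotifyStart, mkNotifyFinish :: DesktopNotify
    mkNotifyStart = DesktopNotify True False
    mkNotifyFinish = DesktopNotify False True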
@@ -30,14 +30,15 @@ withFilesInGit :: (FilePath -> CommandStart) -> CommandSeek
 withFilesInGit a params = seekActions $ prepFiltered a $
 	seekHelper LsFiles.inRepo params

-withFilesNotInGit :: (FilePath -> CommandStart) -> CommandSeek
-withFilesNotInGit a params = do
-	{- dotfiles are not acted on unless explicitly listed -}
-	files <- filter (not . dotfile) <$>
-		seekunless (null ps && not (null params)) ps
-	dotfiles <- seekunless (null dotps) dotps
-	seekActions $ prepFiltered a $
-		return $ concat $ segmentPaths params (files++dotfiles)
+withFilesNotInGit :: Bool -> (FilePath -> CommandStart) -> CommandSeek
+withFilesNotInGit skipdotfiles a params
+	| skipdotfiles = do
+		{- dotfiles are not acted on unless explicitly listed -}
+		files <- filter (not . dotfile) <$>
+			seekunless (null ps && not (null params)) ps
+		dotfiles <- seekunless (null dotps) dotps
+		go (files++dotfiles)
+	| otherwise = go =<< seekunless False params
   where
	(dotps, ps) = partition dotfile params
	seekunless True _ = return []

@@ -45,6 +46,8 @@ withFilesNotInGit a params = do
 		force <- Annex.getState Annex.force
 		g <- gitRepo
 		liftIO $ Git.Command.leaveZombie <$> LsFiles.notInRepo force l g
+	go l = seekActions $ prepFiltered a $
+		return $ concat $ segmentPaths params l

 withPathContents :: ((FilePath, FilePath) -> CommandStart) -> CommandSeek
 withPathContents a params = seekActions $
@@ -34,8 +34,12 @@ import Annex.ReplaceFile
 import Utility.Tmp

 def :: [Command]
-def = [notBareRepo $ command "add" paramPaths seek SectionCommon
-	"add files to annex"]
+def = [notBareRepo $ withOptions [includeDotFilesOption] $
+	command "add" paramPaths seek SectionCommon
+		"add files to annex"]
+
+includeDotFilesOption :: Option
+includeDotFilesOption = flagOption [] "include-dotfiles" "don't skip dotfiles"

 {- Add acts on both files not checked into git yet, and unlocked files.
 -

@@ -47,7 +51,8 @@ seek ps = do
 			( start file
 			, stop
 			)
-	go withFilesNotInGit
+	skipdotfiles <- not <$> Annex.getFlag (optionName includeDotFilesOption)
+	go $ withFilesNotInGit skipdotfiles
 	ifM isDirect
 		( go withFilesMaybeModified
 		, go withFilesUnlocked
@@ -26,7 +26,7 @@ import Types.KeySource
 import Config
 import Annex.Content.Direct
 import Logs.Location
-import qualified Logs.Transfer as Transfer
+import qualified Annex.Transfer as Transfer
 #ifdef WITH_QUVI
 import Annex.Quvi
 import qualified Utility.Quvi as Quvi

@@ -116,9 +116,10 @@ addUrlFileQuvi relaxed quviurl videourl file = do
 	prepGetViaTmpChecked sizedkey $ do
 		tmp <- fromRepo $ gitAnnexTmpObjectLocation key
 		showOutput
-		ok <- Transfer.download webUUID key (Just file) Transfer.forwardRetry $ const $ do
-			liftIO $ createDirectoryIfMissing True (parentDir tmp)
-			downloadUrl [videourl] tmp
+		ok <- Transfer.notifyTransfer Transfer.Download (Just file) $
+			Transfer.download webUUID key (Just file) Transfer.forwardRetry $ const $ do
+				liftIO $ createDirectoryIfMissing True (parentDir tmp)
+				downloadUrl [videourl] tmp
 		if ok
 			then cleanup quviurl file key (Just tmp)
 			else return False

@@ -133,17 +134,20 @@ perform relaxed url file = ifAnnexed file addurl geturl
 	| relaxed = do
 		setUrlPresent key url
 		next $ return True
-	| otherwise = do
-		(exists, samesize) <- Url.withUrlOptions $ Url.check url (keySize key)
-		if exists && samesize
-			then do
-				setUrlPresent key url
-				next $ return True
-			else do
-				warning $ if exists
-					then "url does not have expected file size (use --relaxed to bypass this check) " ++ url
-					else "failed to verify url exists: " ++ url
-				stop
+	| otherwise = ifM (elem url <$> getUrls key)
+		( stop
+		, do
+			(exists, samesize) <- Url.withUrlOptions $ Url.check url (keySize key)
+			if exists && samesize
+				then do
+					setUrlPresent key url
+					next $ return True
+				else do
+					warning $ "while adding a new url to an already annexed file, " ++ if exists
+						then "url does not have expected file size (use --relaxed to bypass this check) " ++ url
+						else "failed to verify url exists: " ++ url
+					stop
+		)

 addUrlFile :: Bool -> URLString -> FilePath -> Annex Bool
 addUrlFile relaxed url file = do

@@ -179,7 +183,7 @@ download url file = do
 		, return False
 		)
   where
-	runtransfer dummykey tmp =
+	runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
 		Transfer.download webUUID dummykey (Just file) Transfer.forwardRetry $ const $ do
 			liftIO $ createDirectoryIfMissing True (parentDir tmp)
 			downloadUrl [url] tmp
@@ -1,6 +1,6 @@
 {- git-annex command
 -
-- Copyright 2010 Joey Hess <joey@kitenet.net>
+- Copyright 2010-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

@@ -10,6 +10,8 @@ module Command.ConfigList where
 import Common.Annex
 import Command
 import Annex.UUID
+import Annex.Init
+import qualified Annex.Branch
 import qualified Git.Config
 import Remote.GCrypt (coreGCryptId)

@@ -22,9 +24,23 @@ seek = withNothing start

 start :: CommandStart
 start = do
-	u <- getUUID
+	u <- findOrGenUUID
 	showConfig "annex.uuid" $ fromUUID u
 	showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
 	stop
   where
	showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
+
+{- The repository may not yet have a UUID; automatically initialize it
+ - when there's a git-annex branch available. -}
+findOrGenUUID :: Annex UUID
+findOrGenUUID = do
+	u <- getUUID
+	if u /= NoUUID
+		then return u
+		else ifM Annex.Branch.hasSibling
+			( do
+				initialize Nothing
+				getUUID
+			, return NoUUID
+			)
@@ -14,9 +14,13 @@ import qualified Annex
 import Annex.UUID
 import Logs.Location
 import Logs.Trust
+import Logs.PreferredContent
 import Config.NumCopies
 import Annex.Content
 import Annex.Wanted
+import Annex.Notification
+
+import qualified Data.Set as S

 def :: [Command]
 def = [withOptions [dropFromOption] $ command "drop" paramPaths seek

@@ -44,27 +48,34 @@ start from file (key, _) = checkDropAuto from file key $ \numcopies ->
 startLocal :: AssociatedFile -> NumCopies -> Key -> Maybe Remote -> CommandStart
 startLocal afile numcopies key knownpresentremote = stopUnless (inAnnex key) $ do
 	showStart' "drop" key afile
-	next $ performLocal key numcopies knownpresentremote
+	next $ performLocal key afile numcopies knownpresentremote

 startRemote :: AssociatedFile -> NumCopies -> Key -> Remote -> CommandStart
 startRemote afile numcopies key remote = do
 	showStart' ("drop " ++ Remote.name remote) key afile
-	next $ performRemote key numcopies remote
+	next $ performRemote key afile numcopies remote

-performLocal :: Key -> NumCopies -> Maybe Remote -> CommandPerform
-performLocal key numcopies knownpresentremote = lockContent key $ do
+performLocal :: Key -> AssociatedFile -> NumCopies -> Maybe Remote -> CommandPerform
+performLocal key afile numcopies knownpresentremote = lockContent key $ do
 	(remotes, trusteduuids) <- Remote.keyPossibilitiesTrusted key
 	let trusteduuids' = case knownpresentremote of
 		Nothing -> trusteduuids
 		Just r -> nub (Remote.uuid r:trusteduuids)
 	untrusteduuids <- trustGet UnTrusted
 	let tocheck = Remote.remotesWithoutUUID remotes (trusteduuids'++untrusteduuids)
-	stopUnless (canDropKey key numcopies trusteduuids' tocheck []) $ do
-		removeAnnex key
-		next $ cleanupLocal key
+	u <- getUUID
+	ifM (canDrop u key afile numcopies trusteduuids' tocheck [])
+		( do
+			removeAnnex key
+			notifyDrop afile True
+			next $ cleanupLocal key
+		, do
+			notifyDrop afile False
+			stop
+		)

-performRemote :: Key -> NumCopies -> Remote -> CommandPerform
-performRemote key numcopies remote = lockContent key $ do
+performRemote :: Key -> AssociatedFile -> NumCopies -> Remote -> CommandPerform
+performRemote key afile numcopies remote = lockContent key $ do
 	-- Filter the remote it's being dropped from out of the lists of
 	-- places assumed to have the key, and places to check.
 	-- When the local repo has the key, that's one additional copy.

@@ -76,7 +87,7 @@ performRemote key numcopies remote = lockContent key $ do
 	untrusteduuids <- trustGet UnTrusted
 	let tocheck = filter (/= remote) $
 		Remote.remotesWithoutUUID remotes (have++untrusteduuids)
-	stopUnless (canDropKey key numcopies have tocheck [uuid]) $ do
+	stopUnless (canDrop uuid key afile numcopies have tocheck [uuid]) $ do
 		ok <- Remote.removeKey remote key
 		next $ cleanupRemote key remote ok
   where

@@ -95,13 +106,19 @@ cleanupRemote key remote ok = do

 {- Checks specified remotes to verify that enough copies of a key exist to
 - allow it to be safely removed (with no data loss). Can be provided with
-- some locations where the key is known/assumed to be present. -}
-canDropKey :: Key -> NumCopies -> [UUID] -> [Remote] -> [UUID] -> Annex Bool
-canDropKey key numcopies have check skip = do
-	force <- Annex.getState Annex.force
-	if force || numcopies == NumCopies 0
-		then return True
-		else findCopies key numcopies skip have check
+- some locations where the key is known/assumed to be present.
+-
+- Also checks if it's required content, and refuses to drop if so.
+-
+- --force overrides and always allows dropping.
+-}
+canDrop :: UUID -> Key -> AssociatedFile -> NumCopies -> [UUID] -> [Remote] -> [UUID] -> Annex Bool
+canDrop dropfrom key afile numcopies have check skip = ifM (Annex.getState Annex.force)
+	( return True
+	, checkRequiredContent dropfrom key afile
+		<&&>
+		findCopies key numcopies skip have check
+	)

 findCopies :: Key -> NumCopies -> [UUID] -> [UUID] -> [Remote] -> Annex Bool
 findCopies key need skip = helper [] []

@@ -137,6 +154,19 @@ notEnoughCopies key need have skip bad = do
 	unsafe = showNote "unsafe"
 	hint = showLongNote "(Use --force to override this check, or adjust numcopies.)"

+checkRequiredContent :: UUID -> Key -> AssociatedFile -> Annex Bool
+checkRequiredContent u k afile =
+	ifM (isRequiredContent (Just u) S.empty (Just k) afile False)
+		( requiredContent
+		, return True
+		)
+
+requiredContent :: Annex Bool
+requiredContent = do
+	showLongNote "That file is required content, it cannot be dropped!"
+	showLongNote "(Use --force to override this check, or adjust required content configuration.)"
+	return False
+
 {- In auto mode, only runs the action if there are enough
 - copies on other semitrusted repositories. -}
 checkDropAuto :: Maybe Remote -> FilePath -> Key -> (NumCopies -> CommandStart) -> CommandStart
@@ -34,8 +34,8 @@ perform numcopies key = maybe droplocal dropremote =<< Remote.byNameWithUUID =<<
   where
	dropremote r = do
		showAction $ "from " ++ Remote.name r
-		Command.Drop.performRemote key numcopies r
-	droplocal = Command.Drop.performLocal key numcopies Nothing
+		Command.Drop.performRemote key Nothing numcopies r
+	droplocal = Command.Drop.performLocal key Nothing numcopies Nothing
	from = Annex.getField $ optionName Command.Drop.dropFromOption

 performOther :: (Key -> Git.Repo -> FilePath) -> Key -> CommandPerform
@@ -11,7 +11,7 @@ import Common.Annex
 import Command
 import qualified Remote
 import Annex.Content
-import Logs.Transfer
+import Annex.Transfer
 import Config.NumCopies
 import Annex.Wanted
 import qualified Command.Move

@@ -69,15 +69,15 @@ getKeyFile' key afile dest = dispatch
 		showNote "not available"
 		showlocs
 		return False
-	dispatch remotes = trycopy remotes remotes
-	trycopy full [] = do
+	dispatch remotes = notifyTransfer Download afile $ trycopy remotes remotes
+	trycopy full [] _ = do
 		Remote.showTriedRemotes full
 		showlocs
 		return False
-	trycopy full (r:rs) =
+	trycopy full (r:rs) witness =
 		ifM (probablyPresent r)
-			( docopy r (trycopy full rs)
-			, trycopy full rs
+			( docopy r witness <||> trycopy full rs witness
+			, trycopy full rs witness
 			)
	showlocs = Remote.showLocations key []
		"No other repository is known to contain the file."

@@ -87,8 +87,6 @@ getKeyFile' key afile dest = dispatch
 		| Remote.hasKeyCheap r =
 			either (const False) id <$> Remote.hasKey r key
 		| otherwise = return True
-	docopy r continue = do
-		ok <- download (Remote.uuid r) key afile noRetry $ \p -> do
-			showAction $ "from " ++ Remote.name r
-			Remote.retrieveKeyFile r key afile dest p
-		if ok then return ok else continue
+	docopy r = download (Remote.uuid r) key afile noRetry $ \p -> do
+		showAction $ "from " ++ Remote.name r
+		Remote.retrieveKeyFile r key afile dest p
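trycopy now chains attempts with <||>, and canDrop (Command/Drop.hs above) guards with <&&>; both are the short-circuiting monadic booleans git-annex gets from Utility.Monad. For reference, the idiom amounts to the following sketch (the library's own definitions may be phrased differently):

    (<||>) :: Monad m => m Bool -> m Bool -> m Bool
    a <||> b = do
        x <- a
        if x then return True else b

    (<&&>) :: Monad m => m Bool -> m Bool -> m Bool
    a <&&> b = do
        x <- a
        if x then b else return False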
@@ -15,6 +15,8 @@ import Text.Feed.Types
 import qualified Data.Set as S
 import qualified Data.Map as M
 import Data.Time.Clock
+import Data.Time.Format
+import System.Locale

 import Common.Annex
 import qualified Annex

@@ -212,6 +214,7 @@ feedFile tmpl i extension = Utility.Format.format tmpl $ M.fromList
 	, fieldMaybe "itemdescription" $ getItemDescription $ item i
 	, fieldMaybe "itemrights" $ getItemRights $ item i
 	, fieldMaybe "itemid" $ snd <$> getItemId (item i)
+	, fieldMaybe "itempubdate" $ pubdate $ item i
 	, ("extension", sanitizeFilePath extension)
 	]
   where

@@ -221,6 +224,12 @@ feedFile tmpl i extension = Utility.Format.format tmpl $ M.fromList
 	fieldMaybe k Nothing = (k, "none")
 	fieldMaybe k (Just v) = field k v

+	pubdate itm = case getItemPublishDate itm :: Maybe (Maybe UTCTime) of
+		Just (Just d) -> Just $
+			formatTime defaultTimeLocale "%F" d
+		-- if date cannot be parsed, use the raw string
+		_ -> replace "/" "-" <$> getItemPublishDateString itm
+
 {- Called when there is a problem with a feed.
 - Throws an error if the feed is broken, otherwise shows a warning. -}
 feedProblem :: URLString -> String -> Annex ()
@@ -38,7 +38,7 @@ seek ps = do

 getList :: Annex [(UUID, RemoteName, TrustLevel)]
 getList = ifM (Annex.getFlag $ optionName allrepos)
-	( nubBy ((==) `on` fst3) <$> ((++) <$> getRemotes <*> getAll)
+	( nubBy ((==) `on` fst3) <$> ((++) <$> getRemotes <*> getAllUUIDs)
 	, getRemotes
 	)
   where

@@ -48,7 +48,7 @@ getList = ifM (Annex.getFlag $ optionName allrepos)
 		hereu <- getUUID
 		heretrust <- lookupTrust hereu
 		return $ (hereu, "here", heretrust) : zip3 (map uuid rs) (map name rs) ts
-	getAll = do
+	getAllUUIDs = do
 		rs <- M.toList <$> uuidMap
 		rs3 <- forM rs $ \(u, n) -> (,,)
 			<$> pure u
@@ -14,8 +14,8 @@ import qualified Annex
 import Annex.Content
 import qualified Remote
 import Annex.UUID
+import Annex.Transfer
 import Logs.Presence
-import Logs.Transfer

 def :: [Command]
 def = [withOptions moveOptions $ command "move" paramPaths seek

@@ -98,8 +98,9 @@ toPerform dest move key afile fastcheck isthere = moveLock move key $
 			stop
 		Right False -> do
 			showAction $ "to " ++ Remote.name dest
-			ok <- upload (Remote.uuid dest) key afile noRetry $
-				Remote.storeKey dest key afile
+			ok <- notifyTransfer Upload afile $
+				upload (Remote.uuid dest) key afile noRetry $
+					Remote.storeKey dest key afile
 			if ok
 				then do
 					Remote.logStatus dest key InfoPresent

@@ -155,9 +156,10 @@ fromPerform src move key afile = moveLock move key $
 	, handle move =<< go
 	)
   where
-	go = download (Remote.uuid src) key afile noRetry $ \p -> do
-		showAction $ "from " ++ Remote.name src
-		getViaTmp key $ \t -> Remote.retrieveKeyFile src key afile t p
+	go = notifyTransfer Download afile $
+		download (Remote.uuid src) key afile noRetry $ \p -> do
+			showAction $ "from " ++ Remote.name src
+			getViaTmp key $ \t -> Remote.retrieveKeyFile src key afile t p
	handle _ False = stop -- failed
	handle False True = next $ return True -- copy complete
	handle True True = do -- finish moving
Command/NotifyChanges.hs (new file) | 83
@@ -0,0 +1,83 @@
+{- git-annex-shell command
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Command.NotifyChanges where
+
+import Common.Annex
+import Command
+import Utility.DirWatcher
+import Utility.DirWatcher.Types
+import qualified Git
+import Git.Sha
+import RemoteDaemon.Transport.Ssh.Types
+
+import Control.Concurrent
+import Control.Concurrent.Async
+import Control.Concurrent.STM
+
+def :: [Command]
+def = [noCommit $ command "notifychanges" paramNothing seek SectionPlumbing
+	"sends notification when git refs are changed"]
+
+seek :: CommandSeek
+seek = withNothing start
+
+start :: CommandStart
+start = do
+	-- This channel is used to accumulate notifcations,
+	-- because the DirWatcher might have multiple threads that find
+	-- changes at the same time.
+	chan <- liftIO newTChanIO
+
+	g <- gitRepo
+	let refdir = Git.localGitDir g </> "refs"
+	liftIO $ createDirectoryIfMissing True refdir
+
+	let notifyhook = Just $ notifyHook chan
+	let hooks = mkWatchHooks
+		{ addHook = notifyhook
+		, modifyHook = notifyhook
+		}
+
+	void $ liftIO $ watchDir refdir (const False) True hooks id
+
+	let sender = do
+		send READY
+		forever $ send . CHANGED =<< drain chan
+
+	-- No messages need to be received from the caller,
+	-- but when it closes the connection, notice and terminate.
+	let receiver = forever $ void $ getLine
+	void $ liftIO $ concurrently sender receiver
+	stop
+
+notifyHook :: TChan Git.Sha -> FilePath -> Maybe FileStatus -> IO ()
+notifyHook chan reffile _
+	| ".lock" `isSuffixOf` reffile = noop
+	| otherwise = void $ do
+		sha <- catchDefaultIO Nothing $
+			extractSha <$> readFile reffile
+		maybe noop (atomically . writeTChan chan) sha
+
+-- When possible, coalesce ref writes that occur closely together
+-- in time. Delay up to 0.05 seconds to get more ref writes.
+drain :: TChan Git.Sha -> IO [Git.Sha]
+drain chan = do
+	r <- atomically $ readTChan chan
+	threadDelay 50000
+	rs <- atomically $ drain' chan
+	return (r:rs)
+
+drain' :: TChan Git.Sha -> STM [Git.Sha]
+drain' chan = loop []
+  where
+	loop rs = maybe (return rs) (\r -> loop (r:rs)) =<< tryReadTChan chan
+
+send :: Notification -> IO ()
+send n = do
+	putStrLn $ unwords $ formatMessage n
+	hFlush stdout
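The drain/drain' pair is a reusable STM coalescing idiom: block for the first event, sleep briefly, then sweep up any stragglers in one transaction. A generalized sketch of the same pattern (hypothetical helper, not in this commit):

    import Control.Concurrent
    import Control.Concurrent.STM

    -- Wait for one item, delay the given microseconds, then collect
    -- whatever else has arrived, preserving arrival order.
    coalesce :: Int -> TChan a -> IO [a]
    coalesce delay chan = do
        x <- atomically $ readTChan chan
        threadDelay delay
        xs <- atomically $ sweep []
        return (x : reverse xs)
      where
        sweep acc = maybe (return acc) (\y -> sweep (y:acc)) =<< tryReadTChan chan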
@@ -5,6 +5,8 @@
 - Licensed under the GNU GPL version 3 or higher.
 -}

+{-# LANGUAGE CPP #-}
+
 module Command.PreCommit where

 import Common.Annex

@@ -16,11 +18,17 @@ import Annex.Direct
 import Annex.Hook
 import Annex.View
 import Annex.View.ViewedFile
+import Annex.Perms
+import Annex.Exception
 import Logs.View
 import Logs.MetaData
 import Types.View
 import Types.MetaData

+#ifdef mingw32_HOST_OS
+import Utility.WinLock
+#endif
+
 import qualified Data.Set as S

 def :: [Command]

@@ -28,7 +36,7 @@ def = [command "pre-commit" paramPaths seek SectionPlumbing
 	"run by git pre-commit hook"]

 seek :: CommandSeek
-seek ps = ifM isDirect
+seek ps = lockPreCommitHook $ ifM isDirect
 	( do
 		-- update direct mode mappings for committed files
 		withWords startDirect ps

@@ -82,3 +90,22 @@ showMetaDataChange = showLongNote . unlines . concatMap showmeta . fromMetaData
 	showset v
 		| isSet v = "+"
 		| otherwise = "-"
+
+{- Takes exclusive lock; blocks until available. -}
+lockPreCommitHook :: Annex a -> Annex a
+lockPreCommitHook a = do
+	lockfile <- fromRepo gitAnnexPreCommitLock
+	createAnnexDirectory $ takeDirectory lockfile
+	mode <- annexFileMode
+	bracketIO (lock lockfile mode) unlock (const a)
+  where
+#ifndef mingw32_HOST_OS
+	lock lockfile mode = do
+		l <- liftIO $ noUmask mode $ createFile lockfile mode
+		liftIO $ waitToSetLock l (WriteLock, AbsoluteSeek, 0, 0)
+		return l
+	unlock = closeFd
+#else
+	lock lockfile _mode = liftIO $ waitToLock $ lockExclusive lockfile
+	unlock = dropLock
+#endif
Command/RemoteDaemon.hs (new file) | 24
@@ -0,0 +1,24 @@
+{- git-annex command
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Command.RemoteDaemon where
+
+import Common.Annex
+import Command
+import RemoteDaemon.Core
+
+def :: [Command]
+def = [noCommit $ command "remotedaemon" paramNothing seek SectionPlumbing
+	"detects when remotes have changed, and fetches from them"]
+
+seek :: CommandSeek
+seek = withNothing start
+
+start :: CommandStart
+start = do
+	liftIO runForeground
+	stop
|
@ -12,7 +12,7 @@ import Command
|
|||
import Annex.Content
|
||||
import Annex
|
||||
import Utility.Rsync
|
||||
import Logs.Transfer
|
||||
import Annex.Transfer
|
||||
import qualified CmdLine.GitAnnexShell.Fields as Fields
|
||||
import Utility.Metered
|
||||
|
||||
|
|
|
@@ -11,7 +11,7 @@ import Common.Annex
 import Command
 import Annex.Content
 import Logs.Location
-import Logs.Transfer
+import Annex.Transfer
 import qualified Remote
 import Types.Remote

@@ -41,7 +41,7 @@ start to from file key =
 	_ -> error "specify either --from or --to"

 toPerform :: Remote -> Key -> AssociatedFile -> CommandPerform
-toPerform remote key file = go $
+toPerform remote key file = go Upload file $
 	upload (uuid remote) key file forwardRetry $ \p -> do
 		ok <- Remote.storeKey remote key file p
 		when ok $

@@ -49,9 +49,9 @@ toPerform remote key file = go $
 		return ok

 fromPerform :: Remote -> Key -> AssociatedFile -> CommandPerform
-fromPerform remote key file = go $
+fromPerform remote key file = go Upload file $
 	download (uuid remote) key file forwardRetry $ \p ->
 		getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p

-go :: Annex Bool -> CommandPerform
-go a = a >>= liftIO . exitBool
+go :: Direction -> AssociatedFile -> (NotifyWitness -> Annex Bool) -> CommandPerform
+go direction file a = notifyTransfer direction file a >>= liftIO . exitBool
@@ -13,11 +13,10 @@ import Common.Annex
 import Command
 import Annex.Content
 import Logs.Location
-import Logs.Transfer
+import Annex.Transfer
 import qualified Remote
 import Types.Key

-import GHC.IO.Handle
 import Utility.SimpleProtocol (ioHandles)

 data TransferRequest = TransferRequest Direction Remote Key AssociatedFile

@@ -29,34 +28,21 @@ seek :: CommandSeek
 seek = withNothing start

 start :: CommandStart
-start = withHandles $ \(readh, writeh) -> do
+start = do
+	(readh, writeh) <- liftIO ioHandles
 	runRequests readh writeh runner
 	stop
   where
	runner (TransferRequest direction remote key file)
-		| direction == Upload =
+		| direction == Upload = notifyTransfer direction file $
 			upload (Remote.uuid remote) key file forwardRetry $ \p -> do
 				ok <- Remote.storeKey remote key file p
 				when ok $
 					Remote.logStatus remote key InfoPresent
 				return ok
-		| otherwise = download (Remote.uuid remote) key file forwardRetry $ \p ->
-			getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p
-
-{- stdin and stdout are connected with the caller, to be used for
- - communication with it. But doing a transfer might involve something
- - that tries to read from stdin, or write to stdout. To avoid that, close
- - stdin, and duplicate stderr to stdout. Return two new handles
- - that are duplicates of the original (stdin, stdout). -}
-withHandles :: ((Handle, Handle) -> Annex a) -> Annex a
-withHandles a = do
-	readh <- liftIO $ hDuplicate stdin
-	writeh <- liftIO $ hDuplicate stdout
-	liftIO $ do
-		nullh <- openFile devNull ReadMode
-		nullh `hDuplicateTo` stdin
-		stderr `hDuplicateTo` stdout
-	a (readh, writeh)
+		| otherwise = notifyTransfer direction file $
+			download (Remote.uuid remote) key file forwardRetry $ \p ->
+				getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p

 runRequests
 	:: Handle
@@ -16,15 +16,47 @@ import qualified Annex
 import Annex.Content
 import Annex.Content.Direct
 import qualified Git.Command
 import qualified Git.LsFiles as LsFiles
+import qualified Git.Ref
+import qualified Git.DiffTree as DiffTree
 import Utility.CopyFile
+import Command.PreCommit (lockPreCommitHook)

 def :: [Command]
 def = [command "unannex" paramPaths seek SectionUtility
 	"undo accidental add command"]

 seek :: CommandSeek
-seek = withFilesInGit $ whenAnnexed start
+seek = wrapUnannex . (withFilesInGit $ whenAnnexed start)
+
+wrapUnannex :: Annex a -> Annex a
+wrapUnannex a = ifM isDirect
+	( a
+	{- Run with the pre-commit hook disabled, to avoid confusing
+	 - behavior if an unannexed file is added back to git as
+	 - a normal, non-annexed file and then committed.
+	 - Otherwise, the pre-commit hook would think that the file
+	 - has been unlocked and needs to be re-annexed.
+	 -
+	 - At the end, make a commit removing the unannexed files.
+	 -}
+	, ifM cleanindex
+		( lockPreCommitHook $ commit `after` a
+		, error "Cannot proceed with uncommitted changes staged in the index. Recommend you: git commit"
+		)
+	)
+  where
+	commit = inRepo $ Git.Command.run
+		[ Param "commit"
+		, Param "-q"
+		, Param "--allow-empty"
+		, Param "--no-verify"
+		, Param "-m", Param "content removed from git annex"
+		]
+	cleanindex = do
+		(diff, cleanup) <- inRepo $ DiffTree.diffIndex Git.Ref.headRef
+		if null diff
+			then void (liftIO cleanup) >> return True
+			else void (liftIO cleanup) >> return False

 start :: FilePath -> (Key, Backend) -> CommandStart
 start file (key, _) = stopUnless (inAnnex key) $ do

@@ -36,26 +68,7 @@ start file (key, _) = stopUnless (inAnnex key) $ do
 performIndirect :: FilePath -> Key -> CommandPerform
 performIndirect file key = do
 	liftIO $ removeFile file

 	-- git rm deletes empty directory without --cached
 	inRepo $ Git.Command.run [Params "rm --cached --force --quiet --", File file]

-	-- If the file was already committed, it is now staged for removal.
-	-- Commit that removal now, to avoid later confusing the
-	-- pre-commit hook, if this file is later added back to
-	-- git as a normal non-annexed file, to thinking that the
-	-- file has been unlocked and needs to be re-annexed.
-	(s, reap) <- inRepo $ LsFiles.staged [file]
-	unless (null s) $
-		inRepo $ Git.Command.run
-			[ Param "commit"
-			, Param "-q"
-			, Param "--no-verify"
-			, Param "-m", Param "content removed from git annex"
-			, Param "--", File file
-			]
-	void $ liftIO reap
-
 	next $ cleanupIndirect file key

 cleanupIndirect :: FilePath -> Key -> CommandCleanup
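A note on commit `after` a above: the after combinator (git-annex's Utility.Monad) runs the second action first and the first action afterwards, returning the second action's result. A self-contained sketch, assuming those semantics:

-- b `after` a: run a, then b, and return a's value.
after :: Monad m => m b -> m a -> m a
after b a = do
	r <- a
	_ <- b
	return r

main :: IO ()
main = do
	r <- putStrLn "commit" `after` (putStrLn "unannex" >> return (42 :: Int))
	print r  -- prints "unannex", then "commit", then 42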
@@ -36,7 +36,7 @@ check = do

 seek :: CommandSeek
 seek ps = do
-	withFilesNotInGit (whenAnnexed startCheckIncomplete) ps
+	withFilesNotInGit False (whenAnnexed startCheckIncomplete) ps
 	withFilesInGit (whenAnnexed Command.Unannex.start) ps
 	finish
@@ -61,6 +61,7 @@ data Cfg = Cfg
 	{ cfgTrustMap :: TrustMap
 	, cfgGroupMap :: M.Map UUID (S.Set Group)
 	, cfgPreferredContentMap :: M.Map UUID PreferredContentExpression
+	, cfgRequiredContentMap :: M.Map UUID PreferredContentExpression
 	, cfgGroupPreferredContentMap :: M.Map Group PreferredContentExpression
 	, cfgScheduleMap :: M.Map UUID [ScheduledActivity]
 	}

@@ -70,6 +71,7 @@ getCfg = Cfg
 	<$> trustMapRaw -- without local trust overrides
 	<*> (groupsByUUID <$> groupMap)
 	<*> preferredContentMapRaw
+	<*> requiredContentMapRaw
 	<*> groupPreferredContentMapRaw
 	<*> scheduleMap

@@ -79,6 +81,7 @@ setCfg curcfg newcfg = do
 	mapM_ (uncurry trustSet) $ M.toList $ cfgTrustMap diff
 	mapM_ (uncurry groupSet) $ M.toList $ cfgGroupMap diff
 	mapM_ (uncurry preferredContentSet) $ M.toList $ cfgPreferredContentMap diff
+	mapM_ (uncurry requiredContentSet) $ M.toList $ cfgRequiredContentMap diff
 	mapM_ (uncurry groupPreferredContentSet) $ M.toList $ cfgGroupPreferredContentMap diff
 	mapM_ (uncurry scheduleSet) $ M.toList $ cfgScheduleMap diff

@@ -87,6 +90,7 @@ diffCfg curcfg newcfg = Cfg
 	{ cfgTrustMap = diff cfgTrustMap
 	, cfgGroupMap = diff cfgGroupMap
 	, cfgPreferredContentMap = diff cfgPreferredContentMap
+	, cfgRequiredContentMap = diff cfgRequiredContentMap
 	, cfgGroupPreferredContentMap = diff cfgGroupPreferredContentMap
 	, cfgScheduleMap = diff cfgScheduleMap
 	}

@@ -102,6 +106,7 @@ genCfg cfg descs = unlines $ intercalate [""]
 	, preferredcontent
 	, grouppreferredcontent
 	, standardgroups
+	, requiredcontent
 	, schedule
 	]
   where

@@ -137,6 +142,11 @@ genCfg cfg descs = unlines $ intercalate [""]
 		[ com "Repository preferred contents" ]
 		(\(s, u) -> line "wanted" u s)
 		(\u -> line "wanted" u "standard")
+
+	requiredcontent = settings cfg descs cfgRequiredContentMap
+		[ com "Repository required contents" ]
+		(\(s, u) -> line "required" u s)
+		(\u -> line "required" u "")

 	grouppreferredcontent = settings' cfg allgroups cfgGroupPreferredContentMap
 		[ com "Group preferred contents"

@@ -228,6 +238,12 @@ parseCfg curcfg = go [] curcfg . lines
 			Nothing ->
 				let m = M.insert u value (cfgPreferredContentMap cfg)
 				in Right $ cfg { cfgPreferredContentMap = m }
+		| setting == "required" =
+			case checkPreferredContentExpression value of
+				Just e -> Left e
+				Nothing ->
+					let m = M.insert u value (cfgRequiredContentMap cfg)
+					in Right $ cfg { cfgRequiredContentMap = m }
 		| setting == "groupwanted" =
 			case checkPreferredContentExpression value of
 				Just e -> Left e

@@ -255,7 +271,6 @@ parseCfg curcfg = go [] curcfg . lines
 		[ com "** There was a problem parsing your input!"
 		, com "** Search for \"Parse error\" to find the bad lines."
 		, com "** Either fix the bad lines, or delete them (to discard your changes)."
 		, ""
 		]
 	parseerr = com "** Parse error in next line: "
@@ -11,6 +11,7 @@ import Control.Exception.Extensible as X (IOException)
 import Data.Maybe as X
 import Data.List as X hiding (head, tail, init, last)
 import Data.String.Utils as X hiding (join)
+import Data.Monoid as X

 import System.FilePath as X
 import System.Directory as X
@@ -32,7 +32,10 @@ getConfigMaybe (ConfigKey key) = fromRepo $ Git.Config.getMaybe key
 setConfig :: ConfigKey -> String -> Annex ()
 setConfig (ConfigKey key) value = do
 	inRepo $ Git.Command.run [Param "config", Param key, Param value]
-	Annex.changeGitRepo =<< inRepo Git.Config.reRead
+	reloadConfig
+
+reloadConfig :: Annex ()
+reloadConfig = Annex.changeGitRepo =<< inRepo Git.Config.reRead

 {- Unsets a git config setting. (Leaves it in state currently.) -}
 unsetConfig :: ConfigKey -> Annex ()
@@ -27,7 +27,7 @@ data RepoLocation
 	| LocalUnknown FilePath
 	| Url URI
 	| Unknown
-	deriving (Show, Eq)
+	deriving (Show, Eq, Ord)

 data Repo = Repo
 	{ location :: RepoLocation

@@ -41,7 +41,7 @@ data Repo = Repo
 	, gitEnv :: Maybe [(String, String)]
 	-- global options to pass to git when running git commands
 	, gitGlobalOpts :: [CommandParam]
-	} deriving (Show, Eq)
+	} deriving (Show, Eq, Ord)

 type RemoteName = String
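A likely motivation, inferred from later in this commit rather than stated here: RemoteDaemon.Core keys a map by Git.Repo (type RemoteMap = M.Map Git.Repo ...), and Data.Map requires Ord on its keys. A minimal sketch of why the derivation is needed (Loc is a hypothetical stand-in for RepoLocation):

import qualified Data.Map as M

-- M.fromList needs Ord on the key type, which is exactly what
-- `deriving (Show, Eq, Ord)` provides.
data Loc = Local FilePath | Unknown deriving (Show, Eq, Ord)

repos :: M.Map Loc String
repos = M.fromList [(Local "/src/repo", "origin"), (Unknown, "?")]

main :: IO ()
main = print (M.toList repos)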
Limit.hs (46 changed lines)
@@ -20,7 +20,6 @@ import Types.TrustLevel
 import Types.Key
 import Types.Group
 import Types.FileMatcher
-import Types.Limit
 import Types.MetaData
 import Logs.MetaData
 import Logs.Group

@@ -45,21 +44,20 @@ getMatcher :: Annex (MatchInfo -> Annex Bool)
 getMatcher = Utility.Matcher.matchM <$> getMatcher'

 getMatcher' :: Annex (Utility.Matcher.Matcher (MatchInfo -> Annex Bool))
-getMatcher' = do
-	m <- Annex.getState Annex.limit
-	case m of
-		Right r -> return r
-		Left l -> do
-			let matcher = Utility.Matcher.generate (reverse l)
-			Annex.changeState $ \s ->
-				s { Annex.limit = Right matcher }
-			return matcher
+getMatcher' = go =<< Annex.getState Annex.limit
+  where
+	go (CompleteMatcher matcher) = return matcher
+	go (BuildingMatcher l) = do
+		let matcher = Utility.Matcher.generate (reverse l)
+		Annex.changeState $ \s ->
+			s { Annex.limit = CompleteMatcher matcher }
+		return matcher

 {- Adds something to the limit list, which is built up reversed. -}
 add :: Utility.Matcher.Token (MatchInfo -> Annex Bool) -> Annex ()
 add l = Annex.changeState $ \s -> s { Annex.limit = prepend $ Annex.limit s }
   where
-	prepend (Left ls) = Left $ l:ls
+	prepend (BuildingMatcher ls) = BuildingMatcher $ l:ls
 	prepend _ = error "internal"

 {- Adds a new token. -}
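The rewrite swaps the bare Either for the new ExpandableMatcher type introduced in Types.FileMatcher later in this commit. A small standalone sketch of the build-then-freeze lifecycle, with tokens reduced to strings for illustration:

-- Tokens are prepended while limits are parsed, so compilation reverses.
data ExpandableMatcher t
	= BuildingMatcher [t]
	| CompleteMatcher [t]
	deriving Show

add :: t -> ExpandableMatcher t -> ExpandableMatcher t
add t (BuildingMatcher ts) = BuildingMatcher (t : ts)
add _ _ = error "internal"  -- adding after compilation is a bug

compile :: ExpandableMatcher t -> ExpandableMatcher t
compile (BuildingMatcher ts) = CompleteMatcher (reverse ts)
compile m = m

main :: IO ()
main = print . compile $ add "bar" (add "foo" (BuildingMatcher []))
-- CompleteMatcher ["foo","bar"]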
@@ -67,21 +65,21 @@ addToken :: String -> Annex ()
 addToken = add . Utility.Matcher.token

 {- Adds a new limit. -}
-addLimit :: Either String MatchFiles -> Annex ()
+addLimit :: Either String (MatchFiles Annex) -> Annex ()
 addLimit = either error (\l -> add $ Utility.Matcher.Operation $ l S.empty)

 {- Add a limit to skip files that do not match the glob. -}
 addInclude :: String -> Annex ()
 addInclude = addLimit . limitInclude

-limitInclude :: MkLimit
+limitInclude :: MkLimit Annex
 limitInclude glob = Right $ const $ return . matchGlobFile glob

 {- Add a limit to skip files that match the glob. -}
 addExclude :: String -> Annex ()
 addExclude = addLimit . limitExclude

-limitExclude :: MkLimit
+limitExclude :: MkLimit Annex
 limitExclude glob = Right $ const $ return . not . matchGlobFile glob

 matchGlobFile :: String -> (MatchInfo -> Bool)

@@ -119,10 +117,10 @@ addIn s = addLimit =<< mk
 		else inAnnex key

 {- Limit to content that is currently present on a uuid. -}
-limitPresent :: Maybe UUID -> MkLimit
+limitPresent :: Maybe UUID -> MkLimit Annex
 limitPresent u _ = Right $ matchPresent u

-matchPresent :: Maybe UUID -> MatchFiles
+matchPresent :: Maybe UUID -> MatchFiles Annex
 matchPresent u _ = checkKey $ \key -> do
 	hereu <- getUUID
 	if u == Just hereu || isNothing u

@@ -132,7 +130,7 @@ matchPresent u _ = checkKey $ \key -> do
 	return $ maybe False (`elem` us) u

 {- Limit to content that is in a directory, anywhere in the repository tree -}
-limitInDir :: FilePath -> MkLimit
+limitInDir :: FilePath -> MkLimit Annex
 limitInDir dir = const $ Right $ const go
   where
 	go (MatchingFile fi) = return $ elem dir $ splitPath $ takeDirectory $ matchFile fi

@@ -143,7 +141,7 @@ limitInDir dir = const $ Right $ const go
 addCopies :: String -> Annex ()
 addCopies = addLimit . limitCopies

-limitCopies :: MkLimit
+limitCopies :: MkLimit Annex
 limitCopies want = case split ":" want of
 	[v, n] -> case parsetrustspec v of
 		Just checker -> go n $ checktrust checker

@@ -169,7 +167,7 @@ limitCopies want = case split ":" want of
 addLackingCopies :: Bool -> String -> Annex ()
 addLackingCopies approx = addLimit . limitLackingCopies approx

-limitLackingCopies :: Bool -> MkLimit
+limitLackingCopies :: Bool -> MkLimit Annex
 limitLackingCopies approx want = case readish want of
 	Just needed -> Right $ \notpresent mi -> flip checkKey mi $
 		handle mi needed notpresent

@@ -191,7 +189,7 @@ limitLackingCopies approx want = case readish want of
 - This has a nice optimisation: When a file exists,
 - its key is obviously not unused.
 -}
-limitUnused :: MatchFiles
+limitUnused :: MatchFiles Annex
 limitUnused _ (MatchingFile _) = return False
 limitUnused _ (MatchingKey k) = S.member k <$> unusedKeys

@@ -202,7 +200,7 @@ addInAllGroup groupname = do
 	m <- groupMap
 	addLimit $ limitInAllGroup m groupname

-limitInAllGroup :: GroupMap -> MkLimit
+limitInAllGroup :: GroupMap -> MkLimit Annex
 limitInAllGroup m groupname
 	| S.null want = Right $ const $ const $ return True
 	| otherwise = Right $ \notpresent -> checkKey $ check notpresent

@@ -219,7 +217,7 @@ limitInAllGroup m groupname
 addInBackend :: String -> Annex ()
 addInBackend = addLimit . limitInBackend

-limitInBackend :: MkLimit
+limitInBackend :: MkLimit Annex
 limitInBackend name = Right $ const $ checkKey check
   where
 	check key = pure $ keyBackendName key == name

@@ -231,7 +229,7 @@ addLargerThan = addLimit . limitSize (>)
 addSmallerThan :: String -> Annex ()
 addSmallerThan = addLimit . limitSize (<)

-limitSize :: (Maybe Integer -> Maybe Integer -> Bool) -> MkLimit
+limitSize :: (Maybe Integer -> Maybe Integer -> Bool) -> MkLimit Annex
 limitSize vs s = case readSize dataUnits s of
 	Nothing -> Left "bad size"
 	Just sz -> Right $ go sz

@@ -249,7 +247,7 @@ limitSize vs s = case readSize dataUnits s of
 addMetaData :: String -> Annex ()
 addMetaData = addLimit . limitMetaData

-limitMetaData :: MkLimit
+limitMetaData :: MkLimit Annex
 limitMetaData s = case parseMetaData s of
 	Left e -> Left e
 	Right (f, v) ->
@@ -41,6 +41,7 @@ module Locations (
 	gitAnnexMergeDir,
 	gitAnnexJournalDir,
 	gitAnnexJournalLock,
+	gitAnnexPreCommitLock,
 	gitAnnexIndex,
 	gitAnnexIndexStatus,
 	gitAnnexViewIndex,

@@ -257,6 +258,10 @@ gitAnnexJournalDir r = addTrailingPathSeparator $ gitAnnexDir r </> "journal"
 gitAnnexJournalLock :: Git.Repo -> FilePath
 gitAnnexJournalLock r = gitAnnexDir r </> "journal.lck"

+{- Lock file for the pre-commit hook. -}
+gitAnnexPreCommitLock :: Git.Repo -> FilePath
+gitAnnexPreCommitLock r = gitAnnexDir r </> "precommit.lck"
+
 {- .git/annex/index is used to stage changes to the git-annex branch -}
 gitAnnexIndex :: Git.Repo -> FilePath
 gitAnnexIndex r = gitAnnexDir r </> "index"
Logs.hs (4 changed lines)
@@ -35,6 +35,7 @@ topLevelUUIDBasedLogs =
 	, trustLog
 	, groupLog
 	, preferredContentLog
+	, requiredContentLog
 	, scheduleLog
 	]

@@ -70,6 +71,9 @@ groupLog = "group.log"
 preferredContentLog :: FilePath
 preferredContentLog = "preferred-content.log"

+requiredContentLog :: FilePath
+requiredContentLog = "required-content.log"
+
 groupPreferredContentLog :: FilePath
 groupPreferredContentLog = "group-preferred-content.log"
@@ -6,16 +6,19 @@
 -}

 module Logs.PreferredContent (
 	preferredContentLog,
 	preferredContentSet,
+	requiredContentSet,
 	groupPreferredContentSet,
 	isPreferredContent,
+	isRequiredContent,
 	preferredContentMap,
-	preferredContentMapLoad,
 	preferredContentMapRaw,
+	requiredContentMap,
+	requiredContentMapRaw,
 	groupPreferredContentMapRaw,
 	checkPreferredContentExpression,
 	setStandardGroup,
+	preferredRequiredMapsLoad,
 ) where

 import qualified Data.Map as M

@@ -28,43 +31,57 @@ import qualified Annex.Branch
 import qualified Annex
 import Logs
 import Logs.UUIDBased
-import qualified Utility.Matcher
+import Utility.Matcher hiding (tokens)
 import Annex.FileMatcher
 import Annex.UUID
-import Types.Limit
 import Types.Group
 import Types.Remote (RemoteConfig)
 import Logs.Group
 import Logs.Remote
 import Types.FileMatcher
 import Types.StandardGroups
 import Limit

 {- Checks if a file is preferred content for the specified repository
 - (or the current repository if none is specified). -}
 isPreferredContent :: Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
-isPreferredContent mu notpresent mkey afile def = do
+isPreferredContent = checkMap preferredContentMap
+
+isRequiredContent :: Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
+isRequiredContent = checkMap requiredContentMap
+
+checkMap :: Annex (FileMatcherMap Annex) -> Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
+checkMap getmap mu notpresent mkey afile def = do
 	u <- maybe getUUID return mu
-	m <- preferredContentMap
+	m <- getmap
 	case M.lookup u m of
 		Nothing -> return def
 		Just matcher -> checkMatcher matcher mkey afile notpresent def

 {- The map is cached for speed. -}
-preferredContentMap :: Annex Annex.PreferredContentMap
-preferredContentMap = maybe preferredContentMapLoad return
+preferredContentMap :: Annex (FileMatcherMap Annex)
+preferredContentMap = maybe (fst <$> preferredRequiredMapsLoad) return
 	=<< Annex.getState Annex.preferredcontentmap

-{- Loads the map, updating the cache. -}
-preferredContentMapLoad :: Annex Annex.PreferredContentMap
-preferredContentMapLoad = do
+requiredContentMap :: Annex (FileMatcherMap Annex)
+requiredContentMap = maybe (snd <$> preferredRequiredMapsLoad) return
+	=<< Annex.getState Annex.requiredcontentmap
+
+preferredRequiredMapsLoad :: Annex (FileMatcherMap Annex, FileMatcherMap Annex)
+preferredRequiredMapsLoad = do
 	groupmap <- groupMap
 	configmap <- readRemoteLog
-	groupwantedmap <- groupPreferredContentMapRaw
-	m <- simpleMap
-		. parseLogWithUUID ((Just .) . makeMatcher groupmap configmap groupwantedmap)
-		<$> Annex.Branch.get preferredContentLog
-	Annex.changeState $ \s -> s { Annex.preferredcontentmap = Just m }
-	return m
+	let genmap l gm = simpleMap
+		. parseLogWithUUID ((Just .) . makeMatcher groupmap configmap gm)
+		<$> Annex.Branch.get l
+	pc <- genmap preferredContentLog =<< groupPreferredContentMapRaw
+	rc <- genmap requiredContentLog M.empty
+	-- Required content is implicitly also preferred content, so OR
+	let m = M.unionWith MOr pc rc
+	Annex.changeState $ \s -> s
+		{ Annex.preferredcontentmap = Just m
+		, Annex.requiredcontentmap = Just rc
+		}
+	return (m, rc)

 {- This intentionally never fails, even on unparsable expressions,
 - because the configuration is shared among repositories and newer

@@ -75,11 +92,11 @@ makeMatcher
 	-> M.Map Group PreferredContentExpression
 	-> UUID
 	-> PreferredContentExpression
-	-> FileMatcher
+	-> FileMatcher Annex
 makeMatcher groupmap configmap groupwantedmap u = go True True
   where
 	go expandstandard expandgroupwanted expr
-		| null (lefts tokens) = Utility.Matcher.generate $ rights tokens
+		| null (lefts tokens) = generate $ rights tokens
 		| otherwise = unknownMatcher u
 	  where
 		tokens = exprParser matchstandard matchgroupwanted groupmap configmap (Just u) expr

@@ -102,10 +119,10 @@ makeMatcher groupmap configmap groupwantedmap u = go True True
 -
 - This avoids unwanted/expensive changes to the content, until the problem
 - is resolved. -}
-unknownMatcher :: UUID -> FileMatcher
-unknownMatcher u = Utility.Matcher.generate [present]
+unknownMatcher :: UUID -> FileMatcher Annex
+unknownMatcher u = generate [present]
   where
-	present = Utility.Matcher.Operation $ matchPresent (Just u)
+	present = Operation $ matchPresent (Just u)

 {- Checks if an expression can be parsed, if not returns Just error -}
 checkPreferredContentExpression :: PreferredContentExpression -> Maybe String
@@ -21,14 +21,23 @@ import Types.Group

 {- Changes the preferred content configuration of a remote. -}
 preferredContentSet :: UUID -> PreferredContentExpression -> Annex ()
-preferredContentSet uuid@(UUID _) val = do
+preferredContentSet = setLog preferredContentLog
+
+requiredContentSet :: UUID -> PreferredContentExpression -> Annex ()
+requiredContentSet = setLog requiredContentLog
+
+setLog :: FilePath -> UUID -> PreferredContentExpression -> Annex ()
+setLog logfile uuid@(UUID _) val = do
 	ts <- liftIO getPOSIXTime
-	Annex.Branch.change preferredContentLog $
+	Annex.Branch.change logfile $
 		showLog id
 		. changeLog ts uuid val
 		. parseLog Just
-	Annex.changeState $ \s -> s { Annex.preferredcontentmap = Nothing }
-preferredContentSet NoUUID _ = error "unknown UUID; cannot modify"
+	Annex.changeState $ \s -> s
+		{ Annex.preferredcontentmap = Nothing
+		, Annex.requiredcontentmap = Nothing
+		}
+setLog _ NoUUID _ = error "unknown UUID; cannot modify"

 {- Changes the preferred content configuration of a group. -}
 groupPreferredContentSet :: Group -> PreferredContentExpression -> Annex ()

@@ -44,6 +53,10 @@ preferredContentMapRaw :: Annex (M.Map UUID PreferredContentExpression)
 preferredContentMapRaw = simpleMap . parseLog Just
 	<$> Annex.Branch.get preferredContentLog

+requiredContentMapRaw :: Annex (M.Map UUID PreferredContentExpression)
+requiredContentMapRaw = simpleMap . parseLog Just
+	<$> Annex.Branch.get requiredContentLog
+
 groupPreferredContentMapRaw :: Annex (M.Map Group PreferredContentExpression)
 groupPreferredContentMapRaw = simpleMap . parseMapLog Just Just
 	<$> Annex.Branch.get groupPreferredContentLog
Logs/Transfer.hs (102 changed lines)
@@ -88,108 +88,6 @@ percentComplete :: Transfer -> TransferInfo -> Maybe Percentage
 percentComplete (Transfer { transferKey = key }) info =
 	percentage <$> keySize key <*> Just (fromMaybe 0 $ bytesComplete info)

-type RetryDecider = TransferInfo -> TransferInfo -> Bool
-
-noRetry :: RetryDecider
-noRetry _ _ = False
-
-{- Retries a transfer when it fails, as long as the failed transfer managed
- - to send some data. -}
-forwardRetry :: RetryDecider
-forwardRetry old new = bytesComplete old < bytesComplete new
-
-upload :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
-upload u key = runTransfer (Transfer Upload u key)
-
-download :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
-download u key = runTransfer (Transfer Download u key)
-
-{- Runs a transfer action. Creates and locks the lock file while the
- - action is running, and stores info in the transfer information
- - file.
- -
- - If the transfer action returns False, the transfer info is
- - left in the failedTransferDir.
- -
- - If the transfer is already in progress, returns False.
- -
- - An upload can be run from a read-only filesystem, and in this case
- - no transfer information or lock file is used.
- -}
-runTransfer :: Transfer -> Maybe FilePath -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
-runTransfer t file shouldretry a = do
-	info <- liftIO $ startTransferInfo file
-	(meter, tfile, metervar) <- mkProgressUpdater t info
-	mode <- annexFileMode
-	(fd, inprogress) <- liftIO $ prep tfile mode info
-	if inprogress
-		then do
-			showNote "transfer already in progress"
-			return False
-		else do
-			ok <- retry info metervar $
-				bracketIO (return fd) (cleanup tfile) (const $ a meter)
-			unless ok $ recordFailedTransfer t info
-			return ok
-  where
-#ifndef mingw32_HOST_OS
-	prep tfile mode info = do
-		mfd <- catchMaybeIO $
-			openFd (transferLockFile tfile) ReadWrite (Just mode)
-				defaultFileFlags { trunc = True }
-		case mfd of
-			Nothing -> return (Nothing, False)
-			Just fd -> do
-				locked <- catchMaybeIO $
-					setLock fd (WriteLock, AbsoluteSeek, 0, 0)
-				if isNothing locked
-					then return (Nothing, True)
-					else do
-						void $ tryIO $ writeTransferInfoFile info tfile
-						return (mfd, False)
-#else
-	prep tfile _mode info = do
-		v <- catchMaybeIO $ lockExclusive (transferLockFile tfile)
-		case v of
-			Nothing -> return (Nothing, False)
-			Just Nothing -> return (Nothing, True)
-			Just (Just lockhandle) -> do
-				void $ tryIO $ writeTransferInfoFile info tfile
-				return (Just lockhandle, False)
-#endif
-	cleanup _ Nothing = noop
-	cleanup tfile (Just lockhandle) = do
-		void $ tryIO $ removeFile tfile
-#ifndef mingw32_HOST_OS
-		void $ tryIO $ removeFile $ transferLockFile tfile
-		closeFd lockhandle
-#else
-		{- Windows cannot delete the lockfile until the lock
-		 - is closed. So it's possible to race with another
-		 - process that takes the lock before it's removed,
-		 - so ignore failure to remove.
-		 -}
-		dropLock lockhandle
-		void $ tryIO $ removeFile $ transferLockFile tfile
-#endif
-	retry oldinfo metervar run = do
-		v <- tryAnnex run
-		case v of
-			Right b -> return b
-			Left _ -> do
-				b <- getbytescomplete metervar
-				let newinfo = oldinfo { bytesComplete = Just b }
-				if shouldretry oldinfo newinfo
-					then retry newinfo metervar run
-					else return False
-	getbytescomplete metervar
-		| transferDirection t == Upload =
-			liftIO $ readMVar metervar
-		| otherwise = do
-			f <- fromRepo $ gitAnnexTmpObjectLocation (transferKey t)
-			liftIO $ catchDefaultIO 0 $
-				fromIntegral . fileSize <$> getFileStatus f

 {- Generates a callback that can be called as transfer progresses to update
 - the transfer info file. Also returns the file it'll be updating, and a
 - MVar that can be used to read the number of bytesComplete. -}
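The RetryDecider logic removed here (it moves into Annex.Transfer in this release) is easy to state on its own. A self-contained sketch, using a simplified TransferInfo, of how forwardRetry only retries while each failed attempt made forward progress:

-- Simplified stand-in for Logs.Transfer's TransferInfo.
newtype TransferInfo = TransferInfo { bytesComplete :: Maybe Integer }

type RetryDecider = TransferInfo -> TransferInfo -> Bool

noRetry :: RetryDecider
noRetry _ _ = False

-- Retry only when the failed attempt sent more data than the one before
-- (Nothing compares below Just n, so a first measurement also counts).
forwardRetry :: RetryDecider
forwardRetry old new = bytesComplete old < bytesComplete new

main :: IO ()
main = do
	print $ forwardRetry (TransferInfo (Just 0)) (TransferInfo (Just 4096))    -- True
	print $ forwardRetry (TransferInfo (Just 4096)) (TransferInfo (Just 4096)) -- False: stalled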
Makefile (2 changed lines)
@@ -140,7 +140,7 @@ OSXAPP_BASE=$(OSXAPP_DEST)/Contents/MacOS/bundle
 osxapp: Build/Standalone Build/OSXMkLibs
 	$(MAKE) git-annex

-	rm -rf "$(OSXAPP_DEST)"
+	rm -rf "$(OSXAPP_DEST)" "$(OSXAPP_BASE)"
 	install -d tmp/build-dmg
 	cp -R standalone/osx/git-annex.app "$(OSXAPP_DEST)"
Remote/External/Types.hs (167 changed lines)
@@ -6,6 +6,7 @@
 -}

 {-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
+{-# OPTIONS_GHC -fno-warn-orphans #-}

 module Remote.External.Types (
 	External(..),

@@ -15,9 +16,9 @@ module Remote.External.Types (
 	withExternalLock,
 	ExternalState(..),
 	PrepareStatus(..),
-	parseMessage,
-	Sendable(..),
-	Receivable(..),
+	Proto.parseMessage,
+	Proto.Sendable(..),
+	Proto.Receivable(..),
 	Request(..),
 	needsPREPARE,
 	Response(..),

@@ -39,12 +40,11 @@ import Logs.Transfer (Direction(..))
 import Config.Cost (Cost)
 import Types.Remote (RemoteConfig)
 import Types.Availability (Availability(..))
+import qualified Utility.SimpleProtocol as Proto

-import Data.Char
 import Control.Concurrent.STM

 -- If the remote is not yet running, the ExternalState TMVar is empty.
 -- The
 data External = External
 	{ externalType :: ExternalType
 	, externalUUID :: UUID

@@ -85,22 +85,6 @@ withExternalLock external = bracketIO setup cleanup
 	cleanup = atomically . putTMVar v
 	v = externalLock external

--- Messages that git-annex can send.
-class Sendable m where
-	formatMessage :: m -> [String]
-
--- Messages that git-annex can receive.
-class Receivable m where
-	-- Passed the first word of the message, returns
-	-- a Parser that can be be fed the rest of the message to generate
-	-- the value.
-	parseCommand :: String -> Parser m
-
-parseMessage :: (Receivable m) => String -> Maybe m
-parseMessage s = parseCommand command rest
-  where
-	(command, rest) = splitWord s
-
 -- Messages that can be sent to the external remote to request it do something.
 data Request
 	= PREPARE

@@ -118,15 +102,19 @@ needsPREPARE PREPARE = False
 needsPREPARE INITREMOTE = False
 needsPREPARE _ = True

-instance Sendable Request where
+instance Proto.Sendable Request where
 	formatMessage PREPARE = ["PREPARE"]
 	formatMessage INITREMOTE = ["INITREMOTE"]
 	formatMessage GETCOST = ["GETCOST"]
 	formatMessage GETAVAILABILITY = ["GETAVAILABILITY"]
 	formatMessage (TRANSFER direction key file) =
-		[ "TRANSFER", serialize direction, serialize key, serialize file ]
-	formatMessage (CHECKPRESENT key) = [ "CHECKPRESENT", serialize key ]
-	formatMessage (REMOVE key) = [ "REMOVE", serialize key ]
+		[ "TRANSFER"
+		, Proto.serialize direction
+		, Proto.serialize key
+		, Proto.serialize file
+		]
+	formatMessage (CHECKPRESENT key) = [ "CHECKPRESENT", Proto.serialize key ]
+	formatMessage (REMOVE key) = [ "REMOVE", Proto.serialize key ]

 -- Responses the external remote can make to requests.
 data Response

@@ -146,22 +134,22 @@ data Response
 	| UNSUPPORTED_REQUEST
 	deriving (Show)

-instance Receivable Response where
-	parseCommand "PREPARE-SUCCESS" = parse0 PREPARE_SUCCESS
-	parseCommand "PREPARE-FAILURE" = parse1 PREPARE_FAILURE
-	parseCommand "TRANSFER-SUCCESS" = parse2 TRANSFER_SUCCESS
-	parseCommand "TRANSFER-FAILURE" = parse3 TRANSFER_FAILURE
-	parseCommand "CHECKPRESENT-SUCCESS" = parse1 CHECKPRESENT_SUCCESS
-	parseCommand "CHECKPRESENT-FAILURE" = parse1 CHECKPRESENT_FAILURE
-	parseCommand "CHECKPRESENT-UNKNOWN" = parse2 CHECKPRESENT_UNKNOWN
-	parseCommand "REMOVE-SUCCESS" = parse1 REMOVE_SUCCESS
-	parseCommand "REMOVE-FAILURE" = parse2 REMOVE_FAILURE
-	parseCommand "COST" = parse1 COST
-	parseCommand "AVAILABILITY" = parse1 AVAILABILITY
-	parseCommand "INITREMOTE-SUCCESS" = parse0 INITREMOTE_SUCCESS
-	parseCommand "INITREMOTE-FAILURE" = parse1 INITREMOTE_FAILURE
-	parseCommand "UNSUPPORTED-REQUEST" = parse0 UNSUPPORTED_REQUEST
-	parseCommand _ = parseFail
+instance Proto.Receivable Response where
+	parseCommand "PREPARE-SUCCESS" = Proto.parse0 PREPARE_SUCCESS
+	parseCommand "PREPARE-FAILURE" = Proto.parse1 PREPARE_FAILURE
+	parseCommand "TRANSFER-SUCCESS" = Proto.parse2 TRANSFER_SUCCESS
+	parseCommand "TRANSFER-FAILURE" = Proto.parse3 TRANSFER_FAILURE
+	parseCommand "CHECKPRESENT-SUCCESS" = Proto.parse1 CHECKPRESENT_SUCCESS
+	parseCommand "CHECKPRESENT-FAILURE" = Proto.parse1 CHECKPRESENT_FAILURE
+	parseCommand "CHECKPRESENT-UNKNOWN" = Proto.parse2 CHECKPRESENT_UNKNOWN
+	parseCommand "REMOVE-SUCCESS" = Proto.parse1 REMOVE_SUCCESS
+	parseCommand "REMOVE-FAILURE" = Proto.parse2 REMOVE_FAILURE
+	parseCommand "COST" = Proto.parse1 COST
+	parseCommand "AVAILABILITY" = Proto.parse1 AVAILABILITY
+	parseCommand "INITREMOTE-SUCCESS" = Proto.parse0 INITREMOTE_SUCCESS
+	parseCommand "INITREMOTE-FAILURE" = Proto.parse1 INITREMOTE_FAILURE
+	parseCommand "UNSUPPORTED-REQUEST" = Proto.parse0 UNSUPPORTED_REQUEST
+	parseCommand _ = Proto.parseFail

 -- Requests that the external remote can send at any time it's in control.
 data RemoteRequest

@@ -181,22 +169,22 @@ data RemoteRequest
 	| DEBUG String
 	deriving (Show)

-instance Receivable RemoteRequest where
-	parseCommand "VERSION" = parse1 VERSION
-	parseCommand "PROGRESS" = parse1 PROGRESS
-	parseCommand "DIRHASH" = parse1 DIRHASH
-	parseCommand "SETCONFIG" = parse2 SETCONFIG
-	parseCommand "GETCONFIG" = parse1 GETCONFIG
-	parseCommand "SETCREDS" = parse3 SETCREDS
-	parseCommand "GETCREDS" = parse1 GETCREDS
-	parseCommand "GETUUID" = parse0 GETUUID
-	parseCommand "GETGITDIR" = parse0 GETGITDIR
-	parseCommand "SETWANTED" = parse1 SETWANTED
-	parseCommand "GETWANTED" = parse0 GETWANTED
-	parseCommand "SETSTATE" = parse2 SETSTATE
-	parseCommand "GETSTATE" = parse1 GETSTATE
-	parseCommand "DEBUG" = parse1 DEBUG
-	parseCommand _ = parseFail
+instance Proto.Receivable RemoteRequest where
+	parseCommand "VERSION" = Proto.parse1 VERSION
+	parseCommand "PROGRESS" = Proto.parse1 PROGRESS
+	parseCommand "DIRHASH" = Proto.parse1 DIRHASH
+	parseCommand "SETCONFIG" = Proto.parse2 SETCONFIG
+	parseCommand "GETCONFIG" = Proto.parse1 GETCONFIG
+	parseCommand "SETCREDS" = Proto.parse3 SETCREDS
+	parseCommand "GETCREDS" = Proto.parse1 GETCREDS
+	parseCommand "GETUUID" = Proto.parse0 GETUUID
+	parseCommand "GETGITDIR" = Proto.parse0 GETGITDIR
+	parseCommand "SETWANTED" = Proto.parse1 SETWANTED
+	parseCommand "GETWANTED" = Proto.parse0 GETWANTED
+	parseCommand "SETSTATE" = Proto.parse2 SETSTATE
+	parseCommand "GETSTATE" = Proto.parse1 GETSTATE
+	parseCommand "DEBUG" = Proto.parse1 DEBUG
+	parseCommand _ = Proto.parseFail

 -- Responses to RemoteRequest.
 data RemoteResponse

@@ -204,21 +192,21 @@ data RemoteResponse
 	| CREDS String String
 	deriving (Show)

-instance Sendable RemoteResponse where
-	formatMessage (VALUE s) = [ "VALUE", serialize s ]
-	formatMessage (CREDS login password) = [ "CREDS", serialize login, serialize password ]
+instance Proto.Sendable RemoteResponse where
+	formatMessage (VALUE s) = [ "VALUE", Proto.serialize s ]
+	formatMessage (CREDS login password) = [ "CREDS", Proto.serialize login, Proto.serialize password ]

 -- Messages that can be sent at any time by either git-annex or the remote.
 data AsyncMessage
 	= ERROR ErrorMsg
 	deriving (Show)

-instance Sendable AsyncMessage where
-	formatMessage (ERROR err) = [ "ERROR", serialize err ]
+instance Proto.Sendable AsyncMessage where
+	formatMessage (ERROR err) = [ "ERROR", Proto.serialize err ]

-instance Receivable AsyncMessage where
-	parseCommand "ERROR" = parse1 ERROR
-	parseCommand _ = parseFail
+instance Proto.Receivable AsyncMessage where
+	parseCommand "ERROR" = Proto.parse1 ERROR
+	parseCommand _ = Proto.parseFail

 -- Data types used for parameters when communicating with the remote.
 -- All are serializable.

@@ -229,11 +217,7 @@ type ProtocolVersion = Int
 supportedProtocolVersions :: [ProtocolVersion]
 supportedProtocolVersions = [1]

-class ExternalSerializable a where
-	serialize :: a -> String
-	deserialize :: String -> Maybe a
-
-instance ExternalSerializable Direction where
+instance Proto.Serializable Direction where
 	serialize Upload = "STORE"
 	serialize Download = "RETRIEVE"

@@ -241,23 +225,23 @@ instance ExternalSerializable Direction where
 	deserialize "RETRIEVE" = Just Download
 	deserialize _ = Nothing

-instance ExternalSerializable Key where
+instance Proto.Serializable Key where
 	serialize = key2file
 	deserialize = file2key

-instance ExternalSerializable [Char] where
+instance Proto.Serializable [Char] where
 	serialize = id
 	deserialize = Just

-instance ExternalSerializable ProtocolVersion where
+instance Proto.Serializable ProtocolVersion where
 	serialize = show
 	deserialize = readish

-instance ExternalSerializable Cost where
+instance Proto.Serializable Cost where
 	serialize = show
 	deserialize = readish

-instance ExternalSerializable Availability where
+instance Proto.Serializable Availability where
 	serialize GloballyAvailable = "GLOBAL"
 	serialize LocallyAvailable = "LOCAL"

@@ -265,37 +249,6 @@ instance ExternalSerializable Availability where
 	deserialize "LOCAL" = Just LocallyAvailable
 	deserialize _ = Nothing

-instance ExternalSerializable BytesProcessed where
+instance Proto.Serializable BytesProcessed where
 	serialize (BytesProcessed n) = show n
 	deserialize = BytesProcessed <$$> readish

-{- Parsing the parameters of messages. Using the right parseN ensures
- - that the string is split into exactly the requested number of words,
- - which allows the last parameter of a message to contain arbitrary
- - whitespace, etc, without needing any special quoting.
- -}
-type Parser a = String -> Maybe a
-
-parseFail :: Parser a
-parseFail _ = Nothing
-
-parse0 :: a -> Parser a
-parse0 mk "" = Just mk
-parse0 _ _ = Nothing
-
-parse1 :: ExternalSerializable p1 => (p1 -> a) -> Parser a
-parse1 mk p1 = mk <$> deserialize p1
-
-parse2 :: (ExternalSerializable p1, ExternalSerializable p2) => (p1 -> p2 -> a) -> Parser a
-parse2 mk s = mk <$> deserialize p1 <*> deserialize p2
-  where
-	(p1, p2) = splitWord s
-
-parse3 :: (ExternalSerializable p1, ExternalSerializable p2, ExternalSerializable p3) => (p1 -> p2 -> p3 -> a) -> Parser a
-parse3 mk s = mk <$> deserialize p1 <*> deserialize p2 <*> deserialize p3
-  where
-	(p1, rest) = splitWord s
-	(p2, p3) = splitWord rest
-
-splitWord :: String -> (String, String)
-splitWord = separate isSpace
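The parseN helpers removed above move into Utility.SimpleProtocol (note the new Proto import), and the idea they implement deserves one example: splitting exactly the requested number of words lets the final field keep its whitespace. A self-contained sketch, simplified to String-only fields:

import Data.Char (isSpace)

-- Split one word off the front; the rest is returned unsplit.
splitWord :: String -> (String, String)
splitWord = fmap (dropWhile isSpace) . break isSpace

-- A two-field parser: the second field may contain spaces.
parse2 :: (String -> String -> a) -> String -> a
parse2 mk s = mk p1 p2
  where
	(p1, p2) = splitWord s

main :: IO ()
main = print $ parse2 (,) "SHA256--abc123 path with spaces/file name"
-- ("SHA256--abc123","path with spaces/file name")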
@@ -24,7 +24,7 @@ import qualified Git.Command
 import qualified Git.GCrypt
 import qualified Annex
 import Logs.Presence
-import Logs.Transfer
+import Annex.Transfer
 import Annex.UUID
 import Annex.Exception
 import qualified Annex.Content

@@ -321,7 +321,7 @@ copyFromRemote' r key file dest
 		case v of
 			Nothing -> return False
 			Just (object, checksuccess) ->
-				upload u key file noRetry
+				runTransfer (Transfer Download u key) file noRetry
 					(rsyncOrCopyFile params object dest)
 					<&&> checksuccess
 	| Git.repoIsSsh (repo r) = feedprogressback $ \feeder -> do

@@ -418,7 +418,7 @@ copyToRemote r key file p
 		( return True
 		, do
 			ensureInitialized
-			download u key file noRetry $ const $
+			runTransfer (Transfer Download u key) file noRetry $ const $
 				Annex.Content.saveState True `after`
 					Annex.Content.getViaTmpChecked (liftIO checksuccessio) key
 						(\d -> rsyncOrCopyFile params object d p)
@@ -82,7 +82,7 @@ glacierSetup' enabling u c = do
 	unless enabling $
 		genVault fullconfig u
 	gitConfigSpecialRemote u fullconfig "glacier" "true"
-	return (c', u)
+	return (fullconfig, u)
   where
 	remotename = fromJust (M.lookup "name" c)
 	defvault = remotename ++ "-" ++ fromUUID u

@@ -225,7 +225,8 @@ glacierParams :: RemoteConfig -> [CommandParam] -> [CommandParam]
 glacierParams c params = datacenter:params
   where
 	datacenter = Param $ "--region=" ++
-		fromJust (M.lookup "datacenter" c)
+		fromMaybe (error "Missing datacenter configuration")
+			(M.lookup "datacenter" c)

 glacierEnv :: RemoteConfig -> UUID -> Annex (Maybe [(String, String)])
 glacierEnv c u = go =<< getRemoteCredPairFor "glacier" c creds

@@ -239,7 +240,8 @@ glacierEnv c u = go =<< getRemoteCredPairFor "glacier" c creds
 	(uk, pk) = credPairEnvironment creds

 getVault :: RemoteConfig -> Vault
-getVault = fromJust . M.lookup "vault"
+getVault = fromMaybe (error "Missing vault configuration")
+	. M.lookup "vault"

 archive :: Remote -> Key -> Archive
 archive r k = fileprefix ++ key2file k
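Both hunks swap fromJust for fromMaybe with an explicit error string, so a missing setting fails with a message that names it. A quick illustration with a hypothetical config map:

import qualified Data.Map as M
import Data.Maybe (fromMaybe)

cfg :: M.Map String String
cfg = M.fromList [("name", "myglacier")]  -- note: no "vault" key

main :: IO ()
main =
	-- fromJust would die with the opaque "Maybe.fromJust: Nothing";
	-- fromMaybe lets the failure name the missing setting instead.
	putStrLn $ fromMaybe (error "Missing vault configuration")
		(M.lookup "vault" cfg)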
@@ -216,7 +216,7 @@ readTahoe hdl command params = withTahoeConfigDir hdl $ \configdir ->

 tahoeParams :: TahoeConfigDir -> String -> [CommandParam] -> [CommandParam]
 tahoeParams configdir command params =
-	Param command : Param "-d" : File configdir : params
+	Param "-d" : File configdir : Param command : params

 storeCapability :: UUID -> Key -> Capability -> Annex ()
 storeCapability u k cap = setRemoteState u k cap
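The fix reorders the argument list so the -d option precedes the subcommand, presumably because the tahoe CLI expects global options before the subcommand (an inference from the change, not stated in the diff). A standalone sketch of the resulting argument list, with a hypothetical config path:

data CommandParam = Param String | File FilePath deriving Show

tahoeParams :: FilePath -> String -> [CommandParam] -> [CommandParam]
tahoeParams configdir command params =
	Param "-d" : File configdir : Param command : params

main :: IO ()
main = print $ tahoeParams "/home/me/.tahoe-git-annex" "get" [Param "URI:CHK:..."]
-- [Param "-d",File "/home/me/.tahoe-git-annex",Param "get",Param "URI:CHK:..."]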
RemoteDaemon/Common.hs (new file, 42 lines)
@@ -0,0 +1,42 @@
+{- git-remote-daemon utilities
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module RemoteDaemon.Common
+	( liftAnnex
+	, inLocalRepo
+	, checkNewShas
+	) where
+
+import qualified Annex
+import Common.Annex
+import RemoteDaemon.Types
+import qualified Git
+import Annex.CatFile
+
+import Control.Concurrent
+
+-- Runs an Annex action. Long-running actions should be avoided,
+-- since only one liftAnnex can be running at a time, among all
+-- transports.
+liftAnnex :: TransportHandle -> Annex a -> IO a
+liftAnnex (TransportHandle _ annexstate) a = do
+	st <- takeMVar annexstate
+	(r, st') <- Annex.run st a
+	putMVar annexstate st'
+	return r
+
+inLocalRepo :: TransportHandle -> (Git.Repo -> IO a) -> IO a
+inLocalRepo (TransportHandle g _) a = a g
+
+-- Check if any of the shas are actually new in the local git repo,
+-- to avoid unnecessary fetching.
+checkNewShas :: TransportHandle -> [Git.Sha] -> IO Bool
+checkNewShas transporthandle = check
+  where
+	check [] = return True
+	check (r:rs) = maybe (check rs) (const $ return False)
+		=<< liftAnnex transporthandle (catObjectDetails r)
RemoteDaemon/Core.hs (new file, 118 lines)
@@ -0,0 +1,118 @@
+{- git-remote-daemon core
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module RemoteDaemon.Core (runForeground) where
+
+import qualified Annex
+import Common
+import Types.GitConfig
+import RemoteDaemon.Common
+import RemoteDaemon.Types
+import RemoteDaemon.Transport
+import qualified Git
+import qualified Git.Types as Git
+import qualified Git.CurrentRepo
+import Utility.SimpleProtocol
+import Config
+
+import Control.Concurrent.Async
+import Control.Concurrent
+import Network.URI
+import qualified Data.Map as M
+
+runForeground :: IO ()
+runForeground = do
+	(readh, writeh) <- ioHandles
+	ichan <- newChan :: IO (Chan Consumed)
+	ochan <- newChan :: IO (Chan Emitted)
+
+	let reader = forever $ do
+		l <- hGetLine readh
+		case parseMessage l of
+			Nothing -> error $ "protocol error: " ++ l
+			Just cmd -> writeChan ichan cmd
+	let writer = forever $ do
+		msg <- readChan ochan
+		hPutStrLn writeh $ unwords $ formatMessage msg
+		hFlush writeh
+	let controller = runController ichan ochan
+
+	-- If any thread fails, the rest will be killed.
+	void $ tryIO $
+		reader `concurrently` writer `concurrently` controller
+
+type RemoteMap = M.Map Git.Repo (IO (), Chan Consumed)
+
+-- Runs the transports, dispatching messages to them, and handling
+-- the main control messages.
+runController :: Chan Consumed -> Chan Emitted -> IO ()
+runController ichan ochan = do
+	h <- genTransportHandle
+	m <- genRemoteMap h ochan
+	startrunning m
+	go h False m
+  where
+	go h paused m = do
+		cmd <- readChan ichan
+		case cmd of
+			RELOAD -> do
+				liftAnnex h reloadConfig
+				m' <- genRemoteMap h ochan
+				let common = M.intersection m m'
+				let new = M.difference m' m
+				let old = M.difference m m'
+				stoprunning old
+				unless paused $
+					startrunning new
+				go h paused (M.union common new)
+			PAUSE -> do
+				stoprunning m
+				go h True m
+			RESUME -> do
+				when paused $
+					startrunning m
+				go h False m
+			STOP -> exitSuccess
+			-- All remaining messages are sent to
+			-- all Transports.
+			msg -> do
+				unless paused $
+					forM_ chans (`writeChan` msg)
+				go h paused m
+	  where
+		chans = map snd (M.elems m)
+
+	startrunning m = forM_ (M.elems m) startrunning'
+	startrunning' (transport, _) = void $ async transport
+
+	-- Ask the transport nicely to stop.
+	stoprunning m = forM_ (M.elems m) stoprunning'
+	stoprunning' (_, c) = writeChan c STOP
+
+-- Generates a map with a transport for each supported remote in the git repo,
+-- except those that have annex.sync = false
+genRemoteMap :: TransportHandle -> Chan Emitted -> IO RemoteMap
+genRemoteMap h@(TransportHandle g _) ochan =
+	M.fromList . catMaybes <$> mapM gen (Git.remotes g)
+  where
+	gen r = case Git.location r of
+		Git.Url u -> case M.lookup (uriScheme u) remoteTransports of
+			Just transport
+				| remoteAnnexSync (extractRemoteGitConfig r (Git.repoDescribe r)) -> do
+					ichan <- newChan :: IO (Chan Consumed)
+					return $ Just
+						( r
+						, (transport r (Git.repoDescribe r) h ichan ochan, ichan)
+						)
+			_ -> return Nothing
+		_ -> return Nothing
+
+genTransportHandle :: IO TransportHandle
+genTransportHandle = do
+	annexstate <- newMVar =<< Annex.new =<< Git.CurrentRepo.get
+	g <- Annex.repo <$> readMVar annexstate
+	return $ TransportHandle g annexstate
RemoteDaemon/Transport.hs (new file, 21 lines)
@@ -0,0 +1,21 @@
+{- git-remote-daemon transports
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module RemoteDaemon.Transport where
+
+import RemoteDaemon.Types
+import qualified RemoteDaemon.Transport.Ssh
+
+import qualified Data.Map as M
+
+-- Corresponds to uriScheme
+type TransportScheme = String
+
+remoteTransports :: M.Map TransportScheme Transport
+remoteTransports = M.fromList
+	[ ("ssh:", RemoteDaemon.Transport.Ssh.transport)
+	]
RemoteDaemon/Transport/Ssh.hs (new file, 72 lines)
@@ -0,0 +1,72 @@
+{- git-remote-daemon, git-annex-shell over ssh transport
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module RemoteDaemon.Transport.Ssh (transport) where
+
+import Common.Annex
+import RemoteDaemon.Types
+import RemoteDaemon.Common
+import Remote.Helper.Ssh
+import qualified RemoteDaemon.Transport.Ssh.Types as SshRemote
+import Utility.SimpleProtocol
+import Git.Command
+
+import Control.Concurrent.Chan
+import Control.Concurrent.Async
+import System.Process (std_in, std_out)
+
+transport :: Transport
+transport r remotename transporthandle ichan ochan = do
+	v <- liftAnnex transporthandle $ git_annex_shell r "notifychanges" [] []
+	case v of
+		Nothing -> noop
+		Just (cmd, params) -> go cmd (toCommand params)
+  where
+	go cmd params = do
+		(Just toh, Just fromh, _, pid) <- createProcess (proc cmd params)
+			{ std_in = CreatePipe
+			, std_out = CreatePipe
+			}
+
+		let shutdown = do
+			hClose toh
+			hClose fromh
+			void $ waitForProcess pid
+			send DISCONNECTED
+
+		let fromshell = forever $ do
+			l <- hGetLine fromh
+			case parseMessage l of
+				Just SshRemote.READY -> send CONNECTED
+				Just (SshRemote.CHANGED shas) ->
+					whenM (checkNewShas transporthandle shas) $
+						fetch
+				Nothing -> shutdown
+
+		-- The only control message that matters is STOP.
+		--
+		-- Note that a CHANGED control message is not handled;
+		-- we don't push to the ssh remote. The assistant
+		-- and git-annex sync both handle pushes, so there's no
+		-- need to do it here.
+		let handlecontrol = forever $ do
+			msg <- readChan ichan
+			case msg of
+				STOP -> ioError (userError "done")
+				_ -> noop
+
+		-- Run both threads until one finishes.
+		void $ tryIO $ concurrently fromshell handlecontrol
+		shutdown
+
+	send msg = writeChan ochan (msg remotename)
+
+	fetch = do
+		send SYNCING
+		ok <- inLocalRepo transporthandle $
+			runBool [Param "fetch", Param remotename]
+		send (DONESYNCING ok)
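The read loop dispatches on lines from git-annex-shell notifychanges; per the Notification type defined in the next file, those lines look like "READY" and "CHANGED <sha> <sha> ...". A standalone sketch of that dispatch (simplified, no real handles, hypothetical shas):

-- Simplified line dispatch mirroring the fromshell loop above.
data Notification = Ready | Changed [String] | Garbled
	deriving Show

parseLine :: String -> Notification
parseLine l = case words l of
	["READY"] -> Ready
	("CHANGED":shas) -> Changed shas
	_ -> Garbled

main :: IO ()
main = mapM_ (print . parseLine)
	[ "READY"
	, "CHANGED 1a425b9c90 0f5eb9c9"
	]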
RemoteDaemon/Transport/Ssh/Types.hs (new file, 32 lines)
@@ -0,0 +1,32 @@
+{- git-remote-daemon, git-annex-shell notifychanges protocol types
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
+module RemoteDaemon.Transport.Ssh.Types (
+	Notification(..),
+	Proto.serialize,
+	Proto.deserialize,
+	Proto.formatMessage,
+) where
+
+import qualified Utility.SimpleProtocol as Proto
+import RemoteDaemon.Types (RefList)
+
+data Notification
+	= READY
+	| CHANGED RefList
+
+instance Proto.Sendable Notification where
+	formatMessage READY = ["READY"]
+	formatMessage (CHANGED shas) = ["CHANGED", Proto.serialize shas]
+
+instance Proto.Receivable Notification where
+	parseCommand "READY" = Proto.parse0 READY
+	parseCommand "CHANGED" = Proto.parse1 CHANGED
+	parseCommand _ = Proto.parseFail
RemoteDaemon/Types.hs (new file, 93 lines)
@@ -0,0 +1,93 @@
+{- git-remote-daemon data types.
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
+module RemoteDaemon.Types where
+
+import qualified Annex
+import qualified Git.Types as Git
+import qualified Utility.SimpleProtocol as Proto
+
+import Control.Concurrent
+
+-- A Transport for a particular git remote consumes some messages
+-- from a Chan, and emits others to another Chan.
+type Transport = RemoteRepo -> RemoteName -> TransportHandle -> Chan Consumed -> Chan Emitted -> IO ()
+
+type RemoteRepo = Git.Repo
+type LocalRepo = Git.Repo
+
+-- All Transports share a single AnnexState MVar
+data TransportHandle = TransportHandle LocalRepo (MVar Annex.AnnexState)
+
+-- Messages that the daemon emits.
+data Emitted
+	= CONNECTED RemoteName
+	| DISCONNECTED RemoteName
+	| SYNCING RemoteName
+	| DONESYNCING Bool RemoteName
+
+-- Messages that the daemon consumes.
+data Consumed
+	= PAUSE
+	| RESUME
+	| CHANGED RefList
+	| RELOAD
+	| STOP
+
+type RemoteName = String
+type RefList = [Git.Ref]
+
+instance Proto.Sendable Emitted where
+	formatMessage (CONNECTED remote) =
+		["CONNECTED", Proto.serialize remote]
+	formatMessage (DISCONNECTED remote) =
+		["DISCONNECTED", Proto.serialize remote]
+	formatMessage (SYNCING remote) =
+		["SYNCING", Proto.serialize remote]
+	formatMessage (DONESYNCING status remote) =
+		["DONESYNCING", Proto.serialize status, Proto.serialize remote]
+
+instance Proto.Sendable Consumed where
+	formatMessage PAUSE = ["PAUSE"]
+	formatMessage RESUME = ["RESUME"]
+	formatMessage (CHANGED refs) = ["CHANGED", Proto.serialize refs]
+	formatMessage RELOAD = ["RELOAD"]
+	formatMessage STOP = ["STOP"]
+
+instance Proto.Receivable Emitted where
+	parseCommand "CONNECTED" = Proto.parse1 CONNECTED
+	parseCommand "DISCONNECTED" = Proto.parse1 DISCONNECTED
+	parseCommand "SYNCING" = Proto.parse1 SYNCING
+	parseCommand "DONESYNCING" = Proto.parse2 DONESYNCING
+	parseCommand _ = Proto.parseFail
+
+instance Proto.Receivable Consumed where
+	parseCommand "PAUSE" = Proto.parse0 PAUSE
+	parseCommand "RESUME" = Proto.parse0 RESUME
+	parseCommand "CHANGED" = Proto.parse1 CHANGED
+	parseCommand "RELOAD" = Proto.parse0 RELOAD
+	parseCommand "STOP" = Proto.parse0 STOP
+	parseCommand _ = Proto.parseFail
+
+instance Proto.Serializable [Char] where
+	serialize = id
+	deserialize = Just
+
+instance Proto.Serializable RefList where
+	serialize = unwords . map Git.fromRef
+	deserialize = Just . map Git.Ref . words
+
+instance Proto.Serializable Bool where
+	serialize False = "0"
+	serialize True = "1"
+
+	deserialize "0" = Just False
+	deserialize "1" = Just True
+	deserialize _ = Nothing
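Putting the Sendable instances and the Bool serializer together, each Emitted message is a flat word list joined on the wire. A quick standalone sketch mirroring the format above (not the real instances):

-- Standalone mirror of the Emitted wire format shown above.
data Emitted = DoneSyncing Bool String deriving (Show, Eq)

format :: Emitted -> String
format (DoneSyncing ok remote) =
	unwords ["DONESYNCING", if ok then "1" else "0", remote]

main :: IO ()
main = putStrLn $ format (DoneSyncing True "origin")
-- DONESYNCING 1 origin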
Types/DesktopNotify.hs (new file, 27 lines)
@@ -0,0 +1,27 @@
+{- git-annex DesktopNotify type
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Types.DesktopNotify where
+
+import Data.Monoid
+
+data DesktopNotify = DesktopNotify
+	{ notifyStart :: Bool
+	, notifyFinish :: Bool
+	}
+	deriving (Show)
+
+instance Monoid DesktopNotify where
+	mempty = DesktopNotify False False
+	mappend (DesktopNotify s1 f1) (DesktopNotify s2 f2) =
+		DesktopNotify (s1 || s2) (f1 || f2)
+
+mkNotifyStart :: DesktopNotify
+mkNotifyStart = DesktopNotify True False
+
+mkNotifyFinish :: DesktopNotify
+mkNotifyFinish = DesktopNotify False True
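The Monoid instance lets each notification flag contribute one field and be folded together. A runnable sketch (written against current GHC, where Monoid also requires a Semigroup instance; the 2014 code put the combine directly in mappend):

import Data.Monoid ()

data DesktopNotify = DesktopNotify
	{ notifyStart :: Bool
	, notifyFinish :: Bool
	} deriving Show

instance Semigroup DesktopNotify where
	DesktopNotify s1 f1 <> DesktopNotify s2 f2 =
		DesktopNotify (s1 || s2) (f1 || f2)

instance Monoid DesktopNotify where
	mempty = DesktopNotify False False

main :: IO ()
main = print $ DesktopNotify True False <> DesktopNotify False True
-- DesktopNotify {notifyStart = True, notifyFinish = True}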
Types/FileMatcher.hs

@@ -7,7 +7,12 @@

 module Types.FileMatcher where

+import Types.UUID (UUID)
 import Types.Key (Key)
+import Utility.Matcher (Matcher, Token)
+
+import qualified Data.Map as M
+import qualified Data.Set as S

 data MatchInfo
 	= MatchingFile FileInfo

@@ -17,3 +22,19 @@ data FileInfo = FileInfo
 	{ relFile :: FilePath -- may be relative to cwd
 	, matchFile :: FilePath -- filepath to match on; may be relative to top
 	}
+
+type FileMatcherMap a = M.Map UUID (Utility.Matcher.Matcher (S.Set UUID -> MatchInfo -> a Bool))
+
+type MkLimit a = String -> Either String (MatchFiles a)
+
+type AssumeNotPresent = S.Set UUID
+
+type MatchFiles a = AssumeNotPresent -> MatchInfo -> a Bool
+
+type FileMatcher a = Matcher (MatchFiles a)
+
+-- This is a matcher that can have tokens added to it while it's being
+-- built, and once complete is compiled to an unchangeable matcher.
+data ExpandableMatcher a
+	= BuildingMatcher [Token (MatchInfo -> a Bool)]
+	| CompleteMatcher (Matcher (MatchInfo -> a Bool))
Types/Limit.hs (deleted; its types move, parameterized, into Types/FileMatcher.hs above)

@@ -1,20 +0,0 @@
-{- types for limits
- -
- - Copyright 2013 Joey Hess <joey@kitenet.net>
- -
- - Licensed under the GNU GPL version 3 or higher.
- -}
-
-{-# LANGUAGE CPP #-}
-
-module Types.Limit where
-
-import Common.Annex
-import Types.FileMatcher
-
-import qualified Data.Set as S
-
-type MkLimit = String -> Either String MatchFiles
-
-type AssumeNotPresent = S.Set UUID
-type MatchFiles = AssumeNotPresent -> MatchInfo -> Annex Bool
@ -264,7 +264,9 @@ parseMetaData p = (,)
instance Arbitrary MetaData where
	arbitrary = do
		size <- arbitrarySizedBoundedIntegral `suchThat` (< 500)
		MetaData . M.fromList <$> vector size
		MetaData . M.filterWithKey legal . M.fromList <$> vector size
	  where
		legal k _v = legalField $ fromMetaField k

instance Arbitrary MetaValue where
	arbitrary = MetaValue <$> arbitrary <*> arbitrary
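For context, the generator pattern used here, restated standalone with plain Ints rather than the git-annex MetaData types:

[[!format haskell """
import Test.QuickCheck

-- Generate a bounded-size list, then filter out illegal entries,
-- mirroring the MetaData generator above (sketch with plain Ints).
legalInts :: Gen [Int]
legalInts = do
	size <- arbitrarySizedBoundedIntegral `suchThat` (< 500)
	filter even <$> vector size
"""]]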
@ -9,15 +9,18 @@

module Utility.FileMode where

import Common

import System.IO
import Control.Monad
import Control.Exception (bracket)
import System.PosixCompat.Types
import Utility.PosixFiles
#ifndef mingw32_HOST_OS
import System.Posix.Files
#endif
import Foreign (complement)

import Utility.Exception

{- Applies a conversion function to a file's mode. -}
modifyFileMode :: FilePath -> (FileMode -> FileMode) -> IO ()
modifyFileMode f convert = void $ modifyFileMode' f convert

@ -56,6 +59,12 @@ readModes = [ownerReadMode, groupReadMode, otherReadMode]
executeModes :: [FileMode]
executeModes = [ownerExecuteMode, groupExecuteMode, otherExecuteMode]

otherGroupModes :: [FileMode]
otherGroupModes =
	[ groupReadMode, otherReadMode
	, groupWriteMode, otherWriteMode
	]

{- Removes the write bits from a file. -}
preventWrite :: FilePath -> IO ()
preventWrite f = modifyFileMode f $ removeModes writeModes

@ -145,9 +154,5 @@ setSticky f = modifyFileMode f $ addModes [stickyMode]
writeFileProtected :: FilePath -> String -> IO ()
writeFileProtected file content = withUmask 0o0077 $
	withFile file WriteMode $ \h -> do
		void $ tryIO $ modifyFileMode file $
			removeModes
				[ groupReadMode, otherReadMode
				, groupWriteMode, otherWriteMode
				]
		void $ tryIO $ modifyFileMode file $ removeModes otherGroupModes
		hPutStr h content
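As a usage sketch, the new otherGroupModes list composes with this module's modifyFileMode and removeModes helpers; the path below is hypothetical:

[[!format haskell """
import Utility.FileMode

-- Strip group/other read and write bits from a freshly written file,
-- leaving only the owner's permissions (sketch; the path is made up).
lockDown :: IO ()
lockDown = modifyFileMode "/tmp/example.txt" (removeModes otherGroupModes)
"""]]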
61
Utility/LinuxMkLibs.hs
Normal file
@ -0,0 +1,61 @@
{- Linux library copier and binary shimmer
 -
 - Copyright 2013 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

module Utility.LinuxMkLibs where

import Control.Applicative
import Data.Maybe
import System.Directory
import Data.List.Utils
import System.Posix.Files
import Data.Char
import Control.Monad.IfElse

import Utility.PartialPrelude
import Utility.Directory
import Utility.Process
import Utility.Monad
import Utility.Path

{- Installs a library. If the library is a symlink to another file,
 - install the file it links to, and update the symlink to be relative. -}
installLib :: (FilePath -> FilePath -> IO ()) -> FilePath -> FilePath -> IO (Maybe FilePath)
installLib installfile top lib = ifM (doesFileExist lib)
	( do
		installfile top lib
		checksymlink lib
		return $ Just $ parentDir lib
	, return Nothing
	)
  where
	checksymlink f = whenM (isSymbolicLink <$> getSymbolicLinkStatus (inTop top f)) $ do
		l <- readSymbolicLink (inTop top f)
		let absl = absPathFrom (parentDir f) l
		let target = relPathDirToFile (parentDir f) absl
		installfile top absl
		nukeFile (top ++ f)
		createSymbolicLink target (inTop top f)
		checksymlink absl

-- Note that f is not relative, so cannot use </>
inTop :: FilePath -> FilePath -> FilePath
inTop top f = top ++ f

{- Parse ldd output, getting all the libraries that the input files
 - link to. Note that some of the libraries may not exist
 - (eg, linux-vdso.so) -}
parseLdd :: String -> [FilePath]
parseLdd = catMaybes . map (getlib . dropWhile isSpace) . lines
  where
	getlib l = headMaybe . words =<< lastMaybe (split " => " l)

{- Get all glibc libs and other support files, including gconv files
 -
 - XXX Debian specific. -}
glibcLibs :: IO [FilePath]
glibcLibs = lines <$> readProcess "sh"
	["-c", "dpkg -L libc6:$(dpkg --print-architecture) libgcc1:$(dpkg --print-architecture) | egrep '\\.so|gconv'"]
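To illustrate what parseLdd extracts, a sketch run on a fabricated fragment of ldd output:

[[!format haskell """
import Utility.LinuxMkLibs (parseLdd)

-- Hypothetical ldd output for some binary.
sample :: String
sample = unlines
	[ "\tlibc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007f00)"
	, "\tlibz.so.1 => /lib/x86_64-linux-gnu/libz.so.1 (0x00007f01)"
	]

-- parseLdd keeps the first word after each " => ", so:
-- parseLdd sample
--   == ["/lib/x86_64-linux-gnu/libc.so.6", "/lib/x86_64-linux-gnu/libz.so.1"]
"""]]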
@ -19,7 +19,7 @@

module Utility.Matcher (
	Token(..),
	Matcher,
	Matcher(..),
	token,
	tokens,
	generate,
@ -124,6 +124,9 @@ rsyncUrlIsPath s
 - after the \r is the number of bytes processed. After the number,
 - there must appear some whitespace, or we didn't get the whole number,
 - and return the \r and part we did get, for later processing.
 -
 - In some locales, the number will have one or more commas in the middle
 - of it.
 -}
parseRsyncProgress :: String -> (Maybe Integer, String)
parseRsyncProgress = go [] . reverse . progresschunks

@ -142,7 +145,7 @@ parseRsyncProgress = go [] . reverse . progresschunks
	parsebytes s = case break isSpace s of
		([], _) -> Nothing
		(_, []) -> Nothing
		(b, _) -> readish b
		(b, _) -> readish $ filter (/= ',') b

{- Filters options to those that are safe to pass to rsync in server mode,
 - without causing it to eg, expose files. -}
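A quick sketch of why the comma filter matters, assuming readish from Utility.PartialPrelude parses a leading integer with reads:

[[!format haskell """
import Utility.PartialPrelude (readish)

-- In a locale that groups digits, rsync prints "1,234,567" for the
-- byte count. Parsed as-is, the number is cut off at the first comma,
-- badly misreporting progress; stripping commas recovers the full count.
full :: Maybe Integer
full = readish (filter (/= ',') "1,234,567")  -- Just 1234567
"""]]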
@ -10,7 +10,11 @@ module Utility.Scheduled (
	Recurrance(..),
	ScheduledTime(..),
	NextTime(..),
	WeekDay,
	MonthDay,
	YearDay,
	nextTime,
	startTime,
	fromSchedule,
	fromScheduledTime,
	toScheduledTime,

@ -21,9 +25,13 @@ module Utility.Scheduled (
	prop_schedule_roundtrips
	) where

import Common
import Utility.Data
import Utility.QuickCheck
import Utility.PartialPrelude
import Utility.Misc

import Control.Applicative
import Data.List
import Data.Time.Clock
import Data.Time.LocalTime
import Data.Time.Calendar

@ -41,9 +49,9 @@ data Recurrance
	| Weekly (Maybe WeekDay)
	| Monthly (Maybe MonthDay)
	| Yearly (Maybe YearDay)
	-- Days, Weeks, or Months of the year evenly divisible by a number.
	-- (Divisible Year is years evenly divisible by a number.)
	| Divisible Int Recurrance
	-- ^ Days, Weeks, or Months of the year evenly divisible by a number.
	-- (Divisible Year is years evenly divisible by a number.)
	deriving (Eq, Read, Show, Ord)

type WeekDay = Int

@ -78,7 +86,7 @@ nextTime schedule lasttime = do
{- Calculate the next time that fits a Schedule, based on the
 - last time it occurred, and the current time. -}
calcNextTime :: Schedule -> Maybe LocalTime -> LocalTime -> Maybe NextTime
calcNextTime (Schedule recurrance scheduledtime) lasttime currenttime
calcNextTime schedule@(Schedule recurrance scheduledtime) lasttime currenttime
	| scheduledtime == AnyTime = do
		next <- findfromtoday True
		return $ case next of

@ -100,65 +108,71 @@ calcNextTime (Schedule recurrance scheduledtime) lasttime currenttime
	window startd endd = NextTimeWindow
		(LocalTime startd nexttime)
		(LocalTime endd (TimeOfDay 23 59 0))
	findfrom r afterday day = case r of
	findfrom r afterday candidate
		| ynum candidate > (ynum (localDay currenttime)) + 100 =
			-- avoid possible infinite recursion
			error $ "bug: calcNextTime did not find a time within 100 years to run " ++
				show (schedule, lasttime, currenttime)
		| otherwise = findfromChecked r afterday candidate
	findfromChecked r afterday candidate = case r of
		Daily
			| afterday -> Just $ exactly $ addDays 1 day
			| otherwise -> Just $ exactly day
			| afterday -> Just $ exactly $ addDays 1 candidate
			| otherwise -> Just $ exactly candidate
		Weekly Nothing
			| afterday -> skip 1
			| otherwise -> case (wday <$> lastday, wday day) of
				(Nothing, _) -> Just $ window day (addDays 6 day)
			| otherwise -> case (wday <$> lastday, wday candidate) of
				(Nothing, _) -> Just $ window candidate (addDays 6 candidate)
				(Just old, curr)
					| old == curr -> Just $ window day (addDays 6 day)
					| old == curr -> Just $ window candidate (addDays 6 candidate)
					| otherwise -> skip 1
		Monthly Nothing
			| afterday -> skip 1
			| maybe True (\old -> mnum day > mday old && mday day >= (mday old `mod` minmday)) lastday ->
			| maybe True (\old -> mday candidate > mday old && mday candidate >= (mday old `mod` minmday)) lastday ->
				-- Window only covers current month,
				-- in case there is a Divisible requirement.
				Just $ window day (endOfMonth day)
				Just $ window candidate (endOfMonth candidate)
			| otherwise -> skip 1
		Yearly Nothing
			| afterday -> skip 1
			| maybe True (\old -> ynum day > ynum old && yday day >= (yday old `mod` minyday)) lastday ->
				Just $ window day (endOfYear day)
			| maybe True (\old -> ynum candidate > ynum old && yday candidate >= (yday old `mod` minyday)) lastday ->
				Just $ window candidate (endOfYear candidate)
			| otherwise -> skip 1
		Weekly (Just w)
			| w < 0 || w > maxwday -> Nothing
			| w == wday day -> if afterday
				then Just $ exactly $ addDays 7 day
				else Just $ exactly day
			| w == wday candidate -> if afterday
				then Just $ exactly $ addDays 7 candidate
				else Just $ exactly candidate
			| otherwise -> Just $ exactly $
				addDays (fromIntegral $ (w - wday day) `mod` 7) day
				addDays (fromIntegral $ (w - wday candidate) `mod` 7) candidate
		Monthly (Just m)
			| m < 0 || m > maxmday -> Nothing
			-- TODO can be done more efficiently than recursing
			| m == mday day -> if afterday
			| m == mday candidate -> if afterday
				then skip 1
				else Just $ exactly day
				else Just $ exactly candidate
			| otherwise -> skip 1
		Yearly (Just y)
			| y < 0 || y > maxyday -> Nothing
			| y == yday day -> if afterday
			| y == yday candidate -> if afterday
				then skip 365
				else Just $ exactly day
				else Just $ exactly candidate
			| otherwise -> skip 1
		Divisible n r'@Daily -> handlediv n r' yday (Just maxyday)
		Divisible n r'@(Weekly _) -> handlediv n r' wnum (Just maxwnum)
		Divisible n r'@(Monthly _) -> handlediv n r' mnum (Just maxmnum)
		Divisible n r'@(Yearly _) -> handlediv n r' ynum Nothing
		Divisible _ r'@(Divisible _ _) -> findfrom r' afterday day
		Divisible _ r'@(Divisible _ _) -> findfrom r' afterday candidate
	  where
		skip n = findfrom r False (addDays n day)
		skip n = findfrom r False (addDays n candidate)
		handlediv n r' getval mmax
			| n > 0 && maybe True (n <=) mmax =
				findfromwhere r' (divisible n . getval) afterday day
				findfromwhere r' (divisible n . getval) afterday candidate
			| otherwise = Nothing
	findfromwhere r p afterday day
	findfromwhere r p afterday candidate
		| maybe True (p . getday) next = next
		| otherwise = maybe Nothing (findfromwhere r p True . getday) next
	  where
		next = findfrom r afterday day
		next = findfrom r afterday candidate
		getday = localDay . startTime
	divisible n v = v `rem` n == 0
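The guard added to findfrom bounds an otherwise possibly unbounded search. The same pattern in isolation, as a standalone sketch rather than git-annex code:

[[!format haskell """
-- A bounded linear search: give up with an error after a fixed
-- horizon rather than recursing forever on an unsatisfiable predicate.
findWithin :: Int -> (Int -> Bool) -> Int -> Int
findWithin horizon p start = go start
  where
	go n
		| n > start + horizon = error "bug: no match within horizon"
		| p n = n
		| otherwise = go (n + 1)

-- findWithin 100 even 3 == 4
"""]]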
90
Utility/SimpleProtocol.hs
Normal file
@ -0,0 +1,90 @@
{- Simple line-based protocols.
 -
 - Copyright 2013-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

module Utility.SimpleProtocol (
	Sendable(..),
	Receivable(..),
	parseMessage,
	Serializable(..),
	Parser,
	parseFail,
	parse0,
	parse1,
	parse2,
	parse3,
	ioHandles,
	) where

import Data.Char
import GHC.IO.Handle

import Common

-- Messages that can be sent.
class Sendable m where
	formatMessage :: m -> [String]

-- Messages that can be received.
class Receivable m where
	-- Passed the first word of the message, returns
	-- a Parser that can be fed the rest of the message to generate
	-- the value.
	parseCommand :: String -> Parser m

parseMessage :: (Receivable m) => String -> Maybe m
parseMessage s = parseCommand command rest
  where
	(command, rest) = splitWord s

class Serializable a where
	serialize :: a -> String
	deserialize :: String -> Maybe a

{- Parsing the parameters of messages. Using the right parseN ensures
 - that the string is split into exactly the requested number of words,
 - which allows the last parameter of a message to contain arbitrary
 - whitespace, etc, without needing any special quoting.
 -}
type Parser a = String -> Maybe a

parseFail :: Parser a
parseFail _ = Nothing

parse0 :: a -> Parser a
parse0 mk "" = Just mk
parse0 _ _ = Nothing

parse1 :: Serializable p1 => (p1 -> a) -> Parser a
parse1 mk p1 = mk <$> deserialize p1

parse2 :: (Serializable p1, Serializable p2) => (p1 -> p2 -> a) -> Parser a
parse2 mk s = mk <$> deserialize p1 <*> deserialize p2
  where
	(p1, p2) = splitWord s

parse3 :: (Serializable p1, Serializable p2, Serializable p3) => (p1 -> p2 -> p3 -> a) -> Parser a
parse3 mk s = mk <$> deserialize p1 <*> deserialize p2 <*> deserialize p3
  where
	(p1, rest) = splitWord s
	(p2, p3) = splitWord rest

splitWord :: String -> (String, String)
splitWord = separate isSpace

{- When a program speaks a simple protocol over stdio, any other output
 - to stdout (or anything that attempts to read from stdin)
 - will mess up the protocol. To avoid that, close stdin, and duplicate
 - stderr to stdout. Return two new handles that are duplicates of the
 - original (stdin, stdout). -}
ioHandles :: IO (Handle, Handle)
ioHandles = do
	readh <- hDuplicate stdin
	writeh <- hDuplicate stdout
	nullh <- openFile devNull ReadMode
	nullh `hDuplicateTo` stdin
	stderr `hDuplicateTo` stdout
	return (readh, writeh)
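A sketch of how the parseN helpers keep trailing whitespace intact, using a made-up two-parameter message and assuming the [Char] Serializable instance from the assistant code earlier in this merge is in scope:

[[!format haskell """
import qualified Utility.SimpleProtocol as Proto

-- A hypothetical two-parameter message, to show how parse2 splits.
data Say = Say String String
	deriving (Show)

instance Proto.Receivable Say where
	parseCommand "SAY" = Proto.parse2 Say
	parseCommand _ = Proto.parseFail

-- Only the first parameter is split off; the second keeps its spaces:
-- Proto.parseMessage "SAY subject a message with spaces"
--   == Just (Say "subject" "a message with spaces")
"""]]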
@ -10,10 +10,13 @@

module Utility.ThreadScheduler where

import Common

import Control.Monad
import Control.Concurrent
#ifndef mingw32_HOST_OS
import Control.Monad.IfElse
import System.Posix.IO
#endif
#ifndef mingw32_HOST_OS
import System.Posix.Signals
#ifndef __ANDROID__
import System.Posix.Terminal
@ -77,7 +77,8 @@ exists url uo = case parseURIRelaxed url of
			Nothing -> dne
	| otherwise -> if Build.SysConfig.curl
		then do
			output <- readProcess "curl" $ toCommand curlparams
			output <- catchDefaultIO "" $
				readProcess "curl" $ toCommand curlparams
			case lastMaybe (lines output) of
				Just ('2':_:_) -> return (True, extractsize output)
				_ -> dne
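The change wraps the curl invocation so a nonzero exit no longer propagates as an exception; the same pattern as a sketch, assuming catchDefaultIO :: a -> IO a -> IO a from Utility.Exception and readProcess from Utility.Process:

[[!format haskell """
import Utility.Exception (catchDefaultIO)
import Utility.Process (readProcess)

-- If curl exits nonzero, readProcess throws; fall back to "" so the
-- caller treats it as "does not exist" instead of crashing.
headUrl :: IO String
headUrl = catchDefaultIO "" $
	readProcess "curl" ["-s", "-I", "http://example.com/"]
"""]]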
@ -33,7 +33,6 @@ import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Blaze.ByteString.Builder.Char.Utf8 (fromText)
import Blaze.ByteString.Builder (Builder)
import Data.Monoid
import Control.Arrow ((***))
import Control.Concurrent
#ifdef WITH_WEBAPP_SECURE
69
debian/changelog
vendored
@ -1,3 +1,72 @@
git-annex (5.20140412) unstable; urgency=high

  * Last release didn't quite fix the high cpu issue in all cases, this should.

 -- Joey Hess <joeyh@debian.org>  Fri, 11 Apr 2014 17:14:38 -0400

git-annex (5.20140411) unstable; urgency=high

  * importfeed: Filename template can now contain an itempubdate variable.
    Needs feed 0.3.9.2.
  * Fix rsync progress parsing in locales that use comma in number display.
    Closes: #744148
  * assistant: Fix high CPU usage triggered when a monthly fsck is scheduled,
    and the last time the job ran was a day of the month > 12. This caused a
    runaway loop. Thanks to Anarcat for his assistance, and to Maximiliano
    Curia for identifying the cause of this bug.
  * Remove wget from OSX dmg, due to issues with cert paths that broke
    git-annex automatic upgrading. Instead, curl is used, unless the
    OSX system has wget installed, which will then be used.

 -- Joey Hess <joeyh@debian.org>  Fri, 11 Apr 2014 14:59:49 -0400

git-annex (5.20140405) unstable; urgency=medium

  * git-annex-shell: Added notifychanges command.
  * Improve display of dbus notifications. Thanks, Johan Kiviniemi.
  * Fix nautilus script installation to not crash when the nautilus script dir
    does not exist. Instead, only install scripts when the directory already
    exists.

 -- Joey Hess <joeyh@debian.org>  Sat, 05 Apr 2014 16:54:33 -0400

git-annex (5.20140402) unstable; urgency=medium

  * unannex, uninit: Avoid committing after every file is unannexed,
    for massive speedup.
  * --notify-finish switch will cause desktop notifications after each
    file upload/download/drop completes
    (using the dbus Desktop Notifications Specification)
  * --notify-start switch will show desktop notifications when each
    file upload/download starts.
  * webapp: Automatically install Nautilus integration scripts
    to get and drop files.
  * tahoe: Pass -d parameter before subcommand; putting it after
    the subcommand no longer works with tahoe-lafs version 1.10.
    (Thanks, Alberto Berti)
  * forget --drop-dead: Avoid removing the dead remote from the trust.log,
    so that if git remotes for it still exist anywhere, git annex info
    will still know it's dead and not show it.
  * git-annex-shell: Make configlist automatically initialize
    a remote git repository, as long as a git-annex branch has
    been pushed to it, to simplify setup of remote git repositories,
    including via gitolite.
  * add --include-dotfiles: New option, perhaps useful for backups.
  * Version 5.20140227 broke creation of glacier repositories,
    not including the datacenter and vault in their configuration.
    This bug is fixed, but glacier repositories set up with the broken
    version of git-annex need to have the datacenter and vault set
    in order to be usable. This can be done using git annex enableremote
    to add the missing settings. For details, see
    http://git-annex.branchable.com/bugs/problems_with_glacier/
  * Added required content configuration.
  * assistant: Improve ssh authorized keys line generated in local pairing
    or for a remote ssh server to set environment variables in an
    alternative way that works with the non-POSIX fish shell, as well
    as POSIX shells.

 -- Joey Hess <joeyh@debian.org>  Wed, 02 Apr 2014 16:42:53 -0400

git-annex (5.20140320~bpo70+1) wheezy-backports; urgency=medium

  * Updating backport to newest release.
3
debian/control
vendored
@ -29,6 +29,7 @@ Build-Depends:
 libghc-hinotify-dev [linux-any],
 libghc-stm-dev (>= 2.3),
 libghc-dbus-dev (>= 0.10.3) [linux-any],
 libghc-fdo-notify-dev (>= 0.3) [linux-any],
 libghc-yesod-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
 libghc-yesod-static-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
 libghc-yesod-default-dev [i386 amd64 kfreebsd-amd64 powerpc sparc],

@ -50,7 +51,7 @@ Build-Depends:
 libghc-xml-types-dev,
 libghc-async-dev,
 libghc-http-dev,
 libghc-feed-dev,
 libghc-feed-dev (>= 0.3.9.2),
 libghc-regex-tdfa-dev [!mipsel !s390],
 libghc-regex-compat-dev [mipsel s390],
 lsof [!kfreebsd-i386 !kfreebsd-amd64],
BIN
doc/assistant/connection.png
Normal file
Binary file not shown. (new image, 3.1 KiB)
BIN
doc/assistant/downloadnotification.png
Normal file
Binary file not shown. (new image, 4.4 KiB)
BIN
doc/assistant/nautilusmenu.png
Normal file
Binary file not shown. (new image, 58 KiB)
@ -1,3 +1,14 @@
## version 5.20140411

This release fixes a bug that could cause the assistant to use a *lot* of
CPU, when monthly fscking was set up.

Automatic upgrading was broken on OSX for previous versions. This has been
fixed, but you'll need to manually upgrade to this version to get it going
again. (Note that the fix is currently only available in the daily builds,
not a released version.) Workaround: Remove the wget bundled inside the
git-annex dmg.

## version 5.20140221

The Windows port of the assistant and webapp is now considered to be beta
@ -0,0 +1,17 @@
[[!comment format=mdwn
 username="https://www.google.com/accounts/o8/id?id=AItOawn3p4i4lk_zMilvjnJ9sS6g2nerpgz0Fjc"
 nickname="Matthias"
 subject="merge for master branch?"
 date="2014-03-23T23:02:23Z"
 content="""
As far as I observed, \"git annex merge\" only merges the \"git-annex\" branch. My wish is to have the conflict resolution from \"git annex sync\" in the \"master\" branch, but no automatic commit, such that the user can verify and possibly correct the merge. The proposed merge could go to the index. Consider the following scenario:

1. We have repo A, B, and CENTRAL
2. All three start with a root commit in \"master\" branch
3. Then A commits a file \"test.txt\" with content \"a\" and syncs with CENTRAL
4. Meanwhile, B commits \"test.txt\" with content \"b\"
5. When B tries to sync with CENTRAL, the proposed conflict resolution having two files \"test.txt-variantXXXX\" and \"test.txt-variantYYYY\" should be staged in the index, but not committed yet.
6. B can now commit a custom merge, e.g. with file content \"ab\".

The point is that I really like the conflict resolution, but still want to force the user to check the result.
"""]]
@ -0,0 +1,10 @@
[[!comment format=mdwn
 username="http://joeyh.name/"
 ip="209.250.56.41"
 subject="comment 5"
 date="2014-03-26T18:56:30Z"
 content="""
@Matthias you need to install git-annex 4.20130709 or newer. Then `git-annex merge` will do what you want, as I said before.

As for committing the merge, you can always adjust the result after the fact and use `git commit --amend`.
"""]]
@ -0,0 +1,520 @@
I have a git annex assistant process using 1.2 gigabytes of RAM and a git cat-file --batch child consuming CPU time constantly. I am running 5.20140320 on Ubuntu 12.04.

[[!format sh """
  PID USER      PR  NI  VIRT  RES  SHR S %CPU %MEM    TIME+  COMMAND
11775 ion       20   0 1350m 1.2g  12m S   48 62.4 425:56.85 git-annex
11787 ion       20   0  9856 1484 1232 R   54  0.1 366:16.14 git
"""]]

The assistant UI looks perfectly normal and does not indicate it is doing anything. daemon.log is empty and the assistant process seems to be logging into a rotated and deleted log file.

[[!format sh """
COMMAND     PID USER   FD   TYPE DEVICE SIZE/OFF     NODE NAME
git-annex 11775  ion    1w   REG  9,127    80841 55181369 /storage/ion/media/video/.git/annex/daemon.log.10 (deleted)
git-annex 11775  ion    2w   REG  9,127    80841 55181369 /storage/ion/media/video/.git/annex/daemon.log.10 (deleted)
"""]]

strace -s10000 -e trace=read,write -p 11787 indicates that the assistant is having the cat-file process cat the same objects over and over again.

[[!format sh """
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
"""]]

(The remainder of the 520-line trace repeats the cycle above verbatim, always requesting the same five .log objects.)
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
|
||||
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
|
||||
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
|
||||
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
|
||||
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 1"
date="2014-04-02T18:48:51Z"
content="""
All I can tell from the strace is that it's looking at location logs, and it's looking at the same few keys, but not a single one in a tight loop.

It would probably help a lot to run the assistant with --debug and get a debug log while this is going on. We need to pinpoint the part of the assistant that is affected, and there may be other activity too.
"""]]

@ -0,0 +1,22 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 2"
date="2014-04-07T21:07:35Z"
content="""
Excerpt of the log from when this apparently happened. Note the 6 minute time discontinuity while it was apparently looping:

<pre>
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"write-tree\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"rev-parse\",\"84068090af4bcd3d24f16d865ac07b0478f20ada:\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"symbolic-ref\",\"HEAD\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"refs/heads/master\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"git-annex\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"--hash\",\"refs/heads/git-annex\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..214ed317536695b91c8dd5bed059c46c11ad00be\",\"--oneline\",\"-n1\"]
</pre>

Also probably relevant: the network topology AIUI was `client --> server`, where both nodes ran the assistant. This happened on the server shortly after the client dropped off a refs/heads/synced/master.

(Also, the \"logging to a deleted file\" appears to have been a local misconfiguration: a cron job repeatedly tried to start the assistant. Only one will start, but later ones rotate the logs before noticing it's already running and giving up.)
"""]]

@ -0,0 +1,23 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 3"
date="2014-04-07T21:55:19Z"
content="""
Unfortunately, all I have been able to tell for sure from this log is that the expensive transfer scan does not seem to be running, and that this is unlikely to be a repository auto-repair.

My best guess as to what might be going on is an update of the git-annex branch.

[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"write-tree\"]

This is prep for an index file commit, probably to the git-annex branch.

[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"rev-parse\",\"84068090af4bcd3d24f16d865ac07b0478f20ada:\"]

This is getting the parent commit's tree.

The git-cat-file churn could then be a union merge reading the contents of the git-annex branch to union-merge it into `.git/annex/index` (in `mergeIndex`). This would reuse the main git cat-file process.

That does not explain why it would need to read, e.g., SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log 28 times.
Normally, during a union merge, only files listed by `diff-index` need to be read, and it lists each file only once.
"""]]

@ -0,0 +1,12 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 4"
date="2014-04-07T21:57:32Z"
content="""
Does `git log git-annex` show a commit that was made at 23:30?

Does it show a commit 84068090af4bcd3d24f16d865ac07b0478f20ada?

Is 84068090af4bcd3d24f16d865ac07b0478f20ada the parent of the 23:30 commit?
"""]]

@ -0,0 +1,19 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 5"
date="2014-04-07T22:12:14Z"
content="""
Is the repository using direct mode?

Another theory is that:

* test/hello appears
* watcher sees new symlink, tries to make a commit with it
* master branch already has that symlink
* this is why the write-tree is not followed by a commit-tree. The commit would have been empty.

If this is the case, then 84068090af4bcd3d24f16d865ac07b0478f20ada will be a ref on the master branch.

And all of the above is normal operation. But it does suggest that, if this repo is in direct mode, it might be running a direct mode work tree update around then. Which requires a lot of cat-file queries of the git-annex branch. And would certainly make repeated queries, at least if the repository has duplicate copies of some files.
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 6"
date="2014-04-07T22:17:16Z"
content="""
Does the git log have any recent commits that were \"git-annex automatic merge conflict fix\"?
"""]]

@ -0,0 +1,81 @@
[[!comment format=mdwn
username="http://johan.kiviniemi.name/"
nickname="Johan"
subject="comment 7"
date="2014-04-07T22:44:33Z"
content="""
In the git-annex branch, there is

* [[!toggle id=\"4deec8203e0baf7bb5b7d5d868d82439261ab3bc\" text=\"a commit at 23:21:51\"]] from my desktop box where I added `test/hello`

[[!toggleable id=\"4deec8203e0baf7bb5b7d5d868d82439261ab3bc\" text=\"\"\"
commit 4deec8203e0baf7bb5b7d5d868d82439261ab3bc
Author: Johan Kiviniemi <devel@johan.kiviniemi.name>
Date:   Mon Apr 7 23:21:51 2014 +0300

    update

diff --git a/992/280/SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03.log b/992/280/SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03.log
new file mode 100644
index 0000000..1cf060c
--- /dev/null
+++ b/992/280/SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03.log
@@ -0,0 +1 @@
+1396902111.893785s 1 86e07a59-8bba-4878-8d0b-5dfe8c6366c4
\"\"\"]]

* [[!toggle id=\"2e0884d9c8859339855ceee396b9ea9ae05865b4\" text=\"a commit at 23:21:54\"]] when the desktop box synced to the server (from which the log excerpt came)

[[!toggleable id=\"2e0884d9c8859339855ceee396b9ea9ae05865b4\" text=\"\"\"
commit 2e0884d9c8859339855ceee396b9ea9ae05865b4
Author: Johan Kiviniemi <devel@johan.kiviniemi.name>
Date:   Mon Apr 7 23:21:54 2014 +0300

    update

diff --git a/992/280/SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03.log b/992/280/SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03.log
index 1cf060c..cd0bccc 100644
--- a/992/280/SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03.log
+++ b/992/280/SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03.log
@@ -1 +1,2 @@
+1396902112.657779s 1 09ada430-8802-47da-bbfa-f5256a3c55d2
 1396902111.893785s 1 86e07a59-8bba-4878-8d0b-5dfe8c6366c4
\"\"\"]]

* [[!toggle id=\"214ed317536695b91c8dd5bed059c46c11ad00be\" text=\"a commit at 23:24:24\"]] (2.5 minutes later!) when the assistant on the server finally merged `synced/git-annex` into `git-annex` (`test/hello` became visible in the working tree at that time).

[[!toggleable id=\"214ed317536695b91c8dd5bed059c46c11ad00be\" text=\"\"\"
commit 214ed317536695b91c8dd5bed059c46c11ad00be
Merge: 4deec82 2e0884d
Author: sarjat <sarjat@alku.heh.fi>
Date:   Mon Apr 7 23:24:24 2014 +0300

    merging synced/git-annex into git-annex
\"\"\"]]

There is no commit in the `git-annex` branch at 23:30. The next commit is from unrelated changes at 00:06.

[[!toggle id=\"84068090af4bcd3d24f16d865ac07b0478f20ada\" text=\"84068090af4bcd3d24f16d865ac07b0478f20ada\"]] is the commit in `master` which added `test/hello` at 23:21:51.

[[!toggleable id=\"84068090af4bcd3d24f16d865ac07b0478f20ada\" text=\"\"\"
commit 84068090af4bcd3d24f16d865ac07b0478f20ada
Author: Johan Kiviniemi <devel@johan.kiviniemi.name>
Date:   Mon Apr 7 23:21:51 2014 +0300

diff --git a/test/hello b/test/hello
new file mode 120000
index 0000000..8c2678f
--- /dev/null
+++ b/test/hello
@@ -0,0 +1 @@
+../.git/annex/objects/zK/02/SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08
\ No newline at end of file
\"\"\"]]

The repository on the server is in indirect mode.
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://johan.kiviniemi.name/"
nickname="Johan"
subject="comment 8"
date="2014-04-07T22:48:18Z"
content="""
There are no commits in `master` or `git-annex` that have the word conflict in the description.
"""]]

@ -0,0 +1,89 @@
[[!comment format=mdwn
username="http://johan.kiviniemi.name/"
nickname="Johan"
subject="comment 9"
date="2014-04-07T22:55:12Z"
content="""
[[!toggle id=\"excerpt\" text=\"The full log excerpt\"]], which includes the sync from the client and the final messages after the cat-file loop ended and things stabilized (but a memory leak of 30 MB in the git-annex assistant process remained).

[[!toggleable id=\"excerpt\" text=\"\"\"
[2014-04-04 10:55:00 EEST] main: starting assistant version 5.20140402

[2014-04-07 23:21:08 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"git-annex\"]
[2014-04-07 23:21:08 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"--hash\",\"refs/heads/git-annex\"]
[2014-04-07 23:21:08 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..20d1f5538f6aa430f29ef938f6db045f5a69425d\",\"--oneline\",\"-n1\"]
[2014-04-07 23:21:08 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..b402a6e7b9268e25dbd9c6a027f4a5258993980d\",\"--oneline\",\"-n1\"]
[2014-04-07 23:21:08 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..7b18a191d58d779aab5789b923adb09863938ffe\",\"--oneline\",\"-n1\"]
[2014-04-07 23:21:08 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"ls-tree\",\"--full-tree\",\"-z\",\"--\",\"refs/heads/git-annex\",\"uuid.log\",\"remote.log\",\"trust.log\",\"group.log\",\"numcopies.log\",\"schedule.log\",\"preferred-content.log\",\"required-content.log\",\"group-preferred-content.log\"]
[2014-04-07 23:21:52 EEST] TransferWatcher: transfer starting: Download UUID \"86e07a59-8bba-4878-8d0b-5dfe8c6366c4\" SHA256E-s6--5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03 Nothing
[2014-04-07 23:21:52 EEST] read: git [\"config\",\"--null\",\"--list\"]
[2014-04-07 23:21:52 EEST] TransferWatcher: transfer starting: Download UUID \"86e07a59-8bba-4878-8d0b-5dfe8c6366c4\" test/hello Nothing
[2014-04-07 23:21:52 EEST] read: git [\"config\",\"--null\",\"--list\"]
[2014-04-07 23:21:52 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"git-annex\"]
[2014-04-07 23:21:52 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"--hash\",\"refs/heads/git-annex\"]
[2014-04-07 23:21:52 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..20d1f5538f6aa430f29ef938f6db045f5a69425d\",\"--oneline\",\"-n1\"]
[2014-04-07 23:21:52 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..4deec8203e0baf7bb5b7d5d868d82439261ab3bc\",\"--oneline\",\"-n1\"]
[2014-04-07 23:21:52 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..b402a6e7b9268e25dbd9c6a027f4a5258993980d\",\"--oneline\",\"-n1\"]
[2014-04-07 23:21:52 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..7b18a191d58d779aab5789b923adb09863938ffe\",\"--oneline\",\"-n1\"]
[2014-04-07 23:21:52 EEST] feed: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"update-index\",\"-z\",\"--index-info\"]
[2014-04-07 23:21:52 EEST] TransferWatcher: transfer finishing: Transfer {transferDirection = Download, transferUUID = UUID \"86e07a59-8bba-4878-8d0b-5dfe8c6366c4\", transferKey = Key {keyName = \"5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03\", keyBackendName = \"SHA256E\", keySize = Just 6, keyMtime = Nothing}}
[2014-04-07 23:21:52 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"diff-index\",\"--raw\",\"-z\",\"-r\",\"--no-renames\",\"-l0\",\"--cached\",\"4deec8203e0baf7bb5b7d5d868d82439261ab3bc\"]
[2014-04-07 23:21:52 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"4deec8203e0baf7bb5b7d5d868d82439261ab3bc..refs/heads/git-annex\",\"--oneline\",\"-n1\"]
[2014-04-07 23:21:52 EEST] call: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"update-ref\",\"refs/heads/git-annex\",\"4deec8203e0baf7bb5b7d5d868d82439261ab3bc\"]
[2014-04-07 23:22:08 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"ls-tree\",\"--full-tree\",\"-z\",\"--\",\"refs/heads/git-annex\",\"uuid.log\",\"remote.log\",\"trust.log\",\"group.log\",\"numcopies.log\",\"schedule.log\",\"preferred-content.log\",\"required-content.log\",\"group-preferred-content.log\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"symbolic-ref\",\"HEAD\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"refs/heads/master\"]
[2014-04-07 23:24:24 EEST] Merger: merging refs/heads/synced/master into refs/heads/master
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"--hash\",\"refs/heads/master\"]
[2014-04-07 23:24:24 EEST] call: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"merge\",\"--no-edit\",\"refs/heads/synced/master\"]
Updating 645e474..8406809
Fast-forward
 test/hello | 1 +
 1 file changed, 1 insertion(+)
 create mode 120000 test/hello
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"git-annex\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"--hash\",\"refs/heads/git-annex\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..4deec8203e0baf7bb5b7d5d868d82439261ab3bc\",\"--oneline\",\"-n1\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..2e0884d9c8859339855ceee396b9ea9ae05865b4\",\"--oneline\",\"-n1\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..b402a6e7b9268e25dbd9c6a027f4a5258993980d\",\"--oneline\",\"-n1\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..7b18a191d58d779aab5789b923adb09863938ffe\",\"--oneline\",\"-n1\"]
[2014-04-07 23:24:24 EEST] chat: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"hash-object\",\"-w\",\"--stdin-paths\",\"--no-filters\"]
[2014-04-07 23:24:24 EEST] feed: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"update-index\",\"-z\",\"--index-info\"]
[2014-04-07 23:24:24 EEST] feed: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"update-index\",\"-z\",\"--index-info\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"diff-index\",\"--raw\",\"-z\",\"-r\",\"--no-renames\",\"-l0\",\"--cached\",\"2e0884d9c8859339855ceee396b9ea9ae05865b4\"]
[2014-04-07 23:24:24 EEST] chat: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"hash-object\",\"-t\",\"blob\",\"-w\",\"--stdin\",\"--no-filters\"]
[2014-04-07 23:24:24 EEST] feed: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"update-index\",\"-z\",\"--index-info\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"diff-index\",\"--raw\",\"-z\",\"-r\",\"--no-renames\",\"-l0\",\"--cached\",\"refs/heads/git-annex\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"write-tree\"]
[2014-04-07 23:24:24 EEST] chat: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"commit-tree\",\"0bd4352b4008165d356bc9b1250bdb456c675175\",\"-p\",\"refs/heads/git-annex\",\"-p\",\"2e0884d9c8859339855ceee396b9ea9ae05865b4\"]
[2014-04-07 23:24:24 EEST] call: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"update-ref\",\"refs/heads/git-annex\",\"214ed317536695b91c8dd5bed059c46c11ad00be\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"ls-tree\",\"--full-tree\",\"-z\",\"--\",\"refs/heads/git-annex\",\"uuid.log\",\"remote.log\",\"trust.log\",\"group.log\",\"numcopies.log\",\"schedule.log\",\"preferred-content.log\",\"required-content.log\",\"group-preferred-content.log\"]
[2014-04-07 23:24:24 EEST] Watcher: add symlink test/hello
[2014-04-07 23:24:24 EEST] chat: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"hash-object\",\"-t\",\"blob\",\"-w\",\"--stdin\",\"--no-filters\"]
[2014-04-07 23:24:24 EEST] Committer: committing 1 changes
[2014-04-07 23:24:24 EEST] Committer: Committing changes to git
[2014-04-07 23:24:24 EEST] feed: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"update-index\",\"-z\",\"--index-info\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"symbolic-ref\",\"HEAD\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"--hash\",\"refs/heads/master\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"write-tree\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"rev-parse\",\"84068090af4bcd3d24f16d865ac07b0478f20ada:\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"symbolic-ref\",\"HEAD\"]
[2014-04-07 23:24:24 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"refs/heads/master\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"git-annex\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"--hash\",\"refs/heads/git-annex\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..214ed317536695b91c8dd5bed059c46c11ad00be\",\"--oneline\",\"-n1\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..2e0884d9c8859339855ceee396b9ea9ae05865b4\",\"--oneline\",\"-n1\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..b402a6e7b9268e25dbd9c6a027f4a5258993980d\",\"--oneline\",\"-n1\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..7b18a191d58d779aab5789b923adb09863938ffe\",\"--oneline\",\"-n1\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"ls-tree\",\"--full-tree\",\"-z\",\"--\",\"refs/heads/git-annex\",\"uuid.log\",\"remote.log\",\"trust.log\",\"group.log\",\"numcopies.log\",\"schedule.log\",\"preferred-content.log\",\"required-content.log\",\"group-preferred-content.log\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"git-annex\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"show-ref\",\"--hash\",\"refs/heads/git-annex\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..214ed317536695b91c8dd5bed059c46c11ad00be\",\"--oneline\",\"-n1\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..2e0884d9c8859339855ceee396b9ea9ae05865b4\",\"--oneline\",\"-n1\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..b402a6e7b9268e25dbd9c6a027f4a5258993980d\",\"--oneline\",\"-n1\"]
[2014-04-07 23:30:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"log\",\"refs/heads/git-annex..7b18a191d58d779aab5789b923adb09863938ffe\",\"--oneline\",\"-n1\"]
[2014-04-07 23:31:13 EEST] read: git [\"--git-dir=/storage/sarjat/annex-sarjat/.git\",\"--work-tree=/storage/sarjat/annex-sarjat\",\"ls-tree\",\"--full-tree\",\"-z\",\"--\",\"refs/heads/git-annex\",\"uuid.log\",\"remote.log\",\"trust.log\",\"group.log\",\"numcopies.log\",\"schedule.log\",\"preferred-content.log\",\"required-content.log\",\"group-preferred-content.log\"]
\"\"\"]]
"""]]

@ -76,3 +76,13 @@ accept: unsupported operation (Function not implemented)

lost dbus connection; falling back to polling (SocketError {socketErrorMessage = "connect: does not exist (No such file or directory)", socketErrorFatal = True, socketErrorAddress = Just (Address "unix:path=/var/run/dbus/system_bus_socket")})
"""]]

> [[done]]; This turned out not to be dbus related, but the http server failing,
> and I fixed that bug.
>
> AFAICS the user running git-annex did not have their own dbus daemon
> running, and that's why the low-volume dbus messages come up.
> Probably because this is an embedded device, and so no desktop
> environment. git-annex only uses dbus for detecting network connection
> changes and removable media mounts. None of which probably matter in an
> embedded environment. --[[Joey]]

@ -0,0 +1,12 @@
[[!comment format=mdwn
username="http://schnouki.net/"
nickname="Schnouki"
subject="comment 13"
date="2014-03-20T23:54:51Z"
content="""
Sorry for the delay (my laptop hard drive died, so I was offline for a few days).

Just tested git-annex 5.20140320 on my NAS and it works just fine! The webapp is running, I can connect to it, and there's no more spam in the daemon.log (except for the dbus message every minute, but that's not really a problem).

Thanks a lot Joey!
"""]]

@ -0,0 +1,36 @@
### Please describe the problem.
[[!format sh """
cabal install -O2 -j1 -f-webdav -f-s3 git-annex
Resolving dependencies...
Configuring dns-1.2.0...
Building dns-1.2.0...
Preprocessing library dns-1.2.0...

Network/DNS/Decode.hs:15:8:
    Could not find module `Data.Conduit.Network'
    It is a member of the hidden package `conduit-extra-1.1.0'.
    Perhaps you need to add `conduit-extra' to the build-depends in your .cabal file.
    Use -v to see a list of the files searched for.
Failed to install dns-1.2.0
cabal: Error: some packages failed to install:
dns-1.2.0 failed during the building phase. The exception was:
ExitFailure 1
git-annex-5.20140402 depends on dns-1.2.0 which failed to install.
"""]]

### What steps will reproduce the problem?

[[!format sh """
cabal update
mkdir -p ~/haskell/git-annex
cd ~/haskell/git-annex
cabal sandbox init
cabal install -O2 -j1 -f-webdav -f-s3 c2hs git-annex
"""]]

### What version of git-annex are you using? On what operating system?

5.20140402, Gentoo Linux

### Please provide any additional information below.

> [[fixed|done]] --[[Joey]]

@ -0,0 +1,39 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawl1D_4vD5ueaDw8gRsIYPO3UHRKEpFfg9I"
nickname="Dmitry"
subject="comment 1"
date="2014-04-04T06:40:21Z"
content="""
The author of the \"dns\" library has already fixed this issue.

The next error is:
[[!format sh \"\"\"
Preprocessing executable 'git-annex' for git-annex-5.20140402...

Utility/Yesod.hs:36:8:
    Could not find module `Text.Hamlet'
    It is a member of the hidden package `shakespeare-2.0.0.1'.
    Perhaps you need to add `shakespeare' to the build-depends in your .cabal file.
    Use -v to see a list of the files searched for.
cabal: Error: some packages failed to install:
\"\"\"]]

Here is the fix:

[[!format diff \"\"\"
Index: git-annex/git-annex.cabal
===================================================================
--- git-annex.orig/git-annex.cabal
+++ git-annex/git-annex.cabal
@@ -101,7 +101,7 @@ Executable git-annex
 base (>= 4.5 && < 4.9), monad-control, MonadCatchIO-transformers,
 IfElse, text, QuickCheck >= 2.1, bloomfilter, edit-distance, process,
 SafeSemaphore, uuid, random, dlist, unix-compat, async, stm (>= 2.3),
- data-default, case-insensitive
+ data-default, case-insensitive, shakespeare
 CC-Options: -Wall
 GHC-Options: -Wall
 Extensions: PackageImports
\"\"\"]]
"""]]

@ -0,0 +1,11 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawnvVfFLW4CTKs7UjdiLIsOn_cxj1Jnh64I"
nickname="Charl"
subject="Could it be gmail.com XMPP throttling?"
date="2014-03-23T19:26:08Z"
content="""
I was seeing similar error messages until I registered for a different XMPP account at jabber.de and started using that instead of my gmail.com account.

My current suspicion is that it could be Google throttling their XMPP service. See here: http://stackoverflow.com/questions/1843837/what-is-the-throttling-rate-that-gtalk-applies-to-xmpp-messages
"""]]

doc/bugs/Crash_when_disabling_syncing_in_the_webapp.mdwn (new file, 23 lines)
@ -0,0 +1,23 @@
### Please describe the problem.
The watcher crashes.

I only need to restart the thread in the pop-up to get everything to work again, but I'm reporting it just in case this issue has any other implications.


### What steps will reproduce the problem?
I open the webapp and, in the minutes before it starts syncing (syncing is enabled), I disable it (clicking on the 'syncing enabled' text).

This produces a crash every time.


### What version of git-annex are you using? On what operating system?
5.20140320 in Debian sid and testing


### Please provide any additional information below.
This is all I can see in the logs:

[[!format sh """
Watcher crashed: PauseWatcher
[2014-03-26 08:54:57 CET] Watcher: warning Watcher crashed: PauseWatcher
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.41"
subject="comment 1"
date="2014-03-26T17:36:00Z"
content="""
How did you install git-annex? Is this Debian Linux?

I have not been able to reproduce a crash. It's indeed the case that a PauseWatcher exception is thrown, but the Watcher explicitly catches that exception.
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawk9nck8WX8-ADF3Fdh5vFo4Qrw1I_bJcR8"
nickname="Jon Ander"
subject="comment 2"
date="2014-04-01T08:04:51Z"
content="""
Yes, this is Debian Linux, and I've been able to reproduce it on both i386 and amd64. git-annex is installed from the Debian repositories.

I'll try to continue testing the issue and will report back if I can find any useful info.
"""]]

@ -0,0 +1,12 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 3"
date="2014-04-02T20:38:12Z"
content="""
So we have the same version of git-annex from the same build, and only you see the problem. Hmm..

You mentioned that you see the problem if you disable syncing at a particular time. Does it only crash at that time, or at any time?

If you create a brand new empty repository and run the webapp in it, can you reproduce the problem there? Ie, \"mkdir test; cd test; git init; git annex init; git annex webapp\"
"""]]

@ -0,0 +1,14 @@
When using git annex as part of an automated backup system, it's sometimes important that we archive all files, including dotfiles.

AFAICT there's no way to tell git annex add to add all dotfiles in a given directory; the only way to do it is to list every dotfile individually. (git annex add --force doesn't seem to do it.)

This can be worked around with find and xargs (see the sketch below), but it is more work than it should be, I think.
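
A sketch of that find/xargs workaround (assumes GNU find and xargs; whether git annex add recurses into explicitly named dot-directories may vary):

[[!format sh """
# git annex add skips dotfiles when recursing, but accepts files that
# are named explicitly, so name every dotfile and dot-directory:
find . -name .git -prune -o -name '.*' ! -name . -print0 |
xargs -0 git annex add
"""]]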

It might also be nice if git annex add displayed a warning when adding a directory with dotfiles; something like "Warning, N files ignored; pass --whatever to add them."

> [[!commit 34abd7bca80a8cc012f92d64116014449b1b2392]] explains
> the rationale for dotfiles to be skipped. Such as it was.
>
> I don't think it makes sense for --force to be the flag to override
> this, because you may want to add dotfiles, but not .gitignored
> files. So, made a new --include-dotfiles option. [[done]] --[[Joey]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawmUJBh1lYmvfCCiGr3yrdx-QhuLCSRnU5c"
nickname="Justin"
subject="comment 1"
date="2014-03-24T07:03:42Z"
content="""
Maybe the right solution is to make --force not ignore dotfiles, although perhaps that would break people who rely on its current behavior.
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawmUJBh1lYmvfCCiGr3yrdx-QhuLCSRnU5c"
nickname="Justin"
subject="comment 2"
date="2014-04-03T01:44:58Z"
content="""
Thanks a lot for adding this flag!
"""]]

@ -0,0 +1,62 @@
### Please describe the problem.
Just a small patch to Tahoe.hs which fixes "initremote repo type=tahoe" when using the latest tahoe-lafs release available for download.

I'm trying to add an attachment here, but the UI says "prohibited by allowed_attachments (user is not an admin)", so please also have a look here: <https://github.com/joeyh/git-annex/pull/21>

### What steps will reproduce the problem?

1. Install the latest tahoe-lafs
2. Run "TAHOE_FURL=... git annex initremote repo type=tahoe"

### What version of git-annex are you using? On what operating system?

OS: Debian Sid updated to latest packages
git-annex: version 5.20140306

> Thanks, I've fixed this. [[done]] --[[Joey]]

### Please provide any additional information below.

I would like to add a few things if I'm able (it's my first time with Haskell, and I'm a bit lost as of now):

1. Add an optional parameter or envvar for a root dir cap, and switch
   from storing anonymous files to saving a tree of dirs and files
   (much like the webdav back-end), so that I can later renew the
   leases on the files and keep them from expiring in tahoe's garbage
   collection process (for details see
   <https://tahoe-lafs.org/trac/tahoe-lafs/browser/docs/garbage-collection.rst#client-side-renewal>)

   The poor man's way to do that without touching git-annex is to somehow
   collect the caps of the files from the annex metadata and link them
   into a directory, on which I can then run "tahoe deep-check
   --add-lease"; a sketch of this follows below.

   > When I talked this over with Zooko before, he
   > thought it was better for git-annex to not use tahoe's directories,
   > which is why it doesn't. See [[todo/tahoe_lfs_for_reals]].
   >
   > This is the first I have heard about tahoe garbage collection.
   > It sounds like it's an optional process.
   >
   > It would certainly be possible to pull the caps for files out of
   > git-annex's remote state log.
   > --[[Joey]]
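
A hypothetical sketch of that "poor man's" lease renewal, skipping the directory step entirely. It assumes the tahoe caps can be grepped out of the remote state logs on the git-annex branch, and that `tahoe check` accepts a raw cap as its argument; neither assumption is confirmed here:

[[!format sh """
# Collect every tahoe cap recorded on the git-annex branch and
# renew its lease directly:
git grep -h 'URI:' git-annex |
grep -o 'URI:[A-Za-z0-9:_-]*' |
sort -u |
while read -r cap; do
    tahoe check --add-lease "$cap"
done
"""]]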

2. Add convergence as an optional parameter. As of now, many of the
   files that I'm willing to manage with git-annex are already on my
   grid, mostly because I've used tahoe's backup command to upload
   them in the past. By using the same convergence value that I've
   already set up on my other tahoe client installation, I would be able
   to save much time and space by avoiding duplicates.

Do you have any good pointers to pieces of git-annex code that I can
read and hack on to try to implement this, or any other suggestions?

> It was not documented, but you can already pass
> `shared-convergence-secret=xxx` to `initremote`.
> I have documented it. --[[Joey]]
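
For reference, a minimal invocation using that option (a sketch; the furl is elided and the secret is a placeholder, as in the report above; the secret should match the one the existing tahoe client uses, so uploads converge):

[[!format sh """
TAHOE_FURL=... git annex initremote repo type=tahoe shared-convergence-secret=xxx
"""]]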

Some files were not shown because too many files have changed in this diff.