Record git-annex (5.20140402) in archive suite sid
commit a89fe531d7
180 changed files with 3674 additions and 397 deletions
Annex.hs: 16 lines changed
@@ -10,7 +10,6 @@
module Annex (
    Annex,
    AnnexState(..),
    PreferredContentMap,
    new,
    run,
    eval,
|
@ -60,8 +59,8 @@ import Types.FileMatcher
|
|||
import Types.NumCopies
|
||||
import Types.LockPool
|
||||
import Types.MetaData
|
||||
import Types.DesktopNotify
|
||||
import Types.CleanupActions
|
||||
import qualified Utility.Matcher
|
||||
import qualified Data.Map as M
|
||||
import qualified Data.Set as S
|
||||
import Utility.Quvi (QuviVersion)
|
||||
|
@ -80,9 +79,6 @@ newtype Annex a = Annex { runAnnex :: ReaderT (MVar AnnexState) IO a }
|
|||
Applicative
|
||||
)
|
||||
|
||||
type Matcher a = Either [Utility.Matcher.Token a] (Utility.Matcher.Matcher a)
|
||||
type PreferredContentMap = M.Map UUID (Utility.Matcher.Matcher (S.Set UUID -> MatchInfo -> Annex Bool))
|
||||
|
||||
-- internal state storage
|
||||
data AnnexState = AnnexState
|
||||
{ repo :: Git.Repo
|
||||
|
@@ -103,9 +99,10 @@ data AnnexState
    , forcebackend :: Maybe String
    , globalnumcopies :: Maybe NumCopies
    , forcenumcopies :: Maybe NumCopies
    , limit :: Matcher (MatchInfo -> Annex Bool)
    , limit :: ExpandableMatcher Annex
    , uuidmap :: Maybe UUIDMap
    , preferredcontentmap :: Maybe PreferredContentMap
    , preferredcontentmap :: Maybe (FileMatcherMap Annex)
    , requiredcontentmap :: Maybe (FileMatcherMap Annex)
    , shared :: Maybe SharedRepository
    , forcetrust :: TrustMap
    , trustmap :: Maybe TrustMap
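The ExpandableMatcher type that replaces the old Either-based limit is not shown in this excerpt; a plausible definition, consistent with the BuildingMatcher and CompleteMatcher constructors used in newState and in Limit.hs further down, would be roughly:

-- Assumed sketch only; the real definition presumably lives in Types.FileMatcher.
data ExpandableMatcher a
    = BuildingMatcher [Utility.Matcher.Token (MatchInfo -> a Bool)]
    | CompleteMatcher (Utility.Matcher.Matcher (MatchInfo -> a Bool))

Separate constructors make the build-then-compile lifecycle of the limit explicit instead of overloading Left and Right.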
@ -122,6 +119,7 @@ data AnnexState = AnnexState
|
|||
, unusedkeys :: Maybe (S.Set Key)
|
||||
, quviversion :: Maybe QuviVersion
|
||||
, existinghooks :: M.Map Git.Hook.Hook Bool
|
||||
, desktopnotify :: DesktopNotify
|
||||
}
|
||||
|
||||
newState :: GitConfig -> Git.Repo -> AnnexState
|
||||
|
@ -144,9 +142,10 @@ newState c r = AnnexState
|
|||
, forcebackend = Nothing
|
||||
, globalnumcopies = Nothing
|
||||
, forcenumcopies = Nothing
|
||||
, limit = Left []
|
||||
, limit = BuildingMatcher []
|
||||
, uuidmap = Nothing
|
||||
, preferredcontentmap = Nothing
|
||||
, requiredcontentmap = Nothing
|
||||
, shared = Nothing
|
||||
, forcetrust = M.empty
|
||||
, trustmap = Nothing
|
||||
|
@ -163,6 +162,7 @@ newState c r = AnnexState
|
|||
, unusedkeys = Nothing
|
||||
, quviversion = Nothing
|
||||
, existinghooks = M.empty
|
||||
, desktopnotify = mempty
|
||||
}
|
||||
|
||||
{- Makes an Annex state object for the specified git repo.
|
||||
|
|
|
@ -32,8 +32,12 @@ getTransitionCalculator ForgetDeadRemotes = Just dropDead
|
|||
|
||||
dropDead :: FilePath -> String -> TrustMap -> FileTransition
|
||||
dropDead f content trustmap = case getLogVariety f of
|
||||
Just UUIDBasedLog -> ChangeFile $
|
||||
UUIDBased.showLog id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLog Just content
|
||||
Just UUIDBasedLog
|
||||
-- Don't remove the dead repo from the trust log,
|
||||
-- because git remotes may still exist, and they need
|
||||
-- to still know it's dead.
|
||||
| f == trustLog -> PreserveFile
|
||||
| otherwise -> ChangeFile $ UUIDBased.showLog id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLog Just content
|
||||
Just NewUUIDBasedLog -> ChangeFile $
|
||||
UUIDBased.showLogNew id $ dropDeadFromUUIDBasedLog trustmap $ UUIDBased.parseLogNew Just content
|
||||
Just (PresenceLog _) ->
|
||||
|
|
|
@ -13,7 +13,6 @@ import Common.Annex
|
|||
import Limit
|
||||
import Utility.Matcher
|
||||
import Types.Group
|
||||
import Types.Limit
|
||||
import Logs.Group
|
||||
import Logs.Remote
|
||||
import Annex.UUID
|
||||
|
@ -25,12 +24,10 @@ import Types.Remote (RemoteConfig)
|
|||
import Data.Either
|
||||
import qualified Data.Set as S
|
||||
|
||||
type FileMatcher = Matcher MatchFiles
|
||||
|
||||
checkFileMatcher :: FileMatcher -> FilePath -> Annex Bool
|
||||
checkFileMatcher :: (FileMatcher Annex) -> FilePath -> Annex Bool
|
||||
checkFileMatcher matcher file = checkMatcher matcher Nothing (Just file) S.empty True
|
||||
|
||||
checkMatcher :: FileMatcher -> Maybe Key -> AssociatedFile -> AssumeNotPresent -> Bool -> Annex Bool
|
||||
checkMatcher :: (FileMatcher Annex) -> Maybe Key -> AssociatedFile -> AssumeNotPresent -> Bool -> Annex Bool
|
||||
checkMatcher matcher mkey afile notpresent def
|
||||
| isEmpty matcher = return def
|
||||
| otherwise = case (mkey, afile) of
|
||||
|
@ -48,15 +45,15 @@ fileMatchInfo file = do
|
|||
, relFile = file
|
||||
}
|
||||
|
||||
matchAll :: FileMatcher
|
||||
matchAll :: FileMatcher Annex
|
||||
matchAll = generate []
|
||||
|
||||
parsedToMatcher :: [Either String (Token MatchFiles)] -> Either String FileMatcher
|
||||
parsedToMatcher :: [Either String (Token (MatchFiles Annex))] -> Either String (FileMatcher Annex)
|
||||
parsedToMatcher parsed = case partitionEithers parsed of
|
||||
([], vs) -> Right $ generate vs
|
||||
(es, _) -> Left $ unwords $ map ("Parse failure: " ++) es
|
||||
|
||||
exprParser :: FileMatcher -> FileMatcher -> GroupMap -> M.Map UUID RemoteConfig -> Maybe UUID -> String -> [Either String (Token MatchFiles)]
|
||||
exprParser :: FileMatcher Annex -> FileMatcher Annex -> GroupMap -> M.Map UUID RemoteConfig -> Maybe UUID -> String -> [Either String (Token (MatchFiles Annex))]
|
||||
exprParser matchstandard matchgroupwanted groupmap configmap mu expr =
|
||||
map parse $ tokenizeMatcher expr
|
||||
where
|
||||
|
@ -69,7 +66,7 @@ exprParser matchstandard matchgroupwanted groupmap configmap mu expr =
|
|||
preferreddir = fromMaybe "public" $
|
||||
M.lookup "preferreddir" =<< (`M.lookup` configmap) =<< mu
|
||||
|
||||
parseToken :: FileMatcher -> FileMatcher -> MkLimit -> MkLimit -> GroupMap -> String -> Either String (Token MatchFiles)
|
||||
parseToken :: FileMatcher Annex -> FileMatcher Annex -> MkLimit Annex -> MkLimit Annex -> GroupMap -> String -> Either String (Token (MatchFiles Annex))
|
||||
parseToken matchstandard matchgroupwanted checkpresent checkpreferreddir groupmap t
|
||||
| t `elem` tokens = Right $ token t
|
||||
| t == "standard" = call matchstandard
|
||||
|
@ -106,7 +103,7 @@ tokenizeMatcher = filter (not . null ) . concatMap splitparens . words
|
|||
|
||||
{- Generates a matcher for files large enough (or meeting other criteria)
|
||||
- to be added to the annex, rather than directly to git. -}
|
||||
largeFilesMatcher :: Annex FileMatcher
|
||||
largeFilesMatcher :: Annex (FileMatcher Annex)
|
||||
largeFilesMatcher = go =<< annexLargeFiles <$> Annex.getGitConfig
|
||||
where
|
||||
go Nothing = return matchAll
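A minimal sketch of how a command is expected to combine largeFilesMatcher with the checkFileMatcher signature above; the wrapper name is hypothetical:

shouldAnnexFile :: FilePath -> Annex Bool
shouldAnnexFile file = do
    -- matches everything when annex.largefiles is unset, per go Nothing above
    matcher <- largeFilesMatcher
    checkFileMatcher matcher file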
|
||||
|
|
Annex/Notification.hs: 81 lines (new file)
@@ -0,0 +1,81 @@
{- git-annex desktop notifications
 -
 - Copyright 2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

{-# LANGUAGE CPP #-}

module Annex.Notification where

import Common.Annex
import Logs.Transfer
#ifdef WITH_DBUS_NOTIFICATIONS
import qualified Annex
import Types.DesktopNotify
import qualified DBus.Notify as Notify
import qualified DBus.Client
#endif

-- Witness that notification has happened.
data NotifyWitness = NotifyWitness

{- Wrap around an action that performs a transfer, which may run multiple
 - attempts. Displays notification when supported and when the user asked
 - for it. -}
notifyTransfer :: Direction -> Maybe FilePath -> (NotifyWitness -> Annex Bool) -> Annex Bool
notifyTransfer _ Nothing a = a NotifyWitness
#ifdef WITH_DBUS_NOTIFICATIONS
notifyTransfer direction (Just f) a = do
    wanted <- Annex.getState Annex.desktopnotify
    let action = if direction == Upload then "uploading" else "downloading"
    let basedesc = action ++ " " ++ f
    let startdesc = "started " ++ basedesc
    let enddesc ok = if ok
        then "finished " ++ basedesc
        else basedesc ++ " failed"
    if (notifyStart wanted || notifyFinish wanted)
        then do
            client <- liftIO DBus.Client.connectSession
            startnotification <- liftIO $ if notifyStart wanted
                then Just <$> Notify.notify client (mkNote startdesc)
                else pure Nothing
            ok <- a NotifyWitness
            when (notifyFinish wanted) $ liftIO $ void $ maybe
                (Notify.notify client $ mkNote $ enddesc ok)
                (\n -> Notify.replace client n $ mkNote $ enddesc ok)
                startnotification
            return ok
        else a NotifyWitness
#else
notifyTransfer _ (Just _) a = do a NotifyWitness
#endif

notifyDrop :: Maybe FilePath -> Bool -> Annex ()
notifyDrop Nothing _ = noop
#ifdef WITH_DBUS_NOTIFICATIONS
notifyDrop (Just f) ok = do
    wanted <- Annex.getState Annex.desktopnotify
    when (notifyFinish wanted) $ liftIO $ do
        client <- DBus.Client.connectSession
        let msg = if ok
            then "dropped " ++ f
            else "failed to drop" ++ f
        void $ Notify.notify client (mkNote msg)
#else
notifyDrop (Just _) _ = noop
#endif

#ifdef WITH_DBUS_NOTIFICATIONS
mkNote :: String -> Notify.Note
mkNote desc = Notify.blankNote
    { Notify.appName = "git-annex"
    , Notify.body = Just $ Notify.Text desc
    , Notify.hints =
        [ Notify.Category Notify.Transfer
        , Notify.Urgency Notify.Low
        , Notify.SuppressSound True
        ]
    }
#endif
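A rough usage sketch, mirroring the call sites updated later in this commit; the helper name is hypothetical and the usual qualified Remote import is assumed:

sendKey :: Remote -> Key -> AssociatedFile -> Annex Bool
sendKey r key afile =
    -- notifyTransfer posts start/finish notifications (when requested via
    -- --notify-start/--notify-finish) and hands the witness on to upload.
    notifyTransfer Upload afile $
        upload (Remote.uuid r) key afile forwardRetry $ \p ->
            Remote.storeKey r key afile p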
Annex/Transfer.hs: 131 lines (new file)
@@ -0,0 +1,131 @@
{- git-annex transfers
 -
 - Copyright 2012-2014 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

{-# LANGUAGE CPP #-}

module Annex.Transfer (
    module X,
    upload,
    download,
    runTransfer,
    noRetry,
    forwardRetry,
) where

import Common.Annex
import Logs.Transfer as X
import Annex.Notification as X
import Annex.Perms
import Annex.Exception
import Utility.Metered
#ifdef mingw32_HOST_OS
import Utility.WinLock
#endif

import Control.Concurrent

upload :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> NotifyWitness -> Annex Bool
upload u key f d a _witness = runTransfer (Transfer Upload u key) f d a

download :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> NotifyWitness -> Annex Bool
download u key f d a _witness = runTransfer (Transfer Download u key) f d a

{- Runs a transfer action. Creates and locks the lock file while the
 - action is running, and stores info in the transfer information
 - file.
 -
 - If the transfer action returns False, the transfer info is
 - left in the failedTransferDir.
 -
 - If the transfer is already in progress, returns False.
 -
 - An upload can be run from a read-only filesystem, and in this case
 - no transfer information or lock file is used.
 -}
||||
runTransfer :: Transfer -> Maybe FilePath -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
|
||||
runTransfer t file shouldretry a = do
|
||||
info <- liftIO $ startTransferInfo file
|
||||
(meter, tfile, metervar) <- mkProgressUpdater t info
|
||||
mode <- annexFileMode
|
||||
(fd, inprogress) <- liftIO $ prep tfile mode info
|
||||
if inprogress
|
||||
then do
|
||||
showNote "transfer already in progress"
|
||||
return False
|
||||
else do
|
||||
ok <- retry info metervar $
|
||||
bracketIO (return fd) (cleanup tfile) (const $ a meter)
|
||||
unless ok $ recordFailedTransfer t info
|
||||
return ok
|
||||
where
|
||||
#ifndef mingw32_HOST_OS
|
||||
prep tfile mode info = do
|
||||
mfd <- catchMaybeIO $
|
||||
openFd (transferLockFile tfile) ReadWrite (Just mode)
|
||||
defaultFileFlags { trunc = True }
|
||||
case mfd of
|
||||
Nothing -> return (Nothing, False)
|
||||
Just fd -> do
|
||||
locked <- catchMaybeIO $
|
||||
setLock fd (WriteLock, AbsoluteSeek, 0, 0)
|
||||
if isNothing locked
|
||||
then return (Nothing, True)
|
||||
else do
|
||||
void $ tryIO $ writeTransferInfoFile info tfile
|
||||
return (mfd, False)
|
||||
#else
|
||||
prep tfile _mode info = do
|
||||
v <- catchMaybeIO $ lockExclusive (transferLockFile tfile)
|
||||
case v of
|
||||
Nothing -> return (Nothing, False)
|
||||
Just Nothing -> return (Nothing, True)
|
||||
Just (Just lockhandle) -> do
|
||||
void $ tryIO $ writeTransferInfoFile info tfile
|
||||
return (Just lockhandle, False)
|
||||
#endif
|
||||
cleanup _ Nothing = noop
|
||||
cleanup tfile (Just lockhandle) = do
|
||||
void $ tryIO $ removeFile tfile
|
||||
#ifndef mingw32_HOST_OS
|
||||
void $ tryIO $ removeFile $ transferLockFile tfile
|
||||
closeFd lockhandle
|
||||
#else
|
||||
{- Windows cannot delete the lockfile until the lock
|
||||
- is closed. So it's possible to race with another
|
||||
- process that takes the lock before it's removed,
|
||||
- so ignore failure to remove.
|
||||
-}
|
||||
dropLock lockhandle
|
||||
void $ tryIO $ removeFile $ transferLockFile tfile
|
||||
#endif
|
||||
retry oldinfo metervar run = do
|
||||
v <- tryAnnex run
|
||||
case v of
|
||||
Right b -> return b
|
||||
Left _ -> do
|
||||
b <- getbytescomplete metervar
|
||||
let newinfo = oldinfo { bytesComplete = Just b }
|
||||
if shouldretry oldinfo newinfo
|
||||
then retry newinfo metervar run
|
||||
else return False
|
||||
getbytescomplete metervar
|
||||
| transferDirection t == Upload =
|
||||
liftIO $ readMVar metervar
|
||||
| otherwise = do
|
||||
f <- fromRepo $ gitAnnexTmpObjectLocation (transferKey t)
|
||||
liftIO $ catchDefaultIO 0 $
|
||||
fromIntegral . fileSize <$> getFileStatus f
|
||||
|
||||
type RetryDecider = TransferInfo -> TransferInfo -> Bool
|
||||
|
||||
noRetry :: RetryDecider
|
||||
noRetry _ _ = False
|
||||
|
||||
{- Retries a transfer when it fails, as long as the failed transfer managed
|
||||
- to send some data. -}
|
||||
forwardRetry :: RetryDecider
|
||||
forwardRetry old new = bytesComplete old < bytesComplete new
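Other retry policies can be expressed the same way; for example, a sketch that only retries when at least another mebibyte was transferred, assuming bytesComplete is a Maybe byte count as the comparison above suggests:

forwardRetryMiB :: RetryDecider
forwardRetryMiB old new = case (bytesComplete old, bytesComplete new) of
    -- retry only when the failed attempt made real progress
    (Just before, Just after) -> after - before >= 1048576
    _ -> False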
|
|
@ -14,7 +14,6 @@ import Utility.Tense
|
|||
import qualified Data.Text as T
|
||||
import Data.Text (Text)
|
||||
import qualified Data.Map as M
|
||||
import Data.Monoid
|
||||
|
||||
{- This is as many alerts as it makes sense to display at a time.
|
||||
- A display might be smaller, or larger, the point is to not overwhelm the
|
||||
|
|
|
@ -35,11 +35,14 @@ standaloneAppBase = getEnv "GIT_ANNEX_APP_BASE"
|
|||
-
|
||||
- Note that this is done every time it's started, so if the user moves
|
||||
- it around, the paths this sets up won't break.
|
||||
-
|
||||
- Nautilus hook script installation is done even for packaged apps,
|
||||
- since it has to go into the user's home directory.
|
||||
-}
|
||||
ensureInstalled :: IO ()
|
||||
ensureInstalled = go =<< standaloneAppBase
|
||||
where
|
||||
go Nothing = noop
|
||||
go Nothing = installNautilus "git-annex"
|
||||
go (Just base) = do
|
||||
let program = base </> "git-annex"
|
||||
programfile <- programFile
|
||||
|
@ -78,6 +81,32 @@ ensureInstalled = go =<< standaloneAppBase
|
|||
viaTmp writeFile shim content
|
||||
modifyFileMode shim $ addModes [ownerExecuteMode]
|
||||
|
||||
installNautilus program
|
||||
|
||||
installNautilus :: FilePath -> IO ()
|
||||
#ifdef linux_HOST_OS
|
||||
installNautilus program = do
|
||||
scriptdir <- (\d -> d </> "nautilus" </> "scripts") <$> userDataDir
|
||||
genscript scriptdir "get"
|
||||
genscript scriptdir "drop"
|
||||
where
|
||||
genscript scriptdir action =
|
||||
installscript (scriptdir </> scriptname action) $ unlines
|
||||
[ shebang_local
|
||||
, autoaddedcomment
|
||||
, "exec " ++ program ++ " " ++ action ++ " --notify-start --notify-finish -- \"$@\""
|
||||
]
|
||||
scriptname action = "git-annex " ++ action
|
||||
installscript f c = whenM (safetoinstallscript f) $ do
|
||||
writeFile f c
|
||||
modifyFileMode f $ addModes [ownerExecuteMode]
|
||||
safetoinstallscript f = catchDefaultIO True $
|
||||
elem autoaddedcomment . lines <$> readFileStrict f
|
||||
autoaddedcomment = "# Automatically added by git-annex, do not edit. (To disable, chmod 600 this file.)"
|
||||
#else
|
||||
installNautilus _ = noop
|
||||
#endif
|
||||
|
||||
{- Returns a cleaned up environment that lacks settings used to make the
|
||||
- standalone builds use their bundled libraries and programs.
|
||||
- Useful when calling programs not included in the standalone builds.
|
||||
|
|
|
@ -197,7 +197,7 @@ authorizedKeysLine gitannexshellonly dir pubkey
|
|||
- long perl script. -}
|
||||
| otherwise = pubkey
|
||||
where
|
||||
limitcommand = "command=\"GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty "
|
||||
limitcommand = "command=\"env GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty "
|
||||
|
||||
{- Generates a ssh key pair. -}
|
||||
genSshKeyPair :: IO SshKeyPair
|
||||
|
|
|
@ -62,15 +62,17 @@ configFilesActions =
|
|||
, (groupLog, void $ liftAnnex groupMapLoad)
|
||||
, (numcopiesLog, void $ liftAnnex globalNumCopiesLoad)
|
||||
, (scheduleLog, void updateScheduleLog)
|
||||
-- Preferred content settings depend on most of the other configs,
|
||||
-- so will be reloaded whenever any configs change.
|
||||
-- Preferred and required content settings depend on most of the
|
||||
-- other configs, so will be reloaded whenever any configs change.
|
||||
, (preferredContentLog, noop)
|
||||
, (requiredContentLog, noop)
|
||||
, (groupPreferredContentLog, noop)
|
||||
]
|
||||
|
||||
reloadConfigs :: Configs -> Assistant ()
|
||||
reloadConfigs changedconfigs = do
|
||||
sequence_ as
|
||||
void $ liftAnnex preferredContentMapLoad
|
||||
void $ liftAnnex preferredRequiredMapsLoad
|
||||
{- Changes to the remote log, or the trust log, can affect the
|
||||
- syncRemotes list. Changes to the uuid log may affect its
|
||||
- display so are also included. -}
|
||||
|
|
|
@ -35,6 +35,7 @@ import Annex.CatFile
|
|||
import Annex.CheckIgnore
|
||||
import Annex.Link
|
||||
import Annex.FileMatcher
|
||||
import Types.FileMatcher
|
||||
import Annex.ReplaceFile
|
||||
import Git.Types
|
||||
import Config
|
||||
|
@ -196,7 +197,7 @@ runHandler handler file filestatus = void $ do
|
|||
| otherwise = f
|
||||
|
||||
{- Small files are added to git as-is, while large ones go into the annex. -}
|
||||
add :: FileMatcher -> FilePath -> Assistant (Maybe Change)
|
||||
add :: FileMatcher Annex -> FilePath -> Assistant (Maybe Change)
|
||||
add bigfilematcher file = ifM (liftAnnex $ checkFileMatcher bigfilematcher file)
|
||||
( pendingAddChange file
|
||||
, do
|
||||
|
@ -205,7 +206,7 @@ add bigfilematcher file = ifM (liftAnnex $ checkFileMatcher bigfilematcher file)
|
|||
madeChange file AddFileChange
|
||||
)
|
||||
|
||||
onAdd :: FileMatcher -> Handler
|
||||
onAdd :: FileMatcher Annex -> Handler
|
||||
onAdd matcher file filestatus
|
||||
| maybe False isRegularFile filestatus =
|
||||
unlessIgnored file $
|
||||
|
@ -218,7 +219,7 @@ shouldRestage ds = scanComplete ds || forceRestage ds
|
|||
{- In direct mode, add events are received for both new files, and
|
||||
- modified existing files.
|
||||
-}
|
||||
onAddDirect :: Bool -> FileMatcher -> Handler
|
||||
onAddDirect :: Bool -> FileMatcher Annex -> Handler
|
||||
onAddDirect symlinkssupported matcher file fs = do
|
||||
v <- liftAnnex $ catKeyFile file
|
||||
case (v, fs) of
|
||||
|
|
|
@ -57,6 +57,9 @@ buildFlags = filter (not . null)
|
|||
#ifdef WITH_DBUS
|
||||
, "DBus"
|
||||
#endif
|
||||
#ifdef WITH_DESKTOP_NOTIFY
|
||||
, "DesktopNotify"
|
||||
#endif
|
||||
#ifdef WITH_XMPP
|
||||
, "XMPP"
|
||||
#else
|
||||
|
|
|
@ -20,6 +20,7 @@ import System.Console.GetOpt
|
|||
import Common.Annex
|
||||
import qualified Annex
|
||||
import Types.Messages
|
||||
import Types.DesktopNotify
|
||||
import Limit
|
||||
import CmdLine.Usage
|
||||
|
||||
|
@ -41,6 +42,10 @@ commonOptions =
|
|||
"don't show debug messages"
|
||||
, Option ['b'] ["backend"] (ReqArg setforcebackend paramName)
|
||||
"specify key-value backend to use"
|
||||
, Option [] ["notify-finish"] (NoArg (setdesktopnotify mkNotifyFinish))
|
||||
"show desktop notification after transfer finishes"
|
||||
, Option [] ["notify-start"] (NoArg (setdesktopnotify mkNotifyStart))
|
||||
"show desktop notification after transfer completes"
|
||||
]
|
||||
where
|
||||
setforce v = Annex.changeState $ \s -> s { Annex.force = v }
|
||||
|
@ -49,6 +54,7 @@ commonOptions =
|
|||
setforcebackend v = Annex.changeState $ \s -> s { Annex.forcebackend = Just v }
|
||||
setdebug = Annex.changeGitConfig $ \c -> c { annexDebug = True }
|
||||
unsetdebug = Annex.changeGitConfig $ \c -> c { annexDebug = False }
|
||||
setdesktopnotify v = Annex.changeState $ \s -> s { Annex.desktopnotify = Annex.desktopnotify s <> v }
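Types.DesktopNotify is not part of this excerpt; a definition consistent with combining values via <> here and with how notifyStart/notifyFinish are read in Annex.Notification might look roughly like:

data DesktopNotify = DesktopNotify
    { notifyStart :: Bool
    , notifyFinish :: Bool
    }

instance Monoid DesktopNotify where
    mempty = DesktopNotify False False
    mappend (DesktopNotify s1 f1) (DesktopNotify s2 f2) =
        DesktopNotify (s1 || s2) (f1 || f2)

mkNotifyStart :: DesktopNotify
mkNotifyStart = DesktopNotify True False

mkNotifyFinish :: DesktopNotify
mkNotifyFinish = DesktopNotify False True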
|
||||
|
||||
matcherOptions :: [Option]
|
||||
matcherOptions =
|
||||
|
|
|
@ -30,14 +30,15 @@ withFilesInGit :: (FilePath -> CommandStart) -> CommandSeek
|
|||
withFilesInGit a params = seekActions $ prepFiltered a $
|
||||
seekHelper LsFiles.inRepo params
|
||||
|
||||
withFilesNotInGit :: (FilePath -> CommandStart) -> CommandSeek
|
||||
withFilesNotInGit a params = do
|
||||
{- dotfiles are not acted on unless explicitly listed -}
|
||||
files <- filter (not . dotfile) <$>
|
||||
seekunless (null ps && not (null params)) ps
|
||||
dotfiles <- seekunless (null dotps) dotps
|
||||
seekActions $ prepFiltered a $
|
||||
return $ concat $ segmentPaths params (files++dotfiles)
|
||||
withFilesNotInGit :: Bool -> (FilePath -> CommandStart) -> CommandSeek
|
||||
withFilesNotInGit skipdotfiles a params
|
||||
| skipdotfiles = do
|
||||
{- dotfiles are not acted on unless explicitly listed -}
|
||||
files <- filter (not . dotfile) <$>
|
||||
seekunless (null ps && not (null params)) ps
|
||||
dotfiles <- seekunless (null dotps) dotps
|
||||
go (files++dotfiles)
|
||||
| otherwise = go =<< seekunless False params
|
||||
where
|
||||
(dotps, ps) = partition dotfile params
|
||||
seekunless True _ = return []
|
||||
|
@ -45,6 +46,8 @@ withFilesNotInGit a params = do
|
|||
force <- Annex.getState Annex.force
|
||||
g <- gitRepo
|
||||
liftIO $ Git.Command.leaveZombie <$> LsFiles.notInRepo force l g
|
||||
go l = seekActions $ prepFiltered a $
|
||||
return $ concat $ segmentPaths params l
|
||||
|
||||
withPathContents :: ((FilePath, FilePath) -> CommandStart) -> CommandSeek
|
||||
withPathContents a params = seekActions $
|
||||
|
|
|
@ -34,8 +34,12 @@ import Annex.ReplaceFile
|
|||
import Utility.Tmp
|
||||
|
||||
def :: [Command]
|
||||
def = [notBareRepo $ command "add" paramPaths seek SectionCommon
|
||||
"add files to annex"]
|
||||
def = [notBareRepo $ withOptions [includeDotFilesOption] $
|
||||
command "add" paramPaths seek SectionCommon
|
||||
"add files to annex"]
|
||||
|
||||
includeDotFilesOption :: Option
|
||||
includeDotFilesOption = flagOption [] "include-dotfiles" "don't skip dotfiles"
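flagOption itself is not shown here; given that the flag is read back with Annex.getFlag (optionName includeDotFilesOption), it is presumably a small helper along these lines (an assumption, not the confirmed definition):

flagOption :: String -> String -> String -> Option
flagOption shortflags name description =
    -- records a boolean flag in Annex state under the option's own name
    Option shortflags [name] (NoArg (Annex.setFlag name)) description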
|
||||
|
||||
{- Add acts on both files not checked into git yet, and unlocked files.
|
||||
-
|
||||
|
@ -47,7 +51,8 @@ seek ps = do
|
|||
( start file
|
||||
, stop
|
||||
)
|
||||
go withFilesNotInGit
|
||||
skipdotfiles <- not <$> Annex.getFlag (optionName includeDotFilesOption)
|
||||
go $ withFilesNotInGit skipdotfiles
|
||||
ifM isDirect
|
||||
( go withFilesMaybeModified
|
||||
, go withFilesUnlocked
|
||||
|
|
|
@ -26,7 +26,7 @@ import Types.KeySource
|
|||
import Config
|
||||
import Annex.Content.Direct
|
||||
import Logs.Location
|
||||
import qualified Logs.Transfer as Transfer
|
||||
import qualified Annex.Transfer as Transfer
|
||||
#ifdef WITH_QUVI
|
||||
import Annex.Quvi
|
||||
import qualified Utility.Quvi as Quvi
|
||||
|
@ -116,9 +116,10 @@ addUrlFileQuvi relaxed quviurl videourl file = do
|
|||
prepGetViaTmpChecked sizedkey $ do
|
||||
tmp <- fromRepo $ gitAnnexTmpObjectLocation key
|
||||
showOutput
|
||||
ok <- Transfer.download webUUID key (Just file) Transfer.forwardRetry $ const $ do
|
||||
liftIO $ createDirectoryIfMissing True (parentDir tmp)
|
||||
downloadUrl [videourl] tmp
|
||||
ok <- Transfer.notifyTransfer Transfer.Download (Just file) $
|
||||
Transfer.download webUUID key (Just file) Transfer.forwardRetry $ const $ do
|
||||
liftIO $ createDirectoryIfMissing True (parentDir tmp)
|
||||
downloadUrl [videourl] tmp
|
||||
if ok
|
||||
then cleanup quviurl file key (Just tmp)
|
||||
else return False
|
||||
|
@ -133,17 +134,20 @@ perform relaxed url file = ifAnnexed file addurl geturl
|
|||
| relaxed = do
|
||||
setUrlPresent key url
|
||||
next $ return True
|
||||
| otherwise = do
|
||||
(exists, samesize) <- Url.withUrlOptions $ Url.check url (keySize key)
|
||||
if exists && samesize
|
||||
then do
|
||||
setUrlPresent key url
|
||||
next $ return True
|
||||
else do
|
||||
warning $ if exists
|
||||
then "url does not have expected file size (use --relaxed to bypass this check) " ++ url
|
||||
else "failed to verify url exists: " ++ url
|
||||
stop
|
||||
| otherwise = ifM (elem url <$> getUrls key)
|
||||
( stop
|
||||
, do
|
||||
(exists, samesize) <- Url.withUrlOptions $ Url.check url (keySize key)
|
||||
if exists && samesize
|
||||
then do
|
||||
setUrlPresent key url
|
||||
next $ return True
|
||||
else do
|
||||
warning $ "while adding a new url to an already annexed file, " ++ if exists
|
||||
then "url does not have expected file size (use --relaxed to bypass this check) " ++ url
|
||||
else "failed to verify url exists: " ++ url
|
||||
stop
|
||||
)
|
||||
|
||||
addUrlFile :: Bool -> URLString -> FilePath -> Annex Bool
|
||||
addUrlFile relaxed url file = do
|
||||
|
@ -179,7 +183,7 @@ download url file = do
|
|||
, return False
|
||||
)
|
||||
where
|
||||
runtransfer dummykey tmp =
|
||||
runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
|
||||
Transfer.download webUUID dummykey (Just file) Transfer.forwardRetry $ const $ do
|
||||
liftIO $ createDirectoryIfMissing True (parentDir tmp)
|
||||
downloadUrl [url] tmp
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{- git-annex command
|
||||
-
|
||||
- Copyright 2010 Joey Hess <joey@kitenet.net>
|
||||
- Copyright 2010-2014 Joey Hess <joey@kitenet.net>
|
||||
-
|
||||
- Licensed under the GNU GPL version 3 or higher.
|
||||
-}
|
||||
|
@ -10,6 +10,8 @@ module Command.ConfigList where
|
|||
import Common.Annex
|
||||
import Command
|
||||
import Annex.UUID
|
||||
import Annex.Init
|
||||
import qualified Annex.Branch
|
||||
import qualified Git.Config
|
||||
import Remote.GCrypt (coreGCryptId)
|
||||
|
||||
|
@ -22,9 +24,23 @@ seek = withNothing start
|
|||
|
||||
start :: CommandStart
|
||||
start = do
|
||||
u <- getUUID
|
||||
u <- findOrGenUUID
|
||||
showConfig "annex.uuid" $ fromUUID u
|
||||
showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
|
||||
stop
|
||||
where
|
||||
showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
|
||||
|
||||
{- The repository may not yet have a UUID; automatically initialize it
|
||||
- when there's a git-annex branch available. -}
|
||||
findOrGenUUID :: Annex UUID
|
||||
findOrGenUUID = do
|
||||
u <- getUUID
|
||||
if u /= NoUUID
|
||||
then return u
|
||||
else ifM Annex.Branch.hasSibling
|
||||
( do
|
||||
initialize Nothing
|
||||
getUUID
|
||||
, return NoUUID
|
||||
)
|
||||
|
|
|
@ -14,9 +14,13 @@ import qualified Annex
|
|||
import Annex.UUID
|
||||
import Logs.Location
|
||||
import Logs.Trust
|
||||
import Logs.PreferredContent
|
||||
import Config.NumCopies
|
||||
import Annex.Content
|
||||
import Annex.Wanted
|
||||
import Annex.Notification
|
||||
|
||||
import qualified Data.Set as S
|
||||
|
||||
def :: [Command]
|
||||
def = [withOptions [dropFromOption] $ command "drop" paramPaths seek
|
||||
|
@ -44,27 +48,34 @@ start from file (key, _) = checkDropAuto from file key $ \numcopies ->
|
|||
startLocal :: AssociatedFile -> NumCopies -> Key -> Maybe Remote -> CommandStart
|
||||
startLocal afile numcopies key knownpresentremote = stopUnless (inAnnex key) $ do
|
||||
showStart' "drop" key afile
|
||||
next $ performLocal key numcopies knownpresentremote
|
||||
next $ performLocal key afile numcopies knownpresentremote
|
||||
|
||||
startRemote :: AssociatedFile -> NumCopies -> Key -> Remote -> CommandStart
|
||||
startRemote afile numcopies key remote = do
|
||||
showStart' ("drop " ++ Remote.name remote) key afile
|
||||
next $ performRemote key numcopies remote
|
||||
next $ performRemote key afile numcopies remote
|
||||
|
||||
performLocal :: Key -> NumCopies -> Maybe Remote -> CommandPerform
|
||||
performLocal key numcopies knownpresentremote = lockContent key $ do
|
||||
performLocal :: Key -> AssociatedFile -> NumCopies -> Maybe Remote -> CommandPerform
|
||||
performLocal key afile numcopies knownpresentremote = lockContent key $ do
|
||||
(remotes, trusteduuids) <- Remote.keyPossibilitiesTrusted key
|
||||
let trusteduuids' = case knownpresentremote of
|
||||
Nothing -> trusteduuids
|
||||
Just r -> nub (Remote.uuid r:trusteduuids)
|
||||
untrusteduuids <- trustGet UnTrusted
|
||||
let tocheck = Remote.remotesWithoutUUID remotes (trusteduuids'++untrusteduuids)
|
||||
stopUnless (canDropKey key numcopies trusteduuids' tocheck []) $ do
|
||||
removeAnnex key
|
||||
next $ cleanupLocal key
|
||||
u <- getUUID
|
||||
ifM (canDrop u key afile numcopies trusteduuids' tocheck [])
|
||||
( do
|
||||
removeAnnex key
|
||||
notifyDrop afile True
|
||||
next $ cleanupLocal key
|
||||
, do
|
||||
notifyDrop afile False
|
||||
stop
|
||||
)
|
||||
|
||||
performRemote :: Key -> NumCopies -> Remote -> CommandPerform
|
||||
performRemote key numcopies remote = lockContent key $ do
|
||||
performRemote :: Key -> AssociatedFile -> NumCopies -> Remote -> CommandPerform
|
||||
performRemote key afile numcopies remote = lockContent key $ do
|
||||
-- Filter the remote it's being dropped from out of the lists of
|
||||
-- places assumed to have the key, and places to check.
|
||||
-- When the local repo has the key, that's one additional copy.
|
||||
|
@ -76,7 +87,7 @@ performRemote key numcopies remote = lockContent key $ do
|
|||
untrusteduuids <- trustGet UnTrusted
|
||||
let tocheck = filter (/= remote) $
|
||||
Remote.remotesWithoutUUID remotes (have++untrusteduuids)
|
||||
stopUnless (canDropKey key numcopies have tocheck [uuid]) $ do
|
||||
stopUnless (canDrop uuid key afile numcopies have tocheck [uuid]) $ do
|
||||
ok <- Remote.removeKey remote key
|
||||
next $ cleanupRemote key remote ok
|
||||
where
|
||||
|
@ -95,13 +106,19 @@ cleanupRemote key remote ok = do
|
|||
|
||||
{- Checks specified remotes to verify that enough copies of a key exist to
|
||||
- allow it to be safely removed (with no data loss). Can be provided with
|
||||
- some locations where the key is known/assumed to be present. -}
|
||||
canDropKey :: Key -> NumCopies -> [UUID] -> [Remote] -> [UUID] -> Annex Bool
|
||||
canDropKey key numcopies have check skip = do
|
||||
force <- Annex.getState Annex.force
|
||||
if force || numcopies == NumCopies 0
|
||||
then return True
|
||||
else findCopies key numcopies skip have check
|
||||
- some locations where the key is known/assumed to be present.
|
||||
-
|
||||
- Also checks if it's required content, and refuses to drop if so.
|
||||
-
|
||||
- --force overrides and always allows dropping.
|
||||
-}
|
||||
canDrop :: UUID -> Key -> AssociatedFile -> NumCopies -> [UUID] -> [Remote] -> [UUID] -> Annex Bool
|
||||
canDrop dropfrom key afile numcopies have check skip = ifM (Annex.getState Annex.force)
|
||||
( return True
|
||||
, checkRequiredContent dropfrom key afile
|
||||
<&&>
|
||||
findCopies key numcopies skip have check
|
||||
)
|
||||
|
||||
findCopies :: Key -> NumCopies -> [UUID] -> [UUID] -> [Remote] -> Annex Bool
|
||||
findCopies key need skip = helper [] []
|
||||
|
@ -137,6 +154,19 @@ notEnoughCopies key need have skip bad = do
|
|||
unsafe = showNote "unsafe"
|
||||
hint = showLongNote "(Use --force to override this check, or adjust numcopies.)"
|
||||
|
||||
checkRequiredContent :: UUID -> Key -> AssociatedFile -> Annex Bool
|
||||
checkRequiredContent u k afile =
|
||||
ifM (isRequiredContent (Just u) S.empty (Just k) afile False)
|
||||
( requiredContent
|
||||
, return True
|
||||
)
|
||||
|
||||
requiredContent :: Annex Bool
|
||||
requiredContent = do
|
||||
showLongNote "That file is required content, it cannot be dropped!"
|
||||
showLongNote "(Use --force to override this check, or adjust required content configuration.)"
|
||||
return False
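A sketch of how a caller could combine these checks through the canDrop signature introduced above; the wrapper name is hypothetical:

-- True when the local repository may drop the key: not required content,
-- and enough other copies exist (no extra trusted or skipped lists here).
safeToDropHere :: Key -> AssociatedFile -> NumCopies -> Annex Bool
safeToDropHere key afile numcopies = do
    u <- getUUID
    canDrop u key afile numcopies [] [] []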
|
||||
|
||||
{- In auto mode, only runs the action if there are enough
|
||||
- copies on other semitrusted repositories. -}
|
||||
checkDropAuto :: Maybe Remote -> FilePath -> Key -> (NumCopies -> CommandStart) -> CommandStart
|
||||
|
|
|
@ -34,8 +34,8 @@ perform numcopies key = maybe droplocal dropremote =<< Remote.byNameWithUUID =<<
|
|||
where
|
||||
dropremote r = do
|
||||
showAction $ "from " ++ Remote.name r
|
||||
Command.Drop.performRemote key numcopies r
|
||||
droplocal = Command.Drop.performLocal key numcopies Nothing
|
||||
Command.Drop.performRemote key Nothing numcopies r
|
||||
droplocal = Command.Drop.performLocal key Nothing numcopies Nothing
|
||||
from = Annex.getField $ optionName Command.Drop.dropFromOption
|
||||
|
||||
performOther :: (Key -> Git.Repo -> FilePath) -> Key -> CommandPerform
|
||||
|
|
|
@ -11,7 +11,7 @@ import Common.Annex
|
|||
import Command
|
||||
import qualified Remote
|
||||
import Annex.Content
|
||||
import Logs.Transfer
|
||||
import Annex.Transfer
|
||||
import Config.NumCopies
|
||||
import Annex.Wanted
|
||||
import qualified Command.Move
|
||||
|
@ -69,15 +69,15 @@ getKeyFile' key afile dest = dispatch
|
|||
showNote "not available"
|
||||
showlocs
|
||||
return False
|
||||
dispatch remotes = trycopy remotes remotes
|
||||
trycopy full [] = do
|
||||
dispatch remotes = notifyTransfer Download afile $ trycopy remotes remotes
|
||||
trycopy full [] _ = do
|
||||
Remote.showTriedRemotes full
|
||||
showlocs
|
||||
return False
|
||||
trycopy full (r:rs) =
|
||||
trycopy full (r:rs) witness =
|
||||
ifM (probablyPresent r)
|
||||
( docopy r (trycopy full rs)
|
||||
, trycopy full rs
|
||||
( docopy r witness <||> trycopy full rs witness
|
||||
, trycopy full rs witness
|
||||
)
|
||||
showlocs = Remote.showLocations key []
|
||||
"No other repository is known to contain the file."
|
||||
|
@ -87,8 +87,6 @@ getKeyFile' key afile dest = dispatch
|
|||
| Remote.hasKeyCheap r =
|
||||
either (const False) id <$> Remote.hasKey r key
|
||||
| otherwise = return True
|
||||
docopy r continue = do
|
||||
ok <- download (Remote.uuid r) key afile noRetry $ \p -> do
|
||||
showAction $ "from " ++ Remote.name r
|
||||
Remote.retrieveKeyFile r key afile dest p
|
||||
if ok then return ok else continue
|
||||
docopy r = download (Remote.uuid r) key afile noRetry $ \p -> do
|
||||
showAction $ "from " ++ Remote.name r
|
||||
Remote.retrieveKeyFile r key afile dest p
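The <||> used above to chain docopy with the remaining trycopy attempts is a short-circuiting monadic "or"; it is not defined in this excerpt, but its assumed shape is roughly:

(<||>) :: Monad m => m Bool -> m Bool -> m Bool
a <||> b = do
    -- only falls through to the second action when the first reports failure
    r <- a
    if r then return True else b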
|
||||
|
|
|
@ -38,7 +38,7 @@ seek ps = do
|
|||
|
||||
getList :: Annex [(UUID, RemoteName, TrustLevel)]
|
||||
getList = ifM (Annex.getFlag $ optionName allrepos)
|
||||
( nubBy ((==) `on` fst3) <$> ((++) <$> getRemotes <*> getAll)
|
||||
( nubBy ((==) `on` fst3) <$> ((++) <$> getRemotes <*> getAllUUIDs)
|
||||
, getRemotes
|
||||
)
|
||||
where
|
||||
|
@ -48,7 +48,7 @@ getList = ifM (Annex.getFlag $ optionName allrepos)
|
|||
hereu <- getUUID
|
||||
heretrust <- lookupTrust hereu
|
||||
return $ (hereu, "here", heretrust) : zip3 (map uuid rs) (map name rs) ts
|
||||
getAll = do
|
||||
getAllUUIDs = do
|
||||
rs <- M.toList <$> uuidMap
|
||||
rs3 <- forM rs $ \(u, n) -> (,,)
|
||||
<$> pure u
|
||||
|
|
|
@ -14,8 +14,8 @@ import qualified Annex
|
|||
import Annex.Content
|
||||
import qualified Remote
|
||||
import Annex.UUID
|
||||
import Annex.Transfer
|
||||
import Logs.Presence
|
||||
import Logs.Transfer
|
||||
|
||||
def :: [Command]
|
||||
def = [withOptions moveOptions $ command "move" paramPaths seek
|
||||
|
@ -98,8 +98,9 @@ toPerform dest move key afile fastcheck isthere = moveLock move key $
|
|||
stop
|
||||
Right False -> do
|
||||
showAction $ "to " ++ Remote.name dest
|
||||
ok <- upload (Remote.uuid dest) key afile noRetry $
|
||||
Remote.storeKey dest key afile
|
||||
ok <- notifyTransfer Upload afile $
|
||||
upload (Remote.uuid dest) key afile noRetry $
|
||||
Remote.storeKey dest key afile
|
||||
if ok
|
||||
then do
|
||||
Remote.logStatus dest key InfoPresent
|
||||
|
@ -155,9 +156,10 @@ fromPerform src move key afile = moveLock move key $
|
|||
, handle move =<< go
|
||||
)
|
||||
where
|
||||
go = download (Remote.uuid src) key afile noRetry $ \p -> do
|
||||
showAction $ "from " ++ Remote.name src
|
||||
getViaTmp key $ \t -> Remote.retrieveKeyFile src key afile t p
|
||||
go = notifyTransfer Download afile $
|
||||
download (Remote.uuid src) key afile noRetry $ \p -> do
|
||||
showAction $ "from " ++ Remote.name src
|
||||
getViaTmp key $ \t -> Remote.retrieveKeyFile src key afile t p
|
||||
handle _ False = stop -- failed
|
||||
handle False True = next $ return True -- copy complete
|
||||
handle True True = do -- finish moving
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
- Licensed under the GNU GPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE CPP #-}
|
||||
|
||||
module Command.PreCommit where
|
||||
|
||||
import Common.Annex
|
||||
|
@ -16,11 +18,17 @@ import Annex.Direct
|
|||
import Annex.Hook
|
||||
import Annex.View
|
||||
import Annex.View.ViewedFile
|
||||
import Annex.Perms
|
||||
import Annex.Exception
|
||||
import Logs.View
|
||||
import Logs.MetaData
|
||||
import Types.View
|
||||
import Types.MetaData
|
||||
|
||||
#ifdef mingw32_HOST_OS
|
||||
import Utility.WinLock
|
||||
#endif
|
||||
|
||||
import qualified Data.Set as S
|
||||
|
||||
def :: [Command]
|
||||
|
@ -28,7 +36,7 @@ def = [command "pre-commit" paramPaths seek SectionPlumbing
|
|||
"run by git pre-commit hook"]
|
||||
|
||||
seek :: CommandSeek
|
||||
seek ps = ifM isDirect
|
||||
seek ps = lockPreCommitHook $ ifM isDirect
|
||||
( do
|
||||
-- update direct mode mappings for committed files
|
||||
withWords startDirect ps
|
||||
|
@@ -82,3 +90,22 @@ showMetaDataChange = showLongNote . unlines . concatMap showmeta . fromMetaData
    showset v
        | isSet v = "+"
        | otherwise = "-"

{- Takes exclusive lock; blocks until available. -}
lockPreCommitHook :: Annex a -> Annex a
lockPreCommitHook a = do
    lockfile <- fromRepo gitAnnexPreCommitLock
    createAnnexDirectory $ takeDirectory lockfile
    mode <- annexFileMode
    bracketIO (lock lockfile mode) unlock (const a)
  where
#ifndef mingw32_HOST_OS
    lock lockfile mode = do
        l <- liftIO $ noUmask mode $ createFile lockfile mode
        liftIO $ waitToSetLock l (WriteLock, AbsoluteSeek, 0, 0)
        return l
    unlock = closeFd
#else
    lock lockfile _mode = liftIO $ waitToLock $ lockExclusive lockfile
    unlock = dropLock
#endif
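Other code paths that rewrite the index could serialize against the pre-commit hook in the same way; a hypothetical example:

refreshIndexSafely :: Annex ()
refreshIndexSafely = lockPreCommitHook $
    -- holds the pre-commit lock for the duration of the git call
    inRepo $ Git.Command.run [ Param "update-index", Param "--refresh" ]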
|
|
|
@ -12,7 +12,7 @@ import Command
|
|||
import Annex.Content
|
||||
import Annex
|
||||
import Utility.Rsync
|
||||
import Logs.Transfer
|
||||
import Annex.Transfer
|
||||
import qualified CmdLine.GitAnnexShell.Fields as Fields
|
||||
import Utility.Metered
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ import Common.Annex
|
|||
import Command
|
||||
import Annex.Content
|
||||
import Logs.Location
|
||||
import Logs.Transfer
|
||||
import Annex.Transfer
|
||||
import qualified Remote
|
||||
import Types.Remote
|
||||
|
||||
|
@ -41,7 +41,7 @@ start to from file key =
|
|||
_ -> error "specify either --from or --to"
|
||||
|
||||
toPerform :: Remote -> Key -> AssociatedFile -> CommandPerform
|
||||
toPerform remote key file = go $
|
||||
toPerform remote key file = go Upload file $
|
||||
upload (uuid remote) key file forwardRetry $ \p -> do
|
||||
ok <- Remote.storeKey remote key file p
|
||||
when ok $
|
||||
|
@ -49,9 +49,9 @@ toPerform remote key file = go $
|
|||
return ok
|
||||
|
||||
fromPerform :: Remote -> Key -> AssociatedFile -> CommandPerform
|
||||
fromPerform remote key file = go $
|
||||
fromPerform remote key file = go Upload file $
|
||||
download (uuid remote) key file forwardRetry $ \p ->
|
||||
getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p
|
||||
|
||||
go :: Annex Bool -> CommandPerform
|
||||
go a = a >>= liftIO . exitBool
|
||||
go :: Direction -> AssociatedFile -> (NotifyWitness -> Annex Bool) -> CommandPerform
|
||||
go direction file a = notifyTransfer direction file a >>= liftIO . exitBool
|
||||
|
|
|
@ -13,7 +13,7 @@ import Common.Annex
|
|||
import Command
|
||||
import Annex.Content
|
||||
import Logs.Location
|
||||
import Logs.Transfer
|
||||
import Annex.Transfer
|
||||
import qualified Remote
|
||||
import Types.Key
|
||||
|
||||
|
@ -34,14 +34,15 @@ start = withHandles $ \(readh, writeh) -> do
|
|||
stop
|
||||
where
|
||||
runner (TransferRequest direction remote key file)
|
||||
| direction == Upload =
|
||||
| direction == Upload = notifyTransfer direction file $
|
||||
upload (Remote.uuid remote) key file forwardRetry $ \p -> do
|
||||
ok <- Remote.storeKey remote key file p
|
||||
when ok $
|
||||
Remote.logStatus remote key InfoPresent
|
||||
return ok
|
||||
| otherwise = download (Remote.uuid remote) key file forwardRetry $ \p ->
|
||||
getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p
|
||||
| otherwise = notifyTransfer direction file $
|
||||
download (Remote.uuid remote) key file forwardRetry $ \p ->
|
||||
getViaTmp key $ \t -> Remote.retrieveKeyFile remote key file t p
|
||||
|
||||
{- stdin and stdout are connected with the caller, to be used for
|
||||
- communication with it. But doing a transfer might involve something
|
||||
|
|
|
@ -16,15 +16,47 @@ import qualified Annex
|
|||
import Annex.Content
|
||||
import Annex.Content.Direct
|
||||
import qualified Git.Command
|
||||
import qualified Git.LsFiles as LsFiles
|
||||
import qualified Git.Ref
|
||||
import qualified Git.DiffTree as DiffTree
|
||||
import Utility.CopyFile
|
||||
import Command.PreCommit (lockPreCommitHook)
|
||||
|
||||
def :: [Command]
|
||||
def = [command "unannex" paramPaths seek SectionUtility
|
||||
"undo accidential add command"]
|
||||
|
||||
seek :: CommandSeek
|
||||
seek = withFilesInGit $ whenAnnexed start
|
||||
seek = wrapUnannex . (withFilesInGit $ whenAnnexed start)
|
||||
|
||||
wrapUnannex :: Annex a -> Annex a
|
||||
wrapUnannex a = ifM isDirect
|
||||
( a
|
||||
{- Run with the pre-commit hook disabled, to avoid confusing
|
||||
- behavior if an unannexed file is added back to git as
|
||||
- a normal, non-annexed file and then committed.
|
||||
- Otherwise, the pre-commit hook would think that the file
|
||||
- has been unlocked and needs to be re-annexed.
|
||||
-
|
||||
- At the end, make a commit removing the unannexed files.
|
||||
-}
|
||||
, ifM cleanindex
|
||||
( lockPreCommitHook $ commit `after` a
|
||||
, error "Cannot proceed with uncommitted changes staged in the index. Recommend you: git commit"
|
||||
)
|
||||
)
|
||||
where
|
||||
commit = inRepo $ Git.Command.run
|
||||
[ Param "commit"
|
||||
, Param "-q"
|
||||
, Param "--allow-empty"
|
||||
, Param "--no-verify"
|
||||
, Param "-m", Param "content removed from git annex"
|
||||
]
|
||||
cleanindex = do
|
||||
(diff, cleanup) <- inRepo $ DiffTree.diffIndex Git.Ref.headRef
|
||||
if null diff
|
||||
then void (liftIO cleanup) >> return True
|
||||
else void (liftIO cleanup) >> return False
|
||||
|
||||
start :: FilePath -> (Key, Backend) -> CommandStart
|
||||
start file (key, _) = stopUnless (inAnnex key) $ do
|
||||
|
@ -36,26 +68,7 @@ start file (key, _) = stopUnless (inAnnex key) $ do
|
|||
performIndirect :: FilePath -> Key -> CommandPerform
|
||||
performIndirect file key = do
|
||||
liftIO $ removeFile file
|
||||
|
||||
-- git rm deletes empty directory without --cached
|
||||
inRepo $ Git.Command.run [Params "rm --cached --force --quiet --", File file]
|
||||
|
||||
-- If the file was already committed, it is now staged for removal.
|
||||
-- Commit that removal now, to avoid later confusing the
|
||||
-- pre-commit hook, if this file is later added back to
|
||||
-- git as a normal non-annexed file, to thinking that the
|
||||
-- file has been unlocked and needs to be re-annexed.
|
||||
(s, reap) <- inRepo $ LsFiles.staged [file]
|
||||
unless (null s) $
|
||||
inRepo $ Git.Command.run
|
||||
[ Param "commit"
|
||||
, Param "-q"
|
||||
, Param "--no-verify"
|
||||
, Param "-m", Param "content removed from git annex"
|
||||
, Param "--", File file
|
||||
]
|
||||
void $ liftIO reap
|
||||
|
||||
next $ cleanupIndirect file key
|
||||
|
||||
cleanupIndirect :: FilePath -> Key -> CommandCleanup
|
||||
|
|
|
@ -36,7 +36,7 @@ check = do
|
|||
|
||||
seek :: CommandSeek
|
||||
seek ps = do
|
||||
withFilesNotInGit (whenAnnexed startCheckIncomplete) ps
|
||||
withFilesNotInGit False (whenAnnexed startCheckIncomplete) ps
|
||||
withFilesInGit (whenAnnexed Command.Unannex.start) ps
|
||||
finish
|
||||
|
||||
|
|
|
@ -61,6 +61,7 @@ data Cfg = Cfg
|
|||
{ cfgTrustMap :: TrustMap
|
||||
, cfgGroupMap :: M.Map UUID (S.Set Group)
|
||||
, cfgPreferredContentMap :: M.Map UUID PreferredContentExpression
|
||||
, cfgRequiredContentMap :: M.Map UUID PreferredContentExpression
|
||||
, cfgGroupPreferredContentMap :: M.Map Group PreferredContentExpression
|
||||
, cfgScheduleMap :: M.Map UUID [ScheduledActivity]
|
||||
}
|
||||
|
@ -70,6 +71,7 @@ getCfg = Cfg
|
|||
<$> trustMapRaw -- without local trust overrides
|
||||
<*> (groupsByUUID <$> groupMap)
|
||||
<*> preferredContentMapRaw
|
||||
<*> requiredContentMapRaw
|
||||
<*> groupPreferredContentMapRaw
|
||||
<*> scheduleMap
|
||||
|
||||
|
@ -79,6 +81,7 @@ setCfg curcfg newcfg = do
|
|||
mapM_ (uncurry trustSet) $ M.toList $ cfgTrustMap diff
|
||||
mapM_ (uncurry groupSet) $ M.toList $ cfgGroupMap diff
|
||||
mapM_ (uncurry preferredContentSet) $ M.toList $ cfgPreferredContentMap diff
|
||||
mapM_ (uncurry requiredContentSet) $ M.toList $ cfgRequiredContentMap diff
|
||||
mapM_ (uncurry groupPreferredContentSet) $ M.toList $ cfgGroupPreferredContentMap diff
|
||||
mapM_ (uncurry scheduleSet) $ M.toList $ cfgScheduleMap diff
|
||||
|
||||
|
@ -87,6 +90,7 @@ diffCfg curcfg newcfg = Cfg
|
|||
{ cfgTrustMap = diff cfgTrustMap
|
||||
, cfgGroupMap = diff cfgGroupMap
|
||||
, cfgPreferredContentMap = diff cfgPreferredContentMap
|
||||
, cfgRequiredContentMap = diff cfgRequiredContentMap
|
||||
, cfgGroupPreferredContentMap = diff cfgGroupPreferredContentMap
|
||||
, cfgScheduleMap = diff cfgScheduleMap
|
||||
}
|
||||
|
@ -102,6 +106,7 @@ genCfg cfg descs = unlines $ intercalate [""]
|
|||
, preferredcontent
|
||||
, grouppreferredcontent
|
||||
, standardgroups
|
||||
, requiredcontent
|
||||
, schedule
|
||||
]
|
||||
where
|
||||
|
@ -137,6 +142,11 @@ genCfg cfg descs = unlines $ intercalate [""]
|
|||
[ com "Repository preferred contents" ]
|
||||
(\(s, u) -> line "wanted" u s)
|
||||
(\u -> line "wanted" u "standard")
|
||||
|
||||
requiredcontent = settings cfg descs cfgRequiredContentMap
|
||||
[ com "Repository required contents" ]
|
||||
(\(s, u) -> line "required" u s)
|
||||
(\u -> line "required" u "")
|
||||
|
||||
grouppreferredcontent = settings' cfg allgroups cfgGroupPreferredContentMap
|
||||
[ com "Group preferred contents"
|
||||
|
@ -228,6 +238,12 @@ parseCfg curcfg = go [] curcfg . lines
|
|||
Nothing ->
|
||||
let m = M.insert u value (cfgPreferredContentMap cfg)
|
||||
in Right $ cfg { cfgPreferredContentMap = m }
|
||||
| setting == "required" =
|
||||
case checkPreferredContentExpression value of
|
||||
Just e -> Left e
|
||||
Nothing ->
|
||||
let m = M.insert u value (cfgRequiredContentMap cfg)
|
||||
in Right $ cfg { cfgRequiredContentMap = m }
|
||||
| setting == "groupwanted" =
|
||||
case checkPreferredContentExpression value of
|
||||
Just e -> Left e
|
||||
|
@ -255,7 +271,6 @@ parseCfg curcfg = go [] curcfg . lines
|
|||
[ com "** There was a problem parsing your input!"
|
||||
, com "** Search for \"Parse error\" to find the bad lines."
|
||||
, com "** Either fix the bad lines, or delete them (to discard your changes)."
|
||||
, ""
|
||||
]
|
||||
parseerr = com "** Parse error in next line: "
|
||||
|
||||
|
|
|
@ -11,6 +11,7 @@ import Control.Exception.Extensible as X (IOException)
|
|||
import Data.Maybe as X
|
||||
import Data.List as X hiding (head, tail, init, last)
|
||||
import Data.String.Utils as X hiding (join)
|
||||
import Data.Monoid as X
|
||||
|
||||
import System.FilePath as X
|
||||
import System.Directory as X
|
||||
|
|
Limit.hs: 46 lines changed
@ -20,7 +20,6 @@ import Types.TrustLevel
|
|||
import Types.Key
|
||||
import Types.Group
|
||||
import Types.FileMatcher
|
||||
import Types.Limit
|
||||
import Types.MetaData
|
||||
import Logs.MetaData
|
||||
import Logs.Group
|
||||
|
@@ -45,21 +44,20 @@ getMatcher :: Annex (MatchInfo -> Annex Bool)
getMatcher = Utility.Matcher.matchM <$> getMatcher'

getMatcher' :: Annex (Utility.Matcher.Matcher (MatchInfo -> Annex Bool))
getMatcher' = do
    m <- Annex.getState Annex.limit
    case m of
        Right r -> return r
        Left l -> do
            let matcher = Utility.Matcher.generate (reverse l)
            Annex.changeState $ \s ->
                s { Annex.limit = Right matcher }
            return matcher
getMatcher' = go =<< Annex.getState Annex.limit
  where
    go (CompleteMatcher matcher) = return matcher
    go (BuildingMatcher l) = do
        let matcher = Utility.Matcher.generate (reverse l)
        Annex.changeState $ \s ->
            s { Annex.limit = CompleteMatcher matcher }
        return matcher

{- Adds something to the limit list, which is built up reversed. -}
add :: Utility.Matcher.Token (MatchInfo -> Annex Bool) -> Annex ()
add l = Annex.changeState $ \s -> s { Annex.limit = prepend $ Annex.limit s }
  where
    prepend (Left ls) = Left $ l:ls
    prepend (BuildingMatcher ls) = BuildingMatcher $ l:ls
    prepend _ = error "internal"

{- Adds a new token. -}
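A sketch of how the compiled matcher is consumed, assuming the MatchingFile and FileInfo shapes used elsewhere in this diff:

fileMatchesLimits :: FilePath -> Annex Bool
fileMatchesLimits file = do
    -- getMatcher compiles (and caches) the accumulated tokens on first use
    matcher <- getMatcher
    matcher $ MatchingFile $ FileInfo
        { matchFile = file
        , relFile = file
        }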
|
||||
|
@ -67,21 +65,21 @@ addToken :: String -> Annex ()
|
|||
addToken = add . Utility.Matcher.token
|
||||
|
||||
{- Adds a new limit. -}
|
||||
addLimit :: Either String MatchFiles -> Annex ()
|
||||
addLimit :: Either String (MatchFiles Annex) -> Annex ()
|
||||
addLimit = either error (\l -> add $ Utility.Matcher.Operation $ l S.empty)
|
||||
|
||||
{- Add a limit to skip files that do not match the glob. -}
|
||||
addInclude :: String -> Annex ()
|
||||
addInclude = addLimit . limitInclude
|
||||
|
||||
limitInclude :: MkLimit
|
||||
limitInclude :: MkLimit Annex
|
||||
limitInclude glob = Right $ const $ return . matchGlobFile glob
|
||||
|
||||
{- Add a limit to skip files that match the glob. -}
|
||||
addExclude :: String -> Annex ()
|
||||
addExclude = addLimit . limitExclude
|
||||
|
||||
limitExclude :: MkLimit
|
||||
limitExclude :: MkLimit Annex
|
||||
limitExclude glob = Right $ const $ return . not . matchGlobFile glob
|
||||
|
||||
matchGlobFile :: String -> (MatchInfo -> Bool)
|
||||
|
@ -119,10 +117,10 @@ addIn s = addLimit =<< mk
|
|||
else inAnnex key
|
||||
|
||||
{- Limit to content that is currently present on a uuid. -}
|
||||
limitPresent :: Maybe UUID -> MkLimit
|
||||
limitPresent :: Maybe UUID -> MkLimit Annex
|
||||
limitPresent u _ = Right $ matchPresent u
|
||||
|
||||
matchPresent :: Maybe UUID -> MatchFiles
|
||||
matchPresent :: Maybe UUID -> MatchFiles Annex
|
||||
matchPresent u _ = checkKey $ \key -> do
|
||||
hereu <- getUUID
|
||||
if u == Just hereu || isNothing u
|
||||
|
@ -132,7 +130,7 @@ matchPresent u _ = checkKey $ \key -> do
|
|||
return $ maybe False (`elem` us) u
|
||||
|
||||
{- Limit to content that is in a directory, anywhere in the repository tree -}
|
||||
limitInDir :: FilePath -> MkLimit
|
||||
limitInDir :: FilePath -> MkLimit Annex
|
||||
limitInDir dir = const $ Right $ const go
|
||||
where
|
||||
go (MatchingFile fi) = return $ elem dir $ splitPath $ takeDirectory $ matchFile fi
|
||||
|
@ -143,7 +141,7 @@ limitInDir dir = const $ Right $ const go
|
|||
addCopies :: String -> Annex ()
|
||||
addCopies = addLimit . limitCopies
|
||||
|
||||
limitCopies :: MkLimit
|
||||
limitCopies :: MkLimit Annex
|
||||
limitCopies want = case split ":" want of
|
||||
[v, n] -> case parsetrustspec v of
|
||||
Just checker -> go n $ checktrust checker
|
||||
|
@ -169,7 +167,7 @@ limitCopies want = case split ":" want of
|
|||
addLackingCopies :: Bool -> String -> Annex ()
|
||||
addLackingCopies approx = addLimit . limitLackingCopies approx
|
||||
|
||||
limitLackingCopies :: Bool -> MkLimit
|
||||
limitLackingCopies :: Bool -> MkLimit Annex
|
||||
limitLackingCopies approx want = case readish want of
|
||||
Just needed -> Right $ \notpresent mi -> flip checkKey mi $
|
||||
handle mi needed notpresent
|
||||
|
@ -191,7 +189,7 @@ limitLackingCopies approx want = case readish want of
|
|||
- This has a nice optimisation: When a file exists,
|
||||
- its key is obviously not unused.
|
||||
-}
|
||||
limitUnused :: MatchFiles
|
||||
limitUnused :: MatchFiles Annex
|
||||
limitUnused _ (MatchingFile _) = return False
|
||||
limitUnused _ (MatchingKey k) = S.member k <$> unusedKeys
|
||||
|
||||
|
@ -202,7 +200,7 @@ addInAllGroup groupname = do
|
|||
m <- groupMap
|
||||
addLimit $ limitInAllGroup m groupname
|
||||
|
||||
limitInAllGroup :: GroupMap -> MkLimit
|
||||
limitInAllGroup :: GroupMap -> MkLimit Annex
|
||||
limitInAllGroup m groupname
|
||||
| S.null want = Right $ const $ const $ return True
|
||||
| otherwise = Right $ \notpresent -> checkKey $ check notpresent
|
||||
|
@ -219,7 +217,7 @@ limitInAllGroup m groupname
|
|||
addInBackend :: String -> Annex ()
|
||||
addInBackend = addLimit . limitInBackend
|
||||
|
||||
limitInBackend :: MkLimit
|
||||
limitInBackend :: MkLimit Annex
|
||||
limitInBackend name = Right $ const $ checkKey check
|
||||
where
|
||||
check key = pure $ keyBackendName key == name
|
||||
|
@ -231,7 +229,7 @@ addLargerThan = addLimit . limitSize (>)
|
|||
addSmallerThan :: String -> Annex ()
|
||||
addSmallerThan = addLimit . limitSize (<)
|
||||
|
||||
limitSize :: (Maybe Integer -> Maybe Integer -> Bool) -> MkLimit
|
||||
limitSize :: (Maybe Integer -> Maybe Integer -> Bool) -> MkLimit Annex
|
||||
limitSize vs s = case readSize dataUnits s of
|
||||
Nothing -> Left "bad size"
|
||||
Just sz -> Right $ go sz
|
||||
|
@ -249,7 +247,7 @@ limitSize vs s = case readSize dataUnits s of
|
|||
addMetaData :: String -> Annex ()
|
||||
addMetaData = addLimit . limitMetaData
|
||||
|
||||
limitMetaData :: MkLimit
|
||||
limitMetaData :: MkLimit Annex
|
||||
limitMetaData s = case parseMetaData s of
|
||||
Left e -> Left e
|
||||
Right (f, v) ->
|
||||
|
|
|
@ -41,6 +41,7 @@ module Locations (
|
|||
gitAnnexMergeDir,
|
||||
gitAnnexJournalDir,
|
||||
gitAnnexJournalLock,
|
||||
gitAnnexPreCommitLock,
|
||||
gitAnnexIndex,
|
||||
gitAnnexIndexStatus,
|
||||
gitAnnexViewIndex,
|
||||
|
@ -257,6 +258,10 @@ gitAnnexJournalDir r = addTrailingPathSeparator $ gitAnnexDir r </> "journal"
|
|||
gitAnnexJournalLock :: Git.Repo -> FilePath
|
||||
gitAnnexJournalLock r = gitAnnexDir r </> "journal.lck"
|
||||
|
||||
{- Lock file for the pre-commit hook. -}
|
||||
gitAnnexPreCommitLock :: Git.Repo -> FilePath
|
||||
gitAnnexPreCommitLock r = gitAnnexDir r </> "precommit.lck"
|
||||
|
||||
{- .git/annex/index is used to stage changes to the git-annex branch -}
|
||||
gitAnnexIndex :: Git.Repo -> FilePath
|
||||
gitAnnexIndex r = gitAnnexDir r </> "index"
|
||||
|
|
Logs.hs: 4 lines changed
@ -35,6 +35,7 @@ topLevelUUIDBasedLogs =
|
|||
, trustLog
|
||||
, groupLog
|
||||
, preferredContentLog
|
||||
, requiredContentLog
|
||||
, scheduleLog
|
||||
]
|
||||
|
||||
|
@ -70,6 +71,9 @@ groupLog = "group.log"
|
|||
preferredContentLog :: FilePath
|
||||
preferredContentLog = "preferred-content.log"
|
||||
|
||||
requiredContentLog :: FilePath
|
||||
requiredContentLog = "required-content.log"
|
||||
|
||||
groupPreferredContentLog :: FilePath
|
||||
groupPreferredContentLog = "group-preferred-content.log"
|
||||
|
||||
|
|
|

@ -6,16 +6,19 @@
-}

module Logs.PreferredContent (
preferredContentLog,
preferredContentSet,
requiredContentSet,
groupPreferredContentSet,
isPreferredContent,
isRequiredContent,
preferredContentMap,
preferredContentMapLoad,
preferredContentMapRaw,
requiredContentMap,
requiredContentMapRaw,
groupPreferredContentMapRaw,
checkPreferredContentExpression,
setStandardGroup,
preferredRequiredMapsLoad,
) where

import qualified Data.Map as M

@ -28,43 +31,57 @@ import qualified Annex.Branch
import qualified Annex
import Logs
import Logs.UUIDBased
import qualified Utility.Matcher
import Utility.Matcher hiding (tokens)
import Annex.FileMatcher
import Annex.UUID
import Types.Limit
import Types.Group
import Types.Remote (RemoteConfig)
import Logs.Group
import Logs.Remote
import Types.FileMatcher
import Types.StandardGroups
import Limit

{- Checks if a file is preferred content for the specified repository
- (or the current repository if none is specified). -}
isPreferredContent :: Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
isPreferredContent mu notpresent mkey afile def = do
isPreferredContent = checkMap preferredContentMap

isRequiredContent :: Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
isRequiredContent = checkMap requiredContentMap

checkMap :: Annex (FileMatcherMap Annex) -> Maybe UUID -> AssumeNotPresent -> Maybe Key -> AssociatedFile -> Bool -> Annex Bool
checkMap getmap mu notpresent mkey afile def = do
u <- maybe getUUID return mu
m <- preferredContentMap
m <- getmap
case M.lookup u m of
Nothing -> return def
Just matcher -> checkMatcher matcher mkey afile notpresent def

{- The map is cached for speed. -}
preferredContentMap :: Annex Annex.PreferredContentMap
preferredContentMap = maybe preferredContentMapLoad return
preferredContentMap :: Annex (FileMatcherMap Annex)
preferredContentMap = maybe (fst <$> preferredRequiredMapsLoad) return
=<< Annex.getState Annex.preferredcontentmap

{- Loads the map, updating the cache. -}
preferredContentMapLoad :: Annex Annex.PreferredContentMap
preferredContentMapLoad = do
requiredContentMap :: Annex (FileMatcherMap Annex)
requiredContentMap = maybe (snd <$> preferredRequiredMapsLoad) return
=<< Annex.getState Annex.requiredcontentmap

preferredRequiredMapsLoad :: Annex (FileMatcherMap Annex, FileMatcherMap Annex)
preferredRequiredMapsLoad = do
groupmap <- groupMap
configmap <- readRemoteLog
groupwantedmap <- groupPreferredContentMapRaw
m <- simpleMap
. parseLogWithUUID ((Just .) . makeMatcher groupmap configmap groupwantedmap)
<$> Annex.Branch.get preferredContentLog
Annex.changeState $ \s -> s { Annex.preferredcontentmap = Just m }
return m
let genmap l gm = simpleMap
. parseLogWithUUID ((Just .) . makeMatcher groupmap configmap gm)
<$> Annex.Branch.get l
pc <- genmap preferredContentLog =<< groupPreferredContentMapRaw
rc <- genmap requiredContentLog M.empty
-- Required content is implicitly also preferred content, so OR
let m = M.unionWith MOr pc rc
Annex.changeState $ \s -> s
{ Annex.preferredcontentmap = Just m
, Annex.requiredcontentmap = Just rc
}
return (m, rc)
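
Since isPreferredContent and isRequiredContent now share checkMap, a caller only chooses which cached map to consult. A minimal usage sketch (hypothetical call sites, not part of the patch), passing an empty set of assumed-not-present repositories and a default for when no expression is configured:

[[!format haskell """
import qualified Data.Set as S

-- Hypothetical: would the current repository want this file's content?
wantsContentHere :: Maybe Key -> AssociatedFile -> Annex Bool
wantsContentHere mkey afile = isPreferredContent Nothing S.empty mkey afile True

-- Hypothetical: is the current repository required to keep it?
mustKeepContentHere :: Maybe Key -> AssociatedFile -> Annex Bool
mustKeepContentHere mkey afile = isRequiredContent Nothing S.empty mkey afile False
"""]]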

{- This intentionally never fails, even on unparsable expressions,
- because the configuration is shared among repositories and newer

@ -75,11 +92,11 @@ makeMatcher
-> M.Map Group PreferredContentExpression
-> UUID
-> PreferredContentExpression
-> FileMatcher
-> FileMatcher Annex
makeMatcher groupmap configmap groupwantedmap u = go True True
where
go expandstandard expandgroupwanted expr
| null (lefts tokens) = Utility.Matcher.generate $ rights tokens
| null (lefts tokens) = generate $ rights tokens
| otherwise = unknownMatcher u
where
tokens = exprParser matchstandard matchgroupwanted groupmap configmap (Just u) expr

@ -102,10 +119,10 @@ makeMatcher groupmap configmap groupwantedmap u = go True True
-
- This avoids unwanted/expensive changes to the content, until the problem
- is resolved. -}
unknownMatcher :: UUID -> FileMatcher
unknownMatcher u = Utility.Matcher.generate [present]
unknownMatcher :: UUID -> FileMatcher Annex
unknownMatcher u = generate [present]
where
present = Utility.Matcher.Operation $ matchPresent (Just u)
present = Operation $ matchPresent (Just u)

{- Checks if an expression can be parsed; if not, returns Just the error. -}
checkPreferredContentExpression :: PreferredContentExpression -> Maybe String
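
checkPreferredContentExpression gives callers a cheap way to validate an expression before it is written to the log. A hedged sketch of how a command might use it (hypothetical helper, not part of the patch):

[[!format haskell """
-- Hypothetical validation step before storing a new expression.
validateExpression :: PreferredContentExpression -> Annex ()
validateExpression expr = case checkPreferredContentExpression expr of
    Nothing -> return ()
    Just err -> error $ "bad preferred content expression: " ++ err
"""]]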

@ -21,14 +21,23 @@ import Types.Group

{- Changes the preferred content configuration of a remote. -}
preferredContentSet :: UUID -> PreferredContentExpression -> Annex ()
preferredContentSet uuid@(UUID _) val = do
preferredContentSet = setLog preferredContentLog

requiredContentSet :: UUID -> PreferredContentExpression -> Annex ()
requiredContentSet = setLog requiredContentLog

setLog :: FilePath -> UUID -> PreferredContentExpression -> Annex ()
setLog logfile uuid@(UUID _) val = do
ts <- liftIO getPOSIXTime
Annex.Branch.change preferredContentLog $
Annex.Branch.change logfile $
showLog id
. changeLog ts uuid val
. parseLog Just
Annex.changeState $ \s -> s { Annex.preferredcontentmap = Nothing }
preferredContentSet NoUUID _ = error "unknown UUID; cannot modify"
Annex.changeState $ \s -> s
{ Annex.preferredcontentmap = Nothing
, Annex.requiredcontentmap = Nothing
}
setLog _ NoUUID _ = error "unknown UUID; cannot modify"

{- Changes the preferred content configuration of a group. -}
groupPreferredContentSet :: Group -> PreferredContentExpression -> Annex ()

@ -44,6 +53,10 @@ preferredContentMapRaw :: Annex (M.Map UUID PreferredContentExpression)
preferredContentMapRaw = simpleMap . parseLog Just
<$> Annex.Branch.get preferredContentLog

requiredContentMapRaw :: Annex (M.Map UUID PreferredContentExpression)
requiredContentMapRaw = simpleMap . parseLog Just
<$> Annex.Branch.get requiredContentLog

groupPreferredContentMapRaw :: Annex (M.Map Group PreferredContentExpression)
groupPreferredContentMapRaw = simpleMap . parseMapLog Just Just
<$> Annex.Branch.get groupPreferredContentLog
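
With setLog factored out, preferred and required content share one update path, and both in-memory caches are invalidated on any change. A sketch of how a command might record a required content expression for a remote (hypothetical call, not part of the patch; "include=archive/*" is just an example expression):

[[!format haskell """
-- Hypothetical: require a remote to hold everything under archive/.
requireArchive :: UUID -> Annex ()
requireArchive u = requiredContentSet u "include=archive/*"
"""]]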

102
Logs/Transfer.hs

@ -88,108 +88,6 @@ percentComplete :: Transfer -> TransferInfo -> Maybe Percentage
percentComplete (Transfer { transferKey = key }) info =
percentage <$> keySize key <*> Just (fromMaybe 0 $ bytesComplete info)

type RetryDecider = TransferInfo -> TransferInfo -> Bool

noRetry :: RetryDecider
noRetry _ _ = False

{- Retries a transfer when it fails, as long as the failed transfer managed
- to send some data. -}
forwardRetry :: RetryDecider
forwardRetry old new = bytesComplete old < bytesComplete new

upload :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
upload u key = runTransfer (Transfer Upload u key)

download :: UUID -> Key -> AssociatedFile -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
download u key = runTransfer (Transfer Download u key)

{- Runs a transfer action. Creates and locks the lock file while the
- action is running, and stores info in the transfer information
- file.
-
- If the transfer action returns False, the transfer info is
- left in the failedTransferDir.
-
- If the transfer is already in progress, returns False.
-
- An upload can be run from a read-only filesystem, and in this case
- no transfer information or lock file is used.
-}
runTransfer :: Transfer -> Maybe FilePath -> RetryDecider -> (MeterUpdate -> Annex Bool) -> Annex Bool
runTransfer t file shouldretry a = do
info <- liftIO $ startTransferInfo file
(meter, tfile, metervar) <- mkProgressUpdater t info
mode <- annexFileMode
(fd, inprogress) <- liftIO $ prep tfile mode info
if inprogress
then do
showNote "transfer already in progress"
return False
else do
ok <- retry info metervar $
bracketIO (return fd) (cleanup tfile) (const $ a meter)
unless ok $ recordFailedTransfer t info
return ok
where
#ifndef mingw32_HOST_OS
prep tfile mode info = do
mfd <- catchMaybeIO $
openFd (transferLockFile tfile) ReadWrite (Just mode)
defaultFileFlags { trunc = True }
case mfd of
Nothing -> return (Nothing, False)
Just fd -> do
locked <- catchMaybeIO $
setLock fd (WriteLock, AbsoluteSeek, 0, 0)
if isNothing locked
then return (Nothing, True)
else do
void $ tryIO $ writeTransferInfoFile info tfile
return (mfd, False)
#else
prep tfile _mode info = do
v <- catchMaybeIO $ lockExclusive (transferLockFile tfile)
case v of
Nothing -> return (Nothing, False)
Just Nothing -> return (Nothing, True)
Just (Just lockhandle) -> do
void $ tryIO $ writeTransferInfoFile info tfile
return (Just lockhandle, False)
#endif
cleanup _ Nothing = noop
cleanup tfile (Just lockhandle) = do
void $ tryIO $ removeFile tfile
#ifndef mingw32_HOST_OS
void $ tryIO $ removeFile $ transferLockFile tfile
closeFd lockhandle
#else
{- Windows cannot delete the lockfile until the lock
- is closed. So it's possible to race with another
- process that takes the lock before it's removed,
- so ignore failure to remove.
-}
dropLock lockhandle
void $ tryIO $ removeFile $ transferLockFile tfile
#endif
retry oldinfo metervar run = do
v <- tryAnnex run
case v of
Right b -> return b
Left _ -> do
b <- getbytescomplete metervar
let newinfo = oldinfo { bytesComplete = Just b }
if shouldretry oldinfo newinfo
then retry newinfo metervar run
else return False
getbytescomplete metervar
| transferDirection t == Upload =
liftIO $ readMVar metervar
| otherwise = do
f <- fromRepo $ gitAnnexTmpObjectLocation (transferKey t)
liftIO $ catchDefaultIO 0 $
fromIntegral . fileSize <$> getFileStatus f

{- Generates a callback that can be called as transfer progresses to update
- the transfer info file. Also returns the file it'll be updating, and a
- MVar that can be used to read the number of bytesComplete. -}
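
The whole block above (RetryDecider, runTransfer and the upload/download wrappers) moves out of Logs.Transfer in this commit; the hunks below switch callers over to Annex.Transfer. A RetryDecider is just a predicate over the old and new TransferInfo, so deciders compose easily; a hedged sketch of a combinator (hypothetical, not part of the patch):

[[!format haskell """
-- Hypothetical combinator: retry if either decider wants to retry.
orRetry :: RetryDecider -> RetryDecider -> RetryDecider
orRetry a b old new = a old new || b old new
"""]]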

@ -24,7 +24,7 @@ import qualified Git.Command
import qualified Git.GCrypt
import qualified Annex
import Logs.Presence
import Logs.Transfer
import Annex.Transfer
import Annex.UUID
import Annex.Exception
import qualified Annex.Content

@ -321,7 +321,7 @@ copyFromRemote' r key file dest
case v of
Nothing -> return False
Just (object, checksuccess) ->
upload u key file noRetry
runTransfer (Transfer Download u key) file noRetry
(rsyncOrCopyFile params object dest)
<&&> checksuccess
| Git.repoIsSsh (repo r) = feedprogressback $ \feeder -> do

@ -418,7 +418,7 @@ copyToRemote r key file p
( return True
, do
ensureInitialized
download u key file noRetry $ const $
runTransfer (Transfer Download u key) file noRetry $ const $
Annex.Content.saveState True `after`
Annex.Content.getViaTmpChecked (liftIO checksuccessio) key
(\d -> rsyncOrCopyFile params object d p)

@ -82,7 +82,7 @@ glacierSetup' enabling u c = do
unless enabling $
genVault fullconfig u
gitConfigSpecialRemote u fullconfig "glacier" "true"
return (c', u)
return (fullconfig, u)
where
remotename = fromJust (M.lookup "name" c)
defvault = remotename ++ "-" ++ fromUUID u

@ -225,7 +225,8 @@ glacierParams :: RemoteConfig -> [CommandParam] -> [CommandParam]
glacierParams c params = datacenter:params
where
datacenter = Param $ "--region=" ++
fromJust (M.lookup "datacenter" c)
fromMaybe (error "Missing datacenter configuration")
(M.lookup "datacenter" c)

glacierEnv :: RemoteConfig -> UUID -> Annex (Maybe [(String, String)])
glacierEnv c u = go =<< getRemoteCredPairFor "glacier" c creds

@ -239,7 +240,8 @@ glacierEnv c u = go =<< getRemoteCredPairFor "glacier" c creds
(uk, pk) = credPairEnvironment creds

getVault :: RemoteConfig -> Vault
getVault = fromJust . M.lookup "vault"
getVault = fromMaybe (error "Missing vault configuration")
. M.lookup "vault"

archive :: Remote -> Key -> Archive
archive r k = fileprefix ++ key2file k

@ -216,7 +216,7 @@ readTahoe hdl command params = withTahoeConfigDir hdl $ \configdir ->

tahoeParams :: TahoeConfigDir -> String -> [CommandParam] -> [CommandParam]
tahoeParams configdir command params =
Param command : Param "-d" : File configdir : params
Param "-d" : File configdir : Param command : params

storeCapability :: UUID -> Key -> Capability -> Annex ()
storeCapability u k cap = setRemoteState u k cap
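
The reordering matters because tahoe-lafs 1.10 only accepts the global -d option before the subcommand. Worked directly from the definitions above (hypothetical config directory, not program output):

[[!format haskell """
exampleParams :: [CommandParam]
exampleParams = tahoeParams "/home/user/.tahoe" "put" [File "foo"]
-- old order: [Param "put", Param "-d", File "/home/user/.tahoe", File "foo"]
-- new order: [Param "-d", File "/home/user/.tahoe", Param "put", File "foo"]
"""]]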

27
Types/DesktopNotify.hs
Normal file

@ -0,0 +1,27 @@
{- git-annex DesktopNotify type
-
- Copyright 2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

module Types.DesktopNotify where

import Data.Monoid

data DesktopNotify = DesktopNotify
{ notifyStart :: Bool
, notifyFinish :: Bool
}
deriving (Show)

instance Monoid DesktopNotify where
mempty = DesktopNotify False False
mappend (DesktopNotify s1 f1) (DesktopNotify s2 f2) =
DesktopNotify (s1 || s2) (f1 || f2)

mkNotifyStart :: DesktopNotify
mkNotifyStart = DesktopNotify True False

mkNotifyFinish :: DesktopNotify
mkNotifyFinish = DesktopNotify False True
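
The Monoid instance lets the --notify-start and --notify-finish switches be folded together, ORing the flags. A small sketch consistent with the instance above:

[[!format haskell """
import Data.Monoid ((<>))

both :: DesktopNotify
both = mkNotifyStart <> mkNotifyFinish
-- notifyStart both == True, notifyFinish both == True
"""]]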

@ -7,7 +7,12 @@

module Types.FileMatcher where

import Types.UUID (UUID)
import Types.Key (Key)
import Utility.Matcher (Matcher, Token)

import qualified Data.Map as M
import qualified Data.Set as S

data MatchInfo
= MatchingFile FileInfo

@ -17,3 +22,19 @@ data FileInfo = FileInfo
{ relFile :: FilePath -- may be relative to cwd
, matchFile :: FilePath -- filepath to match on; may be relative to top
}

type FileMatcherMap a = M.Map UUID (Utility.Matcher.Matcher (S.Set UUID -> MatchInfo -> a Bool))

type MkLimit a = String -> Either String (MatchFiles a)

type AssumeNotPresent = S.Set UUID

type MatchFiles a = AssumeNotPresent -> MatchInfo -> a Bool

type FileMatcher a = Matcher (MatchFiles a)

-- This is a matcher that can have tokens added to it while it's being
-- built, and once complete is compiled to an unchangeable matcher.
data ExpandableMatcher a
= BuildingMatcher [Token (MatchInfo -> a Bool)]
| CompleteMatcher (Matcher (MatchInfo -> a Bool))
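
ExpandableMatcher captures the two phases the limit parser goes through: tokens accumulate while command-line options are parsed, and the finished list is compiled with Utility.Matcher.generate. A hedged sketch of that lifecycle (hypothetical helpers, assuming generate :: [Token op] -> Matcher op; not part of the patch):

[[!format haskell """
import Utility.Matcher (Token, generate)

-- Hypothetical: add a token while the matcher is still being built.
addToken :: Token (MatchInfo -> a Bool) -> ExpandableMatcher a -> ExpandableMatcher a
addToken t (BuildingMatcher ts) = BuildingMatcher (t : ts)
addToken _ m = m  -- already compiled; further tokens are ignored

-- Hypothetical: freeze the matcher once all tokens have been collected.
compile :: ExpandableMatcher a -> ExpandableMatcher a
compile (BuildingMatcher ts) = CompleteMatcher (generate (reverse ts))
compile m = m
"""]]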

@ -1,20 +0,0 @@
{- types for limits
-
- Copyright 2013 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

{-# LANGUAGE CPP #-}

module Types.Limit where

import Common.Annex
import Types.FileMatcher

import qualified Data.Set as S

type MkLimit = String -> Either String MatchFiles

type AssumeNotPresent = S.Set UUID
type MatchFiles = AssumeNotPresent -> MatchInfo -> Annex Bool

@ -264,7 +264,9 @@ parseMetaData p = (,)
instance Arbitrary MetaData where
arbitrary = do
size <- arbitrarySizedBoundedIntegral `suchThat` (< 500)
MetaData . M.fromList <$> vector size
MetaData . M.filterWithKey legal . M.fromList <$> vector size
where
legal k _v = legalField $ fromMetaField k

instance Arbitrary MetaValue where
arbitrary = MetaValue <$> arbitrary <*> arbitrary

@ -9,15 +9,18 @@

module Utility.FileMode where

import Common

import System.IO
import Control.Monad
import Control.Exception (bracket)
import System.PosixCompat.Types
import Utility.PosixFiles
#ifndef mingw32_HOST_OS
import System.Posix.Files
#endif
import Foreign (complement)

import Utility.Exception

{- Applies a conversion function to a file's mode. -}
modifyFileMode :: FilePath -> (FileMode -> FileMode) -> IO ()
modifyFileMode f convert = void $ modifyFileMode' f convert

@ -56,6 +59,12 @@ readModes = [ownerReadMode, groupReadMode, otherReadMode]
executeModes :: [FileMode]
executeModes = [ownerExecuteMode, groupExecuteMode, otherExecuteMode]

otherGroupModes :: [FileMode]
otherGroupModes =
[ groupReadMode, otherReadMode
, groupWriteMode, otherWriteMode
]

{- Removes the write bits from a file. -}
preventWrite :: FilePath -> IO ()
preventWrite f = modifyFileMode f $ removeModes writeModes

@ -145,9 +154,5 @@ setSticky f = modifyFileMode f $ addModes [stickyMode]
writeFileProtected :: FilePath -> String -> IO ()
writeFileProtected file content = withUmask 0o0077 $
withFile file WriteMode $ \h -> do
void $ tryIO $ modifyFileMode file $
removeModes
[ groupReadMode, otherReadMode
, groupWriteMode, otherWriteMode
]
void $ tryIO $ modifyFileMode file $ removeModes otherGroupModes
hPutStr h content
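
otherGroupModes simply names the group and other read/write bits so that writeFileProtected (and any future caller) can strip them in one call to removeModes. The inverse operation would look like this (a hypothetical helper, not in the patch):

[[!format haskell """
-- Hypothetical: open a file's permissions back up to group and others.
allowGroupOther :: FilePath -> IO ()
allowGroupOther f = modifyFileMode f $ addModes otherGroupModes
"""]]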

@ -19,7 +19,7 @@

module Utility.Matcher (
Token(..),
Matcher,
Matcher(..),
token,
tokens,
generate,

@ -10,10 +10,13 @@

module Utility.ThreadScheduler where

import Common

import Control.Monad
import Control.Concurrent
#ifndef mingw32_HOST_OS
import Control.Monad.IfElse
import System.Posix.IO
#endif
#ifndef mingw32_HOST_OS
import System.Posix.Signals
#ifndef __ANDROID__
import System.Posix.Terminal

@ -77,7 +77,8 @@ exists url uo = case parseURIRelaxed url of
Nothing -> dne
| otherwise -> if Build.SysConfig.curl
then do
output <- readProcess "curl" $ toCommand curlparams
output <- catchDefaultIO "" $
readProcess "curl" $ toCommand curlparams
case lastMaybe (lines output) of
Just ('2':_:_) -> return (True, extractsize output)
_ -> dne

@ -33,7 +33,6 @@ import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Blaze.ByteString.Builder.Char.Utf8 (fromText)
import Blaze.ByteString.Builder (Builder)
import Data.Monoid
import Control.Arrow ((***))
import Control.Concurrent
#ifdef WITH_WEBAPP_SECURE

37
debian/changelog
vendored

@ -1,3 +1,40 @@
git-annex (5.20140402) unstable; urgency=medium

  * unannex, uninit: Avoid committing after every file is unannexed,
    for massive speedup.
  * --notify-finish switch will cause desktop notifications after each
    file upload/download/drop completes
    (using the dbus Desktop Notifications Specification)
  * --notify-start switch will show desktop notifications when each
    file upload/download starts.
  * webapp: Automatically install Nautilus integration scripts
    to get and drop files.
  * tahoe: Pass -d parameter before subcommand; putting it after
    the subcommand no longer works with tahoe-lafs version 1.10.
    (Thanks, Alberto Berti)
  * forget --drop-dead: Avoid removing the dead remote from the trust.log,
    so that if git remotes for it still exist anywhere, git annex info
    will still know it's dead and not show it.
  * git-annex-shell: Make configlist automatically initialize
    a remote git repository, as long as a git-annex branch has
    been pushed to it, to simplify setup of remote git repositories,
    including via gitolite.
  * add --include-dotfiles: New option, perhaps useful for backups.
  * Version 5.20140227 broke creation of glacier repositories,
    not including the datacenter and vault in their configuration.
    This bug is fixed, but glacier repositories set up with the broken
    version of git-annex need to have the datacenter and vault set
    in order to be usable. This can be done using git annex enableremote
    to add the missing settings. For details, see
    http://git-annex.branchable.com/bugs/problems_with_glacier/
  * Added required content configuration.
  * assistant: Improve ssh authorized keys line generated in local pairing
    or for a remote ssh server to set environment variables in an
    alternative way that works with the non-POSIX fish shell, as well
    as POSIX shells.

 -- Joey Hess <joeyh@debian.org>  Wed, 02 Apr 2014 16:42:53 -0400

git-annex (5.20140320) unstable; urgency=medium

  * Fix zombie leak and general inneficiency when copying files to a

1
debian/control
vendored

@ -30,6 +30,7 @@ Build-Depends:
libghc-hinotify-dev [linux-any],
libghc-stm-dev (>= 2.3),
libghc-dbus-dev (>= 0.10.3) [linux-any],
libghc-fdo-notify-dev (>= 0.3) [linux-any],
libghc-yesod-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
libghc-yesod-static-dev [i386 amd64 kfreebsd-i386 kfreebsd-amd64 powerpc sparc],
libghc-yesod-default-dev [i386 amd64 kfreebsd-amd64 powerpc sparc],

BIN
doc/assistant/downloadnotification.png
Normal file
Binary file not shown.

BIN
doc/assistant/nautilusmenu.png
Normal file
Binary file not shown.

@ -0,0 +1,17 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawn3p4i4lk_zMilvjnJ9sS6g2nerpgz0Fjc"
nickname="Matthias"
subject="merge for master branch?"
date="2014-03-23T23:02:23Z"
content="""
As far as I observed, \"git annex merge\" only merges the \"git-annex\" branch. My wish is to have the conflict resolution from \"git annex sync\" in the \"master\" branch, but no automatic commit, such that the user can verify and possibly correct the merge. The proposed merge could go to the index. Consider the following scenario:

1. We have repo A, B, and CENTRAL
2. All three start with a root commit in \"master\" branch
3. Then A commits a file \"test.txt\" with content \"a\" and syncs with CENTRAL
4. Meanwhile, B commits \"test.txt\" with content \"b\"
5. When B tries to sync with CENTRAL, the proposed conflict resolution having two files \"test.txt-variantXXXX\" and \"test.txt-variantYYYY\" should be staged in the index, but not committed yet.
6. B can now commit a custom merge, e.g. with file content \"ab\".

The point is that I really like the conflict resolution, but still want to force the user to check the result.
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.41"
subject="comment 5"
date="2014-03-26T18:56:30Z"
content="""
@Matthias you need to install git-annex 4.20130709 or newer. Then `git-annex merge` will do what you want. As I said before.

As for committing the merge, you can always adjust the result after the fact and use `git commit --amend`.
"""]]

@ -0,0 +1,520 @@
I have a git annex assistant process using 1.2 gigabytes of RAM and a git cat-file --batch child consuming CPU time constantly. I am running 5.20140320 on Ubuntu 12.04.

[[!format sh """
PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
11775 ion 20 0 1350m 1.2g 12m S 48 62.4 425:56.85 git-annex
11787 ion 20 0 9856 1484 1232 R 54 0.1 366:16.14 git
"""]]

The assistant UI looks perfectly normal and does not indicate it is doing anything. daemon.log is empty and the assistant process seems to be logging into a rotated and deleted log file.

[[!format sh """
COMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME
git-annex 11775 ion 1w REG 9,127 80841 55181369 /storage/ion/media/video/.git/annex/daemon.log.10 (deleted)
git-annex 11775 ion 2w REG 9,127 80841 55181369 /storage/ion/media/video/.git/annex/daemon.log.10 (deleted)
"""]]

strace -s10000 -e trace=read,write -p 11787 indicates that the assistant is having the cat-file process cat same objects over and over again.

[[!format sh """
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
read(3, "", 214) = 0
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
write(1, "\n", 1) = 1
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
|
||||
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
|
||||
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
|
||||
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "d95950acebb5c4318329d7b989d36d01b76b7801 blob 232\n", 50) = 50
|
||||
write(1, "1396057825.366657s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.538068s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.560144s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.538542s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:ee2/2ee/SHA256E-s106800355--c70e31d511e7eec4881a15dfba521ea3d1fe14694968f81ae1819f1a2a93f9be.mp4.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "977039ea431522e6e27a78bdec2c1299f883eb85 blob 232\n", 50) = 50
|
||||
write(1, "1396057823.999737s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057835.133409s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057835.215084s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.468307s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:a84/f1f/SHA256E-s47051987--dcfd0413db883506ccb8c45e3b2d60cb3ff5c83cc55c9c7e44818d7556dbc07f.mp4.log\n", 4096) = 121
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "6c0ff555a1a34337c9379d8856c8283429bef973 blob 231\n", 50) = 50
|
||||
write(1, "1396057829.505426s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057839.859236s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057839.875213s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.77741s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:50a/5fc/SHA256E-s275654757--52823cd2061375910ccbd8de38865eca91511d9b4621243d2ef96a974d7546aa.flv.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "cdbc7ce6b426dfcce9d718387b6c412e870a2d12 blob 232\n", 50) = 50
|
||||
write(1, "1396057828.887576s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.117938s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.197196s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.687354s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 232) = 232
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:9b5/545/SHA256E-s32710--6005b5faf1a6d42d499053f8cca87d080536abfa8442b33c87f7966e86726e4f.fin.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
read(3, "", 214) = 0
|
||||
write(1, "82a98cbfe8f24d336a537cecea2182922c4681e1 blob 231\n", 50) = 50
|
||||
write(1, "1396057825.390306s 1 3f89d0d8-6162-4362-852a-cb688d6c0696\n1396057866.37922s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396057866.386029s 1 161c3d7f-eb60-4294-84d2-eb611786c91e\n1396058160.588219s 0 3f89d0d8-6162-4362-852a-cb688d6c0696\n", 231) = 231
|
||||
write(1, "\n", 1) = 1
|
||||
read(0, "refs/heads/git-annex:d28/166/SHA256E-s59188--9c04581bd67ea7c78b537a164d104bea5ac91a4a69f06b477cf07892a2d9b852.fih.srt.log\n", 4096) = 122
|
||||
read(3, "0936a1fdd849b8b46abb879d7cf82cc758b367e3\n", 255) = 41
|
||||
"""]]
|
|
@ -0,0 +1,10 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://joeyh.name/"
|
||||
ip="209.250.56.244"
|
||||
subject="comment 1"
|
||||
date="2014-04-02T18:48:51Z"
|
||||
content="""
|
||||
All I can tell from the strace is that it's looking at location logs, and it's looking at the same few keys, but not a single one in a tight loop.
|
||||
|
||||
It would probably help a lot to run the assistant with --debug and get a debug log while this is going on. We need to pinpoint the part of the assistant that is affected, and there may be other activity too.
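A minimal sketch of how that debug log could be captured, assuming the assistant writes to the usual location under .git/annex:

[[!format sh \"\"\"
# restart the assistant with debug logging and watch the log
# (assumes --stop is supported here the same way it is for watch)
git annex assistant --stop
git annex assistant --debug
tail -f .git/annex/daemon.log
\"\"\"]]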
|
||||
"""]]
|
|
@ -76,3 +76,13 @@ accept: unsupported operation (Function not implemented)
|
|||
|
||||
lost dbus connection; falling back to polling (SocketError {socketErrorMessage = "connect: does not exist (No such file or directory)", socketErrorFatal = True, socketErrorAddress = Just (Address "unix:path=/var/run/dbus/system_bus_socket")})
|
||||
"""]]
|
||||
|
||||
> [[done]]; This turned out not to be dbus related, but the http server failing,
|
||||
> and I fixed that bug.
|
||||
>
|
||||
> AFAICS the user running git-annex did not have their own dbus daemon
|
||||
> running, and that's why the low-volume dbus messages come up.
|
||||
> Probably because this is an embedded device, and so no desktop
|
||||
> environment. git-annex only uses dbus for detecting network connection
|
||||
> changes and removable media mounts. None of which probably matter in an
|
||||
> embedded environment. --[[Joey]]
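For reference, a quick illustrative check for whether the dbus sockets git-annex would use are present at all; if neither is, falling back to polling is the expected behaviour:

[[!format sh """
# system bus socket path taken from the error message above
test -S /var/run/dbus/system_bus_socket && echo "system bus socket present"
# a per-user session bus would normally be advertised via this variable
test -n "${DBUS_SESSION_BUS_ADDRESS:-}" && echo "session bus address set"
"""]]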
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://schnouki.net/"
|
||||
nickname="Schnouki"
|
||||
subject="comment 13"
|
||||
date="2014-03-20T23:54:51Z"
|
||||
content="""
|
||||
Sorry for the delay (my laptop hard drive died so I was offline for a few days).
|
||||
|
||||
Just tested git-annex 5.20140320 on my NAS and it works just fine! The webapp is running, I can connect to it, and there's no more spam in the daemon.log (except for the dbus message every minute, but that's not really a problem).
|
||||
|
||||
Thanks a lot Joey!
|
||||
"""]]
|
|
@ -0,0 +1,11 @@
|
|||
[[!comment format=mdwn
|
||||
username="https://www.google.com/accounts/o8/id?id=AItOawnvVfFLW4CTKs7UjdiLIsOn_cxj1Jnh64I"
|
||||
nickname="Charl"
|
||||
subject="Could it be gmail.com XMPP throttling?"
|
||||
date="2014-03-23T19:26:08Z"
|
||||
content="""
|
||||
I was seeing similar error messages, until I registered for a different XMPP account at jabber.de and started using that instead of my gmail.com account.
|
||||
|
||||
My current suspicion is that it could be Google performing throttling on their XMPP service. See here: http://stackoverflow.com/questions/1843837/what-is-the-throttling-rate-that-gtalk-applies-to-xmpp-messages
|
||||
|
||||
"""]]
|
23
doc/bugs/Crash_when_disabling_syncing_in_the_webapp.mdwn
Normal file
23
doc/bugs/Crash_when_disabling_syncing_in_the_webapp.mdwn
Normal file
|
@ -0,0 +1,23 @@
|
|||
### Please describe the problem.
|
||||
The watcher crashes.
|
||||
|
||||
I only need to restart the thread in the pop-up to get everything to work again, but I'm reporting it just in case this issue has any other implications.
|
||||
|
||||
|
||||
### What steps will reproduce the problem?
|
||||
I open the webapp and, in the minutes before it starts syncing (syncing is enabled), I disable it (clicking on the 'syncing enabled' text).
|
||||
|
||||
This produces a crash every time.
|
||||
|
||||
|
||||
### What version of git-annex are you using? On what operating system?
|
||||
5.20140320 in Debian sid and testing
|
||||
|
||||
|
||||
### Please provide any additional information below.
|
||||
This is all I can see in the logs
|
||||
|
||||
[[!format sh """
|
||||
Watcher crashed: PauseWatcher
|
||||
[2014-03-26 08:54:57 CET] Watcher: warning Watcher crashed: PauseWatcher
|
||||
"""]]
|
|
@ -0,0 +1,10 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://joeyh.name/"
|
||||
ip="209.250.56.41"
|
||||
subject="comment 1"
|
||||
date="2014-03-26T17:36:00Z"
|
||||
content="""
|
||||
How did you install git-annex? Is this Debian Linux?
|
||||
|
||||
I have not been able to reproduce a crash. It's indeed the case that a PauseWatcher exception is thrown, but the Watcher explicitly catches that exception.
|
||||
"""]]
|
|
@ -0,0 +1,10 @@
|
|||
[[!comment format=mdwn
|
||||
username="https://www.google.com/accounts/o8/id?id=AItOawk9nck8WX8-ADF3Fdh5vFo4Qrw1I_bJcR8"
|
||||
nickname="Jon Ander"
|
||||
subject="comment 2"
|
||||
date="2014-04-01T08:04:51Z"
|
||||
content="""
|
||||
Yes, this is Debian Linux and I've been able to reproduce it on i386 and amd64. git-annex is installed from the Debian repositories.
|
||||
|
||||
I'll try to continue testing the issue and will report back if I can find any useful info.
|
||||
"""]]
|
|
@ -0,0 +1,12 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://joeyh.name/"
|
||||
ip="209.250.56.244"
|
||||
subject="comment 3"
|
||||
date="2014-04-02T20:38:12Z"
|
||||
content="""
|
||||
So we have the same version of git-annex from the same build, and only you see the problem. Hmm..
|
||||
|
||||
You mentioned that you see the problem if you disable syncing at a particular time. Does it only crash at that time, or at any time?
|
||||
|
||||
If you create a brand new empty repository and run the webapp in it, can you reproduce the problem there? Ie, \"mkdir test; cd test; git init; git annex init; git annex webapp\"
|
||||
"""]]
|
|
@ -0,0 +1,14 @@
|
|||
When using git annex as part of an automated backup system, it's sometimes important that we archive all files, including dotfiles.
|
||||
|
||||
AFAICT there's no way to tell git annex add to add all dotfiles in a given directory; the only way to do it is to list every dotfile individually. (git annex add --force doesn't seem to do it.)
|
||||
|
||||
This can be worked around with find and xargs, but this is more work than it should be, I think.
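For example, a rough sketch of that find/xargs workaround (illustrative only; it just picks up files whose names start with a dot, skipping .git):

[[!format sh """
find . -path ./.git -prune -o -name '.*' -type f -print0 | xargs -0 git annex add --
"""]]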
|
||||
|
||||
It might also be nice if git annex add displayed a warning when adding a directory with dotfiles; something like "Warning, N files ignored; pass --whatever to add them."
|
||||
|
||||
> [[!commit 34abd7bca80a8cc012f92d64116014449b1b2392]] explains
|
||||
> the rationale for dotfiles to be skipped. Such as it was.
|
||||
>
|
||||
> I don't think it makes sense for --force to be the flag to override
|
||||
> this, because you may want to add dotfiles, but not .gitignored
|
||||
> files. So, made a new --include-dotfiles option. [[done]] --[[Joey]]
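With that option, adding dotfiles might look like this (sketch; the path is just an example):

[[!format sh """
git annex add --include-dotfiles .
"""]]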
|
|
@ -0,0 +1,8 @@
|
|||
[[!comment format=mdwn
|
||||
username="https://www.google.com/accounts/o8/id?id=AItOawmUJBh1lYmvfCCiGr3yrdx-QhuLCSRnU5c"
|
||||
nickname="Justin"
|
||||
subject="comment 1"
|
||||
date="2014-03-24T07:03:42Z"
|
||||
content="""
|
||||
Maybe the right solution is to make --force not ignore dotfiles, although perhaps that would break people who rely on its current behavior.
|
||||
"""]]
|
|
@ -0,0 +1,62 @@
|
|||
### Please describe the problem.
|
||||
Just a small patch to Tahoe.hs which fixes "initremote repo
|
||||
type=thaoe" when using the latest tahoe-lafs release available for
|
||||
download.
|
||||
|
||||
I'm trying to add an attachment here but the UI says "prohibited by
|
||||
allowed_attachments (user is not an admin)" so please have a look also
|
||||
here: <https://github.com/joeyh/git-annex/pull/21>
|
||||
|
||||
### What steps will reproduce the problem?
|
||||
|
||||
1. Install latest tahoe-lafs
|
||||
2. run "TAHOE_FURL=... git annex initremote repo type=tahoe"
|
||||
|
||||
### What version of git-annex are you using? On what operating system?
|
||||
|
||||
OS: Debian Sid updated to latest packages
|
||||
git-annex: version 5.20140306
|
||||
|
||||
> Thanks, I've fixed this. [[done]] --[[Joey]]
|
||||
|
||||
### Please provide any additional information below.
|
||||
|
||||
I would like to add a few things if I'm able (it's my first time with
|
||||
Haskell, and I'm a bit lost as of now):
|
||||
|
||||
1. add an optional parameter or envvar for a root dir cap and switch
|
||||
from storing anonymous files to saving a tree of dirs and files
|
||||
(much like the webdav back-end) so that i can later renew the
|
||||
leases on the files to prevent tahoe's garbage collection process
|
||||
expiration (for details see
|
||||
<https://tahoe-lafs.org/trac/tahoe-lafs/browser/docs/garbage-collection.rst#client-side-renewal>)
|
||||
|
||||
The poor man's way to do that without touching git-annex is to in some
|
||||
way collect the caps of the files from the annex metadata and to link
|
||||
them to a directory, which I can then run "tahoe deep-check
|
||||
--add-lease" on;
|
||||
|
||||
> When I talked this over with Zooko before, he
|
||||
> thought it was better for git-annex to not use tahoe's directories,
|
||||
> which is why it doesn't. See [[todo/tahoe_lfs_for_reals]].
|
||||
>
|
||||
> This is the first I have heard about tahoe garbage collection.
|
||||
> It sounds like it's an optional process.
|
||||
>
|
||||
> It would certainly be possible to pull the caps for files out of
|
||||
> git-annex's remote state log.
|
||||
> --[[Joey]]
|
||||
|
||||
2. add convergence as an optional parameter. As of now many of the
|
||||
files that I'm willing to manage with git-annex are already on my
|
||||
grid, mostly because I've used tahoe's backup command to upload
|
||||
them in the past. By using the same convergence value that I've
|
||||
already set up on my other tahoe client installation I would be able
|
||||
to save much time and space by avoiding duplicates.
|
||||
|
||||
Do you have any good pointers to pieces of code of git-annex that I can
|
||||
read and hack to try to implement this or any other suggestion?
|
||||
|
||||
> It was not documented, but you can already pass
|
||||
> `shared-convergence-secret=xxx` to `initremote`.
|
||||
> I have documented it. --[[Joey]]
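So reusing an existing convergence secret would look something like this (sketch; the remote name and secret value are placeholders):

[[!format sh """
TAHOE_FURL=... git annex initremote mytahoe type=tahoe shared-convergence-secret=YOURSECRET
"""]]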
|
|
@ -0,0 +1,195 @@
|
|||
Addurl can fail due to an apparent race condition when watch or assistant is running and the repository is in direct mode. The following stress test script encounters the bug consistently on my system. I am running git-annex 5.20140320 on Ubuntu 13.10.
|
||||
|
||||
[[!format sh """
|
||||
#!/bin/sh
|
||||
set -eu
|
||||
|
||||
cleanup() {
|
||||
local dir
|
||||
dir="$1"; shift
|
||||
if [ -d "$dir" ]; then
|
||||
(
|
||||
set -x
|
||||
fuser -k -w "$dir/annex/.git/annex/daemon.log" || :
|
||||
find "$dir" -type d -exec chmod 700 '{}' '+'
|
||||
find "$dir" -type f -exec chmod 600 '{}' '+'
|
||||
rm -fr "$dir"
|
||||
)
|
||||
fi
|
||||
}
|
||||
|
||||
go() {
|
||||
local dir
|
||||
dir="$(mktemp -d "${TMP:-/tmp}/stress-annex.XXXXXXXXXX")"
|
||||
trap "cleanup '$dir'" 0 1 2 13 15
|
||||
|
||||
(
|
||||
cd "$dir"
|
||||
mkdir annex
|
||||
cd annex
|
||||
set -x
|
||||
|
||||
git init
|
||||
git annex init
|
||||
git annex direct
|
||||
git annex watch
|
||||
|
||||
for n in $(seq 100); do
|
||||
git annex addurl --file=foo http://heh.fi/robots.txt
|
||||
git annex sync
|
||||
rm -f foo
|
||||
git annex sync
|
||||
done
|
||||
|
||||
git annex watch --stop
|
||||
git annex uninit
|
||||
)
|
||||
|
||||
cleanup "$dir"
|
||||
trap - 0 1 2 13 14
|
||||
}
|
||||
|
||||
go
|
||||
"""]]
|
||||
|
||||
Script output:
|
||||
|
||||
[[!format sh """
|
||||
% ./stress-annex
|
||||
+ git init
|
||||
Initialized empty Git repository in /tmp/stress-annex.OKj6D8kVmV/annex/.git/
|
||||
+ git annex init
|
||||
init ok
|
||||
(Recording state in git...)
|
||||
+ git annex direct
|
||||
commit
|
||||
On branch master
|
||||
|
||||
Initial commit
|
||||
|
||||
nothing to commit
|
||||
ok
|
||||
direct ok
|
||||
+ git annex watch
|
||||
+ seq 100
|
||||
+ git annex addurl --file=foo http://heh.fi/robots.txt
|
||||
addurl foo (downloading http://heh.fi/robots.txt ...)
|
||||
--2014-03-27 03:14:29-- http://heh.fi/robots.txt
|
||||
Resolving heh.fi (heh.fi)... 83.145.237.222
|
||||
Connecting to heh.fi (heh.fi)|83.145.237.222|:80... connected.
|
||||
HTTP request sent, awaiting response... 200 OK
|
||||
Length: 0 [text/plain]
|
||||
Saving to: ‘/tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/tmp/URL--http&c%%heh.fi%robots.txt’
|
||||
|
||||
[ <=> ] 0 --.-K/s in 0s
|
||||
|
||||
2014-03-27 03:14:29 (0.00 B/s) - ‘/tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/tmp/URL--http&c%%heh.fi%robots.txt’ saved [0/0]
|
||||
|
||||
(Recording state in git...)
|
||||
ok
|
||||
(Recording state in git...)
|
||||
+ git annex sync
|
||||
commit ok
|
||||
+ rm -f foo
|
||||
+ git annex sync
|
||||
commit (Recording state in git...)
|
||||
ok
|
||||
(Recording state in git...)
|
||||
+ git annex addurl --file=foo http://heh.fi/robots.txt
|
||||
addurl foo (downloading http://heh.fi/robots.txt ...)
|
||||
--2014-03-27 03:14:29-- http://heh.fi/robots.txt
|
||||
Resolving heh.fi (heh.fi)... 83.145.237.222
|
||||
Connecting to heh.fi (heh.fi)|83.145.237.222|:80... connected.
|
||||
HTTP request sent, awaiting response... 200 OK
|
||||
Length: 0 [text/plain]
|
||||
Saving to: ‘/tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/tmp/URL--http&c%%heh.fi%robots.txt’
|
||||
|
||||
[ <=> ] 0 --.-K/s in 0s
|
||||
|
||||
2014-03-27 03:14:29 (0.00 B/s) - ‘/tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/tmp/URL--http&c%%heh.fi%robots.txt’ saved [0/0]
|
||||
|
||||
(Recording state in git...)
|
||||
ok
|
||||
(Recording state in git...)
|
||||
+ git annex sync
|
||||
commit ok
|
||||
+ rm -f foo
|
||||
+ git annex sync
|
||||
commit (Recording state in git...)
|
||||
ok
|
||||
(Recording state in git...)
|
||||
+ git annex addurl --file=foo http://heh.fi/robots.txt
|
||||
addurl foo (downloading http://heh.fi/robots.txt ...)
|
||||
--2014-03-27 03:14:29-- http://heh.fi/robots.txt
|
||||
Resolving heh.fi (heh.fi)... 83.145.237.222
|
||||
Connecting to heh.fi (heh.fi)|83.145.237.222|:80... connected.
|
||||
HTTP request sent, awaiting response... 200 OK
|
||||
Length: 0 [text/plain]
|
||||
Saving to: ‘/tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/tmp/URL--http&c%%heh.fi%robots.txt’
|
||||
|
||||
[ <=> ] 0 --.-K/s in 0s
|
||||
|
||||
2014-03-27 03:14:29 (0.00 B/s) - ‘/tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/tmp/URL--http&c%%heh.fi%robots.txt’ saved [0/0]
|
||||
|
||||
|
||||
git-annex: /tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/objects/pX/ZJ/SHA256E-s0--e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855/: openTempFile: permission denied (Permission denied)
|
||||
failed
|
||||
git-annex: addurl: 1 failed
|
||||
+ fuser -k -w /tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/daemon.log
|
||||
/tmp/stress-annex.OKj6D8kVmV/annex/.git/annex/daemon.log: 30704 30709 30735 30738 30778
|
||||
+ find /tmp/stress-annex.OKj6D8kVmV -type d -exec chmod 700 {} +
|
||||
+ find /tmp/stress-annex.OKj6D8kVmV -type f -exec chmod 600 {} +
|
||||
+ rm -fr /tmp/stress-annex.OKj6D8kVmV
|
||||
"""]]
|
||||
|
||||
The script also seems to encounter another issue. The output when seq 100 is changed to seq 1 and addurl happens to succeed:
|
||||
|
||||
[[!format sh """
|
||||
+ git init
|
||||
Initialized empty Git repository in /tmp/stress-annex.QEs0pNyS9z/annex/.git/
|
||||
+ git annex init
|
||||
init ok
|
||||
(Recording state in git...)
|
||||
+ git annex direct
|
||||
commit
|
||||
On branch master
|
||||
|
||||
Initial commit
|
||||
|
||||
nothing to commit
|
||||
ok
|
||||
direct ok
|
||||
+ git annex watch
|
||||
+ seq 1
|
||||
+ git annex addurl --file=foo http://heh.fi/robots.txt
|
||||
addurl foo (downloading http://heh.fi/robots.txt ...)
|
||||
--2014-03-27 03:17:20-- http://heh.fi/robots.txt
|
||||
Resolving heh.fi (heh.fi)... 83.145.237.222
|
||||
Connecting to heh.fi (heh.fi)|83.145.237.222|:80... connected.
|
||||
HTTP request sent, awaiting response... 200 OK
|
||||
Length: 0 [text/plain]
|
||||
Saving to: ‘/tmp/stress-annex.QEs0pNyS9z/annex/.git/annex/tmp/URL--http&c%%heh.fi%robots.txt’
|
||||
|
||||
[ <=> ] 0 --.-K/s in 0s
|
||||
|
||||
2014-03-27 03:17:20 (0.00 B/s) - ‘/tmp/stress-annex.QEs0pNyS9z/annex/.git/annex/tmp/URL--http&c%%heh.fi%robots.txt’ saved [0/0]
|
||||
|
||||
(Recording state in git...)
|
||||
ok
|
||||
(Recording state in git...)
|
||||
+ git annex sync
|
||||
commit ok
|
||||
+ rm -f foo
|
||||
+ git annex sync
|
||||
commit (Recording state in git...)
|
||||
ok
|
||||
(Recording state in git...)
|
||||
+ git annex watch --stop
|
||||
+ git annex uninit
|
||||
git-annex: /tmp/stress-annex.QEs0pNyS9z/annex/.git/annex/objects/pX/ZJ/SHA256E-s0--e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855/SHA256E-s0--e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.map: removeLink: permission denied (Permission denied)
|
||||
+ fuser -k -w /tmp/stress-annex.QEs0pNyS9z/annex/.git/annex/daemon.log
|
||||
+ :
|
||||
+ find /tmp/stress-annex.QEs0pNyS9z -type d -exec chmod 700 {} +
|
||||
+ find /tmp/stress-annex.QEs0pNyS9z -type f -exec chmod 600 {} +
|
||||
+ rm -fr /tmp/stress-annex.QEs0pNyS9z
|
||||
"""]]
|
|
@ -0,0 +1,55 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://johan.kiviniemi.name/"
|
||||
nickname="Johan"
|
||||
subject="Another race condition"
|
||||
date="2014-03-31T03:42:42Z"
|
||||
content="""
|
||||
Here’s another race condition which seems related:
|
||||
|
||||
[[!format sh \"\"\"
|
||||
% git annex addurl 'quvi:http://youtu.be/-CbFj9K9AQg'
|
||||
addurl David_Raymond_Christiansen___Dependently_Typed_Programming_in_Idris___A_Demo.webm
|
||||
--2014-03-31 05:45:49-- http://r4---sn-oxc0a5-ixae.googlevideo.com/videoplayback?<cut>
|
||||
Resolving r4---sn-oxc0a5-ixae.googlevideo.com (r4---sn-oxc0a5-ixae.googlevideo.com)... 83.145.196.143, 2001:1bc8:100:1b::f
|
||||
Connecting to r4---sn-oxc0a5-ixae.googlevideo.com (r4---sn-oxc0a5-ixae.googlevideo.com)|83.145.196.143|:80... connected.
|
||||
HTTP request sent, awaiting response... 302 Found
|
||||
Location: http://r13---sn-5go7dn7s.googlevideo.com/videoplayback?<cut> [following]
|
||||
--2014-03-31 05:45:49-- http://r13---sn-5go7dn7s.googlevideo.com/videoplayback?<cut>
|
||||
Resolving r13---sn-5go7dn7s.googlevideo.com (r13---sn-5go7dn7s.googlevideo.com)... 173.194.48.18, 2a00:1450:400f::12
|
||||
Connecting to r13---sn-5go7dn7s.googlevideo.com (r13---sn-5go7dn7s.googlevideo.com)|173.194.48.18|:80... connected.
|
||||
HTTP request sent, awaiting response... 200 OK
|
||||
Length: 320557466 (306M) [video/webm]
|
||||
Saving to: ‘/home/ion/nobackup/media/video/.git/annex/tmp/URL--quvi&chttp&c%%youtu.be%-CbFj9K9AQg’
|
||||
|
||||
100%[======================================>] 320,557,466 895KB/s in 6m 14s
|
||||
|
||||
2014-03-31 05:52:04 (837 KB/s) - ‘/home/ion/nobackup/media/video/.git/annex/tmp/URL--quvi&chttp&c%%youtu.be%-CbFj9K9AQg’ saved [320557466/320557466]
|
||||
|
||||
(Recording state in git...)
|
||||
fatal: Unable to create '/home/ion/nobackup/media/video/.git/index.lock': File exists.
|
||||
|
||||
If no other git process is currently running, this probably means a
|
||||
git process crashed in this repository earlier. Make sure no other git
|
||||
process is running and remove the file manually to continue.
|
||||
|
||||
git-annex: user error (xargs [\"-0\",\"git\",\"--git-dir=/home/ion/nobackup/media/video/.git\",\"--work-tree=/home/ion/nobackup/media/video\",\"-c\",\"core.bare=false\",\"add\",\"--\"] exited 123)
|
||||
failed
|
||||
git-annex: addurl: 1 failed
|
||||
|
||||
% ls -l /home/ion/nobackup/media/video/.git/index.lock
|
||||
ls: cannot access /home/ion/nobackup/media/video/.git/index.lock: No such file or directory
|
||||
\"\"\"]]
|
||||
|
||||
The only lines in daemon.log from that time:
|
||||
|
||||
[[!format sh \"\"\"
|
||||
[2014-03-31 05:52:04 EEST] Committer: Committing changes to git
|
||||
[2014-03-31 05:52:04 EEST] Pusher: Syncing with heh.fi
|
||||
Already up-to-date.
|
||||
To heh.fi:/storage/ion/media/video
|
||||
3df241b..095d6c3 git-annex -> synced/git-annex
|
||||
10b3166..98074c1 annex/direct/master -> synced/master
|
||||
Already up-to-date.
|
||||
\"\"\"]]
|
||||
|
||||
"""]]
|
|
@ -0,0 +1,14 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://joeyh.name/"
|
||||
ip="209.250.56.244"
|
||||
subject="comment 2"
|
||||
date="2014-04-02T20:17:19Z"
|
||||
content="""
|
||||
These races look benign; as far as I can see they don't cause any data to be lost, or indeed anything to happen that wouldn't have happened if addurl had been run without the assistant running.
|
||||
|
||||
The first race probably has addurl and the assistant both trying to move the file object into the annex at the same time. One wins and moves it; the other loses and sulks.
|
||||
|
||||
The second race has addurl trying to `git add` the file, while the assistant has already noticed the file appeared, `git add`ed it, and committed the add.
|
||||
|
||||
The only way to really avoid these races would be to add a lot of lock checking. Or just make `git annex addurl` and presumably also `git annex add` and maybe several other commands refuse to run when the assistant is running.
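Until something like that exists, a crude client-side guard is possible; here is a sketch that reuses the fuser-on-daemon.log check from the stress script above to skip addurl while a daemon has the log open:

[[!format sh \"\"\"
# sketch: refuse to addurl while something (assistant/watcher) holds daemon.log open
if fuser -s .git/annex/daemon.log 2>/dev/null; then
	echo 'assistant appears to be running; not running addurl' >&2
	exit 1
fi
git annex addurl --file=foo http://heh.fi/robots.txt
\"\"\"]]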
|
||||
"""]]
|
|
@ -0,0 +1,8 @@
|
|||
[[!comment format=mdwn
|
||||
username="https://www.google.com/accounts/o8/id?id=AItOawk_QeJTzgCJj2ZC8EAJEODsLvxJ7dCOCVM"
|
||||
nickname="Sindre"
|
||||
subject="Non-ASCII Hostname fails"
|
||||
date="2014-04-02T05:36:54Z"
|
||||
content="""
|
||||
This bug still persists when specifying non-ASCII hostnames.
|
||||
"""]]
|
|
@ -0,0 +1,8 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://joeyh.name/"
|
||||
ip="209.250.56.244"
|
||||
subject="comment 9"
|
||||
date="2014-04-02T19:46:36Z"
|
||||
content="""
|
||||
Sorry, but this bug report is closed. If you think you have a bug in git-annex, file a *new* bug report with full details for how I can reproduce it.
|
||||
"""]]
|
|
@ -0,0 +1,88 @@
|
|||
As per topic, `git annex forget --drop-dead --force` does not behave as expected. Instead of getting rid of dead repositories, it actually revives them.
|
||||
|
||||
I messed up the initial setup of the S3 special remote and, instead of risking manual intervention, I figured it would be cleanest to nuke the old special remotes and use a new one instead.
|
||||
And yes, I should have used a test repo :(
|
||||
|
||||
As this is apu.debconf.org, you could get access to the repo if that helps.
|
||||
|
||||
|
||||
[[!format sh """
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % git annex info
|
||||
repository mode: indirect
|
||||
trusted repositories: 0
|
||||
semitrusted repositories: 4
|
||||
00000000-0000-0000-0000-000000000001 -- web
|
||||
070cff8a-6302-4aa7-a63c-3fdd34e598a2 -- amazon_s3_us_east--SHA512E
|
||||
0bae683f-bede-43dd-a815-c4f8fb6db32d -- aws_s3_us_east--SHA512E
|
||||
92e9fac9-97ec-401f-a421-33f6b4f43e47 -- here (apu.debconf.org/srv/video/conference_videos.annex)
|
||||
untrusted repositories: 0
|
||||
transfers in progress: none
|
||||
available local disk space: 136.3 gigabytes (+1 megabyte reserved)
|
||||
local annex keys: 4392
|
||||
local annex size: 884.64 gigabytes
|
||||
annexed files in working tree: 4628
|
||||
size of annexed files in working tree: 885.68 gigabytes
|
||||
bloom filter size: 16 mebibytes (0.9% full)
|
||||
backend usage:
|
||||
SHA512E: 9020
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % git annex dead 070cff8a-6302-4aa7-a63c-3fdd34e598a2
|
||||
dead 070cff8a-6302-4aa7-a63c-3fdd34e598a2 ok
|
||||
(Recording state in git...)
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % git annex dead 0bae683f-bede-43dd-a815-c4f8fb6db32d
|
||||
dead 0bae683f-bede-43dd-a815-c4f8fb6db32d ok
|
||||
(Recording state in git...)
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % git annex initremote amazon_aws_s3-us_east_1--SHA512E type=S3 encryption='none' embedcreds='no' fileprefix='SHA512E/' bucket='debian-video'
|
||||
initremote amazon_aws_s3-us_east_1--SHA512E (checking bucket...) git-annex: This bucket is already in use by a different S3 special remote, with UUID: 0bae683f-bede-43dd-a815-c4f8fb6db32d
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % git annex info
|
||||
repository mode: indirect
|
||||
trusted repositories: 0
|
||||
semitrusted repositories: 2
|
||||
00000000-0000-0000-0000-000000000001 -- web
|
||||
92e9fac9-97ec-401f-a421-33f6b4f43e47 -- here (apu.debconf.org/srv/video/conference_videos.annex)
|
||||
untrusted repositories: 0
|
||||
transfers in progress: none
|
||||
available local disk space: 136.3 gigabytes (+1 megabyte reserved)
|
||||
local annex keys: 4392
|
||||
local annex size: 884.64 gigabytes
|
||||
annexed files in working tree: 4628
|
||||
size of annexed files in working tree: 885.68 gigabytes
|
||||
bloom filter size: 16 mebibytes (0.9% full)
|
||||
backend usage:
|
||||
SHA512E: 9020
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % git annex forget --drop-dead --force
|
||||
forget git-annex (Recording state in git...)
|
||||
ok
|
||||
(Recording state in git...)
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % git annex info
|
||||
repository mode: indirect
|
||||
trusted repositories: 0
|
||||
semitrusted repositories: 4
|
||||
00000000-0000-0000-0000-000000000001 -- web
|
||||
070cff8a-6302-4aa7-a63c-3fdd34e598a2 -- amazon_s3_us_east--SHA512E
|
||||
0bae683f-bede-43dd-a815-c4f8fb6db32d -- aws_s3_us_east--SHA512E
|
||||
92e9fac9-97ec-401f-a421-33f6b4f43e47 -- here (apu.debconf.org/srv/video/conference_videos.annex)
|
||||
untrusted repositories: 0
|
||||
transfers in progress: none
|
||||
available local disk space: 136.3 gigabytes (+1 megabyte reserved)
|
||||
local annex keys: 4392
|
||||
local annex size: 884.64 gigabytes
|
||||
annexed files in working tree: 4628
|
||||
size of annexed files in working tree: 885.68 gigabytes
|
||||
bloom filter size: 16 mebibytes (0.9% full)
|
||||
backend usage:
|
||||
SHA512E: 9020
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % git annex version
|
||||
git-annex version: 5.20140117~bpo70+2
|
||||
build flags: Assistant Webapp Pairing S3 Inotify DBus XMPP Feeds Quvi TDFA
|
||||
key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SHA256 SHA1 SHA512 SHA224 SHA384 WORM URL
|
||||
remote types: git gcrypt S3 bup directory rsync web tahoe glacier hook external
|
||||
local repository version: 5
|
||||
supported repository version: 5
|
||||
upgrade supported from repository versions: 0 1 2 4
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net % cat /etc/issue
|
||||
Debian GNU/Linux 7 \n \l
|
||||
|
||||
richih@apu (git)-[master] /srv/video/video.debian.net %
|
||||
"""]]
|
||||
|
||||
> [[fixed|done]] via not removing from trust.log --[[Joey]]
|
|
@ -0,0 +1,12 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://joeyh.name/"
|
||||
ip="209.250.56.41"
|
||||
subject="comment 1"
|
||||
date="2014-03-26T17:19:58Z"
|
||||
content="""
|
||||
You still have git remotes configured with the uuid of the remote, so `git annex info` pulls that data in. Since --drop-dead removes all mention of the remote from the git-annex branch, including that its trust level is dead, the remaining info from the .git/config takes effect.
|
||||
|
||||
--drop-dead could `git remote rm` but that won't help other clones of the repo that also have the dead remote. Instead, I think it may make sense for --drop-dead to avoid removing the uuid from trust.log, so it will still know this remote is dead.
|
||||
|
||||
(Of course, you can easily deal with this locally by `git remote rm` yourself.)
|
||||
"""]]
|
|
@ -0,0 +1,90 @@
|
|||
[[!comment format=mdwn
|
||||
username="https://www.google.com/accounts/o8/id?id=AItOawl9sYlePmv1xK-VvjBdN-5doOa_Xw-jH4U"
|
||||
nickname="Richard"
|
||||
subject="comment 2"
|
||||
date="2014-03-26T22:39:34Z"
|
||||
content="""
|
||||
Sorry, I had to remove the fixed tag.
|
||||
|
||||
|
||||
The bug makes sense, as does your fix. I didn't even consider that this might be the cause. Still, `git remote rm` and `git annex forget --drop-dead --force` do not seem to be enough to truly get rid of the repo (and its UUID & state):
|
||||
|
||||
[[!format sh \"\"\"
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git annex info
|
||||
repository mode: indirect
|
||||
trusted repositories: 0
|
||||
semitrusted repositories: 4
|
||||
00000000-0000-0000-0000-000000000001 -- web
|
||||
070cff8a-6302-4aa7-a63c-3fdd34e598a2 -- amazon_s3_us_east--SHA512E
|
||||
0bae683f-bede-43dd-a815-c4f8fb6db32d -- aws_s3_us_east--SHA512E
|
||||
92e9fac9-97ec-401f-a421-33f6b4f43e47 -- here (apu.debconf.org/srv/video/conference_videos.annex)
|
||||
untrusted repositories: 0
|
||||
transfers in progress: none
|
||||
available local disk space: 136.3 gigabytes (+1 megabyte reserved)
|
||||
local annex keys: 4392
|
||||
local annex size: 884.64 gigabytes
|
||||
annexed files in working tree: 4628
|
||||
size of annexed files in working tree: 885.68 gigabytes
|
||||
bloom filter size: 16 mebibytes (0.9% full)
|
||||
backend usage:
|
||||
SHA512E: 9020
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git annex dead amazon_s3_us_east--SHA512E
|
||||
dead amazon_s3_us_east--SHA512E ok
|
||||
(Recording state in git...)
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git annex dead aws_s3_us_east--SHA512E
|
||||
dead aws_s3_us_east--SHA512E ok
|
||||
(Recording state in git...)
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git remote rm amazon_s3_us_east--SHA512E
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git remote rm aws_s3_us_east--SHA512E
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git annex forget --drop-dead --force
|
||||
forget git-annex (Recording state in git...)
|
||||
ok
|
||||
(Recording state in git...)
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git annex info
|
||||
repository mode: indirect
|
||||
trusted repositories: 0
|
||||
semitrusted repositories: 2
|
||||
00000000-0000-0000-0000-000000000001 -- web
|
||||
92e9fac9-97ec-401f-a421-33f6b4f43e47 -- here (apu.debconf.org/srv/video/conference_videos.annex)
|
||||
untrusted repositories: 0
|
||||
transfers in progress: none
|
||||
available local disk space: 136.3 gigabytes (+1 megabyte reserved)
|
||||
local annex keys: 4392
|
||||
local annex size: 884.64 gigabytes
|
||||
annexed files in working tree: 4628
|
||||
size of annexed files in working tree: 885.68 gigabytes
|
||||
bloom filter size: 16 mebibytes (0.9% full)
|
||||
backend usage:
|
||||
SHA512E: 9020
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git annex initremote amazon_aws_s3-us_east_1--SHA512E type=S3 encryption='none' embedcreds='no' fileprefix='SHA512E/' bucket='debian-video'
|
||||
initremote amazon_aws_s3-us_east_1--SHA512E (checking bucket...) git-annex: This bucket is already in use by a different S3 special remote, with UUID: 0bae683f-bede-43dd-a815-c4f8fb6db32d
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % cat .git/annex
|
||||
cat: .git/annex: Ist ein Verzeichnis
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % cat .git/config
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
[annex]
|
||||
uuid = 92e9fac9-97ec-401f-a421-33f6b4f43e47
|
||||
version = 5
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net % git annex info
|
||||
repository mode: indirect
|
||||
trusted repositories: 0
|
||||
semitrusted repositories: 2
|
||||
00000000-0000-0000-0000-000000000001 -- web
|
||||
92e9fac9-97ec-401f-a421-33f6b4f43e47 -- here (apu.debconf.org/srv/video/conference_videos.annex)
|
||||
untrusted repositories: 0
|
||||
transfers in progress: none
|
||||
available local disk space: 136.3 gigabytes (+1 megabyte reserved)
|
||||
local annex keys: 4392
|
||||
local annex size: 884.64 gigabytes
|
||||
annexed files in working tree: 4628
|
||||
size of annexed files in working tree: 885.68 gigabytes
|
||||
bloom filter size: 16 mebibytes (0.9% full)
|
||||
backend usage:
|
||||
SHA512E: 9020
|
||||
richih@apu [2] [0] (git)-[master] /srv/video/video.debian.net %
|
||||
\"\"\"]]
|
||||
"""]]
|
|
@ -0,0 +1,10 @@
|
|||
[[!comment format=mdwn
|
||||
username="http://joeyh.name/"
|
||||
ip="209.250.56.41"
|
||||
subject="comment 3"
|
||||
date="2014-03-27T17:40:51Z"
|
||||
content="""
|
||||
git-annex stores the uuid of an S3 remote inside the bucket for various reasons. Now that you have removed all knowledge of the remote from the repository, when you attempt to reuse the same bucket for some reason, git-annex has no way to know that this is a remote it used to use with that bucket.
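One illustrative way to confirm which UUID a bucket is claimed by, assuming the usual layout where the S3 special remote keeps an `annex-uuid` object in the bucket (possibly under the configured fileprefix) and that an S3 client such as s3cmd is set up:

[[!format sh \"\"\"
s3cmd get s3://debian-video/annex-uuid - || s3cmd get s3://debian-video/SHA512E/annex-uuid -
\"\"\"]]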
|
||||
|
||||
I think this behavior is entirely reasonable. Also, it's not what you filed the original bug report about; I fixed that bug. I'm going to re-close this.
|
||||
"""]]
|
|
@ -0,0 +1,8 @@
|
|||
[[!comment format=mdwn
|
||||
username="yasin.zaehringer"
|
||||
ip="90.218.200.128"
|
||||
subject="comment 3"
|
||||
date="2014-04-02T11:43:52Z"
|
||||
content="""
|
||||
The bug still exists. It is not possible to change the repository in the WebApp.
|
||||
"""]]
|
212
doc/bugs/git-annex_fails_to_initialize_under_Windows.mdwn
Normal file
212
doc/bugs/git-annex_fails_to_initialize_under_Windows.mdwn
Normal file
|
@ -0,0 +1,212 @@
|
|||
### Please describe the problem.
|
||||
Git-annex fails to initialize and fails tests.
|
||||
|
||||
### What steps will reproduce the problem?
|
||||
Attempted initialization:
|
||||
|
||||
C:\Users\Andrew\Documents\GitHub\git-annex-test [master]> git annex init
|
||||
init
|
||||
Detected a filesystem without fifo support.
|
||||
|
||||
Disabling ssh connection caching.
|
||||
|
||||
Detected a crippled filesystem.
|
||||
|
||||
Enabling direct mode.
|
||||
fatal: index file open failed: Invalid argument
|
||||
git-annex: git [Param "checkout",Param "-q",Param "-B",Param "annex/direct/master"] failed
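The failing call above corresponds to the plain git command below; running it by hand may help show whether git itself fails on this filesystem (sketch):

[[!format sh """
git checkout -q -B annex/direct/master
"""]]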
|
||||
|
||||
Tests:
|
||||
|
||||
C:\Users\Andrew\Documents\GitHub\git-annex-test [master]> git annex test
|
||||
Tests
|
||||
QuickCheck
|
||||
prop_idempotent_deencode_git: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_idempotent_deencode: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_idempotent_fileKey: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_idempotent_key_encode: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_idempotent_key_decode: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_idempotent_shellEscape: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_idempotent_shellEscape_multiword: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_logs_sane: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_idempotent_configEscape: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_parse_show_Config: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_parentDir_basics: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_relPathDirToFile_basics: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_relPathDirToFile_regressionTest: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_cost_sane: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_matcher_sane: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_HmacSha1WithCipher_sane: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_TimeStamp_sane: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_addLog_sane: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_verifiable_sane: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_segment_regressionTest: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_read_write_transferinfo: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_read_show_inodecache: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_parse_show_log: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_read_show_TrustLevel: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_parse_show_TrustLog: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_hashes_stable: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_schedule_roundtrips: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_duration_roundtrips: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_metadata_sane: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_metadata_serialize: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_branchView_legal: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_view_roundtrips: OK
|
||||
+++ OK, passed 1000 tests.
|
||||
prop_viewedFile_rountrips: I n i t TOeKs
|
||||
ts
|
||||
i n+i+t+: OK, passed 1000 tests.
|
||||
Unit Tests
|
||||
add sha1dup: git-annex: System.PosixCompat.User.getEffectiveUserID: not support
|
||||
ed: illegal operation
|
||||
FAIL
|
||||
init failed
|
||||
add: git-annex: System.PosixCompat.User.getEffectiveUserID: not supported: illegal operation
|
||||
FAIL
|
||||
add failed
|
||||
|
||||
2 out of 2 tests failed
|
||||
FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
add extras: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
reinject: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
unannex (no copy): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
unannex (with copy): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
drop (no remote): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
drop (with remote): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
drop (untrusted remote): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
get: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
move: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
copy: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
lock: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
edit (no pre-commit): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
edit (pre-commit): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
fix: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
trust: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
fsck (basics): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
fsck (bare): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
fsck (local untrusted): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
fsck (remote untrusted): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
migrate: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
migrate (via gitattributes): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
unused: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
describe: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
find: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
merge: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
info: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
version: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
sync: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
union merge regression: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
conflict resolution: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
conflict_resolution (mixed directory and file): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
conflict_resolution (mixed directory and file) 2: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
map: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
uninit: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
uninit (in git-annex branch): FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
upgrade: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
whereis: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
hook remote: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
directory remote: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
rsync remote: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
bup remote: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
crypto: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
preferred content: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
add subdirs: FAIL
|
||||
Exception: init tests failed! cannot continue
|
||||
|
||||
45 out of 78 tests failed
|
||||
(This could be due to a bug in git-annex, or an incompatability
|
||||
with utilities, such as git, installed on this system.)
|
||||
|
||||
### What version of git-annex are you using? On what operating system?
|
||||
C:\Users\Andrew\Documents\GitHub\git-annex-test [master]> git --version
|
||||
git version 1.8.4.msysgit.0
|
||||
C:\Users\Andrew\Documents\GitHub\git-annex-test [master]> git annex version
|
||||
git-annex version: 5.20140227-gd872677
|
||||
build flags: Assistant Webapp Pairing Testsuite S3 WebDAV DNS Feeds Quvi TDFA CryptoHash
|
||||
key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256
|
||||
SKEIN512 WORM URL
|
||||
remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier hook external
|
||||
local repository version: 5
|
||||
supported repository version: 5
|
||||
upgrade supported from repository versions: 2 3 4
|
||||
C:\Users\Andrew\Documents\GitHub\git-annex-test [master]> (Get-WmiObject -class Win32_OperatingSystem).Caption
|
||||
Microsoft Windows 8.1
|
||||
|
||||
### Please provide any additional information below.
|
||||
^^^ See above
|
|

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.41"
subject="comment 1"
date="2014-03-26T20:50:55Z"
content="""
That's a fairly old version of git-annex, so you could try upgrading.

This might happen if the shell environment you're using to run git-annex does not have either the USERPROFILE or HOME environment variable set, or does not have one of USERNAME, USER, and LOGNAME set.
"""]]
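A hedged workaround sketch for the environment issue described in the comment above; the variable values and the Git Bash setting are assumptions, not taken from the original report:

[[!format sh """
# Hypothetical sketch: make sure the variables git-annex looks for are set
# in the shell it runs from (values here are placeholders).
export HOME="$USERPROFILE"
export USERNAME="Andrew"
git annex init
"""]]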

67
doc/bugs/git_annex_test_under_windows_8.1.mdwn
Normal file
@ -0,0 +1,67 @@
### Please describe the problem.
I installed git and git annex under Windows (8.1) and ran git annex test. All tests except one passed with "ok".

### What steps will reproduce the problem?
git annex test
under Windows 8.1

### What version of git-annex are you using? On what operating system?
$ git --version
git version 1.9.0.msysgit.0

$ git annex version
git-annex version: 5.20140320-g63535e3
build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV DNS Feeds Quvi TDFA CryptoHash key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier hook external local repository version: 5 supported repository version: 5 upgrade supported from repository versions: 2 3 4

Windows 8.1

### Please provide any additional information below.

[[!format sh """
# If you can, paste a complete transcript of the problem occurring here.
# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log

prop_view_roundtrips: FAIL
*** Failed! Falsifiable (after 814 tests and 5 shrinks):
"a"
IMneitta DTaetsat s(
fr o miLniistt: [(MetaField "1\194",fromList [MetaValue (CurrentlySet True) "\r
+\231Gb\157\227\ETB\bG",MetaValue (CurrentlySet True) "\DEL\239~\243_p\DC2."]),(
MetaField "EG",fromList [MetaValue (CurrentlySet True) "",MetaValue (CurrentlySe
t True) "\v\205] .T(",MetaValue (CurrentlySet False) "\NAK\128lo\169w",MetaValue
(CurrentlySet True) "\SYN\STX\ENQ\n#u\ETXv\CANP<F)",MetaValue (CurrentlySet Fal
se) "\US\213~",MetaValue (CurrentlySet False) "K\r3\v\165\&0\RSqk#\141",MetaValu
e (CurrentlySet False) "Kx\b\231\156\220?+\216\v\146",MetaValue (CurrentlySet Tr
ue) "j.\189\150\FS3{\233S\STX\SItg",MetaValue (CurrentlySet True) "\242\248\134\
206\bal\174\135A\SI"]),(MetaField "k",fromList [MetaValue (CurrentlySet True) "\
FS\150\129\b\fhjV\DC3\203",MetaValue (CurrentlySet False) "V.&sZ\245\f\a_\227\14
0",MetaValue (CurrentlySet True) "\136r\ENQK{/\SI'\SYNN\235Q?",MetaValue (Curren
tlySet True) "\179\255\233\227v\SUB]\n8",MetaValue (CurrentlySet True) "\238S\DC
1"]),(MetaField "\179",fromList [MetaValue (CurrentlySet True) "\SOH+\ENQ",MetaV
alue (CurrentlySet True) "\ACK{\140\248I\DLEw^\\\ENQF4",MetaValue (CurrentlySet
False) "\FSc\239\r)HL\STX#V\DC1",MetaValue (CurrentlySet True) "Hc\219\146\230\1
79\207",MetaValue (CurrentlySet False) "I]",MetaValue (CurrentlySet False) "P\19
6\&0o\214\&8iH\251",MetaValue (CurrentlySet True) "`X",MetaValue (CurrentlySet F
alse) "u\DEL\DC3Q\200",MetaValue (CurrentlySet True) "\128?",MetaValue (Currentl
ySet True) "\225\135\f>\128\US~p",MetaValue (CurrentlySet False) "\250C\b\DC1\17
6\154KT\191\SOf?\SI"]),(MetaField "\225a",fromList [MetaValue (CurrentlySet True
) "",MetaValue (CurrentlySet True) "\b\ETB\b",MetaValue (CurrentlySet True) "\f\
161\FS\176h-\ta\169\t",MetaValue (CurrentlySet False) "4",MetaValue (CurrentlySe
t True) "A\FS\244V:\249kl5\ETX\SOH\SI)",MetaValue (CurrentlySet False) "Z",MetaV
alue (CurrentlySet True) "\\Lt~\235v\"\211\DLE\NAK\210",MetaValue (CurrentlySet
False) "a\SYNN",MetaValue (CurrentlySet True) "g:init test repo U5j\167G\ap-\ETX
",MetaValue (CurrentlySet False) "l\NULoW\238rD",MetaValue (CurrentlySet True) "
}\202\141\183Nxr",MetaValue (CurrentlySet False) "\170=\216S\ETB\187\SUB+!\DC3",
MetaValue (CurrentlySet True) "\240H\GS\NAK\ETB\SYNRq\153\&4\204\EOT"])])
True
Use --quickcheck-replay '13 347062936 40785707' to reproduce.
prop_viewedFile_rountrips: OK
+++ OK, passed 1000 t
e s tDse.t

# End of transcript or log.
"""]]

> A sort of windows-specific bug in the test suite. I've fixed it. [[done]]
> --[[Joey]]

41
doc/bugs/issues_with_non-posix_compatible_shells.mdwn
Normal file
@ -0,0 +1,41 @@
### Please describe the problem.
Some internals of git-annex do not check whether the shell they run in is POSIX-compatible, i.e. bash.

I am using fish, and after setting up local pairing, and working, I switched the login shell back to fish, and when syncing a file, I got this error, read from daemon.log:

fish: Unknown command 'GIT_ANNEX_SHELL_DIRECTORY=/home/s/annex'. Did you mean to run ~/.ssh/git-annex-shell with a modified environment? Try 'env GIT_ANNEX_SHELL_DIRECTORY=/home/s/annex ~/.ssh/git-annex-shell...'. See the help section on the set command by typing 'help set'.
Standard input: GIT_ANNEX_SHELL_DIRECTORY='/home/s/annex' ~/.ssh/git-annex-shell
^
fatal: Could not read from remote repository.

Please make sure you have the correct access rights
and the repository exists.

### What steps will reproduce the problem?
Set up local pairing (I believe having sh/bash as the login shell is necessary for this).
Switch back to fish as the login shell with chsh -s /usr/bin/fish
Add a file to either repository.

### What version of git-annex are you using? On what operating system?
[s@b ~]$ git annex version
git-annex version: 5.20140320-g63535e3
build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus XMPP DNS Feeds Quvi TDFA CryptoHash
key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier hook external

### Please provide any additional information below.

[[!format sh """
# If you can, paste a complete transcript of the problem occurring here.
# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log


# End of transcript or log.
"""]]

> [[fixed|done]] so
>
> I have not tried to make the assistant go back and fix up existing
> `authorized_keys` lines. So if someone had been using a posix shell and
> switched to fish, they'll hit this and need to fix it themselves. I judge
> this is a pretty small number of users. --[[Joey]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://johan.kiviniemi.name/"
nickname="Johan"
subject="comment 1"
date="2014-04-02T08:40:17Z"
content="""
FWIW, `env foo=bar cmd` would probably work on every system on which the `foo=bar cmd` invocation works now.
"""]]
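To make the distinction in the comment above concrete, here is an illustrative sketch reusing the path from the daemon.log error; the prefix-assignment form is a POSIX sh feature that fish rejects, while spawning through env works in either shell:

[[!format sh """
# POSIX-style prefix assignment; fish does not understand this form:
GIT_ANNEX_SHELL_DIRECTORY='/home/s/annex' ~/.ssh/git-annex-shell
# Portable alternative, as suggested in the comment above:
env GIT_ANNEX_SHELL_DIRECTORY='/home/s/annex' ~/.ssh/git-annex-shell
"""]]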

@ -0,0 +1,29 @@
### Please describe the problem.
Filenames for the *include* statement for [preferred content](http://git-annex.branchable.com/preferred_content/) cannot contain spaces.

### What steps will reproduce the problem?

* Create an annex repo
* Run `git annex vicfg`
* Enter expression *include='pictures/dir with spaces'*
* git annex complains: # ** Parse error in next line: Parse failure: near "with" Parse failure: near "spaces'"

(The *'* is interpreted as part of the filepath.)

### What version of git-annex are you using? On what operating system?

I am using the current binaries from Debian stable amd64.

<pre>
git-annex version: 5.20140320~bpo70+1
build flags: Assistant Webapp Pairing S3 Inotify DBus XMPP Feeds Quvi TDFA
key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SHA256 SHA1 SHA512 SHA224 SHA384 WORM URL
remote types: git gcrypt S3 bup directory rsync web tahoe glacier hook external
local repository version: 5
supported repository version: 5
upgrade supported from repository versions: 0 1 2 4
</pre>

### Please provide any additional information below.

The only workaround I found is to use a glob for the filepath which only works for the first space: *include='pictures/dir\*'*.

@ -0,0 +1,12 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 1"
date="2014-04-02T18:45:36Z"
content="""
A better workaround is:

include='pictures/dir?with?spaces'

Tokenizing text with embedded quotes is a bit of a PITA, certainly doable I suppose..
"""]]

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://ypid.wordpress.com/"
ip="213.153.84.215"
subject="comment 2"
date="2014-04-02T21:29:54Z"
content="""
Thanks for your workaround ... Works for me after removing the single quote signs.

With this workaround at hand one can include/match any filename. Works for me ;)
"""]]
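A hedged sketch of the workaround discussed above; the `git annex wanted` invocation, the `here` repository argument, and the path are assumptions for illustration, not taken from the report:

[[!format sh """
# Assumed usage: set the preferred content expression non-interactively,
# using ? in place of each space so no quoting is needed in the expression.
git annex wanted here 'include=pictures/dir?with?spaces'
"""]]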

67
doc/bugs/problem_to_addurl_--file_with_ftp.mdwn
Normal file
@ -0,0 +1,67 @@
### Please describe the problem.
I want to addurl using the ftp protocol.
`git annex addurl ftp://...` works fine, but `git annex addurl --file` fails with an error "failed to verify url exists".

### What steps will reproduce the problem?

setting up a new repo

% alias ga
ga=/home/applis/git-annex.linux/git-annex
% ga init
init ok
(Recording state in git...)

addurl --file works with http

% wget http://downloads.kitenet.net/git-annex/linux/current/git-annex-standalone-amd64.tar.gz
[...]
2014-03-27 15:25:06 (10,1 MB/s) - ‘git-annex-standalone-amd64.tar.gz’ saved [30689438/30689438]
% ga add git-annex-standalone-amd64.tar.gz
add git-annex-standalone-amd64.tar.gz ok
(Recording state in git...)
% ga addurl http://downloads.kitenet.net/git-annex/linux/current/git-annex-standalone-amd64.tar.gz --file git-annex-standalone-amd64.tar.gz
addurl git-annex-standalone-amd64.tar.gz ok
(Recording state in git...)

addurl works with ftp:

% ga addurl ftp://ftp.belnet.be/debian-cd/7.4.0-live/i386/iso-hybrid/debian-live-7.4-i386-lxde-desktop.iso.log
addurl ftp.belnet.be_debian_cd_7.4.0_live_i386_iso_hybrid_debian_live_7.4_i386_lxde_desktop.iso.log (downloading ftp://ftp.belnet.be/debian-cd/7.4.0-live/i386/iso-hybrid/debian-live-7.4-i386-lxde-desktop.iso.log ...)
[...]
2014-03-27 15:27:47 (11,1 MB/s) - ‘/data/annex/.git/annex/tmp/URL--ftp&c%%ftp.belnet.be%debian-cd%7.4.0-live%i386%iso-hybrid%debian-live-7.4-i386-lxde-desktop.iso.log’ saved [1235181]
ok
(Recording state in git...)

addurl --file doesn't work with ftp

% wget ftp://ftp.belnet.be/debian-cd/7.4.0-live/i386/iso-hybrid/debian-live-7.4-i386-standard.iso.zsync
[...]
2014-03-27 15:29:32 (19,4 MB/s) - ‘debian-live-7.4-i386-standard.iso.zsync’ saved [1932014]
% ga add debian-live-7.4-i386-standard.iso.zsync
add debian-live-7.4-i386-standard.iso.zsync ok
(Recording state in git...)
% ga addurl ftp://ftp.belnet.be/debian-cd/7.4.0-live/i386/iso-hybrid/debian-live-7.4-i386-standard.iso.zsync --file debian-live-7.4-i386-standard.iso.zsync
addurl debian-live-7.4-i386-standard.iso.zsync
failed to verify url exists: ftp://ftp.belnet.be/debian-cd/7.4.0-live/i386/iso-hybrid/debian-live-7.4-i386-standard.iso.zsync
failed
git-annex: addurl: 1 failed

### What version of git-annex are you using? On what operating system?

I am using the current git-annex Linux binary on Fedora 19.

% which git ; git --version
/usr/bin/git
git version 1.8.3.1
% which ga ; ga version
ga=/home/applis/git-annex.linux/git-annex
git-annex version: 5.20140320-g63535e3
build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus XMPP DNS Feeds Quvi TDFA CryptoHash
key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier hook external
local repository version: 5
supported repository version: 5
upgrade supported from repository versions: 0 1 2 4

> [[done]] --[[Joey]]
@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.41"
subject="comment 1"
date="2014-03-27T17:27:58Z"
content="""
--file does not change git-annex addurl's network communication in any way. I think this ftp server is sometimes working, and other times failing. It seems to be returning a 350 result code. The FTP spec is not clear what that means, but it does not seem to indicate success.
"""]]

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawmdbVIGiDH8KarAGAy8y2FHJD_F990JzXI"
nickname="François"
subject="comment 2"
date="2014-04-01T08:55:29Z"
content="""
Ok, it works when adding the --relaxed option. For some reason there seems to be a problem when checking the file size.
"""]]
@ -0,0 +1,22 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 3"
date="2014-04-02T19:18:19Z"
content="""
Yes, --relaxed bypasses the code that uses curl to check the file size.

Ok, I have figured out what's going on.

git annex addurl --file foo ftp://host # this succeeds

run the command a second time, and it fails. Why? Because the file is already present in the annex, and you are running addurl in a different mode. In this mode, it is adding a *new* url to the file in the annex. (In this particular case, the new and old url are the same, but it's possible to see this bug in cases where they are not, too.)

As a sanity check, when adding a new url to an existing file, git-annex wants to check that the new url has the same size as the file. Otherwise it surely has different content. However, as I noted this ftp server is returning a weird 350 response when curl is used to try to get the size of the url. So that fails, and git-annex cannot add the new url to the file. Which would be pointless in this case anyway, since it's the same as the old url.

So, I can fix two things. I can make it detect when the url it's adding to an existing file in the annex is already a known url of that file, and skip doing anything in this case since it would be a no-op anyway. Done that.

And, I can improve the error message so the user is not confused about what they're asking git-annex to do, and why it's unable to. Fixed that.

This leaves the question of why curl sees a 350 code from this ftp server. But since it doesn't cause problems when using addurl, with or without --file to download the file from it, I think it's best to punt on that one.
"""]]
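A sketch of the two addurl modes described in the comment above; `ftp://host` and `foo` are the placeholders from that comment, not a real server, and `--relaxed` is the workaround reported in comment 2:

[[!format sh """
# 1st run: downloads the url and records it for foo.
git annex addurl --file foo ftp://host
# 2nd run: foo is already annexed, so this registers an extra url for its
# content, which triggers the curl size check that this ftp server breaks.
git annex addurl --file foo ftp://host
# --relaxed skips the size check entirely.
git annex addurl --relaxed --file foo ftp://host
"""]]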

65
doc/bugs/problems_with_glacier.mdwn
Normal file
@ -0,0 +1,65 @@
### Please describe the problem.
Annex errors when copying to glacier.

### What version of git-annex are you using? On what operating system?

OS X 10.9.2 Build 13C64

git-annex version: 5.20140318-gdcf93d0
build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV FsEvents XMPP DNS Feeds Quvi TDFA CryptoHash
key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier hook external
local repository version: 5
supported repository version: 5
upgrade supported from repository versions: 0 1 2 4

### Please provide any additional information below.

[[!format sh """
# If you can, paste a complete transcript of the problem occurring here.
# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log

> git annex initremote glacier type=glacier encryption=hybrid keyid=E9053BDA datacenter=us-west-1 ║██████████╠ ∞ ∞
initremote glacier (encryption setup) (hybrid cipher with gpg key B608B8F6E9053BDA) ok
(Recording state in git...)
> git annex copy Cobalt\ Strike\ Tradecraft --to=glacier --debug
[2014-03-27 07:27:39 PDT] read: git ["--git-dir=/Users/akraut/Desktop/annexes/media/.git","--work-tree=/Users/akraut/Desktop/annexes/media","show-ref","git-annex"]
[2014-03-27 07:27:39 PDT] read: git ["--git-dir=/Users/akraut/Desktop/annexes/media/.git","--work-tree=/Users/akraut/Desktop/annexes/media","show-ref","--hash","refs/heads/git-annex"]
[2014-03-27 07:27:39 PDT] read: git ["--git-dir=/Users/akraut/Desktop/annexes/media/.git","--work-tree=/Users/akraut/Desktop/annexes/media","log","refs/heads/git-annex..9f59057d857784e6ae6b3dcd6793092264375913","--oneline","-n1"]
[2014-03-27 07:27:39 PDT] chat: git ["--git-dir=/Users/akraut/Desktop/annexes/media/.git","--work-tree=/Users/akraut/Desktop/annexes/media","cat-file","--batch"]
[2014-03-27 07:27:39 PDT] read: git ["config","--null","--list"]
[2014-03-27 07:27:39 PDT] read: git ["--git-dir=/Users/akraut/Desktop/annexes/media/.git","--work-tree=/Users/akraut/Desktop/annexes/media","ls-files","--cached","-z","--","Cobalt Strike Tradecraft"]
copy Cobalt Strike Tradecraft/Tradecraft__1_of_9____Introduction.mp4 (gpg) [2014-03-27 07:27:39 PDT] chat: gpg ["--quiet","--trust-model","always","--decrypt"]

You need a passphrase to unlock the secret key for
user: "Andrew Mark Kraut <akraut@gmail.com>"
4096-bit ELG-E key, ID 353E49B9, created 2008-11-11 (main key ID E9053BDA)

(checking glacier...) [2014-03-27 07:27:46 PDT] read: glacier ["--region=us-west-1","archive","checkpresent","git-annex: Maybe.fromJust: Nothing

# End of transcript or log.
"""]]

> This was a bug introduced last month; it forgot to record the
> datacenter and vault used when initializing the glacier repository.
>
> I've fixed the bug, but this does not fix repositories created with
> the broken version. I considered just making it use the default
> datacenter and vault for such a repository, but
> a) those may change in the future
> and I don't want to have to worry about breaking such a repository
> going forward and b) someone may have overridden it to use another
> datacenter or vault name and so it shouldn't blindly assume the defaults.
>
> Instead, there's a manual fix-up step you need to do. Luckily quite easy.
> For example:
>
> git annex enableremote myglacier datacenter=us-east-1 vault=myglacier-fae9be57-8eb4-47af-932f-136b9b40e669
>
> The default datacenter is us-east-1, and the default vault name is
> "$remotename-$uuid". So you just have to tell it these values
> once with an enableremote command, and it will then work.

> You don't even need to get the fixed version of git-annex to work
> around the bug this way. Although it does have better error messages
> too. [[fixed|done]] --[[Joey]]
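As a concrete illustration of the fix-up step described above, using the datacenter from this report's transcript and the vault name that appears in the comment below; your own remote name and values will differ:

[[!format sh """
# Supply the missing configuration once; here the remote is named "glacier",
# the datacenter is the one given at initremote time, and the vault follows
# the "$remotename-$uuid" pattern shown in the comment below.
git annex enableremote glacier datacenter=us-west-1 vault=glacier-571d1ec3-8870-46cb-977e-15830a2b474d
"""]]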
@ -0,0 +1,12 @@
[[!comment format=mdwn
username="https://www.google.com/accounts/o8/id?id=AItOawmRFKwny4rArBaz-36xTcsJYqKIgdDaw5Q"
nickname="Andrew"
subject="comment 1"
date="2014-03-27T14:46:19Z"
content="""
I just updated to the latest glacier-cli and boto and have confirmed that those are working properly and that the vault has been created on glacier:
[[!format sh \"\"\"
> glacier --region=us-west-1 vault list
glacier-571d1ec3-8870-46cb-977e-15830a2b474d
\"\"\"]]
"""]]

90
doc/bugs/set_metadata_on_wrong_files.mdwn
Normal file
@ -0,0 +1,90 @@
### Please describe the problem.

As an example, I wanted to add different metadata to some test files,
but the outcome is that the last metadata gets applied to all three files. See the transcript below.

### What steps will reproduce the problem?

1. Create a git annex repository
2. add a few files
3. add some metadata to the files, same keys, different values
4. watch the metadata, only the last added one is shown for all files

### What version of git-annex are you using? On what operating system?
$cat /etc/debian_version; uname -a; git annex version
7.4
Linux jupiter 3.13.0ct #33 SMP PREEMPT Tue Jan 21 05:04:01 CET 2014 x86_64 GNU/Linux
git-annex version: 5.20140306~bpo70+1
build flags: Assistant Webapp Pairing S3 Inotify DBus XMPP Feeds Quvi TDFA
key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SHA256 SHA1 SHA512 SHA224 SHA384 WORM URL
remote types: git gcrypt S3 bup directory rsync web tahoe glacier hook external
local repository version: 5
supported repository version: 5
upgrade supported from repository versions: 0 1 2 4

### Please provide any additional information below.

Debian/Wheezy with git annex from backports. The test was done in /tmp which is a tmpfs.

[[!format sh """
$export LC_ALL=C
$mkdir /tmp/annextest
$cd /tmp/annextest
$git init
Initialized empty Git repository in /tmp/annextest/.git/
$git annex init
init ok
(Recording state in git...)

$touch a.txt b.txt c.txt
$git annex add a.txt b.txt c.txt
add a.txt ok
add b.txt ok
add c.txt ok
(Recording state in git...)
$git commit -m init
[master (root-commit) 5470bdb] init
3 files changed, 3 insertions(+)
create mode 120000 a.txt
create mode 120000 b.txt
create mode 120000 c.txt

$git annex metadata a.txt -s foo=bar -s num=1
metadata a.txt
foo=bar
num=1
ok
(Recording state in git...)
$git annex metadata b.txt -s foo=baz -s num=2
metadata b.txt
foo=baz
num=2
ok
(Recording state in git...)
$git annex metadata c.txt -s foo=barf -s num=3
metadata c.txt
foo=barf
num=3
ok
(Recording state in git...)
$git annex metadata
metadata a.txt
foo=barf
num=3
ok
metadata b.txt
foo=barf
num=3
ok
metadata c.txt
foo=barf
num=3
ok
"""]]

> [[fixed|done]]; documentation improved --[[Joey]]
@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.41"
subject="comment 1"
date="2014-03-26T20:55:07Z"
content="""
This is because metadata applies to the contents of files, and all 3 of your files have the same single content. I will update the documentation to make it clearer that metadata works this way.
"""]]
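A hedged sketch of the point made in the comment above, reusing the filenames from the report: empty files all share one content key and therefore share metadata, while files with distinct content get distinct keys and keep their metadata separate.

[[!format sh """
# Give each file different content so each gets its own key (and metadata).
echo a > a.txt; echo b > b.txt; echo c > c.txt
git annex add a.txt b.txt c.txt
git annex metadata a.txt -s num=1
git annex metadata b.txt -s num=2
git annex metadata   # now a.txt shows num=1 and b.txt shows num=2
"""]]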

@ -0,0 +1,10 @@
### Please describe the problem.
git annex webapp created SSH keys for a remote directory. The SSH keys have the wrong permissions (chmod 600 fixed it), and thus key-based login to the remote fails.

### What steps will reproduce the problem?
Creating a SSH remote using the git annex webapp.

### What version of git-annex are you using? On what operating system?
'current linux build for amd64', downloaded Mar 6th 2014, on Ubuntu 12.04.4 LTS.

> [[done]]
@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://joeyh.name/"
ip="209.250.56.244"
subject="comment 1"
date="2014-04-02T19:53:04Z"
content="""
This is a duplicate of [[bugs/ssh:_unprotected_private_key_file]], which was fixed on March 14th.
"""]]

@ -30,3 +30,6 @@ refuse to do so.

With N=2, in order to drop the file content from Laptop, it would need access
to both USB and Server.

For more complicated requirements about which repositories contain which
content, see [[required_content]].

@ -6,7 +6,7 @@ locally paired systems, and remote servers with rsync.
Help me prioritize my work: What special remote would you most like
to use with the git-annex assistant?

[[!poll open=yes 16 "Amazon S3 (done)" 12 "Amazon Glacier (done)" 9 "Box.com (done)" 71 "My phone (or MP3 player)" 25 "Tahoe-LAFS" 10 "OpenStack SWIFT" 31 "Google Drive"]]
[[!poll open=yes 16 "Amazon S3 (done)" 12 "Amazon Glacier (done)" 9 "Box.com (done)" 71 "My phone (or MP3 player)" 25 "Tahoe-LAFS" 10 "OpenStack SWIFT" 33 "Google Drive"]]

This poll is ordered with the options I consider easiest to build
listed first. Mostly because git-annex already supports them and they

@ -11,6 +11,9 @@ git-annex (assistant) repositories.
* Rapid development, situation may change in a month or 2.
* Is it secure? A security review should be done by competant people
(not Joey). See <https://github.com/telehash/telehash.org/issues/23>
* **Haskell version**
<https://github.com/alanz/htelehash/tree/v2/src/TeleHash>
Development on v2 in haskell is just starting up!

## implementation basics


@ -10,8 +10,8 @@ Now in the
* Month 4 [[!traillink assistant/windows text="Windows webapp"]], Linux arm, [[!traillink todo/support_for_writing_external_special_remotes]]
* Month 5 user-driven features and polishing
* Month 6 get Windows out of beta, [[!traillink design/metadata text="metadata and views"]]
* **Month 7 user-driven features and polishing**
* Month 8 [[!traillink assistant/telehash]]
* Month 7 user-driven features and polishing
* **Month 8 [[!traillink assistant/telehash]]**
* Month 9 [[!traillink assistant/gpgkeys]] [[!traillink assistant/sshpassword]]
* Month 10 get [[assistant/Android]] out of beta
* Month 11 [[!traillink assistant/chunks]] [[!traillink assistant/deltas]]

@ -0,0 +1,9 @@
[[!comment format=mdwn
username="stp"
ip="188.193.207.34"
subject="Any update on cleaning up commands?"
date="2014-03-21T16:32:43Z"
content="""
Is there any update on cleaning up object/file references to objects/content not at all present and lost? I would love my git annex fsck --all to show current failures and not these old files all the time.
Thanks
"""]]

17
doc/devblog/day_139-140__traveling.mdwn
Normal file
@ -0,0 +1,17 @@
Yesterday coded up one nice improvement on the plane -- `git annex unannex`
(and `uninit`) is now tons faster. Before, it did a git commit after every
file processed; now there's just 1 commit at the end. This required using
some locking to prevent the `pre-commit` hook from running in a confusing
state.

Today: LibrePlanet and a surprising amount of development. I've
added [[tips/file_manager_integration]], only for Nautilus so far.
The main part of this was adding --notify-start and --notify-finish, which
use dbus desktop notifications to provide feedback.

(Made possible thanks to Max Rabkin for updating
[fdo-notify](http://hackage.haskell.org/package/fdo-notify) to use the
new dbus library, and ion for developing the initial Nautilus integration
scripts.)

Today's work and LibrePlanet visit was sponsored by Jürgen Lüters.
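A hedged usage sketch of the --notify-start and --notify-finish options mentioned in the post above; the file name and the choice of `get` are placeholders for illustration:

[[!format sh """
# Pop up a desktop notification when this transfer starts and when it finishes.
git annex get --notify-start --notify-finish somefile
"""]]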

3
doc/devblog/day_141__f-droid_sprint.mdwn
Normal file
@ -0,0 +1,3 @@
Attended the f-droid sprint at LibrePlanet, and have been getting a
handle on how their build server works with an eye toward adding git-annex
to it. Not entirely successful getting vagrant to build an image yet.
Some files were not shown because too many files have changed in this diff.