finally merge the assistant into master

Progress bars still need to be done; otherwise it's fully working,
although much work remains to hit all the use cases.
Joey Hess 2012-08-27 14:49:09 -04:00
commit c1adde5294
157 changed files with 21888 additions and 718 deletions

1
.gitignore vendored
View file

@ -12,6 +12,7 @@ html
*.tix
.hpc
Utility/Touch.hs
Utility/Mounts.hs
Utility/*.o
dist
# Sandboxed builds

View file

@ -25,7 +25,6 @@ module Annex.Branch (
import qualified Data.ByteString.Lazy.Char8 as L
import Common.Annex
import Annex.Exception
import Annex.BranchState
import Annex.Journal
import qualified Git
@ -37,9 +36,9 @@ import qualified Git.UpdateIndex
import Git.HashObject
import Git.Types
import Git.FilePath
import qualified Git.Index
import Annex.CatFile
import Annex.Perms
import qualified Annex
{- Name of the branch that is used to store git-annex's information. -}
name :: Git.Ref
@ -90,10 +89,10 @@ getBranch = maybe (hasOrigin >>= go >>= use) return =<< branchsha
- called before data is read from it. Runs only once per git-annex run.
-}
update :: Annex ()
update = runUpdateOnce $ updateTo =<< siblingBranches
update = runUpdateOnce $ void $ updateTo =<< siblingBranches
{- Forces an update even if one has already been run. -}
forceUpdate :: Annex ()
forceUpdate :: Annex Bool
forceUpdate = updateTo =<< siblingBranches
{- Merges the specified Refs into the index, if they have any changes not
@ -111,8 +110,10 @@ forceUpdate = updateTo =<< siblingBranches
-
- The branch is fast-forwarded if possible, otherwise a merge commit is
- made.
-
- Returns True if any refs were merged in, False otherwise.
-}
updateTo :: [(Git.Ref, Git.Branch)] -> Annex ()
updateTo :: [(Git.Ref, Git.Branch)] -> Annex Bool
updateTo pairs = do
-- ensure branch exists, and get its current ref
branchref <- getBranch
@ -139,6 +140,7 @@ updateTo pairs = do
else commitBranch branchref merge_desc
(nub $ fullname:refs)
invalidateCache
return $ not $ null refs
where
isnewer (r, _) = inRepo $ Git.Branch.changed fullname r
@ -277,12 +279,18 @@ withIndex = withIndex' False
withIndex' :: Bool -> Annex a -> Annex a
withIndex' bootstrapping a = do
f <- fromRepo gitAnnexIndex
bracketIO (Git.Index.override f) id $ do
checkIndexOnce $ unlessM (liftIO $ doesFileExist f) $ do
unless bootstrapping create
liftIO $ createDirectoryIfMissing True $ takeDirectory f
unless bootstrapping $ inRepo genIndex
a
g <- gitRepo
let g' = g { gitEnv = Just [("GIT_INDEX_FILE", f)] }
Annex.changeState $ \s -> s { Annex.repo = g' }
checkIndexOnce $ unlessM (liftIO $ doesFileExist f) $ do
unless bootstrapping create
liftIO $ createDirectoryIfMissing True $ takeDirectory f
unless bootstrapping $ inRepo genIndex
r <- a
Annex.changeState $ \s -> s { Annex.repo = (Annex.repo s) { gitEnv = gitEnv g} }
return r
{- Runs an action using the branch's index file, first making sure that
- the branch and index are up-to-date. -}
@ -335,12 +343,13 @@ stageJournal :: Annex ()
stageJournal = do
showStoringStateAction
fs <- getJournalFiles
g <- gitRepo
withIndex $ liftIO $ do
h <- hashObjectStart g
Git.UpdateIndex.streamUpdateIndex g
[genstream (gitAnnexJournalDir g) h fs]
hashObjectStop h
withIndex $ do
g <- gitRepo
liftIO $ do
h <- hashObjectStart g
Git.UpdateIndex.streamUpdateIndex g
[genstream (gitAnnexJournalDir g) h fs]
hashObjectStop h
where
genstream dir h fs streamer = forM_ fs $ \file -> do
let path = dir </> file

View file

@ -32,8 +32,10 @@ configkey = annexConfig "uuid"
{- Generates a UUID. There is a library for this, but it's not packaged,
- so use the command line tool. -}
genUUID :: IO UUID
genUUID = pOpen ReadFromPipe command params $ liftM toUUID . hGetLine
genUUID = gen . lines <$> readProcess command params
where
gen [] = error $ "no output from " ++ command
gen (l:_) = toUUID l
command = SysConfig.uuid
params
-- request a random uuid be generated

View file

@ -10,7 +10,7 @@
- The initial thread run, double forks to background, starts other
- threads, and then stops, waiting for them to terminate,
- or for a ctrl-c.
- Thread 2: watcher
- Thread 2: Watcher
- Notices new files, and calls handlers for events, queuing changes.
- Thread 3: inotify internal
- Used by haskell inotify library to ensure inotify event buffer is
@ -19,67 +19,166 @@
- Scans the tree and registers inotify watches for each directory.
- A MVar lock is used to prevent other inotify handlers from running
- until this is complete.
- Thread 5: committer
- Thread 5: Committer
- Waits for changes to occur, and runs the git queue to update its
- index, then commits.
- Thread 6: status logger
- index, then commits. Also queues Transfer events to send added
- files to other remotes.
- Thread 6: Pusher
- Waits for commits to be made, and pushes updated branches to remotes,
- in parallel. (Forks a process for each git push.)
- Thread 7: PushRetryer
- Runs every 30 minutes when there are failed pushes, and retries
- them.
- Thread 8: Merger
- Waits for pushes to be received from remotes, and merges the
- updated branches into the current branch.
- (This uses inotify on .git/refs/heads, so there are additional
- inotify threads associated with it, too.)
- Thread 9: TransferWatcher
- Watches for transfer information files being created and removed,
- and maintains the DaemonStatus currentTransfers map.
- (This uses inotify on .git/annex/transfer/, so there are
- additional inotify threads associated with it, too.)
- Thread 10: Transferrer
- Waits for Transfers to be queued and does them.
- Thread 11: StatusLogger
- Wakes up periodically and records the daemon's status to disk.
- Thread 7: sanity checker
- Thread 12: SanityChecker
- Wakes up periodically (rarely) and does sanity checks.
- Thread 13: MountWatcher
- Either uses dbus to watch for drive mount events, or, when
- there's no dbus, polls to find newly mounted filesystems.
- Once a filesystem that contains a remote is mounted, updates
- state about that remote, pulls from it, and queues a push to it,
- as well as an update, and queues it onto the
- ConnectedRemoteChan
- Thread 14: NetWatcher
- Deals with network connection interruptions, which would cause
- transfers to fail, and can be recovered from by waiting for a
- network connection, and syncing with all network remotes.
- Uses dbus to watch for network connections, or when dbus
- cannot be used, assumes there's been one every 30 minutes.
- Thread 15: TransferScanner
- Does potentially expensive checks to find data that needs to be
- transferred from or to remotes, and queues Transfers.
- Uses the ScanRemotes map.
- Thread 16: WebApp
- Spawns more threads as necessary to handle clients.
- Displays the DaemonStatus.
-
- ThreadState: (MVar)
- The Annex state is stored here, which allows resuscitating the
- Annex monad in IO actions run by the inotify and committer
- Annex monad in IO actions run by the watcher and committer
- threads. Thus, a single state is shared among the threads, and
- only one at a time can access it.
- DaemonStatusHandle: (MVar)
- The daemon's current status. This MVar should only be manipulated
- from inside the Annex monad, which ensures it's accessed only
- after the ThreadState MVar.
- DaemonStatusHandle: (STM TMVar)
- The daemon's current status.
- ChangeChan: (STM TChan)
- Changes are indicated by writing to this channel. The committer
- reads from it.
- CommitChan: (STM TChan)
- Commits are indicated by writing to this channel. The pusher reads
- from it.
- FailedPushMap (STM TMVar)
- Failed pushes are indicated by writing to this TMVar. The push
- retrier blocks until they're available.
- TransferQueue (STM TChan)
- Transfers to make are indicated by writing to this channel.
- TransferSlots (QSemN)
- Count of the number of currently available transfer slots.
- Updated by the transfer watcher, this allows other threads
- to block until a slot is available.
- This MVar should only be manipulated from inside the Annex monad,
- which ensures it's accessed only after the ThreadState MVar.
- ScanRemotes (STM TMVar)
- Remotes that have been disconnected, and should be scanned
- are indicated by writing to this TMVar.
-}
{-# LANGUAGE CPP #-}
module Assistant where
import Common.Annex
import Assistant.Common
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.Changes
import Assistant.Watcher
import Assistant.Committer
import Assistant.SanityChecker
import Assistant.Commits
import Assistant.Pushes
import Assistant.ScanRemotes
import Assistant.TransferQueue
import Assistant.TransferSlots
import Assistant.Threads.Watcher
import Assistant.Threads.Committer
import Assistant.Threads.Pusher
import Assistant.Threads.Merger
import Assistant.Threads.TransferWatcher
import Assistant.Threads.Transferrer
import Assistant.Threads.SanityChecker
import Assistant.Threads.MountWatcher
import Assistant.Threads.NetWatcher
import Assistant.Threads.TransferScanner
#ifdef WITH_WEBAPP
import Assistant.Threads.WebApp
#else
#warning Building without the webapp. You probably need to install Yesod..
#endif
import qualified Utility.Daemon
import Utility.LogFile
import Utility.ThreadScheduler
import Control.Concurrent
startDaemon :: Bool -> Annex ()
startDaemon foreground
stopDaemon :: Annex ()
stopDaemon = liftIO . Utility.Daemon.stopDaemon =<< fromRepo gitAnnexPidFile
startDaemon :: Bool -> Bool -> Maybe (Url -> FilePath -> IO ()) -> Annex ()
startDaemon assistant foreground webappwaiter
| foreground = do
showStart "watch" "."
showStart (if assistant then "assistant" else "watch") "."
liftIO . Utility.Daemon.lockPidFile =<< fromRepo gitAnnexPidFile
go id
| otherwise = do
logfd <- liftIO . openLog =<< fromRepo gitAnnexLogFile
pidfile <- fromRepo gitAnnexPidFile
go $ Utility.Daemon.daemonize logfd (Just pidfile) False
where
go a = withThreadState $ \st -> do
checkCanWatch
dstatus <- startDaemonStatus
liftIO $ a $ do
changechan <- newChangeChan
-- The commit thread is started early,
-- so that the user can immediately
-- begin adding files and having them
-- committed, even while the startup scan
-- is taking place.
_ <- forkIO $ commitThread st changechan
_ <- forkIO $ daemonStatusThread st dstatus
_ <- forkIO $ sanityCheckerThread st dstatus changechan
-- Does not return.
watchThread st dstatus changechan
go d = startAssistant assistant d webappwaiter
stopDaemon :: Annex ()
stopDaemon = liftIO . Utility.Daemon.stopDaemon =<< fromRepo gitAnnexPidFile
startAssistant :: Bool -> (IO () -> IO ()) -> Maybe (Url -> FilePath -> IO ()) -> Annex ()
startAssistant assistant daemonize webappwaiter = do
withThreadState $ \st -> do
checkCanWatch
dstatus <- startDaemonStatus
liftIO $ daemonize $ run dstatus st
where
run dstatus st = do
changechan <- newChangeChan
commitchan <- newCommitChan
pushmap <- newFailedPushMap
transferqueue <- newTransferQueue
transferslots <- newTransferSlots
scanremotes <- newScanRemoteMap
mapM_ startthread
[ watch $ commitThread st changechan commitchan transferqueue dstatus
#ifdef WITH_WEBAPP
, assist $ webAppThread (Just st) dstatus scanremotes transferqueue transferslots Nothing webappwaiter
#endif
, assist $ pushThread st dstatus commitchan pushmap
, assist $ pushRetryThread st dstatus pushmap
, assist $ mergeThread st
, assist $ transferWatcherThread st dstatus
, assist $ transfererThread st dstatus transferqueue transferslots
, assist $ daemonStatusThread st dstatus
, assist $ sanityCheckerThread st dstatus transferqueue changechan
, assist $ mountWatcherThread st dstatus scanremotes
, assist $ netWatcherThread st dstatus scanremotes
, assist $ transferScannerThread st dstatus scanremotes transferqueue
, watch $ watchThread st dstatus transferqueue changechan
]
waitForTermination
watch a = (True, a)
assist a = (False, a)
startthread (watcher, a)
| watcher || assistant = void $ forkIO a
| otherwise = noop
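
The module comment above describes TransferSlots as a QSemN counting currently available transfer slots, but Assistant.TransferSlots itself is not part of this excerpt. A minimal sketch of how such a slot pool could look, assuming a fixed slot count; numSlots and inTransferSlot are illustrative names, not necessarily the commit's actual API:

-- Sketch only; not the actual Assistant.TransferSlots from this commit.
import Control.Concurrent.QSemN
import Control.Exception (bracket_)

type TransferSlots = QSemN

-- Illustrative: allow one transfer at a time.
numSlots :: Int
numSlots = 1

newTransferSlots :: IO TransferSlots
newTransferSlots = newQSemN numSlots

-- Block until a slot is free, run the transfer, and always release
-- the slot afterwards, even if the transfer action throws.
inTransferSlot :: TransferSlots -> IO a -> IO a
inTransferSlot s = bracket_ (waitQSemN s 1) (signalQSemN s 1)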

286
Assistant/Alert.hs Normal file
View file

@ -0,0 +1,286 @@
{- git-annex assistant alerts
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE RankNTypes, BangPatterns, OverloadedStrings #-}
module Assistant.Alert where
import Common.Annex
import qualified Remote
import Utility.Tense
import Logs.Transfer
import qualified Data.Text as T
import qualified Data.Map as M
import Data.String
{- Different classes of alerts are displayed differently. -}
data AlertClass = Success | Message | Activity | Warning | Error
deriving (Eq, Ord)
data AlertPriority = Filler | Low | Medium | High | Pinned
deriving (Eq, Ord)
{- An alert can have a name, which is used to combine it with other similar
- alerts. -}
data AlertName = FileAlert TenseChunk | DownloadFailedAlert | SanityCheckFixAlert
deriving (Eq)
{- The first alert is the new alert, the second is an old alert.
- Should return a modified version of the old alert. -}
type AlertCombiner = Alert -> Alert -> Maybe Alert
data Alert = Alert
{ alertClass :: AlertClass
, alertHeader :: Maybe TenseText
, alertMessageRender :: [TenseChunk] -> TenseText
, alertData :: [TenseChunk]
, alertBlockDisplay :: Bool
, alertClosable :: Bool
, alertPriority :: AlertPriority
, alertIcon :: Maybe String
, alertCombiner :: Maybe AlertCombiner
, alertName :: Maybe AlertName
}
type AlertPair = (AlertId, Alert)
type AlertMap = M.Map AlertId Alert
{- Higher AlertId indicates a more recent alert. -}
newtype AlertId = AlertId Integer
deriving (Read, Show, Eq, Ord)
firstAlertId :: AlertId
firstAlertId = AlertId 0
nextAlertId :: AlertId -> AlertId
nextAlertId (AlertId i) = AlertId $ succ i
{- This is as many alerts as it makes sense to display at a time.
- A display might be smaller, or larger, the point is to not overwhelm the
- user with a ton of alerts. -}
displayAlerts :: Int
displayAlerts = 6
{- This is not a hard maximum, but there's no point in keeping a great
- many filler alerts in an AlertMap, so when there's more than this many,
- they start being pruned, down toward displayAlerts. -}
maxAlerts :: Int
maxAlerts = displayAlerts * 2
{- The desired order is the reverse of:
-
- - Pinned alerts
- - High priority alerts, newest first
- - Medium priority alerts, newest first (mostly used for Activity)
- - Low priority alerts, newest first
- - Filler priority alerts, newest first
- - Ties are broken by the AlertClass, with Errors etc coming first.
-}
compareAlertPairs :: AlertPair -> AlertPair -> Ordering
compareAlertPairs
(aid, Alert { alertClass = aclass, alertPriority = aprio })
(bid, Alert { alertClass = bclass, alertPriority = bprio })
= compare aprio bprio
`thenOrd` compare aid bid
`thenOrd` compare aclass bclass
sortAlertPairs :: [AlertPair] -> [AlertPair]
sortAlertPairs = sortBy compareAlertPairs
{- Renders an alert's header for display, if it has one. -}
renderAlertHeader :: Alert -> Maybe T.Text
renderAlertHeader alert = renderTense (alertTense alert) <$> alertHeader alert
{- Renders an alert's message for display. -}
renderAlertMessage :: Alert -> T.Text
renderAlertMessage alert = renderTense (alertTense alert) $
(alertMessageRender alert) (alertData alert)
alertTense :: Alert -> Tense
alertTense alert
| alertClass alert == Activity = Present
| otherwise = Past
{- Checks if two alerts display the same. -}
effectivelySameAlert :: Alert -> Alert -> Bool
effectivelySameAlert x y = all id
[ alertClass x == alertClass y
, alertHeader x == alertHeader y
, alertData x == alertData y
, alertBlockDisplay x == alertBlockDisplay y
, alertClosable x == alertClosable y
, alertPriority x == alertPriority y
]
makeAlertFiller :: Bool -> Alert -> Alert
makeAlertFiller success alert
| isFiller alert = alert
| otherwise = alert
{ alertClass = if c == Activity then c' else c
, alertPriority = Filler
, alertClosable = True
, alertIcon = Just $ if success then "ok" else "exclamation-sign"
}
where
c = alertClass alert
c'
| success = Success
| otherwise = Error
isFiller :: Alert -> Bool
isFiller alert = alertPriority alert == Filler
{- Updates the AlertMap, adding or updating an alert.
-
- Any old filler that looks the same as the alert is removed.
-
- Or, if the alert has an alertCombiner that combines it with
- an old alert, the old alert is replaced with the result, and the
- alert is removed.
-
- Old filler alerts are pruned once maxAlerts is reached.
-}
mergeAlert :: AlertId -> Alert -> AlertMap -> AlertMap
mergeAlert i al m = maybe updatePrune updateCombine (alertCombiner al)
where
pruneSame k al' = k == i || not (effectivelySameAlert al al')
pruneBloat m'
| bloat > 0 = M.fromList $ pruneold $ M.toList m'
| otherwise = m'
where
bloat = M.size m' - maxAlerts
pruneold l =
let (f, rest) = partition (\(_, a) -> isFiller a) l
in drop bloat f ++ rest
updatePrune = pruneBloat $ M.filterWithKey pruneSame $
M.insertWith' const i al m
updateCombine combiner =
let combined = M.mapMaybe (combiner al) m
in if M.null combined
then updatePrune
else M.delete i $ M.union combined m
baseActivityAlert :: Alert
baseActivityAlert = Alert
{ alertClass = Activity
, alertHeader = Nothing
, alertMessageRender = tenseWords
, alertData = []
, alertBlockDisplay = False
, alertClosable = False
, alertPriority = Medium
, alertIcon = Just "refresh"
, alertCombiner = Nothing
, alertName = Nothing
}
activityAlert :: Maybe TenseText -> [TenseChunk] -> Alert
activityAlert header dat = baseActivityAlert
{ alertHeader = header
, alertData = dat
}
startupScanAlert :: Alert
startupScanAlert = activityAlert Nothing $
[Tensed "Performing" "Performed", "startup scan"]
commitAlert :: Alert
commitAlert = activityAlert Nothing $
[Tensed "Committing" "Committed", "changes to git"]
showRemotes :: [Remote] -> TenseChunk
showRemotes = UnTensed . T.unwords . map (T.pack . Remote.name)
pushAlert :: [Remote] -> Alert
pushAlert rs = activityAlert Nothing $
[Tensed "Syncing" "Synced", "with", showRemotes rs]
pushRetryAlert :: [Remote] -> Alert
pushRetryAlert rs = activityAlert
(Just $ tenseWords [Tensed "Retrying" "Retried", "sync"])
(["with", showRemotes rs])
syncAlert :: [Remote] -> Alert
syncAlert rs = baseActivityAlert
{ alertHeader = Just $ tenseWords
[Tensed "Syncing" "Synced", "with", showRemotes rs]
, alertData = []
, alertPriority = Low
}
scanAlert :: [Remote] -> Alert
scanAlert rs = baseActivityAlert
{ alertHeader = Just $ tenseWords
[Tensed "Scanning" "Scanned", showRemotes rs]
, alertBlockDisplay = True
, alertPriority = Low
}
sanityCheckAlert :: Alert
sanityCheckAlert = activityAlert
(Just $ tenseWords [Tensed "Running" "Ran", "daily sanity check"])
["to make sure everything is ok."]
sanityCheckFixAlert :: String -> Alert
sanityCheckFixAlert msg = Alert
{ alertClass = Warning
, alertHeader = Just $ tenseWords ["Fixed a problem"]
, alertMessageRender = render
, alertData = [UnTensed $ T.pack msg]
, alertBlockDisplay = True
, alertPriority = High
, alertClosable = True
, alertIcon = Just "exclamation-sign"
, alertName = Just SanityCheckFixAlert
, alertCombiner = Just $ dataCombiner (++)
}
where
render dta = tenseWords $ alerthead : dta ++ [alertfoot]
alerthead = "The daily sanity check found and fixed a problem:"
alertfoot = "If these problems persist, consider filing a bug report."
fileAlert :: TenseChunk -> FilePath -> Alert
fileAlert msg file = (activityAlert Nothing [f])
{ alertName = Just $ FileAlert msg
, alertMessageRender = render
, alertCombiner = Just $ dataCombiner combiner
}
where
f = fromString $ shortFile $ takeFileName file
render fs = tenseWords $ msg : fs
combiner new old = take 10 $ new ++ old
addFileAlert :: FilePath -> Alert
addFileAlert = fileAlert (Tensed "Adding" "Added")
{- This is only used as a success alert after a transfer, not during it. -}
transferFileAlert :: Direction -> Bool -> FilePath -> Alert
transferFileAlert direction True
| direction == Upload = fileAlert "Uploaded"
| otherwise = fileAlert "Downloaded"
transferFileAlert direction False
| direction == Upload = fileAlert "Upload failed"
| otherwise = fileAlert "Download failed"
dataCombiner :: ([TenseChunk] -> [TenseChunk] -> [TenseChunk]) -> AlertCombiner
dataCombiner combiner new old
| alertClass new /= alertClass old = Nothing
| alertName new == alertName old =
Just $! old { alertData = alertData new `combiner` alertData old }
| otherwise = Nothing
shortFile :: FilePath -> String
shortFile f
| len < maxlen = f
| otherwise = take half f ++ ".." ++ drop (len - half) f
where
len = length f
maxlen = 20
half = (maxlen - 2) `div` 2

View file

@ -1,6 +1,8 @@
{- git-annex assistant change tracking
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Changes where
@ -8,14 +10,14 @@ module Assistant.Changes where
import Common.Annex
import qualified Annex.Queue
import Types.KeySource
import Utility.TSet
import Control.Concurrent.STM
import Data.Time.Clock
data ChangeType = AddChange | LinkChange | RmChange | RmDirChange
deriving (Show, Eq)
type ChangeChan = TChan Change
type ChangeChan = TSet Change
data Change
= Change
@ -29,11 +31,8 @@ data Change
}
deriving (Show)
runChangeChan :: STM a -> IO a
runChangeChan = atomically
newChangeChan :: IO ChangeChan
newChangeChan = atomically newTChan
newChangeChan = newTSet
{- Handlers call this when they made a change that needs to get committed. -}
madeChange :: FilePath -> ChangeType -> Annex (Maybe Change)
@ -65,17 +64,13 @@ finishedChange c = c
{- Gets all unhandled changes.
- Blocks until at least one change is made. -}
getChanges :: ChangeChan -> IO [Change]
getChanges chan = runChangeChan $ do
c <- readTChan chan
go [c]
where
go l = do
v <- tryReadTChan chan
case v of
Nothing -> return l
Just c -> go (c:l)
getChanges = getTSet
{- Puts unhandled changes back into the channel.
- Note: Original order is not preserved. -}
refillChanges :: ChangeChan -> [Change] -> IO ()
refillChanges chan cs = runChangeChan $ mapM_ (writeTChan chan) cs
refillChanges = putTSet
{- Records a change in the channel. -}
recordChange :: ChangeChan -> Change -> IO ()
recordChange = putTSet1
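
Both this ChangeChan and the CommitChan added below are built on Utility.TSet, whose source is not included in this excerpt. A minimal sketch of such a TSet, assuming it simply wraps an STM TChan; the function names mirror the calls above (and the old getChanges code removed here), but this is an illustration, not the shipped Utility.TSet:

import Control.Concurrent.STM

type TSet a = TChan a

newTSet :: IO (TSet a)
newTSet = atomically newTChan

-- Blocks until at least one item is available, then also drains
-- anything else currently queued.
getTSet :: TSet a -> IO [a]
getTSet tset = atomically $ do
        c <- readTChan tset
        drain [c]
  where
        drain l = maybe (return l) (\c -> drain (c:l)) =<< tryReadTChan tset

putTSet :: TSet a -> [a] -> IO ()
putTSet tset = atomically . mapM_ (writeTChan tset)

putTSet1 :: TSet a -> a -> IO ()
putTSet1 tset = atomically . writeTChan tset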

34
Assistant/Commits.hs Normal file
View file

@ -0,0 +1,34 @@
{- git-annex assistant commit tracking
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Commits where
import Utility.TSet
import Data.Time.Clock
type CommitChan = TSet Commit
data Commit = Commit UTCTime
deriving (Show)
newCommitChan :: IO CommitChan
newCommitChan = newTSet
{- Gets all unhandled commits.
- Blocks until at least one commit is made. -}
getCommits :: CommitChan -> IO [Commit]
getCommits = getTSet
{- Puts unhandled commits back into the channel.
- Note: Original order is not preserved. -}
refillCommits :: CommitChan -> [Commit] -> IO ()
refillCommits = putTSet
{- Records a commit in the channel. -}
recordCommit :: CommitChan -> Commit -> IO ()
recordCommit = putTSet1

21
Assistant/Common.hs Normal file
View file

@ -0,0 +1,21 @@
{- Common infrastructure for the git-annex assistant threads.
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Common (
module X,
ThreadName,
debug
) where
import Common.Annex as X
import System.Log.Logger
type ThreadName = String
debug :: ThreadName -> [String] -> IO ()
debug threadname ws = debugM threadname $ unwords $ (threadname ++ ":") : ws

View file

@ -1,20 +1,28 @@
{- git-annex assistant daemon status
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.DaemonStatus where
import Common.Annex
import Assistant.ThreadedMonad
import Assistant.Alert
import Utility.ThreadScheduler
import Utility.TempFile
import Utility.NotificationBroadcaster
import Logs.Transfer
import Logs.Trust
import qualified Remote
import Control.Concurrent
import Control.Concurrent.STM
import System.Posix.Types
import Data.Time.Clock.POSIX
import Data.Time
import System.Locale
import qualified Data.Map as M
data DaemonStatus = DaemonStatus
-- False when the daemon is performing its startup scan
@ -25,47 +33,103 @@ data DaemonStatus = DaemonStatus
, sanityCheckRunning :: Bool
-- Last time the sanity checker ran
, lastSanityCheck :: Maybe POSIXTime
-- Currently running file content transfers
, currentTransfers :: TransferMap
-- Messages to display to the user.
, alertMap :: AlertMap
, lastAlertId :: AlertId
-- Ordered list of remotes to talk to.
, knownRemotes :: [Remote]
-- Broadcasts notifications about all changes to the DaemonStatus
, changeNotifier :: NotificationBroadcaster
-- Broadcasts notifications when queued or current transfers change.
, transferNotifier :: NotificationBroadcaster
-- Broadcasts notifications when there's a change to the alerts
, alertNotifier :: NotificationBroadcaster
}
deriving (Show)
type DaemonStatusHandle = MVar DaemonStatus
type TransferMap = M.Map Transfer TransferInfo
newDaemonStatus :: DaemonStatus
{- This TMVar is never left empty, so accessing it will never block. -}
type DaemonStatusHandle = TMVar DaemonStatus
newDaemonStatus :: IO DaemonStatus
newDaemonStatus = DaemonStatus
{ scanComplete = False
, lastRunning = Nothing
, sanityCheckRunning = False
, lastSanityCheck = Nothing
}
<$> pure False
<*> pure Nothing
<*> pure False
<*> pure Nothing
<*> pure M.empty
<*> pure M.empty
<*> pure firstAlertId
<*> pure []
<*> newNotificationBroadcaster
<*> newNotificationBroadcaster
<*> newNotificationBroadcaster
getDaemonStatus :: DaemonStatusHandle -> Annex DaemonStatus
getDaemonStatus = liftIO . readMVar
getDaemonStatus :: DaemonStatusHandle -> IO DaemonStatus
getDaemonStatus = atomically . readTMVar
modifyDaemonStatus :: DaemonStatusHandle -> (DaemonStatus -> DaemonStatus) -> Annex ()
modifyDaemonStatus handle a = liftIO $ modifyMVar_ handle (return . a)
modifyDaemonStatus_ :: DaemonStatusHandle -> (DaemonStatus -> DaemonStatus) -> IO ()
modifyDaemonStatus_ dstatus a = modifyDaemonStatus dstatus $ \s -> (a s, ())
{- Load any previous daemon status file, and store it in the MVar for this
- process to use as its DaemonStatus. -}
modifyDaemonStatus :: DaemonStatusHandle -> (DaemonStatus -> (DaemonStatus, b)) -> IO b
modifyDaemonStatus dstatus a = do
(s, b) <- atomically $ do
r@(s, _) <- a <$> takeTMVar dstatus
putTMVar dstatus s
return r
sendNotification $ changeNotifier s
return b
{- Remotes ordered by cost, with dead ones thrown out. -}
calcKnownRemotes :: Annex [Remote]
calcKnownRemotes = do
rs <- concat . Remote.byCost <$> Remote.enabledRemoteList
alive <- snd <$> trustPartition DeadTrusted (map Remote.uuid rs)
let good r = Remote.uuid r `elem` alive
return $ filter good rs
{- Updates the cached ordered list of remotes from the list in Annex
- state. -}
updateKnownRemotes :: DaemonStatusHandle -> Annex ()
updateKnownRemotes dstatus = do
remotes <- calcKnownRemotes
liftIO $ modifyDaemonStatus_ dstatus $
\s -> s { knownRemotes = remotes }
{- Load any previous daemon status file, and store it in a MVar for this
- process to use as its DaemonStatus. Also gets current transfer status. -}
startDaemonStatus :: Annex DaemonStatusHandle
startDaemonStatus = do
file <- fromRepo gitAnnexDaemonStatusFile
status <- liftIO $
catchDefaultIO (readDaemonStatusFile file) newDaemonStatus
liftIO $ newMVar status
catchDefaultIO (readDaemonStatusFile file) =<< newDaemonStatus
transfers <- M.fromList <$> getTransfers
remotes <- calcKnownRemotes
liftIO $ atomically $ newTMVar status
{ scanComplete = False
, sanityCheckRunning = False
, currentTransfers = transfers
, knownRemotes = remotes
}
{- This thread wakes up periodically and writes the daemon status to disk. -}
{- This writes the daemon status to disk, when it changes, but no more
- frequently than once every ten minutes.
-}
daemonStatusThread :: ThreadState -> DaemonStatusHandle -> IO ()
daemonStatusThread st handle = do
daemonStatusThread st dstatus = do
notifier <- newNotificationHandle
=<< changeNotifier <$> getDaemonStatus dstatus
checkpoint
runEvery (Seconds tenMinutes) checkpoint
runEvery (Seconds tenMinutes) $ do
waitNotification notifier
checkpoint
where
checkpoint = runThreadState st $ do
file <- fromRepo gitAnnexDaemonStatusFile
status <- getDaemonStatus handle
liftIO $ writeDaemonStatusFile file status
checkpoint = do
status <- getDaemonStatus dstatus
file <- runThreadState st $ fromRepo gitAnnexDaemonStatusFile
writeDaemonStatusFile file status
{- Don't just dump out the structure, because it will change over time,
- and parts of it are not relevant. -}
@ -81,9 +145,9 @@ writeDaemonStatusFile file status =
]
readDaemonStatusFile :: FilePath -> IO DaemonStatus
readDaemonStatusFile file = parse <$> readFile file
readDaemonStatusFile file = parse <$> newDaemonStatus <*> readFile file
where
parse = foldr parseline newDaemonStatus . lines
parse status = foldr parseline status . lines
parseline line status
| key == "lastRunning" = parseval readtime $ \v ->
status { lastRunning = Just v }
@ -117,3 +181,86 @@ afterLastDaemonRun timestamp status = maybe False (< t) (lastRunning status)
tenMinutes :: Int
tenMinutes = 10 * 60
{- Mutates the transfer map. Runs in STM so that the transfer map can
- be modified in the same transaction that modifies the transfer queue.
- Note that this does not send a notification of the change; that's left
- to the caller. -}
adjustTransfersSTM :: DaemonStatusHandle -> (TransferMap -> TransferMap) -> STM ()
adjustTransfersSTM dstatus a = do
s <- takeTMVar dstatus
putTMVar dstatus $ s { currentTransfers = a (currentTransfers s) }
{- Updates a transfer's info.
- Preserves the transferTid and transferPaused values,
- which are not written to disk. -}
updateTransferInfo :: DaemonStatusHandle -> Transfer -> TransferInfo -> IO ()
updateTransferInfo dstatus t info =
notifyTransfer dstatus `after` modifyDaemonStatus_ dstatus go
where
go s = s { currentTransfers = update (currentTransfers s) }
update m = M.insertWith' merge t info m
merge new old = new
{ transferTid = maybe (transferTid new) Just (transferTid old)
, transferPaused = transferPaused new || transferPaused old
}
{- Removes a transfer from the map, and returns its info. -}
removeTransfer :: DaemonStatusHandle -> Transfer -> IO (Maybe TransferInfo)
removeTransfer dstatus t =
notifyTransfer dstatus `after` modifyDaemonStatus dstatus go
where
go s =
let (info, ts) = M.updateLookupWithKey
(\_k _v -> Nothing)
t (currentTransfers s)
in (s { currentTransfers = ts }, info)
{- Send a notification when a transfer is changed. -}
notifyTransfer :: DaemonStatusHandle -> IO ()
notifyTransfer dstatus = sendNotification
=<< transferNotifier <$> atomically (readTMVar dstatus)
{- Send a notification when alerts are changed. -}
notifyAlert :: DaemonStatusHandle -> IO ()
notifyAlert dstatus = sendNotification
=<< alertNotifier <$> atomically (readTMVar dstatus)
{- Returns the alert's identifier, which can be used to remove it. -}
addAlert :: DaemonStatusHandle -> Alert -> IO AlertId
addAlert dstatus alert = notifyAlert dstatus `after` modifyDaemonStatus dstatus go
where
go s = (s { lastAlertId = i, alertMap = m }, i)
where
i = nextAlertId $ lastAlertId s
m = mergeAlert i alert (alertMap s)
removeAlert :: DaemonStatusHandle -> AlertId -> IO ()
removeAlert dstatus i = updateAlert dstatus i (const Nothing)
updateAlert :: DaemonStatusHandle -> AlertId -> (Alert -> Maybe Alert) -> IO ()
updateAlert dstatus i a = updateAlertMap dstatus $ \m -> M.update a i m
updateAlertMap :: DaemonStatusHandle -> (AlertMap -> AlertMap) -> IO ()
updateAlertMap dstatus a = notifyAlert dstatus `after` modifyDaemonStatus_ dstatus go
where
go s = s { alertMap = a (alertMap s) }
{- Displays an alert while performing an activity that returns True on
- success.
-
- The alert is left visible afterwards, as filler.
- Old filler is pruned, to prevent the map growing too large. -}
alertWhile :: DaemonStatusHandle -> Alert -> IO Bool -> IO Bool
alertWhile dstatus alert a = alertWhile' dstatus alert $ do
r <- a
return (r, r)
{- Like alertWhile, but allows the activity to return a value too. -}
alertWhile' :: DaemonStatusHandle -> Alert -> IO (Bool, a) -> IO a
alertWhile' dstatus alert a = do
let alert' = alert { alertClass = Activity }
i <- addAlert dstatus alert'
(ok, r) <- a
updateAlertMap dstatus $ mergeAlert i $ makeAlertFiller ok alert'
return r
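
As a usage note, the threads added later in this commit wrap long-running work in alertWhile, so the alert is displayed while the work runs and is then left as filler marked ok or failed. A hedged one-liner in the style of the pusher thread further down (retrySync is an illustrative name, not part of this commit):

-- Illustration only; mirrors how pushRetryThread calls alertWhile.
retrySync :: DaemonStatusHandle -> [Remote] -> IO Bool -> IO Bool
retrySync dstatus rs pushaction = alertWhile dstatus (pushRetryAlert rs) pushaction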

46
Assistant/Pushes.hs Normal file
View file

@ -0,0 +1,46 @@
{- git-annex assistant push tracking
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Pushes where
import Common.Annex
import Control.Concurrent.STM
import Data.Time.Clock
import qualified Data.Map as M
{- Track the most recent push failure for each remote. -}
type PushMap = M.Map Remote UTCTime
type FailedPushMap = TMVar PushMap
{- The TMVar starts empty, and is left empty when there are no
- failed pushes. This way we can block until there are some failed pushes.
-}
newFailedPushMap :: IO FailedPushMap
newFailedPushMap = atomically newEmptyTMVar
{- Blocks until there are failed pushes.
- Returns Remotes whose pushes failed a given time duration or more ago.
- (This may be an empty list.) -}
getFailedPushesBefore :: FailedPushMap -> NominalDiffTime -> IO [Remote]
getFailedPushesBefore v duration = do
m <- atomically $ readTMVar v
now <- getCurrentTime
return $ M.keys $ M.filter (not . toorecent now) m
where
toorecent now time = now `diffUTCTime` time < duration
{- Modifies the map. -}
changeFailedPushMap :: FailedPushMap -> (PushMap -> PushMap) -> IO ()
changeFailedPushMap v a = atomically $
store . a . fromMaybe M.empty =<< tryTakeTMVar v
where
{- tryTakeTMVar empties the TMVar; refill it only if
- the modified map is not itself empty -}
store m
| m == M.empty = noop
| otherwise = putTMVar v $! m
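
A hedged usage sketch of this API, assuming the module's own imports (Data.Map as M, Data.Time.Clock); failedPush and successfulPush are illustrative names, not part of this commit:

-- Record a failed push, or clear one that later succeeded.
failedPush :: FailedPushMap -> Remote -> IO ()
failedPush v r = do
        now <- getCurrentTime
        changeFailedPushMap v $ M.insert r now

successfulPush :: FailedPushMap -> Remote -> IO ()
successfulPush v r = changeFailedPushMap v $ M.delete r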

View file

@ -1,81 +0,0 @@
{- git-annex assistant sanity checker
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-}
module Assistant.SanityChecker (
sanityCheckerThread
) where
import Common.Annex
import qualified Git.LsFiles
import Assistant.DaemonStatus
import Assistant.ThreadedMonad
import Assistant.Changes
import Utility.ThreadScheduler
import qualified Assistant.Watcher
import Data.Time.Clock.POSIX
{- This thread wakes up occasionally to make sure the tree is in good shape. -}
sanityCheckerThread :: ThreadState -> DaemonStatusHandle -> ChangeChan -> IO ()
sanityCheckerThread st status changechan = forever $ do
waitForNextCheck st status
runThreadState st $
modifyDaemonStatus status $ \s -> s
{ sanityCheckRunning = True }
now <- getPOSIXTime -- before check started
catchIO (check st status changechan)
(runThreadState st . warning . show)
runThreadState st $ do
modifyDaemonStatus status $ \s -> s
{ sanityCheckRunning = False
, lastSanityCheck = Just now
}
{- Only run one check per day, from the time of the last check. -}
waitForNextCheck :: ThreadState -> DaemonStatusHandle -> IO ()
waitForNextCheck st status = do
v <- runThreadState st $
lastSanityCheck <$> getDaemonStatus status
now <- getPOSIXTime
threadDelaySeconds $ Seconds $ calcdelay now v
where
calcdelay _ Nothing = oneDay
calcdelay now (Just lastcheck)
| lastcheck < now = max oneDay $
oneDay - truncate (now - lastcheck)
| otherwise = oneDay
oneDay :: Int
oneDay = 24 * 60 * 60
{- It's important to stay out of the Annex monad as much as possible while
- running potentially expensive parts of this check, since remaining in it
- will block the watcher. -}
check :: ThreadState -> DaemonStatusHandle -> ChangeChan -> IO ()
check st status changechan = do
g <- runThreadState st $ do
showSideAction "Running daily check"
fromRepo id
-- Find old unstaged symlinks, and add them to git.
unstaged <- Git.LsFiles.notInRepo False ["."] g
now <- getPOSIXTime
forM_ unstaged $ \file -> do
ms <- catchMaybeIO $ getSymbolicLinkStatus file
case ms of
Just s | toonew (statusChangeTime s) now -> noop
| isSymbolicLink s ->
addsymlink file ms
_ -> noop
where
toonew timestamp now = now < (realToFrac (timestamp + slop) :: POSIXTime)
slop = fromIntegral tenMinutes
insanity m = runThreadState st $ warning m
addsymlink file s = do
insanity $ "found unstaged symlink: " ++ file
Assistant.Watcher.runHandler st status changechan
Assistant.Watcher.onAddSymlink file s

48
Assistant/ScanRemotes.hs Normal file
View file

@ -0,0 +1,48 @@
{- git-annex assistant remotes needing scanning
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.ScanRemotes where
import Common.Annex
import qualified Types.Remote as Remote
import Data.Function
import Control.Concurrent.STM
import qualified Data.Map as M
data ScanInfo = ScanInfo
{ scanPriority :: Int
, fullScan :: Bool
}
type ScanRemoteMap = TMVar (M.Map Remote ScanInfo)
{- The TMVar starts empty, and is left empty when there are no remotes
- to scan. -}
newScanRemoteMap :: IO ScanRemoteMap
newScanRemoteMap = atomically newEmptyTMVar
{- Blocks until there is a remote or remotes that need to be scanned.
-
- The list has higher priority remotes listed first. -}
getScanRemote :: ScanRemoteMap -> IO [(Remote, ScanInfo)]
getScanRemote v = atomically $
reverse . sortBy (compare `on` scanPriority . snd) . M.toList
<$> takeTMVar v
{- Adds new remotes that need scanning. -}
addScanRemotes :: ScanRemoteMap -> Bool -> [Remote] -> IO ()
addScanRemotes _ _ [] = noop
addScanRemotes v full rs = atomically $ do
m <- fromMaybe M.empty <$> tryTakeTMVar v
putTMVar v $ M.unionWith merge (M.fromList $ zip rs (map info rs)) m
where
info r = ScanInfo (-1 * Remote.cost r) full
merge x y = ScanInfo
{ scanPriority = max (scanPriority x) (scanPriority y)
, fullScan = fullScan x || fullScan y
}
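
Since scanPriority is the negated remote cost, getScanRemote hands queued remotes back cheapest-first. A hedged usage sketch (scanCheapestFirst is an illustrative name, not part of this commit):

scanCheapestFirst :: ScanRemoteMap -> [Remote] -> IO [(Remote, ScanInfo)]
scanCheapestFirst v rs = do
        -- queue the remotes (no full scan), then read them back,
        -- highest priority (lowest cost) first; note that
        -- getScanRemote blocks if nothing has been queued
        addScanRemotes v False rs
        getScanRemote v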

105
Assistant/Sync.hs Normal file
View file

@ -0,0 +1,105 @@
{- git-annex assistant repo syncing
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Sync where
import Assistant.Common
import Assistant.Pushes
import Assistant.Alert
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.ScanRemotes
import qualified Command.Sync
import Utility.Parallel
import qualified Git
import qualified Git.Branch
import qualified Git.Command
import qualified Remote
import qualified Annex.Branch
import Data.Time.Clock
import qualified Data.Map as M
{- Syncs with remotes that may have been disconnected for a while.
-
- First gets git in sync, and then prepares any necessary file transfers.
-
- An expensive full scan is queued when the git-annex branches of some of
- the remotes have diverged from the local git-annex branch. Otherwise,
- it's sufficient to requeue failed transfers.
-}
reconnectRemotes :: ThreadName -> ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> [Remote] -> IO ()
reconnectRemotes _ _ _ _ [] = noop
reconnectRemotes threadname st dstatus scanremotes rs = void $
alertWhile dstatus (syncAlert rs) $ do
sync =<< runThreadState st (inRepo Git.Branch.current)
where
sync (Just branch) = do
diverged <- manualPull st (Just branch) rs
addScanRemotes scanremotes diverged rs
now <- getCurrentTime
pushToRemotes threadname now st Nothing rs
{- No local branch exists yet, but we can try pulling. -}
sync Nothing = do
diverged <- manualPull st Nothing rs
addScanRemotes scanremotes diverged rs
return True
{- Updates the local sync branch, then pushes it to all remotes, in
- parallel.
-
- Avoids running possibly long-duration commands in the Annex monad, so
- as not to block other threads. -}
pushToRemotes :: ThreadName -> UTCTime -> ThreadState -> (Maybe FailedPushMap) -> [Remote] -> IO Bool
pushToRemotes threadname now st mpushmap remotes = do
(g, branch) <- runThreadState st $
(,) <$> fromRepo id <*> inRepo Git.Branch.current
go True branch g remotes
where
go _ Nothing _ _ = return True -- no branch, so nothing to do
go shouldretry (Just branch) g rs = do
debug threadname
[ "pushing to"
, show rs
]
Command.Sync.updateBranch (Command.Sync.syncBranch branch) g
(succeeded, failed) <- inParallel (push g branch) rs
let ok = null failed
case mpushmap of
Nothing -> noop
Just pushmap ->
changeFailedPushMap pushmap $ \m ->
M.union (makemap failed) $
M.difference m (makemap succeeded)
unless (ok) $
debug threadname
[ "failed to push to"
, show failed
]
if (ok || not shouldretry)
then return ok
else retry branch g failed
makemap l = M.fromList $ zip l (repeat now)
push g branch remote = Command.Sync.pushBranch remote branch g
retry branch g rs = do
debug threadname [ "trying manual pull to resolve failed pushes" ]
void $ manualPull st (Just branch) rs
go False (Just branch) g rs
{- Manually pull from remotes and merge their branches. -}
manualPull :: ThreadState -> (Maybe Git.Ref) -> [Remote] -> IO Bool
manualPull st currentbranch remotes = do
g <- runThreadState st $ fromRepo id
forM_ remotes $ \r ->
Git.Command.runBool "fetch" [Param $ Remote.name r] g
haddiverged <- runThreadState st $ Annex.Branch.forceUpdate
forM_ remotes $ \r ->
runThreadState st $ Command.Sync.mergeRemote r currentbranch
return haddiverged

View file

@ -1,17 +1,17 @@
{- making the Annex monad available across threads
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE BangPatterns #-}
module Assistant.ThreadedMonad where
import Common.Annex
import qualified Annex
import Control.Concurrent
import Control.Exception (throw)
import Data.Tuple
{- The Annex state is stored in a MVar, so that threaded actions can access
- it. -}
@ -32,13 +32,7 @@ withThreadState a = do
{- Runs an Annex action, using the state from the MVar.
-
- This serializes calls by threads. -}
- This serializes calls by threads; only one thread can run in Annex at a
- time. -}
runThreadState :: ThreadState -> Annex a -> IO a
runThreadState mvar a = do
startstate <- takeMVar mvar
-- catch IO errors and rethrow after restoring the MVar
!(r, newstate) <- catchIO (Annex.run startstate a) $ \e -> do
putMVar mvar startstate
throw e
putMVar mvar newstate
return r
runThreadState mvar a = modifyMVar mvar $ \state -> swap <$> Annex.run state a
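
The new definition leans on modifyMVar, which restores the original state if the action throws, and on swap because Annex.run returns (result, newstate) while modifyMVar expects (newstate, result). Spelled out, it is roughly equivalent to this sketch:

runThreadState' :: ThreadState -> Annex a -> IO a
runThreadState' mvar a = modifyMVar mvar $ \state -> do
        (r, newstate) <- Annex.run state a
        return (newstate, r)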

View file

@ -1,14 +1,21 @@
{- git-annex assistant commit thread
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Committer where
module Assistant.Threads.Committer where
import Common.Annex
import Assistant.Common
import Assistant.Changes
import Assistant.Commits
import Assistant.Alert
import Assistant.ThreadedMonad
import Assistant.Watcher
import Assistant.Threads.Watcher
import Assistant.TransferQueue
import Assistant.DaemonStatus
import Logs.Transfer
import qualified Annex
import qualified Annex.Queue
import qualified Git.Command
@ -25,9 +32,12 @@ import Data.Tuple.Utils
import qualified Data.Set as S
import Data.Either
thisThread :: ThreadName
thisThread = "Committer"
{- This thread makes git commits at appropriate times. -}
commitThread :: ThreadState -> ChangeChan -> IO ()
commitThread st changechan = runEvery (Seconds 1) $ do
commitThread :: ThreadState -> ChangeChan -> CommitChan -> TransferQueue -> DaemonStatusHandle -> IO ()
commitThread st changechan commitchan transferqueue dstatus = runEvery (Seconds 1) $ do
-- We already waited one second as a simple rate limiter.
-- Next, wait until at least one change is available for
-- processing.
@ -36,12 +46,30 @@ commitThread st changechan = runEvery (Seconds 1) $ do
time <- getCurrentTime
if shouldCommit time changes
then do
readychanges <- handleAdds st changechan changes
readychanges <- handleAdds st changechan transferqueue dstatus changes
if shouldCommit time readychanges
then do
void $ tryIO $ runThreadState st commitStaged
else refillChanges changechan readychanges
else refillChanges changechan changes
debug thisThread
[ "committing"
, show (length readychanges)
, "changes"
]
void $ alertWhile dstatus commitAlert $
tryIO (runThreadState st commitStaged)
>> return True
recordCommit commitchan (Commit time)
else refill readychanges
else refill changes
where
refill [] = noop
refill cs = do
debug thisThread
[ "delaying commit of"
, show (length cs)
, "changes"
]
refillChanges changechan cs
commitStaged :: Annex ()
commitStaged = do
@ -93,8 +121,8 @@ shouldCommit now changes
- Any pending adds that are not ready yet are put back into the ChangeChan,
- where they will be retried later.
-}
handleAdds :: ThreadState -> ChangeChan -> [Change] -> IO [Change]
handleAdds st changechan cs = returnWhen (null pendingadds) $ do
handleAdds :: ThreadState -> ChangeChan -> TransferQueue -> DaemonStatusHandle -> [Change] -> IO [Change]
handleAdds st changechan transferqueue dstatus cs = returnWhen (null pendingadds) $ do
(postponed, toadd) <- partitionEithers <$>
safeToAdd st pendingadds
@ -106,7 +134,7 @@ handleAdds st changechan cs = returnWhen (null pendingadds) $ do
if (DirWatcher.eventsCoalesce || null added)
then return $ added ++ otherchanges
else do
r <- handleAdds st changechan
r <- handleAdds st changechan transferqueue dstatus
=<< getChanges changechan
return $ r ++ added ++ otherchanges
where
@ -117,17 +145,21 @@ handleAdds st changechan cs = returnWhen (null pendingadds) $ do
| otherwise = a
add :: Change -> IO (Maybe Change)
add change@(PendingAddChange { keySource = ks }) = do
r <- catchMaybeIO $ sanitycheck ks $ runThreadState st $ do
showStart "add" $ keyFilename ks
handle (finishedChange change) (keyFilename ks)
=<< Command.Add.ingest ks
return $ maybeMaybe r
add change@(PendingAddChange { keySource = ks }) =
alertWhile' dstatus (addFileAlert $ keyFilename ks) $
liftM ret $ catchMaybeIO $
sanitycheck ks $ runThreadState st $ do
showStart "add" $ keyFilename ks
key <- Command.Add.ingest ks
handle (finishedChange change) (keyFilename ks) key
where
{- Add errors tend to be transient and will
- be automatically dealt with, so don't
- pass to the alert code. -}
ret (Just j@(Just _)) = (True, j)
ret _ = (True, Nothing)
add _ = return Nothing
maybeMaybe (Just j@(Just _)) = j
maybeMaybe _ = Nothing
handle _ _ Nothing = do
showEndFail
return Nothing
@ -137,6 +169,7 @@ handleAdds st changechan cs = returnWhen (null pendingadds) $ do
sha <- inRepo $
Git.HashObject.hashObject BlobObject link
stageSymlink file sha
queueTransfers Next transferqueue dstatus key (Just file) Upload
showEndOk
return $ Just change
@ -164,6 +197,15 @@ safeToAdd st changes = runThreadState st $
tmpdir <- fromRepo gitAnnexTmpDir
openfiles <- S.fromList . map fst3 . filter openwrite <$>
liftIO (Lsof.queryDir tmpdir)
-- TODO this is here for debugging a problem on
-- OSX, and is pretty expensive, so remove later
liftIO $ debug thisThread
[ "checking changes:"
, show changes
, "vs open files:"
, show openfiles
]
let checked = map (check openfiles) changes

View file

@ -0,0 +1,80 @@
{- git-annex assistant git merge thread
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Threads.Merger where
import Assistant.Common
import Assistant.ThreadedMonad
import Utility.DirWatcher
import Utility.Types.DirWatcher
import qualified Git
import qualified Git.Merge
import qualified Git.Branch
import qualified Command.Sync
thisThread :: ThreadName
thisThread = "Merger"
{- This thread watches for changes to .git/refs/heads/synced/,
- which indicate incoming pushes. It merges those pushes into the
- currently checked out branch. -}
mergeThread :: ThreadState -> IO ()
mergeThread st = do
g <- runThreadState st $ fromRepo id
let dir = Git.localGitDir g </> "refs" </> "heads" </> "synced"
createDirectoryIfMissing True dir
let hook a = Just $ runHandler g a
let hooks = mkWatchHooks
{ addHook = hook onAdd
, errHook = hook onErr
}
void $ watchDir dir (const False) hooks id
debug thisThread ["watching", dir]
type Handler = Git.Repo -> FilePath -> Maybe FileStatus -> IO ()
{- Runs an action handler.
-
- Exceptions are ignored, otherwise a whole thread could be crashed.
-}
runHandler :: Git.Repo -> Handler -> FilePath -> Maybe FileStatus -> IO ()
runHandler g handler file filestatus = void $ do
either print (const noop) =<< tryIO go
where
go = handler g file filestatus
{- Called when there's an error with inotify. -}
onErr :: Handler
onErr _ msg _ = error msg
{- Called when a new branch ref is written.
-
- This relies on git's atomic method of updating branch ref files,
- which is to first write the new file to .lock, and then rename it
- over the old file. So, ignore .lock files, and the rename ensures
- the watcher sees a new file being added on each update.
-
- At startup, synthetic add events fire, causing this to run, but that's
- ok; it ensures that any changes pushed since the last time the assistant
- ran are merged in.
-}
onAdd :: Handler
onAdd g file _
| ".lock" `isSuffixOf` file = noop
| otherwise = do
let changedbranch = Git.Ref $
"refs" </> "heads" </> takeFileName file
current <- Git.Branch.current g
when (Just changedbranch == current) $ do
liftIO $ debug thisThread
[ "merging changes into"
, show current
]
void $ mergeBranch changedbranch g
mergeBranch :: Git.Ref -> Git.Repo -> IO Bool
mergeBranch = Git.Merge.mergeNonInteractive . Command.Sync.syncBranch

View file

@ -0,0 +1,189 @@
{- git-annex assistant mount watcher, using either dbus or mtab polling
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Assistant.Threads.MountWatcher where
import Assistant.Common
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.ScanRemotes
import Assistant.Sync
import qualified Annex
import qualified Git
import Utility.ThreadScheduler
import Utility.Mounts
import Remote.List
import qualified Types.Remote as Remote
import Control.Concurrent
import qualified Control.Exception as E
import qualified Data.Set as S
#if WITH_DBUS
import Utility.DBus
import DBus.Client
import DBus
import Data.Word (Word32)
#else
#warning Building without dbus support; will use mtab polling
#endif
thisThread :: ThreadName
thisThread = "MountWatcher"
mountWatcherThread :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> IO ()
mountWatcherThread st handle scanremotes =
#if WITH_DBUS
dbusThread st handle scanremotes
#else
pollingThread st handle scanremotes
#endif
#if WITH_DBUS
dbusThread :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> IO ()
dbusThread st dstatus scanremotes = E.catch (go =<< connectSession) onerr
where
go client = ifM (checkMountMonitor client)
( do
{- Store the current mount points in an mvar,
- to be compared later. We could in theory
- work out the mount point from the dbus
- message, but this is easier. -}
mvar <- newMVar =<< currentMountPoints
forM_ mountChanged $ \matcher ->
listen client matcher $ \_event -> do
nowmounted <- currentMountPoints
wasmounted <- swapMVar mvar nowmounted
handleMounts st dstatus scanremotes wasmounted nowmounted
, do
runThreadState st $
warning "No known volume monitor available through dbus; falling back to mtab polling"
pollinstead
)
onerr :: E.SomeException -> IO ()
onerr e = do
runThreadState st $
warning $ "Failed to use dbus; falling back to mtab polling (" ++ show e ++ ")"
pollinstead
pollinstead = pollingThread st dstatus scanremotes
{- Examine the list of services connected to dbus, to see if there
- are any we can use to monitor mounts. If not, will attempt to start one. -}
checkMountMonitor :: Client -> IO Bool
checkMountMonitor client = do
running <- filter (`elem` usableservices)
<$> listServiceNames client
case running of
[] -> startOneService client startableservices
(service:_) -> do
debug thisThread [ "Using running DBUS service"
, service
, "to monitor mount events."
]
return True
where
startableservices = [gvfs]
usableservices = startableservices ++ [kde]
gvfs = "org.gtk.Private.GduVolumeMonitor"
kde = "org.kde.DeviceNotifications"
startOneService :: Client -> [ServiceName] -> IO Bool
startOneService _ [] = return False
startOneService client (x:xs) = do
_ <- callDBus client "StartServiceByName"
[toVariant x, toVariant (0 :: Word32)]
ifM (elem x <$> listServiceNames client)
( do
debug thisThread [ "Started DBUS service"
, x
, "to monitor mount events."
]
return True
, startOneService client xs
)
{- Filter matching events received when drives are mounted and unmounted. -}
mountChanged :: [MatchRule]
mountChanged = [gvfs True, gvfs False, kde, kdefallback]
where
{- gvfs reliably generates this event whenever a drive is mounted/unmounted,
- whether automatically, or manually -}
gvfs mount = matchAny
{ matchInterface = Just "org.gtk.Private.RemoteVolumeMonitor"
, matchMember = Just $ if mount then "MountAdded" else "MountRemoved"
}
{- This event fires when KDE prompts the user what to do with a drive,
- but maybe not at other times. And it's not received -}
kde = matchAny
{ matchInterface = Just "org.kde.Solid.Device"
, matchMember = Just "setupDone"
}
{- This event may not be closely related to mounting a drive, but it's
- observed reliably when a drive gets mounted or unmounted. -}
kdefallback = matchAny
{ matchInterface = Just "org.kde.KDirNotify"
, matchMember = Just "enteredDirectory"
}
#endif
pollingThread :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> IO ()
pollingThread st dstatus scanremotes = go =<< currentMountPoints
where
go wasmounted = do
threadDelaySeconds (Seconds 10)
nowmounted <- currentMountPoints
handleMounts st dstatus scanremotes wasmounted nowmounted
go nowmounted
handleMounts :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> MountPoints -> MountPoints -> IO ()
handleMounts st dstatus scanremotes wasmounted nowmounted =
mapM_ (handleMount st dstatus scanremotes . mnt_dir) $
S.toList $ newMountPoints wasmounted nowmounted
handleMount :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> FilePath -> IO ()
handleMount st dstatus scanremotes dir = do
debug thisThread ["detected mount of", dir]
reconnectRemotes thisThread st dstatus scanremotes
=<< filter (Git.repoIsLocal . Remote.repo)
<$> remotesUnder st dstatus dir
{- Finds remotes located underneath the mount point.
-
- Updates state to include the remotes.
-
- The config of git remotes is re-read, as it may not have been available
- at startup time, or may have changed (it could even be a different
- repository at the same remote location..)
-}
remotesUnder :: ThreadState -> DaemonStatusHandle -> FilePath -> IO [Remote]
remotesUnder st dstatus dir = runThreadState st $ do
repotop <- fromRepo Git.repoPath
rs <- remoteList
pairs <- mapM (checkremote repotop) rs
let (waschanged, rs') = unzip pairs
when (any id waschanged) $ do
Annex.changeState $ \s -> s { Annex.remotes = rs' }
updateKnownRemotes dstatus
return $ map snd $ filter fst pairs
where
checkremote repotop r = case Remote.localpath r of
Just p | dirContains dir (absPathFrom repotop p) ->
(,) <$> pure True <*> updateRemote r
_ -> return (False, r)
type MountPoints = S.Set Mntent
currentMountPoints :: IO MountPoints
currentMountPoints = S.fromList <$> getMounts
newMountPoints :: MountPoints -> MountPoints -> MountPoints
newMountPoints old new = S.difference new old

View file

@ -0,0 +1,132 @@
{- git-annex assistant network connection watcher, using dbus
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Assistant.Threads.NetWatcher where
import Assistant.Common
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.ScanRemotes
import Assistant.Sync
import qualified Git
import Utility.ThreadScheduler
import Remote.List
import qualified Types.Remote as Remote
import qualified Control.Exception as E
import Control.Concurrent
#if WITH_DBUS
import Utility.DBus
import DBus.Client
import DBus
import Data.Word (Word32)
#else
#warning Building without dbus support; will poll for network connection changes
#endif
thisThread :: ThreadName
thisThread = "NetWatcher"
netWatcherThread :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> IO ()
netWatcherThread st dstatus scanremotes = do
#if WITH_DBUS
void $ forkIO $ dbusThread st dstatus scanremotes
#endif
{- This is a fallback for when dbus cannot be used to detect
- network connection changes, but it also ensures that
- any networked remotes that may have not been routable for a
- while (despite the local network staying up), are synced with
- periodically. -}
runEvery (Seconds 3600) $
handleConnection st dstatus scanremotes
#if WITH_DBUS
dbusThread :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> IO ()
dbusThread st dstatus scanremotes = E.catch (go =<< connectSystem) onerr
where
go client = ifM (checkNetMonitor client)
( do
listenNMConnections client handle
listenWicdConnections client handle
, do
runThreadState st $
warning "No known network monitor available through dbus; falling back to polling"
)
onerr :: E.SomeException -> IO ()
onerr e = runThreadState st $
warning $ "Failed to use dbus; falling back to polling (" ++ show e ++ ")"
handle = do
debug thisThread ["detected network connection"]
handleConnection st dstatus scanremotes
{- Examine the list of services connected to dbus, to see if there
- are any we can use to monitor network connections. -}
checkNetMonitor :: Client -> IO Bool
checkNetMonitor client = do
running <- filter (`elem` [networkmanager, wicd])
<$> listServiceNames client
case running of
[] -> return False
(service:_) -> do
debug thisThread [ "Using running DBUS service"
, service
, "to monitor network connection events."
]
return True
where
networkmanager = "org.freedesktop.NetworkManager"
wicd = "org.wicd.daemon"
{- Listens for new NetworkManager connections. -}
listenNMConnections :: Client -> IO () -> IO ()
listenNMConnections client callback =
listen client matcher $ \event ->
when (Just True == anyM activeconnection (signalBody event)) $
callback
where
matcher = matchAny
{ matchInterface = Just "org.freedesktop.NetworkManager.Connection.Active"
, matchMember = Just "PropertiesChanged"
}
nm_connection_activated = toVariant (2 :: Word32)
nm_state_key = toVariant ("State" :: String)
activeconnection v = do
m <- fromVariant v
vstate <- lookup nm_state_key $ dictionaryItems m
state <- fromVariant vstate
return $ state == nm_connection_activated
{- Listens for new Wicd connections. -}
listenWicdConnections :: Client -> IO () -> IO ()
listenWicdConnections client callback =
listen client matcher $ \event ->
when (any (== wicd_success) (signalBody event)) $
callback
where
matcher = matchAny
{ matchInterface = Just "org.wicd.daemon"
, matchMember = Just "ConnectResultsSent"
}
wicd_success = toVariant ("success" :: String)
#endif
handleConnection :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> IO ()
handleConnection st dstatus scanremotes = do
reconnectRemotes thisThread st dstatus scanremotes =<<
filter (Git.repoIsUrl . Remote.repo)
<$> networkRemotes st
{- Finds network remotes. -}
networkRemotes :: ThreadState -> IO [Remote]
networkRemotes st = runThreadState st $
filter (isNothing . Remote.localpath) <$> remoteList

View file

@ -0,0 +1,82 @@
{- git-annex assistant git pushing thread
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Threads.Pusher where
import Assistant.Common
import Assistant.Commits
import Assistant.Pushes
import Assistant.Alert
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.Sync
import Utility.ThreadScheduler
import qualified Remote
import qualified Types.Remote as Remote
import Data.Time.Clock
thisThread :: ThreadName
thisThread = "Pusher"
{- This thread retries pushes that failed before. -}
pushRetryThread :: ThreadState -> DaemonStatusHandle -> FailedPushMap -> IO ()
pushRetryThread st dstatus pushmap = runEvery (Seconds halfhour) $ do
-- We already waited half an hour, now wait until there are failed
-- pushes to retry.
topush <- getFailedPushesBefore pushmap (fromIntegral halfhour)
unless (null topush) $ do
debug thisThread
[ "retrying"
, show (length topush)
, "failed pushes"
]
now <- getCurrentTime
void $ alertWhile dstatus (pushRetryAlert topush) $
pushToRemotes thisThread now st (Just pushmap) topush
where
halfhour = 1800
{- This thread pushes git commits out to remotes soon after they are made. -}
pushThread :: ThreadState -> DaemonStatusHandle -> CommitChan -> FailedPushMap -> IO ()
pushThread st dstatus commitchan pushmap = do
runEvery (Seconds 2) $ do
-- We already waited two seconds as a simple rate limiter.
-- Next, wait until at least one commit has been made
commits <- getCommits commitchan
-- Now see if now's a good time to push.
now <- getCurrentTime
if shouldPush now commits
then do
remotes <- filter pushable . knownRemotes
<$> getDaemonStatus dstatus
unless (null remotes) $
void $ alertWhile dstatus (pushAlert remotes) $
pushToRemotes thisThread now st (Just pushmap) remotes
else do
debug thisThread
[ "delaying push of"
, show (length commits)
, "commits"
]
refillCommits commitchan commits
where
pushable r
| Remote.specialRemote r = False
| Remote.readonly r = False
| otherwise = True
{- Decide if now is a good time to push to remotes.
-
- Current strategy: Immediately push all commits. The commit machinery
- already determines batches of changes, so we can't easily determine
- batches better.
-}
shouldPush :: UTCTime -> [Commit] -> Bool
shouldPush _now commits
| not (null commits) = True
| otherwise = False

View file

@ -0,0 +1,98 @@
{- git-annex assistant sanity checker
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Threads.SanityChecker (
sanityCheckerThread
) where
import Assistant.Common
import Assistant.DaemonStatus
import Assistant.ThreadedMonad
import Assistant.Changes
import Assistant.Alert
import Assistant.TransferQueue
import qualified Git.LsFiles
import Utility.ThreadScheduler
import qualified Assistant.Threads.Watcher as Watcher
import Data.Time.Clock.POSIX
thisThread :: ThreadName
thisThread = "SanityChecker"
{- This thread wakes up occasionally to make sure the tree is in good shape. -}
sanityCheckerThread :: ThreadState -> DaemonStatusHandle -> TransferQueue -> ChangeChan -> IO ()
sanityCheckerThread st dstatus transferqueue changechan = forever $ do
waitForNextCheck dstatus
debug thisThread ["starting sanity check"]
void $ alertWhile dstatus sanityCheckAlert go
debug thisThread ["sanity check complete"]
where
go = do
modifyDaemonStatus_ dstatus $ \s -> s
{ sanityCheckRunning = True }
now <- getPOSIXTime -- before check started
r <- catchIO (check st dstatus transferqueue changechan)
$ \e -> do
runThreadState st $ warning $ show e
return False
modifyDaemonStatus_ dstatus $ \s -> s
{ sanityCheckRunning = False
, lastSanityCheck = Just now
}
return r
{- Only run one check per day, from the time of the last check. -}
waitForNextCheck :: DaemonStatusHandle -> IO ()
waitForNextCheck dstatus = do
v <- lastSanityCheck <$> getDaemonStatus dstatus
now <- getPOSIXTime
threadDelaySeconds $ Seconds $ calcdelay now v
where
calcdelay _ Nothing = oneDay
calcdelay now (Just lastcheck)
| lastcheck < now = max oneDay $
oneDay - truncate (now - lastcheck)
| otherwise = oneDay
oneDay :: Int
oneDay = 24 * 60 * 60
{- It's important to stay out of the Annex monad as much as possible while
- running potentially expensive parts of this check, since remaining in it
- will block the watcher. -}
check :: ThreadState -> DaemonStatusHandle -> TransferQueue -> ChangeChan -> IO Bool
check st dstatus transferqueue changechan = do
g <- runThreadState st $ fromRepo id
-- Find old unstaged symlinks, and add them to git.
unstaged <- Git.LsFiles.notInRepo False ["."] g
now <- getPOSIXTime
forM_ unstaged $ \file -> do
ms <- catchMaybeIO $ getSymbolicLinkStatus file
case ms of
Just s | toonew (statusChangeTime s) now -> noop
| isSymbolicLink s ->
addsymlink file ms
_ -> noop
return True
where
toonew timestamp now = now < (realToFrac (timestamp + slop) :: POSIXTime)
slop = fromIntegral tenMinutes
insanity msg = do
runThreadState st $ warning msg
void $ addAlert dstatus $ sanityCheckFixAlert msg
addsymlink file s = do
Watcher.runHandler thisThread st dstatus
transferqueue changechan
Watcher.onAddSymlink file s
insanity $ "found unstaged symlink: " ++ file

View file

@ -0,0 +1,138 @@
{- git-annex assistant thread to scan remotes to find needed transfers
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Threads.TransferScanner where
import Assistant.Common
import Assistant.ScanRemotes
import Assistant.TransferQueue
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.Alert
import Logs.Transfer
import Logs.Location
import Logs.Web (webUUID)
import qualified Remote
import qualified Types.Remote as Remote
import Utility.ThreadScheduler
import qualified Git.LsFiles as LsFiles
import Command
import Annex.Content
import qualified Data.Set as S
thisThread :: ThreadName
thisThread = "TransferScanner"
{- This thread waits until a remote needs to be scanned, to find transfers
- that need to be made, to keep data in sync.
-}
transferScannerThread :: ThreadState -> DaemonStatusHandle -> ScanRemoteMap -> TransferQueue -> IO ()
transferScannerThread st dstatus scanremotes transferqueue = do
startupScan
go S.empty
where
go scanned = do
threadDelaySeconds (Seconds 2)
(rs, infos) <- unzip <$> getScanRemote scanremotes
if any fullScan infos || any (`S.notMember` scanned) rs
then do
expensiveScan st dstatus transferqueue rs
go (S.union scanned (S.fromList rs))
else do
mapM_ (failedTransferScan st dstatus transferqueue) rs
go scanned
{- All available remotes are scanned in full on startup,
- for multiple reasons, including:
-
- * This may be the first run, and there may be remotes
- already in place, that need to be synced.
- * We may have run before, and scanned a remote, but
- only been in a subdirectory of the git remote, and so
- not synced it all.
- * We may have run before, and had transfers queued,
- and then the system (or us) crashed, and that info was
- lost.
-}
startupScan = addScanRemotes scanremotes True
=<< knownRemotes <$> getDaemonStatus dstatus
{- This is a cheap scan for failed transfers involving a remote. -}
failedTransferScan :: ThreadState -> DaemonStatusHandle -> TransferQueue -> Remote -> IO ()
failedTransferScan st dstatus transferqueue r = do
ts <- runThreadState st $
getFailedTransfers $ Remote.uuid r
go ts
where
go [] = noop
go ((t, info):ts)
| transferDirection t == Download = do
{- Check if the remote still has the key.
- If not, relies on the expensiveScan to
- get it queued from some other remote. -}
ifM (runThreadState st $ remoteHas r $ transferKey t)
( requeue t info
, dequeue t
)
go ts
| otherwise = do
{- The Transferrer checks when uploading
- that the remote doesn't already have the
- key, so it's not redundantly checked
- here. -}
requeue t info
go ts
requeue t info = do
queueTransferWhenSmall
transferqueue dstatus (associatedFile info) t r
dequeue t
dequeue t = void $ runThreadState st $ inRepo $
liftIO . tryIO . removeFile . failedTransferFile t
{- This is an expensive scan through the full git work tree, finding
- files to download from or upload to any of the remotes.
-
- The scan is blocked when the transfer queue gets too large. -}
expensiveScan :: ThreadState -> DaemonStatusHandle -> TransferQueue -> [Remote] -> IO ()
expensiveScan st dstatus transferqueue rs = unless onlyweb $ do
liftIO $ debug thisThread ["starting scan of", show visiblers]
void $ alertWhile dstatus (scanAlert visiblers) $ do
g <- runThreadState st $ fromRepo id
files <- LsFiles.inRepo [] g
go files
return True
liftIO $ debug thisThread ["finished scan of", show visiblers]
where
onlyweb = all (== webUUID) $ map Remote.uuid rs
visiblers = let rs' = filter (not . Remote.readonly) rs
in if null rs' then rs else rs'
go [] = noop
go (f:fs) = do
mapM_ (enqueue f) =<< catMaybes <$> runThreadState st
(ifAnnexed f findtransfers $ return [])
go fs
enqueue f (r, t) = do
debug thisThread ["queuing", show t]
queueTransferWhenSmall transferqueue dstatus (Just f) t r
findtransfers (key, _) = do
locs <- loggedLocations key
let use a = return $ map (a key locs) rs
ifM (inAnnex key)
( use $ check Upload False
, use $ check Download True
)
check direction want key locs r
| direction == Upload && Remote.readonly r = Nothing
| (Remote.uuid r `elem` locs) == want = Just $
(r, Transfer direction (Remote.uuid r) key)
| otherwise = Nothing
remoteHas :: Remote -> Key -> Annex Bool
remoteHas r key = elem
<$> pure (Remote.uuid r)
<*> loggedLocations key
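To restate the decision logic above (a hedged reading of check and findtransfers, not an addition to the commit): when the key is present locally, an upload is queued to every writable remote that the location log says lacks it; when the key is absent locally, a download is queued from every remote that has it.
-- Illustration only, following check/findtransfers above:
--   inAnnex key == True   uses  check Upload False
--     -> Just (r, Transfer Upload (Remote.uuid r) key)
--        when r is not readonly and Remote.uuid r `notElem` locs
--   inAnnex key == False  uses  check Download True
--     -> Just (r, Transfer Download (Remote.uuid r) key)
--        when Remote.uuid r `elem` locs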

View file

@ -0,0 +1,80 @@
{- git-annex assistant transfer watching thread
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Threads.TransferWatcher where
import Assistant.Common
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Logs.Transfer
import Utility.DirWatcher
import Utility.Types.DirWatcher
import qualified Remote
thisThread :: ThreadName
thisThread = "TransferWatcher"
{- This thread watches for changes to the gitAnnexTransferDir,
- and updates the DaemonStatus's map of ongoing transfers. -}
transferWatcherThread :: ThreadState -> DaemonStatusHandle -> IO ()
transferWatcherThread st dstatus = do
g <- runThreadState st $ fromRepo id
let dir = gitAnnexTransferDir g
createDirectoryIfMissing True dir
let hook a = Just $ runHandler st dstatus a
let hooks = mkWatchHooks
{ addHook = hook onAdd
, delHook = hook onDel
, errHook = hook onErr
}
void $ watchDir dir (const False) hooks id
debug thisThread ["watching for transfers"]
type Handler = ThreadState -> DaemonStatusHandle -> FilePath -> Maybe FileStatus -> IO ()
{- Runs an action handler.
-
- Exceptions are ignored, otherwise a whole thread could be crashed.
-}
runHandler :: ThreadState -> DaemonStatusHandle -> Handler -> FilePath -> Maybe FileStatus -> IO ()
runHandler st dstatus handler file filestatus = void $ do
either print (const noop) =<< tryIO go
where
go = handler st dstatus file filestatus
{- Called when there's an error with inotify. -}
onErr :: Handler
onErr _ _ msg _ = error msg
{- Called when a new transfer information file is written. -}
onAdd :: Handler
onAdd st dstatus file _ = case parseTransferFile file of
Nothing -> noop
Just t -> go t =<< runThreadState st (checkTransfer t)
where
go _ Nothing = noop -- transfer already finished
go t (Just info) = do
debug thisThread
[ "transfer starting:"
, show t
]
r <- headMaybe . filter (sameuuid t) . knownRemotes
<$> getDaemonStatus dstatus
updateTransferInfo dstatus t info
{ transferRemote = r }
sameuuid t r = Remote.uuid r == transferUUID t
{- Called when a transfer information file is removed. -}
onDel :: Handler
onDel _ dstatus file _ = case parseTransferFile file of
Nothing -> noop
Just t -> do
debug thisThread
[ "transfer finishing:"
, show t
]
void $ removeTransfer dstatus t

View file

@ -0,0 +1,113 @@
{- git-annex assistant data transferrer thread
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Threads.Transferrer where
import Assistant.Common
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.TransferQueue
import Assistant.TransferSlots
import Assistant.Alert
import Logs.Transfer
import Logs.Location
import Annex.Content
import qualified Remote
import Types.Key
import Locations.UserConfig
import System.Process (create_group)
thisThread :: ThreadName
thisThread = "Transferrer"
{- For now only one transfer is run at a time. -}
maxTransfers :: Int
maxTransfers = 1
{- Dispatches transfers from the queue. -}
transfererThread :: ThreadState -> DaemonStatusHandle -> TransferQueue -> TransferSlots -> IO ()
transfererThread st dstatus transferqueue slots = go =<< readProgramFile
where
go program = getNextTransfer transferqueue dstatus notrunning >>= handle program
handle program Nothing = go program
handle program (Just (t, info)) = do
ifM (runThreadState st $ shouldTransfer t info)
( do
debug thisThread [ "Transferring:" , show t ]
notifyTransfer dstatus
transferThread dstatus slots t info inTransferSlot program
, do
debug thisThread [ "Skipping unnecessary transfer:" , show t ]
-- getNextTransfer added t to the
-- daemonstatus's transfer map.
void $ removeTransfer dstatus t
)
go program
{- Skip transfers that are already running. -}
notrunning i = startedTime i == Nothing
{- Checks if the file to download is already present, or the remote
- being uploaded to isn't known to have the file. -}
shouldTransfer :: Transfer -> TransferInfo -> Annex Bool
shouldTransfer t info
| transferDirection t == Download =
not <$> inAnnex key
| transferDirection t == Upload =
{- Trust the location log to check if the
- remote already has the key. This avoids
- a roundtrip to the remote. -}
case transferRemote info of
Nothing -> return False
Just remote ->
notElem (Remote.uuid remote)
<$> loggedLocations key
| otherwise = return False
where
key = transferKey t
{- A separate git-annex process is forked off to run a transfer,
- running in its own process group. This allows killing it and all its
- children if the user decides to cancel the transfer.
-
- A thread is forked off to run the process, and the thread
- occupies one of the transfer slots. If all slots are in use, this will
- block until one becomes available. The thread's id is also recorded in
- the transfer info; the thread will also be killed when a transfer is
- stopped, to avoid it displaying any alert about the transfer having
- failed. -}
transferThread :: DaemonStatusHandle -> TransferSlots -> Transfer -> TransferInfo -> TransferSlotRunner -> FilePath -> IO ()
transferThread dstatus slots t info runner program = case (transferRemote info, associatedFile info) of
(Nothing, _) -> noop
(_, Nothing) -> noop
(Just remote, Just file) -> do
tid <- runner slots $
transferprocess remote file
updateTransferInfo dstatus t $ info { transferTid = Just tid }
where
direction = transferDirection t
isdownload = direction == Download
transferprocess remote file = void $ do
(_, _, _, pid)
<- createProcess (proc program $ toCommand params)
{ create_group = True }
ok <- (==) ExitSuccess <$> waitForProcess pid
addAlert dstatus $
makeAlertFiller ok $
transferFileAlert direction ok file
where
params =
[ Param "transferkey"
, Param $ key2file $ transferKey t
, Param $ if isdownload
then "--from"
else "--to"
, Param $ Remote.name remote
, Param "--file"
, File file
]
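Concretely, and with made-up names for the key, remote and file (an illustration, not part of the commit), the params above produce an invocation along these lines:
--   git-annex transferkey SHA256E-s1048576--0123abcd... --from usbdrive --file photos/img.jpg
-- for a download; an upload swaps --from for --to. The child is started with
-- create_group = True so its whole process group can be signalled on cancel.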

View file

@ -5,14 +5,22 @@
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE CPP #-}
module Assistant.Threads.Watcher (
watchThread,
checkCanWatch,
needLsof,
stageSymlink,
onAddSymlink,
runHandler,
) where
module Assistant.Watcher where
import Common.Annex
import Assistant.Common
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.Changes
import Assistant.TransferQueue
import Assistant.Alert
import Logs.Transfer
import Utility.DirWatcher
import Utility.Types.DirWatcher
import qualified Annex
@ -27,10 +35,12 @@ import Annex.Content
import Annex.CatFile
import Git.Types
import Control.Concurrent.STM
import Data.Bits.Utils
import qualified Data.ByteString.Lazy as L
thisThread :: ThreadName
thisThread = "Watcher"
checkCanWatch :: Annex ()
checkCanWatch
| canWatch =
@ -46,11 +56,13 @@ needLsof = error $ unlines
, "Be warned: This can corrupt data in the annex, and make fsck complain."
]
watchThread :: ThreadState -> DaemonStatusHandle -> ChangeChan -> IO ()
watchThread st dstatus changechan = watchDir "." ignored hooks startup
watchThread :: ThreadState -> DaemonStatusHandle -> TransferQueue -> ChangeChan -> IO ()
watchThread st dstatus transferqueue changechan = do
void $ watchDir "." ignored hooks startup
debug thisThread [ "watching", "."]
where
startup = statupScan st dstatus
hook a = Just $ runHandler st dstatus changechan a
startup = startupScan st dstatus
hook a = Just $ runHandler thisThread st dstatus transferqueue changechan a
hooks = WatchHooks
{ addHook = hook onAdd
, delHook = hook onDel
@ -60,18 +72,21 @@ watchThread st dstatus changechan = watchDir "." ignored hooks startup
}
{- Initial startup scan. The action should return once the scan is complete. -}
statupScan :: ThreadState -> DaemonStatusHandle -> IO a -> IO a
statupScan st dstatus scanner = do
runThreadState st $
showAction "scanning"
r <- scanner
runThreadState st $
modifyDaemonStatus dstatus $ \s -> s { scanComplete = True }
startupScan :: ThreadState -> DaemonStatusHandle -> IO a -> IO a
startupScan st dstatus scanner = do
runThreadState st $ showAction "scanning"
r <- alertWhile' dstatus startupScanAlert $ do
r <- scanner
-- Notice any files that were deleted before watching was started.
runThreadState st $ do
inRepo $ Git.Command.run "add" [Param "--update"]
showAction "started"
-- Notice any files that were deleted before
-- watching was started.
runThreadState st $ do
inRepo $ Git.Command.run "add" [Param "--update"]
showAction "started"
modifyDaemonStatus_ dstatus $ \s -> s { scanComplete = True }
return (True, r)
return r
@ -83,23 +98,22 @@ ignored = ig . takeFileName
ig ".gitattributes" = True
ig _ = False
type Handler = FilePath -> Maybe FileStatus -> DaemonStatusHandle -> Annex (Maybe Change)
type Handler = ThreadName -> FilePath -> Maybe FileStatus -> DaemonStatusHandle -> TransferQueue -> Annex (Maybe Change)
{- Runs an action handler, inside the Annex monad, and if there was a
- change, adds it to the ChangeChan.
-
- Exceptions are ignored, otherwise a whole watcher thread could be crashed.
-}
runHandler :: ThreadState -> DaemonStatusHandle -> ChangeChan -> Handler -> FilePath -> Maybe FileStatus -> IO ()
runHandler st dstatus changechan handler file filestatus = void $ do
runHandler :: ThreadName -> ThreadState -> DaemonStatusHandle -> TransferQueue -> ChangeChan -> Handler -> FilePath -> Maybe FileStatus -> IO ()
runHandler threadname st dstatus transferqueue changechan handler file filestatus = void $ do
r <- tryIO go
case r of
Left e -> print e
Right Nothing -> noop
Right (Just change) -> void $
runChangeChan $ writeTChan changechan change
Right (Just change) -> recordChange changechan change
where
go = runThreadState st $ handler file filestatus dstatus
go = runThreadState st $ handler threadname file filestatus dstatus transferqueue
{- During initial directory scan, this will be run for any regular files
- that are already checked into git. We don't want to turn those into
@ -120,9 +134,9 @@ runHandler st dstatus changechan handler file filestatus = void $ do
- the add.
-}
onAdd :: Handler
onAdd file filestatus dstatus
onAdd threadname file filestatus dstatus _
| maybe False isRegularFile filestatus = do
ifM (scanComplete <$> getDaemonStatus dstatus)
ifM (scanComplete <$> liftIO (getDaemonStatus dstatus))
( go
, ifM (null <$> inRepo (Git.LsFiles.notInRepo False [file]))
( noChange
@ -131,27 +145,33 @@ onAdd file filestatus dstatus
)
| otherwise = noChange
where
go = pendingAddChange =<< Command.Add.lockDown file
go = do
liftIO $ debug threadname ["file added", file]
pendingAddChange =<< Command.Add.lockDown file
{- A symlink might be an arbitrary symlink, which is just added.
- Or, if it is a git-annex symlink, ensure it points to the content
- before adding it.
-}
onAddSymlink :: Handler
onAddSymlink file filestatus dstatus = go =<< Backend.lookupFile file
onAddSymlink threadname file filestatus dstatus transferqueue = go =<< Backend.lookupFile file
where
go (Just (key, _)) = do
link <- calcGitLink file key
ifM ((==) link <$> liftIO (readSymbolicLink file))
( ensurestaged link =<< getDaemonStatus dstatus
( do
s <- liftIO $ getDaemonStatus dstatus
checkcontent key s
ensurestaged link s
, do
liftIO $ debug threadname ["fix symlink", file]
liftIO $ removeFile file
liftIO $ createSymbolicLink link file
addlink link
)
go Nothing = do -- other symlink
link <- liftIO (readSymbolicLink file)
ensurestaged link =<< getDaemonStatus dstatus
ensurestaged link =<< liftIO (getDaemonStatus dstatus)
{- This is often called on symlinks that are already
- staged correctly. A symlink may have been deleted
@ -174,6 +194,7 @@ onAddSymlink file filestatus dstatus = go =<< Backend.lookupFile file
{- For speed, tries to reuse the existing blob for
- the symlink target. -}
addlink link = do
liftIO $ debug threadname ["add symlink", file]
v <- catObjectDetails $ Ref $ ':':file
case v of
Just (currlink, sha)
@ -185,8 +206,17 @@ onAddSymlink file filestatus dstatus = go =<< Backend.lookupFile file
stageSymlink file sha
madeChange file LinkChange
{- When a new link appears, after the startup scan,
- try to get the key's content. -}
checkcontent key daemonstatus
| scanComplete daemonstatus = unlessM (inAnnex key) $
queueTransfers Next transferqueue dstatus
key (Just file) Download
| otherwise = noop
onDel :: Handler
onDel file _ _dstatus = do
onDel threadname file _ _dstatus _ = do
liftIO $ debug threadname ["file deleted", file]
Annex.Queue.addUpdateIndex =<<
inRepo (Git.UpdateIndex.unstageFile file)
madeChange file RmChange
@ -199,14 +229,15 @@ onDel file _ _dstatus = do
- command to get the recursive list of files in the directory, so rm is
- just as good. -}
onDelDir :: Handler
onDelDir dir _ _dstatus = do
onDelDir threadname dir _ _dstatus _ = do
liftIO $ debug threadname ["directory deleted", dir]
Annex.Queue.addCommand "rm"
[Params "--quiet -r --cached --ignore-unmatch --"] [dir]
madeChange dir RmDirChange
{- Called when there's an error with inotify. -}
onErr :: Handler
onErr msg _ _dstatus = do
onErr _ msg _ _dstatus _ = do
warning msg
return Nothing

112
Assistant/Threads/WebApp.hs Normal file
View file

@ -0,0 +1,112 @@
{- git-annex assistant webapp thread
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Assistant.Threads.WebApp where
import Assistant.Common
import Assistant.WebApp
import Assistant.WebApp.DashBoard
import Assistant.WebApp.SideBar
import Assistant.WebApp.Notifications
import Assistant.WebApp.Configurators
import Assistant.WebApp.Documentation
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.ScanRemotes
import Assistant.TransferQueue
import Assistant.TransferSlots
import Utility.WebApp
import Utility.FileMode
import Utility.TempFile
import Git
import Yesod
import Yesod.Static
import Network.Socket (PortNumber)
import Data.Text (pack, unpack)
thisThread :: String
thisThread = "WebApp"
mkYesodDispatch "WebApp" $(parseRoutesFile "Assistant/WebApp/routes")
type Url = String
webAppThread
:: (Maybe ThreadState)
-> DaemonStatusHandle
-> ScanRemoteMap
-> TransferQueue
-> TransferSlots
-> Maybe (IO String)
-> Maybe (Url -> FilePath -> IO ())
-> IO ()
webAppThread mst dstatus scanremotes transferqueue transferslots postfirstrun onstartup = do
webapp <- WebApp
<$> pure mst
<*> pure dstatus
<*> pure scanremotes
<*> pure transferqueue
<*> pure transferslots
<*> (pack <$> genRandomToken)
<*> getreldir mst
<*> pure $(embed "static")
<*> newWebAppState
<*> pure postfirstrun
app <- toWaiAppPlain webapp
app' <- ifM debugEnabled
( return $ httpDebugLogger app
, return app
)
runWebApp app' $ \port -> case mst of
Nothing -> withTempFile "webapp.html" $ \tmpfile _ -> go port webapp tmpfile
Just st -> go port webapp =<< runThreadState st (fromRepo gitAnnexHtmlShim)
where
getreldir Nothing = return Nothing
getreldir (Just st) = Just <$>
(relHome =<< absPath
=<< runThreadState st (fromRepo repoPath))
go port webapp htmlshim = do
writeHtmlShim webapp port htmlshim
maybe noop (\a -> a (myUrl webapp port "/") htmlshim) onstartup
{- Creates a html shim file that's used to redirect into the webapp,
- to avoid exposing the secretToken when launching the web browser. -}
writeHtmlShim :: WebApp -> PortNumber -> FilePath -> IO ()
writeHtmlShim webapp port file = do
debug thisThread ["running on port", show port]
viaTmp go file $ genHtmlShim webapp port
where
go tmpfile content = do
h <- openFile tmpfile WriteMode
modifyFileMode tmpfile $ removeModes [groupReadMode, otherReadMode]
hPutStr h content
hClose h
{- TODO: generate this static file using Yesod. -}
genHtmlShim :: WebApp -> PortNumber -> String
genHtmlShim webapp port = unlines
[ "<html>"
, "<head>"
, "<title>Starting webapp...</title>"
, "<meta http-equiv=\"refresh\" content=\"0; URL="++url++"\">"
, "<body>"
, "<p>"
, "<a href=\"" ++ url ++ "\">Starting webapp...</a>"
, "</p>"
, "</body>"
, "</html>"
]
where
url = myUrl webapp port "/"
myUrl :: WebApp -> PortNumber -> FilePath -> Url
myUrl webapp port page = "http://localhost:" ++ show port ++ page ++
"?auth=" ++ unpack (secretToken webapp)

154
Assistant/TransferQueue.hs Normal file
View file

@ -0,0 +1,154 @@
{- git-annex assistant pending transfer queue
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.TransferQueue (
TransferQueue,
Schedule(..),
newTransferQueue,
getTransferQueue,
queueTransfers,
queueTransfer,
queueTransferAt,
queueTransferWhenSmall,
getNextTransfer,
dequeueTransfer,
) where
import Common.Annex
import Assistant.DaemonStatus
import Logs.Transfer
import Types.Remote
import qualified Remote
import qualified Types.Remote as Remote
import Control.Concurrent.STM
import qualified Data.Map as M
{- The transfer queue consists of a channel listing the transfers to make;
- the size of the queue is also tracked, and a list is maintained
- in parallel to allow for reading. -}
data TransferQueue = TransferQueue
{ queue :: TChan (Transfer, TransferInfo)
, queuesize :: TVar Int
, queuelist :: TVar [(Transfer, TransferInfo)]
}
data Schedule = Next | Later
deriving (Eq)
newTransferQueue :: IO TransferQueue
newTransferQueue = atomically $ TransferQueue
<$> newTChan
<*> newTVar 0
<*> newTVar []
{- Reads the queue's content without blocking or changing it. -}
getTransferQueue :: TransferQueue -> IO [(Transfer, TransferInfo)]
getTransferQueue q = atomically $ readTVar $ queuelist q
stubInfo :: AssociatedFile -> Remote -> TransferInfo
stubInfo f r = TransferInfo
{ startedTime = Nothing
, transferPid = Nothing
, transferTid = Nothing
, transferRemote = Just r
, bytesComplete = Nothing
, associatedFile = f
, transferPaused = False
}
{- Adds transfers to queue for some of the known remotes. -}
queueTransfers :: Schedule -> TransferQueue -> DaemonStatusHandle -> Key -> AssociatedFile -> Direction -> Annex ()
queueTransfers schedule q dstatus k f direction = do
rs <- knownRemotes <$> liftIO (getDaemonStatus dstatus)
mapM_ go =<< sufficientremotes rs
where
sufficientremotes rs
-- Queue downloads from all remotes that
-- have the key, with the cheapest ones first.
-- More expensive ones will only be tried if
-- downloading from a cheap one fails.
| direction == Download = do
uuids <- Remote.keyLocations k
return $ filter (\r -> uuid r `elem` uuids) rs
-- TODO: Determine a smaller set of remotes that
-- can be uploaded to, in order to ensure all
-- remotes can access the content. Currently,
-- send to every remote we can.
| otherwise = return $ filter (not . Remote.readonly) rs
gentransfer r = Transfer
{ transferDirection = direction
, transferKey = k
, transferUUID = Remote.uuid r
}
go r = liftIO $
enqueue schedule q dstatus (gentransfer r) (stubInfo f r)
enqueue :: Schedule -> TransferQueue -> DaemonStatusHandle -> Transfer -> TransferInfo -> IO ()
enqueue schedule q dstatus t info
| schedule == Next = go unGetTChan (new:)
| otherwise = go writeTChan (\l -> l++[new])
where
new = (t, info)
go modqueue modlist = do
atomically $ do
void $ modqueue (queue q) new
void $ modifyTVar' (queuesize q) succ
void $ modifyTVar' (queuelist q) modlist
void $ notifyTransfer dstatus
{- Adds a transfer to the queue. -}
queueTransfer :: Schedule -> TransferQueue -> DaemonStatusHandle -> AssociatedFile -> Transfer -> Remote -> IO ()
queueTransfer schedule q dstatus f t remote =
enqueue schedule q dstatus t (stubInfo f remote)
{- Blocks until the queue is no larger than a given size, and then adds a
- transfer to the queue. -}
queueTransferAt :: Int -> Schedule -> TransferQueue -> DaemonStatusHandle -> AssociatedFile -> Transfer -> Remote -> IO ()
queueTransferAt wantsz schedule q dstatus f t remote = do
atomically $ do
sz <- readTVar (queuesize q)
if sz <= wantsz
then return ()
else retry -- blocks until queuesize changes
enqueue schedule q dstatus t (stubInfo f remote)
queueTransferWhenSmall :: TransferQueue -> DaemonStatusHandle -> AssociatedFile -> Transfer -> Remote -> IO ()
queueTransferWhenSmall = queueTransferAt 10 Later
{- Blocks until a pending transfer is available from the queue,
- and removes it.
-
- Checks that it's acceptable, before adding it to the
- currentTransfers map. If it's not acceptable, it's discarded.
-
- This is done in a single STM transaction, so there is no window
- where an observer sees an inconsistent status. -}
getNextTransfer :: TransferQueue -> DaemonStatusHandle -> (TransferInfo -> Bool) -> IO (Maybe (Transfer, TransferInfo))
getNextTransfer q dstatus acceptable = atomically $ do
void $ modifyTVar' (queuesize q) pred
void $ modifyTVar' (queuelist q) (drop 1)
r@(t, info) <- readTChan (queue q)
if acceptable info
then do
adjustTransfersSTM dstatus $
M.insertWith' const t info
return $ Just r
else return Nothing
{- Removes a transfer from the queue, if present, and returns True if it
- was present. -}
dequeueTransfer :: TransferQueue -> DaemonStatusHandle -> Transfer -> IO Bool
dequeueTransfer q dstatus t = do
ok <- atomically $ do
(l, removed) <- partition (\i -> fst i /= t) <$> readTVar (queuelist q)
void $ writeTVar (queuesize q) (length l)
void $ writeTVar (queuelist q) l
return $ not $ null removed
when ok $
notifyTransfer dstatus
return ok
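A hedged usage sketch (exampleQueueUse is hypothetical; it assumes the module's imports above plus Data.Maybe) showing one producer queuing a download and one consumer claiming it, skipping transfers that are already running:
exampleQueueUse :: TransferQueue -> DaemonStatusHandle -> Remote -> Key -> IO ()
exampleQueueUse q dstatus r k = do
    -- producer: queue a download of k from r at normal (Later) priority
    queueTransfer Later q dstatus (Just "some/file") t r
    -- consumer: block until a transfer is available, claim it if it is
    -- not already running, and record it in the daemon status
    void $ getNextTransfer q dstatus (isNothing . startedTime)
  where
    t = Transfer Download (Remote.uuid r) k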

View file

@ -0,0 +1,71 @@
{- git-annex assistant transfer slots
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE DeriveDataTypeable #-}
module Assistant.TransferSlots where
import qualified Control.Exception as E
import Control.Concurrent
import Data.Typeable
import Common.Annex
import Utility.ThreadScheduler
type TransferSlots = QSemN
{- A special exception that can be thrown to pause or resume a transfer, while
- keeping its slot in use. -}
data TransferException = PauseTransfer | ResumeTransfer
deriving (Show, Eq, Typeable)
instance E.Exception TransferException
type TransferSlotRunner = TransferSlots -> IO () -> IO ThreadId
{- Number of concurrent transfers allowed to be run from the assistant.
-
- Transfers launched by other means, including by remote assistants,
- do not currently take up slots.
-}
numSlots :: Int
numSlots = 1
newTransferSlots :: IO TransferSlots
newTransferSlots = newQSemN numSlots
{- Waits until a transfer slot becomes available, and runs a transfer
- action in the slot, in its own thread.
-}
inTransferSlot :: TransferSlotRunner
inTransferSlot = runTransferSlot (\s -> waitQSemN s 1)
{- Runs a transfer action, without waiting for a slot to become available. -}
inImmediateTransferSlot :: TransferSlotRunner
inImmediateTransferSlot = runTransferSlot (\s -> signalQSemN s (-1))
{- Note that the action is subject to being killed when the transfer
- is canceled or paused.
-
- A PauseTransfer exception is handled by letting the action be killed,
- then pausing the thread until a ResumeTransfer exception is raised,
- then rerunning the action.
-}
runTransferSlot :: (QSemN -> IO ()) -> TransferSlotRunner
runTransferSlot allocator s transfer = do
allocator s
forkIO $ E.bracket_ noop (signalQSemN s 1) go
where
go = catchPauseResume transfer
pause = catchPauseResume $ runEvery (Seconds 86400) noop
catchPauseResume a = E.catch a handlePauseResume
handlePauseResume PauseTransfer = do
putStrLn "pause"
pause
handlePauseResume ResumeTransfer = do
putStrLn "resume"
go
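The pause/resume protocol is driven from outside the slot by throwing these exceptions at the transfer's thread (as the webapp dashboard code later in this commit does, via the ThreadId stored in transferTid). A minimal sketch:
-- Sketch only: pause, then later resume, a transfer running in a slot.
pauseIt :: ThreadId -> IO ()
pauseIt tid = E.throwTo tid PauseTransfer

resumeIt :: ThreadId -> IO ()
resumeIt tid = E.throwTo tid ResumeTransfer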

177
Assistant/WebApp.hs Normal file
View file

@ -0,0 +1,177 @@
{- git-annex assistant webapp data types
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Assistant.WebApp where
import Assistant.Common
import Assistant.ThreadedMonad
import Assistant.DaemonStatus
import Assistant.ScanRemotes
import Assistant.TransferQueue
import Assistant.TransferSlots
import Assistant.Alert
import Utility.NotificationBroadcaster
import Utility.WebApp
import Utility.Yesod
import Logs.Transfer
import Yesod
import Yesod.Static
import Text.Hamlet
import Data.Text (Text, pack, unpack)
import Control.Concurrent.STM
staticFiles "static"
mkYesodData "WebApp" $(parseRoutesFile "Assistant/WebApp/routes")
data WebApp = WebApp
{ threadState :: Maybe ThreadState
, daemonStatus :: DaemonStatusHandle
, scanRemotes :: ScanRemoteMap
, transferQueue :: TransferQueue
, transferSlots :: TransferSlots
, secretToken :: Text
, relDir :: Maybe FilePath
, getStatic :: Static
, webAppState :: TMVar WebAppState
, postFirstRun :: Maybe (IO String)
}
data NavBarItem = DashBoard | Config | About
deriving (Eq)
navBarName :: NavBarItem -> Text
navBarName DashBoard = "Dashboard"
navBarName Config = "Configuration"
navBarName About = "About"
navBarRoute :: NavBarItem -> Route WebApp
navBarRoute DashBoard = HomeR
navBarRoute Config = ConfigR
navBarRoute About = AboutR
defaultNavBar :: [NavBarItem]
defaultNavBar = [DashBoard, Config, About]
firstRunNavBar :: [NavBarItem]
firstRunNavBar = [Config, About]
selectNavBar :: Handler [NavBarItem]
selectNavBar = ifM (inFirstRun) (return firstRunNavBar, return defaultNavBar)
inFirstRun :: Handler Bool
inFirstRun = isNothing . relDir <$> getYesod
{- Used instead of defaultContent; highlights the current page if it's
- on the navbar. -}
bootstrap :: Maybe NavBarItem -> Widget -> Handler RepHtml
bootstrap navbaritem content = do
webapp <- getYesod
navbar <- map navdetails <$> selectNavBar
page <- widgetToPageContent $ do
addStylesheet $ StaticR css_bootstrap_css
addStylesheet $ StaticR css_bootstrap_responsive_css
addScript $ StaticR jquery_full_js
addScript $ StaticR js_bootstrap_dropdown_js
addScript $ StaticR js_bootstrap_modal_js
$(widgetFile "page")
hamletToRepHtml $(hamletFile $ hamletTemplate "bootstrap")
where
navdetails i = (navBarName i, navBarRoute i, Just i == navbaritem)
instance Yesod WebApp where
{- Require an auth token be set when accessing any (non-static) route. -}
isAuthorized _ _ = checkAuthToken secretToken
{- Add the auth token to every url generated, except static subsite
- urls (which can show up in Permission Denied pages). -}
joinPath = insertAuthToken secretToken excludeStatic
where
excludeStatic [] = True
excludeStatic (p:_) = p /= "static"
makeSessionBackend = webAppSessionBackend
jsLoader _ = BottomOfHeadBlocking
instance RenderMessage WebApp FormMessage where
renderMessage _ _ = defaultFormMessage
type Form x = Html -> MForm WebApp WebApp (FormResult x, Widget)
data WebAppState = WebAppState
{ showIntro :: Bool
}
newWebAppState :: IO (TMVar WebAppState)
newWebAppState = liftIO $ atomically $
newTMVar $ WebAppState { showIntro = True }
getWebAppState :: forall sub. GHandler sub WebApp WebAppState
getWebAppState = liftIO . atomically . readTMVar =<< webAppState <$> getYesod
modifyWebAppState :: forall sub. (WebAppState -> WebAppState) -> GHandler sub WebApp ()
modifyWebAppState a = go =<< webAppState <$> getYesod
where
go s = liftIO $ atomically $ do
v <- takeTMVar s
putTMVar s $ a v
{- Runs an Annex action from the webapp.
-
- When the webapp is run outside a git-annex repository, the fallback
- value is returned.
-}
runAnnex :: forall sub a. a -> Annex a -> GHandler sub WebApp a
runAnnex fallback a = maybe (return fallback) go =<< threadState <$> getYesod
where
go st = liftIO $ runThreadState st a
waitNotifier :: forall sub. (DaemonStatus -> NotificationBroadcaster) -> NotificationId -> GHandler sub WebApp ()
waitNotifier selector nid = do
notifier <- getNotifier selector
liftIO $ waitNotification $ notificationHandleFromId notifier nid
newNotifier :: forall sub. (DaemonStatus -> NotificationBroadcaster) -> GHandler sub WebApp NotificationId
newNotifier selector = do
notifier <- getNotifier selector
liftIO $ notificationHandleToId <$> newNotificationHandle notifier
getNotifier :: forall sub. (DaemonStatus -> NotificationBroadcaster) -> GHandler sub WebApp NotificationBroadcaster
getNotifier selector = do
webapp <- getYesod
liftIO $ selector <$> getDaemonStatus (daemonStatus webapp)
instance PathPiece NotificationId where
toPathPiece = pack . show
fromPathPiece = readish . unpack
instance PathPiece AlertId where
toPathPiece = pack . show
fromPathPiece = readish . unpack
instance PathPiece Transfer where
toPathPiece = pack . show
fromPathPiece = readish . unpack
{- Adds the auth parameter as a hidden field on a form. Must be put into
- every form. -}
webAppFormAuthToken :: Widget
webAppFormAuthToken = do
webapp <- lift getYesod
[whamlet|<input type="hidden" name="auth" value="#{secretToken webapp}">|]
{- A button with an icon, and maybe label, that can be clicked to perform
- some action.
- With javascript, clicking it POSTs the Route, and remains on the same
- page.
- With noscript, clicking it GETs the Route. -}
actionButton :: Route WebApp -> (Maybe String) -> String -> String -> Widget
actionButton route label buttonclass iconclass = $(widgetFile "actionbutton")

View file

@ -0,0 +1,363 @@
{- git-annex assistant webapp configurators
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
module Assistant.WebApp.Configurators where
import Assistant.Common
import Assistant.WebApp
import Assistant.WebApp.SideBar
import Assistant.DaemonStatus
import Assistant.Threads.MountWatcher (handleMount)
import Utility.Yesod
import qualified Remote
import qualified Types.Remote as Remote
import Remote.List
import Annex.UUID (getUUID)
import Init
import qualified Git
import qualified Git.Construct
import qualified Git.Config
import qualified Git.Command
import qualified Annex
import Locations.UserConfig
import Utility.FreeDesktop
import Utility.Mounts
import Utility.DiskFree
import Utility.DataUnits
import Utility.Network
import Yesod
import Data.Text (Text)
import qualified Data.Text as T
import Data.Char
import System.Posix.Directory
import qualified Control.Exception as E
{- The main configuration screen. -}
getConfigR :: Handler RepHtml
getConfigR = ifM (inFirstRun)
( getFirstRepositoryR
, bootstrap (Just Config) $ do
sideBarDisplay
setTitle "Configuration"
$(widgetFile "configurators/main")
)
{- Lists known repositories, followed by options to add more. -}
getRepositoriesR :: Handler RepHtml
getRepositoriesR = bootstrap (Just Config) $ do
sideBarDisplay
setTitle "Repositories"
repolist <- lift repoList
$(widgetFile "configurators/repositories")
{- A numbered list of known repositories, including the current one. -}
repoList :: Handler [(String, String)]
repoList = do
rs <- filter (not . Remote.readonly) . knownRemotes <$>
(liftIO . getDaemonStatus =<< daemonStatus <$> getYesod)
l <- runAnnex [] $ do
u <- getUUID
Remote.prettyListUUIDs $ nub $ u:(map Remote.uuid rs)
return $ zip counter l
where
counter = map show ([1..] :: [Int])
{- An intro message, list of repositories, and nudge to make more. -}
introDisplay :: Text -> Widget
introDisplay ident = do
webapp <- lift getYesod
repolist <- lift repoList
let n = length repolist
let numrepos = show n
let notenough = n < enough
let barelyenough = n == enough
let morethanenough = n > enough
$(widgetFile "configurators/intro")
lift $ modifyWebAppState $ \s -> s { showIntro = False }
where
enough = 2
data RepositoryPath = RepositoryPath Text
deriving Show
{- Custom field display for a RepositoryPath, with an icon etc.
-
- Validates that the path entered is not empty, and is a safe value
- to use as a repository. -}
repositoryPathField :: forall sub. Bool -> Field sub WebApp Text
repositoryPathField autofocus = Field { fieldParse = parse, fieldView = view }
where
view idAttr nameAttr attrs val isReq =
[whamlet|<input type="text" *{attrs} id="#{idAttr}" name="#{nameAttr}" :isReq:required :autofocus:autofocus value="#{either id id val}">|]
parse [path]
| T.null path = nopath
| otherwise = liftIO $ checkRepositoryPath path
parse [] = return $ Right Nothing
parse _ = nopath
nopath = return $ Left "Enter a location for the repository"
{- As well as checking the path for a lot of silly things, tilde is
- expanded in the returned path. -}
checkRepositoryPath :: Text -> IO (Either (SomeMessage WebApp) (Maybe Text))
checkRepositoryPath p = do
home <- myHomeDir
let basepath = expandTilde home $ T.unpack p
path <- absPath basepath
let parent = parentDir path
problems <- catMaybes <$> mapM runcheck
[ (return $ path == "/", "Enter the full path to use for the repository.")
, (return $ all isSpace basepath, "A blank path? Seems unlikely.")
, (doesFileExist path, "A file already exists with that name.")
, (return $ path == home, "Sorry, using git-annex for your whole home directory is not currently supported.")
, (not <$> doesDirectoryExist parent, "Parent directory does not exist.")
, (not <$> canWrite path, "Cannot write a repository there.")
, (not <$> canMakeSymlink path, "That directory is on a filesystem that does not support symlinks. Try a different location.")
]
return $
case headMaybe problems of
Nothing -> Right $ Just $ T.pack basepath
Just prob -> Left prob
where
runcheck (chk, msg) = ifM (chk)
( return $ Just msg
, return Nothing
)
expandTilde home ('~':'/':path) = home </> path
expandTilde _ path = path
{- On first run, if run in the home directory, default to putting it in
- ~/Desktop/annex, when a Desktop directory exists, and ~/annex otherwise.
-
- If run in another directory, the user probably wants to put it there. -}
defaultRepositoryPath :: Bool -> IO FilePath
defaultRepositoryPath firstrun = do
cwd <- liftIO $ getCurrentDirectory
home <- myHomeDir
if home == cwd && firstrun
then do
desktop <- userDesktopDir
ifM (doesDirectoryExist desktop)
(relHome (desktop </> "annex"), return "~/annex")
else return cwd
localRepositoryForm :: Form RepositoryPath
localRepositoryForm msg = do
path <- T.pack . addTrailingPathSeparator
<$> (liftIO . defaultRepositoryPath =<< lift inFirstRun)
(pathRes, pathView) <- mreq (repositoryPathField True) "" (Just path)
let (err, errmsg) = case pathRes of
FormMissing -> (False, "")
FormFailure l -> (True, concat $ map T.unpack l)
FormSuccess _ -> (False, "")
let form = do
webAppFormAuthToken
$(widgetFile "configurators/localrepositoryform")
return (RepositoryPath <$> pathRes, form)
{- Making the first repository, when starting the webapp for the first time. -}
getFirstRepositoryR :: Handler RepHtml
getFirstRepositoryR = bootstrap (Just Config) $ do
sideBarDisplay
setTitle "Getting started"
((res, form), enctype) <- lift $ runFormGet localRepositoryForm
case res of
FormSuccess (RepositoryPath p) -> lift $
startFullAssistant $ T.unpack p
_ -> $(widgetFile "configurators/firstrepository")
data RemovableDrive = RemovableDrive
{ diskFree :: Maybe Integer
, mountPoint :: Text
}
deriving (Show, Eq, Ord)
selectDriveForm :: [RemovableDrive] -> Maybe RemovableDrive -> Form RemovableDrive
selectDriveForm drives def = renderBootstrap $ RemovableDrive
<$> pure Nothing
<*> areq (selectFieldList pairs) "Select drive:" (mountPoint <$> def)
where
pairs = zip (map describe drives) (map mountPoint drives)
describe drive = case diskFree drive of
Nothing -> mountPoint drive
Just free ->
let sz = roughSize storageUnits True free
in T.unwords
[ mountPoint drive
, T.concat ["(", T.pack sz]
, "free)"
]
{- Adding a removable drive. -}
getAddDriveR :: Handler RepHtml
getAddDriveR = bootstrap (Just Config) $ do
sideBarDisplay
setTitle "Add a removable drive"
removabledrives <- liftIO $ driveList
writabledrives <- liftIO $
filterM (canWrite . T.unpack . mountPoint) removabledrives
((res, form), enctype) <- lift $ runFormGet $
selectDriveForm (sort writabledrives) Nothing
case res of
FormSuccess (RemovableDrive { mountPoint = d }) -> lift $ do
go $ T.unpack d
setMessage $ toHtml $ T.unwords ["Added", d]
redirect RepositoriesR
_ -> do
let authtoken = webAppFormAuthToken
$(widgetFile "configurators/adddrive")
where
go mountpoint = do
liftIO $ makerepo dir
liftIO $ initRepo dir $ Just remotename
addremotes dir remotename
webapp <- getYesod
liftIO $ syncrepo dir webapp
where
dir = mountpoint </> "annex"
remotename = takeFileName mountpoint
{- The repo may already exist, when adding removable media
- that has already been used elsewhere. -}
makerepo dir = liftIO $ do
r <- E.try (inDir dir $ return True) :: IO (Either E.SomeException Bool)
case r of
Right _ -> noop
Left _e -> do
createDirectoryIfMissing True dir
bare <- not <$> canMakeSymlink dir
makeRepo dir bare
{- Synthesize a mount event of the new git repository.
- This will sync it, and queue file transfers. -}
syncrepo dir webapp =
handleMount
(fromJust $ threadState webapp)
(daemonStatus webapp)
(scanRemotes webapp)
dir
{- Each repository is made a remote of the other. -}
addremotes dir name = runAnnex () $ do
hostname <- maybe "host" id <$> liftIO getHostname
hostlocation <- fromRepo Git.repoLocation
void $ liftIO $ inDir dir $
addremote hostname hostlocation
whenM (addremote name dir) $
void $ remoteListRefresh
{- Adds a remote only if there is not already one with
- the location. -}
addremote name location = inRepo $ \r ->
if (null $ filter samelocation $ Git.remotes r)
then do
let name' = uniqueremotename r name (0 :: Int)
Git.Command.runBool "remote"
[Param "add", Param name', Param location] r
else return False
where
samelocation x = Git.repoLocation x == location
{- Generate an unused name for a remote, adding a number if
- necessary. -}
uniqueremotename r basename n
| null namecollision = name
| otherwise = uniqueremotename r basename (succ n)
where
namecollision = filter samename (Git.remotes r)
samename x = Git.remoteName x == Just name
name
| n == 0 = basename
| otherwise = basename ++ show n
{- List of removable drives. -}
driveList :: IO [RemovableDrive]
driveList = mapM (gen . mnt_dir) =<< filter sane <$> getMounts
where
gen dir = RemovableDrive
<$> getDiskFree dir
<*> pure (T.pack dir)
-- filter out some things that are surely not removable drives
sane Mntent { mnt_dir = dir, mnt_fsname = dev }
{- We want real disks like /dev/foo, not
- dummy mount points like proc or tmpfs or
- gvfs-fuse-daemon. -}
| not ('/' `elem` dev) = False
{- Just in case: These mount points are surely not
- removable disks. -}
| dir == "/" = False
| dir == "/tmp" = False
| dir == "/run/shm" = False
| dir == "/run/lock" = False
| otherwise = True
{- Bootstraps from first run mode to a fully running assistant in a
- repository, by running the postFirstRun callback, which returns the
- url to the new webapp. -}
startFullAssistant :: FilePath -> Handler ()
startFullAssistant path = do
webapp <- getYesod
url <- liftIO $ do
makeRepo path False
initRepo path Nothing
addAutoStart path
changeWorkingDirectory path
fromJust $ postFirstRun webapp
redirect $ T.pack url
{- Makes a new git-annex repository. -}
makeRepo :: FilePath -> Bool -> IO ()
makeRepo path bare = do
unlessM (boolSystem "git" params) $
error "git init failed!"
where
baseparams = [Param "init", Param "--quiet"]
params
| bare = baseparams ++ [Param "--bare", File path]
| otherwise = baseparams ++ [File path]
{- Runs an action in the git-annex repository in the specified directory. -}
inDir :: FilePath -> Annex a -> IO a
inDir dir a = do
state <- Annex.new =<< Git.Config.read =<< Git.Construct.fromPath dir
Annex.eval state a
{- Initializes a git-annex repository in a directory with a description. -}
initRepo :: FilePath -> Maybe String -> IO ()
initRepo dir desc = inDir dir $
unlessM isInitialized $
initialize desc
{- Adds a directory to the autostart file. -}
addAutoStart :: FilePath -> IO ()
addAutoStart path = do
autostart <- autoStartFile
createDirectoryIfMissing True (parentDir autostart)
appendFile autostart $ path ++ "\n"
{- Checks if the user can write to a directory.
-
- The directory may be in the process of being created; if so
- the parent directory is checked instead. -}
canWrite :: FilePath -> IO Bool
canWrite dir = do
tocheck <- ifM (doesDirectoryExist dir)
(return dir, return $ parentDir dir)
catchBoolIO $ fileAccess tocheck False True False
{- Checks if a directory is on a filesystem that supports symlinks. -}
canMakeSymlink :: FilePath -> IO Bool
canMakeSymlink dir = ifM (doesDirectoryExist dir)
( catchBoolIO $ test dir
, canMakeSymlink (parentDir dir)
)
where
test d = do
let link = d </> "delete.me"
createSymbolicLink link link
removeLink link
return True
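A hedged worked example of the naming fallback in addremotes above: if the repository already has remotes named origin and usbdrive, adding another drive mounted at a directory also called usbdrive results in a remote named usbdrive1 (then usbdrive2, and so on).
-- Expected behaviour of uniqueremotename (illustration only):
--   no remote named "usbdrive"            -> "usbdrive"
--   "usbdrive" taken, "usbdrive1" free    -> "usbdrive1"
--   "usbdrive" and "usbdrive1" both taken -> "usbdrive2"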

View file

@ -0,0 +1,219 @@
{- git-annex assistant webapp dashboard
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE CPP, TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
module Assistant.WebApp.DashBoard where
import Assistant.Common
import Assistant.WebApp
import Assistant.WebApp.SideBar
import Assistant.WebApp.Notifications
import Assistant.WebApp.Configurators
import Assistant.DaemonStatus
import Assistant.TransferQueue
import Assistant.TransferSlots
import qualified Assistant.Threads.Transferrer as Transferrer
import Utility.NotificationBroadcaster
import Utility.Yesod
import Logs.Transfer
import Utility.Percentage
import Utility.DataUnits
import Types.Key
import qualified Remote
import qualified Git
import Locations.UserConfig
import Yesod
import Text.Hamlet
import qualified Data.Map as M
import Control.Concurrent
import System.Posix.Signals (signalProcessGroup, sigTERM, sigKILL)
import System.Posix.Process (getProcessGroupIDOf)
{- A display of currently running and queued transfers.
-
- Or, if there have never been any this run, an intro display. -}
transfersDisplay :: Bool -> Widget
transfersDisplay warnNoScript = do
webapp <- lift getYesod
current <- lift $ M.toList <$> getCurrentTransfers
queued <- liftIO $ getTransferQueue $ transferQueue webapp
let ident = "transfers"
autoUpdate ident NotifierTransfersR (10 :: Int) (10 :: Int)
let transfers = current ++ queued
if null transfers
then ifM (lift $ showIntro <$> getWebAppState)
( introDisplay ident
, $(widgetFile "dashboard/transfers")
)
else $(widgetFile "dashboard/transfers")
where
isrunning info = not $
transferPaused info || isNothing (startedTime info)
{- Called by client to get a display of currently in process transfers.
-
- Returns a div, which will be inserted into the calling page.
-
- Note that the head of the widget is not included, only its
- body is. To get the widget head content, the widget is also
- inserted onto the getHomeR page.
-}
getTransfersR :: NotificationId -> Handler RepHtml
getTransfersR nid = do
waitNotifier transferNotifier nid
page <- widgetToPageContent $ transfersDisplay False
hamletToRepHtml $ [hamlet|^{pageBody page}|]
{- The main dashboard. -}
dashboard :: Bool -> Widget
dashboard warnNoScript = do
sideBarDisplay
let content = transfersDisplay warnNoScript
$(widgetFile "dashboard/main")
getHomeR :: Handler RepHtml
getHomeR = ifM (inFirstRun)
( redirect ConfigR
, bootstrap (Just DashBoard) $ dashboard True
)
{- Same as HomeR, except no autorefresh at all (and no noscript warning). -}
getNoScriptR :: Handler RepHtml
getNoScriptR = bootstrap (Just DashBoard) $ dashboard False
{- Same as HomeR, except with autorefreshing via meta refresh. -}
getNoScriptAutoR :: Handler RepHtml
getNoScriptAutoR = bootstrap (Just DashBoard) $ do
let ident = NoScriptR
let delayseconds = 3 :: Int
let this = NoScriptAutoR
toWidgetHead $(hamletFile $ hamletTemplate "dashboard/metarefresh")
dashboard False
{- The javascript code does a post. -}
postFileBrowserR :: Handler ()
postFileBrowserR = void openFileBrowser
{- Used by non-javascript browsers, where clicking on the link actually
- opens this page, so we redirect back to the referrer. -}
getFileBrowserR :: Handler ()
getFileBrowserR = whenM openFileBrowser $ redirectBack
redirectBack :: Handler ()
redirectBack = do
clearUltDest
setUltDestReferer
redirectUltDest HomeR
{- Opens the system file browser on the repo, or, as a fallback,
- goes to a file:// url. Returns True if it's ok to redirect away
- from the page (ie, the system file browser was opened).
-
- Note that the command is opened using a different thread, to avoid
- blocking the response to the browser on it. -}
openFileBrowser :: Handler Bool
openFileBrowser = do
path <- runAnnex (error "no configured repository") $
fromRepo Git.repoPath
ifM (liftIO $ inPath cmd)
( do
void $ liftIO $ forkIO $ void $
boolSystem cmd [Param path]
return True
, do
clearUltDest
setUltDest $ "file://" ++ path
void $ redirectUltDest HomeR
return False
)
where
#if OSX
cmd = "open"
#else
cmd = "xdg-open"
#endif
{- Transfer controls. The GET is done in noscript mode and redirects back
- to the referring page. The POST is called by javascript. -}
getPauseTransferR :: Transfer -> Handler ()
getPauseTransferR t = pauseTransfer t >> redirectBack
postPauseTransferR :: Transfer -> Handler ()
postPauseTransferR t = pauseTransfer t
getStartTransferR :: Transfer -> Handler ()
getStartTransferR t = startTransfer t >> redirectBack
postStartTransferR :: Transfer -> Handler ()
postStartTransferR t = startTransfer t
getCancelTransferR :: Transfer -> Handler ()
getCancelTransferR t = cancelTransfer False t >> redirectBack
postCancelTransferR :: Transfer -> Handler ()
postCancelTransferR t = cancelTransfer False t
pauseTransfer :: Transfer -> Handler ()
pauseTransfer = cancelTransfer True
cancelTransfer :: Bool -> Transfer-> Handler ()
cancelTransfer pause t = do
webapp <- getYesod
let dstatus = daemonStatus webapp
m <- getCurrentTransfers
liftIO $ do
{- remove queued transfer -}
void $ dequeueTransfer (transferQueue webapp) dstatus t
{- stop running transfer -}
maybe noop (stop dstatus) (M.lookup t m)
where
stop dstatus info = do
{- When there's a thread associated with the
- transfer, it's killed first, to avoid it
- displaying any alert about the transfer having
- failed when the transfer process is killed. -}
maybe noop signalthread $ transferTid info
maybe noop killproc $ transferPid info
if pause
then void $
updateTransferInfo dstatus t $ info
{ transferPaused = True }
else void $
removeTransfer dstatus t
signalthread tid
| pause = throwTo tid PauseTransfer
| otherwise = killThread tid
{- In order to stop helper processes like rsync,
- kill the whole process group of the process running the
- transfer. -}
killproc pid = do
g <- getProcessGroupIDOf pid
void $ tryIO $ signalProcessGroup sigTERM g
threadDelay 100000 -- 0.1 second grace period
void $ tryIO $ signalProcessGroup sigKILL g
startTransfer :: Transfer -> Handler ()
startTransfer t = do
m <- getCurrentTransfers
maybe noop resume (M.lookup t m)
-- TODO: handle starting a queued transfer
where
resume info = maybe (start info) signalthread $ transferTid info
signalthread tid = liftIO $ throwTo tid ResumeTransfer
start info = do
webapp <- getYesod
let dstatus = daemonStatus webapp
let slots = transferSlots webapp
{- This transfer was being run by another process,
- forget that old pid, and start a new one. -}
liftIO $ updateTransferInfo dstatus t $ info
{ transferPid = Nothing }
liftIO $ Transferrer.transferThread
dstatus slots t info inImmediateTransferSlot
=<< readProgramFile
getCurrentTransfers :: Handler TransferMap
getCurrentTransfers = currentTransfers
<$> (liftIO . getDaemonStatus =<< daemonStatus <$> getYesod)

View file

@ -0,0 +1,22 @@
{- git-annex assistant webapp documentation
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
module Assistant.WebApp.Documentation where
import Assistant.WebApp
import Assistant.WebApp.SideBar
import Utility.Yesod
import Yesod
getAboutR :: Handler RepHtml
getAboutR = bootstrap (Just About) $ do
sideBarDisplay
setTitle "About git-annex"
$(widgetFile "documentation/about")

View file

@ -0,0 +1,58 @@
{- git-annex assistant webapp notifications
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
module Assistant.WebApp.Notifications where
import Assistant.Common
import Assistant.WebApp
import Assistant.DaemonStatus
import Utility.NotificationBroadcaster
import Utility.Yesod
import Yesod
import Data.Text (Text)
import qualified Data.Text as T
{- Add to any widget to make it auto-update using long polling.
-
- The widget should have a html element with an id=ident, which will be
- replaced when it's updated.
-
- The geturl route should return the notifier url to use for polling.
-
- ms_delay is how long to delay between AJAX updates
- ms_startdelay is how long to delay before updating with AJAX at the start
-}
autoUpdate :: Text -> Route WebApp -> Int -> Int -> Widget
autoUpdate ident geturl ms_delay ms_startdelay = do
let delay = show ms_delay
let startdelay = show ms_startdelay
addScript $ StaticR longpolling_js
$(widgetFile "notifications/longpolling")
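As a usage sketch (the widget, element id, and intervals below are invented for illustration and are not part of this commit), a caller wires a widget up roughly like so:
demoWidget :: Widget
demoWidget = do
	-- the element with this id is what the javascript replaces on each poll
	let ident = "demo" :: Text
	toWidget [hamlet|<div ##{ident}>Loading...|]
	-- poll the url returned by NotifierTransfersR, refreshing every 10000 ms,
	-- with the first refresh 1000 ms after page load
	autoUpdate ident NotifierTransfersR (10000 :: Int) (1000 :: Int)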
{- Notifier urls are requested by the javascript, to avoid allocation
- of NotificationIds when noscript pages are loaded. This constructs a
- notifier url for a given Route and NotificationBroadcaster.
-}
notifierUrl :: (NotificationId -> Route WebApp) -> (DaemonStatus -> NotificationBroadcaster) -> Handler RepPlain
notifierUrl route selector = do
(urlbits, _params) <- renderRoute . route <$> newNotifier selector
webapp <- getYesod
return $ RepPlain $ toContent $ T.concat
[ "/"
, T.intercalate "/" urlbits
, "?auth="
, secretToken webapp
]
getNotifierTransfersR :: Handler RepPlain
getNotifierTransfersR = notifierUrl TransfersR transferNotifier
getNotifierSideBarR :: Handler RepPlain
getNotifierSideBarR = notifierUrl SideBarR alertNotifier
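For illustration only (token and notification id invented): the page's javascript does a GET of one of these routes and receives a small plain-text body naming the url it should long-poll, along the lines of:
--   GET /notifier/transfers          (sent by longpolling.js)
--   ==> "/transfers/3?auth=s3cr3t"   (then polled until the page is closed)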

View file

@ -0,0 +1,84 @@
{- git-annex assistant webapp sidebar
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
module Assistant.WebApp.SideBar where
import Assistant.Common
import Assistant.WebApp
import Assistant.WebApp.Notifications
import Assistant.DaemonStatus
import Assistant.Alert
import Utility.NotificationBroadcaster
import Utility.Yesod
import Yesod
import Data.Text (Text)
import qualified Data.Map as M
import Control.Concurrent
sideBarDisplay :: Widget
sideBarDisplay = do
let content = do
{- Add newest alerts to the sidebar. -}
webapp <- lift getYesod
alertpairs <- M.toList . alertMap
<$> liftIO (getDaemonStatus $ daemonStatus webapp)
mapM_ renderalert $
take displayAlerts $ reverse $ sortAlertPairs alertpairs
let ident = "sidebar"
$(widgetFile "sidebar/main")
autoUpdate ident NotifierSideBarR (10 :: Int) (10 :: Int)
where
bootstrapclass Activity = "alert-info"
bootstrapclass Warning = "alert"
bootstrapclass Error = "alert-error"
bootstrapclass Success = "alert-success"
bootstrapclass Message = "alert-info"
renderalert (alertid, alert) = addalert
alertid
(alertClosable alert)
(alertBlockDisplay alert)
(bootstrapclass $ alertClass alert)
(renderAlertHeader alert)
(renderAlertMessage alert)
(alertIcon alert)
addalert :: AlertId -> Bool -> Bool -> Text -> Maybe Text -> Text -> Maybe String -> Widget
addalert i closable block divclass heading message icon = do
let alertid = show i
$(widgetFile "sidebar/alert")
{- Called by client to get a sidebar display.
-
- Returns a div, which will be inserted into the calling page.
-
- Note that the head of the widget is not included, only its
- body is. To get the widget head content, the widget is also
- inserted onto all pages.
-}
getSideBarR :: NotificationId -> Handler RepHtml
getSideBarR nid = do
waitNotifier alertNotifier nid
{- This 0.1 second delay keeps very transient notifications from
- being displayed and churning the sidebar unnecessarily.
-
- This needs to be below the level perceptible by the user,
- to avoid slowing down user actions like closing alerts. -}
liftIO $ threadDelay 100000
page <- widgetToPageContent sideBarDisplay
hamletToRepHtml $ [hamlet|^{pageBody page}|]
{- Called by the client to close an alert. -}
getCloseAlert :: AlertId -> Handler ()
getCloseAlert i = do
webapp <- getYesod
void $ liftIO $ removeAlert (daemonStatus webapp) i

22
Assistant/WebApp/routes Normal file
View file

@ -0,0 +1,22 @@
/ HomeR GET
/noscript NoScriptR GET
/noscript/auto NoScriptAutoR GET
/about AboutR GET
/config ConfigR GET
/config/repository RepositoriesR GET
/config/repository/add/drive AddDriveR GET
/config/repository/first FirstRepositoryR GET
/transfers/#NotificationId TransfersR GET
/sidebar/#NotificationId SideBarR GET
/notifier/transfers NotifierTransfersR GET
/notifier/sidebar NotifierSideBarR GET
/closealert/#AlertId CloseAlert GET
/filebrowser FileBrowserR GET POST
/transfer/pause/#Transfer PauseTransferR GET POST
/transfer/start/#Transfer StartTransferR GET POST
/transfer/cancel/#Transfer CancelTransferR GET POST
/static StaticR Static getStatic

View file

@ -53,14 +53,16 @@ shaN shasize file filesize = do
showAction "checksum"
case shaCommand shasize filesize of
Left sha -> liftIO $ sha <$> L.readFile file
Right command -> liftIO $ runcommand command
Right command -> liftIO $ parse command . lines <$>
readProcess command (toCommand [File file])
where
runcommand command =
pOpen ReadFromPipe command (toCommand [File file]) $ \h -> do
sha <- fst . separate (== ' ') <$> hGetLine h
if null sha
then error $ command ++ " parse error"
else return sha
parse command [] = bad command
parse command (l:_)
| null sha = bad command
| otherwise = sha
where
sha = fst $ separate (== ' ') l
bad command = error $ command ++ " parse error"
shaCommand :: SHASize -> Integer -> Either (L.ByteString -> String) String
shaCommand shasize filesize

View file

@ -4,7 +4,7 @@ module Build.Configure where
import System.Directory
import Data.List
import System.Cmd.Utils
import System.Process
import Control.Applicative
import System.FilePath
@ -71,7 +71,7 @@ getVersionString = do
getGitVersion :: Test
getGitVersion = do
(_, s) <- pipeFrom "git" ["--version"]
s <- readProcess "git" ["--version"] ""
let version = unwords $ drop 2 $ words $ head $ lines s
return $ Config "gitversion" (StringConfig version)
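A quick worked example of that parse, with an invented output string:
-- readProcess "git" ["--version"] "" ==> "git version 1.7.12\n"
--   words  => ["git","version","1.7.12"]
--   drop 2 => ["1.7.12"]
-- so gitversion is recorded as "1.7.12"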

View file

@ -0,0 +1,61 @@
{- Generating and installing a desktop menu entry file
- and a desktop autostart file.
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Build.InstallDesktopFile where
import Utility.Exception
import Utility.FreeDesktop
import Utility.Path
import Locations.UserConfig
import Control.Applicative
import Control.Monad
import System.Directory
import System.Environment
import System.Posix.User
{- The command can be either just "git-annex", or the full path to use
- to run it. -}
desktop :: FilePath -> DesktopEntry
desktop command = genDesktopEntry
"Git Annex"
"Track and sync the files in your Git Annex"
False
(command ++ " webapp")
["Network", "FileTransfer"]
autostart :: FilePath -> DesktopEntry
autostart command = genDesktopEntry
"Git Annex Assistant"
"Autostart"
False
(command ++ " assistant --autostart")
[]
writeDesktop :: FilePath -> IO ()
writeDesktop command = do
destdir <- catchDefaultIO (getEnv "DESTDIR") ""
uid <- fromIntegral <$> getRealUserID
datadir <- if uid /= 0 then userDataDir else return systemDataDir
writeDesktopMenuFile (desktop command) $
desktopMenuFilePath "git-annex" datadir
configdir <- if uid /= 0 then userConfigDir else return systemConfigDir
writeDesktopMenuFile (autostart command) $
autoStartPath "git-annex" configdir
when (uid /= 0) $ do
programfile <- programFile
createDirectoryIfMissing True (parentDir programfile)
writeFile programfile command
main = getArgs >>= go
where
go [] = error "specify git-annex command"
go (command:_) = writeDesktop command

View file

@ -11,6 +11,7 @@ import Common.Annex
import Logs.Unused
import Command
import qualified Command.Add
import Types.Key
def :: [Command]
def = [command "addunused" (paramRepeating paramNumRange)
@ -25,7 +26,7 @@ start = startUnused "addunused" perform (performOther "bad") (performOther "tmp"
perform :: Key -> CommandPerform
perform key = next $ Command.Add.cleanup file key True
where
file = "unused." ++ show key
file = "unused." ++ key2file key
{- The content is not in the annex, but in another directory, and
- it seems better to error out, rather than moving bad/tmp content into

71
Command/Assistant.hs Normal file
View file

@ -0,0 +1,71 @@
{- git-annex assistant
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.Assistant where
import Common.Annex
import Command
import qualified Option
import qualified Command.Watch
import Init
import Locations.UserConfig
import System.Environment
import System.Posix.Directory
def :: [Command]
def = [noRepo checkAutoStart $ dontCheck repoExists $
withOptions [Command.Watch.foregroundOption, Command.Watch.stopOption, autoStartOption] $
command "assistant" paramNothing seek "automatically handle changes"]
autoStartOption :: Option
autoStartOption = Option.flag [] "autostart" "start in known repositories"
seek :: [CommandSeek]
seek = [withFlag Command.Watch.stopOption $ \stopdaemon ->
withFlag Command.Watch.foregroundOption $ \foreground ->
withFlag autoStartOption $ \autostart ->
withNothing $ start foreground stopdaemon autostart]
start :: Bool -> Bool -> Bool -> CommandStart
start foreground stopdaemon autostart
| autostart = do
liftIO $ autoStart
stop
| otherwise = do
ensureInitialized
Command.Watch.start True foreground stopdaemon
{- Run outside a git repository. Check to see if any parameter is
- --autostart and enter autostart mode. -}
checkAutoStart :: IO ()
checkAutoStart = ifM (any (== "--autostart") <$> getArgs)
( autoStart
, error "Not in a git repository."
)
autoStart :: IO ()
autoStart = do
autostartfile <- autoStartFile
let nothing = error $ "Nothing listed in " ++ autostartfile
ifM (doesFileExist autostartfile)
( do
dirs <- lines <$> readFile autostartfile
program <- readProgramFile
when (null dirs) nothing
forM_ dirs $ \d -> do
putStrLn $ "git-annex autostart in " ++ d
ifM (catchBoolIO $ go program d)
( putStrLn "ok"
, putStrLn "failed"
)
, nothing
)
where
go program dir = do
changeWorkingDirectory dir
boolSystem program [Param "assistant"]

View file

@ -12,6 +12,7 @@ import Command
import qualified Annex
import Logs.Location
import Annex.Content
import Types.Key
def :: [Command]
def = [oneShot $ command "dropkey" (paramRepeating paramKey) seek
@ -24,7 +25,7 @@ start :: Key -> CommandStart
start key = stopUnless (inAnnex key) $ do
unlessM (Annex.getState Annex.force) $
error "dropkey can cause data loss; use --force if you're sure you want to do this"
showStart "dropkey" (show key)
showStart "dropkey" (key2file key)
next $ perform key
perform :: Key -> CommandPerform

View file

@ -53,7 +53,7 @@ start format file (key, _) = do
where
vars =
[ ("file", file)
, ("key", show key)
, ("key", key2file key)
, ("backend", keyBackendName key)
, ("bytesize", size show)
, ("humansize", size $ roughSize storageUnits True)

View file

@ -22,7 +22,7 @@ seek = [withWords start]
start :: [String] -> CommandStart
start (keyname:file:[]) = notBareRepo $ do
let key = fromMaybe (error "bad key") $ readKey keyname
let key = fromMaybe (error "bad key") $ file2key keyname
inbackend <- inAnnex key
unless inbackend $ error $
"key ("++ keyname ++") is not present in backend"

View file

@ -7,6 +7,8 @@
module Command.Fsck where
import System.Posix.Process (getProcessID)
import Common.Annex
import Command
import qualified Annex
@ -24,6 +26,7 @@ import Utility.DataUnits
import Utility.FileMode
import Config
import qualified Option
import Types.Key
def :: [Command]
def = [withOptions options $ command "fsck" paramPaths seek
@ -112,7 +115,7 @@ startBare :: Key -> CommandStart
startBare key = case Backend.maybeLookupBackendName (Types.Key.keyBackendName key) of
Nothing -> stop
Just backend -> do
showStart "fsck" (show key)
showStart "fsck" (key2file key)
next $ performBare key backend
{- Note that numcopies cannot be checked in a bare repository, because
@ -120,7 +123,7 @@ startBare key = case Backend.maybeLookupBackendName (Types.Key.keyBackendName ke
- files. -}
performBare :: Key -> Backend -> CommandPerform
performBare key backend = check
[ verifyLocationLog key (show key)
[ verifyLocationLog key (key2file key)
, checkKeySize key
, checkBackend backend key
]

View file

@ -199,8 +199,10 @@ tryScan r
Left _ -> return Nothing
Right r' -> return $ Just r'
pipedconfig cmd params = safely $
pOpen ReadFromPipe cmd (toCommand params) $
withHandle StdoutHandle createProcessSuccess p $
Git.Config.hRead r
where
p = proc cmd $ toCommand params
configlist =
onRemote r (pipedconfig, Nothing) "configlist" [] []

View file

@ -135,13 +135,12 @@ fromPerform :: Remote -> Bool -> Key -> FilePath -> CommandPerform
fromPerform src move key file = moveLock move key $
ifM (inAnnex key)
( handle move True
, download (Remote.uuid src) key (Just file) $ do
showAction $ "from " ++ Remote.name src
ok <- getViaTmp key $
Remote.retrieveKeyFile src key (Just file)
handle move ok
, handle move =<< go
)
where
go = download (Remote.uuid src) key (Just file) $ do
showAction $ "from " ++ Remote.name src
getViaTmp key $ Remote.retrieveKeyFile src key (Just file)
handle _ False = stop -- failed
handle False True = next $ return True -- copy complete
handle True True = do -- finish moving

View file

@ -26,7 +26,7 @@ seek = [withPairs start]
start :: (FilePath, String) -> CommandStart
start (file, keyname) = ifAnnexed file go stop
where
newkey = fromMaybe (error "bad key") $ readKey keyname
newkey = fromMaybe (error "bad key") $ file2key keyname
go (oldkey, _)
| oldkey == newkey = stop
| otherwise = do

View file

@ -13,6 +13,7 @@ import CmdLine
import Annex.Content
import Utility.RsyncFile
import Logs.Transfer
import Command.SendKey (fieldTransfer)
def :: [Command]
def = [oneShot $ command "recvkey" paramKey seek
@ -30,7 +31,7 @@ start key = ifM (inAnnex key)
-- forcibly quit after receiving one key,
-- and shutdown cleanly
_ <- shutdown True
liftIO exitSuccess
, liftIO exitFailure
return True
, return False
)
)

View file

@ -12,6 +12,7 @@ import Command
import Annex.Content
import Utility.RsyncFile
import Logs.Transfer
import qualified Fields
def :: [Command]
def = [oneShot $ command "sendkey" paramKey seek
@ -24,9 +25,17 @@ start :: Key -> CommandStart
start key = ifM (inAnnex key)
( fieldTransfer Upload key $ do
file <- inRepo $ gitAnnexLocation key
liftIO $ ifM (rsyncServerSend file)
( exitSuccess , exitFailure )
liftIO $ rsyncServerSend file
, do
warning "requested key is not present"
liftIO exitFailure
)
fieldTransfer :: Direction -> Key -> Annex Bool -> CommandStart
fieldTransfer direction key a = do
afile <- Fields.getField Fields.associatedFile
ok <- maybe a (\u -> runTransfer (Transfer direction (toUUID u) key) afile a)
=<< Fields.getField Fields.remoteUUID
if ok
then liftIO exitSuccess
else liftIO exitFailure

View file

@ -183,8 +183,8 @@ transfer_list = stat "transfers in progress" $ nojson $ lift $ do
pp _ c [] = c
pp uuidmap c ((t, i):xs) = "\n\t" ++ line uuidmap t i ++ pp uuidmap c xs
line uuidmap t i = unwords
[ show (transferDirection t) ++ "ing"
, fromMaybe (show $ transferKey t) (associatedFile i)
[ showLcDirection (transferDirection t) ++ "ing"
, fromMaybe (key2file $ transferKey t) (associatedFile i)
, if transferDirection t == Upload then "to" else "from"
, maybe (fromUUID $ transferUUID t) Remote.name $
M.lookup (transferUUID t) uuidmap

View file

@ -6,8 +6,6 @@
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE BangPatterns #-}
module Command.Sync where
import Common.Annex
@ -27,8 +25,8 @@ import qualified Git
import Git.Types (BlobType(..))
import qualified Types.Remote
import qualified Remote.Git
import Types.Key
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as L
import Data.Hash.MD5
@ -39,7 +37,7 @@ def = [command "sync" (paramOptional (paramRepeating paramRemote))
-- syncing involves several operations, any of which can independently fail
seek :: CommandSeek
seek rs = do
!branch <- fromMaybe nobranch <$> inRepo Git.Branch.current
branch <- fromMaybe nobranch <$> inRepo Git.Branch.current
remotes <- syncRemotes rs
return $ concat
[ [ commit ]
@ -63,23 +61,19 @@ syncRemotes rs = ifM (Annex.getState Annex.fast) ( nub <$> pickfast , wanted )
where
pickfast = (++) <$> listed <*> (good =<< fastest <$> available)
wanted
| null rs = good =<< concat . byspeed <$> available
| null rs = good =<< concat . Remote.byCost <$> available
| otherwise = listed
listed = do
l <- catMaybes <$> mapM (Remote.byName . Just) rs
let s = filter special l
let s = filter Remote.specialRemote l
unless (null s) $
error $ "cannot sync special remotes: " ++
unwords (map Types.Remote.name s)
return l
available = filter nonspecial <$> Remote.enabledRemoteList
available = filter (not . Remote.specialRemote)
<$> Remote.enabledRemoteList
good = filterM $ Remote.Git.repoAvail . Types.Remote.repo
nonspecial r = Types.Remote.remotetype r == Remote.Git.remote
special = not . nonspecial
fastest = fromMaybe [] . headMaybe . byspeed
byspeed = map snd . sort . M.toList . costmap
costmap = M.fromListWith (++) . map costpair
costpair r = (Types.Remote.cost r, [r])
fastest = fromMaybe [] . headMaybe . Remote.byCost
commit :: CommandStart
commit = do
@ -98,7 +92,7 @@ mergeLocal branch = go =<< needmerge
syncbranch = syncBranch branch
needmerge = do
unlessM (inRepo $ Git.Ref.exists syncbranch) $
updateBranch syncbranch
inRepo $ updateBranch syncbranch
inRepo $ Git.Branch.changed branch syncbranch
go False = stop
go True = do
@ -107,17 +101,17 @@ mergeLocal branch = go =<< needmerge
pushLocal :: Git.Ref -> CommandStart
pushLocal branch = do
updateBranch $ syncBranch branch
inRepo $ updateBranch $ syncBranch branch
stop
updateBranch :: Git.Ref -> Annex ()
updateBranch syncbranch =
updateBranch :: Git.Ref -> Git.Repo -> IO ()
updateBranch syncbranch g =
unlessM go $ error $ "failed to update " ++ show syncbranch
where
go = inRepo $ Git.Command.runBool "branch"
go = Git.Command.runBool "branch"
[ Param "-f"
, Param $ show $ Git.Ref.base syncbranch
]
] g
pullRemote :: Remote -> Git.Ref -> CommandStart
pullRemote remote branch = do
@ -125,7 +119,7 @@ pullRemote remote branch = do
next $ do
showOutput
stopUnless fetch $
next $ mergeRemote remote branch
next $ mergeRemote remote (Just branch)
where
fetch = inRepo $ Git.Command.runBool "fetch"
[Param $ Remote.name remote]
@ -134,32 +128,46 @@ pullRemote remote branch = do
- Which to merge from? Well, the master has whatever latest changes
- were committed, while the synced/master may have changes that some
- other remote synced to this remote. So, merge them both. -}
mergeRemote :: Remote -> Git.Ref -> CommandCleanup
mergeRemote remote branch = all id <$> (mapM merge =<< tomerge)
mergeRemote :: Remote -> (Maybe Git.Ref) -> CommandCleanup
mergeRemote remote b = case b of
Nothing -> do
branch <- inRepo Git.Branch.currentUnsafe
all id <$> (mapM merge $ branchlist branch)
Just _ -> all id <$> (mapM merge =<< tomerge (branchlist b))
where
merge = mergeFrom . remoteBranch remote
tomerge = filterM (changed remote) [branch, syncBranch branch]
tomerge branches = filterM (changed remote) branches
branchlist Nothing = []
branchlist (Just branch) = [branch, syncBranch branch]
pushRemote :: Remote -> Git.Ref -> CommandStart
pushRemote remote branch = go =<< needpush
where
needpush = anyM (newer remote) [syncbranch, Annex.Branch.name]
needpush = anyM (newer remote) [syncBranch branch, Annex.Branch.name]
go False = stop
go True = do
showStart "push" (Remote.name remote)
next $ next $ do
showOutput
inRepo $ Git.Command.runBool "push"
[ Param (Remote.name remote)
, Param (show Annex.Branch.name)
, Param refspec
]
refspec = show (Git.Ref.base branch) ++ ":" ++ show (Git.Ref.base syncbranch)
syncbranch = syncBranch branch
inRepo $ pushBranch remote branch
pushBranch :: Remote -> Git.Ref -> Git.Repo -> IO Bool
pushBranch remote branch g =
Git.Command.runBool "push"
[ Param (Remote.name remote)
, Param (show Annex.Branch.name)
, Param refspec
] g
where
refspec = concat
[ show $ Git.Ref.base branch
, ":"
, show $ Git.Ref.base $ syncBranch branch
]
mergeAnnex :: CommandStart
mergeAnnex = do
Annex.Branch.forceUpdate
void $ Annex.Branch.forceUpdate
stop
mergeFrom :: Git.Ref -> Annex Bool
@ -248,8 +256,8 @@ resolveMerge' u
-}
mergeFile :: FilePath -> Key -> FilePath
mergeFile file key
| doubleconflict = go $ show key
| otherwise = go $ shortHash $ show key
| doubleconflict = go $ key2file key
| otherwise = go $ shortHash $ key2file key
where
varmarker = ".variant-"
doubleconflict = varmarker `isSuffixOf` (dropExtension file)

55
Command/TransferKey.hs Normal file
View file

@ -0,0 +1,55 @@
{- git-annex command, used internally by assistant
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.TransferKey where
import Common.Annex
import Command
import Annex.Content
import Logs.Location
import Logs.Transfer
import qualified Remote
import Types.Remote
import qualified Command.Move
import qualified Option
def :: [Command]
def = [withOptions options $
oneShot $ command "transferkey" paramKey seek
"transfers a key from or to a remote"]
options :: [Option]
options = fileOption : Command.Move.options
fileOption :: Option
fileOption = Option.field [] "file" paramFile "the associated file"
seek :: [CommandSeek]
seek = [withField Command.Move.toOption Remote.byName $ \to ->
withField Command.Move.fromOption Remote.byName $ \from ->
withField fileOption return $ \file ->
withKeys $ start to from file]
start :: Maybe Remote -> Maybe Remote -> AssociatedFile -> Key -> CommandStart
start to from file key =
case (from, to) of
(Nothing, Just dest) -> next $ toPerform dest key file
(Just src, Nothing) -> next $ fromPerform src key file
_ -> error "specify either --from or --to"
toPerform :: Remote -> Key -> AssociatedFile -> CommandPerform
toPerform remote key file = next $
upload (uuid remote) key file $ do
ok <- Remote.storeKey remote key file
when ok $
Remote.logStatus remote key InfoPresent
return ok
fromPerform :: Remote -> Key -> AssociatedFile -> CommandPerform
fromPerform remote key file = next $
download (uuid remote) key file $
getViaTmp key $ Remote.retrieveKeyFile remote key file

View file

@ -34,6 +34,7 @@ import qualified Remote
import qualified Annex.Branch
import qualified Option
import Annex.CatFile
import Types.Key
def :: [Command]
def = [withOptions [fromOption] $ command "unused" paramNothing seek
@ -100,7 +101,7 @@ number n (x:xs) = (n+1, x) : number (n+1) xs
table :: [(Int, Key)] -> [String]
table l = " NUMBER KEY" : map cols l
where
cols (n,k) = " " ++ pad 6 (show n) ++ " " ++ show k
cols (n,k) = " " ++ pad 6 (show n) ++ " " ++ key2file k
pad n s = s ++ replicate (n - length s) ' '
staleTmpMsg :: [(Int, Key)] -> String

View file

@ -1,6 +1,3 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{- git-annex watch command
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
@ -22,7 +19,7 @@ def = [withOptions [foregroundOption, stopOption] $
seek :: [CommandSeek]
seek = [withFlag stopOption $ \stopdaemon ->
withFlag foregroundOption $ \foreground ->
withNothing $ start foreground stopdaemon]
withNothing $ start False foreground stopdaemon]
foregroundOption :: Option
foregroundOption = Option.flag [] "foreground" "do not daemonize"
@ -30,9 +27,9 @@ foregroundOption = Option.flag [] "foreground" "do not daemonize"
stopOption :: Option
stopOption = Option.flag [] "stop" "stop daemon"
start :: Bool -> Bool -> CommandStart
start foreground stopdaemon = notBareRepo $ do
start :: Bool -> Bool -> Bool -> CommandStart
start assistant foreground stopdaemon = notBareRepo $ do
if stopdaemon
then stopDaemon
else startDaemon foreground -- does not return
else startDaemon assistant foreground Nothing -- does not return
stop

130
Command/WebApp.hs Normal file
View file

@ -0,0 +1,130 @@
{- git-annex webapp launcher
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.WebApp where
import Common.Annex
import Command
import Assistant
import Assistant.DaemonStatus
import Assistant.ScanRemotes
import Assistant.TransferQueue
import Assistant.TransferSlots
import Assistant.Threads.WebApp
import Utility.WebApp
import Utility.Daemon (checkDaemon, lockPidFile)
import Init
import qualified Git
import qualified Git.Config
import qualified Git.CurrentRepo
import qualified Annex
import Locations.UserConfig
import System.Posix.Directory
import Control.Concurrent
import Control.Concurrent.STM
def :: [Command]
def = [oneShot $ noRepo startNoRepo $ dontCheck repoExists $
command "webapp" paramNothing seek "launch webapp"]
seek :: [CommandSeek]
seek = [withNothing start]
start :: CommandStart
start = notBareRepo $ do
ifM (isInitialized) ( go , liftIO startNoRepo )
stop
where
go = do
browser <- fromRepo webBrowser
f <- liftIO . absPath =<< fromRepo gitAnnexHtmlShim
ifM (checkpid <&&> checkshim f) $
( liftIO $ openBrowser browser f
, startDaemon True True $ Just $
const $ openBrowser browser
)
checkpid = do
pidfile <- fromRepo gitAnnexPidFile
liftIO $ isJust <$> checkDaemon pidfile
checkshim f = liftIO $ doesFileExist f
{- When run without a repo, see if there is an autoStartFile,
- and if so, start the first available listed repository.
- If not, it's our first time being run! -}
startNoRepo :: IO ()
startNoRepo = do
autostartfile <- autoStartFile
ifM (doesFileExist autostartfile) ( autoStart autostartfile , firstRun )
autoStart :: FilePath -> IO ()
autoStart autostartfile = do
dirs <- lines <$> readFile autostartfile
edirs <- filterM doesDirectoryExist dirs
case edirs of
[] -> firstRun -- what else can I do? Nothing works..
(d:_) -> do
changeWorkingDirectory d
state <- Annex.new =<< Git.CurrentRepo.get
void $ Annex.eval state $ doCommand start
{- Run the webapp without a repository, which prompts the user, makes one,
- changes to it, starts the regular assistant, and redirects the
- browser to its url.
-
- This is a very tricky dance -- The first webapp calls the signaler,
- which signals the main thread when it's ok to continue by writing to a
- MVar. The main thread starts the second webapp, and uses its callback
- to write its url back to the MVar, from where the signaler retrieves it,
- returning it to the first webapp, which does the redirect.
-
- Note that it's important that mainthread never terminates! Much
- of this complication is due to needing to keep the mainthread running.
-}
firstRun :: IO ()
firstRun = do
dstatus <- atomically . newTMVar =<< newDaemonStatus
scanremotes <- newScanRemoteMap
transferqueue <- newTransferQueue
transferslots <- newTransferSlots
v <- newEmptyMVar
let callback a = Just $ a v
webAppThread Nothing dstatus scanremotes transferqueue transferslots
(callback signaler) (callback mainthread)
where
signaler v = do
putMVar v ""
takeMVar v
mainthread v _url htmlshim = do
browser <- webBrowser <$> Git.Config.global
openBrowser browser htmlshim
_wait <- takeMVar v
state <- Annex.new =<< Git.CurrentRepo.get
Annex.eval state $ do
dummydaemonize
startAssistant True id $ Just $ sendurlback v
sendurlback v url _htmlshim = putMVar v url
{- Set up the pid file in the new repo. -}
dummydaemonize = do
liftIO . lockPidFile =<< fromRepo gitAnnexPidFile
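Stripped of the webapp plumbing, the handshake described above can be sketched as follows; this is illustrative only (it uses two MVars where the code above reuses one, and the names and url are made up):
import Control.Concurrent

handshakeDemo :: IO ()
handshakeDemo = do
	ready <- newEmptyMVar
	url <- newEmptyMVar
	-- stands in for the first webapp's signaler
	_ <- forkIO $ do
		putMVar ready ()        -- tell the main thread it's ok to continue
		u <- takeMVar url       -- wait for the second webapp's url
		putStrLn $ "redirect browser to " ++ u
	takeMVar ready                  -- main thread blocks until signaled
	putMVar url "http://localhost:12345/"  -- plays the role of sendurlback
	threadDelay 100000              -- keep the main thread alive long enough to print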
openBrowser :: Maybe FilePath -> FilePath -> IO ()
openBrowser cmd htmlshim = go $ maybe runBrowser runCustomBrowser cmd
where
url = fileUrl htmlshim
go a = unlessM (a url) $
error $ "failed to start web browser on url " ++ url
runCustomBrowser c u = boolSystem c [Param u]
{- web.browser is a generic git config setting for a web browser program -}
webBrowser :: Git.Repo -> Maybe FilePath
webBrowser = Git.Config.getMaybe "web.browser"
fileUrl :: FilePath -> String
fileUrl file = "file://" ++ file

View file

@ -13,16 +13,15 @@ import Data.String.Utils as X
import System.Path as X
import System.FilePath as X
import System.Directory as X
import System.Cmd.Utils as X hiding (safeSystem)
import System.IO as X hiding (FilePath)
import System.Posix.Files as X
import System.Posix.IO as X
import System.Posix.Process as X hiding (executeFile)
import System.Exit as X
import Utility.Misc as X
import Utility.Exception as X
import Utility.SafeCommand as X
import Utility.Process as X
import Utility.Path as X
import Utility.Directory as X
import Utility.Monad as X

View file

@ -56,7 +56,7 @@ remoteCost r def = do
cmd <- getRemoteConfig r "cost-command" ""
(fromMaybe def . readish) <$>
if not $ null cmd
then liftIO $ snd <$> pipeFrom "sh" ["-c", cmd]
then liftIO $ readProcess "sh" ["-c", cmd]
else getRemoteConfig r "cost" ""
cheapRemoteCost :: Int
@ -116,4 +116,4 @@ getHttpHeaders = do
cmd <- getConfig (annexConfig "http-headers-command") ""
if null cmd
then fromRepo $ Git.Config.getList "annex.http-headers"
else lines . snd <$> liftIO (pipeFrom "sh" ["-c", cmd])
else lines <$> liftIO (readProcess "sh" ["-c", cmd])

View file

@ -112,7 +112,7 @@ decryptCipher (EncryptedCipher t _) = Cipher <$> Gpg.pipeStrict decrypt t
- on content. It does need to be repeatable. -}
encryptKey :: Cipher -> Key -> Key
encryptKey c k = Key
{ keyName = hmacWithCipher c (show k)
{ keyName = hmacWithCipher c (key2file k)
, keyBackendName = "GPGHMACSHA1"
, keySize = Nothing -- size and mtime omitted
, keyMtime = Nothing -- to avoid leaking data

View file

@ -18,6 +18,9 @@ data Field = Field
, fieldCheck :: String -> Bool
}
getField :: Field -> Annex (Maybe String)
getField = Annex.getField . fieldName
remoteUUID :: Field
remoteUUID = Field "remoteuuid" $
-- does it look like a UUID?
@ -27,6 +30,3 @@ associatedFile :: Field
associatedFile = Field "associatedfile" $ \f ->
-- is the file a safe relative filename?
not (isAbsolute f) && not ("../" `isPrefixOf` f)
getField :: Field -> Annex (Maybe String)
getField = Annex.getField . fieldName

View file

@ -5,6 +5,8 @@
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE BangPatterns #-}
module Git.Branch where
import Common
@ -12,13 +14,32 @@ import Git
import Git.Sha
import Git.Command
{- The currently checked out branch. -}
{- The currently checked out branch.
-
- In a just initialized git repo before the first commit,
- symbolic-ref will show the master branch, even though that
- branch is not created yet. So, this also looks at show-ref HEAD
- to double-check.
-}
current :: Repo -> IO (Maybe Git.Ref)
current r = parse <$> pipeRead [Param "symbolic-ref", Param "HEAD"] r
current r = do
v <- currentUnsafe r
case v of
Nothing -> return Nothing
Just branch ->
ifM (null <$> pipeRead [Param "show-ref", Param $ show branch] r)
( return Nothing
, return v
)
{- The current branch, which may not really exist yet. -}
currentUnsafe :: Repo -> IO (Maybe Git.Ref)
currentUnsafe r = parse . firstLine
<$> pipeRead [Param "symbolic-ref", Param "HEAD"] r
where
parse v
| null v = Nothing
| otherwise = Just $ Git.Ref $ firstLine v
parse l
| null l = Nothing
| otherwise = Just $ Git.Ref l
{- Checks if the second branch has any commits not present on the first
- branch. -}
@ -73,12 +94,10 @@ commit :: String -> Branch -> [Ref] -> Repo -> IO Sha
commit message branch parentrefs repo = do
tree <- getSha "write-tree" $
pipeRead [Param "write-tree"] repo
sha <- getSha "commit-tree" $
ignorehandle $ pipeWriteRead
(map Param $ ["commit-tree", show tree] ++ ps)
message repo
sha <- getSha "commit-tree" $ pipeWriteRead
(map Param $ ["commit-tree", show tree] ++ ps)
message repo
run "update-ref" [Param $ show branch, Param $ show sha] repo
return sha
where
ignorehandle a = snd <$> a
ps = concatMap (\r -> ["-p", show r]) parentrefs

View file

@ -7,10 +7,8 @@
module Git.Command where
import qualified Data.Text.Lazy as L
import qualified Data.Text.Lazy.IO as L
import Control.Concurrent
import Control.Exception (finally)
import System.Posix.Process (getAnyProcessStatus)
import System.Process
import Common
import Git
@ -29,7 +27,9 @@ gitCommandLine _ repo = assertLocal repo $ error "internal"
{- Runs git in the specified repo. -}
runBool :: String -> [CommandParam] -> Repo -> IO Bool
runBool subcommand params repo = assertLocal repo $
boolSystem "git" $ gitCommandLine (Param subcommand : params) repo
boolSystemEnv "git"
(gitCommandLine (Param subcommand : params) repo)
(gitEnv repo)
{- Runs git in the specified repo, throwing an error if it fails. -}
run :: String -> [CommandParam] -> Repo -> IO ()
@ -43,30 +43,28 @@ run subcommand params repo = assertLocal repo $
- result unless reap is called.
-}
pipeRead :: [CommandParam] -> Repo -> IO String
pipeRead params repo = assertLocal repo $ do
(_, h) <- hPipeFrom "git" $ toCommand $ gitCommandLine params repo
fileEncoding h
hGetContents h
pipeRead params repo = assertLocal repo $
withHandle StdoutHandle createBackgroundProcess p $ \h -> do
fileEncoding h
hGetContents h
where
p = (proc "git" $ toCommand $ gitCommandLine params repo)
{ env = gitEnv repo }
{- Runs a git subcommand, feeding it input.
- You should call either getProcessStatus or forceSuccess on the PipeHandle. -}
pipeWrite :: [CommandParam] -> L.Text -> Repo -> IO PipeHandle
pipeWrite params s repo = assertLocal repo $ do
(p, h) <- hPipeTo "git" (toCommand $ gitCommandLine params repo)
L.hPutStr h s
hClose h
return p
{- Runs a git subcommand, feeding it input, and returning its output,
- which is expected to be fairly small, since it's all read into memory
- strictly. -}
pipeWriteRead :: [CommandParam] -> String -> Repo -> IO String
pipeWriteRead params s repo = assertLocal repo $
writeReadProcessEnv "git" (toCommand $ gitCommandLine params repo)
(gitEnv repo) s
{- Runs a git subcommand, feeding it input, and returning its output.
- You should call either getProcessStatus or forceSuccess on the PipeHandle. -}
pipeWriteRead :: [CommandParam] -> String -> Repo -> IO (PipeHandle, String)
pipeWriteRead params s repo = assertLocal repo $ do
(p, from, to) <- hPipeBoth "git" (toCommand $ gitCommandLine params repo)
fileEncoding to
fileEncoding from
_ <- forkIO $ finally (hPutStr to s) (hClose to)
c <- hGetContents from
return (p, c)
{- Runs a git subcommand, feeding it input on a handle with an action. -}
pipeWrite :: [CommandParam] -> Repo -> (Handle -> IO ()) -> IO ()
pipeWrite params repo = withHandle StdinHandle createProcessSuccess p
where
p = (proc "git" $ toCommand $ gitCommandLine params repo)
{ env = gitEnv repo }
{- Reads null terminated output of a git command (as enabled by the -z
- parameter), and splits it. -}

View file

@ -9,6 +9,7 @@ module Git.Config where
import qualified Data.Map as M
import Data.Char
import System.Process (cwd, env)
import Common
import Git
@ -39,7 +40,7 @@ reRead :: Repo -> IO Repo
reRead = read'
{- Cannot use pipeRead because it relies on the config having been already
- read. Instead, chdir to the repo.
- read. Instead, chdir to the repo and run git config.
-}
read' :: Repo -> IO Repo
read' repo = go repo
@ -47,9 +48,24 @@ read' repo = go repo
go Repo { location = Local { gitdir = d } } = git_config d
go Repo { location = LocalUnknown d } = git_config d
go _ = assertLocal repo $ error "internal"
git_config d = bracketCd d $
pOpen ReadFromPipe "git" ["config", "--null", "--list"] $
hRead repo
git_config d = withHandle StdoutHandle createProcessSuccess p $
hRead repo
where
params = ["config", "--null", "--list"]
p = (proc "git" params)
{ cwd = Just d
, env = gitEnv repo
}
{- Gets the global git config, returning a dummy Repo containing it. -}
global :: IO Repo
global = do
repo <- Git.Construct.fromUnknown
withHandle StdoutHandle createProcessSuccess p $
hRead repo
where
params = ["config", "--null", "--list", "--global"]
p = (proc "git" params)
{- Reads git config from a handle and populates a repo with it. -}
hRead :: Repo -> Handle -> IO Repo

View file

@ -225,6 +225,7 @@ newFrom l = return Repo
, fullconfig = M.empty
, remotes = []
, remoteName = Nothing
, gitEnv = Nothing
}

View file

@ -38,11 +38,9 @@ hashFile h file = CoProcess.query h send receive
{- Injects some content into git, returning its Sha. -}
hashObject :: ObjectType -> String -> Repo -> IO Sha
hashObject objtype content repo = getSha subcmd $ do
(h, s) <- pipeWriteRead (map Param params) content repo
length s `seq` do
forceSuccess h
reap -- XXX unsure why this is needed
return s
s <- pipeWriteRead (map Param params) content repo
reap -- XXX unsure why this is needed, or if it is anymore
return s
where
subcmd = "hash-object"
params = [subcmd, "-t", show objtype, "-w", "--stdin"]

View file

@ -12,7 +12,10 @@ import System.Posix.Env (setEnv, unsetEnv, getEnv)
{- Forces git to use the specified index file.
-
- Returns an action that will reset back to the default
- index file. -}
- index file.
-
- Warning: Not thread safe.
-}
override :: FilePath -> IO (IO ())
override index = do
res <- getEnv var
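The hunk above truncates the body of override; as a hedged usage sketch (the index path is invented), callers are expected to pair it with the restore action it returns, e.g. via bracket, keeping the thread-safety warning in mind:
import Control.Exception (bracket)

-- run an action with GIT_INDEX_FILE pointed at an alternate index file,
-- restoring the previous setting afterwards (illustrative only)
withAltIndex :: FilePath -> IO a -> IO a
withAltIndex index action = bracket (override index) id (const action)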

View file

@ -19,7 +19,7 @@ module Git.Queue (
import qualified Data.Map as M
import System.IO
import System.Cmd.Utils
import System.Process
import Data.String.Utils
import Utility.SafeCommand
@ -149,10 +149,12 @@ runAction repo (UpdateIndexAction streamers) =
-- list is stored in reverse order
Git.UpdateIndex.streamUpdateIndex repo $ reverse streamers
runAction repo action@(CommandAction {}) =
pOpen WriteToPipe "xargs" ("-0":"git":params) feedxargs
withHandle StdinHandle createProcessSuccess p $ \h -> do
fileEncoding h
hPutStr h $ join "\0" $ getFiles action
hClose h
where
params = toCommand $ gitCommandLine
p = (proc "xargs" params) { env = gitEnv repo }
params = "-0":"git":baseparams
baseparams = toCommand $ gitCommandLine
(Param (getSubcommand action):getParams action) repo
feedxargs h = do
fileEncoding h
hPutStr h $ join "\0" $ getFiles action

View file

@ -27,15 +27,17 @@ data RepoLocation
| Unknown
deriving (Show, Eq)
data Repo = Repo {
location :: RepoLocation,
config :: M.Map String String,
data Repo = Repo
{ location :: RepoLocation
, config :: M.Map String String
-- a given git config key can actually have multiple values
fullconfig :: M.Map String [String],
remotes :: [Repo],
, fullconfig :: M.Map String [String]
, remotes :: [Repo]
-- remoteName holds the name used for this repo in remotes
remoteName :: Maybe String
} deriving (Show, Eq)
, remoteName :: Maybe String
-- alternate environment to use when running git commands
, gitEnv :: Maybe [(String, String)]
} deriving (Show, Eq)
{- A git ref. Can be a sha1, or a branch or tag name. -}
newtype Ref = Ref String

View file

@ -17,8 +17,6 @@ module Git.UpdateIndex (
stageSymlink
) where
import System.Cmd.Utils
import Common
import Git
import Git.Types
@ -36,12 +34,10 @@ pureStreamer !s = \streamer -> streamer s
{- Streams content into update-index from a list of Streamers. -}
streamUpdateIndex :: Repo -> [Streamer] -> IO ()
streamUpdateIndex repo as = do
(p, h) <- hPipeTo "git" (toCommand $ gitCommandLine params repo)
streamUpdateIndex repo as = pipeWrite params repo $ \h -> do
fileEncoding h
forM_ as (stream h)
hClose h
forceSuccess p
where
params = map Param ["update-index", "-z", "--index-info"]
stream h a = a (streamer h)

View file

@ -30,6 +30,7 @@ import qualified Command.Copy
import qualified Command.Get
import qualified Command.FromKey
import qualified Command.DropKey
import qualified Command.TransferKey
import qualified Command.ReKey
import qualified Command.Reinject
import qualified Command.Fix
@ -62,6 +63,10 @@ import qualified Command.Upgrade
import qualified Command.Version
#ifdef WITH_ASSISTANT
import qualified Command.Watch
import qualified Command.Assistant
#ifdef WITH_WEBAPP
import qualified Command.WebApp
#endif
#endif
cmds :: [Command]
@ -89,6 +94,7 @@ cmds = concat
, Command.Dead.def
, Command.FromKey.def
, Command.DropKey.def
, Command.TransferKey.def
, Command.ReKey.def
, Command.Fix.def
, Command.Fsck.def
@ -106,6 +112,10 @@ cmds = concat
, Command.Version.def
#ifdef WITH_ASSISTANT
, Command.Watch.def
, Command.Assistant.def
#ifdef WITH_WEBAPP
, Command.WebApp.def
#endif
#endif
]

22
Init.hs
View file

@ -7,18 +7,34 @@
module Init (
ensureInitialized,
isInitialized,
initialize,
uninitialize
) where
import Common.Annex
import Utility.TempFile
import Utility.Network
import qualified Git
import qualified Annex.Branch
import Logs.UUID
import Annex.Version
import Annex.UUID
import System.Posix.User
genDescription :: Maybe String -> Annex String
genDescription (Just d) = return d
genDescription Nothing = do
hostname <- maybe "" id <$> liftIO getHostname
let at = if null hostname then "" else "@"
username <- clicketyclickety
reldir <- liftIO . relHome =<< fromRepo Git.repoPath
return $ concat [username, at, hostname, ":", reldir]
where
clicketyclickety = liftIO $ userName <$>
(getUserEntryForID =<< getEffectiveUserID)
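For example (all values invented):
--   user "joey", host "kite", repo at ~/annex  ==>  "joey@kite:~/annex"
--   same, but no hostname available            ==>  "joey:~/annex"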
initialize :: Maybe String -> Annex ()
initialize mdescription = do
prepUUID
@ -26,7 +42,7 @@ initialize mdescription = do
setVersion
gitPreCommitHookWrite
u <- getUUID
maybe (recordUUID u) (describeUUID u) mdescription
describeUUID u =<< genDescription mdescription
uninitialize :: Annex ()
uninitialize = do
@ -45,6 +61,10 @@ ensureInitialized = getVersion >>= maybe needsinit checkVersion
, error "First run: git-annex init"
)
{- Checks if a repository is initialized. Does not check version for upgrade. -}
isInitialized :: Annex Bool
isInitialized = maybe Annex.Branch.hasSibling (const $ return True) =<< getVersion
{- set up a git pre-commit hook, if one is not already present -}
gitPreCommitHookWrite :: Annex ()
gitPreCommitHookWrite = unlessBare $ do

View file

@ -27,6 +27,7 @@ module Locations (
gitAnnexPidFile,
gitAnnexDaemonStatusFile,
gitAnnexLogFile,
gitAnnexHtmlShim,
gitAnnexSshDir,
gitAnnexRemotesDir,
isLinkToAnnex,
@ -129,7 +130,7 @@ gitAnnexUnusedLog :: FilePath -> Git.Repo -> FilePath
gitAnnexUnusedLog prefix r = gitAnnexDir r </> (prefix ++ "unused")
{- .git/annex/transfer/ is used to record keys currently
- being transferred. -}
- being transferred, and other transfer bookkeeping info. -}
gitAnnexTransferDir :: Git.Repo -> FilePath
gitAnnexTransferDir r = addTrailingPathSeparator $ gitAnnexDir r </> "transfer"
@ -166,6 +167,10 @@ gitAnnexDaemonStatusFile r = gitAnnexDir r </> "daemon.status"
gitAnnexLogFile :: Git.Repo -> FilePath
gitAnnexLogFile r = gitAnnexDir r </> "daemon.log"
{- Html shim file used to launch the webapp. -}
gitAnnexHtmlShim :: Git.Repo -> FilePath
gitAnnexHtmlShim r = gitAnnexDir r </> "webapp.html"
{- .git/annex/ssh/ is used for ssh connection caching -}
gitAnnexSshDir :: Git.Repo -> FilePath
gitAnnexSshDir r = addTrailingPathSeparator $ gitAnnexDir r </> "ssh"
@ -194,7 +199,7 @@ isLinkToAnnex s = ('/':d) `isInfixOf` s || d `isPrefixOf` s
-}
keyFile :: Key -> FilePath
keyFile key = replace "/" "%" $ replace ":" "&c" $
replace "%" "&s" $ replace "&" "&a" $ show key
replace "%" "&s" $ replace "&" "&a" $ key2file key
{- A location to store a key on the filesystem. A directory hash is used,
- to protect against filesystems that dislike having many items in a
@ -215,7 +220,7 @@ keyPaths key = map (keyPath key) annexHashes
{- Reverses keyFile, converting a filename fragment (ie, the basename of
- the symlink target) into a key. -}
fileKey :: FilePath -> Maybe Key
fileKey file = readKey $
fileKey file = file2key $
replace "&a" "&" $ replace "&s" "%" $
replace "&c" ":" $ replace "%" "/" file
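To make the escaping concrete, a worked round trip on an invented serialization (not a real key):
--   key2file k  == "WORM-s3--foo/bar&baz"
--   keyFile  k  == "WORM-s3--foo%bar&abaz"
--     ('&' -> "&a" is applied first, then '%' -> "&s", ':' -> "&c", and '/' -> "%" last)
--   fileKey (keyFile k) undoes the replacements in the reverse order,
--   recovering "WORM-s3--foo/bar&baz"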
@ -237,12 +242,12 @@ hashDirMixed :: Hasher
hashDirMixed k = addTrailingPathSeparator $ take 2 dir </> drop 2 dir
where
dir = take 4 $ display_32bits_as_dir =<< [a,b,c,d]
ABCD (a,b,c,d) = md5 $ encodeFilePath $ show k
ABCD (a,b,c,d) = md5 $ encodeFilePath $ key2file k
hashDirLower :: Hasher
hashDirLower k = addTrailingPathSeparator $ take 3 dir </> drop 3 dir
where
dir = take 6 $ md5s $ encodeFilePath $ show k
dir = take 6 $ md5s $ encodeFilePath $ key2file k
{- modified version of display_32bits_as_hex from Data.Hash.MD5
- Copyright (C) 2001 Ian Lynagh

31
Locations/UserConfig.hs Normal file
View file

@ -0,0 +1,31 @@
{- git-annex user config files
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Locations.UserConfig where
import Common
import Utility.FreeDesktop
{- ~/.config/git-annex/file -}
userConfigFile :: FilePath -> IO FilePath
userConfigFile file = do
dir <- userConfigDir
return $ dir </> "git-annex" </> file
autoStartFile :: IO FilePath
autoStartFile = userConfigFile "autostart"
{- The path to git-annex is written here, which is useful when cabal
- has installed it to some awful non-PATH location. -}
programFile :: IO FilePath
programFile = userConfigFile "program"
{- Returns a command to run for git-annex. -}
readProgramFile :: IO FilePath
readProgramFile = do
programfile <- programFile
catchDefaultIO (readFile programfile) "git-annex"

View file

@ -12,7 +12,8 @@ import Annex.Perms
import Annex.Exception
import qualified Git
import Types.Remote
import qualified Fields
import Types.Key
import Utility.Percentage
import System.Posix.Types
import Data.Time.Clock
@ -28,7 +29,7 @@ data Transfer = Transfer
, transferUUID :: UUID
, transferKey :: Key
}
deriving (Show, Eq, Ord)
deriving (Eq, Ord, Read, Show)
{- Information about a Transfer, stored in the transfer information file.
-
@ -43,39 +44,42 @@ data TransferInfo = TransferInfo
, transferRemote :: Maybe Remote
, bytesComplete :: Maybe Integer
, associatedFile :: Maybe FilePath
, transferPaused :: Bool
}
deriving (Show, Eq, Ord)
data Direction = Upload | Download
deriving (Eq, Ord)
deriving (Eq, Ord, Read, Show)
instance Show Direction where
show Upload = "upload"
show Download = "download"
showLcDirection :: Direction -> String
showLcDirection Upload = "upload"
showLcDirection Download = "download"
readDirection :: String -> Maybe Direction
readDirection "upload" = Just Upload
readDirection "download" = Just Download
readDirection _ = Nothing
readLcDirection :: String -> Maybe Direction
readLcDirection "upload" = Just Upload
readLcDirection "download" = Just Download
readLcDirection _ = Nothing
upload :: UUID -> Key -> AssociatedFile -> Annex a -> Annex a
upload u key file a = transfer (Transfer Upload u key) file a
percentComplete :: Transfer -> TransferInfo -> Maybe Percentage
percentComplete (Transfer { transferKey = key }) (TransferInfo { bytesComplete = Just complete }) =
(\size -> percentage size complete) <$> keySize key
percentComplete _ _ = Nothing
download :: UUID -> Key -> AssociatedFile -> Annex a -> Annex a
download u key file a = transfer (Transfer Download u key) file a
upload :: UUID -> Key -> AssociatedFile -> Annex Bool -> Annex Bool
upload u key file a = runTransfer (Transfer Upload u key) file a
fieldTransfer :: Direction -> Key -> Annex a -> Annex a
fieldTransfer direction key a = do
afile <- Fields.getField Fields.associatedFile
maybe a (\u -> transfer (Transfer direction (toUUID u) key) afile a)
=<< Fields.getField Fields.remoteUUID
download :: UUID -> Key -> AssociatedFile -> Annex Bool -> Annex Bool
download u key file a = runTransfer (Transfer Download u key) file a
{- Runs a transfer action. Creates and locks the lock file while the
- action is running, and stores info in the transfer information
- file. Will throw an error if the transfer is already in progress.
-
- If the transfer action returns False, the transfer info is
- left in the failedTransferDir.
-}
transfer :: Transfer -> Maybe FilePath -> Annex a -> Annex a
transfer t file a = do
runTransfer :: Transfer -> Maybe FilePath -> Annex Bool -> Annex Bool
runTransfer t file a = do
tfile <- fromRepo $ transferFile t
createAnnexDirectory $ takeDirectory tfile
mode <- annexFileMode
@ -86,21 +90,29 @@ transfer t file a = do
<*> pure Nothing -- not 0; transfer may be resuming
<*> pure Nothing
<*> pure file
bracketIO (prep tfile mode info) (cleanup tfile) a
<*> pure False
let content = writeTransferInfo info
ok <- bracketIO (prep tfile mode content) (cleanup tfile) a
unless ok $ failed content
return ok
where
prep tfile mode info = do
prep tfile mode content = do
fd <- openFd (transferLockFile tfile) ReadWrite (Just mode)
defaultFileFlags { trunc = True }
locked <- catchMaybeIO $
setLock fd (WriteLock, AbsoluteSeek, 0, 0)
when (locked == Nothing) $
error $ "transfer already in progress"
writeFile tfile $ writeTransferInfo info
writeFile tfile content
return fd
cleanup tfile fd = do
void $ tryIO $ removeFile tfile
void $ tryIO $ removeFile $ transferLockFile tfile
closeFd fd
failed content = do
failedtfile <- fromRepo $ failedTransferFile t
createAnnexDirectory $ takeDirectory failedtfile
liftIO $ writeFile failedtfile content
{- If a transfer is still running, returns its TransferInfo. -}
checkTransfer :: Transfer -> Annex (Maybe TransferInfo)
@ -119,28 +131,47 @@ checkTransfer t = do
Nothing -> return Nothing
Just (pid, _) -> liftIO $
flip catchDefaultIO Nothing $ do
readTransferInfo pid
readTransferInfo (Just pid)
<$> readFile tfile
{- Gets all currently running transfers. -}
getTransfers :: Annex [(Transfer, TransferInfo)]
getTransfers = do
transfers <- catMaybes . map parseTransferFile <$> findfiles
transfers <- catMaybes . map parseTransferFile . concat <$> findfiles
infos <- mapM checkTransfer transfers
return $ map (\(t, Just i) -> (t, i)) $
filter running $ zip transfers infos
where
findfiles = liftIO . dirContentsRecursive
=<< fromRepo gitAnnexTransferDir
findfiles = liftIO . mapM dirContentsRecursive
=<< mapM (fromRepo . transferDir)
[Download, Upload]
running (_, i) = isJust i
{- Gets failed transfers for a given remote UUID. -}
getFailedTransfers :: UUID -> Annex [(Transfer, TransferInfo)]
getFailedTransfers u = catMaybes <$> (liftIO . getpairs =<< concat <$> findfiles)
where
getpairs = mapM $ \f -> do
let mt = parseTransferFile f
mi <- readTransferInfo Nothing <$> readFile f
return $ case (mt, mi) of
(Just t, Just i) -> Just (t, i)
_ -> Nothing
findfiles = liftIO . mapM dirContentsRecursive
=<< mapM (fromRepo . failedTransferDir u)
[Download, Upload]
{- The transfer information file to use for a given Transfer. -}
transferFile :: Transfer -> Git.Repo -> FilePath
transferFile (Transfer direction u key) r = gitAnnexTransferDir r
</> show direction
transferFile (Transfer direction u key) r = transferDir direction r
</> fromUUID u
</> keyFile key
{- The transfer information file to use to record a failed Transfer -}
failedTransferFile :: Transfer -> Git.Repo -> FilePath
failedTransferFile (Transfer direction u key) r = failedTransferDir u direction r
</> keyFile key
{- The transfer lock file corresponding to a given transfer info file. -}
transferLockFile :: FilePath -> FilePath
transferLockFile infofile = let (d,f) = splitFileName infofile in
@ -152,7 +183,7 @@ parseTransferFile file
| "lck." `isPrefixOf` (takeFileName file) = Nothing
| otherwise = case drop (length bits - 3) bits of
[direction, u, key] -> Transfer
<$> readDirection direction
<$> readLcDirection direction
<*> pure (toUUID u)
<*> fileKey key
_ -> Nothing
@ -168,16 +199,17 @@ writeTransferInfo info = unlines
, fromMaybe "" $ associatedFile info -- comes last; arbitrary content
]
readTransferInfo :: ProcessID -> String -> Maybe TransferInfo
readTransferInfo pid s =
readTransferInfo :: (Maybe ProcessID) -> String -> Maybe TransferInfo
readTransferInfo mpid s =
case bits of
[time] -> TransferInfo
<$> (Just <$> parsePOSIXTime time)
<*> pure (Just pid)
<*> pure mpid
<*> pure Nothing
<*> pure Nothing
<*> pure Nothing
<*> pure (if null filename then Nothing else Just filename)
<*> pure False
_ -> Nothing
where
(bits, filebits) = splitAt 1 $ lines s
@ -186,3 +218,15 @@ readTransferInfo pid s =
parsePOSIXTime :: String -> Maybe POSIXTime
parsePOSIXTime s = utcTimeToPOSIXSeconds
<$> parseTime defaultTimeLocale "%s%Qs" s
{- The directory holding transfer information files for a given Direction. -}
transferDir :: Direction -> Git.Repo -> FilePath
transferDir direction r = gitAnnexTransferDir r </> showLcDirection direction
{- The directory holding failed transfer information files for a given
- Direction and UUID -}
failedTransferDir :: UUID -> Direction -> Git.Repo -> FilePath
failedTransferDir u direction r = gitAnnexTransferDir r
</> "failed"
</> showLcDirection direction
</> fromUUID u
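Spelled out with invented values (uuid "f35a...", key filename "SHA256-s100--7ab3...") and assuming the usual .git/annex location, a Download ends up with paths like:
--   transferFile        -> .git/annex/transfer/download/f35a.../SHA256-s100--7ab3...
--   its lock file       -> .git/annex/transfer/download/f35a.../lck.SHA256-s100--7ab3...
--   failedTransferFile  -> .git/annex/transfer/failed/download/f35a.../SHA256-s100--7ab3...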

View file

@ -25,7 +25,7 @@ writeUnusedLog :: FilePath -> [(Int, Key)] -> Annex ()
writeUnusedLog prefix l = do
logfile <- fromRepo $ gitAnnexUnusedLog prefix
liftIO $ viaTmp writeFile logfile $
unlines $ map (\(n, k) -> show n ++ " " ++ show k) l
unlines $ map (\(n, k) -> show n ++ " " ++ key2file k) l
readUnusedLog :: FilePath -> Annex UnusedMap
readUnusedLog prefix = do
@ -37,7 +37,7 @@ readUnusedLog prefix = do
)
where
parse line =
case (readish tag, readKey rest) of
case (readish tag, file2key rest) of
(Just num, Just key) -> Just (num, key)
_ -> Nothing
where

View file

@ -16,6 +16,7 @@ module Logs.Web (
import Common.Annex
import Logs.Presence
import Logs.Location
import Types.Key
type URLString = String
@ -29,7 +30,7 @@ urlLog key = hashDirLower key </> keyFile key ++ ".log.web"
{- Used to store the urls elsewhere. -}
oldurlLogs :: Key -> [FilePath]
oldurlLogs key =
[ "remote/web" </> hashDirLower key </> show key ++ ".log"
[ "remote/web" </> hashDirLower key </> key2file key ++ ".log"
, "remote/web" </> hashDirLower key </> keyFile key ++ ".log"
]

View file

@ -1,19 +1,24 @@
CFLAGS=-Wall
GIT_ANNEX_TMP_BUILD_DIR?=tmp
IGNORE=-ignore-package monads-fd -ignore-package monads-tf
BASEFLAGS=-threaded -Wall $(IGNORE) -outputdir $(GIT_ANNEX_TMP_BUILD_DIR) -IUtility
FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_WEBAPP -DWITH_OLD_YESOD
bins=git-annex
mans=git-annex.1 git-annex-shell.1
sources=Build/SysConfig.hs Utility/Touch.hs
sources=Build/SysConfig.hs Utility/Touch.hs Utility/Mounts.hs
all=$(bins) $(mans) docs
CFLAGS=-Wall
OS:=$(shell uname | sed 's/[-_].*//')
ifeq ($(OS),Linux)
BASEFLAGS_OPTS=-DWITH_INOTIFY
clibs=Utility/libdiskfree.o
OPTFLAGS=-DWITH_INOTIFY -DWITH_DBUS
clibs=Utility/libdiskfree.o Utility/libmounts.o
else
# BSD system
BASEFLAGS_OPTS=-DWITH_KQUEUE
clibs=Utility/libdiskfree.o Utility/libkqueue.o
OPTFLAGS=-DWITH_KQUEUE
clibs=Utility/libdiskfree.o Utility/libmounts.o Utility/libkqueue.o
ifeq ($(OS),Darwin)
OPTFLAGS=-DWITH_KQUEUE -DOSX
# Ensure OSX compiler builds for 32 bit when using 32 bit ghc
GHCARCH:=$(shell ghc -e 'print System.Info.arch')
ifeq ($(GHCARCH),i386)
@ -23,12 +28,10 @@ endif
endif
PREFIX=/usr
IGNORE=-ignore-package monads-fd -ignore-package monads-tf
BASEFLAGS=-Wall $(IGNORE) -outputdir tmp -IUtility -DWITH_ASSISTANT -DWITH_S3 $(BASEFLAGS_OPTS)
GHCFLAGS=-O2 $(BASEFLAGS)
GHCFLAGS=-O2 $(BASEFLAGS) $(FEATURES) $(OPTFLAGS)
ifdef PROFILE
GHCFLAGS=-prof -auto-all -rtsopts -caf-all -fforce-recomp $(BASEFLAGS)
GHCFLAGS=-prof -auto-all -rtsopts -caf-all -fforce-recomp $(BASEFLAGS) $(FEATURES) $(OPTFLAGS)
endif
GHCMAKE=ghc $(GHCFLAGS) --make
@ -44,7 +47,7 @@ build: $(all)
sources: $(sources)
# Disables optimisation. Not for production use.
fast: GHCFLAGS=$(BASEFLAGS)
fast: GHCFLAGS=$(BASEFLAGS) $(FEATURES) $(OPTFLAGS)
fast: $(bins)
Build/SysConfig.hs: configure.hs Build/TestConfig.hs Build/Configure.hs
@ -54,9 +57,10 @@ Build/SysConfig.hs: configure.hs Build/TestConfig.hs Build/Configure.hs
%.hs: %.hsc
hsc2hs $<
git-annex: $(sources) $(clibs)
$(GHCMAKE) $@ $(clibs)
install -d $(GIT_ANNEX_TMP_BUILD_DIR)
$(GHCMAKE) $@ -o $(GIT_ANNEX_TMP_BUILD_DIR)/git-annex $(clibs)
ln -sf $(GIT_ANNEX_TMP_BUILD_DIR)/git-annex git-annex
git-annex.1: doc/git-annex.mdwn
./mdwn2man git-annex 1 doc/git-annex.mdwn > git-annex.1
@ -79,6 +83,7 @@ install: build-stamp install-docs
install -d $(DESTDIR)$(PREFIX)/bin
install $(bins) $(DESTDIR)$(PREFIX)/bin
ln -sf git-annex $(DESTDIR)$(PREFIX)/bin/git-annex-shell
runghc Build/InstallDesktopFile.hs $(PREFIX)/bin/git-annex || true
test: $(sources) $(clibs)
@if ! $(GHCMAKE) -O0 test $(clibs); then \
@ -91,7 +96,7 @@ test: $(sources) $(clibs)
testcoverage:
rm -f test.tix test
ghc $(GHCFLAGS) -outputdir tmp/testcoverage --make -fhpc test
ghc $(GHCFLAGS) -outputdir $(GIT_ANNEX_TMP_BUILD_DIR)/testcoverage --make -fhpc test
./test
@echo ""
@hpc report test --exclude=Main --exclude=QC
@ -115,7 +120,7 @@ docs: $(mans)
--exclude='news/.*'
clean:
rm -rf tmp $(bins) $(mans) test configure *.tix .hpc $(sources) \
rm -rf $(GIT_ANNEX_TMP_BUILD_DIR) $(bins) $(mans) test configure *.tix .hpc $(sources) \
doc/.ikiwiki html dist $(clibs) build-stamp
sdist: clean $(mans)

View file

@ -17,6 +17,9 @@ module Option (
import System.Console.GetOpt
import System.Log.Logger
import System.Log.Formatter
import System.Log.Handler (setFormatter, LogHandler)
import System.Log.Handler.Simple
import Common.Annex
import qualified Annex
@ -48,8 +51,13 @@ common =
setfast v = Annex.changeState $ \s -> s { Annex.fast = v }
setauto v = Annex.changeState $ \s -> s { Annex.auto = v }
setforcebackend v = Annex.changeState $ \s -> s { Annex.forcebackend = Just v }
setdebug = liftIO $ updateGlobalLogger rootLoggerName $
setLevel DEBUG
setdebug = liftIO $ do
s <- simpledebug
updateGlobalLogger rootLoggerName
(setLevel DEBUG . setHandlers [s])
simpledebug = setFormatter
<$> streamHandler stderr DEBUG
<*> pure (simpleLogFormatter "[$time] $msg")
matcher :: [Option]
matcher =
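For context, the new debug-logging setup above can be reproduced in isolation. This is a minimal sketch using only the hslogger package; the logger name "demo" and the message text are arbitrary:

import System.IO (stderr)
import System.Log.Logger
import System.Log.Handler (setFormatter)
import System.Log.Handler.Simple (streamHandler)
import System.Log.Formatter (simpleLogFormatter)
import Control.Applicative

main :: IO ()
main = do
	-- Build a stderr handler with a timestamp prefix, then install it.
	s <- setFormatter <$> streamHandler stderr DEBUG <*> pure (simpleLogFormatter "[$time] $msg")
	updateGlobalLogger rootLoggerName (setLevel DEBUG . setHandlers [s])
	debugM "demo" "this message goes to stderr with a timestamp prefix"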

View file

@ -20,10 +20,13 @@ module Remote (
remoteTypes,
remoteList,
enabledRemoteList,
specialRemote,
remoteMap,
uuidDescriptions,
byName,
byCost,
prettyPrintUUIDs,
prettyListUUIDs,
remotesWithUUID,
remotesWithoutUUID,
keyLocations,
@ -128,6 +131,20 @@ prettyPrintUUIDs desc uuids = do
, ("here", toJSON $ hereu == u)
]
{- List of remote names and/or descriptions, for human display. -}
prettyListUUIDs :: [UUID] -> Annex [String]
prettyListUUIDs uuids = do
hereu <- getUUID
m <- uuidDescriptions
return $ map (\u -> prettify m hereu u) uuids
where
finddescription m u = M.findWithDefault "" u m
prettify m hereu u
| u == hereu = addName n "here"
| otherwise = n
where
n = finddescription m u
{- Filters a list of remotes to ones that have the listed uuids. -}
remotesWithUUID :: [Remote] -> [UUID] -> [Remote]
remotesWithUUID rs us = filter (\r -> uuid r `elem` us) rs
@ -208,3 +225,10 @@ forceTrust level remotename = do
- on the remote, but this cannot always be relied on. -}
logStatus :: Remote -> Key -> LogStatus -> Annex ()
logStatus remote key = logChange key (uuid remote)
{- Orders remotes by cost, with ones with the lowest cost grouped together. -}
byCost :: [Remote] -> [[Remote]]
byCost = map snd . sort . M.toList . costmap
where
costmap = M.fromListWith (++) . map costpair
costpair r = (cost r, [r])
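For reference, the same grouping idea in isolation. This is a minimal sketch using hypothetical cost/name pairs rather than real Remote values:

import qualified Data.Map as M
import Data.List (sort)

-- Entries with equal cost end up in the same sublist; sublists are ordered
-- by ascending cost, just like byCost above.
byCostDemo :: [(Int, String)] -> [[String]]
byCostDemo = map snd . sort . M.toList . M.fromListWith (++) . map (\(c, n) -> (c, [n]))

main :: IO ()
main = print $ byCostDemo [(200, "s3"), (100, "origin"), (100, "laptop")]
-- prints [["laptop","origin"],["s3"]]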

View file

@ -13,6 +13,7 @@ import System.Process
import Common.Annex
import Types.Remote
import Types.Key
import qualified Git
import qualified Git.Command
import qualified Git.Config
@ -46,21 +47,25 @@ gen r u c = do
return $ encryptableRemote c
(storeEncrypted r buprepo)
(retrieveEncrypted buprepo)
Remote {
uuid = u',
cost = cst,
name = Git.repoDescribe r,
storeKey = store r buprepo,
retrieveKeyFile = retrieve buprepo,
retrieveKeyFileCheap = retrieveCheap buprepo,
removeKey = remove,
hasKey = checkPresent r bupr',
hasKeyCheap = bupLocal buprepo,
whereisKey = Nothing,
config = c,
repo = r,
remotetype = remote
}
Remote
{ uuid = u'
, cost = cst
, name = Git.repoDescribe r
, storeKey = store r buprepo
, retrieveKeyFile = retrieve buprepo
, retrieveKeyFileCheap = retrieveCheap buprepo
, removeKey = remove
, hasKey = checkPresent r bupr'
, hasKeyCheap = bupLocal buprepo
, whereisKey = Nothing
, config = c
, repo = r
, localpath = if bupLocal buprepo && not (null buprepo)
then Just buprepo
else Nothing
, remotetype = remote
, readonly = False
}
bupSetup :: UUID -> RemoteConfig -> Annex RemoteConfig
bupSetup u c = do
@ -133,13 +138,13 @@ retrieveCheap :: BupRepo -> Key -> FilePath -> Annex Bool
retrieveCheap _ _ _ = return False
retrieveEncrypted :: BupRepo -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted buprepo (cipher, enck) _ f = do
let params = bupParams "join" buprepo [Param $ bupRef enck]
liftIO $ catchBoolIO $ do
(pid, h) <- hPipeFrom "bup" $ toCommand params
retrieveEncrypted buprepo (cipher, enck) _ f = liftIO $ catchBoolIO $
withHandle StdoutHandle createProcessSuccess p $ \h -> do
withDecryptedContent cipher (L.hGetContents h) $ L.writeFile f
forceSuccess pid
return True
where
params = bupParams "join" buprepo [Param $ bupRef enck]
p = proc "bup" $ toCommand params
remove :: Key -> Annex Bool
remove _ = do
@ -240,7 +245,7 @@ bupRef k
| Git.Ref.legal True shown = shown
| otherwise = "git-annex-" ++ showDigest (sha256 (fromString shown))
where
shown = show k
shown = key2file k
bupLocal :: BupRepo -> Bool
bupLocal = notElem ':'

View file

@ -53,6 +53,8 @@ gen r u c = do
whereisKey = Nothing,
config = Nothing,
repo = r,
localpath = Just dir,
readonly = False,
remotetype = remote
}
where

View file

@ -5,7 +5,11 @@
- Licensed under the GNU GPL version 3 or higher.
-}
module Remote.Git (remote, repoAvail) where
module Remote.Git (
remote,
configRead,
repoAvail,
) where
import qualified Data.Map as M
import Control.Exception.Extensible
@ -45,7 +49,7 @@ list :: Annex [Git.Repo]
list = do
c <- fromRepo Git.config
rs <- mapM (tweakurl c) =<< fromRepo Git.remotes
mapM configread rs
mapM configRead rs
where
annexurl n = "remote." ++ n ++ ".annexurl"
tweakurl c r = do
@ -55,19 +59,21 @@ list = do
Just url -> inRepo $ \g ->
Git.Construct.remoteNamed n $
Git.Construct.fromRemoteLocation url g
{- It's assumed to be cheap to read the config of non-URL
- remotes, so this is done each time git-annex is run
- in a way that uses remotes.
- Conversely, the config of an URL remote is only read
- when there is no cached UUID value. -}
configread r = do
notignored <- repoNotIgnored r
u <- getRepoUUID r
case (repoCheap r, notignored, u) of
(_, False, _) -> return r
(True, _, _) -> tryGitConfigRead r
(False, _, NoUUID) -> tryGitConfigRead r
_ -> return r
{- It's assumed to be cheap to read the config of non-URL remotes, so this is
- done each time git-annex is run in a way that uses remotes.
-
- Conversely, the config of a URL remote is only read when there is no
- cached UUID value. -}
configRead :: Git.Repo -> Annex Git.Repo
configRead r = do
notignored <- repoNotIgnored r
u <- getRepoUUID r
case (repoCheap r, notignored, u) of
(_, False, _) -> return r
(True, _, _) -> tryGitConfigRead r
(False, _, NoUUID) -> tryGitConfigRead r
_ -> return r
repoCheap :: Git.Repo -> Bool
repoCheap = not . Git.repoIsUrl
@ -76,21 +82,26 @@ gen :: Git.Repo -> UUID -> Maybe RemoteConfig -> Annex Remote
gen r u _ = new <$> remoteCost r defcst
where
defcst = if repoCheap r then cheapRemoteCost else expensiveRemoteCost
new cst = Remote {
uuid = u,
cost = cst,
name = Git.repoDescribe r,
storeKey = copyToRemote r,
retrieveKeyFile = copyFromRemote r,
retrieveKeyFileCheap = copyFromRemoteCheap r,
removeKey = dropKey r,
hasKey = inAnnex r,
hasKeyCheap = repoCheap r,
whereisKey = Nothing,
config = Nothing,
repo = r,
remotetype = remote
}
new cst = Remote
{ uuid = u
, cost = cst
, name = Git.repoDescribe r
, storeKey = copyToRemote r
, retrieveKeyFile = copyFromRemote r
, retrieveKeyFileCheap = copyFromRemoteCheap r
, removeKey = dropKey r
, hasKey = inAnnex r
, hasKeyCheap = repoCheap r
, whereisKey = Nothing
, config = Nothing
, localpath = if Git.repoIsLocal r || Git.repoIsLocalUnknown r
then Just $ Git.repoPath r
else Nothing
, repo = r
, readonly = Git.repoIsHttp r
, remotetype = remote
}
{- Checks relatively inexpensively if a repository is available for use. -}
repoAvail :: Git.Repo -> Annex Bool
@ -127,16 +138,17 @@ tryGitConfigRead r
=<< liftIO (try a :: IO (Either SomeException Git.Repo))
pipedconfig cmd params = safely $
pOpen ReadFromPipe cmd (toCommand params) $
withHandle StdoutHandle createProcessSuccess p $
Git.Config.hRead r
where
p = proc cmd $ toCommand params
geturlconfig headers = do
s <- Url.get (Git.repoLocation r ++ "/config") headers
withTempFile "git-annex.tmp" $ \tmpfile h -> do
hPutStr h s
hClose h
pOpen ReadFromPipe "git" ["config", "--null", "--list", "--file", tmpfile] $
Git.Config.hRead r
pipedconfig "git" [Param "config", Param "--null", Param "--list", Param "--file", File tmpfile]
store = observe $ \r' -> do
g <- gitRepo
@ -172,7 +184,7 @@ inAnnex r key
v -> return v
checkremote = do
showAction $ "checking " ++ Git.repoDescribe r
onRemote r (check, unknown) "inannex" [Param (show key)] []
onRemote r (check, unknown) "inannex" [Param (key2file key)] []
where
check c p = dispatch <$> safeSystem c p
dispatch ExitSuccess = Right True
@ -217,7 +229,7 @@ dropKey r key
| Git.repoIsHttp r = error "dropping from http repo not supported"
| otherwise = commitOnCleanup r $ onRemote r (boolSystem, False) "dropkey"
[ Params "--quiet --force"
, Param $ show key
, Param $ key2file key
]
[]
@ -299,7 +311,7 @@ rsyncParamsRemote r sending key file afile = do
: maybe [] (\f -> [(Fields.associatedFile, f)]) afile
Just (shellcmd, shellparams) <- git_annex_shell r
(if sending then "sendkey" else "recvkey")
[ Param $ show key ]
[ Param $ key2file key ]
fields
-- Convert the ssh command into rsync command line.
let eparam = rsyncShell (Param shellcmd:shellparams)

View file

@ -9,11 +9,11 @@ module Remote.Hook (remote) where
import qualified Data.ByteString.Lazy as L
import qualified Data.Map as M
import System.Exit
import System.Environment
import Common.Annex
import Types.Remote
import Types.Key
import qualified Git
import Config
import Annex.Content
@ -48,7 +48,9 @@ gen r u c = do
hasKeyCheap = False,
whereisKey = Nothing,
config = Nothing,
localpath = Nothing,
repo = r,
readonly = False,
remotetype = remote
}
@ -68,7 +70,7 @@ hookEnv k f = Just <$> mergeenv (fileenv f ++ keyenv)
<$> M.fromList <$> getEnvironment
env s v = ("ANNEX_" ++ s, v)
keyenv =
[ env "KEY" (show k)
[ env "KEY" (key2file k)
, env "HASH_1" (hashbits !! 0)
, env "HASH_2" (hashbits !! 1)
]
@ -133,20 +135,8 @@ checkPresent r h k = do
v <- lookupHook h "checkpresent"
liftIO $ catchMsgIO $ check v
where
findkey s = show k `elem` lines s
findkey s = key2file k `elem` lines s
check Nothing = error "checkpresent hook misconfigured"
check (Just hook) = do
(frompipe, topipe) <- createPipe
pid <- forkProcess $ do
_ <- dupTo topipe stdOutput
closeFd frompipe
executeFile "sh" True ["-c", hook]
=<< hookEnv k Nothing
closeFd topipe
fromh <- fdToHandle frompipe
reply <- hGetContentsStrict fromh
hClose fromh
s <- getProcessStatus True False pid
case s of
Just (Exited ExitSuccess) -> return $ findkey reply
_ -> error "checkpresent hook failed"
env <- hookEnv k Nothing
findkey <$> readProcessEnv "sh" ["-c", hook] env
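For context, a minimal standalone sketch (not part of the commit) of the readProcessEnv helper that the rewritten checkpresent hook now relies on, assuming git-annex's Utility.Process module; the environment variable name and shell snippet are made up:

import System.Environment (getEnvironment)
import Utility.Process (readProcessEnv)

main :: IO ()
main = do
	environ <- getEnvironment
	-- Run a shell snippet with one extra variable and read its stdout,
	-- mirroring how the checkpresent hook is now invoked.
	out <- readProcessEnv "sh" ["-c", "echo $ANNEX_DEMO"]
		(Just (("ANNEX_DEMO", "hello") : environ))
	putStrLn out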

View file

@ -2,7 +2,7 @@
{- git-annex remote list
-
- Copyright 2011 Joey Hess <joey@kitenet.net>
- Copyright 2011,2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
@ -18,6 +18,8 @@ import Types.Remote
import Annex.UUID
import Config
import Remote.Helper.Hooks
import qualified Git
import qualified Git.Config
import qualified Remote.Git
#ifdef WITH_S3
@ -55,11 +57,40 @@ remoteList = do
return rs'
else return rs
where
process m t = enumerate t >>= mapM (gen m t)
gen m t r = do
u <- getRepoUUID r
addHooks =<< generate t r u (M.lookup u m)
process m t = enumerate t >>= mapM (remoteGen m t)
{- Forces the remoteList to be re-generated, re-reading the git config. -}
remoteListRefresh :: Annex [Remote]
remoteListRefresh = do
newg <- inRepo Git.Config.reRead
Annex.changeState $ \s -> s
{ Annex.remotes = []
, Annex.repo = newg
}
remoteList
{- Generates a Remote. -}
remoteGen :: (M.Map UUID RemoteConfig) -> RemoteType -> Git.Repo -> Annex Remote
remoteGen m t r = do
u <- getRepoUUID r
addHooks =<< generate t r u (M.lookup u m)
{- Updates a local git Remote, re-reading its git config. -}
updateRemote :: Remote -> Annex Remote
updateRemote remote = do
m <- readRemoteLog
remote' <- updaterepo $ repo remote
remoteGen m (remotetype remote) remote'
where
updaterepo r
| Git.repoIsLocal r || Git.repoIsLocalUnknown r =
Remote.Git.configRead r
| otherwise = return r
{- All remotes that are not ignored. -}
enabledRemoteList :: Annex [Remote]
enabledRemoteList = filterM (repoNotIgnored . repo) =<< remoteList
{- Checks if a remote is a special remote -}
specialRemote :: Remote -> Bool
specialRemote r = remotetype r /= Remote.Git.remote

View file

@ -9,6 +9,7 @@ module Remote.Rsync (remote) where
import qualified Data.ByteString.Lazy as L
import qualified Data.Map as M
import System.Posix.Process (getProcessID)
import Common.Annex
import Types.Remote
@ -44,21 +45,25 @@ gen r u c = do
return $ encryptableRemote c
(storeEncrypted o)
(retrieveEncrypted o)
Remote {
uuid = u,
cost = cst,
name = Git.repoDescribe r,
storeKey = store o,
retrieveKeyFile = retrieve o,
retrieveKeyFileCheap = retrieveCheap o,
removeKey = remove o,
hasKey = checkPresent r o,
hasKeyCheap = False,
whereisKey = Nothing,
config = Nothing,
repo = r,
remotetype = remote
}
Remote
{ uuid = u
, cost = cst
, name = Git.repoDescribe r
, storeKey = store o
, retrieveKeyFile = retrieve o
, retrieveKeyFileCheap = retrieveCheap o
, removeKey = remove o
, hasKey = checkPresent r o
, hasKeyCheap = False
, whereisKey = Nothing
, config = Nothing
, repo = r
, localpath = if rsyncUrlIsPath $ rsyncUrl o
then Just $ rsyncUrl o
else Nothing
, readonly = False
, remotetype = remote
}
genRsyncOpts :: Git.Repo -> Maybe RemoteConfig -> Annex RsyncOpts
genRsyncOpts r c = do

View file

@ -60,6 +60,8 @@ gen' r u c cst =
whereisKey = Nothing,
config = c,
repo = r,
localpath = Nothing,
readonly = False,
remotetype = remote
}
@ -210,12 +212,12 @@ s3Action r noconn action = do
_ -> return noconn
bucketFile :: Remote -> Key -> FilePath
bucketFile r = munge . show
bucketFile r = munge . key2file
where
munge s = case M.lookup "mungekeys" c of
Just "ia" -> iaMunge $ prefix ++ s
_ -> prefix ++ s
prefix = M.findWithDefault "" "fileprefix" c
Just "ia" -> iaMunge $ fileprefix ++ s
_ -> fileprefix ++ s
fileprefix = M.findWithDefault "" "fileprefix" c
c = fromJust $ config r
bucketKey :: Remote -> String -> Key -> S3Object

View file

@ -47,7 +47,9 @@ gen r _ _ =
hasKeyCheap = False,
whereisKey = Just getUrls,
config = Nothing,
localpath = Nothing,
repo = r,
readonly = True,
remotetype = remote
}

View file

@ -82,7 +82,7 @@ withFilesUnlocked' typechanged a params = do
withKeys :: (Key -> CommandStart) -> CommandSeek
withKeys a params = return $ map (a . parse) params
where
parse p = fromMaybe (error "bad key") $ readKey p
parse p = fromMaybe (error "bad key") $ file2key p
withValue :: Annex v -> (v -> CommandSeek) -> CommandSeek
withValue v a params = do
@ -108,9 +108,9 @@ withNothing _ _ = error "This command takes no parameters."
prepFiltered :: (FilePath -> CommandStart) -> Annex [FilePath] -> Annex [CommandStart]
prepFiltered a fs = do
matcher <- Limit.getMatcher
map (proc matcher) <$> fs
map (process matcher) <$> fs
where
proc matcher f = do
process matcher f = do
ok <- matcher f
if ok then a f else return Nothing

View file

@ -10,6 +10,7 @@ import Distribution.PackageDescription (PackageDescription(..))
import Distribution.Verbosity (Verbosity)
import System.FilePath
import qualified Build.InstallDesktopFile as InstallDesktopFile
import qualified Build.Configure as Configure
main = defaultMainWithHooks simpleUserHooks
@ -25,6 +26,7 @@ myPostInst :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO
myPostInst _ (InstallFlags { installVerbosity }) pkg lbi = do
installGitAnnexShell dest verbosity pkg lbi
installManpages dest verbosity pkg lbi
installDesktopFile dest verbosity pkg lbi
where
dest = NoCopyDest
verbosity = fromFlag installVerbosity
@ -47,3 +49,9 @@ installManpages copyDest verbosity pkg lbi =
srcManpages = zip (repeat srcManDir) manpages
srcManDir = ""
manpages = ["git-annex.1", "git-annex-shell.1"]
installDesktopFile :: CopyDest -> Verbosity -> PackageDescription -> LocalBuildInfo -> IO ()
installDesktopFile copyDest verbosity pkg lbi =
InstallDesktopFile.writeDesktop $ dstBinDir </> "git-annex"
where
dstBinDir = bindir $ absoluteInstallDirs pkg lbi copyDest

View file

@ -10,9 +10,10 @@
module Types.Key (
Key(..),
stubKey,
readKey,
key2file,
file2key,
prop_idempotent_key_read_show
prop_idempotent_key_encode
) where
import System.Posix.Types
@ -26,7 +27,7 @@ data Key = Key {
keyBackendName :: String,
keySize :: Maybe Integer,
keyMtime :: Maybe EpochTime
} deriving (Eq, Ord)
} deriving (Eq, Ord, Read, Show)
stubKey :: Key
stubKey = Key {
@ -39,21 +40,21 @@ stubKey = Key {
fieldSep :: Char
fieldSep = '-'
{- Keys show as strings that are suitable for use as filenames.
{- Converts a key into a string that is suitable for use as a filename.
- The name field is always shown last, separated by doubled fieldSeps,
- and is the only field allowed to contain the fieldSep. -}
instance Show Key where
show Key { keyBackendName = b, keySize = s, keyMtime = m, keyName = n } =
b +++ ('s' ?: s) +++ ('m' ?: m) +++ (fieldSep : n)
where
"" +++ y = y
x +++ "" = x
x +++ y = x ++ fieldSep:y
c ?: (Just v) = c : show v
_ ?: _ = ""
key2file :: Key -> FilePath
key2file Key { keyBackendName = b, keySize = s, keyMtime = m, keyName = n } =
b +++ ('s' ?: s) +++ ('m' ?: m) +++ (fieldSep : n)
where
"" +++ y = y
x +++ "" = x
x +++ y = x ++ fieldSep:y
c ?: (Just v) = c : show v
_ ?: _ = ""
readKey :: String -> Maybe Key
readKey s = if key == Just stubKey then Nothing else key
file2key :: FilePath -> Maybe Key
file2key s = if key == Just stubKey then Nothing else key
where
key = startbackend stubKey s
@ -73,5 +74,5 @@ readKey s = if key == Just stubKey then Nothing else key
addfield 'm' k v = Just k { keyMtime = readish v }
addfield _ _ _ = Nothing
prop_idempotent_key_read_show :: Key -> Bool
prop_idempotent_key_read_show k = Just k == (readKey . show) k
prop_idempotent_key_encode :: Key -> Bool
prop_idempotent_key_encode k = Just k == (file2key . key2file) k
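To make the rename concrete, a small sketch of the new encoding round-trip, assuming the Types.Key module above is compiled; the key value is hypothetical:

import Types.Key

-- A hypothetical key, built from stubKey just for demonstration.
demoKey :: Key
demoKey = stubKey
	{ keyBackendName = "SHA256"
	, keyName = "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15"
	, keySize = Just 1048576
	}

main :: IO ()
main = do
	putStrLn $ key2file demoKey
	-- prints: SHA256-s1048576--f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
	print $ file2key (key2file demoKey) == Just demoKey
	-- prints: True, which is exactly what prop_idempotent_key_encode checks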

View file

@ -64,6 +64,10 @@ data RemoteA a = Remote {
config :: Maybe RemoteConfig,
-- git configuration for the remote
repo :: Git.Repo,
-- a Remote can be associated with a specific local filesystem path
localpath :: Maybe FilePath,
-- a Remote can be known to be readonly
readonly :: Bool,
-- the type of the remote
remotetype :: RemoteTypeA a
}

View file

@ -9,7 +9,7 @@ module Types.UUID where
-- A UUID is either an arbitrary opaque string, or UUID info may be missing.
data UUID = NoUUID | UUID String
deriving (Eq, Ord, Show)
deriving (Eq, Ord, Show, Read)
fromUUID :: UUID -> String
fromUUID (UUID u) = u

View file

@ -142,7 +142,7 @@ oldlog2key l
-- as the v2 key that it is.
readKey1 :: String -> Key
readKey1 v
| mixup = fromJust $ readKey $ join ":" $ Prelude.tail bits
| mixup = fromJust $ file2key $ join ":" $ Prelude.tail bits
| otherwise = Key
{ keyName = n
, keyBackendName = b

View file

@ -13,23 +13,23 @@ module Utility.CoProcess (
query
) where
import System.Cmd.Utils
import Common
type CoProcessHandle = (PipeHandle, Handle, Handle)
type CoProcessHandle = (ProcessHandle, Handle, Handle, CreateProcess)
start :: FilePath -> [String] -> IO CoProcessHandle
start command params = hPipeBoth command params
start command params = do
(from, to, _err, pid) <- runInteractiveProcess command params Nothing Nothing
return (pid, to, from, proc command params)
stop :: CoProcessHandle -> IO ()
stop (pid, from, to) = do
stop (pid, from, to, p) = do
hClose to
hClose from
forceSuccess pid
forceSuccessProcess p pid
query :: CoProcessHandle -> (Handle -> IO a) -> (Handle -> IO b) -> IO b
query (_, from, to) send receive = do
query (_, from, to, _) send receive = do
_ <- send to
hFlush to
receive from

28
Utility/DBus.hs Normal file
View file

@ -0,0 +1,28 @@
{- DBus utilities
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
{-# LANGUAGE OverloadedStrings #-}
module Utility.DBus where
import DBus.Client
import DBus
import Data.Maybe
type ServiceName = String
listServiceNames :: Client -> IO [ServiceName]
listServiceNames client = do
reply <- callDBus client "ListNames" []
return $ fromMaybe [] $ fromVariant (methodReturnBody reply !! 0)
callDBus :: Client -> MemberName -> [Variant] -> IO MethodReturn
callDBus client name params = call_ client $
(methodCall "/org/freedesktop/DBus" "org.freedesktop.DBus" name)
{ methodCallDestination = Just "org.freedesktop.DBus"
, methodCallBody = params
}
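For context, a minimal sketch (not part of the commit) of using the new helper, assuming the dbus package's DBus.Client.connectSession and a running session bus:

import DBus.Client (connectSession)
import Utility.DBus (listServiceNames)

main :: IO ()
main = do
	client <- connectSession  -- requires a session bus to be available
	names <- listServiceNames client
	mapM_ putStrLn names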

View file

@ -27,7 +27,7 @@ daemonize logfd pidfile changedirectory a = do
_ <- forkProcess child2
out
child2 = do
maybe noop (lockPidFile alreadyrunning) pidfile
maybe noop lockPidFile pidfile
when changedirectory $
setCurrentDirectory "/"
nullfd <- openFd "/dev/null" ReadOnly Nothing defaultFileFlags
@ -39,47 +39,57 @@ daemonize logfd pidfile changedirectory a = do
redir newh h = do
closeFd h
dupTo newh h
alreadyrunning = error "Daemon is already running."
out = exitImmediately ExitSuccess
{- Locks the pid file, with an exclusive, non-blocking lock.
- Runs an action on failure. On success, writes the pid to the file,
- fully atomically. -}
lockPidFile :: IO () -> FilePath -> IO ()
lockPidFile onfailure file = do
- Writes the pid to the file, fully atomically.
- Fails if the pid file is already locked by another process. -}
lockPidFile :: FilePath -> IO ()
lockPidFile file = do
createDirectoryIfMissing True (parentDir file)
fd <- openFd file ReadWrite (Just stdFileMode) defaultFileFlags
locked <- catchMaybeIO $ setLock fd (WriteLock, AbsoluteSeek, 0, 0)
fd' <- openFd newfile ReadWrite (Just stdFileMode) defaultFileFlags
{ trunc = True }
locked' <- catchMaybeIO $ setLock fd' (WriteLock, AbsoluteSeek, 0, 0)
case (locked, locked') of
(Nothing, _) -> onfailure
(_, Nothing) -> onfailure
(Nothing, _) -> alreadyrunning
(_, Nothing) -> alreadyrunning
_ -> do
_ <- fdWrite fd' =<< show <$> getProcessID
renameFile newfile file
closeFd fd
where
newfile = file ++ ".new"
alreadyrunning = error "Daemon is already running."
{- Stops the daemon.
{- Checks if the daemon is running, by checking that the pid file
- is locked by the same process that is listed in the pid file.
-
- The pid file is used to get the daemon's pid.
-
- To guard against a stale pid, check the lock of the pid file,
- and compare the process that has it locked with the file content.
-}
stopDaemon :: FilePath -> IO ()
stopDaemon pidfile = do
fd <- openFd pidfile ReadOnly (Just stdFileMode) defaultFileFlags
locked <- getLock fd (ReadLock, AbsoluteSeek, 0, 0)
p <- readish <$> readFile pidfile
case (locked, p) of
(Nothing, _) -> noop
(_, Nothing) -> noop
(Just (pid, _), Just pid')
| pid == pid' -> signalProcess sigTERM pid
| otherwise -> error $
- If it's running, returns its pid. -}
checkDaemon :: FilePath -> IO (Maybe ProcessID)
checkDaemon pidfile = do
v <- catchMaybeIO $
openFd pidfile ReadOnly (Just stdFileMode) defaultFileFlags
case v of
Just fd -> do
locked <- getLock fd (ReadLock, AbsoluteSeek, 0, 0)
p <- readish <$> readFile pidfile
return $ check locked p
Nothing -> return Nothing
where
check Nothing _ = Nothing
check _ Nothing = Nothing
check (Just (pid, _)) (Just pid')
| pid == pid' = Just pid
| otherwise = error $
"stale pid in " ++ pidfile ++
" (got " ++ show pid' ++
"; expected" ++ show pid ++ " )"
"; expected " ++ show pid ++ " )"
{- Stops the daemon, safely. -}
stopDaemon :: FilePath -> IO ()
stopDaemon pidfile = go =<< checkDaemon pidfile
where
go Nothing = noop
go (Just pid) = signalProcess sigTERM pid
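A minimal sketch (not part of the commit) of how a caller might use the new checkDaemon before deciding whether to start a daemon, assuming Utility.Daemon is in scope; the pid file path is hypothetical:

import Utility.Daemon (checkDaemon)

main :: IO ()
main = do
	running <- checkDaemon ".git/annex/daemon.pid"
	case running of
		Just pid -> putStrLn $ "daemon already running, pid " ++ show pid
		Nothing -> putStrLn "no daemon running; safe to start one"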

View file

@ -17,10 +17,10 @@ import Utility.Types.DirWatcher
#if WITH_INOTIFY
import qualified Utility.INotify as INotify
import qualified System.INotify as INotify
import Utility.ThreadScheduler
#endif
#if WITH_KQUEUE
import qualified Utility.Kqueue as Kqueue
import Control.Concurrent
#endif
type Pruner = FilePath -> Bool
@ -72,19 +72,41 @@ closingTracked = undefined
#endif
#endif
{- Starts a watcher thread. The runStartup action is passed a scanner action
 - to run, that will return once the initial directory scan is complete.
 - Once runStartup returns, the watcher thread continues running,
 - and processing events. Returns a DirWatcherHandle that can be used
 - to shut down later. -}
#if WITH_INOTIFY
watchDir :: FilePath -> Pruner -> WatchHooks -> (IO () -> IO ()) -> IO ()
watchDir dir prune hooks runstartup = INotify.withINotify $ \i -> do
type DirWatcherHandle = INotify.INotify
watchDir :: FilePath -> Pruner -> WatchHooks -> (IO () -> IO ()) -> IO DirWatcherHandle
watchDir dir prune hooks runstartup = do
i <- INotify.initINotify
runstartup $ INotify.watchDir i dir prune hooks
waitForTermination -- Let the inotify thread run.
return i
#else
#if WITH_KQUEUE
watchDir :: FilePath -> Pruner -> WatchHooks -> (IO Kqueue.Kqueue -> IO Kqueue.Kqueue) -> IO ()
type DirWatcherHandle = ThreadId
watchDir :: FilePath -> Pruner -> WatchHooks -> (IO Kqueue.Kqueue -> IO Kqueue.Kqueue) -> IO DirWatcherHandle
watchDir dir ignored hooks runstartup = do
kq <- runstartup $ Kqueue.initKqueue dir ignored
Kqueue.runHooks kq hooks
forkIO $ Kqueue.runHooks kq hooks
#else
watchDir :: FilePath -> Pruner -> WatchHooks -> (IO () -> IO ()) -> IO ()
type DirWatcherHandle = ()
watchDir :: FilePath -> Pruner -> WatchHooks -> (IO () -> IO ()) -> IO DirWatcherHandle
watchDir = undefined
#endif
#endif
#if WITH_INOTIFY
stopWatchDir :: DirWatcherHandle -> IO ()
stopWatchDir = INotify.killINotify
#else
#if WITH_KQUEUE
stopWatchDir :: DirWatcherHandle -> IO ()
stopWatchDir = killThread
#else
stopWatchDir :: DirWatcherHandle -> IO ()
stopWatchDir = undefined
#endif
#endif

View file

@ -15,7 +15,7 @@ import Foreign.C.Types
import Foreign.C.String
import Foreign.C.Error
foreign import ccall unsafe "libdiskfree.h diskfree" c_diskfree
foreign import ccall safe "libdiskfree.h diskfree" c_diskfree
:: CString -> IO CULLong
getDiskFree :: FilePath -> IO (Maybe Integer)

126
Utility/FreeDesktop.hs Normal file
View file

@ -0,0 +1,126 @@
{- Freedesktop.org specifications
-
- http://standards.freedesktop.org/basedir-spec/latest/
- http://standards.freedesktop.org/desktop-entry-spec/latest/
- http://standards.freedesktop.org/menu-spec/latest/
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Utility.FreeDesktop (
DesktopEntry,
genDesktopEntry,
buildDesktopMenuFile,
writeDesktopMenuFile,
desktopMenuFilePath,
autoStartPath,
systemDataDir,
systemConfigDir,
userDataDir,
userConfigDir,
userDesktopDir
) where
import Utility.Exception
import Utility.Path
import Utility.Process
import Utility.PartialPrelude
import System.Environment
import System.Directory
import System.FilePath
import Data.List
import Data.String.Utils
import Control.Applicative
type DesktopEntry = [(Key, Value)]
type Key = String
data Value = StringV String | BoolV Bool | NumericV Float | ListV [Value]
toString :: Value -> String
toString (StringV s) = s
toString (BoolV b)
| b = "true"
| otherwise = "false"
toString (NumericV f) = show f
toString (ListV l)
| null l = ""
| otherwise = (intercalate ";" $ map (escapesemi . toString) l) ++ ";"
where
escapesemi = join "\\;" . split ";"
genDesktopEntry :: String -> String -> Bool -> FilePath -> [String] -> DesktopEntry
genDesktopEntry name comment terminal program categories =
[ item "Type" StringV "Application"
, item "Version" NumericV 1.0
, item "Name" StringV name
, item "Comment" StringV comment
, item "Terminal" BoolV terminal
, item "Exec" StringV program
, item "Categories" ListV (map StringV categories)
]
where
item x c y = (x, c y)
buildDesktopMenuFile :: DesktopEntry -> String
buildDesktopMenuFile d = unlines ("[Desktop Entry]" : map keyvalue d) ++ "\n"
where
keyvalue (k, v) = k ++ "=" ++ toString v
writeDesktopMenuFile :: DesktopEntry -> String -> IO ()
writeDesktopMenuFile d file = do
createDirectoryIfMissing True (parentDir file)
writeFile file $ buildDesktopMenuFile d
{- Path to use for a desktop menu file, in either the systemDataDir or
- the userDataDir -}
desktopMenuFilePath :: String -> FilePath -> FilePath
desktopMenuFilePath basename datadir =
datadir </> "applications" </> desktopfile basename
{- Path to use for a desktop autostart file, in either the systemDataDir
- or the userDataDir -}
autoStartPath :: String -> FilePath -> FilePath
autoStartPath basename configdir =
configdir </> "autostart" </> desktopfile basename
desktopfile :: FilePath -> FilePath
desktopfile f = f ++ ".desktop"
{- Directory used for installation of system-wide data files. -}
systemDataDir :: FilePath
systemDataDir = "/usr/share"
{- Directory used for installation of system wide config files. -}
systemConfigDir :: FilePath
systemConfigDir = "/etc/xdg"
{- Directory for user data files. -}
userDataDir :: IO FilePath
userDataDir = xdgEnvHome "DATA_HOME" ".local/share"
{- Directory for user config files. -}
userConfigDir :: IO FilePath
userConfigDir = xdgEnvHome "CONFIG_HOME" ".config"
{- Directory for the user's Desktop, may be localized.
-
- This is not looked up very fast; the config file is in a shell format
- that is best parsed by shell, so xdg-user-dir is used, with a fallback
- to ~/Desktop. -}
userDesktopDir :: IO FilePath
userDesktopDir = maybe fallback return =<< (parse <$> xdg_user_dir)
where
parse = maybe Nothing (headMaybe . lines)
xdg_user_dir = catchMaybeIO $
readProcess "xdg-user-dir" ["DESKTOP"]
fallback = xdgEnvHome "DESKTOP_DIR" "Desktop"
xdgEnvHome :: String -> String -> IO String
xdgEnvHome envbase homedef = do
home <- myHomeDir
catchDefaultIO (getEnv $ "XDG_" ++ envbase) (home </> homedef)
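To show how the new module fits together, a minimal sketch (not part of the commit) that generates and installs a menu entry; all of the entry strings below are hypothetical example values:

import Utility.FreeDesktop

main :: IO ()
main = do
	datadir <- userDataDir
	-- name, comment, terminal?, program, categories
	let entry = genDesktopEntry "Git Annex" "Track and sync files" False "/usr/bin/git-annex webapp" ["Network"]
	writeDesktopMenuFile entry $ desktopMenuFilePath "git-annex" datadir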

View file

@ -11,8 +11,7 @@ import qualified Data.ByteString.Lazy as L
import System.Posix.Types
import Control.Applicative
import Control.Concurrent
import Control.Exception (finally, bracket)
import System.Exit
import Control.Exception (bracket)
import System.Posix.Env (setEnv, unsetEnv, getEnv)
import Common
@ -39,18 +38,21 @@ stdParams params = do
readStrict :: [CommandParam] -> IO String
readStrict params = do
params' <- stdParams params
pOpen ReadFromPipe "gpg" params' hGetContentsStrict
withHandle StdoutHandle createProcessSuccess (proc "gpg" params') $ \h -> do
hSetBinaryMode h True
hGetContentsStrict h
{- Runs gpg, piping an input value to it, and returning its stdout,
- strictly. -}
pipeStrict :: [CommandParam] -> String -> IO String
pipeStrict params input = do
params' <- stdParams params
(pid, fromh, toh) <- hPipeBoth "gpg" params'
_ <- forkIO $ finally (hPutStr toh input) (hClose toh)
output <- hGetContentsStrict fromh
forceSuccess pid
return output
withBothHandles createProcessSuccess (proc "gpg" params') $ \(to, from) -> do
hSetBinaryMode to True
hSetBinaryMode from True
hPutStr to input
hClose to
hGetContentsStrict from
{- Runs gpg with some parameters, first feeding it a passphrase via
- --passphrase-fd, then feeding it an input, and passing a handle
@ -70,19 +72,13 @@ passphraseHandle params passphrase a b = do
let passphrasefd = [Param "--passphrase-fd", Param $ show pfd]
params' <- stdParams $ passphrasefd ++ params
(pid, fromh, toh) <- hPipeBoth "gpg" params'
pid2 <- forkProcess $ do
L.hPut toh =<< a
hClose toh
exitSuccess
hClose toh
ret <- b fromh
-- cleanup
forceSuccess pid
_ <- getProcessStatus True False pid2
closeFd frompipe
return ret
closeFd frompipe `after`
withBothHandles createProcessSuccess (proc "gpg" params') go
where
go (to, from) = do
L.hPut to =<< a
hClose to
b from
{- Finds gpg public keys matching some string. (Could be an email address,
- a key id, or a name.) -}

View file

@ -160,12 +160,9 @@ tooManyWatches hook dir = do
querySysctl :: Read a => [CommandParam] -> IO (Maybe a)
querySysctl ps = do
v <- catchMaybeIO $ hPipeFrom "sysctl" $ toCommand ps
v <- catchMaybeIO $ readProcess "sysctl" (toCommand ps)
case v of
Nothing -> return Nothing
Just (pid, h) -> do
val <- parsesysctl <$> hGetContentsStrict h
void $ getProcessStatus True False $ processID pid
return val
Just s -> return $ parsesysctl s
where
parsesysctl s = readish =<< lastMaybe (words s)

View file

@ -14,8 +14,6 @@ module Utility.Kqueue (
waitChange,
Change(..),
changedFile,
isAdd,
isDelete,
runHooks,
) where
@ -34,19 +32,19 @@ import Control.Concurrent
data Change
= Deleted FilePath
| DeletedDir FilePath
| Added FilePath
deriving (Show)
isAdd :: Change -> Bool
isAdd (Added _) = True
isAdd (Deleted _) = False
isDelete :: Change -> Bool
isDelete = not . isAdd
isAdd (DeletedDir _) = False
changedFile :: Change -> FilePath
changedFile (Added f) = f
changedFile (Deleted f) = f
changedFile (DeletedDir f) = f
data Kqueue = Kqueue
{ kqueueFd :: Fd
@ -59,27 +57,43 @@ type Pruner = FilePath -> Bool
type DirMap = M.Map Fd DirInfo
{- A directory, and its last known contents (with filenames relative to it) -}
{- Enough information to uniquely identify a file in a directory,
- but not too much. -}
data DirEnt = DirEnt
{ dirEnt :: FilePath -- relative to the parent directory
, _dirInode :: FileID -- included to notice file replacements
, isSubDir :: Bool
}
deriving (Eq, Ord, Show)
{- A directory, and its last known contents. -}
data DirInfo = DirInfo
{ dirName :: FilePath
, dirCache :: S.Set FilePath
, dirCache :: S.Set DirEnt
}
deriving (Show)
getDirInfo :: FilePath -> IO DirInfo
getDirInfo dir = do
contents <- S.fromList . filter (not . dirCruft)
<$> getDirectoryContents dir
l <- filter (not . dirCruft) <$> getDirectoryContents dir
contents <- S.fromList . catMaybes <$> mapM getDirEnt l
return $ DirInfo dir contents
where
getDirEnt f = catchMaybeIO $ do
s <- getFileStatus (dir </> f)
return $ DirEnt f (fileID s) (isDirectory s)
{- Difference between the dirCaches of two DirInfos. -}
(//) :: DirInfo -> DirInfo -> [Change]
oldc // newc = deleted ++ added
where
deleted = calc Deleted oldc newc
added = calc Added newc oldc
calc a x y = map a . map (dirName x </>) $
S.toList $ S.difference (dirCache x) (dirCache y)
deleted = calc gendel oldc newc
added = calc genadd newc oldc
gendel x = (if isSubDir x then DeletedDir else Deleted) $
dirName oldc </> dirEnt x
genadd x = Added $ dirName newc </> dirEnt x
calc a x y = map a $ S.toList $
S.difference (dirCache x) (dirCache y)
{- Builds a map of directories in a tree, possibly pruning some.
- Opens each directory in the tree, and records its current contents. -}
@ -99,7 +113,7 @@ scanRecursive topdir prune = M.fromList <$> walk [] [topdir]
case mfd of
Nothing -> walk c rest
Just fd -> do
let subdirs = map (dir </>) $
let subdirs = map (dir </>) . map dirEnt $
S.toList $ dirCache info
walk ((fd, info):c) (subdirs ++ rest)
@ -123,15 +137,16 @@ removeSubDir dirmap dir = do
findDirContents :: DirMap -> FilePath -> [FilePath]
findDirContents dirmap dir = concatMap absolutecontents $ search
where
absolutecontents i = map (dirName i </>) (S.toList $ dirCache i)
absolutecontents i = map (dirName i </>)
(map dirEnt $ S.toList $ dirCache i)
search = map snd $ M.toList $
M.filter (\i -> dirName i == dir) dirmap
foreign import ccall unsafe "libkqueue.h init_kqueue" c_init_kqueue
foreign import ccall safe "libkqueue.h init_kqueue" c_init_kqueue
:: IO Fd
foreign import ccall unsafe "libkqueue.h addfds_kqueue" c_addfds_kqueue
foreign import ccall safe "libkqueue.h addfds_kqueue" c_addfds_kqueue
:: Fd -> CInt -> Ptr Fd -> IO ()
foreign import ccall unsafe "libkqueue.h waitchange_kqueue" c_waitchange_kqueue
foreign import ccall safe "libkqueue.h waitchange_kqueue" c_waitchange_kqueue
:: Fd -> IO Fd
{- Initializes a Kqueue to watch a directory, and all its subdirectories. -}
@ -224,12 +239,14 @@ runHooks kq hooks = do
(q', changes) <- waitChange q
forM_ changes $ dispatch (kqueueMap q')
loop q'
-- Kqueue returns changes for both whole directories
-- being added and deleted, and individual files being
-- added and deleted.
dispatch dirmap change
| isAdd change = withstatus change $ dispatchadd dirmap
| otherwise = callhook delDirHook Nothing change
dispatch _ change@(Deleted _) =
callhook delHook Nothing change
dispatch _ change@(DeletedDir _) =
callhook delDirHook Nothing change
dispatch dirmap change@(Added _) =
withstatus change $ dispatchadd dirmap
dispatchadd dirmap change s
| Files.isSymbolicLink s =
callhook addSymlinkHook (Just s) change
@ -237,12 +254,15 @@ runHooks kq hooks = do
| Files.isRegularFile s =
callhook addHook (Just s) change
| otherwise = noop
recursiveadd dirmap change = do
let contents = findDirContents dirmap $ changedFile change
forM_ contents $ \f ->
withstatus (Added f) $ dispatchadd dirmap
callhook h s change = case h hooks of
Nothing -> noop
Just a -> a (changedFile change) s
withstatus change a = maybe noop (a change) =<<
(catchMaybeIO (getSymbolicLinkStatus (changedFile change)))

Some files were not shown because too many files have changed in this diff.