tagging package git-annex version 4.20131002
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.14 (GNU/Linux)

iQIVAwUAUkx+F8kQ2SIlEuPHAQh38A//U0P0puOLMTiv0P9jlLNKWPbBwr4uqEII
BXvfa6L6Dq/0bUYuoPL2qEq8bjee26RXERCPG4xFy4FjWffR76l66t/oMvLI8ot4
oO098UyG4NqUqzK8iFXnb/WP8EcFUuPg/V2Jio4K6QIrZ3dsK0EcRyNlbNjuakp4
+/rN6ZnFEA0KEZ+4FUFW3NAlhgHqoAaUQIfUyYlyRD0fEwkXpImGkPPGVHyfELun
mTlJQyyVzJw16SCGfliWf4X+BA5UbSRPlb0lJm94MRkF8UIqStH0ZfA6LPf86b7S
PR9MfwOA3Y8858QIVI0prjIpVonUZ43ixhtA2/WpJVR8oUp/jKw/Njjcw4AHlWjI
n93d0/vlPcWCu0yFPP+M1mnKJ5c00wDDWM4AFMi6VZCFzOaIlSfjfUQpY3r8KBoD
6FwxOSiDgNtkRQPx20a3CPVKN8bJxn5+MxXJI84SVN4ynjb85y9Tb1nI2VEtI3aS
eIv36ahBSa9P4lgn/txLtTVZq6P9Xt3JFXrKarlUC3t2Rbe44q9MPbog5zmo37k0
GRvIU9XbmGJQfzNGtEeLYzkc5yT/dDadBeQuLnK21Z5idauETtYO32pA8Q6TQ3IN
b68skRJ6okHiC6g0KWM82ALelolcLO0++EUV+elq/7Xip6sPuxbIdoohl6juL7bu
PmkPE8+F5Xg=
=Gfcv
-----END PGP SIGNATURE-----

Merge tag '4.20131002' into debian-wheezy-backport

tagging package git-annex version 4.20131002

# gpg: Signature made Wed Oct 2 20:12:07 2013 UTC using RSA key ID 2512E3C7
# gpg: Can't check signature: public key not found
commit e420a00184
315 changed files with 7794 additions and 7916 deletions
Annex.hs (2 changed lines)
@@ -108,6 +108,7 @@ data AnnexState = AnnexState
, fields :: M.Map String String
, cleanup :: M.Map String (Annex ())
, inodeschanged :: Maybe Bool
, useragent :: Maybe String
}

newState :: Git.Repo -> AnnexState
@@ -141,6 +142,7 @@ newState gitrepo = AnnexState
, fields = M.empty
, cleanup = M.empty
, inodeschanged = Nothing
, useragent = Nothing
}

{- Makes an Annex state object for the specified git repo.

@@ -25,7 +25,7 @@ checkIgnoreHandle :: Annex (Maybe Git.CheckIgnoreHandle)
checkIgnoreHandle = maybe startup return =<< Annex.getState Annex.checkignorehandle
where
startup = do
v <- inRepo $ Git.checkIgnoreStart
v <- inRepo Git.checkIgnoreStart
when (isNothing v) $
warning "The installed version of git is too old for .gitignores to be honored by git-annex."
Annex.changeState $ \s -> s { Annex.checkignorehandle = Just v }

@@ -43,7 +43,7 @@ import qualified Annex.Queue
import qualified Annex.Branch
import Utility.DiskFree
import Utility.FileMode
import qualified Utility.Url as Url
import qualified Annex.Url as Url
import Types.Key
import Utility.DataUnits
import Utility.CopyFile
@@ -275,7 +275,7 @@ moveAnnex key src = withObjectLoc key storeobject storedirect
thawContentDir =<< calcRepo (gitAnnexLocation key)
thawContent src
v <- isAnnexLink f
if (Just key == v)
if Just key == v
then do
updateInodeCache key src
replaceFile f $ liftIO . moveFile src
@@ -458,7 +458,7 @@ downloadUrl urls file = go =<< annexWebDownloadCommand <$> Annex.getGitConfig
go Nothing = do
opts <- map Param . annexWebOptions <$> Annex.getGitConfig
headers <- getHttpHeaders
liftIO $ anyM (\u -> Url.download u headers opts file) urls
anyM (\u -> Url.withUserAgent $ Url.download u headers opts file) urls
go (Just basecmd) = liftIO $ anyM (downloadcmd basecmd) urls
downloadcmd basecmd url =
boolSystem "sh" [Param "-c", Param $ gencmd url basecmd]

@@ -199,7 +199,7 @@ compareInodeCachesWith = ifM inodesChanged ( return Weakly, return Strongly )
addContentWhenNotPresent :: Key -> FilePath -> FilePath -> Annex ()
addContentWhenNotPresent key contentfile associatedfile = do
v <- isAnnexLink associatedfile
when (Just key == v) $ do
when (Just key == v) $
replaceFile associatedfile $
liftIO . void . copyFileExternal contentfile
updateInodeCache key associatedfile

@@ -32,7 +32,7 @@ import Utility.Env
checkEnvironment :: Annex ()
checkEnvironment = do
gitusername <- fromRepo $ Git.Config.getMaybe "user.name"
when (gitusername == Nothing || gitusername == Just "") $
when (isNothing gitusername || gitusername == Just "") $
liftIO checkEnvironmentIO

checkEnvironmentIO :: IO ()

@@ -13,6 +13,7 @@
module Annex.Exception (
bracketIO,
tryAnnex,
tryAnnexIO,
throwAnnex,
catchAnnex,
) where
@@ -24,12 +25,16 @@ import Common.Annex

{- Runs an Annex action, with setup and cleanup both in the IO monad. -}
bracketIO :: IO v -> (v -> IO b) -> (v -> Annex a) -> Annex a
bracketIO setup cleanup go = M.bracket (liftIO setup) (liftIO . cleanup) go
bracketIO setup cleanup = M.bracket (liftIO setup) (liftIO . cleanup)

{- try in the Annex monad -}
tryAnnex :: Annex a -> Annex (Either SomeException a)
tryAnnex = M.try

{- try in the Annex monad, but only catching IO exceptions -}
tryAnnexIO :: Annex a -> Annex (Either IOException a)
tryAnnexIO = M.try

{- throw in the Annex monad -}
throwAnnex :: Exception e => e -> Annex a
throwAnnex = M.throw
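Aside (not part of this commit): a minimal sketch of how the eta-reduced bracketIO above is typically used, managing an IO resource from Annex code. The file path and helper name are made up for illustration.

    import System.IO
    -- Open a handle in IO, hand it to an Annex action, and guarantee hClose runs
    -- (assumes bracketIO from Annex.Exception as defined above).
    withLogHandle :: (Handle -> Annex a) -> Annex a
    withLogHandle = bracketIO (openFile "/tmp/example.log" ReadMode) hClose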
@@ -68,9 +68,9 @@ getAnnexLinkTarget file = ifM (coreSymlinks <$> Annex.getGitConfig)
-- characters, or whitespace, we
-- certianly don't have a link to a
-- git-annex key.
if any (`elem` s) "\0\n\r \t"
then return ""
else return s
return $ if any (`elem` s) "\0\n\r \t"
then ""
else s

{- Creates a link on disk.
-

@@ -14,7 +14,7 @@ import qualified Annex
import Utility.Quvi
import Utility.Url

withQuviOptions :: forall a. (Query a) -> [CommandParam] -> URLString -> Annex a
withQuviOptions :: forall a. Query a -> [CommandParam] -> URLString -> Annex a
withQuviOptions a ps url = do
opts <- map Param . annexQuviOptions <$> Annex.getGitConfig
liftIO $ a (ps++opts) url

@@ -42,7 +42,7 @@ sshCachingOptions (host, port) opts = go =<< sshInfo (host, port)
-- If the lock pool is empty, this is the first ssh of this
-- run. There could be stale ssh connections hanging around
-- from a previous git-annex run that was interrupted.
cleanstale = whenM (not . any isLock . M.keys <$> getPool) $
cleanstale = whenM (not . any isLock . M.keys <$> getPool)
sshCleanup

{- Returns a filename to use for a ssh connection caching socket, and
@@ -57,9 +57,9 @@ sshInfo (host, port) = go =<< sshCacheDir
then return (Just socketfile, sshConnectionCachingParams socketfile)
else do
socketfile' <- liftIO $ relPathCwdToFile socketfile
if valid_unix_socket_path socketfile'
then return (Just socketfile', sshConnectionCachingParams socketfile')
else return (Nothing, [])
return $ if valid_unix_socket_path socketfile'
then (Just socketfile', sshConnectionCachingParams socketfile')
else (Nothing, [])

sshConnectionCachingParams :: FilePath -> [CommandParam]
sshConnectionCachingParams socketfile =

Annex/Url.hs (new file, 27 lines)

@@ -0,0 +1,27 @@
{- Url downloading, with git-annex user agent.
-
- Copyright 2013 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

module Annex.Url (
module U,
withUserAgent,
getUserAgent,
) where

import Common.Annex
import qualified Annex
import Utility.Url as U
import qualified Build.SysConfig as SysConfig

defaultUserAgent :: U.UserAgent
defaultUserAgent = "git-annex/" ++ SysConfig.packageversion

getUserAgent :: Annex (Maybe U.UserAgent)
getUserAgent = Annex.getState $
Just . fromMaybe defaultUserAgent . Annex.useragent

withUserAgent :: (Maybe U.UserAgent -> IO a) -> Annex a
withUserAgent a = liftIO . a =<< getUserAgent
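Aside (not part of this commit): a sketch of how a caller consumes the new Annex.Url module, mirroring the downloadUrl and Url.exists call sites changed elsewhere in this diff. checkUrl is a hypothetical helper name.

    -- Assumes: import qualified Annex.Url as Url, plus catchDefaultIO from
    -- Utility.Exception; Url.exists takes the user agent as shown in the
    -- Assistant.WebApp.Configurators.IA change later in this diff.
    checkUrl :: URLString -> Annex Bool
    checkUrl url = Url.withUserAgent $ \ua ->
        catchDefaultIO False $ fst <$> Url.exists url [] ua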
Assistant/Gpg.hs (new file, 36 lines)

@@ -0,0 +1,36 @@
{- git-annex assistant gpg stuff
-
- Copyright 2013 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU AGPL version 3 or higher.
-}

{-# LANGUAGE QuasiQuotes, TemplateHaskell, OverloadedStrings #-}

module Assistant.Gpg where

import Utility.Gpg
import Utility.UserInfo
import Types.Remote (RemoteConfigKey)

import qualified Data.Map as M

{- Generates a gpg user id that is not used by any existing secret key -}
newUserId :: IO UserId
newUserId = do
oldkeys <- secretKeys
username <- myUserName
let basekeyname = username ++ "'s git-annex encryption key"
return $ Prelude.head $ filter (\n -> M.null $ M.filter (== n) oldkeys)
( basekeyname
: map (\n -> basekeyname ++ show n) ([2..] :: [Int])
)

data EnableEncryption = HybridEncryption | SharedEncryption | NoEncryption
deriving (Eq)

{- Generates Remote configuration for encryption. -}
configureEncryption :: EnableEncryption -> (RemoteConfigKey, String)
configureEncryption SharedEncryption = ("encryption", "shared")
configureEncryption NoEncryption = ("encryption", "none")
configureEncryption HybridEncryption = ("encryption", "hybrid")
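Aside (not part of this commit): configureEncryption produces one (key, value) pair of a special remote's configuration, so it slots directly into an M.fromList, exactly as the makeGCryptRemote helper added in Assistant.MakeRemote below does. A hypothetical sketch:

    -- Build a gcrypt special-remote config with hybrid encryption
    -- (RemoteConfigKey is a String alias; KeyId comes from Utility.Gpg).
    gcryptConfig :: String -> KeyId -> M.Map RemoteConfigKey String
    gcryptConfig location keyid = M.fromList
        [ ("type", "gcrypt")
        , ("gitrepo", location)
        , configureEncryption HybridEncryption
        , ("keyid", keyid)
        ]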
@@ -9,49 +9,31 @@ module Assistant.MakeRemote where

import Assistant.Common
import Assistant.Ssh
import Assistant.Sync
import qualified Types.Remote as R
import qualified Remote
import Remote.List
import qualified Remote.Rsync as Rsync
import qualified Remote.GCrypt as GCrypt
import qualified Git
import qualified Git.Command
import qualified Command.InitRemote
import Logs.UUID
import Logs.Remote
import Git.Remote
import Config
import Config.Cost
import Creds
import Assistant.Gpg
import Utility.Gpg (KeyId)

import qualified Data.Text as T
import qualified Data.Map as M

{- Sets up and begins syncing with a new ssh or rsync remote. -}
makeSshRemote :: Bool -> SshData -> Maybe Cost -> Assistant Remote
makeSshRemote forcersync sshdata mcost = do
r <- liftAnnex $
addRemote $ maker (sshRepoName sshdata) sshurl
liftAnnex $ maybe noop (setRemoteCost r) mcost
syncRemote r
return r
{- Sets up a new git or rsync remote, accessed over ssh. -}
makeSshRemote :: SshData -> Annex RemoteName
makeSshRemote sshdata = maker (sshRepoName sshdata) (genSshUrl sshdata)
where
rsync = forcersync || rsyncOnly sshdata
maker
| rsync = makeRsyncRemote
| onlyCapability sshdata RsyncCapable = makeRsyncRemote
| otherwise = makeGitRemote
sshurl = T.unpack $ T.concat $
if rsync
then [u, h, T.pack ":", sshDirectory sshdata, T.pack "/"]
else [T.pack "ssh://", u, h, d, T.pack "/"]
where
u = maybe (T.pack "") (\v -> T.concat [v, T.pack "@"]) $ sshUserName sshdata
h = sshHostName sshdata
d
| T.pack "/" `T.isPrefixOf` sshDirectory sshdata = sshDirectory sshdata
| T.pack "~/" `T.isPrefixOf` sshDirectory sshdata = T.concat [T.pack "/", sshDirectory sshdata]
| otherwise = T.concat [T.pack "/~/", sshDirectory sshdata]

{- Runs an action that returns a name of the remote, and finishes adding it. -}
addRemote :: Annex RemoteName -> Annex Remote
addRemote a = do
@@ -74,6 +56,16 @@ makeRsyncRemote name location = makeRemote name location $ const $ void $
, ("type", "rsync")
]

{- Inits a gcrypt special remote, and returns its name. -}
makeGCryptRemote :: RemoteName -> String -> KeyId -> Annex RemoteName
makeGCryptRemote remotename location keyid =
initSpecialRemote remotename GCrypt.remote $ M.fromList
[ ("type", "gcrypt")
, ("gitrepo", location)
, configureEncryption HybridEncryption
, ("keyid", keyid)
]

type SpecialRemoteMaker = RemoteName -> RemoteType -> R.RemoteConfig -> Annex RemoteName

{- Inits a new special remote. The name is used as a suggestion, but
@@ -126,7 +118,6 @@ makeRemote basename location a = do
g <- gitRepo
if not (any samelocation $ Git.remotes g)
then do

let name = uniqueRemoteName basename 0 g
a name
return name

@@ -12,7 +12,9 @@ import Assistant.Ssh
import Assistant.Pairing
import Assistant.Pairing.Network
import Assistant.MakeRemote
import Assistant.Sync
import Config.Cost
import Config

import Network.Socket
import qualified Data.Text as T
@@ -22,7 +24,7 @@ import qualified Data.Text as T
setupAuthorizedKeys :: PairMsg -> FilePath -> IO ()
setupAuthorizedKeys msg repodir = do
validateSshPubKey pubkey
unlessM (liftIO $ addAuthorizedKeys False repodir pubkey) $
unlessM (liftIO $ addAuthorizedKeys True repodir pubkey) $
error "failed setting up ssh authorized keys"
where
pubkey = remoteSshPubKey $ pairMsgData msg
@@ -43,7 +45,9 @@ finishedLocalPairing msg keypair = do
, "git-annex-shell -c configlist " ++ T.unpack (sshDirectory sshdata)
]
Nothing
void $ makeSshRemote False sshdata (Just semiExpensiveRemoteCost)
r <- liftAnnex $ addRemote $ makeSshRemote sshdata
liftAnnex $ setRemoteCost r semiExpensiveRemoteCost
syncRemote r

{- Mostly a straightforward conversion. Except:
- * Determine the best hostname to use to contact the host.
@@ -63,7 +67,7 @@ pairMsgToSshData msg = do
, sshRepoName = genSshRepoName hostname dir
, sshPort = 22
, needsPubKey = True
, rsyncOnly = False
, sshCapabilities = [GitAnnexShellCapable, GitCapable, RsyncCapable]
}

{- Finds the best hostname to use for the host that sent the PairMsg.

@@ -1,6 +1,6 @@
{- git-annex assistant ssh utilities
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
- Copyright 2012-2013 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
@@ -11,6 +11,7 @@ import Common.Annex
import Utility.Tmp
import Utility.UserInfo
import Utility.Shell
import Utility.Rsync
import Git.Remote

import Data.Text (Text)
@@ -25,10 +26,19 @@ data SshData = SshData
, sshRepoName :: String
, sshPort :: Int
, needsPubKey :: Bool
, rsyncOnly :: Bool
, sshCapabilities :: [SshServerCapability]
}
deriving (Read, Show, Eq)

data SshServerCapability = GitAnnexShellCapable | GitCapable | RsyncCapable
deriving (Read, Show, Eq)

hasCapability :: SshData -> SshServerCapability -> Bool
hasCapability d c = c `elem` sshCapabilities d

onlyCapability :: SshData -> SshServerCapability -> Bool
onlyCapability d c = all (== c) (sshCapabilities d)

data SshKeyPair = SshKeyPair
{ sshPubKey :: String
, sshPrivKey :: String
@@ -52,6 +62,48 @@ sshDir = do
genSshHost :: Text -> Maybe Text -> String
genSshHost host user = maybe "" (\v -> T.unpack v ++ "@") user ++ T.unpack host

{- Generates a ssh or rsync url from a SshData. -}
genSshUrl :: SshData -> String
genSshUrl sshdata = addtrailingslash $ T.unpack $ T.concat $
if (onlyCapability sshdata RsyncCapable)
then [u, h, T.pack ":", sshDirectory sshdata]
else [T.pack "ssh://", u, h, d]
where
u = maybe (T.pack "") (\v -> T.concat [v, T.pack "@"]) $ sshUserName sshdata
h = sshHostName sshdata
d
| T.pack "/" `T.isPrefixOf` sshDirectory sshdata = sshDirectory sshdata
| T.pack "~/" `T.isPrefixOf` sshDirectory sshdata = T.concat [T.pack "/", sshDirectory sshdata]
| otherwise = T.concat [T.pack "/~/", sshDirectory sshdata]
addtrailingslash s
| "/" `isSuffixOf` s = s
| otherwise = s ++ "/"

{- Reverses genSshUrl -}
parseSshUrl :: String -> Maybe SshData
parseSshUrl u
| "ssh://" `isPrefixOf` u = fromssh (drop (length "ssh://") u)
| otherwise = fromrsync u
where
mkdata (userhost, dir) = Just $ SshData
{ sshHostName = T.pack host
, sshUserName = if null user then Nothing else Just $ T.pack user
, sshDirectory = T.pack dir
, sshRepoName = genSshRepoName host dir
-- dummy values, cannot determine from url
, sshPort = 22
, needsPubKey = True
, sshCapabilities = []
}
where
(user, host) = if '@' `elem` userhost
then separate (== '@') userhost
else ("", userhost)
fromrsync s
| not (rsyncUrlIsShell u) = Nothing
| otherwise = mkdata $ separate (== ':') s
fromssh = mkdata . break (== '/')

{- Generates a git remote name, like host_dir or host -}
genSshRepoName :: String -> FilePath -> String
genSshRepoName host dir
@@ -92,12 +144,12 @@ validateSshPubKey pubkey
safeincomment c = isAlphaNum c || c == '@' || c == '-' || c == '_' || c == '.'

addAuthorizedKeys :: Bool -> FilePath -> SshPubKey -> IO Bool
addAuthorizedKeys rsynconly dir pubkey = boolSystem "sh"
[ Param "-c" , Param $ addAuthorizedKeysCommand rsynconly dir pubkey ]
addAuthorizedKeys gitannexshellonly dir pubkey = boolSystem "sh"
[ Param "-c" , Param $ addAuthorizedKeysCommand gitannexshellonly dir pubkey ]

removeAuthorizedKeys :: Bool -> FilePath -> SshPubKey -> IO ()
removeAuthorizedKeys rsynconly dir pubkey = do
let keyline = authorizedKeysLine rsynconly dir pubkey
removeAuthorizedKeys gitannexshellonly dir pubkey = do
let keyline = authorizedKeysLine gitannexshellonly dir pubkey
sshdir <- sshDir
let keyfile = sshdir </> "authorized_keys"
ls <- lines <$> readFileStrict keyfile
@@ -110,7 +162,7 @@ removeAuthorizedKeys rsynconly dir pubkey = do
- present.
-}
addAuthorizedKeysCommand :: Bool -> FilePath -> SshPubKey -> String
addAuthorizedKeysCommand rsynconly dir pubkey = intercalate "&&"
addAuthorizedKeysCommand gitannexshellonly dir pubkey = intercalate "&&"
[ "mkdir -p ~/.ssh"
, intercalate "; "
[ "if [ ! -e " ++ wrapper ++ " ]"
@@ -122,7 +174,7 @@ addAuthorizedKeysCommand rsynconly dir pubkey = intercalate "&&"
, "chmod 600 ~/.ssh/authorized_keys"
, unwords
[ "echo"
, shellEscape $ authorizedKeysLine rsynconly dir pubkey
, shellEscape $ authorizedKeysLine gitannexshellonly dir pubkey
, ">>~/.ssh/authorized_keys"
]
]
@@ -141,11 +193,11 @@ addAuthorizedKeysCommand rsynconly dir pubkey = intercalate "&&"
runshell var = "exec git-annex-shell -c \"" ++ var ++ "\""

authorizedKeysLine :: Bool -> FilePath -> SshPubKey -> String
authorizedKeysLine rsynconly dir pubkey
authorizedKeysLine gitannexshellonly dir pubkey
| gitannexshellonly = limitcommand ++ pubkey
{- TODO: Locking down rsync is difficult, requiring a rather
- long perl script. -}
| rsynconly = pubkey
| otherwise = limitcommand ++ pubkey
| otherwise = pubkey
where
limitcommand = "command=\"GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding "
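Aside (not part of this commit): a worked example of what genSshUrl above produces, using made-up SshData values (the field values are hypothetical).

    demoSshData :: SshData
    demoSshData = SshData
        { sshHostName = T.pack "example.com"
        , sshUserName = Just (T.pack "joey")
        , sshDirectory = T.pack "annex"
        , sshRepoName = "example.com_annex"
        , sshPort = 22
        , needsPubKey = False
        , sshCapabilities = [GitCapable, RsyncCapable]
        }
    -- genSshUrl demoSshData
    --   == "ssh://joey@example.com/~/annex/"
    -- genSshUrl demoSshData { sshCapabilities = [RsyncCapable] }
    --   == "joey@example.com:annex/"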
@@ -319,10 +319,10 @@ handleAdds delayadd cs = returnWhen (null incomplete) $ do
add change@(InProcessAddChange { keySource = ks }) =
catchDefaultIO Nothing <~> do
sanitycheck ks $ do
key <- liftAnnex $ do
(mkey, mcache) <- liftAnnex $ do
showStart "add" $ keyFilename ks
Command.Add.ingest $ Just ks
maybe (failedingest change) (done change $ keyFilename ks) key
maybe (failedingest change) (done change mcache $ keyFilename ks) mkey
add _ = return Nothing

{- In direct mode, avoid overhead of re-injesting a renamed
@@ -349,7 +349,7 @@ handleAdds delayadd cs = returnWhen (null incomplete) $ do
fastadd change key = do
let source = keySource change
liftAnnex $ Command.Add.finishIngestDirect key source
done change (keyFilename source) key
done change Nothing (keyFilename source) key

removedKeysMap :: InodeComparisonType -> [Change] -> Annex (M.Map InodeCacheKey Key)
removedKeysMap ct l = do
@@ -365,11 +365,11 @@ handleAdds delayadd cs = returnWhen (null incomplete) $ do
liftAnnex showEndFail
return Nothing

done change file key = liftAnnex $ do
done change mcache file key = liftAnnex $ do
logStatus key InfoPresent
link <- ifM isDirect
( inRepo $ gitAnnexLink file key
, Command.Add.link file key True
, Command.Add.link file key mcache
)
whenM (pure DirWatcher.eventsCoalesce <||> isDirect) $ do
stageSymlink file =<< hashSymlink link

@@ -81,8 +81,7 @@ transferScannerThread urlrenderer = namedThread "TransferScanner" $ do
{- This is a cheap scan for failed transfers involving a remote. -}
failedTransferScan :: Remote -> Assistant ()
failedTransferScan r = do
failed <- liftAnnex $ getFailedTransfers (Remote.uuid r)
liftAnnex $ mapM_ removeFailedTransfer $ map fst failed
failed <- liftAnnex $ clearFailedTransfers (Remote.uuid r)
mapM_ retry failed
where
retry (t, info)
@@ -98,7 +97,7 @@ failedTransferScan r = do
- key, so it's not redundantly checked here. -}
requeue t info
requeue t info = queueTransferWhenSmall "retrying failed transfer" (associatedFile info) t r

{- This is a expensive scan through the full git work tree, finding
- files to transfer. The scan is blocked when the transfer queue gets
- too large.
@@ -118,8 +117,12 @@ expensiveScan :: UrlRenderer -> [Remote] -> Assistant ()
expensiveScan urlrenderer rs = unless onlyweb $ batch <~> do
debug ["starting scan of", show visiblers]

let us = map Remote.uuid rs

mapM_ (liftAnnex . clearFailedTransfers) us

unwantedrs <- liftAnnex $ S.fromList
<$> filterM inUnwantedGroup (map Remote.uuid rs)
<$> filterM inUnwantedGroup us

g <- liftAnnex gitRepo
(files, cleanup) <- liftIO $ LsFiles.inRepo [] g

@@ -17,7 +17,7 @@ import Assistant.XMPP.Client

{- The main configuration screen. -}
getConfigurationR :: Handler Html
getConfigurationR = ifM (inFirstRun)
getConfigurationR = ifM inFirstRun
( redirect FirstRepositoryR
, page "Configuration" (Just Configuration) $ do
#ifdef WITH_XMPP

@@ -11,7 +11,6 @@ module Assistant.WebApp.Configurators.AWS where

import Assistant.WebApp.Common
import Assistant.MakeRemote
import Assistant.Sync
#ifdef WITH_S3
import qualified Remote.S3 as S3
#endif
@@ -22,8 +21,10 @@ import qualified Remote
import qualified Types.Remote as Remote
import Types.Remote (RemoteConfig)
import Types.StandardGroups
import Logs.PreferredContent
import Creds
import Assistant.Gpg
import Git.Remote
import Assistant.WebApp.Utility

import qualified Data.Text as T
import qualified Data.Map as M
@@ -93,10 +94,10 @@ awsCredsAForm defcreds = AWSCreds
<*> secretAccessKeyField (T.pack . snd <$> defcreds)

accessKeyIDField :: Widget -> Maybe Text -> MkAForm Text
accessKeyIDField help def = areq (textField `withNote` help) "Access Key ID" def
accessKeyIDField help = areq (textField `withNote` help) "Access Key ID"

accessKeyIDFieldWithHelp :: Maybe Text -> MkAForm Text
accessKeyIDFieldWithHelp def = accessKeyIDField help def
accessKeyIDFieldWithHelp = accessKeyIDField help
where
help = [whamlet|
<a href="https://portal.aws.amazon.com/gp/aws/securityCredentials#id_block">
@@ -104,7 +105,7 @@ accessKeyIDFieldWithHelp def = accessKeyIDField help def
|]

secretAccessKeyField :: Maybe Text -> MkAForm Text
secretAccessKeyField def = areq passwordField "Secret Access Key" def
secretAccessKeyField = areq passwordField "Secret Access Key"

datacenterField :: AWS.Service -> MkAForm Text
datacenterField service = areq (selectFieldList list) "Datacenter" defregion
@@ -124,16 +125,13 @@ postAddS3R = awsConfigurator $ do
case result of
FormSuccess input -> liftH $ do
let name = T.unpack $ repoName input
makeAWSRemote initSpecialRemote S3.remote (extractCreds input) name setgroup $ M.fromList
makeAWSRemote initSpecialRemote S3.remote TransferGroup (extractCreds input) name $ M.fromList
[ configureEncryption $ enableEncryption input
, ("type", "S3")
, ("datacenter", T.unpack $ datacenter input)
, ("storageclass", show $ storageClass input)
]
_ -> $(widgetFile "configurators/adds3")
where
setgroup r = liftAnnex $
setStandardGroup (Remote.uuid r) TransferGroup
#else
postAddS3R = error "S3 not supported by this build"
#endif
@@ -150,15 +148,12 @@ postAddGlacierR = glacierConfigurator $ do
case result of
FormSuccess input -> liftH $ do
let name = T.unpack $ repoName input
makeAWSRemote initSpecialRemote Glacier.remote (extractCreds input) name setgroup $ M.fromList
makeAWSRemote initSpecialRemote Glacier.remote SmallArchiveGroup (extractCreds input) name $ M.fromList
[ configureEncryption $ enableEncryption input
, ("type", "glacier")
, ("datacenter", T.unpack $ datacenter input)
]
_ -> $(widgetFile "configurators/addglacier")
where
setgroup r = liftAnnex $
setStandardGroup (Remote.uuid r) SmallArchiveGroup
#else
postAddGlacierR = error "S3 not supported by this build"
#endif
@@ -198,7 +193,7 @@ enableAWSRemote remotetype uuid = do
m <- liftAnnex readRemoteLog
let name = fromJust $ M.lookup "name" $
fromJust $ M.lookup uuid m
makeAWSRemote enableSpecialRemote remotetype creds name (const noop) M.empty
makeAWSRemote enableSpecialRemote remotetype SmallArchiveGroup creds name M.empty
_ -> do
description <- liftAnnex $
T.pack <$> Remote.prettyUUID uuid
@@ -207,14 +202,11 @@ enableAWSRemote remotetype uuid = do
enableAWSRemote _ _ = error "S3 not supported by this build"
#endif

makeAWSRemote :: SpecialRemoteMaker -> RemoteType -> AWSCreds -> String -> (Remote -> Handler ()) -> RemoteConfig -> Handler ()
makeAWSRemote maker remotetype (AWSCreds ak sk) name setup config = do
makeAWSRemote :: SpecialRemoteMaker -> RemoteType -> StandardGroup -> AWSCreds -> RemoteName -> RemoteConfig -> Handler ()
makeAWSRemote maker remotetype defaultgroup (AWSCreds ak sk) name config = do
liftIO $ AWS.setCredsEnv (T.unpack ak, T.unpack sk)
r <- liftAnnex $ addRemote $ do
setupCloudRemote defaultgroup Nothing $
maker hostname remotetype config
setup r
liftAssistant $ syncRemote r
redirect $ EditNewCloudRepositoryR $ Remote.uuid r
where
{- AWS services use the remote name as the basis for a host
- name, so filter it to contain valid characters. -}

@@ -22,6 +22,7 @@ import Logs.Trust
import Logs.Remote
import Logs.PreferredContent
import Types.StandardGroups
import Annex.UUID

import System.IO.HVFS (SystemFS(..))
import qualified Data.Text as T
@@ -29,9 +30,13 @@ import qualified Data.Map as M
import System.Path

notCurrentRepo :: UUID -> Handler Html -> Handler Html
notCurrentRepo uuid a = go =<< liftAnnex (Remote.remoteFromUUID uuid)
notCurrentRepo uuid a = do
u <- liftAnnex getUUID
if u == uuid
then redirect DeleteCurrentRepositoryR
else go =<< liftAnnex (Remote.remoteFromUUID uuid)
where
go Nothing = redirect DeleteCurrentRepositoryR
go Nothing = error "Unknown UUID"
go (Just _) = a

getDisableRepositoryR :: UUID -> Handler Html

@@ -62,7 +62,7 @@ getRepoConfig uuid mremote = do
Nothing -> (RepoGroupCustom $ unwords $ S.toList groups, Nothing)
Just g -> (RepoGroupStandard g, associatedDirectory remoteconfig g)

description <- maybe Nothing (Just . T.pack) . M.lookup uuid <$> uuidMap
description <- fmap T.pack . M.lookup uuid <$> uuidMap

syncable <- case mremote of
Just r -> return $ remoteAnnexSync $ Remote.gitconfig r
@@ -99,7 +99,7 @@ setRepoConfig uuid mremote oldc newc = do
, Param $ T.unpack $ repoName oldc
, Param name
]
void $ Remote.remoteListRefresh
void Remote.remoteListRefresh
liftAssistant updateSyncRemotes
when associatedDirectoryChanged $ case repoAssociatedDirectory newc of
Nothing -> noop
@@ -120,11 +120,9 @@ setRepoConfig uuid mremote oldc newc = do
- so avoid queueing a duplicate scan. -}
when (repoSyncable newc && not syncableChanged) $ liftAssistant $
case mremote of
Just remote -> do
addScanRemotes True [remote]
Nothing -> do
addScanRemotes True
=<< syncDataRemotes <$> getDaemonStatus
Just remote -> addScanRemotes True [remote]
Nothing -> addScanRemotes True
=<< syncDataRemotes <$> getDaemonStatus
when syncableChanged $
changeSyncable mremote (repoSyncable newc)
where
@@ -242,4 +240,4 @@ encrypted using gpg key:
<li>
^{gpgKeyDisplay k (M.lookup k knownkeys)}
|]
getRepoEncryption _ _ = [whamlet||] -- local repo
getRepoEncryption _ _ = return () -- local repo

@@ -20,10 +20,10 @@ import qualified Remote
import qualified Types.Remote as Remote
import Types.StandardGroups
import Types.Remote (RemoteConfig)
import Logs.PreferredContent
import Logs.Remote
import qualified Utility.Url as Url
import qualified Annex.Url as Url
import Creds
import Assistant.Gpg

import qualified Data.Text as T
import qualified Data.Map as M
@@ -111,7 +111,7 @@ previouslyUsedIACreds = previouslyUsedCredPair AWS.creds S3.remote $
#endif

accessKeyIDFieldWithHelp :: Maybe Text -> MkAForm Text
accessKeyIDFieldWithHelp def = AWS.accessKeyIDField help def
accessKeyIDFieldWithHelp = AWS.accessKeyIDField help
where
help = [whamlet|
<a href="http://archive.org/account/s3.php">
@@ -130,7 +130,7 @@ postAddIAR = iaConfigurator $ do
case result of
FormSuccess input -> liftH $ do
let name = escapeBucket $ T.unpack $ itemName input
AWS.makeAWSRemote initSpecialRemote S3.remote (extractCreds input) name setgroup $
AWS.makeAWSRemote initSpecialRemote S3.remote PublicGroup (extractCreds input) name $
M.fromList $ catMaybes
[ Just $ configureEncryption NoEncryption
, Just ("type", "S3")
@@ -146,9 +146,6 @@ postAddIAR = iaConfigurator $ do
, Just ("preferreddir", name)
]
_ -> $(widgetFile "configurators/addia")
where
setgroup r = liftAnnex $
setStandardGroup (Remote.uuid r) PublicGroup
#else
postAddIAR = error "S3 not supported by this build"
#endif
@@ -174,7 +171,7 @@ enableIARemote uuid = do
m <- liftAnnex readRemoteLog
let name = fromJust $ M.lookup "name" $
fromJust $ M.lookup uuid m
AWS.makeAWSRemote enableSpecialRemote S3.remote creds name (const noop) M.empty
AWS.makeAWSRemote enableSpecialRemote S3.remote PublicGroup creds name M.empty
_ -> do
description <- liftAnnex $
T.pack <$> Remote.prettyUUID uuid
@@ -193,7 +190,8 @@ escapeHeader = escapeURIString (\c -> isUnescapedInURI c && c /= ' ')

getRepoInfo :: RemoteConfig -> Widget
getRepoInfo c = do
exists <- liftIO $ catchDefaultIO False $ fst <$> Url.exists url []
ua <- liftAnnex Url.getUserAgent
exists <- liftIO $ catchDefaultIO False $ fst <$> Url.exists url [] ua
[whamlet|
<a href="#{url}">
Internet Archive item
@@ -38,7 +38,6 @@ import Config
import Utility.Gpg
import qualified Annex.Branch
import qualified Remote.GCrypt as GCrypt
import qualified Git.GCrypt
import qualified Types.Remote

import qualified Data.Text as T
@@ -101,7 +100,7 @@ checkRepositoryPath p = do
Nothing -> Right $ Just $ T.pack basepath
Just prob -> Left prob
where
runcheck (chk, msg) = ifM (chk) ( return $ Just msg, return Nothing )
runcheck (chk, msg) = ifM chk ( return $ Just msg, return Nothing )
expandTilde home ('~':'/':path) = home </> path
expandTilde _ path = path

@@ -114,7 +113,7 @@ checkRepositoryPath p = do
- browsed to a directory with git-annex and run it from there. -}
defaultRepositoryPath :: Bool -> IO FilePath
defaultRepositoryPath firstrun = do
cwd <- liftIO $ getCurrentDirectory
cwd <- liftIO getCurrentDirectory
home <- myHomeDir
if home == cwd && firstrun
then inhome
@@ -137,7 +136,7 @@ newRepositoryForm defpath msg = do
(Just $ T.pack $ addTrailingPathSeparator defpath)
let (err, errmsg) = case pathRes of
FormMissing -> (False, "")
FormFailure l -> (True, concat $ map T.unpack l)
FormFailure l -> (True, concatMap T.unpack l)
FormSuccess _ -> (False, "")
let form = do
webAppFormAuthToken
@@ -196,8 +195,8 @@ postNewRepositoryR = page "Add another repository" (Just Configuration) $ do
mainrepo <- fromJust . relDir <$> liftH getYesod
$(widgetFile "configurators/newrepository/combine")

getCombineRepositoryR :: FilePathAndUUID -> Handler Html
getCombineRepositoryR (FilePathAndUUID newrepopath newrepouuid) = do
getCombineRepositoryR :: FilePath -> UUID -> Handler Html
getCombineRepositoryR newrepopath newrepouuid = do
r <- combineRepos newrepopath remotename
liftAssistant $ syncRemote r
redirect $ EditRepositoryR newrepouuid
@@ -231,7 +230,7 @@ getAddDriveR :: Handler Html
getAddDriveR = postAddDriveR
postAddDriveR :: Handler Html
postAddDriveR = page "Add a removable drive" (Just Configuration) $ do
removabledrives <- liftIO $ driveList
removabledrives <- liftIO driveList
writabledrives <- liftIO $
filterM (canWrite . T.unpack . mountPoint) removabledrives
((res, form), enctype) <- liftH $ runFormPost $
@@ -253,7 +252,7 @@ getConfirmAddDriveR drive = ifM (liftIO $ probeRepoExists dir)
mu <- liftIO $ probeUUID dir
case mu of
Nothing -> maybe askcombine isknownuuid
=<< liftIO (probeGCryptRemoteUUID dir)
=<< liftAnnex (probeGCryptRemoteUUID dir)
Just driveuuid -> isknownuuid driveuuid
, newrepo
)
@@ -276,17 +275,14 @@ getConfirmAddDriveR drive = ifM (liftIO $ probeRepoExists dir)
setupDriveModal :: Widget
setupDriveModal = $(widgetFile "configurators/adddrive/setupmodal")

genKeyModal :: Widget
genKeyModal = $(widgetFile "configurators/genkeymodal")

getGenKeyForDriveR :: RemovableDrive -> Handler Html
getGenKeyForDriveR drive = withNewSecretKey $ \key -> do
getGenKeyForDriveR drive = withNewSecretKey $ \keyid ->
{- Generating a key takes a long time, and
- the removable drive may have been disconnected
- in the meantime. Check that it is still mounted
- before finishing. -}
ifM (liftIO $ any (\d -> mountPoint d == mountPoint drive) <$> driveList)
( getFinishAddDriveR drive (RepoKey key)
( getFinishAddDriveR drive (RepoKey keyid)
, getAddDriveR
)

@@ -294,39 +290,22 @@ getFinishAddDriveR :: RemovableDrive -> RepoKey -> Handler Html
getFinishAddDriveR drive = go
where
{- Set up new gcrypt special remote. -}
go (RepoKey keyid) = ifM (liftIO $ inPath "git-remote-gcrypt")
( makewith $ \_ -> do
r <- liftAnnex $ addRemote $
initSpecialRemote remotename GCrypt.remote $ M.fromList
[ ("type", "gcrypt")
, ("gitrepo", dir)
, configureEncryption HybridEncryption
, ("keyid", keyid)
]
return (Types.Remote.uuid r, r)
, page "Encrypt repository" (Just Configuration) $
$(widgetFile "configurators/needgcrypt")
)
go NoRepoKey = do
pr <- liftAnnex $ inRepo $ Git.GCrypt.probeRepo dir
case pr of
Git.GCrypt.Decryptable -> do
mu <- liftIO $ probeGCryptRemoteUUID dir
case mu of
Just u -> enablegcryptremote u
Nothing -> error "The drive contains a gcrypt repository that is not a git-annex special remote. This is not supported."
Git.GCrypt.NotDecryptable ->
error $ "The drive contains a git repository that is encrypted with a GnuPG key that you do not have."
Git.GCrypt.NotEncrypted -> makeunencrypted
enablegcryptremote u = do
mname <- liftAnnex $ getGCryptRemoteName u dir
case mname of
Nothing -> error $ "Cannot find configuration for the gcrypt remote at " ++ dir
Just name -> makewith $ const $ do
r <- liftAnnex $ addRemote $
enableSpecialRemote name GCrypt.remote $ M.fromList
[("gitrepo", dir)]
return (u, r)
go (RepoKey keyid) = whenGcryptInstalled $ makewith $ const $ do
r <- liftAnnex $ addRemote $
makeGCryptRemote remotename dir keyid
return (Types.Remote.uuid r, r)
go NoRepoKey = checkGCryptRepoEncryption dir makeunencrypted $ do
mu <- liftAnnex $ probeGCryptRemoteUUID dir
case mu of
Just u -> enableexistinggcryptremote u
Nothing -> error "The drive contains a gcrypt repository that is not a git-annex special remote. This is not supported."
enableexistinggcryptremote u = do
remotename' <- liftAnnex $ getGCryptRemoteName u dir
makewith $ const $ do
r <- liftAnnex $ addRemote $
enableSpecialRemote remotename' GCrypt.remote $ M.fromList
[("gitrepo", dir)]
return (u, r)
{- Making a new unencrypted repo, or combining with an existing one. -}
makeunencrypted = makewith $ \isnew -> (,)
<$> liftIO (initRepo isnew False dir $ Just remotename)
@@ -350,7 +329,7 @@ getFinishAddDriveR drive = go
- Next call syncRemote to get them in sync. -}
combineRepos :: FilePath -> String -> Handler Remote
combineRepos dir name = liftAnnex $ do
hostname <- maybe "host" id <$> liftIO getHostname
hostname <- fromMaybe "host" <$> liftIO getHostname
hostlocation <- fromRepo Git.repoLocation
liftIO $ inDir dir $ void $ makeGitRemote hostname hostlocation
addRemote $ makeGitRemote name dir
@@ -401,7 +380,7 @@ startFullAssistant path repogroup setup = do
u <- initRepo isnew True path Nothing
inDir path $ do
setStandardGroup u repogroup
maybe noop id setup
fromMaybe noop setup
addAutoStartFile path
setCurrentDirectory path
fromJust $ postFirstRun webapp
@@ -461,13 +440,12 @@ initRepo False _ dir desc = inDir dir $ do
getUUID

initRepo' :: Maybe String -> Annex ()
initRepo' desc = do
unlessM isInitialized $ do
initialize desc
{- Ensure branch gets committed right away so it is
- available for merging when a removable drive repo is being
- added. -}
Annex.Branch.commit "update"
initRepo' desc = unlessM isInitialized $ do
initialize desc
{- Ensure branch gets committed right away so it is
- available for merging when a removable drive repo is being
- added. -}
Annex.Branch.commit "update"

{- Checks if the user can write to a directory.
-
@@ -490,11 +468,3 @@ probeUUID :: FilePath -> IO (Maybe UUID)
probeUUID dir = catchDefaultIO Nothing $ inDir dir $ do
u <- getUUID
return $ if u == NoUUID then Nothing else Just u

{- Gets the UUID of the gcrypt repo at a location, which may not exist.
- Only works if the gcrypt repo was created as a git-annex remote. -}
probeGCryptRemoteUUID :: FilePath -> IO (Maybe UUID)
probeGCryptRemoteUUID dir = catchDefaultIO Nothing $ do
r <- Git.Construct.fromAbsPath dir
(genUUIDInNameSpace gCryptNameSpace <$>) . fst
<$> GCrypt.getGCryptId r

@@ -152,7 +152,7 @@ postFinishLocalPairR msg = promptSecret (Just msg) $ \_ secret -> do
where
alert = pairRequestAcknowledgedAlert (pairRepo msg) . Just
setup repodir = setupAuthorizedKeys msg repodir
cleanup repodir = removeAuthorizedKeys False repodir $
cleanup repodir = removeAuthorizedKeys True repodir $
remoteSshPubKey $ pairMsgData msg
uuid = Just $ pairUUID $ pairMsgData msg
#else
@@ -300,7 +300,7 @@ secretProblem :: Secret -> Maybe Text
secretProblem s
| B.null s = Just "The secret phrase cannot be left empty. (Remember that punctuation and white space is ignored.)"
| B.length s < 6 = Just "Enter a longer secret phrase, at least 6 characters, but really, a phrase is best! This is not a password you'll need to enter every day."
| s == toSecret sampleQuote = Just "Speaking of foolishness, don't paste in the example I gave. Enter a different phrase, please!"
| s == toSecret sampleQuote = Just "Speaking of foolishness, don't paste in the example I gave. Enter a different phrase, please!"
| otherwise = Nothing

toSecret :: Text -> Secret

@@ -1,6 +1,6 @@
{- git-annex assistant webapp configurator for ssh-based remotes
-
- Copyright 2012 Joey Hess <joey@kitenet.net>
- Copyright 2012-2013 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU AGPL version 3 or higher.
-}
@@ -11,18 +11,25 @@
module Assistant.WebApp.Configurators.Ssh where

import Assistant.WebApp.Common
import Assistant.WebApp.Gpg
import Assistant.Ssh
import Assistant.MakeRemote
import Utility.Rsync (rsyncUrlIsShell)
import Logs.Remote
import Remote
import Logs.PreferredContent
import Types.StandardGroups
import Utility.UserInfo
import Utility.Gpg
import Types.Remote (RemoteConfig)
import Git.Remote
import Assistant.WebApp.Utility
import qualified Remote.GCrypt as GCrypt
import Annex.UUID
import Logs.UUID

import qualified Data.Text as T
import qualified Data.Map as M
import Network.Socket
import Data.Ord

sshConfigurator :: Widget -> Handler Html
sshConfigurator = page "Add a remote server" (Just Configuration)
@@ -47,7 +54,7 @@ mkSshData s = SshData
(maybe "" T.unpack $ inputDirectory s)
, sshPort = inputPort s
, needsPubKey = False
, rsyncOnly = False
, sshCapabilities = [] -- untested
}

mkSshInput :: SshData -> SshInput
@@ -81,7 +88,7 @@ sshInputAForm hostnamefield def = SshInput
let h = T.unpack t
let canonname = Just $ defaultHints { addrFlags = [AI_CANONNAME] }
r <- catchMaybeIO $ getAddrInfo canonname (Just h) Nothing
return $ case catMaybes . map addrCanonName <$> r of
return $ case mapMaybe addrCanonName <$> r of
-- canonicalize input hostname if it had no dot
Just (fullname:_)
| '.' `elem` h -> Right t
@@ -96,30 +103,27 @@ sshInputAForm hostnamefield def = SshInput
data ServerStatus
= UntestedServer
| UnusableServer Text -- reason why it's not usable
| UsableRsyncServer
| UsableSshInput
| UsableServer [SshServerCapability]
deriving (Eq)

usable :: ServerStatus -> Bool
usable UntestedServer = False
usable (UnusableServer _) = False
usable UsableRsyncServer = True
usable UsableSshInput = True
capabilities :: ServerStatus -> [SshServerCapability]
capabilities (UsableServer cs) = cs
capabilities _ = []

getAddSshR :: Handler Html
getAddSshR = postAddSshR
postAddSshR :: Handler Html
postAddSshR = sshConfigurator $ do
u <- liftIO $ T.pack <$> myUserName
username <- liftIO $ T.pack <$> myUserName
((result, form), enctype) <- liftH $
runFormPost $ renderBootstrap $ sshInputAForm textField $
SshInput Nothing (Just u) Nothing 22
SshInput Nothing (Just username) Nothing 22
case result of
FormSuccess sshinput -> do
s <- liftIO $ testServer sshinput
case s of
Left status -> showform form enctype status
Right sshdata -> liftH $ redirect $ ConfirmSshR sshdata
Right (sshdata, u) -> liftH $ redirect $ ConfirmSshR sshdata u
_ -> showform form enctype UntestedServer
where
showform form enctype status = $(widgetFile "configurators/ssh/add")
@@ -127,64 +131,64 @@ postAddSshR = sshConfigurator $ do
sshTestModal :: Widget
sshTestModal = $(widgetFile "configurators/ssh/testmodal")

{- To enable an existing rsync special remote, parse the SshInput from
- its rsyncurl, and display a form whose only real purpose is to check
- if ssh public keys need to be set up. From there, we can proceed with
- the usual repo setup; all that code is idempotent.
-
- Note that there's no EnableSshR because ssh remotes are not special
- remotes, and so their configuration is not shared between repositories.
-}
sshSetupModal :: SshData -> Widget
sshSetupModal sshdata = $(widgetFile "configurators/ssh/setupmodal")

getEnableRsyncR :: UUID -> Handler Html
getEnableRsyncR = postEnableRsyncR
postEnableRsyncR :: UUID -> Handler Html
postEnableRsyncR u = do
postEnableRsyncR = enableSpecialSshRemote getsshinput enableRsyncNet enablersync
where
enablersync sshdata u = redirect $ ConfirmSshR
(sshdata { sshCapabilities = [RsyncCapable] }) u
getsshinput = parseSshUrl <=< M.lookup "rsyncurl"

{- This only handles gcrypt repositories that are located on ssh servers;
- ones on local drives are handled via another part of the UI. -}
getEnableSshGCryptR :: UUID -> Handler Html
getEnableSshGCryptR = postEnableSshGCryptR
postEnableSshGCryptR :: UUID -> Handler Html
postEnableSshGCryptR u = whenGcryptInstalled $
enableSpecialSshRemote getsshinput enableRsyncNetGCrypt enablegcrypt u
where
enablegcrypt sshdata _ = prepSsh True sshdata $ \sshdata' ->
sshConfigurator $
checkExistingGCrypt sshdata' $
error "Expected to find an encrypted git repository, but did not."
getsshinput = parseSshUrl <=< M.lookup "gitrepo"

{- To enable a special remote that uses ssh as its transport,
- parse a config key to get its url, and display a form whose
- only real purpose is to check if ssh public keys need to be
- set up.
-}
enableSpecialSshRemote :: (RemoteConfig -> Maybe SshData) -> (SshInput -> RemoteName -> Handler Html) -> (SshData -> UUID -> Handler Html) -> UUID -> Handler Html
enableSpecialSshRemote getsshinput rsyncnetsetup genericsetup u = do
m <- fromMaybe M.empty . M.lookup u <$> liftAnnex readRemoteLog
case (parseSshRsyncUrl =<< M.lookup "rsyncurl" m, M.lookup "name" m) of
case (mkSshInput . unmangle <$> getsshinput m, M.lookup "name" m) of
(Just sshinput, Just reponame) -> sshConfigurator $ do
((result, form), enctype) <- liftH $
runFormPost $ renderBootstrap $ sshInputAForm textField sshinput
case result of
FormSuccess sshinput'
| isRsyncNet (inputHostname sshinput') ->
void $ liftH $ makeRsyncNet sshinput' reponame (const noop)
void $ liftH $ rsyncnetsetup sshinput' reponame
| otherwise -> do
s <- liftIO $ testServer sshinput'
case s of
Left status -> showform form enctype status
Right sshdata -> enable sshdata
{ sshRepoName = reponame }
Right (sshdata, _u) -> void $ liftH $ genericsetup
( sshdata { sshRepoName = reponame } ) u
_ -> showform form enctype UntestedServer
_ -> redirect AddSshR
where
unmangle sshdata = sshdata
{ sshHostName = T.pack $ unMangleSshHostName $
T.unpack $ sshHostName sshdata
}
showform form enctype status = do
description <- liftAnnex $ T.pack <$> prettyUUID u
$(widgetFile "configurators/ssh/enable")
enable sshdata = liftH $ redirect $ ConfirmSshR $
sshdata { rsyncOnly = True }

{- Converts a rsyncurl value to a SshInput. But only if it's a ssh rsync
- url; rsync:// urls or bare path names are not supported.
-
- The hostname is stored mangled in the remote log for rsync special
- remotes configured by this webapp. So that mangling has to reversed
- here to get back the original hostname.
-}
parseSshRsyncUrl :: String -> Maybe SshInput
parseSshRsyncUrl u
| not (rsyncUrlIsShell u) = Nothing
| otherwise = Just $ SshInput
{ inputHostname = val $ unMangleSshHostName host
, inputUsername = if null user then Nothing else val user
, inputDirectory = val dir
, inputPort = 22
}
where
val = Just . T.pack
(userhost, dir) = separate (== ':') u
(user, host) = if '@' `elem` userhost
then separate (== '@') userhost
else (userhost, "")

{- Test if we can ssh into the server.
-
@@ -193,33 +197,41 @@ parseSshRsyncUrl u
- passwordless login is already enabled, use it. Otherwise,
- a special ssh key will need to be generated just for this server.
-
- Once logged into the server, probe to see if git-annex-shell is
- available, or rsync. Note that, ~/.ssh/git-annex-shell may be
- Once logged into the server, probe to see if git-annex-shell,
- git, and rsync are available.
- Note that, ~/.ssh/git-annex-shell may be
- present, while git-annex-shell is not in PATH.
-
- Also probe to see if there is already a git repository at the location
- with either an annex-uuid or a gcrypt-id set. (If not, returns NoUUID.)
-}
testServer :: SshInput -> IO (Either ServerStatus SshData)
testServer :: SshInput -> IO (Either ServerStatus (SshData, UUID))
testServer (SshInput { inputHostname = Nothing }) = return $
Left $ UnusableServer "Please enter a host name."
testServer sshinput@(SshInput { inputHostname = Just hn }) = do
status <- probe [sshOpt "NumberOfPasswordPrompts" "0"]
if usable status
then ret status False
else do
status' <- probe []
if usable status'
then ret status' True
else return $ Left status'
(status, u) <- probe [sshOpt "NumberOfPasswordPrompts" "0"]
case capabilities status of
[] -> do
(status', u') <- probe []
case capabilities status' of
[] -> return $ Left status'
cs -> ret cs True u'
cs -> ret cs False u
where
ret status needspubkey = return $ Right $ (mkSshData sshinput)
{ needsPubKey = needspubkey
, rsyncOnly = status == UsableRsyncServer
}
ret cs needspubkey u = do
let sshdata = (mkSshData sshinput)
{ needsPubKey = needspubkey
, sshCapabilities = cs
}
return $ Right (sshdata, u)
probe extraopts = do
let remotecommand = shellWrap $ intercalate ";"
[ report "loggedin"
, checkcommand "git-annex-shell"
, checkcommand "git"
, checkcommand "rsync"
, checkcommand shim
, getgitconfig (T.unpack <$> inputDirectory sshinput)
]
knownhost <- knownHost hn
let sshopts = filter (not . null) $ extraopts ++
@@ -235,21 +247,35 @@ testServer sshinput@(SshInput { inputHostname = Just hn }) = do
, remotecommand
]
parsetranscript . fst <$> sshTranscript sshopts Nothing
parsetranscript s
| reported "git-annex-shell" = UsableSshInput
| reported shim = UsableSshInput
| reported "rsync" = UsableRsyncServer
| reported "loggedin" = UnusableServer
"Neither rsync nor git-annex are installed on the server. Perhaps you should go install them?"
| otherwise = UnusableServer $ T.pack $
"Failed to ssh to the server. Transcript: " ++ s
parsetranscript s =
let cs = map snd $ filter (reported . fst)
[ ("git-annex-shell", GitAnnexShellCapable)
, (shim, GitAnnexShellCapable)
, ("git", GitCapable)
, ("rsync", RsyncCapable)
]
u = fromMaybe NoUUID $ headMaybe $ mapMaybe finduuid $
map (separate (== '=')) $ lines s
in if null cs
then (UnusableServer unusablereason, u)
else (UsableServer cs, u)
where
reported r = token r `isInfixOf` s
unusablereason = if reported "loggedin"
then "Neither rsync nor git-annex are installed on the server. Perhaps you should go install them?"
else T.pack $ "Failed to ssh to the server. Transcript: " ++ s
finduuid (k, v)
| k == "annex.uuid" = Just $ toUUID v
| k == GCrypt.coreGCryptId = Just $ genUUIDInNameSpace gCryptNameSpace v
| otherwise = Nothing

checkcommand c = "if which " ++ c ++ "; then " ++ report c ++ "; fi"
token r = "git-annex-probe " ++ r
report r = "echo " ++ token r
shim = "~/.ssh/git-annex-shell"
getgitconfig (Just d)
| not (null d) = "cd " ++ shellEscape d ++ " && git config --list"
getgitconfig _ = "echo"

{- Runs a ssh command; if it fails shows the user the transcript,
- and if it succeeds, runs an action. -}
@@ -264,54 +290,124 @@ showSshErr :: String -> Handler Html
showSshErr msg = sshConfigurator $
$(widgetFile "configurators/ssh/error")

getConfirmSshR :: SshData -> Handler Html
getConfirmSshR sshdata = sshConfigurator $
$(widgetFile "configurators/ssh/confirm")
{- The UUID will be NoUUID when the repository does not already exist. -}
getConfirmSshR :: SshData -> UUID -> Handler Html
getConfirmSshR sshdata u
| u == NoUUID = handlenew
| otherwise = handleexisting =<< (M.lookup u <$> liftAnnex uuidMap)
where
handlenew = sshConfigurator $ do
secretkeys <- sortBy (comparing snd) . M.toList
<$> liftIO secretKeys
$(widgetFile "configurators/ssh/confirm")
handleexisting Nothing = sshConfigurator $
-- Not a UUID we know, so prompt about combining.
$(widgetFile "configurators/ssh/combine")
handleexisting (Just _) = prepSsh False sshdata $ \sshdata' -> do
m <- liftAnnex readRemoteLog
case M.lookup "type" =<< M.lookup u m of
Just "gcrypt" -> combineExistingGCrypt sshdata' u
-- This handles enabling git repositories
-- that already exist.
_ -> makeSshRepo sshdata'

{- The user has confirmed they want to combine with a ssh repository,
- which is not known to us. So it might be using gcrypt. -}
getCombineSshR :: SshData -> Handler Html
getCombineSshR sshdata = prepSsh False sshdata $ \sshdata' ->
sshConfigurator $
checkExistingGCrypt sshdata' $
void $ liftH $ makeSshRepo sshdata'

getRetrySshR :: SshData -> Handler ()
getRetrySshR sshdata = do
s <- liftIO $ testServer $ mkSshInput sshdata
redirect $ either (const $ ConfirmSshR sshdata) ConfirmSshR s
redirect $ either (const $ ConfirmSshR sshdata NoUUID) (uncurry ConfirmSshR) s

getMakeSshGitR :: SshData -> Handler Html
getMakeSshGitR = makeSsh False setupGroup
getMakeSshGitR sshdata = prepSsh False sshdata makeSshRepo

getMakeSshRsyncR :: SshData -> Handler Html
getMakeSshRsyncR = makeSsh True setupGroup
getMakeSshRsyncR sshdata = prepSsh False (rsyncOnly sshdata) makeSshRepo

makeSsh :: Bool -> (Remote -> Handler ()) -> SshData -> Handler Html
makeSsh rsync setup sshdata
rsyncOnly :: SshData -> SshData
rsyncOnly sshdata = sshdata { sshCapabilities = [RsyncCapable] }

getMakeSshGCryptR :: SshData -> RepoKey -> Handler Html
getMakeSshGCryptR sshdata NoRepoKey = whenGcryptInstalled $
withNewSecretKey $ getMakeSshGCryptR sshdata . RepoKey
getMakeSshGCryptR sshdata (RepoKey keyid) = whenGcryptInstalled $
prepSsh True sshdata $ makeGCryptRepo keyid

{- Detect if the user entered a location with an existing, known
- gcrypt repository, and enable it. Otherwise, runs the action. -}
checkExistingGCrypt :: SshData -> Widget -> Widget
checkExistingGCrypt sshdata nope = ifM (liftIO isGcryptInstalled)
( checkGCryptRepoEncryption repourl nope $ do
mu <- liftAnnex $ probeGCryptRemoteUUID repourl
case mu of
Just u -> void $ liftH $
combineExistingGCrypt sshdata u
Nothing -> error "The location contains a gcrypt repository that is not a git-annex special remote. This is not supported."
, nope
)
where
repourl = genSshUrl sshdata

{- Enables an existing gcrypt special remote. -}
enableGCrypt :: SshData -> RemoteName -> Handler Html
enableGCrypt sshdata reponame =
setupCloudRemote TransferGroup Nothing $
enableSpecialRemote reponame GCrypt.remote $ M.fromList
[("gitrepo", genSshUrl sshdata)]

{- Combining with a gcrypt repository that may not be
- known in remote.log, so probe the gcrypt repo. -}
combineExistingGCrypt :: SshData -> UUID -> Handler Html
combineExistingGCrypt sshdata u = do
reponame <- liftAnnex $ getGCryptRemoteName u repourl
enableGCrypt sshdata reponame
where
repourl = genSshUrl sshdata

{- Sets up remote repository for ssh, or directory for rsync. -}
prepSsh :: Bool -> SshData -> (SshData -> Handler Html) -> Handler Html
prepSsh newgcrypt sshdata a
| needsPubKey sshdata = do
keypair <- liftIO genSshKeyPair
sshdata' <- liftIO $ setupSshKeyPair keypair sshdata
makeSsh' rsync setup sshdata sshdata' (Just keypair)
prepSsh' newgcrypt sshdata sshdata' (Just keypair) a
| sshPort sshdata /= 22 = do
sshdata' <- liftIO $ setSshConfig sshdata []
||||
makeSsh' rsync setup sshdata sshdata' Nothing
|
||||
| otherwise = makeSsh' rsync setup sshdata sshdata Nothing
|
||||
prepSsh' newgcrypt sshdata sshdata' Nothing a
|
||||
| otherwise = prepSsh' newgcrypt sshdata sshdata Nothing a
|
||||
|
||||
makeSsh' :: Bool -> (Remote -> Handler ()) -> SshData -> SshData -> Maybe SshKeyPair -> Handler Html
|
||||
makeSsh' rsync setup origsshdata sshdata keypair = do
|
||||
sshSetup ["-p", show (sshPort origsshdata), sshhost, remoteCommand] "" $
|
||||
makeSshRepo rsync setup sshdata
|
||||
prepSsh' :: Bool -> SshData -> SshData -> Maybe SshKeyPair -> (SshData -> Handler Html) -> Handler Html
|
||||
prepSsh' newgcrypt origsshdata sshdata keypair a = sshSetup
|
||||
[ "-p", show (sshPort origsshdata)
|
||||
, genSshHost (sshHostName origsshdata) (sshUserName origsshdata)
|
||||
, remoteCommand
|
||||
] "" (a sshdata)
|
||||
where
|
||||
sshhost = genSshHost (sshHostName origsshdata) (sshUserName origsshdata)
|
||||
remotedir = T.unpack $ sshDirectory sshdata
|
||||
remoteCommand = shellWrap $ intercalate "&&" $ catMaybes
|
||||
[ Just $ "mkdir -p " ++ shellEscape remotedir
|
||||
, Just $ "cd " ++ shellEscape remotedir
|
||||
, if rsync then Nothing else Just "if [ ! -d .git ]; then git init --bare --shared; fi"
|
||||
, if rsync then Nothing else Just "git annex init"
|
||||
, if needsPubKey sshdata
|
||||
then addAuthorizedKeysCommand (rsync || rsyncOnly sshdata) remotedir . sshPubKey <$> keypair
|
||||
, if rsynconly then Nothing else Just "if [ ! -d .git ]; then git init --bare --shared; fi"
|
||||
, if rsynconly || newgcrypt then Nothing else Just "git annex init"
|
||||
, if needsPubKey origsshdata
|
||||
then addAuthorizedKeysCommand (hasCapability origsshdata GitAnnexShellCapable) remotedir . sshPubKey <$> keypair
|
||||
else Nothing
|
||||
]
|
||||
rsynconly = onlyCapability origsshdata RsyncCapable
|
||||
|
||||
makeSshRepo :: Bool -> (Remote -> Handler ()) -> SshData -> Handler Html
|
||||
makeSshRepo forcersync setup sshdata = do
|
||||
r <- liftAssistant $ makeSshRemote forcersync sshdata Nothing
|
||||
setup r
|
||||
redirect $ EditNewCloudRepositoryR $ Remote.uuid r
|
||||
makeSshRepo :: SshData -> Handler Html
|
||||
makeSshRepo sshdata = setupCloudRemote TransferGroup Nothing $
|
||||
makeSshRemote sshdata
|
||||
|
||||
makeGCryptRepo :: KeyId -> SshData -> Handler Html
|
||||
makeGCryptRepo keyid sshdata = setupCloudRemote TransferGroup Nothing $
|
||||
makeGCryptRemote (sshRepoName sshdata) (genSshUrl sshdata) keyid
|
||||
|
||||
getAddRsyncNetR :: Handler Html
|
||||
getAddRsyncNetR = postAddRsyncNetR
|
||||
|
@ -320,19 +416,18 @@ postAddRsyncNetR = do
|
|||
((result, form), enctype) <- runFormPost $
|
||||
renderBootstrap $ sshInputAForm hostnamefield $
|
||||
SshInput Nothing Nothing Nothing 22
|
||||
let showform status = page "Add a Rsync.net repository" (Just Configuration) $
|
||||
$(widgetFile "configurators/addrsync.net")
|
||||
let showform status = inpage $
|
||||
$(widgetFile "configurators/rsync.net/add")
|
||||
case result of
|
||||
FormSuccess sshinput
|
||||
| isRsyncNet (inputHostname sshinput) -> do
|
||||
let reponame = genSshRepoName "rsync.net"
|
||||
(maybe "" T.unpack $ inputDirectory sshinput)
|
||||
makeRsyncNet sshinput reponame setupGroup
|
||||
| isRsyncNet (inputHostname sshinput) ->
|
||||
go sshinput
|
||||
| otherwise ->
|
||||
showform $ UnusableServer
|
||||
"That is not a rsync.net host name."
|
||||
_ -> showform UntestedServer
|
||||
where
|
||||
inpage = page "Add a Rsync.net repository" (Just Configuration)
|
||||
hostnamefield = textField `withExpandableNote` ("Help", help)
|
||||
help = [whamlet|
|
||||
<div>
|
||||
|
@ -342,16 +437,51 @@ postAddRsyncNetR = do
|
|||
The host name will be something like "usw-s001.rsync.net", and the #
|
||||
user name something like "7491"
|
||||
|]
|
||||
go sshinput = do
|
||||
let reponame = genSshRepoName "rsync.net"
|
||||
(maybe "" T.unpack $ inputDirectory sshinput)
|
||||
prepRsyncNet sshinput reponame $ \sshdata -> inpage $
|
||||
checkExistingGCrypt sshdata $ do
|
||||
secretkeys <- sortBy (comparing snd) . M.toList
|
||||
<$> liftIO secretKeys
|
||||
$(widgetFile "configurators/rsync.net/encrypt")
|
||||
|
||||
makeRsyncNet :: SshInput -> String -> (Remote -> Handler ()) -> Handler Html
|
||||
makeRsyncNet sshinput reponame setup = do
|
||||
getMakeRsyncNetSharedR :: SshData -> Handler Html
|
||||
getMakeRsyncNetSharedR = makeSshRepo . rsyncOnly
|
||||
|
||||
{- Make a gcrypt special remote on rsync.net. -}
|
||||
getMakeRsyncNetGCryptR :: SshData -> RepoKey -> Handler Html
|
||||
getMakeRsyncNetGCryptR sshdata NoRepoKey = whenGcryptInstalled $
|
||||
withNewSecretKey $ getMakeRsyncNetGCryptR sshdata . RepoKey
|
||||
getMakeRsyncNetGCryptR sshdata (RepoKey keyid) = whenGcryptInstalled $
|
||||
sshSetup [sshhost, gitinit] [] $ makeGCryptRepo keyid sshdata
|
||||
where
|
||||
sshhost = genSshHost (sshHostName sshdata) (sshUserName sshdata)
|
||||
gitinit = "git init --bare " ++ T.unpack (sshDirectory sshdata)
|
||||
|
||||
enableRsyncNet :: SshInput -> String -> Handler Html
|
||||
enableRsyncNet sshinput reponame =
|
||||
prepRsyncNet sshinput reponame $ makeSshRepo . rsyncOnly
|
||||
|
||||
enableRsyncNetGCrypt :: SshInput -> RemoteName -> Handler Html
|
||||
enableRsyncNetGCrypt sshinput reponame =
|
||||
prepRsyncNet sshinput reponame $ \sshdata ->
|
||||
checkGCryptRepoEncryption (genSshUrl sshdata) notencrypted $
|
||||
enableGCrypt sshdata reponame
|
||||
where
|
||||
notencrypted = error "Unexpectedly found a non-encrypted git repository, instead of the expected encrypted git repository."
|
||||
|
||||
{- Prepares rsync.net ssh key, and if successful, runs an action with
|
||||
- its SshData. -}
|
||||
prepRsyncNet :: SshInput -> String -> (SshData -> Handler Html) -> Handler Html
|
||||
prepRsyncNet sshinput reponame a = do
|
||||
knownhost <- liftIO $ maybe (return False) knownHost (inputHostname sshinput)
|
||||
keypair <- liftIO $ genSshKeyPair
|
||||
keypair <- liftIO genSshKeyPair
|
||||
sshdata <- liftIO $ setupSshKeyPair keypair $
|
||||
(mkSshData sshinput)
|
||||
{ sshRepoName = reponame
|
||||
, needsPubKey = True
|
||||
, rsyncOnly = True
|
||||
, sshCapabilities = [RsyncCapable]
|
||||
}
|
||||
{- I'd prefer to separate commands with && , but
|
||||
- rsync.net's shell does not support that.
|
||||
|
@ -371,12 +501,8 @@ makeRsyncNet sshinput reponame setup = do
|
|||
, genSshHost (sshHostName sshdata) (sshUserName sshdata)
|
||||
, remotecommand
|
||||
]
|
||||
sshSetup sshopts (sshPubKey keypair) $
|
||||
makeSshRepo True setup sshdata
|
||||
sshSetup sshopts (sshPubKey keypair) $ a sshdata
|
||||
|
||||
isRsyncNet :: Maybe Text -> Bool
|
||||
isRsyncNet Nothing = False
|
||||
isRsyncNet (Just host) = ".rsync.net" `T.isSuffixOf` T.toLower host
|
||||
|
||||
setupGroup :: Remote -> Handler ()
|
||||
setupGroup r = liftAnnex $ setStandardGroup (Remote.uuid r) TransferGroup
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
- Licensed under the GNU AGPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE CPP, QuasiQuotes, TemplateHaskell, OverloadedStrings #-}
|
||||
{-# LANGUAGE CPP, TemplateHaskell, OverloadedStrings #-}
|
||||
|
||||
module Assistant.WebApp.Configurators.WebDAV where
|
||||
|
||||
|
@ -14,12 +14,13 @@ import Creds
|
|||
#ifdef WITH_WEBDAV
|
||||
import qualified Remote.WebDAV as WebDAV
|
||||
import Assistant.MakeRemote
|
||||
import Assistant.Sync
|
||||
import qualified Remote
|
||||
import Types.Remote (RemoteConfig)
|
||||
import Types.StandardGroups
|
||||
import Logs.PreferredContent
|
||||
import Logs.Remote
|
||||
import Assistant.Gpg
|
||||
import Assistant.WebApp.Utility
|
||||
import Git.Remote
|
||||
|
||||
import qualified Data.Map as M
|
||||
#endif
|
||||
|
@ -69,7 +70,7 @@ postAddBoxComR = boxConfigurator $ do
|
|||
runFormPost $ renderBootstrap $ boxComAForm defcreds
|
||||
case result of
|
||||
FormSuccess input -> liftH $
|
||||
makeWebDavRemote initSpecialRemote "box.com" (toCredPair input) setgroup $ M.fromList
|
||||
makeWebDavRemote initSpecialRemote "box.com" (toCredPair input) $ M.fromList
|
||||
[ configureEncryption $ enableEncryption input
|
||||
, ("embedcreds", if embedCreds input then "yes" else "no")
|
||||
, ("type", "webdav")
|
||||
|
@ -80,9 +81,6 @@ postAddBoxComR = boxConfigurator $ do
|
|||
, ("chunksize", "10mb")
|
||||
]
|
||||
_ -> $(widgetFile "configurators/addbox.com")
|
||||
where
|
||||
setgroup r = liftAnnex $
|
||||
setStandardGroup (Remote.uuid r) TransferGroup
|
||||
#else
|
||||
postAddBoxComR = error "WebDAV not supported by this build"
|
||||
#endif
|
||||
|
@ -100,7 +98,7 @@ postEnableWebDAVR uuid = do
|
|||
getRemoteCredPairFor "webdav" c (WebDAV.davCreds uuid)
|
||||
case mcreds of
|
||||
Just creds -> webDAVConfigurator $ liftH $
|
||||
makeWebDavRemote enableSpecialRemote name creds (const noop) M.empty
|
||||
makeWebDavRemote enableSpecialRemote name creds M.empty
|
||||
Nothing
|
||||
| "box.com/" `isInfixOf` url ->
|
||||
boxConfigurator $ showform name url
|
||||
|
@ -115,7 +113,7 @@ postEnableWebDAVR uuid = do
|
|||
runFormPost $ renderBootstrap $ webDAVCredsAForm defcreds
|
||||
case result of
|
||||
FormSuccess input -> liftH $
|
||||
makeWebDavRemote enableSpecialRemote name (toCredPair input) (const noop) M.empty
|
||||
makeWebDavRemote enableSpecialRemote name (toCredPair input) M.empty
|
||||
_ -> do
|
||||
description <- liftAnnex $
|
||||
T.pack <$> Remote.prettyUUID uuid
|
||||
|
@ -125,13 +123,11 @@ postEnableWebDAVR _ = error "WebDAV not supported by this build"
|
|||
#endif
|
||||
|
||||
#ifdef WITH_WEBDAV
|
||||
makeWebDavRemote :: SpecialRemoteMaker -> String -> CredPair -> (Remote -> Handler ()) -> RemoteConfig -> Handler ()
|
||||
makeWebDavRemote maker name creds setup config = do
|
||||
makeWebDavRemote :: SpecialRemoteMaker -> RemoteName -> CredPair -> RemoteConfig -> Handler ()
|
||||
makeWebDavRemote maker name creds config = do
|
||||
liftIO $ WebDAV.setCredsEnv creds
|
||||
r <- liftAnnex $ addRemote $ maker name WebDAV.remote config
|
||||
setup r
|
||||
liftAssistant $ syncRemote r
|
||||
redirect $ EditNewCloudRepositoryR $ Remote.uuid r
|
||||
setupCloudRemote TransferGroup Nothing $
|
||||
maker name WebDAV.remote config
|
||||
|
||||
{- Only returns creds previously used for the same hostname. -}
|
||||
previouslyUsedWebDAVCreds :: String -> Annex (Maybe CredPair)
|
||||
|
|
|
@ -151,6 +151,8 @@ buddyListDisplay = do
|
|||
catMaybes . map (buddySummary pairedwith)
|
||||
<$> (getBuddyList <<~ buddyList)
|
||||
$(widgetFile "configurators/xmpp/buddylist")
|
||||
#else
|
||||
noop
|
||||
#endif
|
||||
where
|
||||
ident = "buddylist"
|
||||
|
|
|
@ -52,7 +52,7 @@ simplifyTransfers [] = []
|
|||
simplifyTransfers (x:[]) = [x]
|
||||
simplifyTransfers (v@(t1, _):r@((t2, _):l))
|
||||
| equivilantTransfer t1 t2 = simplifyTransfers (v:l)
|
||||
| otherwise = v : (simplifyTransfers r)
|
||||
| otherwise = v : simplifyTransfers r
|
||||
|
||||
{- Called by client to get a display of currently in process transfers.
|
||||
-
|
||||
|
@ -78,7 +78,7 @@ dashboard warnNoScript = do
|
|||
$(widgetFile "dashboard/main")
|
||||
|
||||
getDashboardR :: Handler Html
|
||||
getDashboardR = ifM (inFirstRun)
|
||||
getDashboardR = ifM inFirstRun
|
||||
( redirect ConfigurationR
|
||||
, page "" (Just DashBoard) $ dashboard True
|
||||
)
|
||||
|
@ -107,7 +107,7 @@ postFileBrowserR = void openFileBrowser
|
|||
{- Used by non-javascript browsers, where clicking on the link actually
|
||||
- opens this page, so we redirect back to the referrer. -}
|
||||
getFileBrowserR :: Handler ()
|
||||
getFileBrowserR = whenM openFileBrowser $ redirectBack
|
||||
getFileBrowserR = whenM openFileBrowser redirectBack
|
||||
|
||||
{- Opens the system file browser on the repo, or, as a fallback,
|
||||
- goes to a file:// url. Returns True if it's ok to redirect away
|
||||
|
@ -137,14 +137,17 @@ openFileBrowser = do
|
|||
{- Transfer controls. The GET is done in noscript mode and redirects back
|
||||
- to the referring page. The POST is called by javascript. -}
|
||||
getPauseTransferR :: Transfer -> Handler ()
|
||||
getPauseTransferR t = pauseTransfer t >> redirectBack
|
||||
getPauseTransferR = noscript postPauseTransferR
|
||||
postPauseTransferR :: Transfer -> Handler ()
|
||||
postPauseTransferR t = pauseTransfer t
|
||||
postPauseTransferR = pauseTransfer
|
||||
getStartTransferR :: Transfer -> Handler ()
|
||||
getStartTransferR t = startTransfer t >> redirectBack
|
||||
getStartTransferR = noscript postStartTransferR
|
||||
postStartTransferR :: Transfer -> Handler ()
|
||||
postStartTransferR t = startTransfer t
|
||||
postStartTransferR = startTransfer
|
||||
getCancelTransferR :: Transfer -> Handler ()
|
||||
getCancelTransferR t = cancelTransfer False t >> redirectBack
|
||||
getCancelTransferR = noscript postCancelTransferR
|
||||
postCancelTransferR :: Transfer -> Handler ()
|
||||
postCancelTransferR t = cancelTransfer False t
|
||||
postCancelTransferR = cancelTransfer False
|
||||
|
||||
noscript :: (Transfer -> Handler ()) -> Transfer -> Handler ()
|
||||
noscript a t = a t >> redirectBack
|
||||
|
|
|
@ -38,5 +38,5 @@ getLicenseR = do
|
|||
$(widgetFile "documentation/license")
|
||||
|
||||
getRepoGroupR :: Handler Html
|
||||
getRepoGroupR = page "About repository groups" (Just About) $ do
|
||||
getRepoGroupR = page "About repository groups" (Just About) $
|
||||
$(widgetFile "documentation/repogroup")
|
||||
|
|
|
@ -12,8 +12,8 @@
|
|||
|
||||
module Assistant.WebApp.Form where
|
||||
|
||||
import Types.Remote (RemoteConfigKey)
|
||||
import Assistant.WebApp.Types
|
||||
import Assistant.Gpg
|
||||
|
||||
import Yesod hiding (textField, passwordField)
|
||||
import Yesod.Form.Fields as F
|
||||
|
@ -75,9 +75,6 @@ withExpandableNote field (toggle, note) = withNote field $ [whamlet|
|
|||
where
|
||||
ident = "toggle_" ++ toggle
|
||||
|
||||
data EnableEncryption = HybridEncryption | SharedEncryption | NoEncryption
|
||||
deriving (Eq)
|
||||
|
||||
{- Adds a check box to an AForm to control encryption. -}
|
||||
#if MIN_VERSION_yesod(1,2,0)
|
||||
enableEncryptionField :: (RenderMessage site FormMessage) => AForm (HandlerT site IO) EnableEncryption
|
||||
|
@ -91,9 +88,3 @@ enableEncryptionField = areq (selectFieldList choices) "Encryption" (Just Shared
|
|||
[ ("Encrypt all data", SharedEncryption)
|
||||
, ("Disable encryption", NoEncryption)
|
||||
]
|
||||
|
||||
{- Generates Remote configuration for encryption. -}
|
||||
configureEncryption :: EnableEncryption -> (RemoteConfigKey, String)
|
||||
configureEncryption SharedEncryption = ("encryption", "shared")
|
||||
configureEncryption NoEncryption = ("encryption", "none")
|
||||
configureEncryption HybridEncryption = ("encryption", "hybrid")
|
||||
|
|
|
@ -5,17 +5,19 @@
|
|||
- Licensed under the GNU AGPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE QuasiQuotes, TemplateHaskell #-}
|
||||
{-# LANGUAGE QuasiQuotes, TemplateHaskell, OverloadedStrings #-}
|
||||
|
||||
module Assistant.WebApp.Gpg where
|
||||
|
||||
import Assistant.WebApp.Common
|
||||
import Assistant.Gpg
|
||||
import Utility.Gpg
|
||||
import Utility.UserInfo
|
||||
import qualified Git.Command
|
||||
import qualified Git.Remote
|
||||
import qualified Git.Construct
|
||||
import qualified Annex.Branch
|
||||
import qualified Git.GCrypt
|
||||
import qualified Remote.GCrypt as GCrypt
|
||||
import Assistant.MakeRemote
|
||||
import Logs.Remote
|
||||
|
||||
|
@ -25,27 +27,28 @@ gpgKeyDisplay :: KeyId -> Maybe UserId -> Widget
|
|||
gpgKeyDisplay keyid userid = [whamlet|
|
||||
<span title="key id #{keyid}">
|
||||
<i .icon-user></i> #
|
||||
^{displayname}
|
||||
$maybe name <- userid
|
||||
#{name}
|
||||
$nothing
|
||||
key id #{keyid}
|
||||
|]
|
||||
where
|
||||
displayname = case userid of
|
||||
Just name | not (null name) -> [whamlet|#{name}|]
|
||||
_ -> [whamlet|key id #{keyid}|]
|
||||
|
||||
{- Generates a gpg user id that is not used by any existing secret key -}
|
||||
newUserId :: IO UserId
|
||||
newUserId = do
|
||||
oldkeys <- secretKeys
|
||||
username <- myUserName
|
||||
let basekeyname = username ++ "'s git-annex encryption key"
|
||||
return $ Prelude.head $ filter (\n -> M.null $ M.filter (== n) oldkeys)
|
||||
( basekeyname
|
||||
: map (\n -> basekeyname ++ show n) ([2..] :: [Int])
|
||||
)
|
||||
genKeyModal :: Widget
|
||||
genKeyModal = $(widgetFile "configurators/genkeymodal")
|
||||
|
||||
isGcryptInstalled :: IO Bool
|
||||
isGcryptInstalled = inPath "git-remote-gcrypt"
|
||||
|
||||
whenGcryptInstalled :: Handler Html -> Handler Html
|
||||
whenGcryptInstalled a = ifM (liftIO isGcryptInstalled)
|
||||
( a
|
||||
, page "Need git-remote-gcrypt" (Just Configuration) $
|
||||
$(widgetFile "configurators/needgcrypt")
|
||||
)
|
||||
|
||||
withNewSecretKey :: (KeyId -> Handler Html) -> Handler Html
|
||||
withNewSecretKey use = do
|
||||
userid <- liftIO $ newUserId
|
||||
userid <- liftIO newUserId
|
||||
liftIO $ genSecretKey RSA "" userid maxRecommendedKeySize
|
||||
results <- M.keys . M.filter (== userid) <$> liftIO secretKeys
|
||||
case results of
|
||||
|
@ -60,16 +63,34 @@ withNewSecretKey use = do
|
|||
- branch from the gcrypt remote and merges it in, and then looks up
|
||||
- the name.
|
||||
-}
|
||||
getGCryptRemoteName :: UUID -> String -> Annex (Maybe Git.Remote.RemoteName)
|
||||
getGCryptRemoteName :: UUID -> String -> Annex Git.Remote.RemoteName
|
||||
getGCryptRemoteName u repoloc = do
|
||||
tmpremote <- uniqueRemoteName "tmpgcryptremote" 0 <$> gitRepo
|
||||
void $ inRepo $ Git.Command.runBool
|
||||
[Params "remote add", Param tmpremote, Param $ Git.GCrypt.urlPrefix ++ repoloc]
|
||||
mname <- ifM (inRepo $ Git.Command.runBool [Param "fetch", Param tmpremote])
|
||||
( do
|
||||
void $ Annex.Branch.forceUpdate
|
||||
void Annex.Branch.forceUpdate
|
||||
(M.lookup "name" <=< M.lookup u) <$> readRemoteLog
|
||||
, return Nothing
|
||||
)
|
||||
void $ inRepo $ Git.Remote.remove tmpremote
|
||||
return mname
|
||||
maybe missing return mname
|
||||
where
|
||||
missing = error $ "Cannot find configuration for the gcrypt remote at " ++ repoloc
|
||||
|
||||
checkGCryptRepoEncryption :: (Monad m, LiftAnnex m) => String -> m a -> m a -> m a
|
||||
checkGCryptRepoEncryption location notencrypted encrypted =
|
||||
dispatch =<< liftAnnex (inRepo $ Git.GCrypt.probeRepo location)
|
||||
where
|
||||
dispatch Git.GCrypt.Decryptable = encrypted
|
||||
dispatch Git.GCrypt.NotEncrypted = notencrypted
|
||||
dispatch Git.GCrypt.NotDecryptable =
|
||||
error "This git repository is encrypted with a GnuPG key that you do not have."
|
||||
|
||||
{- Gets the UUID of the gcrypt repo at a location, which may not exist.
|
||||
- Only works if the gcrypt repo was created as a git-annex remote. -}
|
||||
probeGCryptRemoteUUID :: String -> Annex (Maybe UUID)
|
||||
probeGCryptRemoteUUID repolocation = do
|
||||
r <- inRepo $ Git.Construct.fromRemoteLocation repolocation
|
||||
GCrypt.getGCryptUUID False r
|
||||
|
|
|
@ -80,7 +80,7 @@ getNotifierBuddyListR = notifierUrl BuddyListR getBuddyListBroadcaster
|
|||
getNotifierRepoListR :: RepoSelector -> Handler RepPlain
|
||||
getNotifierRepoListR reposelector = notifierUrl route getRepoListBroadcaster
|
||||
where
|
||||
route nid = RepoListR $ RepoListNotificationId nid reposelector
|
||||
route nid = RepoListR nid reposelector
|
||||
|
||||
getTransferBroadcaster :: Assistant NotificationBroadcaster
|
||||
getTransferBroadcaster = transferNotifier <$> getDaemonStatus
|
||||
|
|
|
@ -56,7 +56,7 @@ getSwitchToRepositoryR repo = do
|
|||
( return url
|
||||
, delayed $ waiturl urlfile
|
||||
)
|
||||
listening url = catchBoolIO $ fst <$> Url.exists url []
|
||||
listening url = catchBoolIO $ fst <$> Url.exists url [] Nothing
|
||||
delayed a = do
|
||||
threadDelay 100000 -- 1/10th of a second
|
||||
a
|
||||
|
|
|
@ -38,7 +38,7 @@ firstRunNavBar :: [NavBarItem]
|
|||
firstRunNavBar = [Configuration, About]
|
||||
|
||||
selectNavBar :: Handler [NavBarItem]
|
||||
selectNavBar = ifM (inFirstRun) (return firstRunNavBar, return defaultNavBar)
|
||||
selectNavBar = ifM inFirstRun (return firstRunNavBar, return defaultNavBar)
|
||||
|
||||
{- A standard page of the webapp, with a title, a sidebar, and that may
|
||||
- be highlighted on the navbar. -}
|
||||
|
|
|
@ -24,8 +24,10 @@ import Logs.Trust
|
|||
import Logs.Group
|
||||
import Config
|
||||
import Git.Config
|
||||
import Git.Remote
|
||||
import Assistant.Sync
|
||||
import Config.Cost
|
||||
import Utility.NotificationBroadcaster
|
||||
import qualified Git
|
||||
#ifdef WITH_XMPP
|
||||
#endif
|
||||
|
@ -33,6 +35,7 @@ import qualified Git
|
|||
import qualified Data.Map as M
|
||||
import qualified Data.Set as S
|
||||
import qualified Data.Text as T
|
||||
import Data.Function
|
||||
|
||||
data Actions
|
||||
= DisabledRepoActions
|
||||
|
@ -82,8 +85,8 @@ notWanted _ = False
|
|||
-
|
||||
- Returns a div, which will be inserted into the calling page.
|
||||
-}
|
||||
getRepoListR :: RepoListNotificationId -> Handler Html
|
||||
getRepoListR (RepoListNotificationId nid reposelector) = do
|
||||
getRepoListR :: NotificationId -> RepoSelector -> Handler Html
|
||||
getRepoListR nid reposelector = do
|
||||
waitNotifier getRepoListBroadcaster nid
|
||||
p <- widgetToPageContent $ repoListDisplay reposelector
|
||||
giveUrlRenderer $ [hamlet|^{pageBody p}|]
|
||||
|
@ -98,7 +101,7 @@ mainRepoSelector = RepoSelector
|
|||
|
||||
{- List of cloud repositories, configured and not. -}
|
||||
cloudRepoList :: Widget
|
||||
cloudRepoList = repoListDisplay $ RepoSelector
|
||||
cloudRepoList = repoListDisplay RepoSelector
|
||||
{ onlyCloud = True
|
||||
, onlyConfigured = False
|
||||
, includeHere = False
|
||||
|
@ -156,9 +159,10 @@ repoList reposelector
|
|||
else return l
|
||||
unconfigured = liftAnnex $ do
|
||||
m <- readRemoteLog
|
||||
g <- gitRepo
|
||||
map snd . catMaybes . filter selectedremote
|
||||
. map (findinfo m)
|
||||
<$> (trustExclude DeadTrusted $ M.keys m)
|
||||
. map (findinfo m g)
|
||||
<$> trustExclude DeadTrusted (M.keys m)
|
||||
selectedrepo r
|
||||
| Remote.readonly r = False
|
||||
| onlyCloud reposelector = Git.repoIsUrl (Remote.repo r) && not (isXMPPRemote r)
|
||||
|
@ -167,7 +171,7 @@ repoList reposelector
|
|||
selectedremote (Just (iscloud, _))
|
||||
| onlyCloud reposelector = iscloud
|
||||
| otherwise = True
|
||||
findinfo m u = case M.lookup "type" =<< M.lookup u m of
|
||||
findinfo m g u = case getconfig "type" of
|
||||
Just "rsync" -> val True EnableRsyncR
|
||||
Just "directory" -> val False EnableDirectoryR
|
||||
#ifdef WITH_S3
|
||||
|
@ -177,11 +181,19 @@ repoList reposelector
|
|||
#ifdef WITH_WEBDAV
|
||||
Just "webdav" -> val True EnableWebDAVR
|
||||
#endif
|
||||
Just "gcrypt" ->
|
||||
-- Skip gcrypt repos on removable drives;
|
||||
-- handled separately.
|
||||
case getconfig "gitrepo" of
|
||||
Just rr | remoteLocationIsUrl (parseRemoteLocation rr g) ->
|
||||
val True EnableSshGCryptR
|
||||
_ -> Nothing
|
||||
_ -> Nothing
|
||||
where
|
||||
getconfig k = M.lookup k =<< M.lookup u m
|
||||
val iscloud r = Just (iscloud, (u, DisabledRepoActions $ r u))
|
||||
list l = liftAnnex $ do
|
||||
let l' = nubBy (\x y -> fst x == fst y) l
|
||||
let l' = nubBy ((==) `on` fst) l
|
||||
l'' <- zip
|
||||
<$> Remote.prettyListUUIDs (map fst l')
|
||||
<*> pure l'
|
||||
|
@ -247,7 +259,7 @@ getRetryUnfinishedRepositoriesR = do
|
|||
redirect DashboardR
|
||||
where
|
||||
unstall r = do
|
||||
liftIO $ fixSshKeyPair
|
||||
liftIO fixSshKeyPair
|
||||
liftAnnex $ setConfig
|
||||
(remoteConfig (Remote.repo r) "ignore")
|
||||
(boolConfig False)
|
||||
|
|
|
@ -150,9 +150,6 @@ data RepoSelector = RepoSelector
|
|||
}
|
||||
deriving (Read, Show, Eq)
|
||||
|
||||
data RepoListNotificationId = RepoListNotificationId NotificationId RepoSelector
|
||||
deriving (Read, Show, Eq)
|
||||
|
||||
data RemovableDrive = RemovableDrive
|
||||
{ diskFree :: Maybe Integer
|
||||
, mountPoint :: Text
|
||||
|
@ -163,15 +160,6 @@ data RemovableDrive = RemovableDrive
|
|||
data RepoKey = RepoKey KeyId | NoRepoKey
|
||||
deriving (Read, Show, Eq, Ord)
|
||||
|
||||
{- Only needed to work around old-yesod bug that emits a warning message
|
||||
- when a route has two parameters. -}
|
||||
data FilePathAndUUID = FilePathAndUUID FilePath UUID
|
||||
deriving (Read, Show, Eq)
|
||||
|
||||
instance PathPiece FilePathAndUUID where
|
||||
toPathPiece = pack . show
|
||||
fromPathPiece = readish . unpack
|
||||
|
||||
instance PathPiece RemovableDrive where
|
||||
toPathPiece = pack . show
|
||||
fromPathPiece = readish . unpack
|
||||
|
@ -216,10 +204,6 @@ instance PathPiece PairKey where
|
|||
toPathPiece = pack . show
|
||||
fromPathPiece = readish . unpack
|
||||
|
||||
instance PathPiece RepoListNotificationId where
|
||||
toPathPiece = pack . show
|
||||
fromPathPiece = readish . unpack
|
||||
|
||||
instance PathPiece RepoSelector where
|
||||
toPathPiece = pack . show
|
||||
fromPathPiece = readish . unpack
|
||||
|
|
|
@ -20,19 +20,25 @@ import qualified Remote.List as Remote
|
|||
import qualified Assistant.Threads.Transferrer as Transferrer
|
||||
import Logs.Transfer
|
||||
import qualified Config
|
||||
import Config.Cost
|
||||
import Config.Files
|
||||
import Git.Config
|
||||
import Assistant.Threads.Watcher
|
||||
import Assistant.NamedThread
|
||||
import Types.StandardGroups
|
||||
import Git.Remote
|
||||
import Logs.PreferredContent
|
||||
import Assistant.MakeRemote
|
||||
|
||||
import qualified Data.Map as M
|
||||
import Control.Concurrent
|
||||
import System.Posix.Signals (signalProcessGroup, sigTERM, sigKILL)
|
||||
import System.Posix.Process (getProcessGroupIDOf)
|
||||
import Utility.Yesod
|
||||
|
||||
{- Use Nothing to change autocommit setting; or a remote to change
|
||||
- its sync setting. -}
|
||||
changeSyncable :: (Maybe Remote) -> Bool -> Handler ()
|
||||
changeSyncable :: Maybe Remote -> Bool -> Handler ()
|
||||
changeSyncable Nothing enable = do
|
||||
liftAnnex $ Config.setConfig key (boolConfig enable)
|
||||
liftIO . maybe noop (`throwTo` signal)
|
||||
|
@ -47,7 +53,7 @@ changeSyncable (Just r) True = do
|
|||
liftAssistant $ syncRemote r
|
||||
changeSyncable (Just r) False = do
|
||||
changeSyncFlag r False
|
||||
liftAssistant $ updateSyncRemotes
|
||||
liftAssistant updateSyncRemotes
|
||||
{- Stop all transfers to or from this remote.
|
||||
- XXX Can't stop any ongoing scan, or git syncs. -}
|
||||
void $ liftAssistant $ dequeueTransfers tofrom
|
||||
|
@ -60,7 +66,7 @@ changeSyncable (Just r) False = do
|
|||
changeSyncFlag :: Remote -> Bool -> Handler ()
|
||||
changeSyncFlag r enabled = liftAnnex $ do
|
||||
Config.setConfig key (boolConfig enabled)
|
||||
void $ Remote.remoteListRefresh
|
||||
void Remote.remoteListRefresh
|
||||
where
|
||||
key = Config.remoteConfig (Remote.repo r) "sync"
|
||||
|
||||
|
@ -118,3 +124,15 @@ startTransfer t = do
|
|||
|
||||
getCurrentTransfers :: Handler TransferMap
|
||||
getCurrentTransfers = currentTransfers <$> liftAssistant getDaemonStatus
|
||||
|
||||
{- Runs an action that creates or enables a cloud remote,
|
||||
- and finishes setting it up, then starts syncing with it,
|
||||
- and finishes by displaying the page to edit it. -}
|
||||
setupCloudRemote :: StandardGroup -> Maybe Cost -> Annex RemoteName -> Handler a
|
||||
setupCloudRemote defaultgroup mcost maker = do
|
||||
r <- liftAnnex $ addRemote maker
|
||||
liftAnnex $ do
|
||||
setStandardGroup (Remote.uuid r) defaultgroup
|
||||
maybe noop (Config.setRemoteCost r) mcost
|
||||
liftAssistant $ syncRemote r
|
||||
redirect $ EditNewCloudRepositoryR $ Remote.uuid r
|
||||
|
|
|
@ -26,7 +26,7 @@
|
|||
/config/repository/new/androidcamera AndroidCameraRepositoryR GET
|
||||
/config/repository/switcher RepositorySwitcherR GET
|
||||
/config/repository/switchto/#FilePath SwitchToRepositoryR GET
|
||||
/config/repository/combine/#FilePathAndUUID CombineRepositoryR GET
|
||||
/config/repository/combine/#FilePath/#UUID CombineRepositoryR GET
|
||||
/config/repository/edit/#UUID EditRepositoryR GET POST
|
||||
/config/repository/edit/new/#UUID EditNewRepositoryR GET POST
|
||||
/config/repository/edit/new/cloud/#UUID EditNewCloudRepositoryR GET POST
|
||||
|
@ -40,11 +40,15 @@
|
|||
/config/repository/add/drive/genkey/#RemovableDrive GenKeyForDriveR GET
|
||||
/config/repository/add/drive/finish/#RemovableDrive/#RepoKey FinishAddDriveR GET
|
||||
/config/repository/add/ssh AddSshR GET POST
|
||||
/config/repository/add/ssh/confirm/#SshData ConfirmSshR GET
|
||||
/config/repository/add/ssh/confirm/#SshData/#UUID ConfirmSshR GET
|
||||
/config/repository/add/ssh/retry/#SshData RetrySshR GET
|
||||
/config/repository/add/ssh/make/git/#SshData MakeSshGitR GET
|
||||
/config/repository/add/ssh/make/rsync/#SshData MakeSshRsyncR GET
|
||||
/config/repository/add/ssh/make/gcrypt/#SshData/#RepoKey MakeSshGCryptR GET
|
||||
/config/repository/add/ssh/combine/#SshData CombineSshR GET
|
||||
/config/repository/add/cloud/rsync.net AddRsyncNetR GET POST
|
||||
/config/repository/add/cloud/rsync.net/shared/#SshData MakeRsyncNetSharedR GET
|
||||
/config/repository/add/cloud/rsync.net/gcrypt/#SshData/#RepoKey MakeRsyncNetGCryptR GET
|
||||
/config/repository/add/cloud/S3 AddS3R GET POST
|
||||
/config/repository/add/cloud/IA AddIAR GET POST
|
||||
/config/repository/add/cloud/glacier AddGlacierR GET POST
|
||||
|
@ -63,6 +67,7 @@
|
|||
/config/repository/pair/xmpp/friend/finish/#PairKey FinishXMPPPairFriendR GET
|
||||
|
||||
/config/repository/enable/rsync/#UUID EnableRsyncR GET POST
|
||||
/config/repository/enable/gcrypt/#UUID EnableSshGCryptR GET POST
|
||||
/config/repository/enable/directory/#UUID EnableDirectoryR GET
|
||||
/config/repository/enable/S3/#UUID EnableS3R GET POST
|
||||
/config/repository/enable/IA/#UUID EnableIAR GET POST
|
||||
|
@ -87,7 +92,7 @@
|
|||
/buddylist/#NotificationId BuddyListR GET
|
||||
/notifier/buddylist NotifierBuddyListR GET
|
||||
|
||||
/repolist/#RepoListNotificationId RepoListR GET
|
||||
/repolist/#NotificationId/#RepoSelector RepoListR GET
|
||||
/notifier/repolist/#RepoSelector NotifierRepoListR GET
|
||||
|
||||
/alert/close/#AlertId CloseAlert GET
|
||||
|
|
|
@ -27,12 +27,12 @@ import qualified Types.Backend as B
|
|||
import Config
|
||||
|
||||
-- When adding a new backend, import it here and add it to the list.
|
||||
import qualified Backend.SHA
|
||||
import qualified Backend.Hash
|
||||
import qualified Backend.WORM
|
||||
import qualified Backend.URL
|
||||
|
||||
list :: [Backend]
|
||||
list = Backend.SHA.backends ++ Backend.WORM.backends ++ Backend.URL.backends
|
||||
list = Backend.Hash.backends ++ Backend.WORM.backends ++ Backend.URL.backends
|
||||
|
||||
{- List of backends in the order to try them when storing a new key. -}
|
||||
orderedList :: Annex [Backend]
|
||||
162 Backend/Hash.hs Normal file
@ -0,0 +1,162 @@
{- git-annex hashing backends
-
- Copyright 2011-2013 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

module Backend.Hash (backends) where

import Common.Annex
import qualified Annex
import Types.Backend
import Types.Key
import Types.KeySource
import Utility.Hash
import Utility.ExternalSHA

import qualified Build.SysConfig as SysConfig
import qualified Data.ByteString.Lazy as L
import Data.Char

data Hash = SHAHash HashSize | SkeinHash HashSize
type HashSize = Int

{- Order is slightly significant; want SHA256 first, and more general
- sizes earlier. -}
hashes :: [Hash]
hashes = concat
[ map SHAHash [256, 1, 512, 224, 384]
, map SkeinHash [256, 512]
]

{- The SHA256E backend is the default, so genBackendE comes first. -}
backends :: [Backend]
backends = catMaybes $ map genBackendE hashes ++ map genBackend hashes

genBackend :: Hash -> Maybe Backend
genBackend hash = Just Backend
{ name = hashName hash
, getKey = keyValue hash
, fsckKey = Just $ checkKeyChecksum hash
, canUpgradeKey = Just needsUpgrade
}

genBackendE :: Hash -> Maybe Backend
genBackendE hash = do
b <- genBackend hash
return $ b
{ name = hashNameE hash
, getKey = keyValueE hash
}

hashName :: Hash -> String
hashName (SHAHash size) = "SHA" ++ show size
hashName (SkeinHash size) = "SKEIN" ++ show size

hashNameE :: Hash -> String
hashNameE hash = hashName hash ++ "E"

{- A key is a hash of its contents. -}
|
||||
keyValue :: Hash -> KeySource -> Annex (Maybe Key)
|
||||
keyValue hash source = do
|
||||
let file = contentLocation source
|
||||
stat <- liftIO $ getFileStatus file
|
||||
let filesize = fromIntegral $ fileSize stat
|
||||
s <- hashFile hash file filesize
|
||||
return $ Just $ stubKey
|
||||
{ keyName = s
|
||||
, keyBackendName = hashName hash
|
||||
, keySize = Just filesize
|
||||
}
|
||||
|
||||
{- Extension preserving keys. -}
|
||||
keyValueE :: Hash -> KeySource -> Annex (Maybe Key)
|
||||
keyValueE hash source = keyValue hash source >>= maybe (return Nothing) addE
|
||||
where
|
||||
addE k = return $ Just $ k
|
||||
{ keyName = keyName k ++ selectExtension (keyFilename source)
|
||||
, keyBackendName = hashNameE hash
|
||||
}
|
||||
|
||||
selectExtension :: FilePath -> String
|
||||
selectExtension f
|
||||
| null es = ""
|
||||
| otherwise = intercalate "." ("":es)
|
||||
where
|
||||
es = filter (not . null) $ reverse $
|
||||
take 2 $ takeWhile shortenough $
|
||||
reverse $ split "." $ filter validExtension $ takeExtensions f
|
||||
shortenough e = length e <= 4 -- long enough for "jpeg"
|
||||
|
||||
{- A key's checksum is checked during fsck. -}
|
||||
checkKeyChecksum :: Hash -> Key -> FilePath -> Annex Bool
|
||||
checkKeyChecksum hash key file = do
|
||||
fast <- Annex.getState Annex.fast
|
||||
mstat <- liftIO $ catchMaybeIO $ getFileStatus file
|
||||
case (mstat, fast) of
|
||||
(Just stat, False) -> do
|
||||
let filesize = fromIntegral $ fileSize stat
|
||||
check <$> hashFile hash file filesize
|
||||
_ -> return True
|
||||
where
|
||||
expected = keyHash key
|
||||
check s
|
||||
| s == expected = True
|
||||
{- A bug caused checksums to be prefixed with \ in some
|
||||
- cases; still accept these as legal now that the bug has been
|
||||
- fixed. -}
|
||||
| '\\' : s == expected = True
|
||||
| otherwise = False
|
||||
|
||||
keyHash :: Key -> String
|
||||
keyHash key = dropExtensions (keyName key)
|
||||
|
||||
validExtension :: Char -> Bool
|
||||
validExtension c
|
||||
| isAlphaNum c = True
|
||||
| c == '.' = True
|
||||
| otherwise = False
|
||||
|
||||
{- Upgrade keys that have the \ prefix on their sha due to a bug, or
|
||||
- that contain non-alphanumeric characters in their extension. -}
|
||||
needsUpgrade :: Key -> Bool
|
||||
needsUpgrade key = "\\" `isPrefixOf` keyHash key ||
|
||||
any (not . validExtension) (takeExtensions $ keyName key)
|
||||
|
||||
hashFile :: Hash -> FilePath -> Integer -> Annex String
|
||||
hashFile hash file filesize = do
|
||||
showAction "checksum"
|
||||
liftIO $ go hash
|
||||
where
|
||||
go (SHAHash hashsize) = case shaHasher hashsize filesize of
|
||||
Left sha -> sha <$> L.readFile file
|
||||
Right command ->
|
||||
either error return
|
||||
=<< externalSHA command hashsize file
|
||||
go (SkeinHash hashsize) = skeinHasher hashsize <$> L.readFile file
|
||||
|
||||
shaHasher :: HashSize -> Integer -> Either (L.ByteString -> String) String
|
||||
shaHasher hashsize filesize
|
||||
| hashsize == 1 = use SysConfig.sha1 sha1
|
||||
| hashsize == 256 = use SysConfig.sha256 sha256
|
||||
| hashsize == 224 = use SysConfig.sha224 sha224
|
||||
| hashsize == 384 = use SysConfig.sha384 sha384
|
||||
| hashsize == 512 = use SysConfig.sha512 sha512
|
||||
| otherwise = error $ "bad sha size " ++ show hashsize
|
||||
where
|
||||
use Nothing hasher = Left $ show . hasher
|
||||
use (Just c) hasher
|
||||
{- Use builtin, but slightly slower hashing for
|
||||
- smallish files. Cryptohash benchmarks 90 to 101%
|
||||
- faster than external hashers, depending on the hash
|
||||
- and system. So there is no point forking an external
|
||||
- process unless the file is large. -}
|
||||
| filesize < 1048576 = use Nothing hasher
|
||||
| otherwise = Right c
|
||||
|
||||
skeinHasher :: HashSize -> (L.ByteString -> String)
|
||||
skeinHasher hashsize
|
||||
| hashsize == 256 = show . skein256
|
||||
| hashsize == 512 = show . skein512
|
||||
| otherwise = error $ "bad skein size " ++ show hashsize
146 Backend/SHA.hs
@ -1,146 +0,0 @@
|
|||
{- git-annex SHA backends
|
||||
-
|
||||
- Copyright 2011,2012 Joey Hess <joey@kitenet.net>
|
||||
-
|
||||
- Licensed under the GNU GPL version 3 or higher.
|
||||
-}
|
||||
|
||||
module Backend.SHA (backends) where
|
||||
|
||||
import Common.Annex
|
||||
import qualified Annex
|
||||
import Types.Backend
|
||||
import Types.Key
|
||||
import Types.KeySource
|
||||
import Utility.ExternalSHA
|
||||
|
||||
import qualified Build.SysConfig as SysConfig
|
||||
import Data.Digest.Pure.SHA
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
import Data.Char
|
||||
|
||||
type SHASize = Int
|
||||
|
||||
{- Order is slightly significant; want SHA256 first, and more general
|
||||
- sizes earlier. -}
|
||||
sizes :: [Int]
|
||||
sizes = [256, 1, 512, 224, 384]
|
||||
|
||||
{- The SHA256E backend is the default. -}
|
||||
backends :: [Backend]
|
||||
backends = catMaybes $ map genBackendE sizes ++ map genBackend sizes
|
||||
|
||||
genBackend :: SHASize -> Maybe Backend
|
||||
genBackend size = Just $ Backend
|
||||
{ name = shaName size
|
||||
, getKey = keyValue size
|
||||
, fsckKey = Just $ checkKeyChecksum size
|
||||
, canUpgradeKey = Just $ needsUpgrade
|
||||
}
|
||||
|
||||
genBackendE :: SHASize -> Maybe Backend
|
||||
genBackendE size = do
|
||||
b <- genBackend size
|
||||
return $ b
|
||||
{ name = shaNameE size
|
||||
, getKey = keyValueE size
|
||||
}
|
||||
|
||||
shaName :: SHASize -> String
|
||||
shaName size = "SHA" ++ show size
|
||||
|
||||
shaNameE :: SHASize -> String
|
||||
shaNameE size = shaName size ++ "E"
|
||||
|
||||
shaN :: SHASize -> FilePath -> Integer -> Annex String
|
||||
shaN shasize file filesize = do
|
||||
showAction "checksum"
|
||||
liftIO $ case shaCommand shasize filesize of
|
||||
Left sha -> sha <$> L.readFile file
|
||||
Right command ->
|
||||
either error return
|
||||
=<< externalSHA command shasize file
|
||||
|
||||
shaCommand :: SHASize -> Integer -> Either (L.ByteString -> String) String
|
||||
shaCommand shasize filesize
|
||||
| shasize == 1 = use SysConfig.sha1 sha1
|
||||
| shasize == 256 = use SysConfig.sha256 sha256
|
||||
| shasize == 224 = use SysConfig.sha224 sha224
|
||||
| shasize == 384 = use SysConfig.sha384 sha384
|
||||
| shasize == 512 = use SysConfig.sha512 sha512
|
||||
| otherwise = error $ "bad sha size " ++ show shasize
|
||||
where
|
||||
use Nothing sha = Left $ showDigest . sha
|
||||
use (Just c) sha
|
||||
{- use builtin, but slower sha for small files
|
||||
- benchmarking indicates it's faster up to
|
||||
- and slightly beyond 50 kb files -}
|
||||
| filesize < 51200 = use Nothing sha
|
||||
| otherwise = Right c
|
||||
|
||||
{- A key is a checksum of its contents. -}
|
||||
keyValue :: SHASize -> KeySource -> Annex (Maybe Key)
|
||||
keyValue shasize source = do
|
||||
let file = contentLocation source
|
||||
stat <- liftIO $ getFileStatus file
|
||||
let filesize = fromIntegral $ fileSize stat
|
||||
s <- shaN shasize file filesize
|
||||
return $ Just $ stubKey
|
||||
{ keyName = s
|
||||
, keyBackendName = shaName shasize
|
||||
, keySize = Just filesize
|
||||
}
|
||||
|
||||
{- Extension preserving keys. -}
|
||||
keyValueE :: SHASize -> KeySource -> Annex (Maybe Key)
|
||||
keyValueE size source = keyValue size source >>= maybe (return Nothing) addE
|
||||
where
|
||||
addE k = return $ Just $ k
|
||||
{ keyName = keyName k ++ selectExtension (keyFilename source)
|
||||
, keyBackendName = shaNameE size
|
||||
}
|
||||
|
||||
selectExtension :: FilePath -> String
|
||||
selectExtension f
|
||||
| null es = ""
|
||||
| otherwise = intercalate "." ("":es)
|
||||
where
|
||||
es = filter (not . null) $ reverse $
|
||||
take 2 $ takeWhile shortenough $
|
||||
reverse $ split "." $ filter validExtension $ takeExtensions f
|
||||
shortenough e = length e <= 4 -- long enough for "jpeg"
|
||||
|
||||
{- A key's checksum is checked during fsck. -}
|
||||
checkKeyChecksum :: SHASize -> Key -> FilePath -> Annex Bool
|
||||
checkKeyChecksum size key file = do
|
||||
fast <- Annex.getState Annex.fast
|
||||
mstat <- liftIO $ catchMaybeIO $ getFileStatus file
|
||||
case (mstat, fast) of
|
||||
(Just stat, False) -> do
|
||||
let filesize = fromIntegral $ fileSize stat
|
||||
check <$> shaN size file filesize
|
||||
_ -> return True
|
||||
where
|
||||
sha = keySha key
|
||||
check s
|
||||
| s == sha = True
|
||||
{- A bug caused checksums to be prefixed with \ in some
|
||||
- cases; still accept these as legal now that the bug has been
|
||||
- fixed. -}
|
||||
| '\\' : s == sha = True
|
||||
| otherwise = False
|
||||
|
||||
keySha :: Key -> String
|
||||
keySha key = dropExtensions (keyName key)
|
||||
|
||||
validExtension :: Char -> Bool
|
||||
validExtension c
|
||||
| isAlphaNum c = True
|
||||
| c == '.' = True
|
||||
| otherwise = False
|
||||
|
||||
{- Upgrade keys that have the \ prefix on their sha due to a bug, or
|
||||
- that contain non-alphanumeric characters in their extension. -}
|
||||
needsUpgrade :: Key -> Bool
|
||||
needsUpgrade key = "\\" `isPrefixOf` keySha key ||
|
||||
any (not . validExtension) (takeExtensions $ keyName key)
|
|
@ -294,6 +294,8 @@ expandExpressionSplice s lls = concat [before, spliced:padding, end]
|
|||
{- Tweaks code output by GHC in splices to actually build. Yipes. -}
|
||||
mangleCode :: String -> String
|
||||
mangleCode = flip_colon
|
||||
. remove_unnecessary_type_signatures
|
||||
. lambdaparenhack
|
||||
. lambdaparens
|
||||
. declaration_parens
|
||||
. case_layout
|
||||
|
@ -331,6 +333,12 @@ mangleCode = flip_colon
|
|||
preindent <- many1 $ oneOf " \n"
|
||||
string "\\ "
|
||||
lambdaparams <- restofline
|
||||
continuedlambdaparams <- many $ try $ do
|
||||
indent <- many1 $ char ' '
|
||||
p <- satisfy isLetter
|
||||
aram <- many $ satisfy isAlphaNum <|> oneOf "_"
|
||||
newline
|
||||
return $ indent ++ p:aram ++ "\n"
|
||||
indent <- many1 $ char ' '
|
||||
string "-> "
|
||||
firstline <- restofline
|
||||
|
@ -342,10 +350,46 @@ mangleCode = flip_colon
|
|||
return $ concat
|
||||
[ prefix:preindent
|
||||
, "(\\ " ++ lambdaparams ++ "\n"
|
||||
, concat continuedlambdaparams
|
||||
, indent ++ "-> "
|
||||
, lambdaparens $ intercalate "\n" (firstline:lambdalines)
|
||||
, ")\n"
|
||||
]
|
||||
|
||||
{- Hack to add missing parens in a specific case in yesod
|
||||
- static route code.
|
||||
-
|
||||
- StaticR
|
||||
- yesod_dispatch_env_a4iDV
|
||||
- (\ p_a4iE2 r_a4iE3
|
||||
- -> r_a4iE3 {Network.Wai.pathInfo = p_a4iE2}
|
||||
- xrest_a4iDT req_a4iDW)) }
|
||||
-
|
||||
- Need to add another paren around the lambda, and close it
|
||||
- before its parameters. lambdaparens misses this one because
|
||||
- there is already one paren present.
|
||||
-
|
||||
- FIXME: This is a hack. lambdaparens could just always add a
|
||||
- layer of parens even when a lambda seems to be in parent.
|
||||
-}
|
||||
lambdaparenhack = parsecAndReplace $ do
|
||||
indent <- many1 $ char ' '
|
||||
staticr <- string "StaticR"
|
||||
newline
|
||||
string indent
|
||||
yesod_dispatch_env <- restofline
|
||||
string indent
|
||||
lambdaprefix <- string "(\\ "
|
||||
l1 <- restofline
|
||||
string indent
|
||||
lambdaarrow <- string " ->"
|
||||
l2 <- restofline
|
||||
return $ unlines
|
||||
[ indent ++ staticr
|
||||
, indent ++ yesod_dispatch_env
|
||||
, indent ++ "(" ++ lambdaprefix ++ l1
|
||||
, indent ++ lambdaarrow ++ l2 ++ ")"
|
||||
]
|
||||
|
||||
restofline = manyTill (noneOf "\n") newline
|
||||
|
||||
|
@ -439,6 +483,19 @@ mangleCode = flip_colon
|
|||
- declarations. -}
|
||||
declaration_parens = replace "StaticR Route Static" "StaticR (Route Static)"
|
||||
|
||||
{- A type signature is sometimes given for an entire lambda,
|
||||
- which is not properly parenthesized or laid out. This is a
|
||||
- hack to remove one specific case where this happens and the
|
||||
- signature is easily inferred, so is just removed.
|
||||
-}
|
||||
remove_unnecessary_type_signatures = parsecAndReplace $ do
|
||||
string " ::"
|
||||
newline
|
||||
many1 $ char ' '
|
||||
string "Text.Css.Block Text.Css.Resolved"
|
||||
newline
|
||||
return ""
|
||||
|
||||
{- GHC may add full package and version qualifications for
|
||||
- symbols from unimported modules. We don't want these.
|
||||
-
|
||||
|
|
|
@ -77,7 +77,7 @@ start file = ifAnnexed file addpresent add
|
|||
-- is present but not yet added to git
|
||||
showStart "add" file
|
||||
liftIO $ removeFile file
|
||||
next $ next $ cleanup file key =<< inAnnex key
|
||||
next $ next $ cleanup file key Nothing =<< inAnnex key
|
||||
|
||||
{- The file that's being added is locked down before a key is generated,
|
||||
- to prevent it from being modified in between. This lock down is not
|
||||
|
@ -98,13 +98,13 @@ start file = ifAnnexed file addpresent add
|
|||
- Lockdown can fail if a file gets deleted, and Nothing will be returned.
|
||||
-}
|
||||
lockDown :: FilePath -> Annex (Maybe KeySource)
|
||||
lockDown file = ifM (crippledFileSystem)
|
||||
lockDown file = ifM crippledFileSystem
|
||||
( liftIO $ catchMaybeIO nohardlink
|
||||
, do
|
||||
tmp <- fromRepo gitAnnexTmpDir
|
||||
createAnnexDirectory tmp
|
||||
unlessM (isDirect) $ liftIO $
|
||||
void $ tryIO $ preventWrite file
|
||||
unlessM isDirect $
|
||||
void $ liftIO $ tryIO $ preventWrite file
|
||||
liftIO $ catchMaybeIO $ do
|
||||
(tmpfile, h) <- openTempFile tmp $
|
||||
relatedTemplate $ takeFileName file
|
||||
|
@ -115,7 +115,7 @@ lockDown file = ifM (crippledFileSystem)
|
|||
where
|
||||
nohardlink = do
|
||||
cache <- genInodeCache file
|
||||
return $ KeySource
|
||||
return KeySource
|
||||
{ keyFilename = file
|
||||
, contentLocation = file
|
||||
, inodeCache = cache
|
||||
|
@ -123,7 +123,7 @@ lockDown file = ifM (crippledFileSystem)
|
|||
withhardlink tmpfile = do
|
||||
createLink file tmpfile
|
||||
cache <- genInodeCache tmpfile
|
||||
return $ KeySource
|
||||
return KeySource
|
||||
{ keyFilename = file
|
||||
, contentLocation = tmpfile
|
||||
, inodeCache = cache
|
||||
|
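lockDown above drops the file's write bit (preventWrite) and, where the filesystem is not crippled, hard-links the file into git-annex's temp directory, so that renaming or replacing the original mid-add cannot change the content that gets keyed. A minimal standalone sketch of that hard-link step, assuming a POSIX system; lockdownLink and the paths are illustrative, not git-annex code.

import Control.Monad (when)
import System.Directory (createDirectoryIfMissing, doesFileExist, removeFile)
import System.FilePath (takeFileName, (</>))
import System.Posix.Files (createLink)

-- Hard-link the file into tmpdir and return the link; hashing then reads via
-- the link, so a rename or replacement of the original no longer matters.
lockdownLink :: FilePath -> FilePath -> IO FilePath
lockdownLink tmpdir file = do
    createDirectoryIfMissing True tmpdir
    let linked = tmpdir </> takeFileName file
    exists <- doesFileExist linked
    when exists $ removeFile linked
    createLink file linked  -- same inode, so no data is copied
    return linked

main :: IO ()
main = do
    writeFile "example.dat" "some content\n"
    linked <- lockdownLink "lockdown-tmp" "example.dat"
    readFile linked >>= putStr  -- reads go through the hard link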
@ -134,8 +134,8 @@ lockDown file = ifM (crippledFileSystem)
|
|||
- In direct mode, leaves the file alone, and just updates bookkeeping
|
||||
- information.
|
||||
-}
|
||||
ingest :: (Maybe KeySource) -> Annex (Maybe Key)
|
||||
ingest Nothing = return Nothing
|
||||
ingest :: Maybe KeySource -> Annex (Maybe Key, Maybe InodeCache)
|
||||
ingest Nothing = return (Nothing, Nothing)
|
||||
ingest (Just source) = do
|
||||
backend <- chooseBackend $ keyFilename source
|
||||
k <- genKey source backend
|
||||
|
@ -147,24 +147,24 @@ ingest (Just source) = do
|
|||
where
|
||||
go k cache = ifM isDirect ( godirect k cache , goindirect k cache )
|
||||
|
||||
goindirect (Just (key, _)) _ = do
|
||||
goindirect (Just (key, _)) mcache = do
|
||||
catchAnnex (moveAnnex key $ contentLocation source)
|
||||
(undo (keyFilename source) key)
|
||||
liftIO $ nukeFile $ keyFilename source
|
||||
return $ Just key
|
||||
return $ (Just key, mcache)
|
||||
goindirect Nothing _ = failure "failed to generate a key"
|
||||
|
||||
godirect (Just (key, _)) (Just cache) = do
|
||||
addInodeCache key cache
|
||||
finishIngestDirect key source
|
||||
return $ Just key
|
||||
return $ (Just key, Just cache)
|
||||
godirect _ _ = failure "failed to generate a key"
|
||||
|
||||
failure msg = do
|
||||
warning $ keyFilename source ++ " " ++ msg
|
||||
when (contentLocation source /= keyFilename source) $
|
||||
liftIO $ nukeFile $ contentLocation source
|
||||
return Nothing
|
||||
return (Nothing, Nothing)
|
||||
|
||||
finishIngestDirect :: Key -> KeySource -> Annex ()
|
||||
finishIngestDirect key source = do
|
||||
|
@ -178,9 +178,10 @@ finishIngestDirect key source = do
|
|||
addContentWhenNotPresent key (keyFilename source)
|
||||
|
||||
perform :: FilePath -> CommandPerform
|
||||
perform file =
|
||||
maybe stop (\key -> next $ cleanup file key True)
|
||||
=<< ingest =<< lockDown file
|
||||
perform file = lockDown file >>= ingest >>= go
|
||||
where
|
||||
go (Just key, cache) = next $ cleanup file key cache True
|
||||
go (Nothing, _) = stop
|
||||
|
||||
{- On error, put the file back so it doesn't seem to have vanished.
|
||||
- This can be called before or after the symlink is in place. -}
|
||||
|
@ -199,18 +200,17 @@ undo file key e = do
|
|||
liftIO $ moveFile src file
|
||||
|
||||
{- Creates the symlink to the annexed content, returns the link target. -}
|
||||
link :: FilePath -> Key -> Bool -> Annex String
|
||||
link file key hascontent = flip catchAnnex (undo file key) $ do
|
||||
link :: FilePath -> Key -> Maybe InodeCache -> Annex String
|
||||
link file key mcache = flip catchAnnex (undo file key) $ do
|
||||
l <- inRepo $ gitAnnexLink file key
|
||||
replaceFile file $ makeAnnexLink l
|
||||
|
||||
#ifndef __ANDROID__
|
||||
when hascontent $ do
|
||||
-- touch the symlink to have the same mtime as the
|
||||
-- file it points to
|
||||
liftIO $ do
|
||||
mtime <- modificationTime <$> getFileStatus file
|
||||
touch file (TimeSpec mtime) False
|
||||
-- touch symlink to have same time as the original file,
|
||||
-- as provided in the InodeCache
|
||||
case mcache of
|
||||
Just c -> liftIO $ touch file (TimeSpec $ inodeCacheToMtime c) False
|
||||
Nothing -> noop
|
||||
#endif
|
||||
|
||||
return l
|
||||
|
@ -224,28 +224,28 @@ link file key hascontent = flip catchAnnex (undo file key) $ do
|
|||
- Also, using git add allows it to skip gitignored files, unless forced
|
||||
- to include them.
|
||||
-}
|
||||
addLink :: FilePath -> Key -> Bool -> Annex ()
|
||||
addLink file key hascontent = ifM (coreSymlinks <$> Annex.getGitConfig)
|
||||
addLink :: FilePath -> Key -> Maybe InodeCache -> Annex ()
|
||||
addLink file key mcache = ifM (coreSymlinks <$> Annex.getGitConfig)
|
||||
( do
|
||||
_ <- link file key hascontent
|
||||
_ <- link file key mcache
|
||||
params <- ifM (Annex.getState Annex.force)
|
||||
( return [Param "-f"]
|
||||
, return []
|
||||
)
|
||||
Annex.Queue.addCommand "add" (params++[Param "--"]) [file]
|
||||
, do
|
||||
l <- link file key hascontent
|
||||
l <- link file key mcache
|
||||
addAnnexLink l file
|
||||
)
|
||||
|
||||
cleanup :: FilePath -> Key -> Bool -> CommandCleanup
|
||||
cleanup file key hascontent = do
|
||||
cleanup :: FilePath -> Key -> Maybe InodeCache -> Bool -> CommandCleanup
|
||||
cleanup file key mcache hascontent = do
|
||||
when hascontent $
|
||||
logStatus key InfoPresent
|
||||
ifM (isDirect <&&> pure hascontent)
|
||||
( do
|
||||
l <- inRepo $ gitAnnexLink file key
|
||||
stageSymlink file =<< hashSymlink l
|
||||
, addLink file key hascontent
|
||||
, addLink file key mcache
|
||||
)
|
||||
return True
|
||||
|
|
|
@ -29,7 +29,7 @@ start = startUnused "addunused" perform
|
|||
perform :: Key -> CommandPerform
|
||||
perform key = next $ do
|
||||
logStatus key InfoPresent
|
||||
Command.Add.addLink file key False
|
||||
Command.Add.addLink file key Nothing
|
||||
return True
|
||||
where
|
||||
file = "unused." ++ key2file key
|
||||
|
|
|
@ -17,8 +17,8 @@ import Backend
|
|||
import qualified Command.Add
|
||||
import qualified Annex
|
||||
import qualified Annex.Queue
|
||||
import qualified Annex.Url as Url
|
||||
import qualified Backend.URL
|
||||
import qualified Utility.Url as Url
|
||||
import Annex.Content
|
||||
import Logs.Web
|
||||
import qualified Option
|
||||
|
@ -123,7 +123,7 @@ perform relaxed url file = ifAnnexed file addurl geturl
|
|||
next $ return True
|
||||
| otherwise = do
|
||||
headers <- getHttpHeaders
|
||||
ifM (liftIO $ Url.check url headers $ keySize key)
|
||||
ifM (Url.withUserAgent $ Url.check url headers $ keySize key)
|
||||
( do
|
||||
setUrlPresent key url
|
||||
next $ return True
|
||||
|
@ -174,7 +174,7 @@ download url file = do
|
|||
size <- ifM (liftIO $ isJust <$> checkDaemon pidfile)
|
||||
( do
|
||||
headers <- getHttpHeaders
|
||||
liftIO $ snd <$> Url.exists url headers
|
||||
snd <$> Url.withUserAgent (Url.exists url headers)
|
||||
, return Nothing
|
||||
)
|
||||
Backend.URL.fromUrl url size
|
||||
|
@ -189,7 +189,7 @@ cleanup url file key mtmp = do
|
|||
when (isJust mtmp) $
|
||||
logStatus key InfoPresent
|
||||
setUrlPresent key url
|
||||
Command.Add.addLink file key False
|
||||
Command.Add.addLink file key Nothing
|
||||
whenM isDirect $ do
|
||||
void $ addAssociatedFile key file
|
||||
{- For moveAnnex to work in direct mode, the symlink
|
||||
|
@ -203,7 +203,7 @@ nodownload relaxed url file = do
|
|||
headers <- getHttpHeaders
|
||||
(exists, size) <- if relaxed
|
||||
then pure (True, Nothing)
|
||||
else liftIO $ Url.exists url headers
|
||||
else Url.withUserAgent $ Url.exists url headers
|
||||
if exists
|
||||
then do
|
||||
key <- Backend.URL.fromUrl url size
|
||||
|
|
|
@@ -10,6 +10,8 @@ module Command.ConfigList where
import Common.Annex
import Command
import Annex.UUID
import qualified Git.Config
import Remote.GCrypt (coreGCryptId)

def :: [Command]
def = [noCommit $ command "configlist" paramNothing seek

@@ -21,5 +23,8 @@ seek = [withNothing start]
start :: CommandStart
start = do
u <- getUUID
liftIO $ putStrLn $ "annex.uuid=" ++ fromUUID u
showConfig "annex.uuid" $ fromUUID u
showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
stop
where
showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v

@ -7,6 +7,8 @@
|
|||
|
||||
module Command.Direct where
|
||||
|
||||
import Control.Exception.Extensible
|
||||
|
||||
import Common.Annex
|
||||
import Command
|
||||
import qualified Git
|
||||
|
@ -15,6 +17,7 @@ import qualified Git.LsFiles
|
|||
import Config
|
||||
import Annex.Direct
|
||||
import Annex.Version
|
||||
import Annex.Exception
|
||||
|
||||
def :: [Command]
|
||||
def = [notBareRepo $ noDaemonRunning $
|
||||
|
@ -51,10 +54,17 @@ perform = do
|
|||
Nothing -> noop
|
||||
Just a -> do
|
||||
showStart "direct" f
|
||||
a
|
||||
showEndOk
|
||||
r <- tryAnnex a
|
||||
case r of
|
||||
Left e -> warnlocked e
|
||||
Right _ -> showEndOk
|
||||
return Nothing
|
||||
|
||||
warnlocked :: SomeException -> Annex ()
|
||||
warnlocked e = do
|
||||
warning $ show e
|
||||
warning "leaving this file as-is; correct this problem and run git annex fsck on it"
|
||||
|
||||
cleanup :: CommandCleanup
|
||||
cleanup = do
|
||||
showStart "direct" ""
|
||||
|
|
|
@ -43,7 +43,7 @@ unknownNameError prefix = do
|
|||
error $ prefix ++
|
||||
if null names
|
||||
then ""
|
||||
else " Known special remotes: " ++ intercalate " " names
|
||||
else " Known special remotes: " ++ unwords names
|
||||
|
||||
perform :: RemoteType -> UUID -> R.RemoteConfig -> CommandPerform
|
||||
perform t u c = do
|
||||
|
|
|
@ -104,7 +104,7 @@ withIncremental = withValue $ do
|
|||
Nothing -> noop
|
||||
Just started -> do
|
||||
now <- liftIO getPOSIXTime
|
||||
when (now - realToFrac started >= delta) $
|
||||
when (now - realToFrac started >= delta)
|
||||
resetStartTime
|
||||
return True
|
||||
|
||||
|
@ -187,7 +187,7 @@ performAll key backend = check
|
|||
]
|
||||
|
||||
check :: [Annex Bool] -> Annex Bool
|
||||
check cs = all id <$> sequence cs
|
||||
check cs = and <$> sequence cs
|
||||
|
||||
{- Checks that the file's link points correctly to the content.
|
||||
-
|
||||
|
@ -225,7 +225,7 @@ verifyLocationLog key desc = do
|
|||
|
||||
{- In direct mode, modified files will show up as not present,
|
||||
- but that is expected and not something to do anything about. -}
|
||||
if (direct && not present)
|
||||
if direct && not present
|
||||
then return True
|
||||
else verifyLocationLog' key desc present u (logChange key u)
|
||||
|
||||
|
@ -345,7 +345,7 @@ checkBackend backend key mfile = go =<< isDirect
|
|||
checkBackendRemote :: Backend -> Key -> Remote -> Maybe FilePath -> Annex Bool
|
||||
checkBackendRemote backend key remote = maybe (return True) go
|
||||
where
|
||||
go file = checkBackendOr (badContentRemote remote) backend key file
|
||||
go = checkBackendOr (badContentRemote remote) backend key
|
||||
|
||||
checkBackendOr :: (Key -> Annex String) -> Backend -> Key -> FilePath -> Annex Bool
|
||||
checkBackendOr bad backend key file =
|
||||
|
@ -406,7 +406,7 @@ badContentDirect :: FilePath -> Key -> Annex String
|
|||
badContentDirect file key = do
|
||||
void $ liftIO $ catchMaybeIO $ touchFile file
|
||||
logStatus key InfoMissing
|
||||
return $ "left in place for you to examine"
|
||||
return "left in place for you to examine"
|
||||
|
||||
badContentRemote :: Remote -> Key -> Annex String
|
||||
badContentRemote remote key = do
|
||||
|
|
35
Command/GCryptSetup.hs
Normal file
@ -0,0 +1,35 @@
|
|||
{- git-annex command
|
||||
-
|
||||
- Copyright 2013 Joey Hess <joey@kitenet.net>
|
||||
-
|
||||
- Licensed under the GNU GPL version 3 or higher.
|
||||
-}
|
||||
|
||||
module Command.GCryptSetup where
|
||||
|
||||
import Common.Annex
|
||||
import Command
|
||||
import Annex.UUID
|
||||
import qualified Remote.GCrypt
|
||||
import qualified Git
|
||||
|
||||
def :: [Command]
|
||||
def = [dontCheck repoExists $ noCommit $
|
||||
command "gcryptsetup" paramValue seek
|
||||
SectionPlumbing "sets up gcrypt repository"]
|
||||
|
||||
seek :: [CommandSeek]
|
||||
seek = [withStrings start]
|
||||
|
||||
start :: String -> CommandStart
|
||||
start gcryptid = next $ next $ do
|
||||
g <- gitRepo
|
||||
u <- getUUID
|
||||
gu <- Remote.GCrypt.getGCryptUUID True g
|
||||
if u == NoUUID && gu == Nothing
|
||||
then if Git.repoIsLocalBare g
|
||||
then do
|
||||
void $ Remote.GCrypt.setupRepo gcryptid g
|
||||
return True
|
||||
else error "cannot use gcrypt in a non-bare repository"
|
||||
else error "gcryptsetup permission denied"
|
|
@ -75,7 +75,7 @@ getKeyFile key afile dest = dispatch =<< Remote.keyPossibilities key
|
|||
( docopy r (trycopy full rs)
|
||||
, trycopy full rs
|
||||
)
|
||||
showlocs = Remote.showLocations key [] $
|
||||
showlocs = Remote.showLocations key []
|
||||
"No other repository is known to contain the file."
|
||||
-- This check is to avoid an ugly message if a remote is a
|
||||
-- drive that is not mounted.
|
||||
|
|
|
@ -17,7 +17,7 @@ import Data.Time.Clock
|
|||
import Common.Annex
|
||||
import qualified Annex
|
||||
import Command
|
||||
import qualified Utility.Url as Url
|
||||
import qualified Annex.Url as Url
|
||||
import Logs.Web
|
||||
import qualified Option
|
||||
import qualified Utility.Format
|
||||
|
@ -50,8 +50,7 @@ perform relaxed cache url = do
|
|||
v <- findEnclosures url
|
||||
case v of
|
||||
Just l | not (null l) -> do
|
||||
ok <- all id
|
||||
<$> mapM (downloadEnclosure relaxed cache) l
|
||||
ok <- and <$> mapM (downloadEnclosure relaxed cache) l
|
||||
unless ok $
|
||||
feedProblem url "problem downloading item"
|
||||
next $ cleanup url True
|
||||
|
@ -103,9 +102,10 @@ findEnclosures url = extract <$> downloadFeed url
|
|||
downloadFeed :: URLString -> Annex (Maybe Feed)
|
||||
downloadFeed url = do
|
||||
showOutput
|
||||
ua <- Url.getUserAgent
|
||||
liftIO $ withTmpFile "feed" $ \f h -> do
|
||||
fileEncoding h
|
||||
ifM (Url.download url [] [] f)
|
||||
ifM (Url.download url [] [] f ua)
|
||||
( liftIO $ parseFeedString <$> hGetContentsStrict h
|
||||
, return Nothing
|
||||
)
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
module Command.Indirect where
|
||||
|
||||
import System.PosixCompat.Files
|
||||
import Control.Exception.Extensible
|
||||
|
||||
import Common.Annex
|
||||
import Command
|
||||
|
@ -22,7 +23,9 @@ import Annex.Content
|
|||
import Annex.CatFile
|
||||
import Annex.Version
|
||||
import Annex.Perms
|
||||
import Annex.Exception
|
||||
import Init
|
||||
import qualified Command.Add
|
||||
|
||||
def :: [Command]
|
||||
def = [notBareRepo $ noDaemonRunning $
|
||||
|
@ -46,7 +49,7 @@ start = ifM isDirect
|
|||
perform :: CommandPerform
|
||||
perform = do
|
||||
showStart "commit" ""
|
||||
whenM (stageDirect) $ do
|
||||
whenM stageDirect $ do
|
||||
showOutput
|
||||
void $ inRepo $ Git.Command.runBool
|
||||
[ Param "commit"
|
||||
|
@ -87,15 +90,24 @@ perform = do
|
|||
thawContentDir =<< calcRepo (gitAnnexLocation k)
|
||||
cleandirect k -- clean before content directory gets frozen
|
||||
whenM (liftIO $ not . isSymbolicLink <$> getSymbolicLinkStatus f) $ do
|
||||
moveAnnex k f
|
||||
l <- inRepo $ gitAnnexLink f k
|
||||
liftIO $ createSymbolicLink l f
|
||||
v <-tryAnnexIO (moveAnnex k f)
|
||||
case v of
|
||||
Right _ -> do
|
||||
l <- inRepo $ gitAnnexLink f k
|
||||
liftIO $ createSymbolicLink l f
|
||||
Left e -> catchAnnex (Command.Add.undo f k e)
|
||||
warnlocked
|
||||
showEndOk
|
||||
|
||||
warnlocked :: SomeException -> Annex ()
|
||||
warnlocked e = do
|
||||
warning $ show e
|
||||
warning "leaving this file as-is; correct this problem and run git annex add on it"
|
||||
|
||||
cleandirect k = do
|
||||
liftIO . nukeFile =<< calcRepo (gitAnnexInodeCache k)
|
||||
liftIO . nukeFile =<< calcRepo (gitAnnexMapping k)
|
||||
|
||||
|
||||
cleanup :: CommandCleanup
|
||||
cleanup = do
|
||||
setVersion defaultVersion
|
||||
|
|
|
@ -22,6 +22,7 @@ import Logs.UUID
|
|||
import Annex.UUID
|
||||
import qualified Option
|
||||
import qualified Annex
|
||||
import Git.Remote
|
||||
|
||||
def :: [Command]
|
||||
def = [noCommit $ withOptions [allrepos] $ command "list" paramPaths seek
|
||||
|
@ -68,13 +69,12 @@ start l file (key, _) = do
|
|||
liftIO $ putStrLn $ format (map (\(u, _, t) -> (t, S.member u ls)) l) file
|
||||
stop
|
||||
|
||||
type RemoteName = String
|
||||
type Present = Bool
|
||||
|
||||
header :: [(RemoteName, TrustLevel)] -> String
|
||||
header remotes = (unlines $ zipWith formatheader [0..] remotes) ++ (pipes (length remotes))
|
||||
header remotes = unlines (zipWith formatheader [0..] remotes) ++ pipes (length remotes)
|
||||
where
|
||||
formatheader n (remotename, trustlevel) = (pipes n) ++ remotename ++ (trust trustlevel)
|
||||
formatheader n (remotename, trustlevel) = pipes n ++ remotename ++ trust trustlevel
|
||||
pipes = flip replicate '|'
|
||||
trust UnTrusted = " (untrusted)"
|
||||
trust _ = ""
|
||||
|
|
|
@ -20,7 +20,7 @@ import qualified Annex
|
|||
import Annex.UUID
|
||||
import Logs.UUID
|
||||
import Logs.Trust
|
||||
import Remote.Helper.Ssh
|
||||
import qualified Remote.Helper.Ssh as Ssh
|
||||
import qualified Utility.Dot as Dot
|
||||
|
||||
-- a link from the first repository to the second (its remote)
|
||||
|
@ -203,9 +203,9 @@ tryScan r
|
|||
where
|
||||
p = proc cmd $ toCommand params
|
||||
|
||||
configlist = onRemote r (pipedconfig, Nothing) "configlist" [] []
|
||||
configlist = Ssh.onRemote r (pipedconfig, Nothing) "configlist" [] []
|
||||
manualconfiglist = do
|
||||
sshparams <- sshToRepo r [Param sshcmd]
|
||||
sshparams <- Ssh.toRepo r [Param sshcmd]
|
||||
liftIO $ pipedconfig "ssh" sshparams
|
||||
where
|
||||
sshcmd = cddir ++ " && " ++
|
||||
|
|
|
@ -38,7 +38,7 @@ start :: Maybe Remote -> Maybe Remote -> Bool -> FilePath -> (Key, Backend) -> C
|
|||
start to from move file (key, _) = start' to from move (Just file) key
|
||||
|
||||
startKey :: Maybe Remote -> Maybe Remote -> Bool -> Key -> CommandStart
|
||||
startKey to from move key = start' to from move Nothing key
|
||||
startKey to from move = start' to from move Nothing
|
||||
|
||||
start' :: Maybe Remote -> Maybe Remote -> Bool -> AssociatedFile -> Key -> CommandStart
|
||||
start' to from move afile key = do
|
||||
|
|
|
@ -24,7 +24,7 @@ def = [command "pre-commit" paramPaths seek SectionPlumbing
|
|||
seek :: [CommandSeek]
|
||||
seek =
|
||||
-- fix symlinks to files being committed
|
||||
[ whenNotDirect $ withFilesToBeCommitted $ whenAnnexed $ Command.Fix.start
|
||||
[ whenNotDirect $ withFilesToBeCommitted $ whenAnnexed Command.Fix.start
|
||||
-- inject unlocked files into the annex
|
||||
, whenNotDirect $ withFilesUnlockedToBeCommitted startIndirect
|
||||
-- update direct mode mappings for committed files
|
||||
|
|
|
@ -66,6 +66,6 @@ cleanup file oldkey newkey = do
|
|||
|
||||
-- Update symlink to use the new key.
|
||||
liftIO $ removeFile file
|
||||
Command.Add.addLink file newkey True
|
||||
Command.Add.addLink file newkey Nothing
|
||||
logStatus newkey InfoPresent
|
||||
return True
|
||||
|
|
|
@ -32,7 +32,7 @@ seek = [withKeys start]
|
|||
start :: Key -> CommandStart
|
||||
start key = ifM (inAnnex key)
|
||||
( error "key is already present in annex"
|
||||
, fieldTransfer Download key $ \_p -> do
|
||||
, fieldTransfer Download key $ \_p ->
|
||||
ifM (getViaTmp key go)
|
||||
( do
|
||||
-- forcibly quit after receiving one key,
|
||||
|
@ -72,7 +72,18 @@ start key = ifM (inAnnex key)
|
|||
return $ size == size'
|
||||
if oksize
|
||||
then case Backend.maybeLookupBackendName (Types.Key.keyBackendName key) of
|
||||
Nothing -> return False
|
||||
Just backend -> maybe (return True) (\a -> a key tmp)
|
||||
Nothing -> do
|
||||
warning "recvkey: received key from direct mode repository using unknown backend; cannot check; discarding"
|
||||
return False
|
||||
Just backend -> maybe (return True) runfsck
|
||||
(Types.Backend.fsckKey backend)
|
||||
else return False
|
||||
else do
|
||||
warning "recvkey: received key with wrong size; discarding"
|
||||
return False
|
||||
where
|
||||
runfsck check = ifM (check key tmp)
|
||||
( return True
|
||||
, do
|
||||
warning "recvkey: received key from direct mode repository seems to have changed as it was transferred; discarding"
|
||||
return False
|
||||
)
|
||||
|
|
|
@ -34,7 +34,7 @@ start (src:dest:[])
|
|||
start _ = error "specify a src file and a dest file"
|
||||
|
||||
perform :: FilePath -> FilePath -> (Key, Backend) -> CommandPerform
|
||||
perform src _dest (key, backend) = do
|
||||
perform src _dest (key, backend) =
|
||||
{- Check the content before accepting it. -}
|
||||
ifM (Command.Fsck.checkKeySizeOr reject key src
|
||||
<&&> Command.Fsck.checkBackendOr reject backend key src)
|
||||
|
|
|
@ -46,6 +46,6 @@ fieldTransfer direction key a = do
|
|||
ok <- maybe (a $ const noop)
|
||||
(\u -> runTransfer (Transfer direction (toUUID u) key) afile noRetry a)
|
||||
=<< Fields.getField Fields.remoteUUID
|
||||
if ok
|
||||
then liftIO exitSuccess
|
||||
else liftIO exitFailure
|
||||
liftIO $ if ok
|
||||
then exitSuccess
|
||||
else exitFailure
|
||||
|
|
|
@ -17,8 +17,6 @@ import Data.Ord
|
|||
import System.PosixCompat.Files
|
||||
|
||||
import Common.Annex
|
||||
import qualified Types.Backend as B
|
||||
import qualified Types.Remote as R
|
||||
import qualified Remote
|
||||
import qualified Command.Unused
|
||||
import qualified Git
|
||||
|
@ -28,7 +26,6 @@ import Utility.DataUnits
|
|||
import Utility.DiskFree
|
||||
import Annex.Content
|
||||
import Types.Key
|
||||
import Backend
|
||||
import Logs.UUID
|
||||
import Logs.Trust
|
||||
import Remote
|
||||
|
@ -116,9 +113,7 @@ selStats fast_stats slow_stats = do
|
|||
-}
|
||||
global_fast_stats :: [Stat]
|
||||
global_fast_stats =
|
||||
[ supported_backends
|
||||
, supported_remote_types
|
||||
, repository_mode
|
||||
[ repository_mode
|
||||
, remote_list Trusted
|
||||
, remote_list SemiTrusted
|
||||
, remote_list UnTrusted
|
||||
|
@ -171,14 +166,6 @@ showStat s = maybe noop calc =<< s
|
|||
(lift . showHeader) desc
|
||||
lift . showRaw =<< a
|
||||
|
||||
supported_backends :: Stat
|
||||
supported_backends = stat "supported backends" $ json unwords $
|
||||
return $ map B.name Backend.list
|
||||
|
||||
supported_remote_types :: Stat
|
||||
supported_remote_types = stat "supported remote types" $ json unwords $
|
||||
return $ map R.typename Remote.remoteTypes
|
||||
|
||||
repository_mode :: Stat
|
||||
repository_mode = stat "repository mode" $ json id $ lift $
|
||||
ifM isDirect
|
||||
|
@ -238,10 +225,10 @@ transfer_list :: Stat
|
|||
transfer_list = stat "transfers in progress" $ nojson $ lift $ do
|
||||
uuidmap <- Remote.remoteMap id
|
||||
ts <- getTransfers
|
||||
if null ts
|
||||
then return "none"
|
||||
else return $ multiLine $
|
||||
map (\(t, i) -> line uuidmap t i) $ sort ts
|
||||
return $ if null ts
|
||||
then "none"
|
||||
else multiLine $
|
||||
map (uncurry $ line uuidmap) $ sort ts
|
||||
where
|
||||
line uuidmap t i = unwords
|
||||
[ showLcDirection (transferDirection t) ++ "ing"
|
||||
|
@ -340,7 +327,7 @@ emptyKeyData :: KeyData
|
|||
emptyKeyData = KeyData 0 0 0 M.empty
|
||||
|
||||
emptyNumCopiesStats :: NumCopiesStats
|
||||
emptyNumCopiesStats = NumCopiesStats $ M.empty
|
||||
emptyNumCopiesStats = NumCopiesStats M.empty
|
||||
|
||||
foldKeys :: [Key] -> KeyData
|
||||
foldKeys = foldl' (flip addKey) emptyKeyData
|
||||
|
|
|
@ -86,20 +86,19 @@ syncRemotes rs = ifM (Annex.getState Annex.fast) ( nub <$> pickfast , wanted )
|
|||
fastest = fromMaybe [] . headMaybe . Remote.byCost
|
||||
|
||||
commit :: CommandStart
|
||||
commit = next $ next $ do
|
||||
ifM isDirect
|
||||
( do
|
||||
void $ stageDirect
|
||||
runcommit []
|
||||
, runcommit [Param "-a"]
|
||||
)
|
||||
commit = next $ next $ ifM isDirect
|
||||
( do
|
||||
void stageDirect
|
||||
runcommit []
|
||||
, runcommit [Param "-a"]
|
||||
)
|
||||
where
|
||||
runcommit ps = do
|
||||
showStart "commit" ""
|
||||
showOutput
|
||||
Annex.Branch.commit "update"
|
||||
-- Commit will fail when the tree is clean, so ignore failure.
|
||||
let params = (Param "commit") : ps ++
|
||||
let params = Param "commit" : ps ++
|
||||
[Param "-m", Param "git-annex automatic sync"]
|
||||
_ <- inRepo $ tryIO . Git.Command.runQuiet params
|
||||
return True
|
||||
|
@ -151,12 +150,12 @@ pullRemote remote branch = do
|
|||
- were committed (or pushed changes, if this is a bare remote),
|
||||
- while the synced/master may have changes that some
|
||||
- other remote synced to this remote. So, merge them both. -}
|
||||
mergeRemote :: Remote -> (Maybe Git.Ref) -> CommandCleanup
|
||||
mergeRemote :: Remote -> Maybe Git.Ref -> CommandCleanup
|
||||
mergeRemote remote b = case b of
|
||||
Nothing -> do
|
||||
branch <- inRepo Git.Branch.currentUnsafe
|
||||
all id <$> (mapM merge $ branchlist branch)
|
||||
Just _ -> all id <$> (mapM merge =<< tomerge (branchlist b))
|
||||
and <$> mapM merge (branchlist branch)
|
||||
Just _ -> and <$> (mapM merge =<< tomerge (branchlist b))
|
||||
where
|
||||
merge = mergeFrom . remoteBranch remote
|
||||
tomerge branches = filterM (changed remote) branches
|
||||
|
@ -221,7 +220,7 @@ pushBranch remote branch g = tryIO (directpush g) `after` syncpush g
|
|||
|
||||
mergeAnnex :: CommandStart
|
||||
mergeAnnex = do
|
||||
void $ Annex.Branch.forceUpdate
|
||||
void Annex.Branch.forceUpdate
|
||||
stop
|
||||
|
||||
{- Merges from a branch into the current branch. -}
|
||||
|
@ -244,7 +243,7 @@ mergeFrom branch = do
|
|||
mergeDirectCleanup d oldsha newsha
|
||||
_ -> noop
|
||||
return r
|
||||
runmerge a = ifM (a)
|
||||
runmerge a = ifM a
|
||||
( return True
|
||||
, resolveMerge
|
||||
)
|
||||
|
@ -268,7 +267,7 @@ resolveMerge :: Annex Bool
|
|||
resolveMerge = do
|
||||
top <- fromRepo Git.repoPath
|
||||
(fs, cleanup) <- inRepo (LsFiles.unmerged [top])
|
||||
merged <- all id <$> mapM resolveMerge' fs
|
||||
merged <- and <$> mapM resolveMerge' fs
|
||||
void $ liftIO cleanup
|
||||
|
||||
(deleted, cleanup2) <- inRepo (LsFiles.deleted [top])
|
||||
|
@ -291,7 +290,7 @@ resolveMerge' u
|
|||
withKey LsFiles.valUs $ \keyUs ->
|
||||
withKey LsFiles.valThem $ \keyThem -> do
|
||||
ifM isDirect
|
||||
( maybe noop (\k -> removeDirect k file) keyUs
|
||||
( maybe noop (`removeDirect` file) keyUs
|
||||
, liftIO $ nukeFile file
|
||||
)
|
||||
Annex.Queue.addCommand "rm" [Params "--quiet -f --"] [file]
|
||||
|
@ -307,14 +306,13 @@ resolveMerge' u
|
|||
makelink keyThem
|
||||
return True
|
||||
file = LsFiles.unmergedFile u
|
||||
issymlink select = any (select (LsFiles.unmergedBlobType u) ==)
|
||||
[Just SymlinkBlob, Nothing]
|
||||
issymlink select = select (LsFiles.unmergedBlobType u) `elem` [Just SymlinkBlob, Nothing]
|
||||
makelink (Just key) = do
|
||||
let dest = mergeFile file key
|
||||
l <- inRepo $ gitAnnexLink dest key
|
||||
replaceFile dest $ makeAnnexLink l
|
||||
stageSymlink dest =<< hashSymlink l
|
||||
whenM (isDirect) $
|
||||
whenM isDirect $
|
||||
toDirect key dest
|
||||
makelink _ = noop
|
||||
withKey select a = do
|
||||
|
|
|
@ -36,7 +36,7 @@ seek = [withWords start]
|
|||
-}
|
||||
start :: [String] -> CommandStart
|
||||
start (k:[]) = do
|
||||
case (file2key k) of
|
||||
case file2key k of
|
||||
Nothing -> error "bad key"
|
||||
(Just key) -> whenM (inAnnex key) $ do
|
||||
file <- Fields.getField Fields.associatedFile
|
||||
|
|
|
@ -41,7 +41,7 @@ seek = [withField readFdOption convertFd $ \readh ->
|
|||
|
||||
convertFd :: Maybe String -> Annex (Maybe Handle)
|
||||
convertFd Nothing = return Nothing
|
||||
convertFd (Just s) = liftIO $ do
|
||||
convertFd (Just s) = liftIO $
|
||||
case readish s of
|
||||
Nothing -> error "bad fd"
|
||||
Just fd -> Just <$> fdToHandle fd
|
||||
|
|
|
@ -46,7 +46,7 @@ performIndirect file key = do
|
|||
-- git as a normal non-annexed file, to thinking that the
|
||||
-- file has been unlocked and needs to be re-annexed.
|
||||
(s, reap) <- inRepo $ LsFiles.staged [file]
|
||||
when (not $ null s) $
|
||||
unless (null s) $
|
||||
inRepo $ Git.Command.run
|
||||
[ Param "commit"
|
||||
, Param "-q"
|
||||
|
|
|
@ -293,10 +293,9 @@ withKeysReferencedInGitRef a ref = do
|
|||
forM_ ts $ tKey lookAtWorkingTree >=> maybe noop a
|
||||
liftIO $ void clean
|
||||
where
|
||||
tKey True = Backend.lookupFile . DiffTree.file >=*>
|
||||
fmap fst
|
||||
tKey False = catFile ref . DiffTree.file >=*>
|
||||
fileKey . takeFileName . encodeW8 . L.unpack
|
||||
tKey True = fmap fst <$$> Backend.lookupFile . DiffTree.file
|
||||
tKey False = fileKey . takeFileName . encodeW8 . L.unpack <$$>
|
||||
catFile ref . DiffTree.file
|
||||
|
||||
{- Looks in the specified directory for bad/tmp keys, and returns a list
|
||||
- of those that might still have value, or might be stale and removable.
|
||||
|
|
|
@ -12,6 +12,10 @@ import Command
|
|||
import qualified Build.SysConfig as SysConfig
|
||||
import Annex.Version
|
||||
import BuildFlags
|
||||
import qualified Types.Backend as B
|
||||
import qualified Types.Remote as R
|
||||
import qualified Remote
|
||||
import qualified Backend
|
||||
|
||||
def :: [Command]
|
||||
def = [noCommit $ noRepo showPackageVersion $ dontCheck repoExists $
|
||||
|
@@ -25,13 +29,20 @@ start = do
v <- getVersion
liftIO $ do
showPackageVersion
putStrLn $ "local repository version: " ++ fromMaybe "unknown" v
putStrLn $ "default repository version: " ++ defaultVersion
putStrLn $ "supported repository versions: " ++ unwords supportedVersions
putStrLn $ "upgrade supported from repository versions: " ++ unwords upgradableVersions
info "local repository version" $ fromMaybe "unknown" v
info "default repository version" defaultVersion
info "supported repository versions" $
unwords supportedVersions
info "upgrade supported from repository versions" $
unwords upgradableVersions
stop

showPackageVersion :: IO ()
showPackageVersion = do
putStrLn $ "git-annex version: " ++ SysConfig.packageversion
putStrLn $ "build flags: " ++ unwords buildFlags
info "git-annex version" SysConfig.packageversion
info "build flags" $ unwords buildFlags
info "key/value backends" $ unwords $ map B.name Backend.list
info "remote types" $ unwords $ map R.typename Remote.remoteTypes

info :: String -> String -> IO ()
info k v = putStrLn $ k ++ ": " ++ v

@ -123,14 +123,14 @@ genCfg cfg descs = unlines $ concat [intro, trust, groups, preferredcontent]
|
|||
settings field desc showvals showdefaults = concat
|
||||
[ desc
|
||||
, concatMap showvals $ sort $ map swap $ M.toList $ field cfg
|
||||
, concatMap (\u -> lcom $ showdefaults u) $ missing field
|
||||
, concatMap (lcom . showdefaults) $ missing field
|
||||
]
|
||||
|
||||
line setting u value =
|
||||
[ com $ "(for " ++ (fromMaybe "" $ M.lookup u descs) ++ ")"
|
||||
[ com $ "(for " ++ fromMaybe "" (M.lookup u descs) ++ ")"
|
||||
, unwords [setting, fromUUID u, "=", value]
|
||||
]
|
||||
lcom = map (\l -> if "#" `isPrefixOf` l then l else "#" ++ l)
|
||||
lcom = map (\l -> if "#" `isPrefixOf` l then l else '#' : l)
|
||||
missing field = S.toList $ M.keysSet descs `S.difference` M.keysSet (field cfg)
|
||||
|
||||
{- If there's a parse error, returns a new version of the file,
|
||||
|
@ -139,7 +139,7 @@ parseCfg :: Cfg -> String -> Either String Cfg
|
|||
parseCfg curcfg = go [] curcfg . lines
|
||||
where
|
||||
go c cfg []
|
||||
| null (catMaybes $ map fst c) = Right cfg
|
||||
| null (mapMaybe fst c) = Right cfg
|
||||
| otherwise = Left $ unlines $
|
||||
badheader ++ concatMap showerr (reverse c)
|
||||
go c cfg (l:ls) = case parse (dropWhile isSpace l) cfg of
|
||||
|
|
|
@ -55,7 +55,7 @@ start = start' True
|
|||
|
||||
start' :: Bool -> Maybe HostName -> CommandStart
|
||||
start' allowauto listenhost = do
|
||||
liftIO $ ensureInstalled
|
||||
liftIO ensureInstalled
|
||||
ifM isInitialized ( go , auto )
|
||||
stop
|
||||
where
|
||||
|
@ -209,7 +209,7 @@ openBrowser mcmd htmlshim realurl outh errh = do
|
|||
, std_err = maybe Inherit UseHandle errh
|
||||
}
|
||||
exitcode <- waitForProcess pid
|
||||
unless (exitcode == ExitSuccess) $ do
|
||||
unless (exitcode == ExitSuccess) $
|
||||
hPutStrLn (fromMaybe stderr errh) "failed to start web browser"
|
||||
|
||||
{- web.browser is a generic git config setting for a web browser program -}
|
||||
|
|
|
@ -28,6 +28,7 @@ import Utility.Process as X
|
|||
import Utility.Path as X
|
||||
import Utility.Directory as X
|
||||
import Utility.Monad as X
|
||||
import Utility.Data as X
|
||||
import Utility.Applicative as X
|
||||
import Utility.FileSystemEncoding as X
|
||||
|
||||
|
|
|
@ -65,7 +65,7 @@ costBetween x y
|
|||
| x == y = x
|
||||
| x > y = -- avoid fractions unless needed
|
||||
let mid = y + (x - y) / 2
|
||||
mid' = fromIntegral ((floor mid) :: Int)
|
||||
mid' = fromIntegral (floor mid :: Int)
|
||||
in if mid' > y then mid' else mid
|
||||
| otherwise = costBetween y x
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ modifyAutoStartFile func = do
|
|||
when (dirs' /= dirs) $ do
|
||||
f <- autoStartFile
|
||||
createDirectoryIfMissing True (parentDir f)
|
||||
viaTmp writeFile f $ unlines $ dirs'
|
||||
viaTmp writeFile f $ unlines dirs'
|
||||
|
||||
{- Adds a directory to the autostart file. If the directory is already
|
||||
- present, it's moved to the top, so it will be used as the default
|
||||
|
|
8
Creds.hs
@ -16,10 +16,9 @@ import Crypto
|
|||
import Types.Remote (RemoteConfig, RemoteConfigKey)
|
||||
import Remote.Helper.Encryptable (remoteCipher, embedCreds)
|
||||
#ifndef mingw32_HOST_OS
|
||||
import Utility.Env (setEnv)
|
||||
import Utility.Env (setEnv, getEnv)
|
||||
#endif
|
||||
|
||||
import System.Environment
|
||||
import qualified Data.ByteString.Lazy.Char8 as L
|
||||
import qualified Data.Map as M
|
||||
import Utility.Base64
|
||||
|
@ -101,11 +100,10 @@ getRemoteCredPair c storage = maybe fromcache (return . Just) =<< fromenv
|
|||
{- Gets a CredPair from the environment. -}
|
||||
getEnvCredPair :: CredPairStorage -> IO (Maybe CredPair)
|
||||
getEnvCredPair storage = liftM2 (,)
|
||||
<$> get uenv
|
||||
<*> get penv
|
||||
<$> getEnv uenv
|
||||
<*> getEnv penv
|
||||
where
|
||||
(uenv, penv) = credPairEnvironment storage
|
||||
get = catchMaybeIO . getEnv
|
||||
|
||||
{- Stores a CredPair in the environment. -}
|
||||
setEnvCredPair :: CredPair -> CredPairStorage -> IO ()
|
||||
|
|
|
@ -102,7 +102,7 @@ updateEncryptedCipher newkeys encipher@(EncryptedCipher _ variant (KeyIds ks)) =
|
|||
cipher <- decryptCipher encipher
|
||||
encryptCipher cipher variant $ KeyIds ks'
|
||||
where
|
||||
listKeyIds = mapM (Gpg.findPubKeys >=*> keyIds) >=*> concat
|
||||
listKeyIds = concat <$$> mapM (keyIds <$$> Gpg.findPubKeys)
|
||||
|
||||
describeCipher :: StorableCipher -> String
|
||||
describeCipher (SharedCipher _) = "shared cipher"
|
||||
|
|
|
@ -18,7 +18,6 @@ module Git.CatFile (
|
|||
import System.IO
|
||||
import qualified Data.ByteString as S
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
import Data.Digest.Pure.SHA
|
||||
import Data.Char
|
||||
import System.Process (std_out, std_err)
|
||||
import Numeric
|
||||
|
@ -31,6 +30,7 @@ import Git.Command
|
|||
import Git.Types
|
||||
import Git.FilePath
|
||||
import qualified Utility.CoProcess as CoProcess
|
||||
import Utility.Hash
|
||||
|
||||
data CatFileHandle = CatFileHandle CoProcess.CoProcessHandle Repo
|
||||
|
||||
|
@ -103,7 +103,7 @@ catObjectDetails (CatFileHandle hdl repo) object = CoProcess.query hdl send rece
|
|||
}
|
||||
fileEncoding h
|
||||
content <- L.hGetContents h
|
||||
let sha = (\s -> length s `seq` s) (showDigest $ sha1 content)
|
||||
let sha = (\s -> length s `seq` s) (show $ sha1 content)
|
||||
ok <- checkSuccessProcess pid
|
||||
return $ if ok
|
||||
then Just (content, Ref sha)
|
||||
|
|
|
@ -10,6 +10,7 @@ module Git.Config where
|
|||
import qualified Data.Map as M
|
||||
import Data.Char
|
||||
import System.Process (cwd, env)
|
||||
import Control.Exception.Extensible
|
||||
|
||||
import Common
|
||||
import Git
|
||||
|
@ -153,3 +154,37 @@ boolConfig False = "false"
|
|||
|
||||
isBare :: Repo -> Bool
|
||||
isBare r = fromMaybe False $ isTrue =<< getMaybe "core.bare" r
|
||||
|
||||
{- Runs a command to get the configuration of a repo,
|
||||
- and returns a repo populated with the configuration, as well as the raw
|
||||
- output of the command. -}
|
||||
fromPipe :: Repo -> String -> [CommandParam] -> IO (Either SomeException (Repo, String))
|
||||
fromPipe r cmd params = try $
|
||||
withHandle StdoutHandle createProcessSuccess p $ \h -> do
|
||||
fileEncoding h
|
||||
val <- hGetContentsStrict h
|
||||
r' <- store val r
|
||||
return (r', val)
|
||||
where
|
||||
p = proc cmd $ toCommand params
|
||||
|
||||
{- Reads git config from a specified file and returns the repo populated
|
||||
- with the configuration. -}
|
||||
fromFile :: Repo -> FilePath -> IO (Either SomeException (Repo, String))
|
||||
fromFile r f = fromPipe r "git"
|
||||
[ Param "config"
|
||||
, Param "--file"
|
||||
, File f
|
||||
, Param "--list"
|
||||
]
|
||||
|
||||
{- Changes a git config setting in the specified config file.
|
||||
- (Creates the file if it does not already exist.) -}
|
||||
changeFile :: FilePath -> String -> String -> IO Bool
|
||||
changeFile f k v = boolSystem "git"
|
||||
[ Param "config"
|
||||
, Param "--file"
|
||||
, File f
|
||||
, Param k
|
||||
, Param v
|
||||
]
|
||||
|
|
|
@ -23,8 +23,6 @@ module Git.Construct (
|
|||
checkForRepo,
|
||||
) where
|
||||
|
||||
{-# LANGUAGE CPP #-}
|
||||
|
||||
#ifndef mingw32_HOST_OS
|
||||
import System.Posix.User
|
||||
#else
|
||||
|
@ -36,6 +34,7 @@ import Network.URI
|
|||
import Common
|
||||
import Git.Types
|
||||
import Git
|
||||
import Git.Remote
|
||||
import qualified Git.Url as Url
|
||||
import Utility.UserInfo
|
||||
|
||||
|
@ -143,51 +142,10 @@ remoteNamedFromKey k = remoteNamed basename
|
|||
{- Constructs a new Repo for one of a Repo's remotes using a given
|
||||
- location (ie, an url). -}
|
||||
fromRemoteLocation :: String -> Repo -> IO Repo
|
||||
fromRemoteLocation s repo = gen $ calcloc s
|
||||
fromRemoteLocation s repo = gen $ parseRemoteLocation s repo
|
||||
where
|
||||
gen v
|
||||
#ifdef mingw32_HOST_OS
|
||||
| dosstyle v = fromRemotePath (dospath v) repo
|
||||
#endif
|
||||
| scpstyle v = fromUrl $ scptourl v
|
||||
| urlstyle v = fromUrl v
|
||||
| otherwise = fromRemotePath v repo
|
||||
-- insteadof config can rewrite remote location
|
||||
calcloc l
|
||||
| null insteadofs = l
|
||||
| otherwise = replacement ++ drop (length bestvalue) l
|
||||
where
|
||||
replacement = drop (length prefix) $
|
||||
take (length bestkey - length suffix) bestkey
|
||||
(bestkey, bestvalue) = maximumBy longestvalue insteadofs
|
||||
longestvalue (_, a) (_, b) = compare b a
|
||||
insteadofs = filterconfig $ \(k, v) ->
|
||||
startswith prefix k &&
|
||||
endswith suffix k &&
|
||||
startswith v l
|
||||
filterconfig f = filter f $
|
||||
concatMap splitconfigs $ M.toList $ fullconfig repo
|
||||
splitconfigs (k, vs) = map (\v -> (k, v)) vs
|
||||
(prefix, suffix) = ("url." , ".insteadof")
|
||||
urlstyle v = isURI v || ":" `isInfixOf` v && "//" `isInfixOf` v
|
||||
-- git remotes can be written scp style -- [user@]host:dir
|
||||
-- but foo::bar is a git-remote-helper location instead
|
||||
scpstyle v = ":" `isInfixOf` v
|
||||
&& not ("//" `isInfixOf` v)
|
||||
&& not ("::" `isInfixOf` v)
|
||||
scptourl v = "ssh://" ++ host ++ slash dir
|
||||
where
|
||||
(host, dir) = separate (== ':') v
|
||||
slash d | d == "" = "/~/" ++ d
|
||||
| "/" `isPrefixOf` d = d
|
||||
| "~" `isPrefixOf` d = '/':d
|
||||
| otherwise = "/~/" ++ d
|
||||
#ifdef mingw32_HOST_OS
|
||||
-- git on Windows will write a path to .git/config with "drive:",
|
||||
-- which is not to be confused with a "host:"
|
||||
dosstyle = hasDrive
|
||||
dospath = fromInternalGitPath
|
||||
#endif
|
||||
gen (RemotePath p) = fromRemotePath p repo
|
||||
gen (RemoteUrl u) = fromUrl u
|
||||
|
||||
{- Constructs a Repo from the path specified in the git remotes of
|
||||
- another Repo. -}
|
||||
|
|
|
@ -15,6 +15,7 @@ import Git.Construct
|
|||
import qualified Git.Config as Config
|
||||
import qualified Git.Command as Command
|
||||
import Utility.Gpg
|
||||
import Git.Remote
|
||||
|
||||
urlPrefix :: String
|
||||
urlPrefix = "gcrypt::"
|
||||
|
@ -66,7 +67,6 @@ probeRepo loc baserepo = do
|
|||
ExitFailure 1 -> NotDecryptable
|
||||
ExitFailure _ -> NotEncrypted
|
||||
|
||||
type RemoteName = String
|
||||
type GCryptId = String
|
||||
|
||||
{- gcrypt gives each encrypted repository a unique gcrypt-id,
|
||||
|
|
|
@ -103,7 +103,7 @@ stagedDetails' ps l repo = do
|
|||
where
|
||||
(metadata, file) = separate (== '\t') s
|
||||
(mode, rest) = separate (== ' ') metadata
|
||||
readmode = headMaybe . readOct >=*> fst
|
||||
readmode = fst <$$> headMaybe . readOct
|
||||
|
||||
{- Returns a list of the files in the specified locations that are staged
|
||||
- for commit, and whose type has changed. -}
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
- Licensed under the GNU GPL version 3 or higher.
|
||||
-}
|
||||
|
||||
{-# LANGUAGE CPP #-}
|
||||
|
||||
module Git.Remote where
|
||||
|
||||
import Common
|
||||
|
@ -13,6 +15,8 @@ import qualified Git.Command
|
|||
import qualified Git.BuildVersion
|
||||
|
||||
import Data.Char
|
||||
import qualified Data.Map as M
|
||||
import Network.URI
|
||||
|
||||
type RemoteName = String
|
||||
|
||||
|
@ -48,3 +52,58 @@ remove remotename = Git.Command.run
|
|||
else "remove"
|
||||
, Param remotename
|
||||
]
|
||||
|
||||
data RemoteLocation = RemoteUrl String | RemotePath FilePath
|
||||
|
||||
remoteLocationIsUrl :: RemoteLocation -> Bool
|
||||
remoteLocationIsUrl (RemoteUrl _) = True
|
||||
remoteLocationIsUrl _ = False
|
||||
|
||||
{- Determines if a given remote location is an url, or a local
|
||||
- path. Takes the repository's insteadOf configuration into account. -}
|
||||
parseRemoteLocation :: String -> Repo -> RemoteLocation
|
||||
parseRemoteLocation s repo = ret $ calcloc s
|
||||
where
|
||||
ret v
|
||||
#ifdef mingw32_HOST_OS
|
||||
| dosstyle v = RemotePath (dospath v)
|
||||
#endif
|
||||
| scpstyle v = RemoteUrl (scptourl v)
|
||||
| urlstyle v = RemoteUrl v
|
||||
| otherwise = RemotePath v
|
||||
-- insteadof config can rewrite remote location
|
||||
calcloc l
|
||||
| null insteadofs = l
|
||||
| otherwise = replacement ++ drop (length bestvalue) l
|
||||
where
|
||||
replacement = drop (length prefix) $
|
||||
take (length bestkey - length suffix) bestkey
|
||||
(bestkey, bestvalue) = maximumBy longestvalue insteadofs
|
||||
longestvalue (_, a) (_, b) = compare b a
|
||||
insteadofs = filterconfig $ \(k, v) ->
|
||||
startswith prefix k &&
|
||||
endswith suffix k &&
|
||||
startswith v l
|
||||
filterconfig f = filter f $
|
||||
concatMap splitconfigs $ M.toList $ fullconfig repo
|
||||
splitconfigs (k, vs) = map (\v -> (k, v)) vs
|
||||
(prefix, suffix) = ("url." , ".insteadof")
|
||||
urlstyle v = isURI v || ":" `isInfixOf` v && "//" `isInfixOf` v
|
||||
-- git remotes can be written scp style -- [user@]host:dir
|
||||
-- but foo::bar is a git-remote-helper location instead
|
||||
scpstyle v = ":" `isInfixOf` v
|
||||
&& not ("//" `isInfixOf` v)
|
||||
&& not ("::" `isInfixOf` v)
|
||||
scptourl v = "ssh://" ++ host ++ slash dir
|
||||
where
|
||||
(host, dir) = separate (== ':') v
|
||||
slash d | d == "" = "/~/" ++ d
|
||||
| "/" `isPrefixOf` d = d
|
||||
| "~" `isPrefixOf` d = '/':d
|
||||
| otherwise = "/~/" ++ d
|
||||
#ifdef mingw32_HOST_OS
|
||||
-- git on Windows will write a path to .git/config with "drive:",
|
||||
-- which is not to be confused with a "host:"
|
||||
dosstyle = hasDrive
|
||||
dospath = fromInternalGitPath
|
||||
#endif
|
||||
|
|
|
@ -48,6 +48,8 @@ options = Option.common ++
|
|||
"skip files smaller than a size"
|
||||
, Option ['T'] ["time-limit"] (ReqArg Limit.addTimeLimit paramTime)
|
||||
"stop after the specified amount of time"
|
||||
, Option [] ["user-agent"] (ReqArg setuseragent paramName)
|
||||
"override default User-Agent"
|
||||
, Option [] ["trust-glacier"] (NoArg (Annex.setFlag "trustglacier"))
|
||||
"Trust Amazon Glacier inventory"
|
||||
] ++ Option.matcher
|
||||
|
@ -55,6 +57,7 @@ options = Option.common ++
|
|||
setnumcopies v = maybe noop
|
||||
(\n -> Annex.changeState $ \s -> s { Annex.forcenumcopies = Just n })
|
||||
(readish v)
|
||||
setuseragent v = Annex.changeState $ \s -> s { Annex.useragent = Just v }
|
||||
setgitconfig v = Annex.changeGitRepo =<< inRepo (Git.Config.store v)
|
||||
trustArg t = ReqArg (Remote.forceTrust t) paramRemote
|
||||
|
||||
|
|
|
@ -19,6 +19,9 @@ import Annex (setField)
|
|||
import qualified Option
|
||||
import Fields
|
||||
import Utility.UserInfo
|
||||
import Remote.GCrypt (getGCryptUUID)
|
||||
import qualified Annex
|
||||
import Init
|
||||
|
||||
import qualified Command.ConfigList
|
||||
import qualified Command.InAnnex
|
||||
|
@ -27,20 +30,22 @@ import qualified Command.RecvKey
|
|||
import qualified Command.SendKey
|
||||
import qualified Command.TransferInfo
|
||||
import qualified Command.Commit
|
||||
import qualified Command.GCryptSetup
|
||||
|
||||
cmds_readonly :: [Command]
|
||||
cmds_readonly = concat
|
||||
[ Command.ConfigList.def
|
||||
, Command.InAnnex.def
|
||||
, Command.SendKey.def
|
||||
, Command.TransferInfo.def
|
||||
[ gitAnnexShellCheck Command.ConfigList.def
|
||||
, gitAnnexShellCheck Command.InAnnex.def
|
||||
, gitAnnexShellCheck Command.SendKey.def
|
||||
, gitAnnexShellCheck Command.TransferInfo.def
|
||||
]
|
||||
|
||||
cmds_notreadonly :: [Command]
|
||||
cmds_notreadonly = concat
|
||||
[ Command.RecvKey.def
|
||||
, Command.DropKey.def
|
||||
, Command.Commit.def
|
||||
[ gitAnnexShellCheck Command.RecvKey.def
|
||||
, gitAnnexShellCheck Command.DropKey.def
|
||||
, gitAnnexShellCheck Command.Commit.def
|
||||
, Command.GCryptSetup.def
|
||||
]
|
||||
|
||||
cmds :: [Command]
|
||||
|
@ -50,17 +55,22 @@ cmds = map adddirparam $ cmds_readonly ++ cmds_notreadonly
|
|||
|
||||
options :: [OptDescr (Annex ())]
|
||||
options = Option.common ++
|
||||
[ Option [] ["uuid"] (ReqArg checkuuid paramUUID) "local repository uuid"
|
||||
[ Option [] ["uuid"] (ReqArg checkUUID paramUUID) "local repository uuid"
|
||||
]
|
||||
where
|
||||
checkuuid expected = getUUID >>= check
|
||||
checkUUID expected = getUUID >>= check
|
||||
where
|
||||
check u | u == toUUID expected = noop
|
||||
check NoUUID = unexpected "uninitialized repository"
|
||||
check u = unexpected $ "UUID " ++ fromUUID u
|
||||
unexpected s = error $
|
||||
"expected repository UUID " ++
|
||||
expected ++ " but found " ++ s
|
||||
check NoUUID = checkGCryptUUID expected
|
||||
check u = unexpectedUUID expected u
|
||||
checkGCryptUUID expected = check =<< getGCryptUUID True =<< gitRepo
|
||||
where
|
||||
check (Just u) | u == toUUID expected = noop
|
||||
check Nothing = unexpected expected "uninitialized repository"
|
||||
check (Just u) = unexpectedUUID expected u
|
||||
unexpectedUUID expected u = unexpected expected $ "UUID " ++ fromUUID u
|
||||
unexpected expected s = error $
|
||||
"expected repository UUID " ++ expected ++ " but found " ++ s
|
||||
|
||||
header :: String
|
||||
header = "git-annex-shell [-c] command [parameters ...] [option ...]"
|
||||
|
@ -180,3 +190,11 @@ checkEnv var = do
|
|||
Nothing -> noop
|
||||
Just "" -> noop
|
||||
Just _ -> error $ "Action blocked by " ++ var
|
||||
|
||||
{- Modifies a Command to check that it is run in either a git-annex
|
||||
- repository, or a repository with a gcrypt-id set. -}
|
||||
gitAnnexShellCheck :: [Command] -> [Command]
|
||||
gitAnnexShellCheck = map $ addCheck okforshell . dontCheck repoExists
|
||||
where
|
||||
okforshell = unlessM (isInitialized <||> isJust . gcryptId <$> Annex.getGitConfig) $
|
||||
error "Not a git-annex or gcrypt repository."
|
||||
|
|
|
@ -10,6 +10,7 @@ module Locations (
|
|||
fileKey,
|
||||
keyPaths,
|
||||
keyPath,
|
||||
objectDir,
|
||||
gitAnnexLocation,
|
||||
gitAnnexLink,
|
||||
gitAnnexMapping,
|
||||
|
|
|
@ -262,6 +262,12 @@ getFailedTransfers u = catMaybes <$> (liftIO . getpairs =<< concat <$> findfiles
|
|||
findfiles = liftIO . mapM dirContentsRecursive
|
||||
=<< mapM (fromRepo . failedTransferDir u) [Download, Upload]
|
||||
|
||||
clearFailedTransfers :: UUID -> Annex [(Transfer, TransferInfo)]
|
||||
clearFailedTransfers u = do
|
||||
failed <- getFailedTransfers u
|
||||
mapM_ (removeFailedTransfer . fst) failed
|
||||
return failed
|
||||
|
||||
removeFailedTransfer :: Transfer -> Annex ()
|
||||
removeFailedTransfer t = do
|
||||
f <- fromRepo $ failedTransferFile t
|
||||
|
|
|
@ -71,7 +71,7 @@ parseTransitionLine s = TransitionLine <$> pdate ds <*> readish ts
|
|||
ws = words s
|
||||
ts = Prelude.head ws
|
||||
ds = unwords $ Prelude.tail ws
|
||||
pdate = parseTime defaultTimeLocale "%s%Qs" >=*> utcTimeToPOSIXSeconds
|
||||
pdate = utcTimeToPOSIXSeconds <$$> parseTime defaultTimeLocale "%s%Qs"
|
||||
|
||||
combineTransitions :: [Transitions] -> Transitions
|
||||
combineTransitions = S.unions
|
||||
|
@ -82,6 +82,5 @@ transitionList = map transition . S.elems
|
|||
{- Typically ran with Annex.Branch.change, but we can't import Annex.Branch
|
||||
- here since it depends on this module. -}
|
||||
recordTransitions :: (FilePath -> (String -> String) -> Annex ()) -> Transitions -> Annex ()
|
||||
recordTransitions changer t = do
|
||||
changer transitionsLog $
|
||||
showTransitions . S.union t . parseTransitionsStrictly "local"
|
||||
recordTransitions changer t = changer transitionsLog $
|
||||
showTransitions . S.union t . parseTransitionsStrictly "local"
|
||||
|
|
8
Makefile
@ -160,12 +160,12 @@ osxapp: Build/Standalone Build/OSXMkLibs
|
|||
rm -f tmp/git-annex.dmg.bz2
|
||||
bzip2 --fast tmp/git-annex.dmg
|
||||
|
||||
ANDROID_FLAGS?=
|
||||
ANDROID_FLAGS?=-f-XMPP
|
||||
# Cross compile for Android.
|
||||
# Uses https://github.com/neurocyte/ghc-android
|
||||
android: Build/EvilSplicer
|
||||
echo "Running native build, to get TH splices.."
|
||||
if [ ! -e dist/setup/setup ]; then $(CABAL) configure -f"-Production $(ANDROID_FLAGS)" -O0; fi
|
||||
if [ ! -e dist/setup/setup ]; then $(CABAL) configure -f-Production -O0 $(ANDROID_FLAGS); fi
|
||||
mkdir -p tmp
|
||||
if ! $(CABAL) build --ghc-options=-ddump-splices 2> tmp/dump-splices; then tail tmp/dump-splices >&2; exit 1; fi
|
||||
echo "Setting up Android build tree.."
|
||||
|
@ -183,9 +183,9 @@ android: Build/EvilSplicer
|
|||
# Cabal cannot cross compile with custom build type, so workaround.
|
||||
sed -i 's/Build-type: Custom/Build-type: Simple/' tmp/androidtree/git-annex.cabal
|
||||
if [ ! -e tmp/androidtree/dist/setup/setup ]; then \
|
||||
cd tmp/androidtree && $$HOME/.ghc/android-14/arm-linux-androideabi-4.7/arm-linux-androideabi/bin/cabal configure -f"Android $(ANDROID_FLAGS)"; \
|
||||
cd tmp/androidtree && $$HOME/.ghc/android-14/arm-linux-androideabi-4.7/arm-linux-androideabi/bin/cabal configure -fAndroid $(ANDROID_FLAGS); \
|
||||
fi
|
||||
cd tmp/androidtree && $(CABAL) build
|
||||
cd tmp/androidtree && $$HOME/.ghc/android-14/arm-linux-androideabi-4.7/arm-linux-androideabi/bin/cabal build
|
||||
|
||||
adb:
|
||||
ANDROID_FLAGS="-Production" $(MAKE) android
|
||||
|
|
23
Remote.hs
@ -56,6 +56,7 @@ import Logs.Trust
|
|||
import Logs.Location hiding (logStatus)
|
||||
import Remote.List
|
||||
import Config
|
||||
import Git.Remote
|
||||
|
||||
{- Map from UUIDs of Remotes to a calculated value. -}
|
||||
remoteMap :: (Remote -> a) -> Annex (M.Map UUID a)
|
||||
|
@ -68,7 +69,7 @@ remoteMap c = M.fromList . map (\r -> (uuid r, c r)) .
|
|||
uuidDescriptions :: Annex (M.Map UUID String)
|
||||
uuidDescriptions = M.unionWith addName <$> uuidMap <*> remoteMap name
|
||||
|
||||
addName :: String -> String -> String
|
||||
addName :: String -> RemoteName -> String
|
||||
addName desc n
|
||||
| desc == n = desc
|
||||
| null desc = n
|
||||
|
@ -76,12 +77,12 @@ addName desc n
|
|||
|
||||
{- When a name is specified, looks up the remote matching that name.
|
||||
- (Or it can be a UUID.) -}
|
||||
byName :: Maybe String -> Annex (Maybe Remote)
|
||||
byName :: Maybe RemoteName -> Annex (Maybe Remote)
|
||||
byName Nothing = return Nothing
|
||||
byName (Just n) = either error Just <$> byName' n
|
||||
|
||||
{- Like byName, but the remote must have a configured UUID. -}
|
||||
byNameWithUUID :: Maybe String -> Annex (Maybe Remote)
|
||||
byNameWithUUID :: Maybe RemoteName -> Annex (Maybe Remote)
|
||||
byNameWithUUID = checkuuid <=< byName
|
||||
where
|
||||
checkuuid Nothing = return Nothing
|
||||
|
@ -93,7 +94,7 @@ byNameWithUUID = checkuuid <=< byName
|
|||
else error e
|
||||
| otherwise = return $ Just r
|
||||
|
||||
byName' :: String -> Annex (Either String Remote)
|
||||
byName' :: RemoteName -> Annex (Either String Remote)
|
||||
byName' "" = return $ Left "no remote specified"
|
||||
byName' n = handle . filter matching <$> remoteList
|
||||
where
|
||||
|
@ -104,7 +105,7 @@ byName' n = handle . filter matching <$> remoteList
|
|||
{- Looks up a remote by name (or by UUID, or even by description),
|
||||
- and returns its UUID. Finds even remotes that are not configured in
|
||||
- .git/config. -}
|
||||
nameToUUID :: String -> Annex UUID
|
||||
nameToUUID :: RemoteName -> Annex UUID
|
||||
nameToUUID "." = getUUID -- special case for current repo
|
||||
nameToUUID "here" = getUUID
|
||||
nameToUUID "" = error "no remote specified"
|
||||
|
@ -167,13 +168,19 @@ prettyListUUIDs uuids = do
|
|||
prettyUUID :: UUID -> Annex String
|
||||
prettyUUID u = concat <$> prettyListUUIDs [u]
|
||||
|
||||
{- Gets the remote associated with a UUID.
|
||||
- There's no associated remote when this is the UUID of the local repo. -}
|
||||
{- Gets the remote associated with a UUID. -}
|
||||
remoteFromUUID :: UUID -> Annex (Maybe Remote)
|
||||
remoteFromUUID u = ifM ((==) u <$> getUUID)
|
||||
( return Nothing
|
||||
, Just . fromMaybe (error "Unknown UUID") . M.lookup u <$> remoteMap id
|
||||
, do
|
||||
maybe tryharder (return . Just) =<< findinmap
|
||||
)
|
||||
where
|
||||
findinmap = M.lookup u <$> remoteMap id
|
||||
{- Re-read remote list in case a new remote has popped up. -}
|
||||
tryharder = do
|
||||
void remoteListRefresh
|
||||
findinmap
|
||||
|
||||
{- Filters a list of remotes to ones that have the listed uuids. -}
|
||||
remotesWithUUID :: [Remote] -> [UUID] -> [Remote]
|
||||
|
|
|
@ -10,6 +10,7 @@ module Remote.Bup (remote) where
|
|||
import qualified Data.ByteString.Lazy as L
|
||||
import qualified Data.Map as M
|
||||
import System.Process
|
||||
import Data.ByteString.Lazy.UTF8 (fromString)
|
||||
|
||||
import Common.Annex
|
||||
import Types.Remote
|
||||
|
@ -21,12 +22,12 @@ import qualified Git.Construct
|
|||
import qualified Git.Ref
|
||||
import Config
|
||||
import Config.Cost
|
||||
import Remote.Helper.Ssh
|
||||
import qualified Remote.Helper.Ssh as Ssh
|
||||
import Remote.Helper.Special
|
||||
import Remote.Helper.Encryptable
|
||||
import Remote.Helper.Messages
|
||||
import Crypto
|
||||
import Data.ByteString.Lazy.UTF8 (fromString)
|
||||
import Data.Digest.Pure.SHA
|
||||
import Utility.Hash
|
||||
import Utility.UserInfo
|
||||
import Annex.Content
|
||||
import Annex.UUID
|
||||
|
@ -185,7 +186,7 @@ rollback k bupr = go =<< liftIO (bup2GitRemote bupr)
|
|||
checkPresent :: Git.Repo -> Git.Repo -> Key -> Annex (Either String Bool)
|
||||
checkPresent r bupr k
|
||||
| Git.repoIsUrl bupr = do
|
||||
showAction $ "checking " ++ Git.repoDescribe r
|
||||
showChecking r
|
||||
ok <- onBupRemote bupr boolSystem "git" params
|
||||
return $ Right ok
|
||||
| otherwise = liftIO $ catchMsgIO $
|
||||
|
@ -220,7 +221,7 @@ storeBupUUID u buprepo = do
|
|||
|
||||
onBupRemote :: Git.Repo -> (FilePath -> [CommandParam] -> IO a) -> FilePath -> [CommandParam] -> Annex a
|
||||
onBupRemote r a command params = do
|
||||
sshparams <- sshToRepo r [Param $
|
||||
sshparams <- Ssh.toRepo r [Param $
|
||||
"cd " ++ dir ++ " && " ++ unwords (command : toCommand params)]
|
||||
liftIO $ a "ssh" sshparams
|
||||
where
|
||||
|
@ -277,7 +278,7 @@ bup2GitRemote r
|
|||
bupRef :: Key -> String
|
||||
bupRef k
|
||||
| Git.Ref.legal True shown = shown
|
||||
| otherwise = "git-annex-" ++ showDigest (sha256 (fromString shown))
|
||||
| otherwise = "git-annex-" ++ show (sha256 (fromString shown))
|
||||
where
|
||||
shown = key2file k
|
||||
|
||||
|
|
|
@ -12,7 +12,6 @@ module Remote.Directory (remote) where
|
|||
import qualified Data.ByteString.Lazy as L
|
||||
import qualified Data.ByteString as S
|
||||
import qualified Data.Map as M
|
||||
import qualified Control.Exception as E
|
||||
import Data.Int
|
||||
|
||||
import Common.Annex
|
||||
|
@ -109,7 +108,7 @@ withCheckedFiles check (Just _) d k a = go $ locations d k
|
|||
ifM (check chunkcount)
|
||||
( do
|
||||
chunks <- listChunks f <$> readFile chunkcount
|
||||
ifM (all id <$> mapM check chunks)
|
||||
ifM (and <$> mapM check chunks)
|
||||
( a chunks , return False )
|
||||
, go fs
|
||||
)
|
||||
|
@ -159,7 +158,7 @@ storeSplit' :: MeterUpdate -> Int64 -> [FilePath] -> [S.ByteString] -> [FilePath
|
|||
storeSplit' _ _ [] _ _ = error "ran out of dests"
|
||||
storeSplit' _ _ _ [] c = return $ reverse c
|
||||
storeSplit' meterupdate chunksize (d:dests) bs c = do
|
||||
bs' <- E.bracket (openFile d WriteMode) hClose $
|
||||
bs' <- withFile d WriteMode $
|
||||
feed zeroBytesProcessed chunksize bs
|
||||
storeSplit' meterupdate chunksize dests bs' (d:c)
|
||||
where
|
||||
|
@ -206,7 +205,7 @@ retrieve :: FilePath -> ChunkSize -> Key -> AssociatedFile -> FilePath -> MeterU
|
|||
retrieve d chunksize k _ f p = metered (Just p) k $ \meterupdate ->
|
||||
liftIO $ withStoredFiles chunksize d k $ \files ->
|
||||
catchBoolIO $ do
|
||||
meteredWriteFileChunks meterupdate f files $ L.readFile
|
||||
meteredWriteFileChunks meterupdate f files L.readFile
|
||||
return True
|
||||
|
||||
retrieveEncrypted :: FilePath -> ChunkSize -> (Cipher, Key) -> Key -> FilePath -> MeterUpdate -> Annex Bool
|
||||
|
@ -217,7 +216,7 @@ retrieveEncrypted d chunksize (cipher, enck) k f p = metered (Just p) k $ \meter
|
|||
readBytes $ meteredWriteFile meterupdate f
|
||||
return True
|
||||
where
|
||||
feeder files h = forM_ files $ \file -> L.hPut h =<< L.readFile file
|
||||
feeder files h = forM_ files $ L.hPut h <=< L.readFile
|
||||
|
||||
retrieveCheap :: FilePath -> ChunkSize -> Key -> FilePath -> Annex Bool
|
||||
retrieveCheap _ (Just _) _ _ = return False -- no cheap retrieval for chunks
|
||||
|
|
234
Remote/GCrypt.hs
@ -5,10 +5,17 @@
|
|||
- Licensed under the GNU GPL version 3 or higher.
|
||||
-}
|
||||
|
||||
module Remote.GCrypt (remote, gen, getGCryptId) where
|
||||
module Remote.GCrypt (
|
||||
remote,
|
||||
gen,
|
||||
getGCryptUUID,
|
||||
coreGCryptId,
|
||||
setupRepo
|
||||
) where
|
||||
|
||||
import qualified Data.Map as M
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
import Control.Exception.Extensible
|
||||
|
||||
import Common.Annex
|
||||
import Types.Remote
|
||||
|
@ -27,14 +34,19 @@ import Config.Cost
|
|||
import Remote.Helper.Git
|
||||
import Remote.Helper.Encryptable
|
||||
import Remote.Helper.Special
|
||||
import Remote.Helper.Messages
|
||||
import qualified Remote.Helper.Ssh as Ssh
|
||||
import Utility.Metered
|
||||
import Crypto
|
||||
import Annex.UUID
|
||||
import Annex.Ssh
|
||||
import qualified Remote.Rsync
|
||||
import Utility.Rsync
|
||||
import Utility.Tmp
|
||||
import Logs.Remote
|
||||
import Logs.Transfer
|
||||
import Utility.Gpg
|
||||
import Annex.Content
|
||||
|
||||
remote :: RemoteType
|
||||
remote = RemoteType {
|
||||
|
@ -52,9 +64,9 @@ gen gcryptr u c gc = do
|
|||
-- get underlying git repo with real path, not gcrypt path
|
||||
r <- liftIO $ Git.GCrypt.encryptedRemote g gcryptr
|
||||
let r' = r { Git.remoteName = Git.remoteName gcryptr }
|
||||
(mgcryptid, r'') <- liftIO $ getGCryptId r'
-- doublecheck that local cache matches underlying repo's gcrypt-id
-- (which might not be set)
-- doublecheck that cache matches underlying repo's gcrypt-id
-- (which might not be set), only for local repos
(mgcryptid, r'') <- getGCryptId True r'
case (mgcryptid, Git.GCrypt.remoteRepoId g (Git.remoteName gcryptr)) of
(Just gcryptid, Just cachedgcryptid)
| gcryptid /= cachedgcryptid -> resetup gcryptid r''

@ -67,7 +79,7 @@ gen gcryptr u c gc = do
-- correctly.
resetup gcryptid r = do
let u' = genUUIDInNameSpace gCryptNameSpace gcryptid
v <- (M.lookup u' <$> readRemoteLog)
v <- M.lookup u' <$> readRemoteLog
case (Git.remoteName gcryptr, v) of
(Just remotename, Just c') -> do
setGcryptEncryption c' remotename

@ -78,22 +90,11 @@ gen gcryptr u c gc = do
warning $ "not using unknown gcrypt repository pointed to by remote " ++ Git.repoDescribe r
return Nothing
{- gcrypt repos set up by git-annex as special remotes have a
- core.gcrypt-id setting in their config, which can be mapped back to
- the remote's UUID. This only works for local repos.
- (Also returns a version of input repo with its config read.) -}
getGCryptId :: Git.Repo -> IO (Maybe Git.GCrypt.GCryptId, Git.Repo)
getGCryptId r
| Git.repoIsLocalUnknown r = do
r' <- catchDefaultIO r $ Git.Config.read r
return (Git.Config.getMaybe "core.gcrypt-id" r', r')
| otherwise = return (Nothing, r)
gen' :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen' r u c gc = do
cst <- remoteCost gc $
if repoCheap r then nearlyCheapRemoteCost else expensiveRemoteCost
(rsynctransport, rsyncurl) <- rsyncTransport r
(rsynctransport, rsyncurl) <- rsyncTransportToObjects r
let rsyncopts = Remote.Rsync.genRsyncOpts c gc rsynctransport rsyncurl
let this = Remote
{ uuid = u

@ -119,7 +120,12 @@ gen' r u c gc = do
(retrieve this rsyncopts)
this
rsyncTransport :: Git.Repo -> Annex ([CommandParam], String)
rsyncTransportToObjects :: Git.Repo -> Annex ([CommandParam], String)
rsyncTransportToObjects r = do
(rsynctransport, rsyncurl, _) <- rsyncTransport r
return (rsynctransport, rsyncurl ++ "/annex/objects")
rsyncTransport :: Git.Repo -> Annex ([CommandParam], String, AccessMethod)
rsyncTransport r
| "ssh://" `isPrefixOf` loc = sshtransport $ break (== '/') $ drop (length "ssh://") loc
| "//:" `isInfixOf` loc = othertransport

@ -128,9 +134,12 @@ rsyncTransport r
where
loc = Git.repoLocation r
sshtransport (host, path) = do
let rsyncpath = if "/~/" `isPrefixOf` path
then drop 3 path
else path
opts <- sshCachingOptions (host, Nothing) []
return (rsyncShell $ Param "ssh" : opts, host ++ ":" ++ path)
othertransport = return ([], loc)
return (rsyncShell $ Param "ssh" : opts, host ++ ":" ++ rsyncpath, AccessShell)
othertransport = return ([], loc, AccessDirect)
noCrypto :: Annex a
noCrypto = error "cannot use gcrypt remote without encryption enabled"

@ -155,7 +164,7 @@ gCryptSetup mu c = go $ M.lookup "gitrepo" c
{- Run a git fetch and a push to the git repo in order to get
- its gcrypt-id set up, so that later git annex commands
- will use the remote as a ggcrypt remote. The fetch is
- will use the remote as a gcrypt remote. The fetch is
- needed if the repo already exists; the push is needed
- if the repo has not yet been initialized by gcrypt. -}
void $ inRepo $ Git.Command.runBool

@ -165,25 +174,85 @@ gCryptSetup mu c = go $ M.lookup "gitrepo" c
void $ inRepo $ Git.Command.runBool
[ Param "push"
, Param remotename
, Param $ show $ Annex.Branch.fullname
, Param $ show Annex.Branch.fullname
]
g <- inRepo Git.Config.reRead
case Git.GCrypt.remoteRepoId g (Just remotename) of
Nothing -> error "unable to determine gcrypt-id of remote"
Just gcryptid -> do
let u = genUUIDInNameSpace gCryptNameSpace gcryptid
if Just u == mu || mu == Nothing
if Just u == mu || isNothing mu
then do
-- Store gcrypt-id in local
-- gcrypt repository, for later
-- double-check.
r <- inRepo $ Git.Construct.fromRemoteLocation gitrepo
when (Git.repoIsLocalUnknown r) $ do
r' <- liftIO $ Git.Config.read r
liftIO $ Git.Command.run [Param "config", Param "core.gcrypt-id", Param gcryptid] r'
gitConfigSpecialRemote u c' "gcrypt" "true"
method <- setupRepo gcryptid =<< inRepo (Git.Construct.fromRemoteLocation gitrepo)
gitConfigSpecialRemote u c' "gcrypt" (fromAccessMethod method)
return (c', u)
else error "uuid mismatch"
else error $ "uuid mismatch " ++ show (u, mu, gcryptid)
{- Sets up the gcrypt repository. The repository is either a local
- repo, or it is accessed via rsync directly, or it is accessed over ssh
- and git-annex-shell is available to manage it.
-
- The GCryptID is recorded in the repository's git config for later use.
- Also, if the git config has receive.denyNonFastForwards set, disable
- it; gcrypt relies on being able to fast-forward branches.
-}
setupRepo :: Git.GCrypt.GCryptId -> Git.Repo -> Annex AccessMethod
setupRepo gcryptid r
| Git.repoIsUrl r = do
(_, _, accessmethod) <- rsyncTransport r
case accessmethod of
AccessDirect -> rsyncsetup
AccessShell -> ifM gitannexshellsetup
( return AccessShell
, rsyncsetup
)
| Git.repoIsLocalUnknown r = localsetup =<< liftIO (Git.Config.read r)
| otherwise = localsetup r
where
localsetup r' = do
let setconfig k v = liftIO $ Git.Command.run [Param "config", Param k, Param v] r'
setconfig coreGCryptId gcryptid
setconfig denyNonFastForwards (Git.Config.boolConfig False)
return AccessDirect
{- As well as modifying the remote's git config,
- create the objectDir on the remote,
- which is needed for direct rsync of objects to work.
-}
rsyncsetup = Remote.Rsync.withRsyncScratchDir $ \tmp -> do
liftIO $ createDirectoryIfMissing True $ tmp </> objectDir
(rsynctransport, rsyncurl, _) <- rsyncTransport r
let tmpconfig = tmp </> "config"
void $ liftIO $ rsync $ rsynctransport ++
[ Param $ rsyncurl ++ "/config"
, Param tmpconfig
]
liftIO $ do
void $ Git.Config.changeFile tmpconfig coreGCryptId gcryptid
void $ Git.Config.changeFile tmpconfig denyNonFastForwards (Git.Config.boolConfig False)
ok <- liftIO $ rsync $ rsynctransport ++
[ Params "--recursive"
, Param $ tmp ++ "/"
, Param rsyncurl
]
unless ok $
error "Failed to connect to remote to set it up."
return AccessDirect
{- Ask git-annex-shell to configure the repository as a gcrypt
- repository. May fail if it is too old. -}
gitannexshellsetup = Ssh.onRemote r (boolSystem, False)
"gcryptsetup" [ Param gcryptid ] []
denyNonFastForwards = "receive.denyNonFastForwards"
shellOrRsync :: Remote -> Annex a -> Annex a -> Annex a
shellOrRsync r ashell arsync = case method of
AccessShell -> ashell
_ -> arsync
where
method = toAccessMethod $ fromMaybe "" $
remoteAnnexGCrypt $ gitconfig r
{- Configure gcrypt to use the same list of keyids that
- were passed to initremote as its participants.

@ -210,26 +279,32 @@ setGcryptEncryption c remotename = do
store :: Remote -> Remote.Rsync.RsyncOpts -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
store r rsyncopts (cipher, enck) k p
| not $ Git.repoIsUrl (repo r) = guardUsable (repo r) False $
sendwith $ \meterupdate h -> do
metered (Just p) k $ \meterupdate -> spoolencrypted $ \h -> do
let dest = gCryptLocation r enck
createDirectoryIfMissing True $ parentDir dest
readBytes (meteredWriteFile meterupdate dest) h
return True
| Git.repoIsSsh (repo r) = Remote.Rsync.storeEncrypted rsyncopts gpgopts (cipher, enck) k p
| Git.repoIsSsh (repo r) = shellOrRsync r storeshell storersync
| otherwise = unsupportedUrl
where
gpgopts = getGpgEncParams r
dest = gCryptLocation r enck
sendwith a = metered (Just p) k $ \meterupdate ->
Annex.Content.sendAnnex k noop $ \src ->
liftIO $ catchBoolIO $
encrypt gpgopts cipher (feedFile src) (a meterupdate)
storersync = Remote.Rsync.storeEncrypted rsyncopts gpgopts (cipher, enck) k p
storeshell = withTmp enck $ \tmp ->
ifM (spoolencrypted $ readBytes $ \b -> catchBoolIO $ L.writeFile tmp b >> return True)
( Ssh.rsyncHelper (Just p)
=<< Ssh.rsyncParamsRemote False r Upload enck tmp Nothing
, return False
)
spoolencrypted a = Annex.Content.sendAnnex k noop $ \src ->
liftIO $ catchBoolIO $
encrypt gpgopts cipher (feedFile src) a
retrieve :: Remote -> Remote.Rsync.RsyncOpts -> (Cipher, Key) -> Key -> FilePath -> MeterUpdate -> Annex Bool
retrieve r rsyncopts (cipher, enck) k d p
| not $ Git.repoIsUrl (repo r) = guardUsable (repo r) False $ do
retrievewith $ L.readFile src
return True
| Git.repoIsSsh (repo r) = Remote.Rsync.retrieveEncrypted rsyncopts (cipher, enck) k d p
| Git.repoIsSsh (repo r) = shellOrRsync r retrieveshell retrieversync
| otherwise = unsupportedUrl
where
src = gCryptLocation r enck

@ -237,30 +312,89 @@ retrieve r rsyncopts (cipher, enck) k d p
a >>= \b ->
decrypt cipher (feedBytes b)
(readBytes $ meteredWriteFile meterupdate d)
retrieversync = Remote.Rsync.retrieveEncrypted rsyncopts (cipher, enck) k d p
retrieveshell = withTmp enck $ \tmp ->
ifM (Ssh.rsyncHelper (Just p) =<< Ssh.rsyncParamsRemote False r Download enck tmp Nothing)
( liftIO $ catchBoolIO $ do
decrypt cipher (feedFile tmp) $
readBytes $ L.writeFile d
return True
, return False
)
remove :: Remote -> Remote.Rsync.RsyncOpts -> Key -> Annex Bool
remove r rsyncopts k
| not $ Git.repoIsUrl (repo r) = guardUsable (repo r) False $ do
liftIO $ removeDirectoryRecursive (parentDir dest)
liftIO $ removeDirectoryRecursive $ parentDir $ gCryptLocation r k
return True
| Git.repoIsSsh (repo r) = Remote.Rsync.remove rsyncopts k
| Git.repoIsSsh (repo r) = shellOrRsync r removeshell removersync
| otherwise = unsupportedUrl
where
dest = gCryptLocation r k
removersync = Remote.Rsync.remove rsyncopts k
removeshell = Ssh.dropKey (repo r) k
checkPresent :: Remote -> Remote.Rsync.RsyncOpts -> Key -> Annex (Either String Bool)
checkPresent r rsyncopts k
| not $ Git.repoIsUrl (repo r) =
guardUsable (repo r) unknown $
liftIO $ catchDefaultIO unknown $
guardUsable (repo r) (cantCheck $ repo r) $
liftIO $ catchDefaultIO (cantCheck $ repo r) $
Right <$> doesFileExist (gCryptLocation r k)
| Git.repoIsSsh (repo r) = Remote.Rsync.checkPresent (repo r) rsyncopts k
| Git.repoIsSsh (repo r) = shellOrRsync r checkshell checkrsync
| otherwise = unsupportedUrl
where
unknown = Left $ "unable to check " ++ Git.repoDescribe (repo r) ++ show (repo r)
checkrsync = Remote.Rsync.checkPresent (repo r) rsyncopts k
checkshell = Ssh.inAnnex (repo r) k
{- Annexed objects are stored directly under the top of the gcrypt repo
- (not in annex/objects), and are hashed using lower-case directories for max
{- Annexed objects are hashed using lower-case directories for max
- portability. -}
gCryptLocation :: Remote -> Key -> FilePath
gCryptLocation r key = Git.repoLocation (repo r) </> keyPath key hashDirLower
gCryptLocation r key = Git.repoLocation (repo r) </> objectDir </> keyPath key hashDirLower
data AccessMethod = AccessDirect | AccessShell
fromAccessMethod :: AccessMethod -> String
fromAccessMethod AccessShell = "shell"
fromAccessMethod AccessDirect = "true"
toAccessMethod :: String -> AccessMethod
toAccessMethod "shell" = AccessShell
toAccessMethod _ = AccessDirect
getGCryptUUID :: Bool -> Git.Repo -> Annex (Maybe UUID)
getGCryptUUID fast r = (genUUIDInNameSpace gCryptNameSpace <$>) . fst
<$> getGCryptId fast r
coreGCryptId :: String
coreGCryptId = "core.gcrypt-id"
{- gcrypt repos set up by git-annex as special remotes have a
- core.gcrypt-id setting in their config, which can be mapped back to
- the remote's UUID.
-
- In fast mode, only checks local repos. To check a remote repo,
- tries git-annex-shell and direct rsync of the git config file.
-
- (Also returns a version of input repo with its config read.) -}
getGCryptId :: Bool -> Git.Repo -> Annex (Maybe Git.GCrypt.GCryptId, Git.Repo)
getGCryptId fast r
| Git.repoIsLocal r || Git.repoIsLocalUnknown r = extract <$>
liftIO (catchMaybeIO $ Git.Config.read r)
| not fast = extract . liftM fst <$> getM (eitherToMaybe <$>)
[ Ssh.onRemote r (Git.Config.fromPipe r, Left undefined) "configlist" [] []
, getConfigViaRsync r
]
| otherwise = return (Nothing, r)
where
extract Nothing = (Nothing, r)
extract (Just r') = (Git.Config.getMaybe coreGCryptId r', r')
getConfigViaRsync :: Git.Repo -> Annex (Either SomeException (Git.Repo, String))
getConfigViaRsync r = do
(rsynctransport, rsyncurl, _) <- rsyncTransport r
liftIO $ do
withTmpFile "tmpconfig" $ \tmpconfig _ -> do
void $ rsync $ rsynctransport ++
[ Param $ rsyncurl ++ "/config"
, Param tmpconfig
]
Git.Config.fromFile r tmpconfig
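The AccessMethod type and its string mapping added above decide whether a gcrypt remote is managed through git-annex-shell or by direct rsync. A standalone sketch (not part of this commit; main is illustrative only) of how the mapping round-trips:

data AccessMethod = AccessDirect | AccessShell
  deriving (Show, Eq)

fromAccessMethod :: AccessMethod -> String
fromAccessMethod AccessShell = "shell"
fromAccessMethod AccessDirect = "true"

toAccessMethod :: String -> AccessMethod
toAccessMethod "shell" = AccessShell
toAccessMethod _ = AccessDirect

main :: IO ()
main = do
  -- the two defined values survive a round-trip through the config string
  print $ map (toAccessMethod . fromAccessMethod) [AccessDirect, AccessShell]
  -- any other stored value (including the legacy "true") means direct rsync access
  print (toAccessMethod "true")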
142 Remote/Git.hs

@ -14,8 +14,6 @@ module Remote.Git (
) where
import Common.Annex
import Utility.Rsync
import Remote.Helper.Ssh
import Annex.Ssh
import Types.Remote
import Types.GitConfig

@ -32,7 +30,7 @@ import Annex.Exception
import qualified Annex.Content
import qualified Annex.BranchState
import qualified Annex.Branch
import qualified Utility.Url as Url
import qualified Annex.Url as Url
import Utility.Tmp
import Config
import Config.Cost

@ -45,6 +43,8 @@ import Utility.Metered
import Utility.CopyFile
#endif
import Remote.Helper.Git
import Remote.Helper.Messages
import qualified Remote.Helper.Ssh as Ssh
import qualified Remote.GCrypt
import Control.Concurrent

@ -143,7 +143,7 @@ tryGitConfigRead :: Git.Repo -> Annex Git.Repo
tryGitConfigRead r
| haveconfig r = return r -- already read
| Git.repoIsSsh r = store $ do
v <- onRemote r (pipedconfig, Left undefined) "configlist" [] []
v <- Ssh.onRemote r (pipedconfig, Left undefined) "configlist" [] []
case v of
Right r'
| haveconfig r' -> return r'

@ -165,23 +165,22 @@ tryGitConfigRead r
safely a = either (const $ return r) return
=<< liftIO (try a :: IO (Either SomeException Git.Repo))
pipedconfig cmd params = try run :: IO (Either SomeException Git.Repo)
where
run = withHandle StdoutHandle createProcessSuccess p $ \h -> do
fileEncoding h
val <- hGetContentsStrict h
r' <- Git.Config.store val r
when (getUncachedUUID r' == NoUUID && not (null val)) $ do
warningIO $ "Failed to get annex.uuid configuration of repository " ++ Git.repoDescribe r
warningIO $ "Instead, got: " ++ show val
warningIO $ "This is unexpected; please check the network transport!"
return r'
p = proc cmd $ toCommand params
pipedconfig cmd params = do
v <- Git.Config.fromPipe r cmd params
case v of
Right (r', val) -> do
when (getUncachedUUID r' == NoUUID && not (null val)) $ do
warningIO $ "Failed to get annex.uuid configuration of repository " ++ Git.repoDescribe r
warningIO $ "Instead, got: " ++ show val
warningIO $ "This is unexpected; please check the network transport!"
return $ Right r'
Left l -> return $ Left l
geturlconfig headers = do
ua <- Url.getUserAgent
v <- liftIO $ withTmpFile "git-annex.tmp" $ \tmpfile h -> do
hClose h
ifM (Url.downloadQuiet (Git.repoLocation r ++ "/config") headers [] tmpfile)
ifM (Url.downloadQuiet (Git.repoLocation r ++ "/config") headers [] tmpfile ua)
( pipedconfig "git" [Param "config", Param "--null", Param "--list", Param "--file", File tmpfile]
, return $ Left undefined
)

@ -211,7 +210,7 @@ tryGitConfigRead r
Nothing -> return r
Just n -> do
whenM (inRepo $ Git.Command.runBool [Param "fetch", Param "--quiet", Param n]) $
set_ignore $ "does not have git-annex installed"
set_ignore "does not have git-annex installed"
return r
set_ignore msg = case Git.remoteName r of

@ -241,28 +240,19 @@ inAnnex r key
| otherwise = checklocal
where
checkhttp headers = do
showchecking
liftIO $ ifM (anyM (\u -> Url.check u headers (keySize key)) (keyUrls r key))
showChecking r
ifM (anyM (\u -> Url.withUserAgent $ Url.check u headers (keySize key)) (keyUrls r key))
( return $ Right True
, return $ Left "not found"
)
checkremote = do
showchecking
onRemote r (check, unknown) "inannex" [Param (key2file key)] []
where
check c p = dispatch <$> safeSystem c p
dispatch ExitSuccess = Right True
dispatch (ExitFailure 1) = Right False
dispatch _ = unknown
checklocal = guardUsable r unknown $ dispatch <$> check
checkremote = Ssh.inAnnex r key
checklocal = guardUsable r (cantCheck r) $ dispatch <$> check
where
check = liftIO $ catchMsgIO $ onLocal r $
Annex.Content.inAnnexSafe key
dispatch (Left e) = Left e
dispatch (Right (Just b)) = Right b
dispatch (Right Nothing) = unknown
unknown = Left $ "unable to check " ++ Git.repoDescribe r
showchecking = showAction $ "checking " ++ Git.repoDescribe r
dispatch (Right Nothing) = cantCheck r
keyUrls :: Git.Repo -> Key -> [String]
keyUrls r key = map tourl locs

@ -285,12 +275,8 @@ dropKey r key
logStatus key InfoMissing
Annex.Content.saveState True
return True
| Git.repoIsHttp (repo r) = error "dropping from http repo not supported"
| otherwise = commitOnCleanup r $ onRemote (repo r) (boolSystem, False) "dropkey"
[ Params "--quiet --force"
, Param $ key2file key
]
[]
| Git.repoIsHttp (repo r) = error "dropping from http remote not supported"
| otherwise = commitOnCleanup r $ Ssh.dropKey (repo r) key
{- Tries to copy a key's content from a remote's annex to a file. -}
copyFromRemote :: Remote -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Bool

@ -298,7 +284,7 @@ copyFromRemote r key file dest _p = copyFromRemote' r key file dest
copyFromRemote' :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
copyFromRemote' r key file dest
| not $ Git.repoIsUrl (repo r) = guardUsable (repo r) False $ do
let params = rsyncParams r
let params = Ssh.rsyncParams r
u <- getUUID
-- run copy from perspective of remote
liftIO $ onLocal (repo r) $ do

@ -310,11 +296,12 @@ copyFromRemote' r key file dest
upload u key file noRetry
(rsyncOrCopyFile params object dest)
<&&> checksuccess
| Git.repoIsSsh (repo r) = feedprogressback $ \feeder ->
rsyncHelper (Just feeder)
=<< rsyncParamsRemote r Download key dest file
| Git.repoIsSsh (repo r) = feedprogressback $ \feeder -> do
direct <- isDirect
Ssh.rsyncHelper (Just feeder)
=<< Ssh.rsyncParamsRemote direct r Download key dest file
| Git.repoIsHttp (repo r) = Annex.Content.downloadUrl (keyUrls (repo r) key) dest
| otherwise = error "copying from non-ssh, non-http repo not supported"
| otherwise = error "copying from non-ssh, non-http remote not supported"
where
{- Feed local rsync's progress info back to the remote,
- by forking a feeder thread that runs

@ -339,9 +326,9 @@ copyFromRemote' r key file dest
u <- getUUID
let fields = (Fields.remoteUUID, fromUUID u)
: maybe [] (\f -> [(Fields.associatedFile, f)]) file
Just (cmd, params) <- git_annex_shell (repo r) "transferinfo"
Just (cmd, params) <- Ssh.git_annex_shell (repo r) "transferinfo"
[Param $ key2file key] fields
v <- liftIO $ (newEmptySV :: IO (MSampleVar Integer))
v <- liftIO (newEmptySV :: IO (MSampleVar Integer))
tid <- liftIO $ forkIO $ void $ tryIO $ do
bytes <- readSV v
p <- createProcess $

@ -352,7 +339,7 @@ copyFromRemote' r key file dest
hClose $ stderrHandle p
let h = stdinHandle p
let send b = do
hPutStrLn h $ show b
hPrint h b
hFlush h
send bytes
forever $

@ -384,8 +371,10 @@ copyToRemote r key file p
guardUsable (repo r) False $ commitOnCleanup r $
copylocal =<< Annex.Content.prepSendAnnex key
| Git.repoIsSsh (repo r) = commitOnCleanup r $
Annex.Content.sendAnnex key noop $ \object ->
rsyncHelper (Just p) =<< rsyncParamsRemote r Upload key object file
Annex.Content.sendAnnex key noop $ \object -> do
direct <- isDirect
Ssh.rsyncHelper (Just p)
=<< Ssh.rsyncParamsRemote direct r Upload key object file
| otherwise = error "copying to non-ssh repo not supported"
where
copylocal Nothing = return False

@ -394,7 +383,7 @@ copyToRemote r key file p
-- the remote's Annex, but it needs access to the current
-- Annex monad's state.
checksuccessio <- Annex.withCurrentState checksuccess
let params = rsyncParams r
let params = Ssh.rsyncParams r
u <- getUUID
-- run copy from perspective of remote
liftIO $ onLocal (repo r) $ ifM (Annex.Content.inAnnex key)

@ -428,7 +417,7 @@ rsyncOrCopyFile rsyncparams src dest p =
#else
ifM (sameDeviceIds src dest) (docopy, dorsync)
where
sameDeviceIds a b = (==) <$> (getDeviceId a) <*> (getDeviceId b)
sameDeviceIds a b = (==) <$> getDeviceId a <*> getDeviceId b
getDeviceId f = deviceID <$> liftIO (getFileStatus $ parentDir f)
docopy = liftIO $ bracket
(forkIO $ watchfilesize zeroBytesProcessed)

@ -446,56 +435,9 @@ rsyncOrCopyFile rsyncparams src dest p =
watchfilesize sz
_ -> watchfilesize oldsz
#endif
dorsync = rsyncHelper (Just p) $
dorsync = Ssh.rsyncHelper (Just p) $
rsyncparams ++ [File src, File dest]
rsyncHelper :: Maybe MeterUpdate -> [CommandParam] -> Annex Bool
rsyncHelper callback params = do
showOutput -- make way for progress bar
ifM (liftIO $ (maybe rsync rsyncProgress callback) params)
( return True
, do
showLongNote "rsync failed -- run git annex again to resume file transfer"
return False
)
{- Generates rsync parameters that ssh to the remote and asks it
- to either receive or send the key's content. -}
rsyncParamsRemote :: Remote -> Direction -> Key -> FilePath -> AssociatedFile -> Annex [CommandParam]
rsyncParamsRemote r direction key file afile = do
u <- getUUID
direct <- isDirect
let fields = (Fields.remoteUUID, fromUUID u)
: (Fields.direct, if direct then "1" else "")
: maybe [] (\f -> [(Fields.associatedFile, f)]) afile
Just (shellcmd, shellparams) <- git_annex_shell (repo r)
(if direction == Download then "sendkey" else "recvkey")
[ Param $ key2file key ]
fields
-- Convert the ssh command into rsync command line.
let eparam = rsyncShell (Param shellcmd:shellparams)
let o = rsyncParams r
if direction == Download
then return $ o ++ rsyncopts eparam dummy (File file)
else return $ o ++ rsyncopts eparam (File file) dummy
where
rsyncopts ps source dest
| end ps == [dashdash] = ps ++ [source, dest]
| otherwise = ps ++ [dashdash, source, dest]
dashdash = Param "--"
{- The rsync shell parameter controls where rsync
- goes, so the source/dest parameter can be a dummy value,
- that just enables remote rsync mode.
- For maximum compatability with some patched rsyncs,
- the dummy value needs to still contain a hostname,
- even though this hostname will never be used. -}
dummy = Param "dummy:"
-- --inplace to resume partial files
rsyncParams :: Remote -> [CommandParam]
rsyncParams r = [Params "--progress --inplace"] ++
map Param (remoteAnnexRsyncOptions $ gitconfig r)
commitOnCleanup :: Remote -> Annex a -> Annex a
commitOnCleanup r a = go `after` a
where

@ -506,12 +448,12 @@ commitOnCleanup r a = go `after` a
Annex.Branch.commit "update"
| otherwise = void $ do
Just (shellcmd, shellparams) <-
git_annex_shell (repo r) "commit" [] []
Ssh.git_annex_shell (repo r) "commit" [] []
-- Throw away stderr, since the remote may not
-- have a new enough git-annex shell to
-- support committing.
liftIO $ catchMaybeIO $ do
liftIO $ catchMaybeIO $
withQuietOutput createProcessSuccess $
proc shellcmd $
toCommand shellparams
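The sameDeviceIds change above is a small applicative-style cleanup: run two IO actions and compare their results. A standalone sketch of the same pattern (not from the commit; assumes a POSIX system and drops the parentDir/liftIO wrapping of the real code):

import System.Posix.Files (deviceID, getFileStatus)

-- True when both paths live on the same device, so a plain copy is safe.
sameDeviceIds :: FilePath -> FilePath -> IO Bool
sameDeviceIds a b = (==) <$> getDeviceId a <*> getDeviceId b
  where
    getDeviceId f = deviceID <$> getFileStatus f

main :: IO ()
main = print =<< sameDeviceIds "/tmp" "/tmp"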
@ -98,7 +98,7 @@ store r k _f p
storeHelper r k $ streamMeteredFile src meterupdate
storeEncrypted :: Remote -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
storeEncrypted r (cipher, enck) k p = sendAnnex k (void $ remove r enck) $ \src -> do
storeEncrypted r (cipher, enck) k p = sendAnnex k (void $ remove r enck) $ \src ->
metered (Just p) k $ \meterupdate ->
storeHelper r enck $ \h ->
encrypt (getGpgEncParams r) cipher (feedFile src)

@ -209,7 +209,7 @@ checkPresent r k = do
]
glacierAction :: Remote -> [CommandParam] -> Annex Bool
glacierAction r params = runGlacier (config r) (uuid r) params
glacierAction r = runGlacier (config r) (uuid r)
runGlacier :: RemoteConfig -> UUID -> [CommandParam] -> Annex Bool
runGlacier c u params = go =<< glacierEnv c u

@ -222,7 +222,7 @@ glacierParams :: RemoteConfig -> [CommandParam] -> [CommandParam]
glacierParams c params = datacenter:params
where
datacenter = Param $ "--region=" ++
(fromJust $ M.lookup "datacenter" c)
fromJust (M.lookup "datacenter" c)
glacierEnv :: RemoteConfig -> UUID -> Annex (Maybe [(String, String)])
glacierEnv c u = go =<< getRemoteCredPairFor "glacier" c creds

@ -282,7 +282,7 @@ jobList r keys = go =<< glacierEnv (config r) (uuid r)
enckeys <- forM keys $ \k ->
maybe k snd <$> cipherKey (config r) k
let keymap = M.fromList $ zip enckeys keys
let convert = catMaybes . map (`M.lookup` keymap)
let convert = mapMaybe (`M.lookup` keymap)
return (convert succeeded, convert failed)
parse c [] = c
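The jobList change above swaps catMaybes . map for mapMaybe. A tiny sketch (illustrative only; the map contents are made up) showing the two spellings give the same result:

import Data.Maybe (catMaybes, mapMaybe)
import qualified Data.Map as M

main :: IO ()
main = do
  let keymap = M.fromList [("a", 1), ("b", 2)] :: M.Map String Int
  let ks = ["a", "x", "b"]
  print $ (catMaybes . map (`M.lookup` keymap)) ks  -- old spelling
  print $ mapMaybe (`M.lookup` keymap) ks           -- new spelling, same [1,2]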
@ -68,7 +68,7 @@ storeChunks key tmp dest chunksize storer recorder finalizer = either onerr retu
where
go = do
stored <- storer tmpdests
when (chunksize /= Nothing) $ do
when (isNothing chunksize) $ do
let chunkcount = basef ++ chunkCount
recorder chunkcount (show $ length stored)
finalizer tmp dest

@ -79,7 +79,7 @@ storeChunks key tmp dest chunksize storer recorder finalizer = either onerr retu
basef = tmp ++ keyFile key
tmpdests
| chunksize == Nothing = [basef]
| isNothing chunksize = [basef]
| otherwise = map (basef ++ ) chunkStream
{- Given a list of destinations to use, chunks the data according to the

@ -123,5 +123,5 @@ storeChunked chunksize dests storer content = either onerr return
meteredWriteFileChunks :: MeterUpdate -> FilePath -> [v] -> (v -> IO L.ByteString) -> IO ()
meteredWriteFileChunks meterupdate dest chunks feeder =
withBinaryFile dest WriteMode $ \h ->
forM_ chunks $ \c ->
meteredWrite meterupdate h =<< feeder c
forM_ chunks $
meteredWrite meterupdate h <=< feeder
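The meteredWriteFileChunks change above replaces an explicit lambda with Kleisli composition. A reduced sketch (not from the commit; feeder and write are stand-ins for the chunk feeder and meteredWrite) showing the two forms do the same thing:

import Control.Monad ((<=<), forM_)

feeder :: Int -> IO String        -- stand-in for the chunk feeder
feeder c = return ("chunk " ++ show c)

write :: String -> IO ()          -- stand-in for meteredWrite
write = putStrLn

main :: IO ()
main = do
  forM_ [1, 2 :: Int] $ \c -> write =<< feeder c  -- old form
  forM_ [1, 2 :: Int] $ write <=< feeder          -- new form, same effect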
@ -35,8 +35,8 @@ addHooks' r starthook stophook = r'
{ storeKey = \k f p -> wrapper $ storeKey r k f p
, retrieveKeyFile = \k f d p -> wrapper $ retrieveKeyFile r k f d p
, retrieveKeyFileCheap = \k f -> wrapper $ retrieveKeyFileCheap r k f
, removeKey = \k -> wrapper $ removeKey r k
, hasKey = \k -> wrapper $ hasKey r k
, removeKey = wrapper . removeKey r
, hasKey = wrapper . hasKey r
}
where
wrapper = runHooks r' starthook stophook

@ -45,7 +45,7 @@ runHooks :: Remote -> Maybe String -> Maybe String -> Annex a -> Annex a
runHooks r starthook stophook a = do
dir <- fromRepo gitAnnexRemotesDir
let lck = dir </> remoteid ++ ".lck"
whenM (not . any (== lck) . M.keys <$> getPool) $ do
whenM (notElem lck . M.keys <$> getPool) $ do
liftIO $ createDirectoryIfMissing True dir
firstrun lck
a
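The addHooks' change above is a pointfree rewrite of the removeKey and hasKey wrappers. A reduced sketch (illustrative only; wrapper and removeKey here are stand-ins, not the git-annex functions of the same names):

wrapper :: IO Bool -> IO Bool
wrapper act = putStrLn "running start/stop hooks" >> act

removeKey :: String -> String -> IO Bool
removeKey remote key = putStrLn ("drop " ++ key ++ " from " ++ remote) >> return True

main :: IO ()
main = do
  _ <- (\k -> wrapper $ removeKey "origin" k) "KEY1"  -- old, explicit lambda
  _ <- (wrapper . removeKey "origin") "KEY2"          -- new, composed form
  return ()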
17 Remote/Helper/Messages.hs (new file)

@ -0,0 +1,17 @@
{- git-annex remote messages
-
- Copyright 2013 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Remote.Helper.Messages where
import Common.Annex
import qualified Git
showChecking :: Git.Repo -> Annex ()
showChecking r = showAction $ "checking " ++ Git.repoDescribe r
cantCheck :: Git.Repo -> Either String Bool
cantCheck r = Left $ "unable to check " ++ Git.repoDescribe r
@ -1,6 +1,6 @@
{- git-annex remote access with ssh
{- git-annex remote access with ssh and git-annex-shell
-
- Copyright 2011,2012 Joey Hess <joey@kitenet.net>
- Copyright 2011-2013 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}

@ -12,19 +12,26 @@ import qualified Git
import qualified Git.Url
import Annex.UUID
import Annex.Ssh
import Fields
import Fields (Field, fieldName)
import qualified Fields
import Types.GitConfig
import Types.Key
import Remote.Helper.Messages
import Utility.Metered
import Utility.Rsync
import Types.Remote
import Logs.Transfer
{- Generates parameters to ssh to a repository's host and run a command.
- Caller is responsible for doing any neccessary shellEscaping of the
- passed command. -}
sshToRepo :: Git.Repo -> [CommandParam] -> Annex [CommandParam]
sshToRepo repo sshcmd = do
toRepo :: Git.Repo -> [CommandParam] -> Annex [CommandParam]
toRepo r sshcmd = do
g <- fromRepo id
let c = extractRemoteGitConfig g (Git.repoDescribe repo)
let c = extractRemoteGitConfig g (Git.repoDescribe r)
let opts = map Param $ remoteAnnexSshOptions c
let host = Git.Url.hostuser repo
params <- sshCachingOptions (host, Git.Url.port repo) opts
let host = Git.Url.hostuser r
params <- sshCachingOptions (host, Git.Url.port r) opts
return $ params ++ Param host : sshcmd
{- Generates parameters to run a git-annex-shell command on a remote

@ -33,17 +40,17 @@ git_annex_shell :: Git.Repo -> String -> [CommandParam] -> [(Field, String)] ->
git_annex_shell r command params fields
| not $ Git.repoIsUrl r = return $ Just (shellcmd, shellopts ++ fieldopts)
| Git.repoIsSsh r = do
uuid <- getRepoUUID r
sshparams <- sshToRepo r [Param $ sshcmd uuid ]
u <- getRepoUUID r
sshparams <- toRepo r [Param $ sshcmd u ]
return $ Just ("ssh", sshparams)
| otherwise = return Nothing
where
dir = Git.repoPath r
shellcmd = "git-annex-shell"
shellopts = Param command : File dir : params
sshcmd uuid = unwords $
sshcmd u = unwords $
shellcmd : map shellEscape (toCommand shellopts) ++
uuidcheck uuid ++
uuidcheck u ++
map shellEscape (toCommand fieldopts)
uuidcheck NoUUID = []
uuidcheck (UUID u) = ["--uuid", u]

@ -71,3 +78,69 @@ onRemote r (with, errorval) command params fields = do
case s of
Just (c, ps) -> liftIO $ with c ps
Nothing -> return errorval
{- Checks if a remote contains a key. -}
inAnnex :: Git.Repo -> Key -> Annex (Either String Bool)
inAnnex r k = do
showChecking r
onRemote r (check, cantCheck r) "inannex" [Param $ key2file k] []
where
check c p = dispatch <$> safeSystem c p
dispatch ExitSuccess = Right True
dispatch (ExitFailure 1) = Right False
dispatch _ = cantCheck r
{- Removes a key from a remote. -}
dropKey :: Git.Repo -> Key -> Annex Bool
dropKey r key = onRemote r (boolSystem, False) "dropkey"
[ Params "--quiet --force"
, Param $ key2file key
]
[]
rsyncHelper :: Maybe MeterUpdate -> [CommandParam] -> Annex Bool
rsyncHelper callback params = do
showOutput -- make way for progress bar
ifM (liftIO $ (maybe rsync rsyncProgress callback) params)
( return True
, do
showLongNote "rsync failed -- run git annex again to resume file transfer"
return False
)
{- Generates rsync parameters that ssh to the remote and asks it
- to either receive or send the key's content. -}
rsyncParamsRemote :: Bool -> Remote -> Direction -> Key -> FilePath -> AssociatedFile -> Annex [CommandParam]
rsyncParamsRemote direct r direction key file afile = do
u <- getUUID
let fields = (Fields.remoteUUID, fromUUID u)
: (Fields.direct, if direct then "1" else "")
: maybe [] (\f -> [(Fields.associatedFile, f)]) afile
Just (shellcmd, shellparams) <- git_annex_shell (repo r)
(if direction == Download then "sendkey" else "recvkey")
[ Param $ key2file key ]
fields
-- Convert the ssh command into rsync command line.
let eparam = rsyncShell (Param shellcmd:shellparams)
let o = rsyncParams r
return $ if direction == Download
then o ++ rsyncopts eparam dummy (File file)
else o ++ rsyncopts eparam (File file) dummy
where
rsyncopts ps source dest
| end ps == [dashdash] = ps ++ [source, dest]
| otherwise = ps ++ [dashdash, source, dest]
dashdash = Param "--"
{- The rsync shell parameter controls where rsync
- goes, so the source/dest parameter can be a dummy value,
- that just enables remote rsync mode.
- For maximum compatability with some patched rsyncs,
- the dummy value needs to still contain a hostname,
- even though this hostname will never be used. -}
dummy = Param "dummy:"
-- --inplace to resume partial files
rsyncParams :: Remote -> [CommandParam]
rsyncParams r = Params "--progress --inplace" :
map Param (remoteAnnexRsyncOptions $ gitconfig r)
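rsyncParamsRemote above ends the parameter list with "--" followed by a dummy "host:" source, because the remote-shell command decides what is actually transferred. A rough sketch (illustrative only; the git-annex-shell command line is abbreviated, and the "-e" form of the rsyncShell output is an assumption) of the argument list it assembles for a download:

main :: IO ()
main = do
  let o = ["--progress", "--inplace"]  -- rsyncParams r
  -- stand-in for the rsyncShell output (assumed to become rsync's -e option)
  let eparam = ["-e", "ssh example.com 'git-annex-shell sendkey KEY ...'"]
  let dummy = "dummy:"                 -- placeholder host: source; see the comment above
  let dest = "/local/tmp/KEY"
  -- Download: the remote command is the real source, so the source
  -- argument after "--" is only the dummy hostname.
  putStrLn $ unwords $ o ++ eparam ++ ["--", dummy, dest]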
@ -93,7 +93,7 @@ lookupHook hookname action = do
command <- getConfig (annexConfig hook) ""
if null command
then do
fallback <- getConfig (annexConfig $ hookfallback) ""
fallback <- getConfig (annexConfig hookfallback) ""
if null fallback
then do
warning $ "missing configuration for " ++ hook ++ " or " ++ hookfallback
@ -80,7 +80,7 @@ remoteListRefresh = do
remoteList
{- Generates a Remote. -}
remoteGen :: (M.Map UUID RemoteConfig) -> RemoteType -> Git.Repo -> Annex (Maybe Remote)
remoteGen :: M.Map UUID RemoteConfig -> RemoteType -> Git.Repo -> Annex (Maybe Remote)
remoteGen m t r = do
u <- getRepoUUID r
g <- fromRepo id
@ -86,7 +86,7 @@ gen r u c gc = do
then Just $ rsyncUrl o
else Nothing
, readonly = False
, globallyAvailable = not $ islocal
, globallyAvailable = not islocal
, remotetype = remote
}

@ -236,7 +236,7 @@ sendParams = ifM crippledFileSystem
{- Runs an action in an empty scratch directory that can be used to build
- up trees for rsync. -}
withRsyncScratchDir :: (FilePath -> Annex Bool) -> Annex Bool
withRsyncScratchDir :: (FilePath -> Annex a) -> Annex a
withRsyncScratchDir a = do
#ifndef mingw32_HOST_OS
v <- liftIO getProcessID

@ -262,7 +262,7 @@ rsyncRetrieve o k dest callback =
, File dest
]
rsyncRemote :: RsyncOpts -> (Maybe MeterUpdate) -> [CommandParam] -> Annex Bool
rsyncRemote :: RsyncOpts -> Maybe MeterUpdate -> [CommandParam] -> Annex Bool
rsyncRemote o callback params = do
showOutput -- make way for progress bar
ifM (liftIO $ (maybe rsync rsyncProgress callback) ps)
@ -19,7 +19,7 @@ import Config.Cost
import Logs.Web
import Types.Key
import Utility.Metered
import qualified Utility.Url as Url
import qualified Annex.Url as Url
#ifdef WITH_QUVI
import Annex.Quvi
import qualified Utility.Quvi as Quvi

@ -118,7 +118,7 @@ checkKey' key us = firsthit us (Right False) $ \u -> do
#endif
DefaultDownloader -> do
headers <- getHttpHeaders
liftIO $ Right <$> Url.check u' headers (keySize key)
Right <$> Url.withUserAgent (Url.check u' headers $ keySize key)
where
firsthit [] miss _ = return miss
firsthit (u:rest) _ a = do