add RemoteStateHandle

This solves the problem of sameas remotes trampling over per-remote
state. Used for:

* per-remote state, of course
* per-remote metadata, also of course
* per-remote content identifiers, because two remote implementations
  could in theory generate the same content identifier for two different
  pieces of content

While chunk logs are per-remote data, they don't use this, because the
number and size of chunks stored are common properties across sameas
remotes.
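
For orientation, here is a minimal, self-contained sketch of the idea.
The real type lives in the new Types.RemoteState module; only the
application of the RemoteStateHandle constructor to the config uuid
appears in the hunks below, so the exact definition, the deriving
clauses, and the mkRemoteStateHandle helper are assumptions made for
illustration.

module RemoteStateSketch where

-- Stand-in for git-annex's real UUID type (Types.UUID), defined locally
-- so the sketch compiles on its own.
newtype UUID = UUID String
	deriving (Show, Eq)

-- A RemoteStateHandle names where a remote's per-remote data is stored.
-- For sameas remotes it wraps the shared config uuid, so they all read
-- and write the same state, metadata, and content identifiers.
newtype RemoteStateHandle = RemoteStateHandle UUID
	deriving (Show, Eq)

-- Mirrors what the Remote.List hunk below does: use the sameas config
-- uuid when one is set, otherwise fall back to the remote's own uuid.
mkRemoteStateHandle :: UUID -> Maybe UUID -> RemoteStateHandle
mkRemoteStateHandle u configuuid = RemoteStateHandle (maybe u id configuuid)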

The external special remote had a complication: it was theoretically
possible for a remote to send SETSTATE or GETSTATE during INITREMOTE or
EXPORTSUPPORTED. Since the uuid of the remote is typically generated in
Remote.setup, only a Maybe RemoteStateHandle could be passed in at that
point, and it would otherwise have to construct its own. Rather than go
that route, I decided to send an ERROR in this case. It seems unlikely
that any existing external special remote will be affected; it would
have to make up a git-annex key and set state for some reason during
INITREMOTE. I can imagine such a hack, but it doesn't seem worth
complicating the code in such an ugly way to support it.
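
As a toy model of that decision (reusing the RemoteStateHandle newtype
from the sketch above; the Response type and answerGetState function are
illustrative, not the real Remote.External API, though the error text
matches the hunk below):

-- State requests are only served when a handle exists; during INITREMOTE
-- or EXPORTSUPPORTED there is none, so the remote gets an ERROR back
-- instead of git-annex making up a handle for it.
data Response = Value String | ProtocolError String
	deriving Show

answerGetState :: Maybe RemoteStateHandle -> (RemoteStateHandle -> String) -> Response
answerGetState (Just h) lookupState = Value (lookupState h)
answerGetState Nothing _ = ProtocolError "cannot send GETSTATE here"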

Unfortunately, both TestRemote and Annex.Import needed the Remote to
gain a new field holding its RemoteStateHandle.
Joey Hess 2019-10-14 12:33:27 -04:00
parent 37f0abbca8
commit 9828f45d85
GPG key ID: DB12DB0FF05F8F38
31 changed files with 274 additions and 209 deletions

View file

@ -40,8 +40,8 @@ remote = RemoteType
, importSupported = importIsSupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
let this = Remote
{ uuid = u
-- adb operates over USB or wifi, so is not as cheap
@ -90,6 +90,7 @@ gen r u c gc = do
]
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
return $ Just $ specialRemote c
(simplyPrepare $ store serial adir)

View file

@ -52,8 +52,8 @@ list _autoinit = do
r <- liftIO $ Git.Construct.remoteNamed "bittorrent" (pure Git.Construct.fromUnknown)
return [r]
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r _ c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r _ c gc rs = do
cst <- remoteCost gc expensiveRemoteCost
return $ Just Remote
{ uuid = bitTorrentUUID
@ -85,6 +85,7 @@ gen r _ c gc = do
, getInfo = return []
, claimUrl = Just (pure . isSupportedUrl)
, checkUrl = Just checkTorrentUrl
, remoteStateHandle = rs
}
downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)

View file

@ -44,8 +44,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
bupr <- liftIO $ bup2GitRemote buprepo
cst <- remoteCost gc $
if bupLocal buprepo
@ -86,6 +86,7 @@ gen r u c gc = do
, getInfo = return [("repo", buprepo)]
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
return $ Just $ specialRemote' specialcfg c
(simplyPrepare $ store this buprepo)

View file

@ -39,8 +39,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
cst <- remoteCost gc $
if ddarLocal ddarrepo
then nearlyCheapRemoteCost
@ -85,6 +85,7 @@ gen r u c gc = do
, getInfo = return [("repo", ddarRepoLocation ddarrepo)]
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
ddarrepo = maybe (giveup "missing ddarrepo") (DdarRepo gc) (remoteAnnexDdarRepo gc)
specialcfg = (specialRemoteCfg c)

View file

@ -45,8 +45,8 @@ remote = RemoteType
, importSupported = importIsSupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
cst <- remoteCost gc cheapRemoteCost
let chunkconfig = getChunkConfig c
return $ Just $ specialRemote c
@ -97,11 +97,12 @@ gen r u c gc = do
, appendonly = False
, availability = LocallyAvailable
, remotetype = remote
, mkUnavailable = gen r u c $
gc { remoteAnnexDirectory = Just "/dev/null" }
, mkUnavailable = gen r u c
(gc { remoteAnnexDirectory = Just "/dev/null" }) rs
, getInfo = return [("directory", dir)]
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
where
dir = fromMaybe (giveup "missing directory") $ remoteAnnexDirectory gc

View file

@ -50,8 +50,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs
-- readonly mode only downloads urls; does not use external program
| remoteAnnexReadOnly gc = do
cst <- remoteCost gc expensiveRemoteCost
@ -67,7 +67,7 @@ gen r u c gc
exportUnsupported
exportUnsupported
| otherwise = do
external <- newExternal externaltype u c gc
external <- newExternal externaltype u c gc (Just rs)
Annex.addCleanup (RemoteCleanup u) $ stopExternal external
cst <- getCost external r gc
avail <- getAvailability external r gc
@ -132,11 +132,12 @@ gen r u c gc
, availability = avail
, remotetype = remote
{ exportSupported = cheapexportsupported }
, mkUnavailable = gen r u c $
gc { remoteAnnexExternalType = Just "!dne!" }
, mkUnavailable = gen r u c
(gc { remoteAnnexExternalType = Just "!dne!" }) rs
, getInfo = togetinfo
, claimUrl = toclaimurl
, checkUrl = tocheckurl
, remoteStateHandle = rs
}
return $ Just $ specialRemote c
(simplyPrepare tostore)
@ -158,7 +159,7 @@ externalSetup _ mu _ c gc = do
setConfig (remoteConfig (fromJust (lookupName c)) "readonly") (boolConfig True)
return c'
_ -> do
external <- newExternal externaltype u c' gc
external <- newExternal externaltype u c' gc Nothing
handleRequest external INITREMOTE Nothing $ \resp -> case resp of
INITREMOTE_SUCCESS -> result ()
INITREMOTE_FAILURE errmsg -> Just $ giveup errmsg
@ -174,7 +175,7 @@ checkExportSupported c gc = do
let externaltype = fromMaybe (giveup "Specify externaltype=") $
remoteAnnexExternalType gc <|> M.lookup "externaltype" c
checkExportSupported'
=<< newExternal externaltype NoUUID c gc
=<< newExternal externaltype NoUUID c gc Nothing
checkExportSupported' :: External -> Annex Bool
checkExportSupported' external = go `catchNonAsync` (const (return False))
@ -414,11 +415,16 @@ handleRequest' st external req mp responsehandler
<$> preferredContentMapRaw
send $ VALUE expr
handleRemoteRequest (SETSTATE key state) =
setRemoteState (externalUUID external) key state
handleRemoteRequest (GETSTATE key) = do
state <- fromMaybe ""
<$> getRemoteState (externalUUID external) key
send $ VALUE state
case externalRemoteStateHandle external of
Just h -> setRemoteState h key state
Nothing -> senderror "cannot send SETSTATE here"
handleRemoteRequest (GETSTATE key) =
case externalRemoteStateHandle external of
Just h -> do
state <- fromMaybe ""
<$> getRemoteState h key
send $ VALUE state
Nothing -> senderror "cannot send GETSTATE here"
handleRemoteRequest (SETURLPRESENT key url) =
setUrlPresent key url
handleRemoteRequest (SETURLMISSING key url) =
@ -432,12 +438,12 @@ handleRequest' st external req mp responsehandler
send (VALUE "") -- end of list
handleRemoteRequest (DEBUG msg) = liftIO $ debugM "external" msg
handleRemoteRequest (INFO msg) = showInfo msg
handleRemoteRequest (VERSION _) =
sendMessage st external (ERROR "too late to send VERSION")
handleRemoteRequest (VERSION _) = senderror "too late to send VERSION"
handleAsyncMessage (ERROR err) = giveup $ "external special remote error: " ++ err
send = sendMessage st external
senderror = sendMessage st external . ERROR
credstorage setting = CredPairStorage
{ credPairFile = base

View file

@ -37,7 +37,7 @@ import Types.StandardGroups (PreferredContentExpression)
import Utility.Metered (BytesProcessed(..))
import Types.Transfer (Direction(..))
import Config.Cost (Cost)
import Types.Remote (RemoteConfig)
import Types.Remote (RemoteConfig, RemoteStateHandle)
import Types.Export
import Types.Availability (Availability(..))
import Types.Key
@ -57,16 +57,18 @@ data External = External
, externalLastPid :: TVar PID
, externalDefaultConfig :: RemoteConfig
, externalGitConfig :: RemoteGitConfig
, externalRemoteStateHandle :: Maybe RemoteStateHandle
}
newExternal :: ExternalType -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex External
newExternal externaltype u c gc = liftIO $ External
newExternal :: ExternalType -> UUID -> RemoteConfig -> RemoteGitConfig -> Maybe RemoteStateHandle -> Annex External
newExternal externaltype u c gc rs = liftIO $ External
<$> pure externaltype
<*> pure u
<*> atomically (newTVar [])
<*> atomically (newTVar 0)
<*> pure c
<*> pure gc
<*> pure rs
type ExternalType = String

View file

@ -65,16 +65,16 @@ remote = RemoteType
, importSupported = importUnsupported
}
chainGen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
chainGen gcryptr u c gc = do
chainGen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
chainGen gcryptr u c gc rs = do
g <- gitRepo
-- get underlying git repo with real path, not gcrypt path
r <- liftIO $ Git.GCrypt.encryptedRemote g gcryptr
let r' = r { Git.remoteName = Git.remoteName gcryptr }
gen r' u c gc
gen r' u c gc rs
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen baser u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen baser u c gc rs = do
-- doublecheck that cache matches underlying repo's gcrypt-id
-- (which might not be set), only for local repos
(mgcryptid, r) <- getGCryptId True baser gc
@ -82,7 +82,7 @@ gen baser u c gc = do
case (mgcryptid, Git.GCrypt.remoteRepoId g (Git.remoteName baser)) of
(Just gcryptid, Just cachedgcryptid)
| gcryptid /= cachedgcryptid -> resetup gcryptid r
_ -> gen' r u c gc
_ -> gen' r u c gc rs
where
-- A different drive may have been mounted, making a different
-- gcrypt remote available. So need to set the cached
@ -97,13 +97,13 @@ gen baser u c gc = do
setGcryptEncryption c' remotename
storeUUIDIn (remoteConfig baser "uuid") u'
setConfig (ConfigKey $ Git.GCrypt.remoteConfigKey "gcrypt-id" remotename) gcryptid
gen' r u' c' gc
gen' r u' c' gc rs
_ -> do
warning $ "not using unknown gcrypt repository pointed to by remote " ++ Git.repoDescribe r
return Nothing
gen' :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen' r u c gc = do
gen' :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen' r u c gc rs = do
cst <- remoteCost gc $
if repoCheap r then nearlyCheapRemoteCost else expensiveRemoteCost
(rsynctransport, rsyncurl) <- rsyncTransportToObjects r gc
@ -137,6 +137,7 @@ gen' r u c gc = do
, getInfo = gitRepoInfo this
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
return $ Just $ specialRemote' specialcfg c
(simplyPrepare $ store this rsyncopts)

View file

@ -146,17 +146,17 @@ configRead autoinit r = do
(False, _, NoUUID) -> tryGitConfigRead autoinit r
_ -> return r
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs
-- Remote.GitLFS may be used with a repo that is also encrypted
-- with gcrypt so is checked first.
| remoteAnnexGitLFS gc = Remote.GitLFS.gen r u c gc
| Git.GCrypt.isEncrypted r = Remote.GCrypt.chainGen r u c gc
| remoteAnnexGitLFS gc = Remote.GitLFS.gen r u c gc rs
| Git.GCrypt.isEncrypted r = Remote.GCrypt.chainGen r u c gc rs
| otherwise = case repoP2PAddress r of
Nothing -> do
st <- mkState r u gc
go st <$> remoteCost gc defcst
Just addr -> Remote.P2P.chainGen addr r u c gc
Just addr -> Remote.P2P.chainGen addr r u c gc rs
where
defcst = if repoCheap r then cheapRemoteCost else expensiveRemoteCost
go st cst = Just new
@ -190,14 +190,15 @@ gen r u c gc
, appendonly = False
, availability = availabilityCalc r
, remotetype = remote
, mkUnavailable = unavailable r u c gc
, mkUnavailable = unavailable r u c gc rs
, getInfo = gitRepoInfo new
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
unavailable :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
unavailable r u c gc = gen r' u c gc
unavailable :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
unavailable r = gen r'
where
r' = case Git.location r of
Git.Local { Git.gitdir = d } ->

View file

@ -57,8 +57,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
-- If the repo uses gcrypt, get the underlaying repo without the
-- gcrypt url, to do LFS endpoint discovery on.
r' <- if Git.GCrypt.isEncrypted r
@ -70,10 +70,10 @@ gen r u c gc = do
h <- liftIO $ newTVarIO $ LFSHandle Nothing Nothing sem r' gc
cst <- remoteCost gc expensiveRemoteCost
return $ Just $ specialRemote' specialcfg c
(simplyPrepare $ store u h)
(simplyPrepare $ retrieve u h)
(simplyPrepare $ store rs h)
(simplyPrepare $ retrieve rs h)
(simplyPrepare $ remove h)
(simplyPrepare $ checkKey u h)
(simplyPrepare $ checkKey rs h)
(this cst)
where
this cst = Remote
@ -109,6 +109,7 @@ gen r u c gc = do
, getInfo = gitRepoInfo (this cst)
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
specialcfg = (specialRemoteCfg c)
-- chunking would not improve git-lfs
@ -327,8 +328,8 @@ extractKeySize k
| isEncKey k = Nothing
| otherwise = keySize k
mkUploadRequest :: UUID -> Key -> FilePath -> Annex (LFS.TransferRequest, LFS.SHA256, Integer)
mkUploadRequest u k content = case (extractKeySha256 k, extractKeySize k) of
mkUploadRequest :: RemoteStateHandle -> Key -> FilePath -> Annex (LFS.TransferRequest, LFS.SHA256, Integer)
mkUploadRequest rs k content = case (extractKeySha256 k, extractKeySize k) of
(Just sha256, Just size) ->
ret sha256 size
(_, Just size) -> do
@ -355,12 +356,12 @@ mkUploadRequest u k content = case (extractKeySha256 k, extractKeySize k) of
}
return (req, sha256, size)
remembersha256 sha256 = setRemoteState u k (T.unpack sha256)
rememberboth sha256 size = setRemoteState u k $
remembersha256 sha256 = setRemoteState rs k (T.unpack sha256)
rememberboth sha256 size = setRemoteState rs k $
show size ++ " " ++ T.unpack sha256
mkDownloadRequest :: UUID -> Key -> Annex (Maybe (LFS.TransferRequest, LFS.SHA256, Integer))
mkDownloadRequest u k = case (extractKeySha256 k, extractKeySize k) of
mkDownloadRequest :: RemoteStateHandle -> Key -> Annex (Maybe (LFS.TransferRequest, LFS.SHA256, Integer))
mkDownloadRequest rs k = case (extractKeySha256 k, extractKeySize k) of
(Just sha256, Just size) -> ret sha256 size
(_, Just size) ->
remembersha256 >>= \case
@ -383,8 +384,8 @@ mkDownloadRequest u k = case (extractKeySha256 k, extractKeySize k) of
, LFS.req_objects = [obj]
}
return $ Just (req, sha256, size)
remembersha256 = fmap T.pack <$> getRemoteState u k
rememberboth = maybe Nothing parse <$> getRemoteState u k
remembersha256 = fmap T.pack <$> getRemoteState rs k
rememberboth = maybe Nothing parse <$> getRemoteState rs k
where
parse s = case words s of
[ssize, ssha256] -> do
@ -392,11 +393,11 @@ mkDownloadRequest u k = case (extractKeySha256 k, extractKeySize k) of
return (T.pack ssha256, size)
_ -> Nothing
store :: UUID -> TVar LFSHandle -> Storer
store u h = fileStorer $ \k src p -> getLFSEndpoint LFS.RequestUpload h >>= \case
store :: RemoteStateHandle -> TVar LFSHandle -> Storer
store rs h = fileStorer $ \k src p -> getLFSEndpoint LFS.RequestUpload h >>= \case
Nothing -> return False
Just endpoint -> flip catchNonAsync failederr $ do
(req, sha256, size) <- mkUploadRequest u k src
(req, sha256, size) <- mkUploadRequest rs k src
sendTransferRequest req endpoint >>= \case
Left err -> do
warning err
@ -424,10 +425,10 @@ store u h = fileStorer $ \k src p -> getLFSEndpoint LFS.RequestUpload h >>= \cas
warning (show e)
return False
retrieve :: UUID -> TVar LFSHandle -> Retriever
retrieve u h = fileRetriever $ \dest k p -> getLFSEndpoint LFS.RequestDownload h >>= \case
retrieve :: RemoteStateHandle -> TVar LFSHandle -> Retriever
retrieve rs h = fileRetriever $ \dest k p -> getLFSEndpoint LFS.RequestDownload h >>= \case
Nothing -> giveup "unable to connect to git-lfs endpoint"
Just endpoint -> mkDownloadRequest u k >>= \case
Just endpoint -> mkDownloadRequest rs k >>= \case
Nothing -> giveup "unable to download this object from git-lfs"
Just (req, sha256, size) -> sendTransferRequest req endpoint >>= \case
Left err -> giveup (show err)
@ -448,10 +449,10 @@ retrieve u h = fileRetriever $ \dest k p -> getLFSEndpoint LFS.RequestDownload h
uo <- getUrlOptions
liftIO $ downloadConduit p req dest uo
checkKey :: UUID -> TVar LFSHandle -> CheckPresent
checkKey u h key = getLFSEndpoint LFS.RequestDownload h >>= \case
checkKey :: RemoteStateHandle -> TVar LFSHandle -> CheckPresent
checkKey rs h key = getLFSEndpoint LFS.RequestDownload h >>= \case
Nothing -> giveup "unable to connect to git-lfs endpoint"
Just endpoint -> mkDownloadRequest u key >>= \case
Just endpoint -> mkDownloadRequest rs key >>= \case
-- Unable to find enough information to request the key
-- from git-lfs, so it's not present there.
Nothing -> return False

View file

@ -39,8 +39,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = new <$> remoteCost gc veryExpensiveRemoteCost
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = new <$> remoteCost gc veryExpensiveRemoteCost
where
new cst = Just $ specialRemote' specialcfg c
(prepareStore this)
@ -83,6 +83,7 @@ gen r u c gc = new <$> remoteCost gc veryExpensiveRemoteCost
[ ("glacier vault", getVault c) ]
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
specialcfg = (specialRemoteCfg c)
-- Disabled until jobList gets support for chunks.

View file

@ -99,8 +99,8 @@ adjustExportImportRemoteType rt = rt { setup = setup' }
-- | Adjust a remote to support exporttree=yes and importree=yes.
--
-- Note that all remotes with importree=yes also have exporttree=yes.
adjustExportImport :: Remote -> Annex Remote
adjustExportImport r = case M.lookup "exporttree" (config r) of
adjustExportImport :: Remote -> RemoteStateHandle -> Annex Remote
adjustExportImport r rs = case M.lookup "exporttree" (config r) of
Nothing -> return $ notexport r
Just c -> case yesNo c of
Just True -> ifM (isExportSupported r)
@ -136,7 +136,7 @@ adjustExportImport r = case M.lookup "exporttree" (config r) of
let keycids k = do
db <- getciddb ciddbv
liftIO $ ContentIdentifier.getContentIdentifiers db (uuid r') k
liftIO $ ContentIdentifier.getContentIdentifiers db rs k
let checkpresent k loc =
checkPresentExportWithContentIdentifier
@ -152,16 +152,16 @@ adjustExportImport r = case M.lookup "exporttree" (config r) of
updateexportdb exportdb exportdbv
oldks <- liftIO $ Export.getExportTreeKey exportdb loc
oldcids <- liftIO $ concat
<$> mapM (ContentIdentifier.getContentIdentifiers db (uuid r')) oldks
<$> mapM (ContentIdentifier.getContentIdentifiers db rs) oldks
storeExportWithContentIdentifier (importActions r') f k loc oldcids p >>= \case
Left err -> do
warning err
return False
Right newcid -> do
withExclusiveLock gitAnnexContentIdentifierLock $ do
liftIO $ ContentIdentifier.recordContentIdentifier db (uuid r') newcid k
liftIO $ ContentIdentifier.recordContentIdentifier db rs newcid k
liftIO $ ContentIdentifier.flushDbQueue db
recordContentIdentifier (uuid r') newcid k
recordContentIdentifier rs newcid k
return True
, removeExport = \k loc ->
removeExportWithContentIdentifier (importActions r') k loc

View file

@ -35,8 +35,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
cst <- remoteCost gc expensiveRemoteCost
return $ Just $ specialRemote c
(simplyPrepare $ store hooktype)
@ -70,11 +70,13 @@ gen r u c gc = do
, appendonly = False
, availability = GloballyAvailable
, remotetype = remote
, mkUnavailable = gen r u c $
gc { remoteAnnexHookType = Just "!dne!" }
, mkUnavailable = gen r u c
(gc { remoteAnnexHookType = Just "!dne!" })
rs
, getInfo = return [("hooktype", hooktype)]
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
where
hooktype = fromMaybe (giveup "missing hooktype") $ remoteAnnexHookType gc

View file

@ -1,12 +1,12 @@
{-# LANGUAGE CPP #-}
{- git-annex remote list
-
- Copyright 2011,2012 Joey Hess <id@joeyh.name>
- Copyright 2011-2019 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU AGPL version 3 or higher.
-}
{-# LANGUAGE CPP #-}
module Remote.List where
import qualified Data.Map as M
@ -15,6 +15,7 @@ import Annex.Common
import qualified Annex
import Logs.Remote
import Types.Remote
import Types.RemoteState
import Annex.UUID
import Remote.Helper.Hooks
import Remote.Helper.ReadOnly
@ -106,10 +107,11 @@ remoteGen m t g = do
u <- getRepoUUID g
gc <- Annex.getRemoteGitConfig g
let cu = fromMaybe u $ remoteAnnexConfigUUID gc
let rs = RemoteStateHandle cu
let c = fromMaybe M.empty $ M.lookup cu m
generate t g u c gc >>= \case
generate t g u c gc rs >>= \case
Nothing -> return Nothing
Just r -> Just <$> adjustExportImport (adjustReadOnly (addHooks r))
Just r -> Just <$> adjustExportImport (adjustReadOnly (addHooks r)) rs
{- Updates a local git Remote, re-reading its git config. -}
updateRemote :: Remote -> Annex (Maybe Remote)

View file

@ -35,14 +35,14 @@ remote = RemoteType
-- Remote.Git takes care of enumerating P2P remotes,
-- and will call chainGen on them.
, enumerate = const (return [])
, generate = \_ _ _ _ -> return Nothing
, generate = \_ _ _ _ _ -> return Nothing
, setup = error "P2P remotes are set up using git-annex p2p"
, exportSupported = exportUnsupported
, importSupported = importUnsupported
}
chainGen :: P2PAddress -> Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
chainGen addr r u c gc = do
chainGen :: P2PAddress -> Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
chainGen addr r u c gc rs = do
connpool <- mkConnectionPool
cst <- remoteCost gc veryExpensiveRemoteCost
let protorunner = runProto u addr connpool
@ -76,6 +76,7 @@ chainGen addr r u c gc = do
, getInfo = gitRepoInfo this
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
return (Just this)

View file

@ -54,8 +54,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
cst <- remoteCost gc expensiveRemoteCost
(transport, url) <- rsyncTransport gc $
fromMaybe (giveup "missing rsyncurl") $ remoteAnnexRsyncUrl gc
@ -104,6 +104,7 @@ gen r u c gc = do
, getInfo = return [("url", url)]
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
where
specialcfg = (specialRemoteCfg c)

View file

@ -78,8 +78,8 @@ remote = RemoteType
, importSupported = importIsSupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
cst <- remoteCost gc expensiveRemoteCost
info <- extractS3Info c
hdl <- mkS3HandleVar c gc u
@ -88,9 +88,9 @@ gen r u c gc = do
where
new cst info hdl magic = Just $ specialRemote c
(simplyPrepare $ store hdl this info magic)
(simplyPrepare $ retrieve hdl this c info)
(simplyPrepare $ retrieve hdl this rs c info)
(simplyPrepare $ remove hdl this info)
(simplyPrepare $ checkKey hdl this c info)
(simplyPrepare $ checkKey hdl this rs c info)
this
where
this = Remote
@ -108,23 +108,23 @@ gen r u c gc = do
, checkPresent = checkPresentDummy
, checkPresentCheap = False
, exportActions = ExportActions
{ storeExport = storeExportS3 hdl this info magic
{ storeExport = storeExportS3 hdl this rs info magic
, retrieveExport = retrieveExportS3 hdl this info
, removeExport = removeExportS3 hdl this info
, removeExport = removeExportS3 hdl this rs info
, checkPresentExport = checkPresentExportS3 hdl this info
-- S3 does not have directories.
, removeExportDirectory = Nothing
, renameExport = renameExportS3 hdl this info
, renameExport = renameExportS3 hdl this rs info
}
, importActions = ImportActions
{ listImportableContents = listImportableContentsS3 hdl this info
, retrieveExportWithContentIdentifier = retrieveExportWithContentIdentifierS3 hdl this info
, storeExportWithContentIdentifier = storeExportWithContentIdentifierS3 hdl this info magic
, removeExportWithContentIdentifier = removeExportWithContentIdentifierS3 hdl this info
, retrieveExportWithContentIdentifier = retrieveExportWithContentIdentifierS3 hdl this rs info
, storeExportWithContentIdentifier = storeExportWithContentIdentifierS3 hdl this rs info magic
, removeExportWithContentIdentifier = removeExportWithContentIdentifierS3 hdl this rs info
, removeExportDirectoryWhenEmpty = Nothing
, checkPresentExportWithContentIdentifier = checkPresentExportWithContentIdentifierS3 hdl this info
}
, whereisKey = Just (getPublicWebUrls u info c)
, whereisKey = Just (getPublicWebUrls u rs info c)
, remoteFsck = Nothing
, repairRepo = Nothing
, config = c
@ -135,10 +135,11 @@ gen r u c gc = do
, appendonly = versioning info
, availability = GloballyAvailable
, remotetype = remote
, mkUnavailable = gen r u (M.insert "host" "!dne!" c) gc
, mkUnavailable = gen r u (M.insert "host" "!dne!" c) gc rs
, getInfo = includeCredsInfo c (AWS.creds u) (s3Info c info)
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
s3Setup :: SetupStage -> Maybe UUID -> Maybe CredPair -> RemoteConfig -> RemoteGitConfig -> Annex (RemoteConfig, UUID)
@ -293,16 +294,16 @@ storeHelper info h magic f object p = liftIO $ case partSize info of
{- Implemented as a fileRetriever, that uses conduit to stream the chunks
- out to the file. Would be better to implement a byteRetriever, but
- that is difficult. -}
retrieve :: S3HandleVar -> Remote -> RemoteConfig -> S3Info -> Retriever
retrieve hv r c info = fileRetriever $ \f k p -> withS3Handle hv $ \case
retrieve :: S3HandleVar -> Remote -> RemoteStateHandle -> RemoteConfig -> S3Info -> Retriever
retrieve hv r rs c info = fileRetriever $ \f k p -> withS3Handle hv $ \case
(Just h) ->
eitherS3VersionID info (uuid r) c k (T.pack $ bucketObject info k) >>= \case
eitherS3VersionID info rs c k (T.pack $ bucketObject info k) >>= \case
Left failreason -> do
warning failreason
giveup "cannot download content"
Right loc -> retrieveHelper info h loc f p
Nothing ->
getPublicWebUrls' (uuid r) info c k >>= \case
getPublicWebUrls' (uuid r) rs info c k >>= \case
Left failreason -> do
warning failreason
giveup "cannot download content"
@ -330,17 +331,17 @@ remove hv r info k = withS3HandleOrFail (uuid r) hv $ \h -> liftIO $ runResource
S3.DeleteObject (T.pack $ bucketObject info k) (bucket info)
return $ either (const False) (const True) res
checkKey :: S3HandleVar -> Remote -> RemoteConfig -> S3Info -> CheckPresent
checkKey hv r c info k = withS3Handle hv $ \case
checkKey :: S3HandleVar -> Remote -> RemoteStateHandle -> RemoteConfig -> S3Info -> CheckPresent
checkKey hv r rs c info k = withS3Handle hv $ \case
Just h -> do
showChecking r
eitherS3VersionID info (uuid r) c k (T.pack $ bucketObject info k) >>= \case
eitherS3VersionID info rs c k (T.pack $ bucketObject info k) >>= \case
Left failreason -> do
warning failreason
giveup "cannot check content"
Right loc -> checkKeyHelper info h loc
Nothing ->
getPublicWebUrls' (uuid r) info c k >>= \case
getPublicWebUrls' (uuid r) rs info c k >>= \case
Left failreason -> do
warning failreason
giveup "cannot check content"
@ -366,12 +367,12 @@ checkKeyHelper' info h o limit = liftIO $ runResourceT $ do
where
req = limit $ S3.headObject (bucket info) o
storeExportS3 :: S3HandleVar -> Remote -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
storeExportS3 hv r info magic f k loc p = fst
<$> storeExportS3' hv r info magic f k loc p
storeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex Bool
storeExportS3 hv r rs info magic f k loc p = fst
<$> storeExportS3' hv r rs info magic f k loc p
storeExportS3' :: S3HandleVar -> Remote -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex (Bool, (Maybe S3Etag, Maybe S3VersionID))
storeExportS3' hv r info magic f k loc p = withS3Handle hv $ \case
storeExportS3' :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex (Bool, (Maybe S3Etag, Maybe S3VersionID))
storeExportS3' hv r rs info magic f k loc p = withS3Handle hv $ \case
Just h -> catchNonAsync (go h) (\e -> warning (show e) >> return (False, (Nothing, Nothing)))
Nothing -> do
warning $ needS3Creds (uuid r)
@ -380,7 +381,7 @@ storeExportS3' hv r info magic f k loc p = withS3Handle hv $ \case
go h = do
let o = T.pack $ bucketExportLocation info loc
(metag, mvid) <- storeHelper info h magic f o p
setS3VersionID info (uuid r) k mvid
setS3VersionID info rs k mvid
return (True, (metag, mvid))
retrieveExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> FilePath -> MeterUpdate -> Annex Bool
@ -399,9 +400,9 @@ retrieveExportS3 hv r info _k loc f p =
liftIO . Url.download p (geturl exportloc) f
exportloc = bucketExportLocation info loc
removeExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> Annex Bool
removeExportS3 hv r info k loc = withS3Handle hv $ \case
Just h -> checkVersioning info (uuid r) k $
removeExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> Annex Bool
removeExportS3 hv r rs info k loc = withS3Handle hv $ \case
Just h -> checkVersioning info rs k $
catchNonAsync (go h) (\e -> warning (show e) >> return False)
Nothing -> do
warning $ needS3Creds (uuid r)
@ -423,11 +424,11 @@ checkPresentExportS3 hv r info k loc = withS3Handle hv $ \case
giveup "No S3 credentials configured"
-- S3 has no move primitive; copy and delete.
renameExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
renameExportS3 hv r info k src dest = Just <$> go
renameExportS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe Bool)
renameExportS3 hv r rs info k src dest = Just <$> go
where
go = withS3Handle hv $ \case
Just h -> checkVersioning info (uuid r) k $
Just h -> checkVersioning info rs k $
catchNonAsync (go' h) (\_ -> return False)
Nothing -> do
warning $ needS3Creds (uuid r)
@ -543,8 +544,8 @@ mkImportableContentsVersioned info = build . groupfiles
| otherwise =
i : removemostrecent mtime rest
retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> S3Info -> ExportLocation -> ContentIdentifier -> FilePath -> Annex (Maybe Key) -> MeterUpdate -> Annex (Maybe Key)
retrieveExportWithContentIdentifierS3 hv r info loc cid dest mkkey p = withS3Handle hv $ \case
retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> ContentIdentifier -> FilePath -> Annex (Maybe Key) -> MeterUpdate -> Annex (Maybe Key)
retrieveExportWithContentIdentifierS3 hv r rs info loc cid dest mkkey p = withS3Handle hv $ \case
Nothing -> do
warning $ needS3Creds (uuid r)
return Nothing
@ -555,7 +556,7 @@ retrieveExportWithContentIdentifierS3 hv r info loc cid dest mkkey p = withS3Han
mk <- mkkey
case (mk, extractContentIdentifier cid o) of
(Just k, Right vid) ->
setS3VersionID info (uuid r) k vid
setS3VersionID info rs k vid
_ -> noop
return mk
where
@ -577,8 +578,8 @@ rewritePreconditionException a = catchJust (Url.matchStatusCodeException want) a
--
-- When the bucket is not versioned, data loss can result.
-- This is why that configuration requires --force to enable.
storeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex (Either String ContentIdentifier)
storeExportWithContentIdentifierS3 hv r info magic src k loc _overwritablecids p
storeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Maybe Magic -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex (Either String ContentIdentifier)
storeExportWithContentIdentifierS3 hv r rs info magic src k loc _overwritablecids p
| versioning info = go
-- FIXME Actual aws version that supports getting Etag for a store
-- is not known; patch not merged yet.
@ -590,7 +591,7 @@ storeExportWithContentIdentifierS3 hv r info magic src k loc _overwritablecids p
Left "git-annex is built with too old a version of the aws library to support this operation"
#endif
where
go = storeExportS3' hv r info magic src k loc p >>= \case
go = storeExportS3' hv r rs info magic src k loc p >>= \case
(False, _) -> return $ Left "failed to store content in S3 bucket"
(True, (_, Just vid)) -> return $ Right $
mkS3VersionedContentIdentifier vid
@ -605,9 +606,9 @@ storeExportWithContentIdentifierS3 hv r info magic src k loc _overwritablecids p
--
-- When the bucket is not versioned, data loss can result.
-- This is why that configuration requires --force to enable.
removeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
removeExportWithContentIdentifierS3 hv r info k loc _removeablecids =
removeExportS3 hv r info k loc
removeExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
removeExportWithContentIdentifierS3 hv r rs info k loc _removeablecids =
removeExportS3 hv r rs info k loc
checkPresentExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> [ContentIdentifier] -> Annex Bool
checkPresentExportWithContentIdentifierS3 hv r info _k loc knowncids =
@ -980,11 +981,11 @@ s3Info c info = catMaybes
showstorageclass (S3.OtherStorageClass t) = T.unpack t
showstorageclass sc = show sc
getPublicWebUrls :: UUID -> S3Info -> RemoteConfig -> Key -> Annex [URLString]
getPublicWebUrls u info c k = either (const []) id <$> getPublicWebUrls' u info c k
getPublicWebUrls :: UUID -> RemoteStateHandle -> S3Info -> RemoteConfig -> Key -> Annex [URLString]
getPublicWebUrls u rs info c k = either (const []) id <$> getPublicWebUrls' u rs info c k
getPublicWebUrls' :: UUID -> S3Info -> RemoteConfig -> Key -> Annex (Either String [URLString])
getPublicWebUrls' u info c k
getPublicWebUrls' :: UUID -> RemoteStateHandle -> S3Info -> RemoteConfig -> Key -> Annex (Either String [URLString])
getPublicWebUrls' u rs info c k
| not (public info) = return $ Left $
"S3 bucket does not allow public access; " ++ needS3Creds u
| exportTree c = if versioning info
@ -1000,7 +1001,7 @@ getPublicWebUrls' u info c k
Nothing -> return nopublicurl
where
nopublicurl = Left "No publicurl is configured for this remote"
getversionid url = getS3VersionIDPublicUrls url info u k >>= \case
getversionid url = getS3VersionIDPublicUrls url info rs k >>= \case
[] -> return (Left "Remote is configured to use versioning, but no S3 version ID is recorded for this key")
l -> return (Right l)
@ -1101,20 +1102,20 @@ extractContentIdentifier (ContentIdentifier v) o =
"#" -> Left (T.drop 1 t)
_ -> Right (mkS3VersionID o (Just t))
setS3VersionID :: S3Info -> UUID -> Key -> Maybe S3VersionID -> Annex ()
setS3VersionID info u k vid
| versioning info = maybe noop (setS3VersionID' u k) vid
setS3VersionID :: S3Info -> RemoteStateHandle -> Key -> Maybe S3VersionID -> Annex ()
setS3VersionID info rs k vid
| versioning info = maybe noop (setS3VersionID' rs k) vid
| otherwise = noop
setS3VersionID' :: UUID -> Key -> S3VersionID -> Annex ()
setS3VersionID' u k vid = addRemoteMetaData k $
RemoteMetaData u (updateMetaData s3VersionField v emptyMetaData)
setS3VersionID' :: RemoteStateHandle -> Key -> S3VersionID -> Annex ()
setS3VersionID' rs k vid = addRemoteMetaData k rs $
updateMetaData s3VersionField v emptyMetaData
where
v = mkMetaValue (CurrentlySet True) (formatS3VersionID vid)
getS3VersionID :: UUID -> Key -> Annex [S3VersionID]
getS3VersionID u k = do
(RemoteMetaData _ m) <- getCurrentRemoteMetaData u k
getS3VersionID :: RemoteStateHandle -> Key -> Annex [S3VersionID]
getS3VersionID rs k = do
(RemoteMetaData _ m) <- getCurrentRemoteMetaData rs k
return $ mapMaybe parseS3VersionID $ map unwrap $ S.toList $
metaDataValues s3VersionField m
where
@ -1123,9 +1124,9 @@ getS3VersionID u k = do
s3VersionField :: MetaField
s3VersionField = mkMetaFieldUnchecked "V"
eitherS3VersionID :: S3Info -> UUID -> RemoteConfig -> Key -> S3.Object -> Annex (Either String (Either S3.Object S3VersionID))
eitherS3VersionID info u c k fallback
| versioning info = getS3VersionID u k >>= return . \case
eitherS3VersionID :: S3Info -> RemoteStateHandle -> RemoteConfig -> Key -> S3.Object -> Annex (Either String (Either S3.Object S3VersionID))
eitherS3VersionID info rs c k fallback
| versioning info = getS3VersionID rs k >>= return . \case
[] -> if exportTree c
then Left "Remote is configured to use versioning, but no S3 version ID is recorded for this key"
else Right (Left fallback)
@ -1141,9 +1142,9 @@ s3VersionIDPublicUrl mk info (S3VersionID obj vid) = concat
, T.unpack vid -- version ID is "url ready" so no escaping needed
]
getS3VersionIDPublicUrls :: (S3Info -> BucketObject -> URLString) -> S3Info -> UUID -> Key -> Annex [URLString]
getS3VersionIDPublicUrls mk info u k =
map (s3VersionIDPublicUrl mk info) <$> getS3VersionID u k
getS3VersionIDPublicUrls :: (S3Info -> BucketObject -> URLString) -> S3Info -> RemoteStateHandle -> Key -> Annex [URLString]
getS3VersionIDPublicUrls mk info rs k =
map (s3VersionIDPublicUrl mk info) <$> getS3VersionID rs k
-- Enable versioning on the bucket can only be done at init time;
-- setting versioning in a bucket that git-annex has already exported
@ -1189,9 +1190,9 @@ enableBucketVersioning ss info _ _ _ = do
-- were created without versioning, some unversioned files exported to
-- them, and then versioning enabled, and this is to avoid data loss in
-- those cases.
checkVersioning :: S3Info -> UUID -> Key -> Annex Bool -> Annex Bool
checkVersioning info u k a
| versioning info = getS3VersionID u k >>= \case
checkVersioning :: S3Info -> RemoteStateHandle -> Key -> Annex Bool -> Annex Bool
checkVersioning info rs k a
| versioning info = getS3VersionID rs k >>= \case
[] -> do
warning $ "Remote is configured to use versioning, but no S3 version ID is recorded for this key, so it cannot safely be modified."
return False

View file

@ -61,8 +61,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = do
cst <- remoteCost gc expensiveRemoteCost
hdl <- liftIO $ TahoeHandle
<$> maybe (defaultTahoeConfigDir u) return (remoteAnnexTahoe gc)
@ -71,18 +71,18 @@ gen r u c gc = do
{ uuid = u
, cost = cst
, name = Git.repoDescribe r
, storeKey = store u hdl
, retrieveKeyFile = retrieve u hdl
, storeKey = store rs hdl
, retrieveKeyFile = retrieve rs hdl
, retrieveKeyFileCheap = \_ _ _ -> return False
-- Tahoe cryptographically verifies content.
, retrievalSecurityPolicy = RetrievalAllKeysSecure
, removeKey = remove
, lockContent = Nothing
, checkPresent = checkKey u hdl
, checkPresent = checkKey rs hdl
, checkPresentCheap = False
, exportActions = exportUnsupported
, importActions = importUnsupported
, whereisKey = Just (getWhereisKey u)
, whereisKey = Just (getWhereisKey rs)
, remoteFsck = Nothing
, repairRepo = Nothing
, config = c
@ -97,6 +97,7 @@ gen r u c gc = do
, getInfo = return []
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
tahoeSetup :: SetupStage -> Maybe UUID -> Maybe CredPair -> RemoteConfig -> RemoteGitConfig -> Annex (RemoteConfig, UUID)
@ -119,14 +120,14 @@ tahoeSetup _ mu _ c _ = do
furlk = "introducer-furl"
missingfurl = giveup "Set TAHOE_FURL to the introducer furl to use."
store :: UUID -> TahoeHandle -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store u hdl k _f _p = sendAnnex k noop $ \src ->
store :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store rs hdl k _f _p = sendAnnex k noop $ \src ->
parsePut <$> liftIO (readTahoe hdl "put" [File src]) >>= maybe
(return False)
(\cap -> storeCapability u k cap >> return True)
(\cap -> storeCapability rs k cap >> return True)
retrieve :: UUID -> TahoeHandle -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
retrieve u hdl k _f d _p = unVerified $ go =<< getCapability u k
retrieve :: RemoteStateHandle -> TahoeHandle -> Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
retrieve rs hdl k _f d _p = unVerified $ go =<< getCapability rs k
where
go Nothing = return False
go (Just cap) = liftIO $ requestTahoe hdl "get" [Param cap, File d]
@ -136,8 +137,8 @@ remove _k = do
warning "content cannot be removed from tahoe remote"
return False
checkKey :: UUID -> TahoeHandle -> Key -> Annex Bool
checkKey u hdl k = go =<< getCapability u k
checkKey :: RemoteStateHandle -> TahoeHandle -> Key -> Annex Bool
checkKey rs hdl k = go =<< getCapability rs k
where
go Nothing = return False
go (Just cap) = liftIO $ do
@ -233,14 +234,14 @@ tahoeParams :: TahoeConfigDir -> String -> [CommandParam] -> [CommandParam]
tahoeParams configdir command params =
Param "-d" : File configdir : Param command : params
storeCapability :: UUID -> Key -> Capability -> Annex ()
storeCapability u k cap = setRemoteState u k cap
storeCapability :: RemoteStateHandle -> Key -> Capability -> Annex ()
storeCapability rs k cap = setRemoteState rs k cap
getCapability :: UUID -> Key -> Annex (Maybe Capability)
getCapability u k = getRemoteState u k
getCapability :: RemoteStateHandle -> Key -> Annex (Maybe Capability)
getCapability rs k = getRemoteState rs k
getWhereisKey :: UUID -> Key -> Annex [String]
getWhereisKey u k = disp <$> getCapability u k
getWhereisKey :: RemoteStateHandle -> Key -> Annex [String]
getWhereisKey rs k = disp <$> getCapability rs k
where
disp Nothing = []
disp (Just c) = [c]

View file

@ -40,8 +40,8 @@ list _autoinit = do
r <- liftIO $ Git.Construct.remoteNamed "web" (pure Git.Construct.fromUnknown)
return [r]
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r _ c gc = do
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r _ c gc rs = do
cst <- remoteCost gc expensiveRemoteCost
return $ Just Remote
{ uuid = webUUID
@ -74,6 +74,7 @@ gen r _ c gc = do
, getInfo = return []
, claimUrl = Nothing -- implicitly claims all urls
, checkUrl = Nothing
, remoteStateHandle = rs
}
downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)

View file

@ -50,8 +50,8 @@ remote = RemoteType
, importSupported = importUnsupported
}
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r u c gc = new <$> remoteCost gc expensiveRemoteCost
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u c gc rs = new <$> remoteCost gc expensiveRemoteCost
where
new cst = Just $ specialRemote c
(prepareDAV this $ store chunkconfig)
@ -95,11 +95,12 @@ gen r u c gc = new <$> remoteCost gc expensiveRemoteCost
, appendonly = False
, availability = GloballyAvailable
, remotetype = remote
, mkUnavailable = gen r u (M.insert "url" "http://!dne!/" c) gc
, mkUnavailable = gen r u (M.insert "url" "http://!dne!/" c) gc rs
, getInfo = includeCredsInfo c (davCreds u) $
[("url", fromMaybe "unknown" (M.lookup "url" c))]
, claimUrl = Nothing
, checkUrl = Nothing
, remoteStateHandle = rs
}
chunkconfig = getChunkConfig c