{- Web remote.
 -
 - Copyright 2011 Joey Hess <id@joeyh.name>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

module Remote.Web (remote, getWebUrls) where
import Annex.Common
import Types.Remote
import Remote.Helper.Messages
import qualified Git
import qualified Git.Construct
import Annex.Content
import Config.Cost
import Logs.Web
import Annex.UUID
import Utility.Metered
import qualified Annex.Url as Url
import Annex.Quvi
import qualified Utility.Quvi as Quvi
remote :: RemoteType
remote = RemoteType {
    typename = "web",
    enumerate = list,
    generate = gen,
    setup = error "not supported"
}
-- There is only one web remote, and it always exists.
-- (If the web should cease to exist, remove this module and redistribute
-- a new release to the survivors by carrier pigeon.)
list :: Bool -> Annex [Git.Repo]
list _autoinit = do
    r <- liftIO $ Git.Construct.remoteNamed "web" (pure Git.Construct.fromUnknown)
    return [r]
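-- Generates the singleton web remote. It is always readonly and
-- globally available, and is identified by the special webUUID.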
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r _ c gc =
    return $ Just Remote
        { uuid = webUUID
        , cost = expensiveRemoteCost
        , name = Git.repoDescribe r
        , storeKey = uploadKey
        , retrieveKeyFile = downloadKey
        , retrieveKeyFileCheap = downloadKeyCheap
        , removeKey = dropKey
        , lockContent = Nothing
        , checkPresent = checkKey
        , checkPresentCheap = False
        , storeExport = Nothing
        , retrieveExport = Nothing
        , removeExport = Nothing
        , checkPresentExport = Nothing
        , renameExport = Nothing
        , whereisKey = Nothing
        , remoteFsck = Nothing
        , repairRepo = Nothing
        , config = c
        , gitconfig = gc
        , localpath = Nothing
        , repo = r
        , readonly = True
        , availability = GloballyAvailable
        , remotetype = remote
        , mkUnavailable = return Nothing
        , getInfo = return []
        , claimUrl = Nothing -- implicitly claims all urls
        , checkUrl = Nothing
        }
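-- Downloads content by trying each of the key's recorded urls until one
-- succeeds. Quvi urls are first resolved to direct media links. The
-- download itself is not verified here (hence unVerified).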
downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
downloadKey key _af dest p = unVerified $ get =<< getWebUrls key
  where
    get [] = do
        warning "no known url"
        return False
    get urls = do
        showOutput -- make way for download progress bar
        untilTrue urls $ \u -> do
            let (u', downloader) = getDownloader u
            case downloader of
                QuviDownloader -> do
                    flip (downloadUrl key p) dest
                        =<< withQuviOptions Quvi.queryLinks [Quvi.httponly, Quvi.quiet] u'
                _ -> downloadUrl key p [u'] dest
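-- There is no cheap way to retrieve content from the web.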
downloadKeyCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
downloadKeyCheap _ _ _ = return False
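-- The web is readonly; uploads always fail with a warning.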
uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
uploadKey _ _ _ = do
    warning "upload to web not supported"
    return False
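-- Content cannot be removed from the web; dropping a key only marks all
-- of its recorded urls as no longer available.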
dropKey :: Key -> Annex Bool
dropKey k = do
    mapM_ (setUrlMissing webUUID k) =<< getWebUrls k
    return True
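-- Checks whether the key's content is still available from any of its
-- recorded urls.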
checkKey :: Key -> Annex Bool
checkKey key = do
    us <- getWebUrls key
    if null us
        then return False
        else either giveup return =<< checkKey' key us
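-- Checks the urls in order, returning the first check that succeeds.
-- If no url can be checked, the last error encountered is returned.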
checkKey' :: Key -> [URLString] -> Annex (Either String Bool)
checkKey' key us = firsthit us (Right False) $ \u -> do
    let (u', downloader) = getDownloader u
    showChecking u'
    case downloader of
        QuviDownloader ->
            Right <$> withQuviOptions Quvi.check [Quvi.httponly, Quvi.quiet] u'
        _ -> do
            Url.withUrlOptions $ catchMsgIO .
                Url.checkBoth u' (keySize key)
  where
    firsthit [] miss _ = return miss
    firsthit (u:rest) _ a = do
        r <- a u
        case r of
            Right _ -> return r
            Left _ -> firsthit rest r a
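-- Only urls with a downloader that this remote handles itself
-- (regular web urls and quvi urls) are returned.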
getWebUrls :: Key -> Annex [URLString]
getWebUrls key = filter supported <$> getUrls key
  where
    supported u = snd (getDownloader u)
        `elem` [WebDownloader, QuviDownloader]