{- Web remote.
 -
 - Copyright 2011 Joey Hess <id@joeyh.name>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

module Remote.Web (remote, getWebUrls) where

import Annex.Common
import Types.Remote
import Remote.Helper.Messages
import qualified Git
import qualified Git.Construct
import Annex.Content
import Config.Cost
import Logs.Web
import Annex.UUID
import Utility.Metered
import qualified Annex.Url as Url
import Annex.Quvi
import qualified Utility.Quvi as Quvi

remote :: RemoteType
remote = RemoteType {
	typename = "web",
	enumerate = list,
	generate = gen,
	setup = error "not supported"
}
-- There is only one web remote, and it always exists.
-- (If the web should cease to exist, remove this module and redistribute
-- a new release to the survivors by carrier pigeon.)
list :: Bool -> Annex [Git.Repo]
list _autoinit = do
	r <- liftIO $ Git.Construct.remoteNamed "web" (pure Git.Construct.fromUnknown)
	return [r]
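
-- Generate the singleton web Remote. It is read-only and marked as
-- expensive, so cheaper remotes holding the same content are tried
-- before falling back to re-downloading from the web.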
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r _ c gc =
	return $ Just Remote
		{ uuid = webUUID
		, cost = expensiveRemoteCost
		, name = Git.repoDescribe r
		, storeKey = uploadKey
		, retrieveKeyFile = downloadKey
		, retrieveKeyFileCheap = downloadKeyCheap
		, removeKey = dropKey
		, lockContent = Nothing
		, checkPresent = checkKey
		, checkPresentCheap = False
		, storeExport = Nothing
		, retrieveExport = Nothing
		, removeExport = Nothing
		, checkPresentExport = Nothing
		, renameExport = Nothing
		, whereisKey = Nothing
		, remoteFsck = Nothing
		, repairRepo = Nothing
		, config = c
		, gitconfig = gc
		, localpath = Nothing
		, repo = r
		, readonly = True
		, availability = GloballyAvailable
		, remotetype = remote
		, mkUnavailable = return Nothing
		, getInfo = return []
		, claimUrl = Nothing -- implicitly claims all urls
		, checkUrl = Nothing
		}
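
-- Try each url registered for the key until one of them downloads
-- successfully; untilTrue runs the action on successive urls until
-- one returns True (effectively anyM with its arguments flipped).
-- A quvi url is first expanded to the media urls it points at.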
downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
downloadKey key _af dest p = unVerified $ get =<< getWebUrls key
  where
	get [] = do
		warning "no known url"
		return False
	get urls = do
		showOutput -- make way for download progress bar
		untilTrue urls $ \u -> do
			let (u', downloader) = getDownloader u
			case downloader of
				QuviDownloader -> do
					flip (downloadUrl key p) dest
						=<< withQuviOptions Quvi.queryLinks [Quvi.httponly, Quvi.quiet] u'
				_ -> downloadUrl key p [u'] dest
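
-- Downloading from the web is never cheap.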
downloadKeyCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
downloadKeyCheap _ _ _ = return False
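
-- The web is read-only; storing content in it is not supported.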
uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
uploadKey _ _ _ = do
	warning "upload to web not supported"
	return False
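
-- Dropping does not remove anything from the web, of course; it only
-- marks each of the key's registered urls as no longer available.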
dropKey :: Key -> Annex Bool
dropKey k = do
	mapM_ (setUrlMissing webUUID k) =<< getWebUrls k
	return True
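
-- A key is present in the web if at least one of its registered urls
-- checks out.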
checkKey :: Key -> Annex Bool
checkKey key = do
	us <- getWebUrls key
	if null us
		then return False
		else either giveup return =<< checkKey' key us
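
-- Checks the urls in turn, returning the result for the first url
-- that can be checked without an error, or the last error when none
-- of them can be.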
checkKey' :: Key -> [URLString] -> Annex (Either String Bool)
checkKey' key us = firsthit us (Right False) $ \u -> do
	let (u', downloader) = getDownloader u
	showChecking u'
	case downloader of
		QuviDownloader ->
			Right <$> withQuviOptions Quvi.check [Quvi.httponly, Quvi.quiet] u'
		_ -> do
			Url.withUrlOptions $ catchMsgIO .
				Url.checkBoth u' (keySize key)
  where
	firsthit [] miss _ = return miss
	firsthit (u:rest) _ a = do
		r <- a u
		case r of
			Right _ -> return r
			Left _ -> firsthit rest r a
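
-- Get the urls for a key that this remote itself handles, ie those
-- using the Web or Quvi downloaders; urls claimed by other remotes
-- are filtered out.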
getWebUrls :: Key -> Annex [URLString]
getWebUrls key = filter supported <$> getUrls key
  where
	supported u = snd (getDownloader u)
		`elem` [WebDownloader, QuviDownloader]