Added remote.name.annex-web-options config
This is a per-remote version of the annex.web-options config. Implementing it required plumbing RemoteGitConfig through to getUrlOptions. In cases where a special remote does not use curl, there was no need to do that, and Nothing is passed instead. In the case of the addurl and importfeed commands, it seemed best to say that running those commands is not using the web special remote per se, so the config is not used for them.
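As a usage sketch (the "archiveorg" remote name and the cookie value are illustrative, taken from the discussion quoted later in this commit):

	git-annex initremote --sameas=web archiveorg type=web urlinclude='*archive.org/*'
	git config remote.archiveorg.annex-web-options "--cookie=whatever"
	git config annex.security.allowed-ip-addresses all

The last setting is needed because configuring web-options makes git-annex use curl, which it only does when annex.security.allowed-ip-addresses is configured; see the documentation change at the end of this diff.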
parent 932fac7772
commit e81fd72018
19 changed files with 152 additions and 99 deletions
Annex/Url.hs | 21

@@ -56,8 +56,8 @@ getUserAgent :: Annex U.UserAgent
 getUserAgent = Annex.getRead $
 	fromMaybe defaultUserAgent . Annex.useragent
 
-getUrlOptions :: Annex U.UrlOptions
-getUrlOptions = Annex.getState Annex.urloptions >>= \case
+getUrlOptions :: Maybe RemoteGitConfig -> Annex U.UrlOptions
+getUrlOptions mgc = Annex.getState Annex.urloptions >>= \case
 	Just uo -> return uo
 	Nothing -> do
 		uo <- mk

@@ -82,9 +82,14 @@ getUrlOptions = Annex.getState Annex.urloptions >>= \case
 		Just output -> pure (lines output)
 		Nothing -> annexHttpHeaders <$> Annex.getGitConfig
 
+	getweboptions = case mgc of
+		Just gc | not (null (remoteAnnexWebOptions gc)) ->
+			pure (remoteAnnexWebOptions gc)
+		_ -> annexWebOptions <$> Annex.getGitConfig
+
 	checkallowedaddr = words . annexAllowedIPAddresses <$> Annex.getGitConfig >>= \case
 		["all"] -> do
-			curlopts <- map Param . annexWebOptions <$> Annex.getGitConfig
+			curlopts <- map Param <$> getweboptions
 			allowedurlschemes <- annexAllowedUrlSchemes <$> Annex.getGitConfig
 			let urldownloader = if null curlopts && not (any (`S.notMember` U.conduitUrlSchemes) allowedurlschemes)
 				then U.DownloadWithConduit $

@@ -148,8 +153,8 @@ ipAddressesUnlimited :: Annex Bool
 ipAddressesUnlimited =
 	("all" == ) . annexAllowedIPAddresses <$> Annex.getGitConfig
 
-withUrlOptions :: (U.UrlOptions -> Annex a) -> Annex a
-withUrlOptions a = a =<< getUrlOptions
+withUrlOptions :: Maybe RemoteGitConfig -> (U.UrlOptions -> Annex a) -> Annex a
+withUrlOptions mgc a = a =<< getUrlOptions mgc
 
 -- When downloading an url, if authentication is needed, uses
 -- git-credential to prompt for username and password.

@@ -157,10 +162,10 @@ withUrlOptions a = a =<< getUrlOptions
 -- Note that, when the downloader is curl, it will not use git-credential.
 -- If the user wants to, they can configure curl to use a netrc file that
 -- handles authentication.
-withUrlOptionsPromptingCreds :: (U.UrlOptions -> Annex a) -> Annex a
-withUrlOptionsPromptingCreds a = do
+withUrlOptionsPromptingCreds :: Maybe RemoteGitConfig -> (U.UrlOptions -> Annex a) -> Annex a
+withUrlOptionsPromptingCreds mgc a = do
 	g <- Annex.gitRepo
-	uo <- getUrlOptions
+	uo <- getUrlOptions mgc
 	prompter <- mkPrompter
 	cc <- Annex.getRead Annex.gitcredentialcache
 	a $ uo

@@ -74,7 +74,7 @@ youtubeDlNotAllowedMessage = unwords
 -- <https://github.com/rg3/youtube-dl/issues/14864>)
 youtubeDl :: URLString -> OsPath -> MeterUpdate -> Annex (Either String (Maybe OsPath))
 youtubeDl url workdir p = ifM ipAddressesUnlimited
-	( withUrlOptions $ youtubeDl' url workdir p
+	( withUrlOptions Nothing $ youtubeDl' url workdir p
 	, return $ Left youtubeDlNotAllowedMessage
 	)
 

@@ -194,7 +194,7 @@ youtubeDlTo key url dest p = do
 -- without it. So, this first downloads part of the content and checks
 -- if it's a html page; only then is youtube-dl used.
 htmlOnly :: URLString -> a -> Annex a -> Annex a
-htmlOnly url fallback a = withUrlOptions $ \uo ->
+htmlOnly url fallback a = withUrlOptions Nothing $ \uo ->
 	liftIO (downloadPartial url uo htmlPrefixLength) >>= \case
 		Just bs | isHtmlBs bs -> a
 		_ -> return fallback

@@ -202,7 +202,7 @@ htmlOnly url fallback a = withUrlOptions $ \uo ->
 -- Check if youtube-dl supports downloading content from an url.
 youtubeDlSupported :: URLString -> Annex Bool
 youtubeDlSupported url = either (const False) id
-	<$> withUrlOptions (youtubeDlCheck' url)
+	<$> withUrlOptions Nothing (youtubeDlCheck' url)
 
 -- Check if youtube-dl can find media in an url.
 --

@@ -211,7 +211,7 @@ youtubeDlSupported url = either (const False) id
 -- download won't succeed.
 youtubeDlCheck :: URLString -> Annex (Either String Bool)
 youtubeDlCheck url = ifM youtubeDlAllowed
-	( withUrlOptions $ youtubeDlCheck' url
+	( withUrlOptions Nothing $ youtubeDlCheck' url
 	, return $ Left youtubeDlNotAllowedMessage
 	)
 

@@ -227,7 +227,7 @@ youtubeDlCheck' url uo
 --
 -- (This is not always identical to the filename it uses when downloading.)
 youtubeDlFileName :: URLString -> Annex (Either String OsPath)
-youtubeDlFileName url = withUrlOptions go
+youtubeDlFileName url = withUrlOptions Nothing go
   where
 	go uo
 		| supportedScheme uo url = flip catchIO (pure . Left . show) $

@@ -238,7 +238,7 @@ youtubeDlFileName url = withUrlOptions go
 -- Does not check if the url contains htmlOnly; use when that's already
 -- been verified.
 youtubeDlFileNameHtmlOnly :: URLString -> Annex (Either String OsPath)
-youtubeDlFileNameHtmlOnly = withUrlOptions . youtubeDlFileNameHtmlOnly'
+youtubeDlFileNameHtmlOnly = withUrlOptions Nothing . youtubeDlFileNameHtmlOnly'
 
 youtubeDlFileNameHtmlOnly' :: URLString -> UrlOptions -> Annex (Either String OsPath)
 youtubeDlFileNameHtmlOnly' url uo

@@ -324,7 +324,7 @@ usingDistribution = isJust <$> getEnv "GIT_ANNEX_STANDLONE_ENV"
 
 downloadDistributionInfo :: Assistant (Maybe GitAnnexDistribution)
 downloadDistributionInfo = do
-	uo <- liftAnnex Url.getUrlOptions
+	uo <- liftAnnex $ Url.getUrlOptions Nothing
 	gpgcmd <- liftAnnex $ gpgCmd <$> Annex.getGitConfig
 	liftIO $ withTmpDir (literalOsPath "git-annex.tmp") $ \tmpdir -> do
 		let infof = tmpdir </> literalOsPath "info"

@@ -179,7 +179,7 @@ escapeHeader = escapeURIString (\c -> isUnescapedInURI c && c /= ' ')
 
 getRepoInfo :: RemoteConfig -> Widget
 getRepoInfo c = do
-	uo <- liftAnnex Url.getUrlOptions
+	uo <- liftAnnex $ Url.getUrlOptions Nothing
 	urlexists <- liftAnnex $ catchDefaultIO False $ Url.exists url uo
 	[whamlet|
 <a href="#{url}">

@@ -7,6 +7,8 @@ git-annex (10.20250321) UNRELEASED; urgency=medium
   * fsck: Avoid complaining about required content of dead repositories.
   * drop: Avoid redundant object directory thawing.
   * httpalso: Windows url fix.
+  * Added remote.name.annex-web-options config, which is a per-remote
+    version of the annex.web-options config.
 
  -- Joey Hess <id@joeyh.name>  Fri, 21 Mar 2025 12:27:11 -0400
 

@@ -496,7 +496,7 @@ parseSpecialRemoteUrl url remotename = case parseURI url of
 resolveSpecialRemoteWebUrl :: String -> Annex (Maybe String)
 resolveSpecialRemoteWebUrl url
 	| "http://" `isPrefixOf` lcurl || "https://" `isPrefixOf` lcurl =
-		Url.withUrlOptionsPromptingCreds $ \uo ->
+		Url.withUrlOptionsPromptingCreds Nothing $ \uo ->
 			withTmpFile (literalOsPath "git-remote-annex") $ \tmp h -> do
 				liftIO $ hClose h
 				Url.download' nullMeterUpdate Nothing url tmp uo >>= \case

@@ -251,7 +251,7 @@ startWeb addunlockedmatcher o si urlstring = go $ fromMaybe bad $ parseURIPortab
 	go url = startingAddUrl si urlstring o $
 		if relaxedOption (downloadOptions o)
 			then go' url Url.assumeUrlExists
-			else Url.withUrlOptions (Url.getUrlInfo urlstring) >>= \case
+			else Url.withUrlOptions Nothing (Url.getUrlInfo urlstring) >>= \case
 				Right urlinfo -> go' url urlinfo
 				Left err -> do
 					warning (UnquotedString err)

@@ -352,7 +352,8 @@ downloadWeb addunlockedmatcher o url urlinfo file =
 	go =<< downloadWith' downloader urlkey webUUID url file
   where
 	urlkey = addSizeUrlKey urlinfo $ Backend.URL.fromUrl url Nothing (verifiableOption o)
-	downloader f p = Url.withUrlOptions $ downloadUrl False urlkey p Nothing [url] f
+	downloader f p = Url.withUrlOptions Nothing $
+		downloadUrl False urlkey p Nothing [url] f
 	go Nothing = return Nothing
 	go (Just (tmp, backend)) = ifM (useYoutubeDl o <&&> liftIO (isHtmlFile tmp))
 		( tryyoutubedl tmp backend

@@ -268,7 +268,7 @@ findDownloads u f = catMaybes $ map mk (feedItems f)
 downloadFeed :: URLString -> FilePath -> Annex Bool
 downloadFeed url f
 	| Url.parseURIRelaxed url == Nothing = giveup "invalid feed url"
-	| otherwise = Url.withUrlOptions $
+	| otherwise = Url.withUrlOptions Nothing $
 		Url.download nullMeterUpdate Nothing url (toOsPath f)
 
 startDownload :: AddUnlockedMatcher -> ImportFeedOptions -> Cache -> TMVar Bool -> ToDownload -> CommandStart

@@ -367,7 +367,7 @@ downloadEnclosure addunlockedmatcher opts cache cv todownload url =
 	let go urlinfo = Just . maybeToList <$> addUrlFile addunlockedmatcher dlopts url urlinfo f
 	if relaxedOption (downloadOptions opts)
 		then go Url.assumeUrlExists
-		else Url.withUrlOptions (Url.getUrlInfo url) >>= \case
+		else Url.withUrlOptions Nothing (Url.getUrlInfo url) >>= \case
 			Right urlinfo -> go urlinfo
 			Left err -> do
 				warning (UnquotedString err)

@@ -100,7 +100,7 @@ p2pHttpClientVersions' allowedversion rmt rmtrepo fallback clientaction =
 	case p2pHttpBaseUrl <$> remoteAnnexP2PHttpUrl (gitconfig rmt) of
 		Nothing -> error "internal"
 		Just baseurl -> do
-			mgr <- httpManager <$> getUrlOptions
+			mgr <- httpManager <$> getUrlOptions Nothing
 			let clientenv = mkClientEnv mgr baseurl
 			ccv <- Annex.getRead Annex.gitcredentialcache
 			Git.CredentialCache cc <- liftIO $ atomically $

@@ -66,7 +66,7 @@ gen r _ rc gc rs = do
 		, cost = cst
 		, name = Git.repoDescribe r
 		, storeKey = uploadKey
-		, retrieveKeyFile = downloadKey
+		, retrieveKeyFile = downloadKey gc
 		-- Bittorrent downloads out of order, but downloadTorrentContent
 		-- moves the downloaded file to the destination at the end.
 		, retrieveKeyFileInOrder = pure True

@@ -94,12 +94,12 @@ gen r _ rc gc rs = do
 		, mkUnavailable = return Nothing
 		, getInfo = return []
 		, claimUrl = Just (pure . isSupportedUrl)
-		, checkUrl = Just checkTorrentUrl
+		, checkUrl = Just (checkTorrentUrl gc)
 		, remoteStateHandle = rs
 		}
 
-downloadKey :: Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification
-downloadKey key _file dest p _ = do
+downloadKey :: RemoteGitConfig -> Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification
+downloadKey gc key _file dest p _ = do
 	get . map (torrentUrlNum . fst . getDownloader) =<< getBitTorrentUrls key
 	-- While bittorrent verifies the hash in the torrent file,
 	-- the torrent file itself is downloaded without verification,

@@ -112,7 +112,7 @@ downloadKey key _file dest p _ = do
 		ok <- untilTrue urls $ \(u, filenum) -> do
 			registerTorrentCleanup u
 			checkDependencies
-			ifM (downloadTorrentFile u)
+			ifM (downloadTorrentFile gc u)
 				( downloadTorrentContent key u dest filenum p
 				, return False
 				)

@@ -151,11 +151,11 @@ isTorrentMagnetUrl u = "magnet:" `isPrefixOf` u && checkbt (parseURIPortable u)
 	checkbt (Just uri) | "xt=urn:btih:" `isInfixOf` uriQuery uri = True
 	checkbt _ = False
 
-checkTorrentUrl :: URLString -> Annex UrlContents
-checkTorrentUrl u = do
+checkTorrentUrl :: RemoteGitConfig -> URLString -> Annex UrlContents
+checkTorrentUrl gc u = do
 	checkDependencies
 	registerTorrentCleanup u
-	ifM (downloadTorrentFile u)
+	ifM (downloadTorrentFile gc u)
 		( torrentContents u
 		, giveup "could not download torrent file"
 		)

@@ -192,8 +192,8 @@ registerTorrentCleanup u = Annex.addCleanupAction (TorrentCleanup u) $
 	liftIO . removeWhenExistsWith removeFile =<< tmpTorrentFile u
 
 {- Downloads the torrent file. (Not its contents.) -}
-downloadTorrentFile :: URLString -> Annex Bool
-downloadTorrentFile u = do
+downloadTorrentFile :: RemoteGitConfig -> URLString -> Annex Bool
+downloadTorrentFile gc u = do
 	torrent <- tmpTorrentFile u
 	ifM (liftIO $ doesFileExist torrent)
 		( return True

@@ -213,7 +213,7 @@ downloadTorrentFile u = do
 			withTmpFileIn othertmp (literalOsPath "torrent") $ \f h -> do
 				liftIO $ hClose h
 				resetAnnexFilePerm f
-				ok <- Url.withUrlOptions $
+				ok <- Url.withUrlOptions (Just gc) $
 					Url.download nullMeterUpdate Nothing u f
 				when ok $
 					liftIO $ moveFile f torrent

@@ -77,9 +77,9 @@ gen rt externalprogram r u rc gc rs
 			exportUnsupported
 		return $ Just $ specialRemote c
 			readonlyStorer
-			retrieveUrl
+			(retrieveUrl gc)
 			readonlyRemoveKey
-			checkKeyUrl
+			(checkKeyUrl gc)
 			rmt
 	| otherwise = do
 		c <- parsedRemoteConfig remote rc

@@ -834,16 +834,16 @@ checkUrlM external url =
   where
 	mkmulti (u, s, f) = (u, s, toOsPath f)
 
-retrieveUrl :: Retriever
-retrieveUrl = fileRetriever' $ \f k p iv -> do
+retrieveUrl :: RemoteGitConfig -> Retriever
+retrieveUrl gc = fileRetriever' $ \f k p iv -> do
 	us <- getWebUrls k
-	unlessM (withUrlOptions $ downloadUrl True k p iv us f) $
+	unlessM (withUrlOptions (Just gc) $ downloadUrl True k p iv us f) $
 		giveup "failed to download content"
 
-checkKeyUrl :: CheckPresent
-checkKeyUrl k = do
+checkKeyUrl :: RemoteGitConfig -> CheckPresent
+checkKeyUrl gc k = do
 	us <- getWebUrls k
-	anyM (\u -> withUrlOptions $ checkBoth u (fromKey keySize k)) us
+	anyM (\u -> withUrlOptions (Just gc) $ checkBoth u (fromKey keySize k)) us
 
 getWebUrls :: Key -> Annex [URLString]
 getWebUrls key = filter supported <$> getUrls key

@@ -142,11 +142,11 @@ isGitRemoteAnnex r = "annex::" `isPrefixOf` Git.repoLocation r
 - etc.
 -}
 gitSetup :: SetupStage -> Maybe UUID -> Maybe CredPair -> RemoteConfig -> RemoteGitConfig -> Annex (RemoteConfig, UUID)
-gitSetup Init mu _ c _ = do
+gitSetup Init mu _ c gc = do
 	let location = maybe (giveup "Specify location=url") fromProposedAccepted $
 		M.lookup locationField c
 	r <- inRepo $ Git.Construct.fromRemoteLocation location False
-	r' <- tryGitConfigRead False r False
+	r' <- tryGitConfigRead gc False r False
 	let u = getUncachedUUID r'
 	if u == NoUUID
 		then giveup "git repository does not have an annex uuid"

@@ -187,10 +187,10 @@ configRead autoinit r = do
 	case (repoCheap r, annexignore, hasuuid) of
 		(_, True, _) -> return r
 		(True, _, _)
-			| remoteAnnexCheckUUID gc -> tryGitConfigRead autoinit r hasuuid
+			| remoteAnnexCheckUUID gc -> tryGitConfigRead gc autoinit r hasuuid
 			| otherwise -> return r
 		(False, _, False) -> configSpecialGitRemotes r >>= \case
-			Nothing -> tryGitConfigRead autoinit r False
+			Nothing -> tryGitConfigRead gc autoinit r False
 			Just r' -> return r'
 		_ -> return r
 

@@ -273,8 +273,8 @@ unavailable r u c gc = gen r' u c gc'
 
 {- Tries to read the config for a specified remote, updates state, and
 - returns the updated repo. -}
-tryGitConfigRead :: Bool -> Git.Repo -> Bool -> Annex Git.Repo
-tryGitConfigRead autoinit r hasuuid
+tryGitConfigRead :: RemoteGitConfig -> Bool -> Git.Repo -> Bool -> Annex Git.Repo
+tryGitConfigRead gc autoinit r hasuuid
 	| haveconfig r = return r -- already read
 	| Git.repoIsSsh r = storeUpdatedRemote $ do
 		v <- Ssh.onRemote NoConsumeStdin r

@@ -323,7 +323,7 @@ tryGitConfigRead autoinit r hasuuid
 		warning $ UnquotedString $ "Unable to parse git config from " ++ configloc
 		return $ Left exitcode
 
-	geturlconfig = Url.withUrlOptionsPromptingCreds $ \uo -> do
+	geturlconfig = Url.withUrlOptionsPromptingCreds (Just gc) $ \uo -> do
 		let url = Git.repoLocation r ++ "/config"
 		v <- withTmpFile (literalOsPath "git-annex.tmp") $ \tmpfile h -> do
 			liftIO $ hClose h

@@ -449,7 +449,7 @@ inAnnex' repo rmt st@(State connpool duc _ _ _) key
 	checkp2phttp = p2pHttpClient rmt giveup (clientCheckPresent key)
 	checkhttp = do
 		gc <- Annex.getGitConfig
-		Url.withUrlOptionsPromptingCreds $ \uo ->
+		Url.withUrlOptionsPromptingCreds (Just (gitconfig rmt)) $ \uo ->
 			anyM (\u -> Url.checkBoth u (fromKey keySize key) uo)
 				(keyUrls gc repo rmt key)
 	checkremote = P2PHelper.checkpresent (Ssh.runProto rmt connpool (cantCheck rmt)) key

@@ -570,7 +570,7 @@ copyFromRemote'' repo r st@(State connpool _ _ _ _) key af dest meterupdate vc
 	| isP2PHttp r = copyp2phttp
 	| Git.repoIsHttp repo = verifyKeyContentIncrementally vc key $ \iv -> do
 		gc <- Annex.getGitConfig
-		ok <- Url.withUrlOptionsPromptingCreds $
+		ok <- Url.withUrlOptionsPromptingCreds (Just (gitconfig r)) $
 			Annex.Content.downloadUrl False key meterupdate iv (keyUrls gc repo r key) dest
 		unless ok $
 			giveup "failed to download content"

@@ -890,7 +890,7 @@ mkState r u gc = do
 	rv <- liftIO newEmptyMVar
 	let getrepo = ifM (liftIO $ isEmptyMVar rv)
 		( do
-			r' <- tryGitConfigRead False r True
+			r' <- tryGitConfigRead gc False r True
 			let t = (r', extractGitConfig FromGitConfig r')
 			void $ liftIO $ tryPutMVar rv t
 			return t

@@ -101,7 +101,7 @@ gen r u rc gc rs = do
 		}
 	return $ Just $ specialRemote' specialcfg c
 		(store rs h)
-		(retrieve rs h)
+		(retrieve gc rs h)
 		(remove h)
 		(checkKey rs h)
 		(this c cst h)

@@ -367,7 +367,7 @@ getLFSEndpoint tro hv = do
 -- Not for use in downloading an object.
 makeSmallAPIRequest :: Request -> Annex (Response L.ByteString)
 makeSmallAPIRequest req = do
-	uo <- getUrlOptions
+	uo <- getUrlOptions Nothing
 	let req' = applyRequest uo req
 	fastDebug "Remote.GitLFS" (show req')
 	resp <- liftIO $ httpLbs req' (httpManager uo)

@@ -499,8 +499,8 @@ store rs h = fileStorer $ \k src p -> getLFSEndpoint LFS.RequestUpload h >>= \ca
 		Just reqs -> forM_ reqs $
 			makeSmallAPIRequest . setRequestCheckStatus
 
-retrieve :: RemoteStateHandle -> TVar LFSHandle -> Retriever
-retrieve rs h = fileRetriever' $ \dest k p iv -> getLFSEndpoint LFS.RequestDownload h >>= \case
+retrieve :: RemoteGitConfig -> RemoteStateHandle -> TVar LFSHandle -> Retriever
+retrieve gc rs h = fileRetriever' $ \dest k p iv -> getLFSEndpoint LFS.RequestDownload h >>= \case
 	Nothing -> giveup "unable to connect to git-lfs endpoint"
 	Just endpoint -> mkDownloadRequest rs k >>= \case
 		Nothing -> giveup "unable to download this object from git-lfs"

@@ -520,7 +520,7 @@ retrieve rs h = fileRetriever' $ \dest k p iv -> getLFSEndpoint LFS.RequestDownl
 		Just op -> case LFS.downloadOperationRequest op of
 			Nothing -> giveup "unable to parse git-lfs server download url"
 			Just req -> do
-				uo <- getUrlOptions
+				uo <- getUrlOptions (Just gc)
 				liftIO $ downloadConduit p iv req dest uo
 
 -- Since git-lfs does not support removing content, nothing needs to be

@@ -57,9 +57,9 @@ gen r u rc gc rs = do
 	ll <- liftIO newLearnedLayout
 	return $ Just $ specialRemote c
 		cannotModify
-		(downloadKey url ll)
+		(downloadKey gc url ll)
 		cannotModify
-		(checkKey url ll)
+		(checkKey gc url ll)
 		(this url c cst)
   where
 	this url c cst = Remote

@@ -79,9 +79,9 @@ gen r u rc gc rs = do
 		, checkPresentCheap = False
 		, exportActions = ExportActions
 			{ storeExport = cannotModify
-			, retrieveExport = retriveExportHttpAlso url
+			, retrieveExport = retriveExportHttpAlso gc url
 			, removeExport = cannotModify
-			, checkPresentExport = checkPresentExportHttpAlso url
+			, checkPresentExport = checkPresentExportHttpAlso gc url
 			, removeExportDirectory = Nothing
 			, renameExport = cannotModify
 			}

@@ -121,34 +121,35 @@ httpAlsoSetup _ (Just u) _ c gc = do
 	gitConfigSpecialRemote u c' [("httpalso", "true")]
 	return (c', u)
 
-downloadKey :: Maybe URLString -> LearnedLayout -> Retriever
-downloadKey baseurl ll = fileRetriever' $ \dest key p iv ->
-	downloadAction dest p iv (keyUrlAction baseurl ll key)
+downloadKey :: RemoteGitConfig -> Maybe URLString -> LearnedLayout -> Retriever
+downloadKey gc baseurl ll = fileRetriever' $ \dest key p iv ->
+	downloadAction gc dest p iv (keyUrlAction baseurl ll key)
 
-retriveExportHttpAlso :: Maybe URLString -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
-retriveExportHttpAlso baseurl key loc dest p = do
+retriveExportHttpAlso :: RemoteGitConfig -> Maybe URLString -> Key -> ExportLocation -> OsPath -> MeterUpdate -> Annex Verification
+retriveExportHttpAlso gc baseurl key loc dest p = do
 	verifyKeyContentIncrementally AlwaysVerify key $ \iv ->
-		downloadAction dest p iv (exportLocationUrlAction baseurl loc)
+		downloadAction gc dest p iv (exportLocationUrlAction baseurl loc)
 
-downloadAction :: OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> ((URLString -> Annex (Either String ())) -> Annex (Either String ())) -> Annex ()
-downloadAction dest p iv run =
-	Url.withUrlOptions $ \uo ->
+downloadAction :: RemoteGitConfig -> OsPath -> MeterUpdate -> Maybe IncrementalVerifier -> ((URLString -> Annex (Either String ())) -> Annex (Either String ())) -> Annex ()
+downloadAction gc dest p iv run =
+	Url.withUrlOptions (Just gc) $ \uo ->
 		run (\url -> Url.download' p iv url dest uo)
 			>>= either giveup (const (return ()))
 
-checkKey :: Maybe URLString -> LearnedLayout -> CheckPresent
-checkKey baseurl ll key =
-	isRight <$> keyUrlAction baseurl ll key (checkKey' key)
+checkKey :: RemoteGitConfig -> Maybe URLString -> LearnedLayout -> CheckPresent
+checkKey gc baseurl ll key =
+	isRight <$> keyUrlAction baseurl ll key (checkKey' gc key)
 
-checkKey' :: Key -> URLString -> Annex (Either String ())
-checkKey' key url = ifM (Url.withUrlOptions $ Url.checkBoth url (fromKey keySize key))
-	( return (Right ())
-	, return (Left "content not found")
-	)
+checkKey' :: RemoteGitConfig -> Key -> URLString -> Annex (Either String ())
+checkKey' gc key url =
+	ifM (Url.withUrlOptions (Just gc) $ Url.checkBoth url (fromKey keySize key))
+		( return (Right ())
+		, return (Left "content not found")
+		)
 
-checkPresentExportHttpAlso :: Maybe URLString -> Key -> ExportLocation -> Annex Bool
-checkPresentExportHttpAlso baseurl key loc =
-	isRight <$> exportLocationUrlAction baseurl loc (checkKey' key)
+checkPresentExportHttpAlso :: RemoteGitConfig -> Maybe URLString -> Key -> ExportLocation -> Annex Bool
+checkPresentExportHttpAlso gc baseurl key loc =
+	isRight <$> exportLocationUrlAction baseurl loc (checkKey' gc key)
 
 type LearnedLayout = TVar (Maybe [Key -> URLString])
 

Remote/S3.hs | 10

@@ -427,7 +427,7 @@ retrieve hv r rs c info = fileRetriever' $ \f k p iv -> withS3Handle hv $ \case
 			Left failreason -> do
 				warning (UnquotedString failreason)
 				giveup "cannot download content"
-			Right us -> unlessM (withUrlOptions $ downloadUrl False k p iv us f) $
+			Right us -> unlessM (withUrlOptions Nothing $ downloadUrl False k p iv us f) $
 				giveup "failed to download content"
 	Left S3HandleAnonymousOldAws -> giveupS3HandleProblem S3HandleAnonymousOldAws (uuid r)
 

@@ -475,7 +475,7 @@ checkKey hv r rs c info k = withS3Handle hv $ \case
 			warning (UnquotedString failreason)
 			giveup "cannot check content"
 		Right us -> do
-			let check u = withUrlOptions $
+			let check u = withUrlOptions Nothing $
 				Url.checkBoth u (fromKey keySize k)
 			anyM check us
 	Left S3HandleAnonymousOldAws -> giveupS3HandleProblem S3HandleAnonymousOldAws (uuid r)

@@ -516,7 +516,7 @@ retrieveExportS3 hv r info k loc f p = verifyKeyContentIncrementally AlwaysVerif
 	Right h -> retrieveHelper info h (Left (T.pack exportloc)) f p iv
 	Left S3HandleNeedCreds -> case getPublicUrlMaker info of
 		Just geturl -> either giveup return =<<
-			Url.withUrlOptions
+			withUrlOptions Nothing
 				(Url.download' p iv (geturl exportloc) f)
 		Nothing -> giveup $ needS3Creds (uuid r)
 	Left S3HandleAnonymousOldAws -> giveupS3HandleProblem S3HandleAnonymousOldAws (uuid r)

@@ -537,7 +537,7 @@ checkPresentExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation
 checkPresentExportS3 hv r info k loc = withS3Handle hv $ \case
 	Right h -> checkKeyHelper info h (Left (T.pack $ bucketExportLocation info loc))
 	Left S3HandleNeedCreds -> case getPublicUrlMaker info of
-		Just geturl -> withUrlOptions $
+		Just geturl -> withUrlOptions Nothing $
 			Url.checkBoth (geturl $ bucketExportLocation info loc) (fromKey keySize k)
 		Nothing -> giveupS3HandleProblem S3HandleNeedCreds (uuid r)
 	Left S3HandleAnonymousOldAws -> giveupS3HandleProblem S3HandleAnonymousOldAws (uuid r)

@@ -913,7 +913,7 @@ mkS3HandleVar c gc u = liftIO $ newTVarIO $ Left $
 	Nothing -> return (Left S3HandleNeedCreds)
   where
 	go awscreds = do
-		ou <- getUrlOptions
+		ou <- getUrlOptions Nothing
 		ua <- getUserAgent
 		let awscfg = AWS.Configuration AWS.Timestamp awscreds debugMapper Nothing
 		let s3cfg = s3Configuration (Just ua) c

@@ -75,7 +75,7 @@ gen r u rc gc rs = do
 		, cost = cst
 		, name = Git.repoDescribe r
 		, storeKey = uploadKey
-		, retrieveKeyFile = downloadKey urlincludeexclude
+		, retrieveKeyFile = downloadKey gc urlincludeexclude
 		, retrieveKeyFileInOrder = pure True
 		, retrieveKeyFileCheap = Nothing
 		-- HttpManagerRestricted is used here, so this is

@@ -83,7 +83,7 @@ gen r u rc gc rs = do
 		, retrievalSecurityPolicy = RetrievalAllKeysSecure
 		, removeKey = dropKey urlincludeexclude
 		, lockContent = Nothing
-		, checkPresent = checkKey urlincludeexclude
+		, checkPresent = checkKey gc urlincludeexclude
 		, checkPresentCheap = False
 		, exportActions = exportUnsupported
 		, importActions = importUnsupported

@@ -115,8 +115,8 @@ setupInstance _ mu _ c _ = do
 	gitConfigSpecialRemote u c [("web", "true")]
 	return (c, u)
 
-downloadKey :: UrlIncludeExclude -> Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification
-downloadKey urlincludeexclude key _af dest p vc =
+downloadKey :: RemoteGitConfig -> UrlIncludeExclude -> Key -> AssociatedFile -> OsPath -> MeterUpdate -> VerifyConfig -> Annex Verification
+downloadKey gc urlincludeexclude key _af dest p vc =
 	go =<< getWebUrls' urlincludeexclude key
   where
 	go [] = giveup "no known url"

@@ -138,7 +138,7 @@ downloadKey urlincludeexclude key _af dest p vc =
 		)
 	dl (us, ytus) = do
 		iv <- startVerifyKeyContentIncrementally vc key
-		ifM (Url.withUrlOptions $ downloadUrl True key p iv (map fst us) dest)
+		ifM (Url.withUrlOptions (Just gc) $ downloadUrl True key p iv (map fst us) dest)
 			( finishVerifyKeyContentIncrementally iv >>= \case
 				(True, v) -> postdl v
 				(False, _) -> dl ([], ytus)

@@ -177,19 +177,21 @@ uploadKey _ _ _ _ = giveup "upload to web not supported"
 dropKey :: UrlIncludeExclude -> Maybe SafeDropProof -> Key -> Annex ()
 dropKey urlincludeexclude _proof k = mapM_ (setUrlMissing k) =<< getWebUrls' urlincludeexclude k
 
-checkKey :: UrlIncludeExclude -> Key -> Annex Bool
-checkKey urlincludeexclude key = do
+checkKey :: RemoteGitConfig -> UrlIncludeExclude -> Key -> Annex Bool
+checkKey gc urlincludeexclude key = do
 	us <- getWebUrls' urlincludeexclude key
 	if null us
 		then return False
-		else either giveup return =<< checkKey' key us
-checkKey' :: Key -> [URLString] -> Annex (Either String Bool)
-checkKey' key us = firsthit us (Right False) $ \u -> do
+		else either giveup return =<< checkKey' gc key us
+
+checkKey' :: RemoteGitConfig -> Key -> [URLString] -> Annex (Either String Bool)
+checkKey' gc key us = firsthit us (Right False) $ \u -> do
 	let (u', downloader) = getDownloader u
 	case downloader of
 		YoutubeDownloader -> youtubeDlCheck u'
 		_ -> catchMsgIO $
-			Url.withUrlOptions $ Url.checkBoth u' (fromKey keySize key)
+			Url.withUrlOptions (Just gc) $
+				Url.checkBoth u' (fromKey keySize key)
   where
 	firsthit [] miss _ = return miss
 	firsthit (u:rest) _ a = do

@@ -404,6 +404,7 @@ data RemoteGitConfig = RemoteGitConfig
 	, remoteAnnexBwLimitUpload :: Maybe BwRate
 	, remoteAnnexBwLimitDownload :: Maybe BwRate
 	, remoteAnnexAllowUnverifiedDownloads :: Bool
+	, remoteAnnexWebOptions :: [String]
 	, remoteAnnexUUID :: Maybe UUID
 	, remoteAnnexConfigUUID :: Maybe UUID
 	, remoteAnnexMaxGitBundles :: Int

@@ -492,6 +493,7 @@ extractRemoteGitConfig r remotename = do
 			readBwRatePerSecond =<< getmaybe BWLimitDownloadField
 		, remoteAnnexAllowUnverifiedDownloads = (== Just "ACKTHPPT") $
 			getmaybe SecurityAllowUnverifiedDownloadsField
+		, remoteAnnexWebOptions = getwords WebOptionsField
 		, remoteAnnexUUID = toUUID <$> getmaybe UUIDField
 		, remoteAnnexConfigUUID = toUUID <$> getmaybe ConfigUUIDField
 		, remoteAnnexMaxGitBundles =

@@ -556,6 +558,7 @@ extractRemoteGitConfig r remotename = do
 			| B.null b -> Nothing
 			| otherwise -> Just (decodeBS b)
 		_ -> Nothing
+	getwords k = fromMaybe [] $ words <$> getmaybe k
 
 data RemoteGitConfigField
 	= CostField

@@ -588,6 +591,7 @@ data RemoteGitConfigField
 	| UUIDField
 	| ConfigUUIDField
 	| SecurityAllowUnverifiedDownloadsField
+	| WebOptionsField
 	| MaxGitBundlesField
 	| AllowEncryptedGitRepoField
 	| ProxyField

@@ -656,6 +660,7 @@ remoteGitConfigField = \case
 	UUIDField -> uninherited True "uuid"
 	ConfigUUIDField -> uninherited True "config-uuid"
 	SecurityAllowUnverifiedDownloadsField -> inherited True "security-allow-unverified-downloads"
+	WebOptionsField -> inherited True "web-options"
 	MaxGitBundlesField -> inherited True "max-git-bundles"
 	AllowEncryptedGitRepoField -> inherited True "allow-encrypted-gitrepo"
 	-- Allow proxy chains.

@@ -0,0 +1,37 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 1"""
+ date="2025-04-01T13:15:04Z"
+ content="""
+Well curl does have a --cookie option. But setting that would make all
+downloads from the web special remote have the same cookies set. So
+exposing them to any other web servers you also use with that remote.
+
+I think that generally, things involving authentication are a
+good use case for writing a little external special remote of your own
+that handles the particulars of a given service. Especially if you can
+share it with others. [[doc/special_remotes/external/example.sh]] is a good
+starting place for writing that.
+
+That said, this is also right on the line to something it might be possible
+for git-annex to support better without you needing to do that work. It's
+actually possible to initremote a second web special remote that is limited
+to a single host and is used preferentially to the web special remote:
+
+	git-annex initremote --sameas=web archiveorg type=web urlinclude='*archive.org/*'
+	git config remote.archiveorg.annex-cost 100
+
+If `annex.web-options` had a per-remote config, like some other configs do,
+but which it currently does not, you could then just set that to pass the
+cookies to curl when using that archiveorg special remote:
+
+	git config remote.archiveorg.annex-web-options "--cookie=whatever"
+
+Since that seems like a good idea, I've implemented it! Get it in the next
+release or a daily build.
+
+PS, you'll also need to set this, which does have its own security
+ramifications:
+
+	git config annex.security.allowed-ip-addresses all
+"""]]

@@ -2067,18 +2067,18 @@ Remotes are configured using these settings in `.git/config`.
 
   Used by hook special remotes to record the type of the remote.
 
-* `annex.web-options`
+* `annex.web-options`, `remote.<name>.annex-web-options`
 
   Options to pass to curl when git-annex uses it to download urls
   (rather than the default built-in url downloader).
 
   For example, to force IPv4 only, set it to "-4".
 
-  Setting this option makes git-annex use curl, but only
+  Setting this makes git-annex use curl, but only
   when annex.security.allowed-ip-addresses is configured in a
   specific way. See its documentation.
 
-  Setting this option prevents git-annex from using git-credential
+  Setting this prevents git-annex from using git-credential
   for prompting for http passwords. Instead, you can include "--netrc"
   to make curl use your ~/.netrc file and record the passwords there.
 