clean up url removal presence update
* rmurl: Fix a case where removing the last url left git-annex thinking content was still present in the web special remote. * SETURLPRESENT, SETURIPRESENT, SETURLMISSING, and SETURIMISSING used to update the presence information of the external special remote that called them; this was not documented behavior and is no longer done. Done by making setUrlPresent and setUrlMissing only update presence info for the web, and only when the url is a web url. See the comment for reasoning about why that's the right thing to do. In AddUrl, had to make it update location tracking, to handle the non-web-url case. This commit was sponsored by Ewen McNeill on Patreon.
This commit is contained in:
parent
ca66e7b66a
commit
451171b7c1
14 changed files with 77 additions and 26 deletions
|
@ -83,7 +83,7 @@ startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO ol
|
|||
where
|
||||
go Nothing = debug ["Skipping redundant upgrade"]
|
||||
go (Just dest) = do
|
||||
liftAnnex $ setUrlPresent webUUID k u
|
||||
liftAnnex $ setUrlPresent k u
|
||||
hook <- asIO1 $ distributionDownloadComplete d dest cleanup
|
||||
modifyDaemonStatus_ $ \s -> s
|
||||
{ transferHook = M.insert k hook (transferHook s) }
|
||||
|
@ -100,7 +100,7 @@ startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO ol
|
|||
}
|
||||
cleanup = liftAnnex $ do
|
||||
lockContentForRemoval k removeAnnex
|
||||
setUrlMissing webUUID k u
|
||||
setUrlMissing k u
|
||||
logStatus k InfoMissing
|
||||
|
||||
{- Called once the download is done.
|
||||
|
|
|
@ -5,6 +5,11 @@ git-annex (6.20180927) UNRELEASED; urgency=medium
|
|||
* Improve display when git config download from a http remote fails.
|
||||
* Added annex.jobs setting, which is like using the -J option.
|
||||
* Fix reversion in support of annex.web-options.
|
||||
* rmurl: Fix a case where removing the last url left git-annex thinking
|
||||
content was still present in the web special remote.
|
||||
* SETURLPRESENT, SETURIPRESENT, SETURLMISSING, and SETURIMISSING
|
||||
used to update the presence information of the external special remote
|
||||
that called them; this was not documented behavior and is no longer done.
|
||||
|
||||
-- Joey Hess <id@joeyh.name> Thu, 27 Sep 2018 15:27:20 -0400
|
||||
|
||||
|
|
|
@ -233,7 +233,8 @@ addUrlChecked o url file u checkexistssize key =
|
|||
(exists, samesize, url') <- checkexistssize key
|
||||
if exists && (samesize || relaxedOption (downloadOptions o))
|
||||
then do
|
||||
setUrlPresent u key url'
|
||||
setUrlPresent key url'
|
||||
logChange key u InfoPresent
|
||||
next $ return True
|
||||
else do
|
||||
warning $ "while adding a new url to an already annexed file, " ++ if exists
|
||||
|
@ -397,7 +398,8 @@ addWorkTree u url file key mtmp = case mtmp of
|
|||
where
|
||||
go = do
|
||||
maybeShowJSON $ JSONChunk [("key", key2file key)]
|
||||
setUrlPresent u key url
|
||||
setUrlPresent key url
|
||||
logChange key u InfoPresent
|
||||
ifM (addAnnexedFile file key mtmp)
|
||||
( do
|
||||
when (isJust mtmp) $
|
||||
|
|
|
@ -17,7 +17,6 @@ import qualified Command.Fsck
|
|||
import qualified Annex
|
||||
import Logs.MetaData
|
||||
import Logs.Web
|
||||
import qualified Remote
|
||||
|
||||
cmd :: Command
|
||||
cmd = notDirect $ withGlobalOptions [annexedMatchingOptions] $
|
||||
|
@ -78,9 +77,8 @@ perform file oldkey oldbackend newbackend = go =<< genkey (fastMigrate oldbacken
|
|||
-- If the old key had some associated urls, record them for
|
||||
-- the new key as well.
|
||||
urls <- getUrls oldkey
|
||||
forM_ urls $ \url -> do
|
||||
r <- Remote.claimingUrl url
|
||||
setUrlPresent (Remote.uuid r) newkey url
|
||||
forM_ urls $ \url ->
|
||||
setUrlPresent newkey url
|
||||
next $ Command.ReKey.cleanup file oldkey newkey
|
||||
, error "failed"
|
||||
)
|
||||
|
|
|
@ -69,5 +69,5 @@ perform key url = do
|
|||
perform' :: Key -> URLString -> Annex Bool
|
||||
perform' key url = do
|
||||
r <- Remote.claimingUrl url
|
||||
setUrlPresent (Remote.uuid r) key (setDownloader' url r)
|
||||
setUrlPresent key (setDownloader' url r)
|
||||
return True
|
||||
|
|
|
@ -49,5 +49,5 @@ start (file, url) = flip whenAnnexed file $ \_ key -> do
|
|||
cleanup :: String -> Key -> CommandCleanup
|
||||
cleanup url key = do
|
||||
r <- Remote.claimingUrl url
|
||||
setUrlMissing (Remote.uuid r) key (setDownloader' url r)
|
||||
setUrlMissing key (setDownloader' url r)
|
||||
return True
|
||||
|
|
26
Logs/Web.hs
26
Logs/Web.hs
|
@ -56,20 +56,32 @@ getUrlsWithPrefix key prefix = filter (prefix `isPrefixOf`)
|
|||
. map (fst . getDownloader)
|
||||
<$> getUrls key
|
||||
|
||||
setUrlPresent :: UUID -> Key -> URLString -> Annex ()
|
||||
setUrlPresent uuid key url = do
|
||||
setUrlPresent :: Key -> URLString -> Annex ()
|
||||
setUrlPresent key url = do
|
||||
us <- getUrls key
|
||||
unless (url `elem` us) $ do
|
||||
config <- Annex.getGitConfig
|
||||
addLog (urlLogFile config key) =<< logNow InfoPresent url
|
||||
logChange key uuid InfoPresent
|
||||
-- If the url does not have an OtherDownloader, it must be present
|
||||
-- in the web.
|
||||
case snd (getDownloader url) of
|
||||
OtherDownloader -> return ()
|
||||
_ -> logChange key webUUID InfoPresent
|
||||
|
||||
setUrlMissing :: UUID -> Key -> URLString -> Annex ()
|
||||
setUrlMissing uuid key url = do
|
||||
setUrlMissing :: Key -> URLString -> Annex ()
|
||||
setUrlMissing key url = do
|
||||
config <- Annex.getGitConfig
|
||||
addLog (urlLogFile config key) =<< logNow InfoMissing url
|
||||
whenM (null <$> getUrls key) $
|
||||
logChange key uuid InfoMissing
|
||||
-- If the url was a web url (not OtherDownloader) and none of
|
||||
-- the remaining urls for the key are web urls, the key must not
|
||||
-- be present in the web.
|
||||
when (isweb url) $
|
||||
whenM (null . filter isweb <$> getUrls key) $
|
||||
logChange key webUUID InfoMissing
|
||||
where
|
||||
isweb u = case snd (getDownloader u) of
|
||||
OtherDownloader -> False
|
||||
_ -> True
|
||||
|
||||
{- Finds all known urls. -}
|
||||
knownUrls :: Annex [(Key, URLString)]
|
||||
|
|
|
@ -112,7 +112,7 @@ uploadKey _ _ _ = do
|
|||
|
||||
dropKey :: Key -> Annex Bool
|
||||
dropKey k = do
|
||||
mapM_ (setUrlMissing bitTorrentUUID k) =<< getBitTorrentUrls k
|
||||
mapM_ (setUrlMissing k) =<< getBitTorrentUrls k
|
||||
return True
|
||||
|
||||
{- We punt and don't try to check if a torrent has enough seeders
|
||||
|
|
|
@ -416,9 +416,9 @@ handleRequest' st external req mp responsehandler
|
|||
<$> getRemoteState (externalUUID external) key
|
||||
send $ VALUE state
|
||||
handleRemoteRequest (SETURLPRESENT key url) =
|
||||
setUrlPresent (externalUUID external) key url
|
||||
setUrlPresent key url
|
||||
handleRemoteRequest (SETURLMISSING key url) =
|
||||
setUrlMissing (externalUUID external) key url
|
||||
setUrlMissing key url
|
||||
handleRemoteRequest (SETURIPRESENT key uri) =
|
||||
withurl (SETURLPRESENT key) uri
|
||||
handleRemoteRequest (SETURIMISSING key uri) =
|
||||
|
|
|
@ -195,7 +195,7 @@ store _r info h = fileStorer $ \k f p -> do
|
|||
void $ storeHelper info h f (T.pack $ bucketObject info k) p
|
||||
-- Store public URL to item in Internet Archive.
|
||||
when (isIA info && not (isChunkKey k)) $
|
||||
setUrlPresent webUUID k (iaPublicUrl info (bucketObject info k))
|
||||
setUrlPresent k (iaPublicUrl info (bucketObject info k))
|
||||
return True
|
||||
|
||||
storeHelper :: S3Info -> S3Handle -> FilePath -> S3.Object -> MeterUpdate -> Annex (Maybe S3VersionID)
|
||||
|
|
|
@ -100,7 +100,7 @@ uploadKey _ _ _ = do
|
|||
|
||||
dropKey :: Key -> Annex Bool
|
||||
dropKey k = do
|
||||
mapM_ (setUrlMissing webUUID k) =<< getWebUrls k
|
||||
mapM_ (setUrlMissing k) =<< getWebUrls k
|
||||
return True
|
||||
|
||||
checkKey :: Key -> Annex Bool
|
||||
|
|
|
@ -39,4 +39,4 @@ ok
|
|||
|
||||
[[!meta name=yoh]]
|
||||
|
||||
> closing as yoh says "nothing to fix" [[done]] --[[Joey]]
|
||||
> I think I've now understood and fixed this, so [[done]] --[[Joey]]
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
[[!comment format=mdwn
|
||||
username="joey"
|
||||
subject="""comment 4"""
|
||||
date="2018-10-04T20:20:43Z"
|
||||
content="""
|
||||
I think that making rmurl filter out urls with an OtherDownloader
|
||||
when removing an url claimed by the web is sufficient.
|
||||
Then SETURIPRESENT won't confuse rmurl from the web.
|
||||
|
||||
But what about SETURLPRESENT? Well, that url ought to be a url that can be
|
||||
downloaded by the web special remote. And if the web special remote is marked
|
||||
as present, it will try to download from that url along with whatever other urls
|
||||
are recorded. So, it makes sense for rmurl to treat that url as an indication
|
||||
that the web still has content, when removing some other url.
|
||||
|
||||
But then if an url is added to the web, and then an external special remote
|
||||
uses SETURLPRESENT with another url, and then rmurl removes the first url,
|
||||
and then SETURLMISSING removes the other url, the content will still be marked
|
||||
as present in the web. So it seems that SETURLMISSING should mark the
|
||||
content as not present in the web if that was the last url. (So should other
|
||||
special remotes that remove regular urls, like bittorrent.)
|
||||
|
||||
Currently the content is marked as not present in the remote making the change,
|
||||
which is the wrong thing! (And SETURLPRESENT/SETURIPRESENT mark content
|
||||
as present in the external remote, which also seems unnecessary.)
|
||||
|
||||
The remaining case is an rmurl on an url with an OtherDownloader,
|
||||
which is being removed from some other special remote than web, when there are
|
||||
some more urls with OtherDownloader that were set by other special remotes.
|
||||
|
||||
I think that it doesn't actually make sense for rmurl of such an url to
|
||||
mark the content as not being present in the special remote. The user should
|
||||
drop it if they want to remove it from the special remote. It's ok for it to
|
||||
remove the url but not update the presence information.
|
||||
"""]]
|
|
@ -6,7 +6,6 @@
|
|||
rmurl should mark the content as not being present in the web special
|
||||
remote once the last url is removed, and it does when I tested it just now.
|
||||
|
||||
Please file a bug report if git-annex does not behave as expected.. It's
|
||||
hard to keep track of comments to random man pages, and bloating the end of
|
||||
this page with comments about a possible bug is just not great.
|
||||
There was a recent bug, now fixed, that prevented it doing that in some unusual
|
||||
circumstances.
|
||||
"""]]
|
||||
|
|
Loading…
Reference in a new issue