{- git-annex command
 -
 - Copyright 2013-2017 Joey Hess <id@joeyh.name>
 -
 - Licensed under the GNU AGPL version 3 or higher.
 -}

{-# LANGUAGE OverloadedStrings #-}

module Command.ImportFeed where

import Text.Feed.Import
import Text.Feed.Query
import Text.Feed.Types
import qualified Data.Set as S
import qualified Data.Map as M
import Data.Time.Clock
import Data.Time.Format
import qualified Data.Text as T
import System.Log.Logger

import Command
import qualified Annex
import qualified Annex.Url as Url
import qualified Remote
import qualified Types.Remote as Remote
import Types.UrlContents
import Logs.Web
import Logs.File
import qualified Utility.Format
import Utility.Tmp
import Utility.Metered
import Command.AddUrl (addUrlFile, downloadRemoteFile, parseDownloadOptions, DownloadOptions(..))
import Annex.UUID
import Backend.URL (fromUrl)
import Annex.Content
import Annex.YoutubeDl
import Types.MetaData
import Logs.MetaData
import Annex.MetaData
import Command.AddUrl (addWorkTree)

cmd :: Command
cmd = notBareRepo $
	command "importfeed" SectionCommon "import files from podcast feeds"
		(paramRepeating paramUrl) (seek <$$> optParser)

data ImportFeedOptions = ImportFeedOptions
	{ feedUrls :: CmdParams
	, templateOption :: Maybe String
	, downloadOptions :: DownloadOptions
	}

optParser :: CmdParamsDesc -> Parser ImportFeedOptions
optParser desc = ImportFeedOptions
	<$> cmdParams desc
	<*> optional (strOption
		( long "template" <> metavar paramFormat
		<> help "template for filenames"
		))
	<*> parseDownloadOptions False

seek :: ImportFeedOptions -> CommandSeek
seek o = do
	cache <- getCache (templateOption o)
	forM_ (feedUrls o) (getFeed o cache)

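{- Downloads the feed and downloads each item found in it.
 - Failures are recorded with feedProblem, so a feed that keeps
 - failing is eventually treated as broken. -}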
getFeed :: ImportFeedOptions -> Cache -> URLString -> CommandSeek
getFeed opts cache url = do
	showStart "importfeed" url
	downloadFeed url >>= \case
		Nothing -> showEndResult =<< feedProblem url
			"downloading the feed failed"
		Just feedcontent -> case parseFeedString feedcontent of
			Nothing -> debugfeedcontent feedcontent "parsing the feed failed"
			Just f -> case findDownloads url f of
				[] -> debugfeedcontent feedcontent "bad feed content; no enclosures to download"
				l -> do
					showEndOk
					ifM (and <$> mapM (performDownload opts cache) l)
						( clearFeedProblem url
						, void $ feedProblem url
							"problem downloading some item(s) from feed"
						)
  where
	debugfeedcontent feedcontent msg = do
		liftIO $ debugM "feed content" $ unlines
			[ "start of feed content"
			, feedcontent
			, "end of feed content"
			]
		showEndResult =<< feedProblem url
			(msg ++ " (use --debug to see the feed content that was downloaded)")

data ToDownload = ToDownload
	{ feed :: Feed
	, feedurl :: URLString
	, item :: Item
	, location :: DownloadLocation
	}

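{- An Enclosure is downloaded directly; a MediaLink points at a page
 - that youtube-dl may be able to extract media from
 - (see performDownload). -}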
data DownloadLocation = Enclosure URLString | MediaLink URLString

type ItemId = String

data Cache = Cache
	{ knownurls :: S.Set URLString
	, knownitems :: S.Set ItemId
	, template :: Utility.Format.Format
	}

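{- Builds a cache of the urls and feed item ids that have already been
 - imported, so they can be skipped. With --force nothing is skipped,
 - so the cache is left empty. -}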
getCache :: Maybe String -> Annex Cache
getCache opttemplate = ifM (Annex.getState Annex.force)
	( ret S.empty S.empty
	, do
		showStart "importfeed" "checking known urls"
		(is, us) <- unzip <$> (mapM knownItems =<< knownUrls)
		showEndOk
		ret (S.fromList us) (S.fromList (concat is))
	)
  where
	tmpl = Utility.Format.gen $ fromMaybe defaultTemplate opttemplate
	ret us is = return $ Cache us is tmpl

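{- Lists the feed item ids recorded in a key's metadata, filtering out
 - the "none" placeholder. -}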
knownItems :: (Key, URLString) -> Annex ([ItemId], URLString)
knownItems (k, u) = do
	itemids <- S.toList . S.filter (/= noneValue)
		. S.map (decodeBS . fromMetaValue)
		. currentMetaDataValues itemIdField
		<$> getCurrentMetaData k
	return (itemids, u)

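{- Each item in the feed that has an enclosure, or failing that a link,
 - is a candidate for download. -}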
findDownloads :: URLString -> Feed -> [ToDownload]
findDownloads u f = catMaybes $ map mk (feedItems f)
  where
	mk i = case getItemEnclosure i of
		Just (enclosureurl, _, _) ->
			Just $ ToDownload f u i $ Enclosure $
				T.unpack enclosureurl
		Nothing -> case getItemLink i of
			Just link -> Just $ ToDownload f u i $
				MediaLink $ T.unpack link
			Nothing -> Nothing

{- Feeds change, so a feed download cannot be resumed. -}
downloadFeed :: URLString -> Annex (Maybe String)
downloadFeed url
	| Url.parseURIRelaxed url == Nothing = giveup "invalid feed url"
	| otherwise = Url.withUrlOptions $ \uo ->
		liftIO $ withTmpFile "feed" $ \f h -> do
			hClose h
			ifM (Url.download nullMeterUpdate url f uo)
				( Just <$> readFileStrict f
				, return Nothing
				)

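{- Downloads one feed item, unless it is already known.
 - An Enclosure is downloaded as a regular url (or handed to a remote
 - that claims it); a MediaLink is first tried with youtube-dl, falling
 - back to treating it as an enclosure. -}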
performDownload :: ImportFeedOptions -> Cache -> ToDownload -> Annex Bool
performDownload opts cache todownload = case location todownload of
	Enclosure url -> checkknown url $
		rundownload url (takeWhile (/= '?') $ takeExtension url) $ \f -> do
			r <- Remote.claimingUrl url
			if Remote.uuid r == webUUID || rawOption (downloadOptions opts)
				then do
					urlinfo <- if relaxedOption (downloadOptions opts)
						then pure Url.assumeUrlExists
						else Url.withUrlOptions $
							liftIO . Url.getUrlInfo url
					let dlopts = (downloadOptions opts)
						-- force using the filename
						-- chosen here
						{ fileOption = Just f
						-- don't use youtube-dl
						, rawOption = True
						}
					maybeToList <$> addUrlFile dlopts url urlinfo f
				else do
					res <- tryNonAsync $ maybe
						(error $ "unable to checkUrl of " ++ Remote.name r)
						(flip id url)
						(Remote.checkUrl r)
					case res of
						Left _ -> return []
						Right (UrlContents sz _) ->
							maybeToList <$>
								downloadRemoteFile r (downloadOptions opts) url f sz
						Right (UrlMulti l) -> do
							kl <- forM l $ \(url', sz, subf) ->
								downloadRemoteFile r (downloadOptions opts) url' (f </> fromSafeFilePath subf) sz
							return $ if all isJust kl
								then catMaybes kl
								else []

	MediaLink linkurl -> do
		let mediaurl = setDownloader linkurl YoutubeDownloader
		let mediakey = Backend.URL.fromUrl mediaurl Nothing
		-- Old versions of git-annex that used quvi might have
		-- used the quviurl for this, so check if it's known
		-- to avoid adding it a second time.
		let quviurl = setDownloader linkurl QuviDownloader
		checkknown mediaurl $ checkknown quviurl $
			ifM (Annex.getState Annex.fast <||> pure (relaxedOption (downloadOptions opts)))
				( addmediafast linkurl mediaurl mediakey
				, downloadmedia linkurl mediaurl mediakey
				)
  where
	forced = Annex.getState Annex.force

	{- Avoids downloading any items that are already known to be
	 - associated with a file in the annex, unless forced. -}
	checkknown url a
		| knownitemid || S.member url (knownurls cache)
			= ifM forced (a, return True)
		| otherwise = a

	knownitemid = case getItemId (item todownload) of
		Just (_, itemid) ->
			S.member (T.unpack itemid) (knownitems cache)
		_ -> False

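	{- Downloads an url to a file picked by the template, and adds
	 - metadata to the resulting key(s). The getter action does the
	 - actual download, returning the keys it created. -}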
	rundownload url extension getter = do
		dest <- makeunique url (1 :: Integer) $
			feedFile (template cache) todownload extension
		case dest of
			Nothing -> return True
			Just f -> do
				showStart "addurl" url
				ks <- getter f
				if null ks
					then do
						showEndFail
						checkFeedBroken (feedurl todownload)
					else do
						forM_ ks $ \key ->
							ifM (annexGenMetaData <$> Annex.getGitConfig)
								( addMetaData key $ extractMetaData todownload
								, addMetaData key $ minimalMetaData todownload
								)
						showEndOk
						return True

	{- Find a unique filename to save the url to.
	 - If the file exists, prefixes it with a number.
	 - When forced, the file may already exist and have the same
	 - url, in which case Nothing is returned as it does not need
	 - to be re-downloaded. -}
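	{- For example, if "foo.mp3" already exists, "2_foo.mp3" is tried
	 - next, then "3_foo.mp3", and so on. -}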
	makeunique url n file = ifM alreadyexists
		( ifM forced
			( ifAnnexed f checksameurl tryanother
			, tryanother
			)
		, return $ Just f
		)
	  where
		f = if n < 2
			then file
			else
				let (d, base) = splitFileName file
				in d </> show n ++ "_" ++ base
		tryanother = makeunique url (n + 1) file
		alreadyexists = liftIO $ isJust <$> catchMaybeIO (getSymbolicLinkStatus f)
		checksameurl k = ifM (elem url <$> getUrls k)
			( return Nothing
			, tryanother
			)

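	{- Downloads a media link with youtube-dl into a temporary work
	 - directory, then moves the result into the tree. Falls back to
	 - downloading the link itself when youtube-dl does not support
	 - it. -}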
	downloadmedia linkurl mediaurl mediakey
		| rawOption (downloadOptions opts) = downloadlink
		| otherwise = do
			r <- withTmpWorkDir mediakey $ \workdir -> do
				dl <- youtubeDl linkurl workdir
				case dl of
					Right (Just mediafile) -> do
						let ext = case takeExtension mediafile of
							[] -> ".m"
							s -> s
						ok <- rundownload linkurl ext $ \f -> do
							addWorkTree webUUID mediaurl f mediakey (Just mediafile)
							return [mediakey]
						return (Just ok)
					-- youtube-dl didn't support it, so
					-- download it as if the link were
					-- an enclosure.
					Right Nothing -> Just <$> downloadlink
					Left msg -> do
						warning msg
						return Nothing
			return (fromMaybe False r)
	  where
		downloadlink = performDownload opts cache todownload
			{ location = Enclosure linkurl }

	addmediafast linkurl mediaurl mediakey =
		ifM (pure (not (rawOption (downloadOptions opts)))
			<&&> youtubeDlSupported linkurl)
			( rundownload linkurl ".m" $ \f -> do
				addWorkTree webUUID mediaurl f mediakey Nothing
				return [mediakey]
			, performDownload opts cache todownload
				{ location = Enclosure linkurl }
			)

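{- For example, with the default template an item titled "Episode1" with
 - an .mp3 enclosure, from a feed titled "MyCast", is saved as
 - "MyCast/Episode1.mp3". (Field values are additionally run through
 - sanitizeFilePath; see feedFile.) -}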
defaultTemplate :: String
defaultTemplate = "${feedtitle}/${itemtitle}${extension}"

{- Generates a filename to use for a feed item by filling out the template.
 - The filename may not be unique. -}
feedFile :: Utility.Format.Format -> ToDownload -> String -> FilePath
feedFile tmpl i extension = Utility.Format.format tmpl $
	M.map sanitizeFilePath $ M.fromList $ extractFields i ++
		[ ("extension", extension)
		, extractField "itempubdate" [pubdate $ item i]
		]
  where
	pubdate itm = case getItemPublishDate itm :: Maybe (Maybe UTCTime) of
		Just (Just d) -> Just $
			formatTime defaultTimeLocale "%F" d
		-- if date cannot be parsed, use the raw string
		_ -> replace "/" "-" . T.unpack
			<$> getItemPublishDateString itm

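{- Generates metadata from the feed and item fields. When the item has a
 - parseable publication date, year and month metadata is derived from
 - it as well. -}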
extractMetaData :: ToDownload -> MetaData
extractMetaData i = case getItemPublishDate (item i) :: Maybe (Maybe UTCTime) of
	Just (Just d) -> unionMetaData meta (dateMetaData d meta)
	_ -> meta
  where
	tometa (k, v) = (mkMetaFieldUnchecked (T.pack k), S.singleton (toMetaValue (encodeBS v)))
	meta = MetaData $ M.fromList $ map tometa $ extractFields i

minimalMetaData :: ToDownload -> MetaData
minimalMetaData i = case getItemId (item i) of
	Nothing -> emptyMetaData
	Just (_, itemid) -> MetaData $ M.singleton itemIdField
		(S.singleton $ toMetaValue $ encodeBS $ T.unpack itemid)

{- Extracts fields from the feed and item that are used both as metadata
 - and to generate the filename. -}
extractFields :: ToDownload -> [(String, String)]
extractFields i = map (uncurry extractField)
	[ ("feedtitle", [feedtitle])
	, ("itemtitle", [itemtitle])
	, ("feedauthor", [feedauthor])
	, ("itemauthor", [itemauthor])
	, ("itemsummary", [T.unpack <$> getItemSummary (item i)])
	, ("itemdescription", [T.unpack <$> getItemDescription (item i)])
	, ("itemrights", [T.unpack <$> getItemRights (item i)])
	, ("itemid", [T.unpack . snd <$> getItemId (item i)])
	, ("title", [itemtitle, feedtitle])
	, ("author", [itemauthor, feedauthor])
	]
  where
	feedtitle = Just $ T.unpack $ getFeedTitle $ feed i
	itemtitle = T.unpack <$> getItemTitle (item i)
	feedauthor = T.unpack <$> getFeedAuthor (feed i)
	itemauthor = T.unpack <$> getItemAuthor (item i)

itemIdField :: MetaField
itemIdField = mkMetaFieldUnchecked "itemid"

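{- Uses the first non-empty value available for a field, or "none" when
 - there is none. For example,
 - extractField "title" [Nothing, Just "hi"] is ("title", "hi"). -}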
extractField :: String -> [Maybe String] -> (String, String)
extractField k [] = (k, noneValue)
extractField k (Just v:_)
	| not (null v) = (k, v)
extractField k (_:rest) = extractField k rest

noneValue :: String
noneValue = "none"

{- Called when there is a problem with a feed.
 -
 - If the feed has been broken for some time,
 - returns False, otherwise only warns. -}
feedProblem :: URLString -> String -> Annex Bool
feedProblem url message = ifM (checkFeedBroken url)
	( do
		warning $ message ++ " (having repeated problems with feed: " ++ url ++ ")"
		return False
	, do
		warning $ "warning: " ++ message
		return True
	)

{- A feed is only broken if problems have occurred repeatedly, for at
 - least 23 hours. -}
checkFeedBroken :: URLString -> Annex Bool
checkFeedBroken url = checkFeedBroken' url =<< feedState url

checkFeedBroken' :: URLString -> FilePath -> Annex Bool
checkFeedBroken' url f = do
	prev <- maybe Nothing readish <$> liftIO (catchMaybeIO $ readFile f)
	now <- liftIO getCurrentTime
	case prev of
		Nothing -> do
			writeLogFile f $ show now
			return False
		Just prevtime -> do
			let broken = diffUTCTime now prevtime > 60 * 60 * 23
			when broken $
				-- Avoid repeatedly complaining about
				-- broken feed.
				clearFeedProblem url
			return broken

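{- The feed state file records the time of the first recent problem with
 - the feed; removing it clears the problem. -}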
clearFeedProblem :: URLString -> Annex ()
clearFeedProblem url = void $ liftIO . tryIO . removeFile =<< feedState url

feedState :: URLString -> Annex FilePath
feedState url = fromRepo $ gitAnnexFeedState $ fromUrl url Nothing