{- git-annex command
 -
 - Copyright 2013-2021 Joey Hess <id@joeyh.name>
 -
 - Licensed under the GNU AGPL version 3 or higher.
 -}

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}

module Command.ImportFeed where

import Text.Feed.Import
import Text.Feed.Query
import Text.Feed.Types
import qualified Data.Set as S
import qualified Data.Map as M
import Data.Time.Clock
import Data.Time.Format
import Data.Time.Calendar
import Data.Time.LocalTime
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified System.FilePath.ByteString as P
import qualified Data.ByteString as B

import Command
import qualified Annex
import qualified Annex.Url as Url
import qualified Remote
import qualified Types.Remote as Remote
import Types.UrlContents
import Logs.Web
import Logs.File
import qualified Utility.Format
import Utility.Tmp
import Utility.Metered
import Command.AddUrl (addUrlFile, downloadRemoteFile, parseDownloadOptions, DownloadOptions(..), checkCanAdd)
import Annex.UUID
import Backend.URL (fromUrl)
import Annex.Content
import Annex.WorkTree
import Annex.YoutubeDl
import Types.MetaData
import Logs.MetaData
import Annex.MetaData
import Annex.FileMatcher
import Command.AddUrl (addWorkTree, checkRaw)
import Annex.UntrustedFilePath
import qualified Annex.Branch
import Logs

cmd :: Command
cmd = notBareRepo $ withAnnexOptions [backendOption] $
	command "importfeed" SectionCommon "import files from podcast feeds"
		(paramRepeating paramUrl) (seek <$$> optParser)
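-- For illustration, a typical invocation looks something like:
--   git annex importfeed --template='${feedtitle}/${itemtitle}${extension}' <url>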

data ImportFeedOptions = ImportFeedOptions
	{ feedUrls :: CmdParams
	, templateOption :: Maybe String
	, downloadOptions :: DownloadOptions
	}

optParser :: CmdParamsDesc -> Parser ImportFeedOptions
optParser desc = ImportFeedOptions
	<$> cmdParams desc
	<*> optional (strOption
		( long "template" <> metavar paramFormat
		<> help "template for filenames"
		))
	<*> parseDownloadOptions False

seek :: ImportFeedOptions -> CommandSeek
seek o = do
	addunlockedmatcher <- addUnlockedMatcher
	cache <- getCache (templateOption o)
	forM_ (feedUrls o) (getFeed addunlockedmatcher o cache)

getFeed :: AddUnlockedMatcher -> ImportFeedOptions -> Cache -> URLString -> CommandSeek
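-- Downloads the feed to a temporary file, parses it, and performs the
-- download of each item found in it, recording a feed problem when
-- anything goes wrong.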
getFeed addunlockedmatcher opts cache url = do
	showStartOther "importfeed" (Just url) (SeekInput [])
	withTmpFile "feed" $ \tmpf h -> do
		liftIO $ hClose h
		ifM (downloadFeed url tmpf)
			( go tmpf
			, showEndResult =<< feedProblem url
				"downloading the feed failed"
			)
  where
	-- Use parseFeedFromFile rather than reading the file
	-- ourselves because it goes out of its way to handle encodings.
	go tmpf = liftIO (parseFeedFromFile' tmpf) >>= \case
		Nothing -> debugfeedcontent tmpf "parsing the feed failed"
		Just f -> case findDownloads url f of
			[] -> debugfeedcontent tmpf "bad feed content; no enclosures to download"
			l -> do
				showEndOk
				ifM (and <$> mapM (performDownload addunlockedmatcher opts cache) l)
					( clearFeedProblem url
					, void $ feedProblem url
						"problem downloading some item(s) from feed"
					)
	debugfeedcontent tmpf msg = do
		feedcontent <- liftIO $ readFile tmpf
		fastDebug "Command.ImportFeed" $ unlines
			[ "start of feed content"
			, feedcontent
			, "end of feed content"
			]
		showEndResult =<< feedProblem url
			(msg ++ " (use --debug --debugfilter=ImportFeed to see the feed content that was downloaded)")

parseFeedFromFile' :: FilePath -> IO (Maybe Feed)
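-- With older versions of the feed library, parseFeedFromFile can throw
-- an exception on bad input rather than returning Nothing, so there it
-- is wrapped with catchMaybeIO.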
#if MIN_VERSION_feed(1,1,0)
parseFeedFromFile' = parseFeedFromFile
#else
parseFeedFromFile' f = catchMaybeIO (parseFeedFromFile f)
#endif

data ToDownload = ToDownload
	{ feed :: Feed
	, feedurl :: URLString
	, item :: Item
	, location :: DownloadLocation
	}

data DownloadLocation = Enclosure URLString | MediaLink URLString

type ItemId = String

data Cache = Cache
	{ knownurls :: S.Set URLString
	, knownitems :: S.Set ItemId
	, template :: Utility.Format.Format
	}

getCache :: Maybe String -> Annex Cache
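-- With --force, the cache is left empty, so that everything in the feed
-- is downloaded again.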
getCache opttemplate = ifM (Annex.getRead Annex.force)
	( ret S.empty S.empty
	, do
		showStart "importfeed" "gathering known urls" (SeekInput [])
		(us, is) <- knownItems
		showEndOk
		ret (S.fromList us) (S.fromList is)
	)
  where
	tmpl = Utility.Format.gen $ fromMaybe defaultTemplate opttemplate
	ret us is = return $ Cache us is tmpl

{- Scan all url logs and metadata logs in the branch and find urls
 - and ItemIds that are already known. -}
knownItems :: Annex ([URLString], [ItemId])
knownItems = Annex.Branch.overBranchFileContents select (go [] []) >>= \case
	Just r -> return r
	Nothing -> giveup "This repository is read-only."
  where
	select f
		| isUrlLog f = Just ()
		| isMetaDataLog f = Just ()
		| otherwise = Nothing

	go uc ic reader = reader >>= \case
		Just ((), f, Just content)
			| isUrlLog f -> case parseUrlLog content of
				[] -> go uc ic reader
				us -> go (us++uc) ic reader
			| isMetaDataLog f ->
				let s = currentMetaDataValues itemIdField $
					parseCurrentMetaData content
				in if S.null s
					then go uc ic reader
					else go uc (map (decodeBS . fromMetaValue) (S.toList s)++ic) reader
			| otherwise -> go uc ic reader
		Just ((), _, Nothing) -> go uc ic reader
		Nothing -> return (uc, ic)

findDownloads :: URLString -> Feed -> [ToDownload]
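-- An item's enclosure is preferred; when there is none, its link is
-- used instead, in case it points at something youtube-dl can handle.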
findDownloads u f = catMaybes $ map mk (feedItems f)
  where
	mk i = case getItemEnclosure i of
		Just (enclosureurl, _, _) ->
			Just $ ToDownload f u i $ Enclosure $
				decodeBS $ fromFeedText enclosureurl
		Nothing -> case getItemLink i of
			Just l -> Just $ ToDownload f u i $
				MediaLink $ decodeBS $ fromFeedText l
			Nothing -> Nothing

{- Feeds change, so a feed download cannot be resumed. -}
downloadFeed :: URLString -> FilePath -> Annex Bool
downloadFeed url f
	| Url.parseURIRelaxed url == Nothing = giveup "invalid feed url"
	| otherwise = Url.withUrlOptions $
		Url.download nullMeterUpdate Nothing url f

performDownload :: AddUnlockedMatcher -> ImportFeedOptions -> Cache -> ToDownload -> Annex Bool
performDownload = performDownload' False

performDownload' :: Bool -> AddUnlockedMatcher -> ImportFeedOptions -> Cache -> ToDownload -> Annex Bool
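-- Downloads a single item. The Bool indicates whether a start message
-- has already been displayed for it. Enclosures are added much as addurl
-- would add them; media links are handed to youtube-dl when possible.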
performDownload' started addunlockedmatcher opts cache todownload = case location todownload of
	Enclosure url -> checkknown url $ do
		starturl url
		rundownload url (takeWhile (/= '?') $ takeExtension url) $ \f -> do
			let f' = fromRawFilePath f
			r <- Remote.claimingUrl url
			if Remote.uuid r == webUUID || rawOption (downloadOptions opts)
				then checkRaw (Just url) (downloadOptions opts) Nothing $ do
					let dlopts = (downloadOptions opts)
						-- force using the filename
						-- chosen here
						{ fileOption = Just f'
						-- don't use youtube-dl
						, rawOption = True
						}
					let go urlinfo = Just . maybeToList <$> addUrlFile addunlockedmatcher dlopts url urlinfo f
					if relaxedOption (downloadOptions opts)
						then go Url.assumeUrlExists
						else Url.withUrlOptions (Url.getUrlInfo url) >>= \case
							Right urlinfo -> go urlinfo
							Left err -> do
								warning err
								return (Just [])
				else do
					res <- tryNonAsync $ maybe
						(error $ "unable to checkUrl of " ++ Remote.name r)
						(flip id url)
						(Remote.checkUrl r)
					case res of
						Left _ -> return (Just [])
						Right (UrlContents sz _) ->
							Just . maybeToList <$>
								downloadRemoteFile addunlockedmatcher r (downloadOptions opts) url f sz
						Right (UrlMulti l) -> do
							kl <- forM l $ \(url', sz, subf) ->
								let dest = f P.</> toRawFilePath (sanitizeFilePath subf)
								in downloadRemoteFile addunlockedmatcher r (downloadOptions opts) url' dest sz
							return $ Just $ if all isJust kl
								then catMaybes kl
								else []

	MediaLink linkurl -> do
		let mediaurl = setDownloader linkurl YoutubeDownloader
		let mediakey = Backend.URL.fromUrl mediaurl Nothing
		-- Old versions of git-annex that used quvi might have
		-- used the quviurl for this, so check if it's known
		-- to avoid adding it a second time.
		let quviurl = setDownloader linkurl QuviDownloader
		checkknown mediaurl $ checkknown quviurl $
			ifM (Annex.getRead Annex.fast <||> pure (relaxedOption (downloadOptions opts)))
				( addmediafast linkurl mediaurl mediakey
				, downloadmedia linkurl mediaurl mediakey
				)
  where
	forced = Annex.getRead Annex.force

	{- Avoids downloading any items that are already known to be
	 - associated with a file in the annex, unless forced. -}
	checkknown url a
		| knownitemid || S.member url (knownurls cache)
			= ifM forced (a, return True)
		| otherwise = a

	knownitemid = case getItemId (item todownload) of
		Just (_, itemid) ->
			S.member (decodeBS $ fromFeedText itemid) (knownitems cache)
		_ -> False

	rundownload url extension getter = do
		dest <- makeunique url (1 :: Integer) $
			feedFile (template cache) todownload extension
		case dest of
			Nothing -> return True
			Just f -> getter (toRawFilePath f) >>= \case
				Just ks
					-- Download problem.
					| null ks -> do
						showEndFail
						checkFeedBroken (feedurl todownload)
					| otherwise -> do
						forM_ ks $ \key ->
							ifM (annexGenMetaData <$> Annex.getGitConfig)
								( addMetaData key $ extractMetaData todownload
								, addMetaData key $ minimalMetaData todownload
								)
						showEndOk
						return True
				-- Was not able to add anything, but not
				-- because of a download problem.
				Nothing -> do
					showEndFail
					return False

	{- Find a unique filename to save the url to.
	 - If the file exists, prefixes it with a number.
	 - When forced, the file may already exist and have the same
	 - url, in which case Nothing is returned as it does not need
	 - to be re-downloaded. -}
	makeunique url n file = ifM alreadyexists
		( ifM forced
			( lookupKey (toRawFilePath f) >>= \case
				Just k -> checksameurl k
				Nothing -> tryanother
			, tryanother
			)
		, return $ Just f
		)
	  where
		f = if n < 2
			then file
			else
				let (d, base) = splitFileName file
				in d </> show n ++ "_" ++ base
		tryanother = makeunique url (n + 1) file
		alreadyexists = liftIO $ isJust <$> catchMaybeIO (getSymbolicLinkStatus f)
		checksameurl k = ifM (elem url <$> getUrls k)
			( return Nothing
			, tryanother
			)

	downloadmedia linkurl mediaurl mediakey
		| rawOption (downloadOptions opts) = downloadlink False
		| otherwise = ifM (youtubeDlSupported linkurl)
			( do
				starturl linkurl
				r <- withTmpWorkDir mediakey $ \workdir -> do
					dl <- youtubeDl linkurl (fromRawFilePath workdir) nullMeterUpdate
					case dl of
						Right (Just mediafile) -> do
							let ext = case takeExtension mediafile of
								[] -> ".m"
								s -> s
							ok <- rundownload linkurl ext $ \f ->
								checkCanAdd (downloadOptions opts) f $ \canadd -> do
									addWorkTree canadd addunlockedmatcher webUUID mediaurl f mediakey (Just (toRawFilePath mediafile))
									return (Just [mediakey])
							return (Just ok)
						-- youtube-dl didn't support it, so
						-- download it as if the link were
						-- an enclosure.
						Right Nothing -> Just <$> downloadlink True
						Left msg -> do
							warning $ linkurl ++ ": " ++ msg
							return Nothing
				return (fromMaybe False r)
			, downloadlink False
			)
	  where
		downloadlink started' = checkRaw (Just linkurl) (downloadOptions opts) False $
			performDownload' started' addunlockedmatcher opts cache todownload
				{ location = Enclosure linkurl }

	addmediafast linkurl mediaurl mediakey =
		ifM (pure (not (rawOption (downloadOptions opts)))
			<&&> youtubeDlSupported linkurl)
			( do
				starturl linkurl
				rundownload linkurl ".m" $ \f ->
					checkCanAdd (downloadOptions opts) f $ \canadd -> do
						addWorkTree canadd addunlockedmatcher webUUID mediaurl f mediakey Nothing
						return (Just [mediakey])
			, performDownload' started addunlockedmatcher opts cache todownload
				{ location = Enclosure linkurl }
			)

	starturl u = unless started $
		showStartOther "addurl" (Just u) (SeekInput [])

defaultTemplate :: String
defaultTemplate = "${feedtitle}/${itemtitle}${extension}"
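-- With the default template, an item might (for example) be saved as
-- "Some Feed Title/Some Episode Title.mp3".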

{- Generates a filename to use for a feed item by filling out the template.
 - The filename may not be unique. -}
feedFile :: Utility.Format.Format -> ToDownload -> String -> FilePath
feedFile tmpl i extension = sanitizeLeadingFilePathCharacter $
	Utility.Format.format tmpl $
		M.map sanitizeFilePathComponent $ M.fromList $ extractFields i ++
			[ ("extension", extension)
			, extractField "itempubdate" [itempubdate]
			, extractField "itempubyear" [itempubyear]
			, extractField "itempubmonth" [itempubmonth]
			, extractField "itempubday" [itempubday]
			, extractField "itempubhour" [itempubhour]
			, extractField "itempubminute" [itempubminute]
			, extractField "itempubsecond" [itempubsecond]
			]
  where
	itm = item i

	pubdate = case getItemPublishDate itm :: Maybe (Maybe UTCTime) of
		Just (Just d) -> Just d
		_ -> Nothing

	itempubdate = case pubdate of
		Just pd -> Just $
			formatTime defaultTimeLocale "%F" pd
		-- if date cannot be parsed, use the raw string
		Nothing -> replace "/" "-" . decodeBS . fromFeedText
			<$> getItemPublishDateString itm

	(itempubyear, itempubmonth, itempubday) = case pubdate of
		Nothing -> (Nothing, Nothing, Nothing)
		Just pd ->
			let (y, m, d) = toGregorian (utctDay pd)
			in (Just (show y), Just (show m), Just (show d))

	(itempubhour, itempubminute, itempubsecond) = case pubdate of
		Nothing -> (Nothing, Nothing, Nothing)
		Just pd ->
			let tod = timeToTimeOfDay (utctDayTime pd)
			in ( Just (show (todHour tod))
			, Just (show (todMin tod))
			-- avoid fractional seconds
			, Just (takeWhile (/= '.') (show (todSec tod)))
			)

extractMetaData :: ToDownload -> MetaData
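-- All extracted fields are added as metadata. When the item has a
-- publication date, year/month/day fields are derived from it as well.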
extractMetaData i = case getItemPublishDate (item i) :: Maybe (Maybe UTCTime) of
	Just (Just d) -> unionMetaData meta (dateMetaData d meta)
	_ -> meta
  where
	tometa (k, v) = (mkMetaFieldUnchecked (T.pack k), S.singleton (toMetaValue (encodeBS v)))
	meta = MetaData $ M.fromList $ map tometa $ extractFields i

minimalMetaData :: ToDownload -> MetaData
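-- Only the item id is stored, which is enough for knownItems to avoid
-- importing the same item again later.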
minimalMetaData i = case getItemId (item i) of
	(Nothing) -> emptyMetaData
	(Just (_, itemid)) -> MetaData $ M.singleton itemIdField
		(S.singleton $ toMetaValue $ fromFeedText itemid)

{- Extract fields from the feed and item, that are both used as metadata,
 - and to generate the filename. -}
extractFields :: ToDownload -> [(String, String)]
extractFields i = map (uncurry extractField)
	[ ("feedtitle", [feedtitle])
	, ("itemtitle", [itemtitle])
	, ("feedauthor", [feedauthor])
	, ("itemauthor", [itemauthor])
	, ("itemsummary", [decodeBS . fromFeedText <$> getItemSummary (item i)])
	, ("itemdescription", [decodeBS . fromFeedText <$> getItemDescription (item i)])
	, ("itemrights", [decodeBS . fromFeedText <$> getItemRights (item i)])
	, ("itemid", [decodeBS . fromFeedText . snd <$> getItemId (item i)])
	, ("title", [itemtitle, feedtitle])
	, ("author", [itemauthor, feedauthor])
	]
  where
	feedtitle = Just $ decodeBS $ fromFeedText $ getFeedTitle $ feed i
	itemtitle = decodeBS . fromFeedText <$> getItemTitle (item i)
	feedauthor = decodeBS . fromFeedText <$> getFeedAuthor (feed i)
	itemauthor = decodeBS . fromFeedText <$> getItemAuthor (item i)

itemIdField :: MetaField
itemIdField = mkMetaFieldUnchecked "itemid"

extractField :: String -> [Maybe String] -> (String, String)
extractField k [] = (k, noneValue)
extractField k (Just v:_)
	| not (null v) = (k, v)
extractField k (_:rest) = extractField k rest

noneValue :: String
noneValue = "none"

{- Called when there is a problem with a feed.
 -
 - If the feed has been broken for some time,
 - returns False, otherwise only warns. -}
feedProblem :: URLString -> String -> Annex Bool
feedProblem url message = ifM (checkFeedBroken url)
	( do
		warning $ message ++ " (having repeated problems with feed: " ++ url ++ ")"
		return False
	, do
		warning $ "warning: " ++ message
		return True
	)

{- A feed is only broken if problems have occurred repeatedly, for at
 - least 23 hours. -}
checkFeedBroken :: URLString -> Annex Bool
checkFeedBroken url = checkFeedBroken' url =<< feedState url
checkFeedBroken' :: URLString -> RawFilePath -> Annex Bool
checkFeedBroken' url f = do
	prev <- maybe Nothing readish
		<$> liftIO (catchMaybeIO $ readFile (fromRawFilePath f))
	now <- liftIO getCurrentTime
	case prev of
		Nothing -> do
			writeLogFile f $ show now
			return False
		Just prevtime -> do
			let broken = diffUTCTime now prevtime > 60 * 60 * 23
			when broken $
				-- Avoid repeatedly complaining about
				-- broken feed.
				clearFeedProblem url
			return broken

clearFeedProblem :: URLString -> Annex ()
clearFeedProblem url =
	void $ liftIO . tryIO . removeFile . fromRawFilePath
		=<< feedState url

feedState :: URLString -> Annex RawFilePath
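-- Per-feed state file, used to remember when a feed started having
-- problems.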
feedState url = fromRepo $ gitAnnexFeedState $ fromUrl url Nothing

{- The feed library parses the feed to Text, and does not use the
 - filesystem encoding to do it, so when the locale is not unicode
 - capable, a Text value can still include unicode characters.
 -
 - So, it's not safe to use T.unpack to convert that to a String,
 - because later use of that String by eg encodeBS will crash
 - with an encoding error. Use this instead.
 -
 - This should not be used on a Text that is read using the
 - filesystem encoding because it does not reverse that encoding.
 -}
fromFeedText :: T.Text -> B.ByteString
fromFeedText = TE.encodeUtf8