importfeed: Support --json and --json-error-messages and --json-progress

Sponsored-By: the NIH-funded NICEMAN (ReproNim TR&D3) project
This commit is contained in:
Joey Hess 2023-05-09 16:43:16 -04:00
parent 6b54ea69e3
commit 7919349cee
No known key found for this signature in database
GPG key ID: DB12DB0FF05F8F38
4 changed files with 34 additions and 17 deletions

View file

@@ -38,15 +38,16 @@ git-annex (10.20230408) UNRELEASED; urgency=medium
   * initremote: Avoid creating a remote that is not encrypted when gpg is
     broken.
   * Support --json and --json-error-messages in more commands
-    (addunused, dead, describe, dropunused, expire, fix, init, log, migrate,
-    reinit, reinject, rekey, renameremote, rmurl, semitrust, setpresentkey,
-    trust, unannex, undo, untrust, unused)
+    (addunused, dead, describe, dropunused, expire, fix, importfeed, init,
+    log, migrate, reinit, reinject, rekey, renameremote, rmurl, semitrust,
+    setpresentkey, trust, unannex, undo, untrust, unused)
   * log: When --raw-date is used, display only seconds from the epoch, as
     documented, omitting a trailing "s" that was included in the output
     before.
   * addunused: Displays the names of the files that it adds.
   * reinject: Fix support for operating on multiple pairs of files and keys.
   * importfeed: Support -J
+  * importfeed: Support --json-progress

  -- Joey Hess <id@joeyh.name>  Sat, 08 Apr 2023 13:57:18 -0400

View file

@@ -54,9 +54,11 @@ import Logs
 import qualified Utility.RawFilePath as R

 cmd :: Command
-cmd = notBareRepo $ withAnnexOptions [jobsOption, backendOption] $
+cmd = notBareRepo $ withAnnexOptions os $
 	command "importfeed" SectionCommon "import files from podcast feeds"
 		(paramRepeating paramUrl) (seek <$$> optParser)
+  where
+	os = [jobsOption, jsonOptions, jsonProgressOption, backendOption]

 data ImportFeedOptions = ImportFeedOptions
 	{ feedUrls :: CmdParams
@@ -139,7 +141,7 @@ getFeed
 	-> TMVar (M.Map URLString (Maybe (Maybe [ToDownload])))
 	-> CommandStart
 getFeed url st =
-	starting "importfeed" (ActionItemOther (Just (UnquotedString url))) (SeekInput []) $
+	starting "importfeed" (ActionItemOther (Just (UnquotedString url))) (SeekInput [url]) $
 		get `onException` recordfail
   where
 	record v = liftIO $ atomically $ do
@@ -210,9 +212,12 @@ getCache :: Maybe String -> Annex Cache
 getCache opttemplate = ifM (Annex.getRead Annex.force)
 	( ret S.empty S.empty
 	, do
-		showStartMessage (StartMessage "importfeed" (ActionItemOther (Just "gathering known urls")) (SeekInput []))
+		j <- jsonOutputEnabled
+		unless j $
+			showStartMessage (StartMessage "importfeed" (ActionItemOther (Just "gathering known urls")) (SeekInput []))
 		(us, is) <- knownItems
-		showEndOk
+		unless j
+			showEndOk
 		ret (S.fromList us) (S.fromList is)
 	)
   where
@@ -295,7 +300,7 @@ startDownload addunlockedmatcher opts cache cv todownload = case location todown
 	recordsuccess = liftIO $ atomically $ putTMVar cv True

 	startdownloadenclosure :: URLString -> CommandStart
-	startdownloadenclosure url = checkknown url $ startUrlDownload cv url $
+	startdownloadenclosure url = checkknown url $ startUrlDownload cv todownload url $
 		downloadEnclosure addunlockedmatcher opts cache cv todownload url

 	knownitemid = case getItemId (item todownload) of
@@ -306,7 +311,7 @@ startDownload addunlockedmatcher opts cache cv todownload = case location todown
 	downloadmedia linkurl mediaurl mediakey
 		| rawOption (downloadOptions opts) = startdownloadlink
 		| otherwise = ifM (youtubeDlSupported linkurl)
-			( startUrlDownload cv linkurl $
+			( startUrlDownload cv todownload linkurl $
 				withTmpWorkDir mediakey $ \workdir -> do
 					dl <- youtubeDl linkurl (fromRawFilePath workdir) nullMeterUpdate
 					case dl of
@@ -336,7 +341,7 @@ startDownload addunlockedmatcher opts cache cv todownload = case location todown
 	addmediafast linkurl mediaurl mediakey =
 		ifM (pure (not (rawOption (downloadOptions opts)))
 			<&&> youtubeDlSupported linkurl)
-			( startUrlDownload cv linkurl $ do
+			( startUrlDownload cv todownload linkurl $ do
 				runDownload todownload linkurl ".m" cache cv $ \f ->
 					checkCanAdd (downloadOptions opts) f $ \canadd -> do
 						addWorkTree canadd addunlockedmatcher webUUID mediaurl f mediakey Nothing
@@ -453,10 +458,10 @@ runDownload todownload url extension cache cv getter = do
 		, tryanother
 		)

-startUrlDownload :: TMVar Bool -> URLString -> CommandPerform -> CommandStart
-startUrlDownload cv url a = starting "addurl"
+startUrlDownload :: TMVar Bool -> ToDownload -> URLString -> CommandPerform -> CommandStart
+startUrlDownload cv todownload url a = starting "addurl"
 	(ActionItemOther (Just (UnquotedString url)))
-	(SeekInput [])
+	(SeekInput [feedurl todownload])
 	(a `onException` recordfailure)
   where
 	recordfailure = do

View file

@@ -112,6 +112,20 @@ resulting in the new url being downloaded to such a filename.

   Specifies which key-value backend to use.

+* `--json`
+
+  Enable JSON output. This is intended to be parsed by programs that use
+  git-annex. Each line of output is a JSON object.
+
+* `--json-progress`
+
+  Include progress objects in JSON output.
+
+* `--json-error-messages`
+
+  Messages that would normally be output to standard error are included in
+  the JSON instead.
+
 * Also the [[git-annex-common-options]](1) can be used.

 # SEE ALSO
# SEE ALSO # SEE ALSO

View file

@@ -36,6 +36,7 @@ These commands have been updated to support --json:
 * git-annex-reinit
 * git-annex-reinject
 * git-annex-renameremote
+* git-annex-importfeed

 Provisional list of commands that don't support --json and maybe should:
@@ -55,10 +56,6 @@ These commands could support json, but I punted:
 * git-annex-version (--raw already exists, and the output is fairly machine
   parseable already. It would be possible to jsonize the output to make it
   possibly more machine parseable. But I'm doubtful that would be useful.
-* git-annex-importfeed (implemented w/o using usual command actions,
-  which makes warning messages not get put in any particular json record.
-  Same problem would also need to be fixed for [[doc/todo/importfeed_parallell]]
-  btw.)

 These commands have been reviewed and should not support json: