run codespell throughout fixing typos automagically
=== Do not change lines below === { "chain": [], "cmd": "codespell -w", "exit": 0, "extra_inputs": [], "inputs": [], "outputs": [], "pwd": "." } ^^^ Do not change lines above ^^^
parent aa9f9333ea
commit 87e2ae2014
31 changed files with 47 additions and 47 deletions
@@ -173,7 +173,7 @@ adjustToSymlink' gitannexlink ti@(TreeItem f _m s) = catKey s >>= \case
 -- This is a hidden branch ref, that's used as the basis for the AdjBranch,
 -- since pushes can overwrite the OrigBranch at any time. So, changes
--- are propigated from the AdjBranch to the head of the BasisBranch.
+-- are propagated from the AdjBranch to the head of the BasisBranch.
 newtype BasisBranch = BasisBranch Ref
 
 -- The basis for refs/heads/adjusted/master(unlocked) is
@@ -256,7 +256,7 @@ updateAdjustedBranch adj (AdjBranch currbranch) origbranch
 | not (adjustmentIsStable adj) = do
 (b, origheadfile, newheadfile) <- preventCommits $ \commitlck -> do
 -- Avoid losing any commits that the adjusted branch
--- has that have not yet been propigated back to the
+-- has that have not yet been propagated back to the
 -- origbranch.
 _ <- propigateAdjustedCommits' True origbranch adj commitlck
@@ -472,7 +472,7 @@ commitAdjustedTree' treesha (BasisBranch basis) parents =
 -- since that message is looked for later.
 -- After git-annex 10.20240227, it's possible to use
 -- commitTree instead of this, but this is being kept
--- for some time, for compatability with older versions.
+-- for some time, for compatibility with older versions.
 mkcommit cmode = Git.Branch.commitTreeExactMessage cmode
 adjustedBranchCommitMessage parents treesha
@@ -497,10 +497,10 @@ findAdjustingCommit (AdjBranch b) = go =<< catCommit b
 _ -> return Nothing
 
 {- Check for any commits present on the adjusted branch that have not yet
-- been propigated to the basis branch, and propagate them to the basis
+- been propagated to the basis branch, and propagate them to the basis
 - branch and from there on to the orig branch.
 -
-- After propigating the commits back to the basis branch,
+- After propagating the commits back to the basis branch,
 - rebase the adjusted branch on top of the updated basis branch.
 -}
 propigateAdjustedCommits :: OrigBranch -> Adjustment -> Annex ()
@@ -642,7 +642,7 @@ data AdjustedClone = InAdjustedClone | NotInAdjustedClone
 - checked out adjusted branch; the origin could have the two branches
 - out of sync (eg, due to another branch having been pushed to the origin's
 - origbranch), or due to a commit on its adjusted branch not having been
-- propigated back to origbranch.
+- propagated back to origbranch.
 -
 - So, find the adjusting commit on the currently checked out adjusted
 - branch, and use the parent of that commit as the basis, and set the
@@ -263,7 +263,7 @@ autoInitialize remotelist = getInitializedVersion >>= maybe needsinit checkUpgra
 initialize Nothing Nothing
 autoEnableSpecialRemotes remotelist
 
-{- Checks if a repository is initialized. Does not check version for ugrade. -}
+{- Checks if a repository is initialized. Does not check version for upgrade. -}
 isInitialized :: Annex Bool
 isInitialized = maybe Annex.Branch.hasSibling (const $ return True) =<< getVersion
@@ -131,7 +131,7 @@ import qualified Utility.RawFilePath as R
 - trailing path separator. Most code does not rely on that, but a few
 - things do.
 -
-- Everything else should not end in a trailing path sepatator.
+- Everything else should not end in a trailing path separator.
 -
 - Only functions (with names starting with "git") that build a path
 - based on a git repository should return full path relative to the git
@@ -46,7 +46,7 @@ setRemoteTrackingBranch tb commit =
 -
 - The second parent of the merge commit is the past history of the
 - RemoteTrackingBranch as imported from a remote. When importing a
-- history of trees from a remote, commits can be sythesized from
+- history of trees from a remote, commits can be synthesized from
 - them, but such commits won't have the same sha due to eg date differing.
 - But since we know that the second parent consists entirely of such
 - import commits, they can be reused when updating the
@@ -407,7 +407,7 @@ fromSshOptionsEnv = map Param . lines
 {- Enables ssh caching for git push/pull to a particular
 - remote git repo. (Can safely be used on non-ssh remotes.)
 -
-- Also propigates any configured ssh-options.
+- Also propagates any configured ssh-options.
 -
 - Like inRepo, the action is run with the local git repo.
 - But here it's a modified version, with gitEnv to set GIT_SSH=git-annex,
@@ -372,7 +372,7 @@ youtubePlaylist' url cmd = withTmpFile "yt-dlp" $ \tmpfile h -> do
 <$> B.readFile tmpfile
 return $ case partitionEithers v of
 ((parserr:_), _) ->
-Left $ "yt-dlp json parse errror: " ++ parserr
+Left $ "yt-dlp json parse error: " ++ parserr
 ([], r) -> Right r
 else return $ Left $ if null outerr
 then "yt-dlp failed"
@@ -102,7 +102,7 @@ checkNetMonitor client = do
 networkd = "org.freedesktop.network1"
 wicd = "org.wicd.daemon"
 
-{- Listens for systemd-networkd connections and diconnections.
+{- Listens for systemd-networkd connections and disconnections.
 -
 - Connection example (once fully connected):
 - [Variant {"OperationalState": Variant "routable"}]
@@ -128,7 +128,7 @@ listenNDConnections client setconnected =
 else setconnected False
 Nothing -> noop
 
-{- Listens for NetworkManager connections and diconnections.
+{- Listens for NetworkManager connections and disconnections.
 -
 - Connection example (once fully connected):
 - [Variant {"ActivatingConnection": Variant (ObjectPath "/"), "PrimaryConnection": Variant (ObjectPath "/org/freedesktop/NetworkManager/ActiveConnection/34"), "State": Variant 70}]
@@ -77,7 +77,7 @@ upgradedEnv = "GIT_ANNEX_UPGRADED"
 -
 - Creates the destination directory where the upgrade will be installed
 - early, in order to check if another upgrade has happened (or is
-- happending). On failure, the directory is removed.
+- happening). On failure, the directory is removed.
 -}
 startDistributionDownload :: GitAnnexDistribution -> Assistant ()
 startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO oldVersionLocation
@@ -41,7 +41,7 @@ backendVURL = Backend
 Nothing -> pure False
 anyM check eks
 , verifyKeyContentIncrementally = Just $ \k -> do
--- Run incremental verifiers for each equivilant key together,
+-- Run incremental verifiers for each equivalent key together,
 -- and see if any of them succeed.
 eks <- equivkeys k
 let get = \ek -> getbackend ek >>= \case
@@ -53,7 +53,7 @@ backendVURL = Backend
 return $ IncrementalVerifier
 { updateIncrementalVerifier = \s ->
 forM_ l $ flip updateIncrementalVerifier s
--- If there are no equivilant keys recorded somehow,
+-- If there are no equivalent keys recorded somehow,
 -- or if none of them support incremental verification,
 -- this will return Nothing, which indicates that
 -- incremental verification was not able to be
@@ -80,9 +80,9 @@ backendVURL = Backend
 -- Not all keys using this backend are necessarily
 -- cryptographically secure.
 , isCryptographicallySecure = False
--- A key is secure when all recorded equivilant keys are.
+-- A key is secure when all recorded equivalent keys are.
 -- If there are none recorded yet, it's secure because when
--- downloaded, an equivilant key that is cryptographically secure
+-- downloaded, an equivalent key that is cryptographically secure
 -- will be constructed then.
 , isCryptographicallySecureKey = \k ->
 equivkeys k >>= \case
@@ -95,7 +95,7 @@ backendVURL = Backend
 }
 where
 equivkeys k = filter allowedequiv <$> getEquivilantKeys k
--- Don't allow using VURL keys as equivilant keys, because that
+-- Don't allow using VURL keys as equivalent keys, because that
 -- could let a crafted git-annex branch cause an infinite loop.
 allowedequiv ek = fromKey keyVariety ek /= VURLKey
 varietymap = makeVarietyMap regularBackendList
@@ -1,5 +1,5 @@
 {- Generating and installing a desktop menu entry file and icon,
-- and a desktop autostart file. (And OSX equivilants.)
+- and a desktop autostart file. (And OSX equivalents.)
 -
 - Copyright 2012 Joey Hess <id@joeyh.name>
 -
@@ -1,5 +1,5 @@
 {- Generating and installing a desktop menu entry file and icon,
-- and a desktop autostart file. (And OSX equivilants.)
+- and a desktop autostart file. (And OSX equivalents.)
 -
 - Copyright 2012 Joey Hess <id@joeyh.name>
 -
@@ -111,7 +111,7 @@ checkHiddenService = bracket setup cleanup go
 -- we just want to know if the tor circuit works.
 liftIO (tryNonAsync $ connectPeer g addr) >>= \case
 Left e -> do
-warning $ UnquotedString $ "Unable to connect to hidden service. It may not yet have propigated to the Tor network. (" ++ show e ++ ") Will retry.."
+warning $ UnquotedString $ "Unable to connect to hidden service. It may not yet have propagated to the Tor network. (" ++ show e ++ ") Will retry.."
 liftIO $ threadDelaySeconds (Seconds 2)
 check (n-1) addrs
 Right conn -> do
@@ -573,7 +573,7 @@ playlistFields u i = map (uncurry extractField)
 , ("itemtitle", [youtube_title i])
 , ("feedauthor", [youtube_playlist_uploader i])
 , ("itemauthor", [youtube_playlist_uploader i])
--- itemsummary omitted, no equivilant in yt-dlp data
+-- itemsummary omitted, no equivalent in yt-dlp data
 , ("itemdescription", [youtube_description i])
 , ("itemrights", [youtube_license i])
 , ("itemid", [youtube_url i])
@@ -169,7 +169,7 @@ startAll o outputter = do
 - same key. The method is to compare each value with the value
 - after it in the list, which is the old version of the value.
 -
-- This ncessarily buffers the whole list, so does not stream.
+- This necessarily buffers the whole list, so does not stream.
 - But, the number of location log changes for a single key tends to be
 - fairly small.
 -
@@ -377,7 +377,7 @@ sizeHistoryInfo mu o = do
 -- time across all git-annex repositories.
 --
 -- This combines the new location log with what has been
--- accumulated so far, which is equivilant to merging together
+-- accumulated so far, which is equivalent to merging together
 -- all git-annex branches at that point in time.
 update k sizemap locmap (oldlog, oldlocs) newlog =
 ( updatesize (updatesize sizemap sz (S.toList addedlocs))
@@ -490,7 +490,7 @@ sizeHistoryInfo mu o = do
 
 posminus a b = max 0 (a - b)
 
--- A verison of sizemap where uuids that are currently dead
+-- A version of sizemap where uuids that are currently dead
 -- have 0 size.
 sizemap' = M.mapWithKey zerodead sizemap
 zerodead u v = case M.lookup u (simpleMap trustlog) of
@@ -40,7 +40,7 @@ start (_, key) = do
 )
 where
 {- No need to do any rollback; when sendAnnex fails, a nonzero
-- exit will be propigated, and the remote will know the transfer
+- exit will be propagated, and the remote will know the transfer
 - failed. -}
 rollback = noop
@@ -267,7 +267,7 @@ seek' o = startConcurrency transferStages $ do
 
 remotes <- syncRemotes (syncWith o)
 warnSyncContentTransition o remotes
--- Remotes that are git repositories, not (necesarily) special remotes.
+-- Remotes that are git repositories, not (necessarily) special remotes.
 let gitremotes = filter (Remote.gitSyncableRemoteType . Remote.remotetype) remotes
 -- Remotes that contain annex object content.
 contentremotes <- filter (\r -> Remote.uuid r /= NoUUID)
@@ -978,7 +978,7 @@ seekExportContent :: Maybe SyncOptions -> [Remote] -> CurrBranch -> Annex Bool
 seekExportContent o rs (mcurrbranch, madj)
 | null rs = return False
 | otherwise = do
--- Propigate commits from the adjusted branch, so that
+-- Propagate commits from the adjusted branch, so that
 -- when the remoteAnnexTrackingBranch is set to the parent
 -- branch, it will be up-to-date.
 case (mcurrbranch, madj) of
@@ -70,7 +70,7 @@ closeDb (DbHandle _db worker jobs _) = do
 - changes to the database!
 -
 - Note that the action is not run by the calling thread, but by a
-- worker thread. Exceptions are propigated to the calling thread.
+- worker thread. Exceptions are propagated to the calling thread.
 -
 - Only one action can be run at a time against a given DbHandle.
 - If called concurrently in the same process, this will block until
@@ -491,7 +491,7 @@ reconcileStaged dbisnew qh = ifM isBareRepo
 
 -- How large is large? Too large and there will be a long
 -- delay before the message is shown; too short and the message
--- will clutter things up unncessarily. It's uncommon for 1000
+-- will clutter things up unnecessarily. It's uncommon for 1000
 -- files to change in the index, and processing that many files
 -- takes less than half a second, so that seems about right.
 largediff :: Int
Git.hs
@@ -167,7 +167,7 @@ relPath = adjustPath torel
 p' <- relPathCwdToFile p
 return $ if B.null p' then "." else p'
 
-{- Adusts the path to a local Repo using the provided function. -}
+{- Adjusts the path to a local Repo using the provided function. -}
 adjustPath :: (RawFilePath -> IO RawFilePath) -> Repo -> IO Repo
 adjustPath f r@(Repo { location = l@(Local { gitdir = d, worktree = w }) }) = do
 d' <- f d
Logs.hs
@@ -211,7 +211,7 @@ chunkLogFile config key =
 chunkLogExt :: S.ByteString
 chunkLogExt = ".log.cnk"
 
-{- The filename of the equivilant keys log for a given key. -}
+{- The filename of the equivalent keys log for a given key. -}
 equivilantKeysLogFile :: GitConfig -> Key -> RawFilePath
 equivilantKeysLogFile config key =
 (branchHashDir config key P.</> keyFile key)
@@ -1,4 +1,4 @@
-{- Logs listing keys that are equivilant to a key.
+{- Logs listing keys that are equivalent to a key.
 -
 - Copyright 2024 Joey Hess <id@joeyh.name>
 -
@@ -267,7 +267,7 @@ setupConsole = do
 hSetBuffering stderr LineBuffering
 #ifdef mingw32_HOST_OS
 {- Avoid outputting CR at end of line on Windows. git commands do
-- not ouput CR there. -}
+- not output CR there. -}
 hSetNewlineMode stdout noNewlineTranslation
 hSetNewlineMode stderr noNewlineTranslation
 #endif
@@ -353,7 +353,7 @@ mkPrompter = getConcurrency >>= \case
 (const $ run a)
 
 {- Catch all (non-async and not ExitCode) exceptions and display,
-- santizing any control characters in the exceptions.
+- sanitizing any control characters in the exceptions.
 -
 - Exits nonzero on exception, so should only be used at topmost level.
 -}
@@ -348,7 +348,7 @@ listImportableContentsM serial adir c = adbfind >>= \case
 mk _ = Nothing
 
 -- This does not guard against every possible race. As long as the adb
--- connection is resonably fast, it's probably as good as
+-- connection is reasonably fast, it's probably as good as
 -- git's handling of similar situations with files being modified while
 -- it's updating the working tree for a merge.
 retrieveExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> ExportLocation -> [ContentIdentifier] -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
@@ -413,7 +413,7 @@ mkContentIdentifier (IgnoreInodes ii) f st =
 
 -- Since ignoreinodes can be changed by enableremote, and since previous
 -- versions of git-annex ignored inodes by default, treat two content
--- idenfiers as the same if they differ only by one having the inode
+-- identifiers as the same if they differ only by one having the inode
 -- ignored.
 guardSameContentIdentifiers :: a -> [ContentIdentifier] -> Maybe ContentIdentifier -> a
 guardSameContentIdentifiers _ _ Nothing = giveup "file not found"
@@ -60,7 +60,7 @@ data KeyData = Key
 
 instance NFData KeyData
 
-{- Caching the seralization of a key is an optimization.
+{- Caching the serialization of a key is an optimization.
 -
 - This constructor is not exported, and all smart constructors maintain
 - the serialization.
@@ -27,9 +27,9 @@ data BwRate = BwRate ByteSize Duration
 
 -- Parse eg, "0KiB/60s"
 --
--- Also, it can be set to "true" (or other git config equivilants)
+-- Also, it can be set to "true" (or other git config equivalents)
 -- to enable ProbeStallDetection.
--- And "false" (and other git config equivilants) explicitly
+-- And "false" (and other git config equivalents) explicitly
 -- disable stall detection.
 parseStallDetection :: String -> Either String StallDetection
 parseStallDetection s = case isTrueFalse s of
@@ -69,7 +69,7 @@ mkGpgCmd Nothing = GpgCmd (fromMaybe "gpg" BuildInfo.gpg)
 boolGpgCmd :: GpgCmd -> [CommandParam] -> IO Bool
 boolGpgCmd (GpgCmd cmd) = boolSystem cmd
 
--- Generate an argument list to asymetrically encrypt to the given recipients.
+-- Generate an argument list to asymmetrically encrypt to the given recipients.
 pkEncTo :: [String] -> [CommandParam]
 pkEncTo = concatMap (\r -> [Param "--recipient", Param r])
@@ -253,7 +253,7 @@ describeMatchResult descop l prefix = Just $
 go (MatchedOpen:rest) = "(" : go rest
 go (MatchedClose:rest) = ")" : go rest
 
--- Remove unncessary outermost parens
+-- Remove unnecessary outermost parens
 simplify True (MatchedOpen:rest) = case lastMaybe rest of
 Just MatchedClose -> simplify False (dropFromEnd 1 rest)
 _ -> simplify False rest
@@ -48,7 +48,7 @@ absPathFrom dir path = simplifyPath (combine dir path)
 - already exists. -}
 absPath :: RawFilePath -> IO RawFilePath
 absPath file
--- Avoid unncessarily getting the current directory when the path
+-- Avoid unnecessarily getting the current directory when the path
 -- is already absolute. absPathFrom uses simplifyPath
 -- so also used here for consistency.
 | isAbsolute file = return $ simplifyPath file
@@ -32,7 +32,7 @@ import Prelude
 - characters, except for ones in surrogate plane. Converting a string that
 - does contain other unicode characters to a ByteString using the
 - filesystem encoding (see GHC.IO.Encoding) will throw an exception,
-- so use this instead to avoid quickcheck tests breaking unncessarily.
+- so use this instead to avoid quickcheck tests breaking unnecessarily.
 -}
 newtype TestableString = TestableString
 { fromTestableString :: String }
@@ -46,7 +46,7 @@ instance Arbitrary TestableString where
 -
 - No real-world filename can be empty or contain a NUL. So code can
 - well be written that assumes that and using this avoids quickcheck
-- tests breaking unncessarily.
+- tests breaking unnecessarily.
 -}
 newtype TestableFilePath = TestableFilePath
 { fromTestableFilePath :: FilePath }
|
@ -58,7 +58,7 @@ newtype Armoring = Armoring Bool
|
|||
- This is unfortunately needed because of an infelicity in the SOP
|
||||
- standard, as documented in section 9.9 "Be Careful with Special
|
||||
- Designators", when using "@FD:" and similar designators the SOP
|
||||
- command may test for the presense of a file with the same name on the
|
||||
- command may test for the presence of a file with the same name on the
|
||||
- filesystem, and fail with AMBIGUOUS_INPUT.
|
||||
-
|
||||
- Since we don't want to need to deal with such random failure due to
|
||||
|
|